hexsha
stringlengths
40
40
size
int64
2
1.05M
content
stringlengths
2
1.05M
avg_line_length
float64
1.33
100
max_line_length
int64
1
1k
alphanum_fraction
float64
0.25
1
610d1d8af3b5e8fbe834540b9509b87c61142ca2
2,096
//! Tidy check to ensure that there are no binaries checked into the source tree //! by accident. //! //! In the past we've accidentally checked in test binaries and such which add a //! huge amount of bloat to the Git history, so it's good to just ensure we //! don't do that again. use std::path::Path; // All files are executable on Windows, so just check on Unix. #[cfg(windows)] pub fn check(_path: &Path, _bad: &mut bool) {} #[cfg(unix)] pub fn check(path: &Path, bad: &mut bool) { use std::fs; use std::process::{Command, Stdio}; use std::os::unix::prelude::*; if let Ok(contents) = fs::read_to_string("/proc/version") { // Probably on Windows Linux Subsystem or Docker via VirtualBox, // all files will be marked as executable, so skip checking. if contents.contains("Microsoft") || contents.contains("boot2docker") { return; } } super::walk(path, &mut |path| super::filter_dirs(path) || path.ends_with("src/etc"), &mut |file| { let filename = file.file_name().unwrap().to_string_lossy(); let extensions = [".py", ".sh"]; if extensions.iter().any(|e| filename.ends_with(e)) { return; } let metadata = t!(fs::symlink_metadata(&file), &file); if metadata.mode() & 0o111 != 0 { let rel_path = file.strip_prefix(path).unwrap(); let git_friendly_path = rel_path.to_str().unwrap().replace("\\", "/"); let output = Command::new("git") .arg("ls-files") .arg(&git_friendly_path) .current_dir(path) .stderr(Stdio::null()) .output() .unwrap_or_else(|e| { panic!("could not run git ls-files: {}", e); }); let path_bytes = rel_path.as_os_str().as_bytes(); if output.status.success() && output.stdout.starts_with(path_bytes) { tidy_error!(bad, "binary checked into source: {}", file.display()); } } }) }
36.77193
83
0.561069
fb95220ae315ead20877758240da798eaf171681
285,327
#![doc = "generated by AutoRust"] #![allow(unused_mut)] #![allow(unused_variables)] #![allow(unused_imports)] use super::models; #[derive(Clone)] pub struct Client { endpoint: String, credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>, scopes: Vec<String>, pipeline: azure_core::Pipeline, } #[derive(Clone)] pub struct ClientBuilder { credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>, endpoint: Option<String>, scopes: Option<Vec<String>>, } pub const DEFAULT_ENDPOINT: &str = azure_core::resource_manager_endpoint::AZURE_PUBLIC_CLOUD; impl ClientBuilder { pub fn new(credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>) -> Self { Self { credential, endpoint: None, scopes: None, } } pub fn endpoint(mut self, endpoint: impl Into<String>) -> Self { self.endpoint = Some(endpoint.into()); self } pub fn scopes(mut self, scopes: &[&str]) -> Self { self.scopes = Some(scopes.iter().map(|scope| (*scope).to_owned()).collect()); self } pub fn build(self) -> Client { let endpoint = self.endpoint.unwrap_or_else(|| DEFAULT_ENDPOINT.to_owned()); let scopes = self.scopes.unwrap_or_else(|| vec![format!("{}/", endpoint)]); Client::new(endpoint, self.credential, scopes) } } impl Client { pub(crate) fn endpoint(&self) -> &str { self.endpoint.as_str() } pub(crate) fn token_credential(&self) -> &dyn azure_core::auth::TokenCredential { self.credential.as_ref() } pub(crate) fn scopes(&self) -> Vec<&str> { self.scopes.iter().map(String::as_str).collect() } pub(crate) async fn send(&self, request: impl Into<azure_core::Request>) -> azure_core::error::Result<azure_core::Response> { let mut context = azure_core::Context::default(); let mut request = request.into(); self.pipeline.send(&mut context, &mut request).await } pub fn new( endpoint: impl Into<String>, credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>, scopes: Vec<String>, ) -> Self { let endpoint = endpoint.into(); let pipeline = azure_core::Pipeline::new( 
option_env!("CARGO_PKG_NAME"), option_env!("CARGO_PKG_VERSION"), azure_core::ClientOptions::default(), Vec::new(), Vec::new(), ); Self { endpoint, credential, scopes, pipeline, } } pub fn available_service_tiers(&self) -> available_service_tiers::Client { available_service_tiers::Client(self.clone()) } pub fn clusters(&self) -> clusters::Client { clusters::Client(self.clone()) } pub fn data_exports(&self) -> data_exports::Client { data_exports::Client(self.clone()) } pub fn data_sources(&self) -> data_sources::Client { data_sources::Client(self.clone()) } pub fn deleted_workspaces(&self) -> deleted_workspaces::Client { deleted_workspaces::Client(self.clone()) } pub fn gateways(&self) -> gateways::Client { gateways::Client(self.clone()) } pub fn intelligence_packs(&self) -> intelligence_packs::Client { intelligence_packs::Client(self.clone()) } pub fn linked_services(&self) -> linked_services::Client { linked_services::Client(self.clone()) } pub fn linked_storage_accounts(&self) -> linked_storage_accounts::Client { linked_storage_accounts::Client(self.clone()) } pub fn management_groups(&self) -> management_groups::Client { management_groups::Client(self.clone()) } pub fn operation_statuses(&self) -> operation_statuses::Client { operation_statuses::Client(self.clone()) } pub fn operations(&self) -> operations::Client { operations::Client(self.clone()) } pub fn saved_searches(&self) -> saved_searches::Client { saved_searches::Client(self.clone()) } pub fn schema(&self) -> schema::Client { schema::Client(self.clone()) } pub fn shared_keys(&self) -> shared_keys::Client { shared_keys::Client(self.clone()) } pub fn storage_insight_configs(&self) -> storage_insight_configs::Client { storage_insight_configs::Client(self.clone()) } pub fn tables(&self) -> tables::Client { tables::Client(self.clone()) } pub fn usages(&self) -> usages::Client { usages::Client(self.clone()) } pub fn workspace_purge(&self) -> workspace_purge::Client { workspace_purge::Client(self.clone()) } pub 
fn workspaces(&self) -> workspaces::Client { workspaces::Client(self.clone()) } } #[non_exhaustive] #[derive(Debug, thiserror :: Error)] #[allow(non_camel_case_types)] pub enum Error { #[error(transparent)] Operations_List(#[from] operations::list::Error), #[error(transparent)] Workspaces_List(#[from] workspaces::list::Error), #[error(transparent)] Workspaces_ListByResourceGroup(#[from] workspaces::list_by_resource_group::Error), #[error(transparent)] Workspaces_Get(#[from] workspaces::get::Error), #[error(transparent)] Workspaces_CreateOrUpdate(#[from] workspaces::create_or_update::Error), #[error(transparent)] Workspaces_Update(#[from] workspaces::update::Error), #[error(transparent)] Workspaces_Delete(#[from] workspaces::delete::Error), #[error(transparent)] DeletedWorkspaces_List(#[from] deleted_workspaces::list::Error), #[error(transparent)] DeletedWorkspaces_ListByResourceGroup(#[from] deleted_workspaces::list_by_resource_group::Error), #[error(transparent)] Tables_ListByWorkspace(#[from] tables::list_by_workspace::Error), #[error(transparent)] Tables_Get(#[from] tables::get::Error), #[error(transparent)] Tables_CreateOrUpdate(#[from] tables::create_or_update::Error), #[error(transparent)] Tables_Update(#[from] tables::update::Error), #[error(transparent)] Tables_Delete(#[from] tables::delete::Error), #[error(transparent)] Tables_Migrate(#[from] tables::migrate::Error), #[error(transparent)] DataExports_ListByWorkspace(#[from] data_exports::list_by_workspace::Error), #[error(transparent)] DataExports_Get(#[from] data_exports::get::Error), #[error(transparent)] DataExports_CreateOrUpdate(#[from] data_exports::create_or_update::Error), #[error(transparent)] DataExports_Delete(#[from] data_exports::delete::Error), #[error(transparent)] DataSources_Get(#[from] data_sources::get::Error), #[error(transparent)] DataSources_CreateOrUpdate(#[from] data_sources::create_or_update::Error), #[error(transparent)] DataSources_Delete(#[from] data_sources::delete::Error), 
#[error(transparent)] DataSources_ListByWorkspace(#[from] data_sources::list_by_workspace::Error), #[error(transparent)] IntelligencePacks_Disable(#[from] intelligence_packs::disable::Error), #[error(transparent)] IntelligencePacks_Enable(#[from] intelligence_packs::enable::Error), #[error(transparent)] IntelligencePacks_List(#[from] intelligence_packs::list::Error), #[error(transparent)] LinkedServices_Get(#[from] linked_services::get::Error), #[error(transparent)] LinkedServices_CreateOrUpdate(#[from] linked_services::create_or_update::Error), #[error(transparent)] LinkedServices_Delete(#[from] linked_services::delete::Error), #[error(transparent)] LinkedServices_ListByWorkspace(#[from] linked_services::list_by_workspace::Error), #[error(transparent)] LinkedStorageAccounts_Get(#[from] linked_storage_accounts::get::Error), #[error(transparent)] LinkedStorageAccounts_CreateOrUpdate(#[from] linked_storage_accounts::create_or_update::Error), #[error(transparent)] LinkedStorageAccounts_Delete(#[from] linked_storage_accounts::delete::Error), #[error(transparent)] LinkedStorageAccounts_ListByWorkspace(#[from] linked_storage_accounts::list_by_workspace::Error), #[error(transparent)] ManagementGroups_List(#[from] management_groups::list::Error), #[error(transparent)] OperationStatuses_Get(#[from] operation_statuses::get::Error), #[error(transparent)] SharedKeys_GetSharedKeys(#[from] shared_keys::get_shared_keys::Error), #[error(transparent)] SharedKeys_Regenerate(#[from] shared_keys::regenerate::Error), #[error(transparent)] Usages_List(#[from] usages::list::Error), #[error(transparent)] StorageInsightConfigs_Get(#[from] storage_insight_configs::get::Error), #[error(transparent)] StorageInsightConfigs_CreateOrUpdate(#[from] storage_insight_configs::create_or_update::Error), #[error(transparent)] StorageInsightConfigs_Delete(#[from] storage_insight_configs::delete::Error), #[error(transparent)] StorageInsightConfigs_ListByWorkspace(#[from] 
storage_insight_configs::list_by_workspace::Error), #[error(transparent)] SavedSearches_Get(#[from] saved_searches::get::Error), #[error(transparent)] SavedSearches_CreateOrUpdate(#[from] saved_searches::create_or_update::Error), #[error(transparent)] SavedSearches_Delete(#[from] saved_searches::delete::Error), #[error(transparent)] SavedSearches_ListByWorkspace(#[from] saved_searches::list_by_workspace::Error), #[error(transparent)] AvailableServiceTiers_ListByWorkspace(#[from] available_service_tiers::list_by_workspace::Error), #[error(transparent)] Gateways_Delete(#[from] gateways::delete::Error), #[error(transparent)] Schema_Get(#[from] schema::get::Error), #[error(transparent)] WorkspacePurge_Purge(#[from] workspace_purge::purge::Error), #[error(transparent)] WorkspacePurge_GetPurgeStatus(#[from] workspace_purge::get_purge_status::Error), #[error(transparent)] Clusters_ListByResourceGroup(#[from] clusters::list_by_resource_group::Error), #[error(transparent)] Clusters_List(#[from] clusters::list::Error), #[error(transparent)] Clusters_Get(#[from] clusters::get::Error), #[error(transparent)] Clusters_CreateOrUpdate(#[from] clusters::create_or_update::Error), #[error(transparent)] Clusters_Update(#[from] clusters::update::Error), #[error(transparent)] Clusters_Delete(#[from] clusters::delete::Error), } pub mod operations { use super::models; pub struct Client(pub(crate) super::Client); impl Client { pub fn list(&self) -> list::Builder { list::Builder { client: self.0.clone() } } } pub mod list { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] 
GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::OperationListResult, Error>> { Box::pin(async move { let url_str = &format!("{}/providers/Microsoft.OperationalInsights/operations", self.client.endpoint(),); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2021-12-01-preview"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::OperationListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| 
Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } } pub mod workspaces { use super::models; pub struct Client(pub(crate) super::Client); impl Client { pub fn list(&self, subscription_id: impl Into<String>) -> list::Builder { list::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), } } pub fn list_by_resource_group( &self, resource_group_name: impl Into<String>, subscription_id: impl Into<String>, ) -> list_by_resource_group::Builder { list_by_resource_group::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), subscription_id: subscription_id.into(), } } pub fn get( &self, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, subscription_id: impl Into<String>, ) -> get::Builder { get::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), subscription_id: subscription_id.into(), } } pub fn create_or_update( &self, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, parameters: impl Into<models::Workspace>, subscription_id: impl Into<String>, ) -> create_or_update::Builder { create_or_update::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), parameters: parameters.into(), subscription_id: subscription_id.into(), } } pub fn update( &self, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, parameters: impl Into<models::WorkspacePatch>, subscription_id: impl Into<String>, ) -> update::Builder { update::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), parameters: parameters.into(), subscription_id: subscription_id.into(), } } pub fn delete( &self, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, subscription_id: impl Into<String>, ) -> 
delete::Builder { delete::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), subscription_id: subscription_id.into(), force: None, } } } pub mod list { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::WorkspaceListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/providers/Microsoft.OperationalInsights/workspaces", self.client.endpoint(), &self.subscription_id ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2021-12-01-preview"); let req_body = 
azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::WorkspaceListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod list_by_resource_group { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, 
std::result::Result<models::WorkspaceListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces", self.client.endpoint(), &self.subscription_id, &self.resource_group_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2021-12-01-preview"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::WorkspaceListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod get { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] 
url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Workspace, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2021-12-01-preview"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = 
azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::Workspace = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod create_or_update { use super::models; #[derive(Debug)] pub enum Response { Ok200(models::Workspace), Created201(models::Workspace), Accepted202, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) parameters: models::Workspace, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}", 
self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2021-12-01-preview"); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::Workspace = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(Response::Ok200(rsp_value)) } http::StatusCode::CREATED => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::Workspace = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(Response::Created201(rsp_value)) } http::StatusCode::ACCEPTED => Ok(Response::Accepted202), status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) 
} } }) } } } pub mod update { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) parameters: models::WorkspacePatch, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Workspace, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PATCH); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2021-12-01-preview"); req_builder = 
req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::Workspace = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod delete { use super::models; #[derive(Debug)] pub enum Response { Ok200, Accepted202, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) 
resource_group_name: String, pub(crate) workspace_name: String, pub(crate) subscription_id: String, pub(crate) force: Option<bool>, } impl Builder { pub fn force(mut self, force: bool) -> Self { self.force = Some(force); self } pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2021-12-01-preview"); if let Some(force) = &self.force { url.query_pairs_mut().append_pair("force", &force.to_string()); } let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => Ok(Response::Ok200), http::StatusCode::ACCEPTED => Ok(Response::Accepted202), http::StatusCode::NO_CONTENT => Ok(Response::NoContent204), status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } } pub mod 
// `deleted_workspaces` operation group: read-only listing of workspaces in a
// soft-deleted state, either subscription-wide (`list`) or scoped to a resource
// group (`list_by_resource_group`). Each operation is a generated sub-module
// containing its own Error enum and a Builder whose `into_future` performs the
// HTTP call (GET, api-version 2021-12-01-preview) and deserializes a
// models::WorkspaceListResult on 200, or models::ErrorResponse otherwise.
deleted_workspaces { use super::models; pub struct Client(pub(crate) super::Client); impl Client { pub fn list(&self, subscription_id: impl Into<String>) -> list::Builder { list::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), } } pub fn list_by_resource_group( &self, resource_group_name: impl Into<String>, subscription_id: impl Into<String>, ) -> list_by_resource_group::Builder { list_by_resource_group::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), subscription_id: subscription_id.into(), } } } pub mod list { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::WorkspaceListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/providers/Microsoft.OperationalInsights/deletedWorkspaces", self.client.endpoint(), &self.subscription_id ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let
// The following line finishes `list::Builder::into_future` (bearer-token header,
// GET send, 200 -> WorkspaceListResult) and then opens the
// `list_by_resource_group` sub-module with its generated Error enum.
credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2021-12-01-preview"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::WorkspaceListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod list_by_resource_group { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error),
// Last Error variant, then the `list_by_resource_group::Builder`: same GET flow
// as `list`, with the URL additionally scoped by resource group.
#[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::WorkspaceListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/deletedWorkspaces", self.client.endpoint(), &self.subscription_id, &self.resource_group_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2021-12-01-preview"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::WorkspaceListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
// Closes `list_by_resource_group` and `deleted_workspaces`; then the `tables`
// operation group begins: CRUD plus `migrate` over workspace tables. The Client
// impl below only builds per-operation Builders; no I/O happens until
// `into_future` is awaited.
Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } } pub mod tables { use super::models; pub struct Client(pub(crate) super::Client); impl Client { pub fn list_by_workspace( &self, subscription_id: impl Into<String>, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, ) -> list_by_workspace::Builder { list_by_workspace::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), } } pub fn get( &self, subscription_id: impl Into<String>, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, table_name: impl Into<String>, ) -> get::Builder { get::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), table_name: table_name.into(), } } pub fn create_or_update( &self, subscription_id: impl Into<String>, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, table_name: impl Into<String>, parameters: impl Into<models::Table>, ) -> create_or_update::Builder { create_or_update::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), table_name: table_name.into(), parameters: parameters.into(), } } pub fn update( &self, subscription_id: impl Into<String>, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, table_name: impl Into<String>, parameters: impl Into<models::Table>, ) -> update::Builder { update::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), table_name: table_name.into(), parameters: parameters.into(), } } pub fn delete( &self, subscription_id: impl Into<String>, resource_group_name: impl Into<String>, workspace_name: impl
// Continuation of the `tables` Client impl: tail of the `delete` signature, then
// `migrate` (table-plan migration trigger). The `list_by_workspace` sub-module
// follows with the usual generated Error enum and Builder.
Into<String>, table_name: impl Into<String>, ) -> delete::Builder { delete::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), table_name: table_name.into(), } } pub fn migrate( &self, subscription_id: impl Into<String>, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, table_name: impl Into<String>, ) -> migrate::Builder { migrate::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), table_name: table_name.into(), } } } pub mod list_by_workspace { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) subscription_id: String, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::TablesListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/tables",
// `list_by_workspace::into_future`: GET .../tables with api-version
// 2021-12-01-preview; 200 -> models::TablesListResult. The `get` sub-module
// (single table by name) starts at the end of this line.
self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2021-12-01-preview"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::TablesListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod get { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to
// `get::Builder::into_future`: GET .../tables/{tableName}; 200 deserializes to
// models::Table, any other status to models::ErrorResponse.
get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) subscription_id: String, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) table_name: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Table, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/tables/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name, &self.table_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2021-12-01-preview"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::Table = serde_json::from_slice(&rsp_body).map_err(|source|
// End of `get`; then `create_or_update`: a PUT whose Response enum models a
// long-running operation — 200 carries the resulting Table, 202 means the
// service accepted the change asynchronously (no body).
Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod create_or_update { use super::models; #[derive(Debug)] pub enum Response { Ok200(models::Table), Accepted202, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) subscription_id: String, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) table_name: String, pub(crate) parameters: models::Table, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/tables/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name, &self.table_name ); let mut url =
// `create_or_update::into_future` continues: PUT with the Table serialized as
// the JSON body (content-type application/json), api-version 2021-12-01-preview.
url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2021-12-01-preview"); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::Table = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(Response::Ok200(rsp_value)) } http::StatusCode::ACCEPTED => Ok(Response::Accepted202), status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod update { use super::models; #[derive(Debug)] pub enum Response { Ok200(models::Table), Accepted202, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")]
// `update` sub-module: identical shape to `create_or_update`, but issued as
// PATCH (partial update) instead of PUT; same Response enum (200 with Table,
// 202 accepted).
BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) subscription_id: String, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) table_name: String, pub(crate) parameters: models::Table, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/tables/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name, &self.table_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PATCH); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2021-12-01-preview"); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let
// `update` response handling, then the `delete` sub-module: DELETE on the table
// with three success statuses (200, 204 no-content, 202 accepted/async).
(rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::Table = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(Response::Ok200(rsp_value)) } http::StatusCode::ACCEPTED => Ok(Response::Accepted202), status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod delete { use super::models; #[derive(Debug)] pub enum Response { Ok200, NoContent204, Accepted202, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) subscription_id: String, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) table_name: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
// `delete::into_future` body, then the `migrate` sub-module begins (POST
// .../tables/{tableName}/migrate; returns unit on 200).
Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/tables/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name, &self.table_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2021-12-01-preview"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => Ok(Response::Ok200), http::StatusCode::NO_CONTENT => Ok(Response::NoContent204), http::StatusCode::ACCEPTED => Ok(Response::Accepted202), status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod migrate { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")]
// `migrate` Error enum continues, then its Builder: a body-less POST. Note the
// explicit `Content-Length: 0` header — required by some services for POSTs
// with an empty body.
Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) subscription_id: String, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) table_name: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<(), Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/tables/{}/migrate", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name, &self.table_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2021-12-01-preview"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => Ok(()), status_code => { let rsp_body =
// End of `migrate` and of the `tables` group; the `data_exports` operation group
// begins (list/get/create_or_update/delete over workspace data-export rules).
azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } } pub mod data_exports { use super::models; pub struct Client(pub(crate) super::Client); impl Client { pub fn list_by_workspace( &self, subscription_id: impl Into<String>, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, ) -> list_by_workspace::Builder { list_by_workspace::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), } } pub fn get( &self, subscription_id: impl Into<String>, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, data_export_name: impl Into<String>, ) -> get::Builder { get::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), data_export_name: data_export_name.into(), } } pub fn create_or_update( &self, subscription_id: impl Into<String>, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, data_export_name: impl Into<String>, parameters: impl Into<models::DataExport>, ) -> create_or_update::Builder { create_or_update::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), data_export_name: data_export_name.into(), parameters: parameters.into(), } } pub fn delete( &self, subscription_id: impl Into<String>, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, data_export_name: impl Into<String>, ) -> delete::Builder { delete::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), resource_group_name:
// Tail of the `delete` builder construction, then the `list_by_workspace`
// sub-module. NOTE(review): this operation group pins api-version "2020-08-01"
// while `tables`/workspaces above use "2021-12-01-preview" — presumably the
// generator emits each group's own latest API version; confirm against the
// service spec before changing.
resource_group_name.into(), workspace_name: workspace_name.into(), data_export_name: data_export_name.into(), } } } pub mod list_by_workspace { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) subscription_id: String, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::DataExportListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/dataExports", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
// `list_by_workspace` response handling (200 -> DataExportListResult), then the
// `get` sub-module begins.
url.query_pairs_mut().append_pair("api-version", "2020-08-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::DataExportListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod get { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) subscription_id: String, pub(crate) resource_group_name: String, pub(crate) workspace_name: String,
// `data_exports::get::Builder`: GET .../dataExports/{dataExportName},
// api-version 2020-08-01; 200 -> models::DataExport.
pub(crate) data_export_name: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::DataExport, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/dataExports/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name, &self.data_export_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2020-08-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::DataExport = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod create_or_update { use super::models; #[derive(Debug)] pub enum Response { Ok200(models::DataExport),
// `create_or_update`: PUT with JSON body; both 200 (updated) and 201 (created)
// carry a DataExport in the response body, modeled as separate Response variants.
Created201(models::DataExport), } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) subscription_id: String, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) data_export_name: String, pub(crate) parameters: models::DataExport, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/dataExports/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name, &self.data_export_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version",
// `create_or_update` response handling, then the `delete` sub-module's Error
// enum begins (truncated at the end of this chunk). NOTE(review): the
// NotFound404 variant's display string is "Error response #response_type" —
// this looks like an unsubstituted AutoRust template placeholder; the fix
// belongs in the code generator, not in this generated file.
"2020-08-01"); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::DataExport = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(Response::Ok200(rsp_value)) } http::StatusCode::CREATED => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::DataExport = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(Response::Created201(rsp_value)) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod delete { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Error response #response_type")] NotFound404 {}, #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) subscription_id: String, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) data_export_name: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<(), Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/dataExports/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name, &self.data_export_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2020-08-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => Ok(()), http::StatusCode::NOT_FOUND => Err(Error::NotFound404 {}), status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| 
Error::Deserialize(source, rsp_body.clone()))?;
                        // Unexpected status: surface the service's error payload.
                        Err(Error::DefaultResponse { status_code, value: rsp_value, })
                    }
                }
            })
        }
    }
}
}
/// Operations on workspace data sources
/// (`.../workspaces/{workspaceName}/dataSources`).
pub mod data_sources {
    use super::models;
    pub struct Client(pub(crate) super::Client);
    impl Client {
        /// Returns a builder for fetching a single data source.
        pub fn get(
            &self,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
            data_source_name: impl Into<String>,
            subscription_id: impl Into<String>,
        ) -> get::Builder {
            get::Builder {
                client: self.0.clone(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
                data_source_name: data_source_name.into(),
                subscription_id: subscription_id.into(),
            }
        }
        /// Returns a builder for creating or updating a data source.
        pub fn create_or_update(
            &self,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
            data_source_name: impl Into<String>,
            parameters: impl Into<models::DataSource>,
            subscription_id: impl Into<String>,
        ) -> create_or_update::Builder {
            create_or_update::Builder {
                client: self.0.clone(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
                data_source_name: data_source_name.into(),
                parameters: parameters.into(),
                subscription_id: subscription_id.into(),
            }
        }
        /// Returns a builder for deleting a data source.
        pub fn delete(
            &self,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
            data_source_name: impl Into<String>,
            subscription_id: impl Into<String>,
        ) -> delete::Builder {
            delete::Builder {
                client: self.0.clone(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
                data_source_name: data_source_name.into(),
                subscription_id: subscription_id.into(),
            }
        }
        /// Returns a builder for listing data sources; `filter` is the
        /// mandatory OData `$filter` expression, `skiptoken` is optional.
        pub fn list_by_workspace(
            &self,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
            filter: impl Into<String>,
            subscription_id: impl Into<String>,
        ) -> list_by_workspace::Builder {
            list_by_workspace::Builder {
                client: self.0.clone(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
                filter: filter.into(),
                subscription_id: subscription_id.into(),
                skiptoken: None,
            }
        }
    }
    /// `GET .../dataSources/{dataSourceName}` — fetch a single data source.
    pub mod get
    {
        use super::models;
        /// Failure modes; unlike the data-export modules, non-success bodies
        /// here are returned raw in `UnexpectedResponse` rather than parsed.
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Request builder; constructed by `data_sources::Client::get`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) resource_group_name: String,
            pub(crate) workspace_name: String,
            pub(crate) data_source_name: String,
            pub(crate) subscription_id: String,
        }
        impl Builder {
            /// Sends the GET request and deserializes the `DataSource`.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::DataSource, Error>> {
                Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/dataSources/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.workspace_name,
                        &self.data_source_name
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::GET);
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2020-08-01");
                    let req_body = azure_core::EMPTY_BODY;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::DataSource = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            Err(Error::UnexpectedResponse { status_code, body: rsp_body, })
                        }
                    }
                })
            }
        }
    }
    /// `PUT .../dataSources/{dataSourceName}` — create or update a data source.
    pub mod create_or_update {
        use super::models;
        /// Success payloads: 200 (updated) or 201 (created).
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::DataSource),
            Created201(models::DataSource),
        }
        /// Failure modes of the `create_or_update` operation.
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Request builder; constructed by `data_sources::Client::create_or_update`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) resource_group_name: String,
            pub(crate) workspace_name: String,
            pub(crate) data_source_name: String,
            pub(crate) parameters: models::DataSource,
            pub(crate) subscription_id: String,
        }
        impl Builder {
            pub fn into_future(self) ->
futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
                // PUT with JSON body serialized from `parameters`;
                // maps 200 => Ok200, 201 => Created201.
                Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/dataSources/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.workspace_name,
                        &self.data_source_name
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::PUT);
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2020-08-01");
                    req_builder = req_builder.header("content-type", "application/json");
                    let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::DataSource = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(Response::Ok200(rsp_value))
                        }
                        http::StatusCode::CREATED => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::DataSource = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(Response::Created201(rsp_value))
                        }
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            Err(Error::UnexpectedResponse { status_code, body: rsp_body, })
                        }
                    }
                })
            }
        }
    }
    /// `DELETE .../dataSources/{dataSourceName}` — delete a data source.
    pub mod delete {
        use super::models;
        /// Success payloads: 200 (deleted) or 204 (nothing to delete).
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            NoContent204,
        }
        /// Failure modes of the `delete` operation.
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Request builder; constructed by `data_sources::Client::delete`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) resource_group_name: String,
            pub(crate) workspace_name: String,
            pub(crate) data_source_name: String,
            pub(crate) subscription_id: String,
        }
        impl Builder {
            /// Sends the DELETE request; 200/204 are both success.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
                Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/dataSources/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.workspace_name,
                        &self.data_source_name
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::DELETE);
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION,
                        format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2020-08-01");
                    let req_body = azure_core::EMPTY_BODY;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => Ok(Response::Ok200),
                        http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            Err(Error::UnexpectedResponse { status_code, body: rsp_body, })
                        }
                    }
                })
            }
        }
    }
    /// `GET .../dataSources?$filter=...` — list data sources in a workspace.
    pub mod list_by_workspace {
        use super::models;
        /// Failure modes of the `list_by_workspace` operation.
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Request builder; constructed by `data_sources::Client::list_by_workspace`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) resource_group_name: String,
            pub(crate) workspace_name: String,
            pub(crate) filter: String,
            pub(crate) subscription_id: String,
            pub(crate) skiptoken: Option<String>,
        }
        impl Builder {
            /// Sets the optional `$skiptoken` continuation token.
            pub fn skiptoken(mut self, skiptoken: impl Into<String>) -> Self {
                self.skiptoken = Some(skiptoken.into());
                self
            }
            pub fn into_future(self) ->
workspace_name: impl Into<String>,
            intelligence_pack_name: impl Into<String>,
            subscription_id: impl Into<String>,
        ) -> disable::Builder {
            disable::Builder {
                client: self.0.clone(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
                intelligence_pack_name: intelligence_pack_name.into(),
                subscription_id: subscription_id.into(),
            }
        }
        /// Returns a builder for enabling an intelligence pack on a workspace.
        pub fn enable(
            &self,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
            intelligence_pack_name: impl Into<String>,
            subscription_id: impl Into<String>,
        ) -> enable::Builder {
            enable::Builder {
                client: self.0.clone(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
                intelligence_pack_name: intelligence_pack_name.into(),
                subscription_id: subscription_id.into(),
            }
        }
        /// Returns a builder for listing the workspace's intelligence packs.
        pub fn list(
            &self,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
            subscription_id: impl Into<String>,
        ) -> list::Builder {
            list::Builder {
                client: self.0.clone(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
                subscription_id: subscription_id.into(),
            }
        }
    }
    /// `POST .../intelligencePacks/{name}/Disable` — turn a pack off.
    pub mod disable {
        use super::models;
        /// Failure modes of the `disable` operation.
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Request builder; constructed by `intelligence_packs::Client::disable`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) resource_group_name: String,
            pub(crate) workspace_name: String,
            pub(crate) intelligence_pack_name: String,
            pub(crate) subscription_id: String,
        }
        impl Builder {
            /// Sends an empty-body POST; 200 is the only success status.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<(), Error>> {
                Box::pin(async move {
                    let url_str = &format!("{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/intelligencePacks/{}/Disable", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name, &self.intelligence_pack_name);
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::POST);
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2020-08-01");
                    let req_body = azure_core::EMPTY_BODY;
                    // Empty POST body: an explicit Content-Length of 0 is set.
                    req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => Ok(()),
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            Err(Error::UnexpectedResponse { status_code, body: rsp_body, })
                        }
                    }
                })
            }
        }
    }
    /// `POST .../intelligencePacks/{name}/Enable` — turn a pack on.
    pub mod enable {
        use super::models;
        /// Failure modes of the `enable` operation.
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Request builder; constructed by `intelligence_packs::Client::enable`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) resource_group_name: String,
            pub(crate) workspace_name: String,
            pub(crate) intelligence_pack_name: String,
            pub(crate) subscription_id: String,
        }
        impl Builder {
            /// Sends an empty-body POST; 200 is the only success status.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<(), Error>> {
                Box::pin(async move {
                    let url_str = &format!("{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/intelligencePacks/{}/Enable", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name, &self.
intelligence_pack_name);
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::POST);
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2020-08-01");
                    let req_body = azure_core::EMPTY_BODY;
                    // Empty POST body: an explicit Content-Length of 0 is set.
                    req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => Ok(()),
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            Err(Error::UnexpectedResponse { status_code, body: rsp_body, })
                        }
                    }
                })
            }
        }
    }
    /// `GET .../intelligencePacks` — list all packs and their enabled state.
    pub mod list {
        use super::models;
        /// Failure modes of the `list` operation.
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Request builder; constructed by `intelligence_packs::Client::list`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) resource_group_name: String,
            pub(crate) workspace_name: String,
            pub(crate) subscription_id: String,
        }
        impl Builder {
            /// Sends the GET request; the service returns a bare JSON array,
            /// deserialized directly into `Vec<models::IntelligencePack>`.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Vec<models::IntelligencePack>, Error>> {
                Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/intelligencePacks",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.workspace_name
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::GET);
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2020-08-01");
                    let req_body = azure_core::EMPTY_BODY;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: Vec<models::IntelligencePack> = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            Err(Error::UnexpectedResponse { status_code, body: rsp_body, })
                        }
                    }
                })
            }
        }
    }
}
/// Operations on workspace linked services
/// (`.../workspaces/{workspaceName}/linkedServices`).
pub mod linked_services {
    use super::models;
    pub struct Client(pub(crate) super::Client);
    impl Client {
        /// Returns a builder for fetching a single linked service.
        pub fn get(
            &self,
            resource_group_name: impl
Into<String>,
            workspace_name: impl Into<String>,
            linked_service_name: impl Into<String>,
            subscription_id: impl Into<String>,
        ) -> get::Builder {
            get::Builder {
                client: self.0.clone(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
                linked_service_name: linked_service_name.into(),
                subscription_id: subscription_id.into(),
            }
        }
        /// Returns a builder for creating or updating a linked service.
        pub fn create_or_update(
            &self,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
            linked_service_name: impl Into<String>,
            parameters: impl Into<models::LinkedService>,
            subscription_id: impl Into<String>,
        ) -> create_or_update::Builder {
            create_or_update::Builder {
                client: self.0.clone(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
                linked_service_name: linked_service_name.into(),
                parameters: parameters.into(),
                subscription_id: subscription_id.into(),
            }
        }
        /// Returns a builder for deleting a linked service.
        pub fn delete(
            &self,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
            linked_service_name: impl Into<String>,
            subscription_id: impl Into<String>,
        ) -> delete::Builder {
            delete::Builder {
                client: self.0.clone(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
                linked_service_name: linked_service_name.into(),
                subscription_id: subscription_id.into(),
            }
        }
        /// Returns a builder for listing linked services in a workspace.
        pub fn list_by_workspace(
            &self,
            resource_group_name: impl Into<String>,
            workspace_name: impl Into<String>,
            subscription_id: impl Into<String>,
        ) -> list_by_workspace::Builder {
            list_by_workspace::Builder {
                client: self.0.clone(),
                resource_group_name: resource_group_name.into(),
                workspace_name: workspace_name.into(),
                subscription_id: subscription_id.into(),
            }
        }
    }
    /// `GET .../linkedServices/{linkedServiceName}` — fetch one linked service.
    pub mod get {
        use super::models;
        /// Failure modes of the `get` operation.
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Request builder; constructed by `linked_services::Client::get`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) resource_group_name: String,
            pub(crate) workspace_name: String,
            pub(crate) linked_service_name: String,
            pub(crate) subscription_id: String,
        }
        impl Builder {
            /// Sends the GET request and deserializes the `LinkedService`.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::LinkedService, Error>> {
                Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/linkedServices/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.workspace_name,
                        &self.linked_service_name
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::GET);
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2020-08-01");
                    let req_body = azure_core::EMPTY_BODY;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::LinkedService = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            Err(Error::UnexpectedResponse { status_code, body: rsp_body, })
                        }
                    }
                })
            }
        }
    }
    /// `PUT .../linkedServices/{linkedServiceName}` — create or update a
    /// linked service. This is a long-running operation: 202 signals that the
    /// service accepted the request but has not completed it.
    pub mod create_or_update {
        use super::models;
        /// Success payloads: 202 (accepted, in progress), 200 (updated),
        /// 201 (created).
        #[derive(Debug)]
        pub enum Response {
            Accepted202,
            Ok200(models::LinkedService),
            Created201(models::LinkedService),
        }
        /// Failure modes of the `create_or_update` operation.
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Request builder; constructed by `linked_services::Client::create_or_update`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) resource_group_name: String,
            pub(crate) workspace_name: String,
            pub(crate) linked_service_name: String,
            pub(crate) parameters: models::LinkedService,
            pub(crate) subscription_id: String,
        }
        impl Builder {
            /// Builds the PUT request with a JSON body serialized from
            /// `parameters`, sends it, and maps 202/200/201 to `Response`.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
                Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/linkedServices/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.workspace_name,
                        &self.linked_service_name
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::PUT);
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2020-08-01");
                    req_builder = req_builder.header("content-type", "application/json");
                    let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::LinkedService = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(Response::Ok200(rsp_value))
                        }
                        http::StatusCode::CREATED => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::LinkedService = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(Response::Created201(rsp_value))
                        }
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            Err(Error::UnexpectedResponse { status_code, body: rsp_body, })
                        }
                    }
                })
            }
        }
    }
    /// `DELETE .../linkedServices/{linkedServiceName}` — delete a linked
    /// service (long-running; 202 = accepted, still in progress).
    pub mod delete {
        use super::models;
        /// Success payloads: 200 (deleted, body returned), 202 (accepted),
        /// 204 (nothing to delete).
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::LinkedService),
            Accepted202,
            NoContent204,
        }
#[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) linked_service_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/linkedServices/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name, &self.linked_service_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2020-08-01"); let req_body = azure_core::EMPTY_BODY; req_builder = 
req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::LinkedService = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(Response::Ok200(rsp_value)) } http::StatusCode::ACCEPTED => Ok(Response::Accepted202), http::StatusCode::NO_CONTENT => Ok(Response::NoContent204), status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod list_by_workspace { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, 
std::result::Result<models::LinkedServiceListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/linkedServices", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2020-08-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::LinkedServiceListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } } /* Operations on a workspace's `linkedStorageAccounts` sub-resource, keyed by data-source type: get, create_or_update (PUT), delete, and list_by_workspace, all against `.../Microsoft.OperationalInsights/workspaces/{workspace}/linkedStorageAccounts[/{dataSourceType}]` with api-version 2020-08-01. Each builder method returns a lazily-executed `Builder`; nothing is sent until `into_future()` is awaited. */ pub mod linked_storage_accounts { use super::models; pub struct Client(pub(crate) super::Client); impl Client { pub fn get( &self, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, data_source_type: impl Into<String>, subscription_id: impl Into<String>, ) -> get::Builder { get::Builder { client: self.0.clone(), resource_group_name:
resource_group_name.into(), workspace_name: workspace_name.into(), data_source_type: data_source_type.into(), subscription_id: subscription_id.into(), } } pub fn create_or_update( &self, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, data_source_type: impl Into<String>, parameters: impl Into<models::LinkedStorageAccountsResource>, subscription_id: impl Into<String>, ) -> create_or_update::Builder { create_or_update::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), data_source_type: data_source_type.into(), parameters: parameters.into(), subscription_id: subscription_id.into(), } } pub fn delete( &self, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, data_source_type: impl Into<String>, subscription_id: impl Into<String>, ) -> delete::Builder { delete::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), data_source_type: data_source_type.into(), subscription_id: subscription_id.into(), } } pub fn list_by_workspace( &self, subscription_id: impl Into<String>, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, ) -> list_by_workspace::Builder { list_by_workspace::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), } } } pub mod get { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), 
#[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) data_source_type: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future( self, ) -> futures::future::BoxFuture<'static, std::result::Result<models::LinkedStorageAccountsResource, Error>> { Box::pin(async move { let url_str = & format ! ("{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/linkedStorageAccounts/{}" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . workspace_name , & self . data_source_type) ; let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2020-08-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::LinkedStorageAccountsResource = 
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod create_or_update { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) data_source_type: String, pub(crate) parameters: models::LinkedStorageAccountsResource, pub(crate) subscription_id: String, } impl Builder { pub fn into_future( self, ) -> futures::future::BoxFuture<'static, std::result::Result<models::LinkedStorageAccountsResource, Error>> { Box::pin(async move { let url_str = & format ! ("{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/linkedStorageAccounts/{}" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . workspace_name , & self . 
data_source_type) ; let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2020-08-01"); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::LinkedStorageAccountsResource = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod delete { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute 
request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) data_source_type: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<(), Error>> { Box::pin(async move { let url_str = & format ! ("{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/linkedStorageAccounts/{}" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . workspace_name , & self . data_source_type) ; let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2020-08-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod list_by_workspace { use 
super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) subscription_id: String, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, } impl Builder { pub fn into_future( self, ) -> futures::future::BoxFuture<'static, std::result::Result<models::LinkedStorageAccountsListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/linkedStorageAccounts", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2020-08-01"); let req_body = azure_core::EMPTY_BODY; req_builder = 
req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::LinkedStorageAccountsListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } } /* Lists the management groups connected to a workspace: a single `list` operation issuing GET `.../workspaces/{workspace}/managementGroups` (api-version 2020-08-01) and deserializing the 200 body into `models::WorkspaceListManagementGroupsResult`. */ pub mod management_groups { use super::models; pub struct Client(pub(crate) super::Client); impl Client { pub fn list( &self, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, subscription_id: impl Into<String>, ) -> list::Builder { list::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), subscription_id: subscription_id.into(), } } } pub mod list { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), }
#[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future( self, ) -> futures::future::BoxFuture<'static, std::result::Result<models::WorkspaceListManagementGroupsResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/managementGroups", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2020-08-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::WorkspaceListManagementGroupsResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } } /* Polls the status of a location-scoped asynchronous operation: `get` issues GET `.../Microsoft.OperationalInsights/locations/{location}/operationStatuses/{asyncOperationId}` (api-version 2020-08-01) and returns `models::OperationStatus` on 200. */ pub mod operation_statuses { use super::models; pub struct Client(pub(crate)
super::Client); impl Client { pub fn get( &self, location: impl Into<String>, async_operation_id: impl Into<String>, subscription_id: impl Into<String>, ) -> get::Builder { get::Builder { client: self.0.clone(), location: location.into(), async_operation_id: async_operation_id.into(), subscription_id: subscription_id.into(), } } } pub mod get { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) location: String, pub(crate) async_operation_id: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::OperationStatus, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/providers/Microsoft.OperationalInsights/locations/{}/operationStatuses/{}", self.client.endpoint(), &self.subscription_id, &self.location, &self.async_operation_id ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential 
.get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2020-08-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::OperationStatus = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } } /* Workspace shared-key operations: `get_shared_keys` (POST `.../workspaces/{workspace}/sharedKeys`) retrieves the current keys, and `regenerate` (POST `.../workspaces/{workspace}/regenerateSharedKey`) rotates them; both return `models::SharedKeys` on 200, api-version 2020-08-01. NOTE(review): the two builder methods take their parameters in different orders (subscription_id first on `regenerate`) — generated as-is; callers should mind the order. */ pub mod shared_keys { use super::models; pub struct Client(pub(crate) super::Client); impl Client { pub fn get_shared_keys( &self, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, subscription_id: impl Into<String>, ) -> get_shared_keys::Builder { get_shared_keys::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), subscription_id: subscription_id.into(), } } pub fn regenerate( &self, subscription_id: impl Into<String>, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, ) -> regenerate::Builder { regenerate::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), } } } pub mod get_shared_keys { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::SharedKeys, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/sharedKeys", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2020-08-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = 
self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::SharedKeys = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } }
// Generated operation module: POST .../workspaces/{workspace}/regenerateSharedKey.
// Regenerates the workspace shared key and deserializes a 200 response into `models::SharedKeys`.
pub mod regenerate { use super::models;
// One error variant per request phase (URL parse, build, auth, send, read body, deserialize),
// plus `UnexpectedResponse` carrying the raw status code and body for any non-200 status.
#[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), }
// Request builder capturing the pipeline client plus the three path parameters.
#[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) subscription_id: String, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, }
// `into_future` assembles the URL, attaches a bearer token and api-version=2020-08-01,
// sends an empty-body POST (explicit Content-Length: 0), and maps the response as above.
impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::SharedKeys, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/regenerateSharedKey", self.client.endpoint(), &self.subscription_id, &self.resource_group_name,
&self.workspace_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2020-08-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::SharedKeys = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } }
// Generated operation group for workspace usages (GET .../workspaces/{workspace}/usages).
pub mod usages { use super::models; pub struct Client(pub(crate) super::Client);
// `list` returns a lazily-executed request builder; parameters are converted to owned Strings up front.
impl Client { pub fn list( &self, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, subscription_id: impl Into<String>, ) -> list::Builder { list::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), subscription_id: subscription_id.into(), } } } pub mod list { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode,
body: bytes::Bytes }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::WorkspaceListUsagesResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/usages", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2020-08-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = 
rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::WorkspaceListUsagesResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } }
// Generated operation group for storage insight configs scoped to a workspace.
pub mod storage_insight_configs { use super::models; pub struct Client(pub(crate) super::Client); impl Client {
// Returns a builder that GETs a single storage insight config by name.
pub fn get( &self, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, storage_insight_name: impl Into<String>, subscription_id: impl Into<String>, ) -> get::Builder { get::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), storage_insight_name: storage_insight_name.into(), subscription_id: subscription_id.into(), } }
// Returns a builder that PUTs `parameters` to create or update a named storage insight config.
pub fn create_or_update( &self, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, storage_insight_name: impl Into<String>, parameters: impl Into<models::StorageInsight>, subscription_id: impl Into<String>, ) -> create_or_update::Builder { create_or_update::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), storage_insight_name: storage_insight_name.into(), parameters: parameters.into(), subscription_id: subscription_id.into(), } }
// Returns a builder that DELETEs a named storage insight config.
pub fn delete( &self, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, storage_insight_name: impl Into<String>, subscription_id: impl Into<String>, ) -> delete::Builder { delete::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), storage_insight_name: storage_insight_name.into(), subscription_id: subscription_id.into(),
} } pub fn list_by_workspace( &self, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, subscription_id: impl Into<String>, ) -> list_by_workspace::Builder { list_by_workspace::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), subscription_id: subscription_id.into(), } } } pub mod get { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) storage_insight_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::StorageInsight, Error>> { Box::pin(async move { let url_str = & format ! ("{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/storageInsightConfigs/{}" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . workspace_name , & self . 
storage_insight_name) ; let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2020-08-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::StorageInsight = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod create_or_update { use super::models; #[derive(Debug)] pub enum Response { Created201(models::StorageInsight), Ok200(models::StorageInsight), } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] 
SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) storage_insight_name: String, pub(crate) parameters: models::StorageInsight, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> { Box::pin(async move { let url_str = & format ! ("{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/storageInsightConfigs/{}" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . workspace_name , & self . storage_insight_name) ; let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2020-08-01"); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::CREATED => { let rsp_body = 
azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::StorageInsight = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(Response::Created201(rsp_value)) } http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::StorageInsight = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(Response::Ok200(rsp_value)) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod delete { use super::models; #[derive(Debug)] pub enum Response { Ok200, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) storage_insight_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> { 
Box::pin(async move { let url_str = & format ! ("{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/storageInsightConfigs/{}" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . workspace_name , & self . storage_insight_name) ; let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2020-08-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => Ok(Response::Ok200), http::StatusCode::NO_CONTENT => Ok(Response::NoContent204), status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod list_by_workspace { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to 
execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::StorageInsightListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/storageInsightConfigs", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2020-08-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::StorageInsightListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let 
rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } }
// Generated operation group for saved searches scoped to a workspace.
pub mod saved_searches { use super::models; pub struct Client(pub(crate) super::Client); impl Client {
// Returns a builder that GETs a saved search by id.
pub fn get( &self, subscription_id: impl Into<String>, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, saved_search_id: impl Into<String>, ) -> get::Builder { get::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), saved_search_id: saved_search_id.into(), } }
// Returns a builder that PUTs `parameters` to create or update a saved search.
pub fn create_or_update( &self, subscription_id: impl Into<String>, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, saved_search_id: impl Into<String>, parameters: impl Into<models::SavedSearch>, ) -> create_or_update::Builder { create_or_update::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), saved_search_id: saved_search_id.into(), parameters: parameters.into(), } }
// Returns a builder that DELETEs a saved search by id.
pub fn delete( &self, subscription_id: impl Into<String>, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, saved_search_id: impl Into<String>, ) -> delete::Builder { delete::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), saved_search_id: saved_search_id.into(), } }
// Returns a builder that GETs the list of saved searches in the workspace.
pub fn list_by_workspace( &self, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, subscription_id: impl Into<String>, ) -> list_by_workspace::Builder { list_by_workspace::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), subscription_id: subscription_id.into(), } } } pub mod get { use super::models;
#[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) subscription_id: String, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) saved_search_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::SavedSearch, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/savedSearches/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name, &self.saved_search_id ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2020-08-01"); let req_body = azure_core::EMPTY_BODY; req_builder = 
req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::SavedSearch = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod create_or_update { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) subscription_id: String, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) saved_search_id: String, pub(crate) parameters: models::SavedSearch, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::SavedSearch, Error>> { Box::pin(async move { let url_str = 
&format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/savedSearches/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name, &self.saved_search_id ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2020-08-01"); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::SavedSearch = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod delete { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] 
http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) subscription_id: String, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) saved_search_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<(), Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/savedSearches/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name, &self.saved_search_id ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2020-08-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = 
azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod list_by_workspace { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::SavedSearchesListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/savedSearches", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", 
token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2020-08-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::SavedSearchesListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } }
// Generated operation group listing the service tiers available to a workspace.
pub mod available_service_tiers { use super::models; pub struct Client(pub(crate) super::Client); impl Client {
// Returns a builder that GETs .../availableServiceTiers and yields a Vec<models::AvailableServiceTier>.
pub fn list_by_workspace( &self, subscription_id: impl Into<String>, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, ) -> list_by_workspace::Builder { list_by_workspace::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), } } } pub mod list_by_workspace { use super::models;
// One error variant per request phase; `UnexpectedResponse` carries the raw status code and body.
#[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) subscription_id: String, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Vec<models::AvailableServiceTier>, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/availableServiceTiers", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2020-08-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: Vec<models::AvailableServiceTier> = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = 
azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } } pub mod gateways { use super::models; pub struct Client(pub(crate) super::Client); impl Client { pub fn delete( &self, subscription_id: impl Into<String>, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, gateway_id: impl Into<String>, ) -> delete::Builder { delete::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), gateway_id: gateway_id.into(), } } } pub mod delete { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) subscription_id: String, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) gateway_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<(), Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/gateways/{}", self.client.endpoint(), 
&self.subscription_id, &self.resource_group_name, &self.workspace_name, &self.gateway_id ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2020-08-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } } pub mod schema { use super::models; pub struct Client(pub(crate) super::Client); impl Client { pub fn get( &self, resource_group_name: impl Into<String>, workspace_name: impl Into<String>, subscription_id: impl Into<String>, ) -> get::Builder { get::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), workspace_name: workspace_name.into(), subscription_id: subscription_id.into(), } } } pub mod get { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] 
serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) workspace_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::SearchGetSchemaResponse, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/schema", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2020-08-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: 
models::SearchGetSchemaResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } } pub mod workspace_purge { use super::models; pub struct Client(pub(crate) super::Client); impl Client { pub fn purge( &self, resource_group_name: impl Into<String>, subscription_id: impl Into<String>, workspace_name: impl Into<String>, body: impl Into<models::WorkspacePurgeBody>, ) -> purge::Builder { purge::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), subscription_id: subscription_id.into(), workspace_name: workspace_name.into(), body: body.into(), } } pub fn get_purge_status( &self, resource_group_name: impl Into<String>, subscription_id: impl Into<String>, workspace_name: impl Into<String>, purge_id: impl Into<String>, ) -> get_purge_status::Builder { get_purge_status::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), subscription_id: subscription_id.into(), workspace_name: workspace_name.into(), purge_id: purge_id.into(), } } } pub mod purge { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to 
deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) subscription_id: String, pub(crate) workspace_name: String, pub(crate) body: models::WorkspacePurgeBody, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::WorkspacePurgeResponse, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/purge", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2020-08-01"); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(&self.body).map_err(Error::Serialize)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::ACCEPTED => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::WorkspacePurgeResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = 
azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod get_purge_status { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) subscription_id: String, pub(crate) workspace_name: String, pub(crate) purge_id: String, } impl Builder { pub fn into_future( self, ) -> futures::future::BoxFuture<'static, std::result::Result<models::WorkspacePurgeStatusResponse, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/operations/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.workspace_name, &self.purge_id ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = 
req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2020-08-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::WorkspacePurgeStatusResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } } pub mod clusters { use super::models; pub struct Client(pub(crate) super::Client); impl Client { pub fn list_by_resource_group( &self, resource_group_name: impl Into<String>, subscription_id: impl Into<String>, ) -> list_by_resource_group::Builder { list_by_resource_group::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), subscription_id: subscription_id.into(), } } pub fn list(&self, subscription_id: impl Into<String>) -> list::Builder { list::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), } } pub fn get( &self, subscription_id: impl Into<String>, resource_group_name: impl Into<String>, cluster_name: impl Into<String>, ) -> get::Builder { get::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), resource_group_name: resource_group_name.into(), cluster_name: cluster_name.into(), } } pub fn create_or_update( &self, resource_group_name: impl Into<String>, subscription_id: impl Into<String>, cluster_name: impl Into<String>, parameters: impl 
Into<models::Cluster>, ) -> create_or_update::Builder { create_or_update::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), subscription_id: subscription_id.into(), cluster_name: cluster_name.into(), parameters: parameters.into(), } } pub fn update( &self, resource_group_name: impl Into<String>, cluster_name: impl Into<String>, parameters: impl Into<models::ClusterPatch>, subscription_id: impl Into<String>, ) -> update::Builder { update::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), cluster_name: cluster_name.into(), parameters: parameters.into(), subscription_id: subscription_id.into(), } } pub fn delete( &self, resource_group_name: impl Into<String>, cluster_name: impl Into<String>, subscription_id: impl Into<String>, ) -> delete::Builder { delete::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), cluster_name: cluster_name.into(), subscription_id: subscription_id.into(), } } } pub mod list_by_resource_group { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) subscription_id: String, } impl 
Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::ClusterListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/clusters", self.client.endpoint(), &self.subscription_id, &self.resource_group_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2021-06-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ClusterListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod list { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, 
#[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::ClusterListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/providers/Microsoft.OperationalInsights/clusters", self.client.endpoint(), &self.subscription_id ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2021-06-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: 
models::ClusterListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod get { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) subscription_id: String, pub(crate) resource_group_name: String, pub(crate) cluster_name: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Cluster, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/clusters/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.cluster_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = 
req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2021-06-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::Cluster = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod create_or_update { use super::models; #[derive(Debug)] pub enum Response { Accepted202, Ok200(models::Cluster), Created201(models::Cluster), } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] 
azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) subscription_id: String, pub(crate) cluster_name: String, pub(crate) parameters: models::Cluster, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/clusters/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.cluster_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2021-06-01"); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::ACCEPTED => Ok(Response::Accepted202), http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::Cluster = 
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(Response::Ok200(rsp_value)) } http::StatusCode::CREATED => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::Cluster = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(Response::Created201(rsp_value)) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod update { use super::models; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) cluster_name: String, pub(crate) parameters: models::ClusterPatch, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Cluster, Error>> { Box::pin(async move { let url_str = &format!( 
"{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/clusters/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.cluster_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PATCH); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2021-06-01"); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::Cluster = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } pub mod delete { use super::models; #[derive(Debug)] pub enum Response { Accepted202, Ok200, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: 
models::ErrorResponse, }, #[error("Failed to parse request URL")] ParseUrl(#[source] url::ParseError), #[error("Failed to build request")] BuildRequest(#[source] http::Error), #[error("Failed to serialize request body")] Serialize(#[source] serde_json::Error), #[error("Failed to get access token")] GetToken(#[source] azure_core::Error), #[error("Failed to execute request")] SendRequest(#[source] azure_core::error::Error), #[error("Failed to get response bytes")] ResponseBytes(#[source] azure_core::error::Error), #[error("Failed to deserialize response, body: {1:?}")] Deserialize(#[source] serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) cluster_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationalInsights/clusters/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.cluster_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", "2021-06-01"); let req_body = azure_core::EMPTY_BODY; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { 
http::StatusCode::ACCEPTED => Ok(Response::Accepted202), http::StatusCode::OK => Ok(Response::Ok200), http::StatusCode::NO_CONTENT => Ok(Response::NoContent204), status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ErrorResponse = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Err(Error::DefaultResponse { status_code, value: rsp_value, }) } } }) } } } }
51.097242
319
0.521945
f8723ad6a551a66cb7bec0d4982d546b604d5887
51,172
// DO NOT EDIT ! // This file was generated automatically from 'src/mako/cli/main.rs.mako' // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] #[macro_use] extern crate clap; extern crate yup_oauth2 as oauth2; extern crate yup_hyper_mock as mock; extern crate hyper_rustls; extern crate serde; extern crate serde_json; extern crate hyper; extern crate mime; extern crate strsim; extern crate google_youtubeanalytics1 as api; use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; mod cmn; use cmn::{InvalidOptionsError, CLIError, JsonTokenStorage, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, calltype_from_str, remove_json_null_values, ComplexType, JsonType, JsonTypeInfo}; use std::default::Default; use std::str::FromStr; use oauth2::{Authenticator, DefaultAuthenticatorDelegate, FlowType}; use serde_json as json; use clap::ArgMatches; enum DoitError { IoError(String, io::Error), ApiError(api::Error), } struct Engine<'n> { opt: ArgMatches<'n>, hub: api::YouTubeAnalytics<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, JsonTokenStorage, hyper::Client>>, gp: Vec<&'static str>, gpm: Vec<(&'static str, &'static str)>, } impl<'n> Engine<'n> { fn _group_items_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.group_items().delete(opt.value_of("id").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "on-behalf-of-content-owner" => { call = call.on_behalf_of_content_owner(value.unwrap_or("")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { 
err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v.extend(["on-behalf-of-content-owner"].iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok(mut response) => { Ok(()) } } } } fn _group_items_insert(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource.kind" => Some(("resource.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource.id" => Some(("resource.id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "group-id" => Some(("groupId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "group-id", "id", "kind", "resource"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::GroupItem = json::value::from_value(object).unwrap(); let mut call = self.hub.group_items().insert(request); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "on-behalf-of-content-owner" => { call = call.on_behalf_of_content_owner(value.unwrap_or("")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v.extend(["on-behalf-of-content-owner"].iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = 
call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _group_items_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.group_items().list(opt.value_of("group-id").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "on-behalf-of-content-owner" => { call = call.on_behalf_of_content_owner(value.unwrap_or("")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v.extend(["on-behalf-of-content-owner"].iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), 
Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _groups_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.groups().delete(opt.value_of("id").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "on-behalf-of-content-owner" => { call = call.on_behalf_of_content_owner(value.unwrap_or("")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v.extend(["on-behalf-of-content-owner"].iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok(mut response) => { Ok(()) } } } } fn _groups_insert(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { 
err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "snippet.published-at" => Some(("snippet.publishedAt", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "snippet.title" => Some(("snippet.title", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-details.item-count" => Some(("contentDetails.itemCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "content-details.item-type" => Some(("contentDetails.itemType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["content-details", "etag", "id", "item-count", "item-type", "kind", "published-at", "snippet", "title"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::Group = json::value::from_value(object).unwrap(); let mut call = self.hub.groups().insert(request); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "on-behalf-of-content-owner" => { call = call.on_behalf_of_content_owner(value.unwrap_or("")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| 
t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v.extend(["on-behalf-of-content-owner"].iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _groups_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.groups().list(); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "page-token" => { call = call.page_token(value.unwrap_or("")); }, "on-behalf-of-content-owner" => { call = call.on_behalf_of_content_owner(value.unwrap_or("")); }, "mine" => { call = call.mine(arg_from_str(value.unwrap_or("false"), err, "mine", "boolean")); }, "id" => { call = call.id(value.unwrap_or("")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); 
v.extend(self.gp.iter().map(|v|*v)); v.extend(["page-token", "on-behalf-of-content-owner", "mine", "id"].iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _groups_update(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "snippet.published-at" => Some(("snippet.publishedAt", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "snippet.title" => Some(("snippet.title", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-details.item-count" => Some(("contentDetails.itemCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "content-details.item-type" => Some(("contentDetails.itemType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["content-details", "etag", "id", "item-count", "item-type", "kind", "published-at", "snippet", "title"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::Group = json::value::from_value(object).unwrap(); let mut call = self.hub.groups().update(request); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "on-behalf-of-content-owner" => { call = call.on_behalf_of_content_owner(value.unwrap_or("")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); 
v.extend(["on-behalf-of-content-owner"].iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _reports_query(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.reports().query(opt.value_of("ids").unwrap_or(""), opt.value_of("start-date").unwrap_or(""), opt.value_of("end-date").unwrap_or(""), opt.value_of("metrics").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); }, "sort" => { call = call.sort(value.unwrap_or("")); }, "max-results" => { call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); }, "include-historical-channel-data" => { call = call.include_historical_channel_data(arg_from_str(value.unwrap_or("false"), err, "include-historical-channel-data", "boolean")); }, "filters" => { call = call.filters(value.unwrap_or("")); }, "dimensions" => { call = call.dimensions(value.unwrap_or("")); }, "currency" => { call = call.currency(value.unwrap_or("")); }, _ => { let mut found 
= false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v.extend(["sort", "max-results", "dimensions", "start-index", "include-historical-channel-data", "currency", "filters"].iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _doit(&self, dry_run: bool) -> Result<Result<(), DoitError>, Option<InvalidOptionsError>> { let mut err = InvalidOptionsError::new(); let mut call_result: Result<(), DoitError> = Ok(()); let mut err_opt: Option<InvalidOptionsError> = None; match self.opt.subcommand() { ("group-items", Some(opt)) => { match opt.subcommand() { ("delete", Some(opt)) => { call_result = self._group_items_delete(opt, dry_run, &mut err); }, ("insert", Some(opt)) => { call_result = self._group_items_insert(opt, dry_run, &mut err); }, ("list", Some(opt)) => { call_result = self._group_items_list(opt, dry_run, &mut err); }, _ => { err.issues.push(CLIError::MissingMethodError("group-items".to_string())); writeln!(io::stderr(), "{}\n", 
opt.usage()).ok(); } } }, ("groups", Some(opt)) => { match opt.subcommand() { ("delete", Some(opt)) => { call_result = self._groups_delete(opt, dry_run, &mut err); }, ("insert", Some(opt)) => { call_result = self._groups_insert(opt, dry_run, &mut err); }, ("list", Some(opt)) => { call_result = self._groups_list(opt, dry_run, &mut err); }, ("update", Some(opt)) => { call_result = self._groups_update(opt, dry_run, &mut err); }, _ => { err.issues.push(CLIError::MissingMethodError("groups".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); } } }, ("reports", Some(opt)) => { match opt.subcommand() { ("query", Some(opt)) => { call_result = self._reports_query(opt, dry_run, &mut err); }, _ => { err.issues.push(CLIError::MissingMethodError("reports".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); } } }, _ => { err.issues.push(CLIError::MissingCommandError); writeln!(io::stderr(), "{}\n", self.opt.usage()).ok(); } } if dry_run { if err.issues.len() > 0 { err_opt = Some(err); } Err(err_opt) } else { Ok(call_result) } } // Please note that this call will fail if any part of the opt can't be handled fn new(opt: ArgMatches<'n>) -> Result<Engine<'n>, InvalidOptionsError> { let (config_dir, secret) = { let config_dir = match cmn::assure_config_dir_exists(opt.value_of("folder").unwrap_or("~/.google-service-cli")) { Err(e) => return Err(InvalidOptionsError::single(e, 3)), Ok(p) => p, }; match cmn::application_secret_from_directory(&config_dir, "youtubeanalytics1-secret.json", "{\"installed\":{\"auth_uri\":\"https://accounts.google.com/o/oauth2/auth\",\"client_secret\":\"hCsslbCUyfehWMmbkG8vTYxG\",\"token_uri\":\"https://accounts.google.com/o/oauth2/token\",\"client_email\":\"\",\"redirect_uris\":[\"urn:ietf:wg:oauth:2.0:oob\",\"oob\"],\"client_x509_cert_url\":\"\",\"client_id\":\"620010449518-9ngf7o4dhs0dka470npqvor6dc5lqb9b.apps.googleusercontent.com\",\"auth_provider_x509_cert_url\":\"https://www.googleapis.com/oauth2/v1/certs\"}}") { Ok(secret) => 
(config_dir, secret), Err(e) => return Err(InvalidOptionsError::single(e, 4)) } }; let auth = Authenticator::new( &secret, DefaultAuthenticatorDelegate, if opt.is_present("debug-auth") { hyper::Client::with_connector(mock::TeeConnector { connector: hyper::net::HttpsConnector::new(hyper_rustls::TlsClient::new()) }) } else { hyper::Client::with_connector(hyper::net::HttpsConnector::new(hyper_rustls::TlsClient::new())) }, JsonTokenStorage { program_name: "youtubeanalytics1", db_dir: config_dir.clone(), }, Some(FlowType::InstalledRedirect(54324))); let client = if opt.is_present("debug") { hyper::Client::with_connector(mock::TeeConnector { connector: hyper::net::HttpsConnector::new(hyper_rustls::TlsClient::new()) }) } else { hyper::Client::with_connector(hyper::net::HttpsConnector::new(hyper_rustls::TlsClient::new())) }; let engine = Engine { opt: opt, hub: api::YouTubeAnalytics::new(client, auth), gp: vec!["alt", "fields", "key", "oauth-token", "pretty-print", "quota-user", "user-ip"], gpm: vec![ ("oauth-token", "oauth_token"), ("pretty-print", "prettyPrint"), ("quota-user", "quotaUser"), ("user-ip", "userIp"), ] }; match engine._doit(true) { Err(Some(err)) => Err(err), Err(None) => Ok(engine), Ok(_) => unreachable!(), } } fn doit(&self) -> Result<(), DoitError> { match self._doit(false) { Ok(res) => res, Err(_) => unreachable!(), } } } fn main() { let mut exit_status = 0i32; let arg_data = [ ("group-items", "methods: 'delete', 'insert' and 'list'", vec![ ("delete", Some(r##"Removes an item from a group."##), "Details at http://byron.github.io/google-apis-rs/google_youtubeanalytics1_cli/group-items_delete", vec![ (Some(r##"id"##), None, Some(r##"The id parameter specifies the YouTube group item ID for the group that is being deleted."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), ]), ("insert", Some(r##"Creates a group item."##), "Details at 
http://byron.github.io/google-apis-rs/google_youtubeanalytics1_cli/group-items_insert", vec![ (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("list", Some(r##"Returns a collection of group items that match the API request parameters."##), "Details at http://byron.github.io/google-apis-rs/google_youtubeanalytics1_cli/group-items_list", vec![ (Some(r##"group-id"##), None, Some(r##"The id parameter specifies the unique ID of the group for which you want to retrieve group items."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ]), ("groups", "methods: 'delete', 'insert', 'list' and 'update'", vec![ ("delete", Some(r##"Deletes a group."##), "Details at http://byron.github.io/google-apis-rs/google_youtubeanalytics1_cli/groups_delete", vec![ (Some(r##"id"##), None, Some(r##"The id parameter specifies the YouTube group ID for the group that is being deleted."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), ]), ("insert", Some(r##"Creates a group."##), "Details at http://byron.github.io/google-apis-rs/google_youtubeanalytics1_cli/groups_insert", vec![ (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various 
optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("list", Some(r##"Returns a collection of groups that match the API request parameters. For example, you can retrieve all groups that the authenticated user owns, or you can retrieve one or more groups by their unique IDs."##), "Details at http://byron.github.io/google-apis-rs/google_youtubeanalytics1_cli/groups_list", vec![ (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("update", Some(r##"Modifies a group. For example, you could change a group's title."##), "Details at http://byron.github.io/google-apis-rs/google_youtubeanalytics1_cli/groups_update", vec![ (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ]), ("reports", "methods: 'query'", vec![ ("query", Some(r##"Retrieve your YouTube Analytics reports."##), "Details at http://byron.github.io/google-apis-rs/google_youtubeanalytics1_cli/reports_query", vec![ (Some(r##"ids"##), None, Some(r##"Identifies the YouTube channel or content owner for which you are retrieving YouTube Analytics data. - To request data for a YouTube user, set the ids parameter value to channel==CHANNEL_ID, where CHANNEL_ID specifies the unique YouTube channel ID. 
- To request data for a YouTube CMS content owner, set the ids parameter value to contentOwner==OWNER_NAME, where OWNER_NAME is the CMS name of the content owner."##), Some(true), Some(false)), (Some(r##"start-date"##), None, Some(r##"The start date for fetching YouTube Analytics data. The value should be in YYYY-MM-DD format."##), Some(true), Some(false)), (Some(r##"end-date"##), None, Some(r##"The end date for fetching YouTube Analytics data. The value should be in YYYY-MM-DD format."##), Some(true), Some(false)), (Some(r##"metrics"##), None, Some(r##"A comma-separated list of YouTube Analytics metrics, such as views or likes,dislikes. See the Available Reports document for a list of the reports that you can retrieve and the metrics available in each report, and see the Metrics document for definitions of those metrics."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ]), ]; let mut app = App::new("youtubeanalytics1") .author("Sebastian Thiel <byronimo@gmail.com>") .version("1.0.8+20181010") .about("Retrieves your YouTube Analytics data.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_youtubeanalytics1_cli") .arg(Arg::with_name("url") .long("scope") .help("Specify the authentication a method should be executed in. Each scope requires the user to grant this application permission to use it.If unset, it defaults to the shortest scope url for a particular method.") .multiple(true) .takes_value(true)) .arg(Arg::with_name("folder") .long("config-dir") .help("A directory into which we will store our persistent data. 
Defaults to a user-writable directory that we will create during the first invocation.[default: ~/.google-service-cli") .multiple(false) .takes_value(true)) .arg(Arg::with_name("debug") .long("debug") .help("Output all server communication to standard error. `tx` and `rx` are placed into the same stream.") .multiple(false) .takes_value(false)) .arg(Arg::with_name("debug-auth") .long("debug-auth") .help("Output all communication related to authentication to standard error. `tx` and `rx` are placed into the same stream.") .multiple(false) .takes_value(false)); for &(main_command_name, about, ref subcommands) in arg_data.iter() { let mut mcmd = SubCommand::with_name(main_command_name).about(about); for &(sub_command_name, ref desc, url_info, ref args) in subcommands { let mut scmd = SubCommand::with_name(sub_command_name); if let &Some(desc) = desc { scmd = scmd.about(desc); } scmd = scmd.after_help(url_info); for &(ref arg_name, ref flag, ref desc, ref required, ref multi) in args { let arg_name_str = match (arg_name, flag) { (&Some(an), _ ) => an, (_ , &Some(f)) => f, _ => unreachable!(), }; let mut arg = Arg::with_name(arg_name_str) .empty_values(false); if let &Some(short_flag) = flag { arg = arg.short(short_flag); } if let &Some(desc) = desc { arg = arg.help(desc); } if arg_name.is_some() && flag.is_some() { arg = arg.takes_value(true); } if let &Some(required) = required { arg = arg.required(required); } if let &Some(multi) = multi { arg = arg.multiple(multi); } scmd = scmd.arg(arg); } mcmd = mcmd.subcommand(scmd); } app = app.subcommand(mcmd); } let matches = app.get_matches(); let debug = matches.is_present("debug"); match Engine::new(matches) { Err(err) => { exit_status = err.exit_code; writeln!(io::stderr(), "{}", err).ok(); }, Ok(engine) => { if let Err(doit_err) = engine.doit() { exit_status = 1; match doit_err { DoitError::IoError(path, err) => { writeln!(io::stderr(), "Failed to open output file '{}': {}", path, err).ok(); }, DoitError::ApiError(err) => 
{ if debug { writeln!(io::stderr(), "{:#?}", err).ok(); } else { writeln!(io::stderr(), "{}", err).ok(); } } } } } } std::process::exit(exit_status); }
49.06232
526
0.438365
914439fdb727d5712817594018ee2af98806f5d2
1,678
use toml; use serde_derive::{Deserialize, Serialize}; use std::collections::BTreeMap; #[derive(Debug, Default, Clone, Deserialize, Serialize)] pub struct Manifest { pub package: Metadata, pub badges: Option<BTreeMap<String, Badge>>, #[serde(rename = "build-dependencies")] #[serde(serialize_with = "toml::ser::tables_last")] pub build_dependencies: BTreeMap<String, Dependency>, #[serde(serialize_with = "toml::ser::tables_last")] pub dependencies: BTreeMap<String, Dependency>, #[serde(rename = "dev-dependencies")] #[serde(serialize_with = "toml::ser::tables_last")] pub dev_dependencies: BTreeMap<String, Dependency>, pub features: Option<BTreeMap<String, Vec<String>>>, } #[derive(Debug, Default, Clone, Deserialize, Serialize)] pub struct Metadata { pub authors: Option<Vec<String>>, pub description: Option<String>, pub documentation: Option<String>, pub keywords: Option<Vec<String>>, pub license: Option<String>, pub name: String, pub readme: Option<String>, pub repository: Option<String>, pub version: String, pub homepage: Option<String>, pub edition: String, pub exclude: Option<Vec<String>>, } #[derive(Debug, Clone, Deserialize, Serialize)] pub struct Badge { pub repository: String, pub branch: String, } #[derive(Debug, Clone, Deserialize, Serialize)] #[serde(untagged)] pub enum Dependency { Simple(String), Extended { version: Option<String>, path: Option<String>, optional: Option<bool>, #[serde(rename = "default-features")] default_features: Option<bool>, features: Option<Vec<String>>, }, }
29.964286
57
0.6764
c16e264acdd896871945b19ee2065a383a26d972
1,875
use super::core::{ integration_step::builders::{self, Collector}, Integrator, }; pub struct Euler; impl Integrator for Euler { fn label(&self) -> String { "Midpoint (explicit, Euler)".to_string() } fn description(&self) -> String { "v₁ = v + a ½dt\n\ s₁ = s + v₁ ½dt\n\ a₁ = a(s₁)\n\ v' = v + a₁ dt\n\ s' = s + v' dt\n \ = s + v dt + a₁ dt²" .to_string() } fn integrate_step( &self, s0: builders::Position, v0: builders::Velocity, a0: builders::Acceleration, dt: builders::DtFraction<1, 1>, step: &mut builders::Step, ) { let dt_mid = dt.half(); let v_mid = step.compute(v0 + a0 * dt_mid); let s_mid = step.compute(s0 + v_mid * dt_mid); //step.set_display_position(v_mid, s_mid); let a_mid = step.acceleration_at(s_mid); let v1 = step.compute(v0 + a_mid * dt); step.compute(s0 + v1 * dt); } } pub struct SecondOrder; impl Integrator for SecondOrder { fn label(&self) -> String { "Midpoint (explicit, SecondOrder)".to_string() } fn description(&self) -> String { "s₁ = s + v ½dt + ½ a (½dt)²\n\ a₁ = a(s₁)\n\ v' = v + a₁ dt\n\ s' = s + v dt + ½ a₁ dt²" // !! string contains non-breakable spaces .to_string() } fn integrate_step( &self, s0: builders::Position, v0: builders::Velocity, a0: builders::Acceleration, dt: builders::DtFraction<1, 1>, step: &mut builders::Step, ) { let s_mid = step.compute(s0 + v0 * dt.half() + 0.5 * a0 * dt.half() * dt.half()); let a_mid = step.acceleration_at(s_mid); step.compute(s0 + v0 * dt + 0.5 * a_mid * dt * dt); step.compute(v0 + a_mid * dt); } }
26.785714
89
0.5168
db719a598b5669aa6c03278f6115a17b0565717c
2,919
#![feature(alloc_system)] extern crate reqwest; extern crate kuchiki; extern crate html5ever; extern crate semver; extern crate linuxver; use semver::Version; use std::thread; use kuchiki::traits::*; use std::string::String; fn get_request(s: &str) -> String { let res = reqwest::get(s).unwrap().text(); let body = format!("{}", res.unwrap().to_string()); return body; } fn eval_css_selector(text: &str, css_selector: &str) -> String{ let doc = kuchiki::parse_html().one(text); let css_match = doc.select_first(css_selector).unwrap(); let as_node = css_match.as_node(); let text_node = as_node.first_child().unwrap(); let text = text_node.as_text().unwrap().borrow(); let formatted_text = format!("{}", text); return formatted_text; } fn eval_css_version_selector(text: &str, css_selector: &str) -> String { let document = kuchiki::parse_html().one(text); let latest_version_string = eval_css_selector(text, css_selector); let mut latest_version = Version::parse(&latest_version_string).unwrap(); for css_match in document.select(css_selector).unwrap() { let as_node = css_match.as_node(); let text_node = as_node.first_child().unwrap(); let text = text_node.as_text().unwrap().borrow(); let version = Version::parse(&text).unwrap(); if version > latest_version { latest_version = version; } } return latest_version.to_string(); } fn get_version(url: &str, css_selector: &str) -> String { let result = get_request(url); let css_result = eval_css_selector(&result, css_selector); return css_result; } fn get_versions(url: &str, css_selector: &str) -> String { let result = get_request(url); let css_result = eval_css_version_selector(&result, css_selector); return css_result; } fn main() { // TODO: Clean up parallelization // TODO: more robust version detection. 
Latest version isn't always the first on the page let gentoo = thread::spawn(|| { let version= get_versions( "https://packages.gentoo.org/packages/sys-kernel/gentoo-sources", "td.kk-version.kk-cell-sep-right > strong > a"); println!("Gentoo: {}", version); }); // TODO: more robust version detection. stable (in the version table) isn't always the latest version, which might be shown under #latest_link, etc. let vanilla = thread::spawn(|| { let version = get_version("https://www.kernel.org", "#latest_link > a"); println!("Vanilla: {}", version); }); let local = thread::spawn(|| { let local_kernel = linuxver::version().unwrap(); println!("Currently running kernel version: {}.{}.{}", local_kernel.major, local_kernel.minor, local_kernel.patch ); }); // #[warn(unused_must_use)] on by default let _ = vanilla.join(); let _ = gentoo.join(); let _ = local.join(); }
33.551724
153
0.656389
d72169c5b1bd848ed0df1f8eebbf7c523f58c293
196,807
// Generated file, to regenerate run // ./gen-array-impls.sh > src/array/generated_impl.rs // from the repo root use super::Array; impl<T: Default> Array for [T; 0] { type Item = T; const CAPACITY: usize = 0; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [] } } impl<T: Default> Array for [T; 1] { type Item = T; const CAPACITY: usize = 1; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [T::default()] } } impl<T: Default> Array for [T; 2] { type Item = T; const CAPACITY: usize = 2; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [T::default(), T::default()] } } impl<T: Default> Array for [T; 3] { type Item = T; const CAPACITY: usize = 3; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [T::default(), T::default(), T::default()] } } impl<T: Default> Array for [T; 4] { type Item = T; const CAPACITY: usize = 4; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [T::default(), T::default(), T::default(), T::default()] } } impl<T: Default> Array for [T; 5] { type Item = T; const CAPACITY: usize = 5; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [T::default(), T::default(), T::default(), T::default(), T::default()] } } 
impl<T: Default> Array for [T; 6] { type Item = T; const CAPACITY: usize = 6; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 7] { type Item = T; const CAPACITY: usize = 7; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 8] { type Item = T; const CAPACITY: usize = 8; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 9] { type Item = T; const CAPACITY: usize = 9; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 10] { type Item = T; const CAPACITY: usize = 10; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 11] { type 
Item = T; const CAPACITY: usize = 11; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 12] { type Item = T; const CAPACITY: usize = 12; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 13] { type Item = T; const CAPACITY: usize = 13; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 14] { type Item = T; const CAPACITY: usize = 14; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 15] { type Item = T; const CAPACITY: usize = 15; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] 
{ &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 16] { type Item = T; const CAPACITY: usize = 16; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 17] { type Item = T; const CAPACITY: usize = 17; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 18] { type Item = T; const CAPACITY: usize = 18; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 19] { type Item = T; const CAPACITY: usize = 19; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] 
#[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 20] { type Item = T; const CAPACITY: usize = 20; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 21] { type Item = T; const CAPACITY: usize = 21; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 22] { type Item = T; const CAPACITY: usize = 22; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 23] { type Item = T; const CAPACITY: usize = 23; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 24] { type Item = T; const CAPACITY: usize = 24; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 25] { type Item = T; const CAPACITY: usize = 25; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 
26] { type Item = T; const CAPACITY: usize = 26; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 27] { type Item = T; const CAPACITY: usize = 27; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 28] { type Item = T; const CAPACITY: usize = 28; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 29] { type Item = T; 
const CAPACITY: usize = 29; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 30] { type Item = T; const CAPACITY: usize = 30; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 31] { type Item = T; const CAPACITY: usize = 31; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 32] { type Item = T; const CAPACITY: usize = 32; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 33] { type Item = T; const CAPACITY: usize = 33; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 64] { type Item = T; const CAPACITY: usize = 64; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 128] { type Item = T; const CAPACITY: usize = 128; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } } impl<T: Default> Array for [T; 256] { type Item = T; const CAPACITY: usize = 256; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
// NOTE(review): the next 49 `T::default()` entries and the `] } }` complete
// the PRECEDING generated impl, whose opening lies above this region; they
// are preserved token-for-token (only re-wrapped, 10 entries per line, so
// the count is auditable).
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(),
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(),
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(),
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(),
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(),
] } }

/// `Array` backing-store support for 512-element arrays of any
/// `Default`-constructible item type.
impl<T: Default> Array for [T; 512] {
    type Item = T;
    const CAPACITY: usize = 512;

    /// Borrows the whole array as a shared slice.
    #[inline(always)]
    #[must_use]
    fn as_slice(&self) -> &[T] {
        &*self
    }

    /// Borrows the whole array as a mutable slice.
    #[inline(always)]
    #[must_use]
    fn as_slice_mut(&mut self) -> &mut [T] {
        &mut *self
    }

    /// Builds the array by invoking `T::default()` once per element, in
    /// index order — exactly what the original machine-generated
    /// 512-entry `[T::default(), …]` literal did, but maintainable.
    ///
    /// NOTE(review): `core::array::from_fn` is stable since Rust 1.63;
    /// confirm against the crate's MSRV before merging.
    #[inline(always)]
    fn default() -> Self {
        core::array::from_fn(|_| T::default())
    }
}

/// `Array` backing-store support for 1024-element arrays of any
/// `Default`-constructible item type.
impl<T: Default> Array for [T; 1024] {
    type Item = T;
    const CAPACITY: usize = 1024;

    /// Borrows the whole array as a shared slice.
    #[inline(always)]
    #[must_use]
    fn as_slice(&self) -> &[T] {
        &*self
    }

    /// Borrows the whole array as a mutable slice.
    #[inline(always)]
    #[must_use]
    fn as_slice_mut(&mut self) -> &mut [T] {
        &mut *self
    }

    /// Builds the array by invoking `T::default()` once per element, in
    /// index order — exactly what the original machine-generated
    /// 1024-entry `[T::default(), …]` literal did, but maintainable.
    ///
    /// NOTE(review): `core::array::from_fn` is stable since Rust 1.63;
    /// confirm against the crate's MSRV before merging.
    #[inline(always)]
    fn default() -> Self {
        core::array::from_fn(|_| T::default())
    }
}

// NOTE(review): the FOLLOWING generated impl for `[T; 2048]` continues past
// this region (its `default()` array literal is completed on later lines),
// so its opening — header, open `[`, and the first 59 `T::default()`
// entries — is preserved token-for-token below (entry run re-wrapped,
// 10 per line, so the count is auditable).
impl<T: Default> Array for [T; 2048] { type Item = T; const CAPACITY: usize = 2048; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(),
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(),
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(),
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(),
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(),
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(),
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), ] } } impl<T: Default> Array for [T; 4096] { type Item = T; const CAPACITY: usize = 4096; #[inline(always)] #[must_use] fn as_slice(&self) -> &[T] { &*self } #[inline(always)] #[must_use] fn as_slice_mut(&mut self) -> &mut [T] { &mut *self } #[inline(always)] fn default() -> Self { [ T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), 
T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), T::default(), ] } }
20.46449
75
0.38867
5d7c31d779eb050de09a2c7a307e3a8d7053b5e9
5,254
use bevy::prelude::*; /// This example illustrates how to use States to control transitioning from a Menu state to an InGame state. fn main() { App::build() .add_plugins(DefaultPlugins) .init_resource::<ButtonMaterials>() .add_resource(State::new(AppState::Menu)) .add_stage_after(stage::UPDATE, STAGE, StateStage::<AppState>::default()) .on_state_enter(STAGE, AppState::Menu, setup_menu.system()) .on_state_update(STAGE, AppState::Menu, menu.system()) .on_state_exit(STAGE, AppState::Menu, cleanup_menu.system()) .on_state_enter(STAGE, AppState::InGame, setup_game.system()) .on_state_update(STAGE, AppState::InGame, movement.system()) .on_state_update(STAGE, AppState::InGame, change_color.system()) .run(); } const STAGE: &str = "app_state"; #[derive(Clone)] enum AppState { Menu, InGame, } struct MenuData { button_entity: Entity, } fn setup_menu( commands: &mut Commands, asset_server: Res<AssetServer>, button_materials: Res<ButtonMaterials>, ) { commands // ui camera .spawn(CameraUiBundle::default()) .spawn(ButtonBundle { style: Style { size: Size::new(Val::Px(150.0), Val::Px(65.0)), // center button margin: Rect::all(Val::Auto), // horizontally center child text justify_content: JustifyContent::Center, // vertically center child text align_items: AlignItems::Center, ..Default::default() }, material: button_materials.normal.clone(), ..Default::default() }) .with_children(|parent| { parent.spawn(TextBundle { text: Text { value: "Play".to_string(), font: asset_server.load("fonts/FiraSans-Bold.ttf"), style: TextStyle { font_size: 40.0, color: Color::rgb(0.9, 0.9, 0.9), ..Default::default() }, }, ..Default::default() }); }); commands.insert_resource(MenuData { button_entity: commands.current_entity().unwrap(), }); } fn menu( mut state: ResMut<State<AppState>>, button_materials: Res<ButtonMaterials>, mut interaction_query: Query< (&Interaction, &mut Handle<ColorMaterial>), (Mutated<Interaction>, With<Button>), >, ) { for (interaction, mut material) in 
interaction_query.iter_mut() { match *interaction { Interaction::Clicked => { *material = button_materials.pressed.clone(); state.set_next(AppState::InGame).unwrap(); } Interaction::Hovered => { *material = button_materials.hovered.clone(); } Interaction::None => { *material = button_materials.normal.clone(); } } } } fn cleanup_menu(commands: &mut Commands, menu_data: Res<MenuData>) { commands.despawn_recursive(menu_data.button_entity); } fn setup_game( commands: &mut Commands, asset_server: Res<AssetServer>, mut materials: ResMut<Assets<ColorMaterial>>, ) { let texture_handle = asset_server.load("branding/icon.png"); commands .spawn(Camera2dBundle::default()) .spawn(SpriteBundle { material: materials.add(texture_handle.into()), ..Default::default() }); } const SPEED: f32 = 100.0; fn movement( time: Res<Time>, input: Res<Input<KeyCode>>, mut query: Query<&mut Transform, With<Sprite>>, ) { for mut transform in query.iter_mut() { let mut direction = Vec3::default(); if input.pressed(KeyCode::Left) { direction.x += 1.0; } if input.pressed(KeyCode::Right) { direction.x -= 1.0; } if input.pressed(KeyCode::Up) { direction.y += 1.0; } if input.pressed(KeyCode::Down) { direction.y -= 1.0; } if direction != Vec3::default() { transform.translation += direction.normalize() * SPEED * time.delta_seconds(); } } } fn change_color( time: Res<Time>, mut assets: ResMut<Assets<ColorMaterial>>, query: Query<&Handle<ColorMaterial>, With<Sprite>>, ) { for handle in query.iter() { let material = assets.get_mut(handle).unwrap(); material .color .set_b((time.seconds_since_startup() * 5.0).sin() as f32 + 2.0); } } struct ButtonMaterials { normal: Handle<ColorMaterial>, hovered: Handle<ColorMaterial>, pressed: Handle<ColorMaterial>, } impl FromResources for ButtonMaterials { fn from_resources(resources: &Resources) -> Self { let mut materials = resources.get_mut::<Assets<ColorMaterial>>().unwrap(); ButtonMaterials { normal: materials.add(Color::rgb(0.15, 0.15, 0.15).into()), hovered: 
materials.add(Color::rgb(0.25, 0.25, 0.25).into()), pressed: materials.add(Color::rgb(0.35, 0.75, 0.35).into()), } } }
30.905882
109
0.568329
1e4d2241b86997a6fe5676109001b554c5c6a46d
11,686
use super::*; use enumflags2::{bitflags, BitFlags}; use std::{ collections::{vec_deque::Drain, HashMap, VecDeque}, str::FromStr, }; #[derive(Debug)] pub enum InputEvent { Key { down: bool, keycode: mq::KeyCode, keymods: mq::KeyMods, repeat: bool, }, Mouse { down: bool, button: mq::MouseButton, x: f32, y: f32, }, Wheel { dx: f32, dy: f32, }, } #[derive(Debug, Default)] pub struct InputEngine { pub mouse_x: f32, pub mouse_y: f32, pub(crate) queue: VecDeque<InputEvent>, pub state: BitFlags<InputState>, pub(crate) binds: HashMap<KeyBind, Vec<BindEntry>>, } #[derive(Debug)] pub enum BindEntry { State(KeyMods, BitFlags<InputState>), Script(KeyMods, String), } impl InputEngine { pub fn new() -> Self { InputEngine { mouse_x: 0.0, mouse_y: 0.0, queue: VecDeque::new(), state: Default::default(), binds: HashMap::new(), } } pub(crate) fn set_mouse_position(&mut self, x: f32, y: f32) { self.mouse_x = x; self.mouse_y = y; } pub fn add_event(&mut self, event: InputEvent) { self.queue.push_back(event); } pub fn drain_events(&mut self) -> Drain<'_, InputEvent> { self.queue.drain(..) 
} pub fn bind_key<F: Into<BitFlags<InputState>>>( &mut self, key: KeyBind, mods: KeyMods, inputs: F, ) { let bind = self.binds.entry(key).or_default(); bind.push(BindEntry::State(mods, inputs.into())); } pub fn bind_script<S: Into<String>>(&mut self, key: KeyBind, mods: KeyMods, script: S) { let bind = self.binds.entry(key).or_default(); bind.push(BindEntry::Script(mods, script.into())); } pub fn unbind_key(&mut self, key: KeyBind) { self.binds.remove(&key); } pub fn unbind_all(&mut self) { self.binds.clear(); } } #[inline] fn check_mods(mods: KeyMods, keymods: mq::KeyMods) -> bool { (!mods.shift || keymods.shift) && (!mods.alt || keymods.alt) && (!mods.ctrl || keymods.ctrl) } impl<G: Game> Runner<G> { pub fn handle_bind(&mut self, key: &KeyBind, keymods: mq::KeyMods, down: bool) { if let Some(binds) = self.input.binds.get(key) { for bind in binds { match &bind { BindEntry::State(mods, state) => { if down { if check_mods(*mods, keymods) { self.input.state.insert(*state); } } else { self.input.state.remove(*state); } } BindEntry::Script(mods, script) => { if down && check_mods(*mods, keymods) { if let Err(err) = self.event_sender.try_send(Event::Command(script.clone())) { log::error!("Cannot send Command Event: {}", err); } } } } } } } } #[bitflags] #[repr(u32)] #[derive(Copy, Clone, Debug, PartialEq)] pub enum InputState { MoveLeft, MoveRight, Jump, Crouch, Prone, Fire, Jet, ChangeWeapon, Reload, DropWeapon, ThrowGrenade, Chat, TeamChat, Radio, Weapons, FragsList, StatsMenu, MiniMap, Cmd, GameStats, } impl FromStr for InputState { type Err = (); fn from_str(s: &str) -> Result<Self, Self::Err> { match s.to_lowercase().as_str() { "left" => Ok(InputState::MoveLeft), "moveleft" => Ok(InputState::MoveLeft), "right" => Ok(InputState::MoveRight), "moveright" => Ok(InputState::MoveRight), "jump" => Ok(InputState::Jump), "crouch" => Ok(InputState::Crouch), "prone" => Ok(InputState::Prone), "fire" => Ok(InputState::Fire), "jet" => Ok(InputState::Jet), "changeweapon" => 
Ok(InputState::ChangeWeapon), "reload" => Ok(InputState::Reload), "dropweapon" => Ok(InputState::DropWeapon), "throwgrenade" => Ok(InputState::ThrowGrenade), "chat" => Ok(InputState::Chat), "teamchat" => Ok(InputState::TeamChat), "radio" => Ok(InputState::Radio), "weapons" => Ok(InputState::Weapons), "fragslist" => Ok(InputState::FragsList), "statsmenu" => Ok(InputState::StatsMenu), "minimap" => Ok(InputState::MiniMap), "cmd" => Ok(InputState::Cmd), "gamestats" => Ok(InputState::GameStats), _ => Err(()), } } } #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub enum Direction { Up, Down, Left, Right, } #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub enum KeyBind { Key(mq::KeyCode), Mouse(mq::MouseButton), Wheel(Direction), Axis(Direction), Button(), } #[derive(Default, Debug, Copy, Clone)] pub struct KeyMods { pub shift: bool, pub ctrl: bool, pub alt: bool, } impl std::str::FromStr for KeyBind { type Err = (); fn from_str(s: &str) -> Result<Self, Self::Err> { keycode_from_str(s).map(KeyBind::Key).map_err(|err| err) } } fn keycode_from_str(input: &str) -> Result<mq::KeyCode, ()> { match input.to_ascii_lowercase().as_str() { "space" => Ok(mq::KeyCode::Space), " " => Ok(mq::KeyCode::Space), "apostrophe" => Ok(mq::KeyCode::Apostrophe), "'" => Ok(mq::KeyCode::Apostrophe), "comma" => Ok(mq::KeyCode::Comma), "," => Ok(mq::KeyCode::Comma), "minus" => Ok(mq::KeyCode::Minus), "-" => Ok(mq::KeyCode::Minus), "period" => Ok(mq::KeyCode::Period), "." 
=> Ok(mq::KeyCode::Period), "slash" => Ok(mq::KeyCode::Slash), "/" => Ok(mq::KeyCode::Slash), "key0" => Ok(mq::KeyCode::Key0), "key1" => Ok(mq::KeyCode::Key1), "key2" => Ok(mq::KeyCode::Key2), "key3" => Ok(mq::KeyCode::Key3), "key4" => Ok(mq::KeyCode::Key4), "key5" => Ok(mq::KeyCode::Key5), "key6" => Ok(mq::KeyCode::Key6), "key7" => Ok(mq::KeyCode::Key7), "key8" => Ok(mq::KeyCode::Key8), "key9" => Ok(mq::KeyCode::Key9), "0" => Ok(mq::KeyCode::Key0), "1" => Ok(mq::KeyCode::Key1), "2" => Ok(mq::KeyCode::Key2), "3" => Ok(mq::KeyCode::Key3), "4" => Ok(mq::KeyCode::Key4), "5" => Ok(mq::KeyCode::Key5), "6" => Ok(mq::KeyCode::Key6), "7" => Ok(mq::KeyCode::Key7), "8" => Ok(mq::KeyCode::Key8), "9" => Ok(mq::KeyCode::Key9), "semicolon" => Ok(mq::KeyCode::Semicolon), ";" => Ok(mq::KeyCode::Semicolon), "equal" => Ok(mq::KeyCode::Equal), "=" => Ok(mq::KeyCode::Equal), "a" => Ok(mq::KeyCode::A), "b" => Ok(mq::KeyCode::B), "c" => Ok(mq::KeyCode::C), "d" => Ok(mq::KeyCode::D), "e" => Ok(mq::KeyCode::E), "f" => Ok(mq::KeyCode::F), "g" => Ok(mq::KeyCode::G), "h" => Ok(mq::KeyCode::H), "i" => Ok(mq::KeyCode::I), "j" => Ok(mq::KeyCode::J), "k" => Ok(mq::KeyCode::K), "l" => Ok(mq::KeyCode::L), "m" => Ok(mq::KeyCode::M), "n" => Ok(mq::KeyCode::N), "o" => Ok(mq::KeyCode::O), "p" => Ok(mq::KeyCode::P), "q" => Ok(mq::KeyCode::Q), "r" => Ok(mq::KeyCode::R), "s" => Ok(mq::KeyCode::S), "t" => Ok(mq::KeyCode::T), "u" => Ok(mq::KeyCode::U), "v" => Ok(mq::KeyCode::V), "w" => Ok(mq::KeyCode::W), "x" => Ok(mq::KeyCode::X), "y" => Ok(mq::KeyCode::Y), "z" => Ok(mq::KeyCode::Z), "leftbracket" => Ok(mq::KeyCode::LeftBracket), "[" => Ok(mq::KeyCode::LeftBracket), "backslash" => Ok(mq::KeyCode::Backslash), "\\" => Ok(mq::KeyCode::Backslash), "rightbracket" => Ok(mq::KeyCode::RightBracket), "]" => Ok(mq::KeyCode::RightBracket), "graveaccent" => Ok(mq::KeyCode::GraveAccent), "`" => Ok(mq::KeyCode::GraveAccent), "world1" => Ok(mq::KeyCode::World1), "world2" => Ok(mq::KeyCode::World2), "escape" => 
Ok(mq::KeyCode::Escape), "esc" => Ok(mq::KeyCode::Escape), "enter" => Ok(mq::KeyCode::Enter), "return" => Ok(mq::KeyCode::Enter), "tab" => Ok(mq::KeyCode::Tab), "backspace" => Ok(mq::KeyCode::Backspace), "insert" => Ok(mq::KeyCode::Insert), "delete" => Ok(mq::KeyCode::Delete), "right" => Ok(mq::KeyCode::Right), "left" => Ok(mq::KeyCode::Left), "down" => Ok(mq::KeyCode::Down), "up" => Ok(mq::KeyCode::Up), "pageup" => Ok(mq::KeyCode::PageUp), "pagedown" => Ok(mq::KeyCode::PageDown), "home" => Ok(mq::KeyCode::Home), "end" => Ok(mq::KeyCode::End), "capslock" => Ok(mq::KeyCode::CapsLock), "scrolllock" => Ok(mq::KeyCode::ScrollLock), "numlock" => Ok(mq::KeyCode::NumLock), "printscreen" => Ok(mq::KeyCode::PrintScreen), "pause" => Ok(mq::KeyCode::Pause), "f1" => Ok(mq::KeyCode::F1), "f2" => Ok(mq::KeyCode::F2), "f3" => Ok(mq::KeyCode::F3), "f4" => Ok(mq::KeyCode::F4), "f5" => Ok(mq::KeyCode::F5), "f6" => Ok(mq::KeyCode::F6), "f7" => Ok(mq::KeyCode::F7), "f8" => Ok(mq::KeyCode::F8), "f9" => Ok(mq::KeyCode::F9), "f10" => Ok(mq::KeyCode::F10), "f11" => Ok(mq::KeyCode::F11), "f12" => Ok(mq::KeyCode::F12), "f13" => Ok(mq::KeyCode::F13), "f14" => Ok(mq::KeyCode::F14), "f15" => Ok(mq::KeyCode::F15), "f16" => Ok(mq::KeyCode::F16), "f17" => Ok(mq::KeyCode::F17), "f18" => Ok(mq::KeyCode::F18), "f19" => Ok(mq::KeyCode::F19), "f20" => Ok(mq::KeyCode::F20), "f21" => Ok(mq::KeyCode::F21), "f22" => Ok(mq::KeyCode::F22), "f23" => Ok(mq::KeyCode::F23), "f24" => Ok(mq::KeyCode::F24), "f25" => Ok(mq::KeyCode::F25), "kp0" => Ok(mq::KeyCode::Kp0), "kp1" => Ok(mq::KeyCode::Kp1), "kp2" => Ok(mq::KeyCode::Kp2), "kp3" => Ok(mq::KeyCode::Kp3), "kp4" => Ok(mq::KeyCode::Kp4), "kp5" => Ok(mq::KeyCode::Kp5), "kp6" => Ok(mq::KeyCode::Kp6), "kp7" => Ok(mq::KeyCode::Kp7), "kp8" => Ok(mq::KeyCode::Kp8), "kp9" => Ok(mq::KeyCode::Kp9), "kpdecimal" => Ok(mq::KeyCode::KpDecimal), "kpdivide" => Ok(mq::KeyCode::KpDivide), "kpmultiply" => Ok(mq::KeyCode::KpMultiply), "kpsubtract" => Ok(mq::KeyCode::KpSubtract), 
"kpadd" => Ok(mq::KeyCode::KpAdd), "kpenter" => Ok(mq::KeyCode::KpEnter), "kpequal" => Ok(mq::KeyCode::KpEqual), "leftshift" => Ok(mq::KeyCode::LeftShift), "leftcontrol" => Ok(mq::KeyCode::LeftControl), "leftalt" => Ok(mq::KeyCode::LeftAlt), "leftsuper" => Ok(mq::KeyCode::LeftSuper), "rightshift" => Ok(mq::KeyCode::RightShift), "rightcontrol" => Ok(mq::KeyCode::RightControl), "rightalt" => Ok(mq::KeyCode::RightAlt), "rightsuper" => Ok(mq::KeyCode::RightSuper), "menu" => Ok(mq::KeyCode::Menu), "unknown" => Ok(mq::KeyCode::Unknown), _ => Err(()), } }
31.928962
96
0.487763
69819064975a9d526b95c0761bca947a50e81521
138,879
#![doc = "generated by AutoRust 0.1.0"] #![allow(unused_mut)] #![allow(unused_variables)] #![allow(unused_imports)] use super::{models, API_VERSION}; #[derive(Clone)] pub struct Client { endpoint: String, credential: std::sync::Arc<dyn azure_core::TokenCredential>, scopes: Vec<String>, pipeline: azure_core::pipeline::Pipeline, } #[derive(Clone)] pub struct ClientBuilder { credential: std::sync::Arc<dyn azure_core::TokenCredential>, endpoint: Option<String>, scopes: Option<Vec<String>>, } pub const DEFAULT_ENDPOINT: &str = azure_core::resource_manager_endpoint::AZURE_PUBLIC_CLOUD; impl ClientBuilder { pub fn new(credential: std::sync::Arc<dyn azure_core::TokenCredential>) -> Self { Self { credential, endpoint: None, scopes: None, } } pub fn endpoint(mut self, endpoint: impl Into<String>) -> Self { self.endpoint = Some(endpoint.into()); self } pub fn scopes(mut self, scopes: &[&str]) -> Self { self.scopes = Some(scopes.iter().map(|scope| (*scope).to_owned()).collect()); self } pub fn build(self) -> Client { let endpoint = self.endpoint.unwrap_or_else(|| DEFAULT_ENDPOINT.to_owned()); let scopes = self.scopes.unwrap_or_else(|| vec![format!("{}/", endpoint)]); Client::new(endpoint, self.credential, scopes) } } impl Client { pub(crate) fn endpoint(&self) -> &str { self.endpoint.as_str() } pub(crate) fn token_credential(&self) -> &dyn azure_core::TokenCredential { self.credential.as_ref() } pub(crate) fn scopes(&self) -> Vec<&str> { self.scopes.iter().map(String::as_str).collect() } pub(crate) async fn send(&self, request: impl Into<azure_core::Request>) -> Result<azure_core::Response, azure_core::Error> { let mut context = azure_core::Context::default(); let mut request = request.into(); self.pipeline.send(&mut context, &mut request).await } pub fn new(endpoint: impl Into<String>, credential: std::sync::Arc<dyn azure_core::TokenCredential>, scopes: Vec<String>) -> Self { let endpoint = endpoint.into(); let pipeline = azure_core::pipeline::Pipeline::new( 
option_env!("CARGO_PKG_NAME"), option_env!("CARGO_PKG_VERSION"), azure_core::ClientOptions::default(), Vec::new(), Vec::new(), ); Self { endpoint, credential, scopes, pipeline, } } pub fn consumer_groups(&self) -> consumer_groups::Client { consumer_groups::Client(self.clone()) } pub fn event_hubs(&self) -> event_hubs::Client { event_hubs::Client(self.clone()) } pub fn namespaces(&self) -> namespaces::Client { namespaces::Client(self.clone()) } pub fn operations(&self) -> operations::Client { operations::Client(self.clone()) } } #[non_exhaustive] #[derive(Debug, thiserror :: Error)] #[allow(non_camel_case_types)] pub enum Error { #[error(transparent)] Operations_List(#[from] operations::list::Error), #[error(transparent)] Namespaces_CheckNameAvailability(#[from] namespaces::check_name_availability::Error), #[error(transparent)] Namespaces_CheckNameSpaceAvailability(#[from] namespaces::check_name_space_availability::Error), #[error(transparent)] Namespaces_ListBySubscription(#[from] namespaces::list_by_subscription::Error), #[error(transparent)] Namespaces_ListByResourceGroup(#[from] namespaces::list_by_resource_group::Error), #[error(transparent)] Namespaces_Get(#[from] namespaces::get::Error), #[error(transparent)] Namespaces_CreateOrUpdate(#[from] namespaces::create_or_update::Error), #[error(transparent)] Namespaces_Update(#[from] namespaces::update::Error), #[error(transparent)] Namespaces_Delete(#[from] namespaces::delete::Error), #[error(transparent)] Namespaces_ListAuthorizationRules(#[from] namespaces::list_authorization_rules::Error), #[error(transparent)] Namespaces_ListPostAuthorizationRules(#[from] namespaces::list_post_authorization_rules::Error), #[error(transparent)] Namespaces_GetAuthorizationRule(#[from] namespaces::get_authorization_rule::Error), #[error(transparent)] Namespaces_PostAuthorizationRule(#[from] namespaces::post_authorization_rule::Error), #[error(transparent)] Namespaces_CreateOrUpdateAuthorizationRule(#[from] 
namespaces::create_or_update_authorization_rule::Error), #[error(transparent)] Namespaces_DeleteAuthorizationRule(#[from] namespaces::delete_authorization_rule::Error), #[error(transparent)] EventHubs_ListAll(#[from] event_hubs::list_all::Error), #[error(transparent)] EventHubs_Get(#[from] event_hubs::get::Error), #[error(transparent)] EventHubs_CreateOrUpdate(#[from] event_hubs::create_or_update::Error), #[error(transparent)] EventHubs_Delete(#[from] event_hubs::delete::Error), #[error(transparent)] EventHubs_ListAuthorizationRules(#[from] event_hubs::list_authorization_rules::Error), #[error(transparent)] EventHubs_GetAuthorizationRule(#[from] event_hubs::get_authorization_rule::Error), #[error(transparent)] EventHubs_PostAuthorizationRule(#[from] event_hubs::post_authorization_rule::Error), #[error(transparent)] EventHubs_CreateOrUpdateAuthorizationRule(#[from] event_hubs::create_or_update_authorization_rule::Error), #[error(transparent)] EventHubs_DeleteAuthorizationRule(#[from] event_hubs::delete_authorization_rule::Error), #[error(transparent)] ConsumerGroups_Get(#[from] consumer_groups::get::Error), #[error(transparent)] ConsumerGroups_CreateOrUpdate(#[from] consumer_groups::create_or_update::Error), #[error(transparent)] ConsumerGroups_Delete(#[from] consumer_groups::delete::Error), #[error(transparent)] ConsumerGroups_ListAll(#[from] consumer_groups::list_all::Error), } pub mod operations { use super::{models, API_VERSION}; pub struct Client(pub(crate) super::Client); impl Client { pub fn list(&self) -> list::Builder { list::Builder { client: self.0.clone() } } } pub mod list { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrl(url::ParseError), #[error("Failed to build request: {0}")] BuildRequest(http::Error), #[error("Failed to 
serialize request body: {0}")] Serialize(serde_json::Error), #[error("Failed to get access token: {0}")] GetToken(azure_core::Error), #[error("Failed to execute request: {0}")] SendRequest(azure_core::Error), #[error("Failed to get response bytes: {0}")] ResponseBytes(azure_core::StreamError), #[error("Failed to deserialize response: {0}, body: {1:?}")] Deserialize(serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::OperationListResult, Error>> { Box::pin(async move { let url_str = &format!("{}/providers/Microsoft.EventHub/operations", self.client.endpoint(),); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::OperationListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; 
Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } } pub mod namespaces { use super::{models, API_VERSION}; pub struct Client(pub(crate) super::Client); impl Client { pub fn check_name_availability( &self, subscription_id: impl Into<String>, parameters: impl Into<models::CheckNameAvailabilityParameter>, ) -> check_name_availability::Builder { check_name_availability::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), parameters: parameters.into(), } } pub fn check_name_space_availability( &self, subscription_id: impl Into<String>, parameters: impl Into<models::CheckNameAvailabilityParameter>, ) -> check_name_space_availability::Builder { check_name_space_availability::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), parameters: parameters.into(), } } pub fn list_by_subscription(&self, subscription_id: impl Into<String>) -> list_by_subscription::Builder { list_by_subscription::Builder { client: self.0.clone(), subscription_id: subscription_id.into(), } } pub fn list_by_resource_group( &self, resource_group_name: impl Into<String>, subscription_id: impl Into<String>, ) -> list_by_resource_group::Builder { list_by_resource_group::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), subscription_id: subscription_id.into(), } } pub fn get( &self, resource_group_name: impl Into<String>, namespace_name: impl Into<String>, subscription_id: impl Into<String>, ) -> get::Builder { get::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), namespace_name: namespace_name.into(), subscription_id: subscription_id.into(), } } pub fn create_or_update( &self, resource_group_name: impl Into<String>, namespace_name: impl Into<String>, parameters: impl Into<models::NamespaceCreateOrUpdateParameters>, subscription_id: impl Into<String>, ) -> create_or_update::Builder { create_or_update::Builder { client: self.0.clone(), resource_group_name: 
resource_group_name.into(), namespace_name: namespace_name.into(), parameters: parameters.into(), subscription_id: subscription_id.into(), } } pub fn update( &self, resource_group_name: impl Into<String>, namespace_name: impl Into<String>, parameters: impl Into<models::NamespaceUpdateParameter>, subscription_id: impl Into<String>, ) -> update::Builder { update::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), namespace_name: namespace_name.into(), parameters: parameters.into(), subscription_id: subscription_id.into(), } } pub fn delete( &self, resource_group_name: impl Into<String>, namespace_name: impl Into<String>, subscription_id: impl Into<String>, ) -> delete::Builder { delete::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), namespace_name: namespace_name.into(), subscription_id: subscription_id.into(), } } pub fn list_authorization_rules( &self, resource_group_name: impl Into<String>, namespace_name: impl Into<String>, subscription_id: impl Into<String>, ) -> list_authorization_rules::Builder { list_authorization_rules::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), namespace_name: namespace_name.into(), subscription_id: subscription_id.into(), } } pub fn list_post_authorization_rules( &self, resource_group_name: impl Into<String>, namespace_name: impl Into<String>, subscription_id: impl Into<String>, ) -> list_post_authorization_rules::Builder { list_post_authorization_rules::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), namespace_name: namespace_name.into(), subscription_id: subscription_id.into(), } } pub fn get_authorization_rule( &self, resource_group_name: impl Into<String>, namespace_name: impl Into<String>, authorization_rule_name: impl Into<String>, subscription_id: impl Into<String>, ) -> get_authorization_rule::Builder { get_authorization_rule::Builder { client: self.0.clone(), resource_group_name: 
resource_group_name.into(),
                namespace_name: namespace_name.into(),
                authorization_rule_name: authorization_rule_name.into(),
                subscription_id: subscription_id.into(),
            }
        }
        /// Returns a `Builder` for a POST against a namespace authorization rule.
        pub fn post_authorization_rule(
            &self,
            resource_group_name: impl Into<String>,
            namespace_name: impl Into<String>,
            authorization_rule_name: impl Into<String>,
            subscription_id: impl Into<String>,
        ) -> post_authorization_rule::Builder {
            post_authorization_rule::Builder {
                client: self.0.clone(),
                resource_group_name: resource_group_name.into(),
                namespace_name: namespace_name.into(),
                authorization_rule_name: authorization_rule_name.into(),
                subscription_id: subscription_id.into(),
            }
        }
        /// Returns a `Builder` that creates or updates a namespace authorization rule
        /// from the given `parameters`.
        pub fn create_or_update_authorization_rule(
            &self,
            resource_group_name: impl Into<String>,
            namespace_name: impl Into<String>,
            authorization_rule_name: impl Into<String>,
            parameters: impl Into<models::SharedAccessAuthorizationRuleCreateOrUpdateParameters>,
            subscription_id: impl Into<String>,
        ) -> create_or_update_authorization_rule::Builder {
            create_or_update_authorization_rule::Builder {
                client: self.0.clone(),
                resource_group_name: resource_group_name.into(),
                namespace_name: namespace_name.into(),
                authorization_rule_name: authorization_rule_name.into(),
                parameters: parameters.into(),
                subscription_id: subscription_id.into(),
            }
        }
        /// Returns a `Builder` that deletes a namespace authorization rule.
        pub fn delete_authorization_rule(
            &self,
            resource_group_name: impl Into<String>,
            namespace_name: impl Into<String>,
            authorization_rule_name: impl Into<String>,
            subscription_id: impl Into<String>,
        ) -> delete_authorization_rule::Builder {
            delete_authorization_rule::Builder {
                client: self.0.clone(),
                resource_group_name: resource_group_name.into(),
                namespace_name: namespace_name.into(),
                authorization_rule_name: authorization_rule_name.into(),
                subscription_id: subscription_id.into(),
            }
        }
    }
    /// Request builder and error type for the `CheckNameAvailability` operation.
    pub mod check_name_availability {
        use super::{models, API_VERSION};
        /// Everything that can go wrong while building, sending, or decoding the request.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            // Non-200 status: the raw body is kept for diagnostics.
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrl(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequest(http::Error),
            #[error("Failed to serialize request body: {0}")]
            Serialize(serde_json::Error),
            #[error("Failed to get access token: {0}")]
            GetToken(azure_core::Error),
            #[error("Failed to execute request: {0}")]
            SendRequest(azure_core::Error),
            #[error("Failed to get response bytes: {0}")]
            ResponseBytes(azure_core::StreamError),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            Deserialize(serde_json::Error, bytes::Bytes),
        }
        /// Holds the operation parameters; call [`Builder::into_future`] to send the request.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) subscription_id: String,
            pub(crate) parameters: models::CheckNameAvailabilityParameter,
        }
        impl Builder {
            /// Sends the POST request and deserializes the 200 response body.
            pub fn into_future(
                self,
            ) -> futures::future::BoxFuture<'static, std::result::Result<models::CheckNameAvailabilityResult, Error>> {
                Box::pin(async move {
                    // Subscription-scoped provider URL for the name-availability check.
                    let url_str = &format!(
                        "{}/subscriptions/{}/providers/Microsoft.EventHub/CheckNameAvailability",
                        self.client.endpoint(),
                        &self.subscription_id
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::POST);
                    // Acquire an AAD bearer token for the client's configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
                    // JSON-encode the CheckNameAvailabilityParameter payload as the request body.
                    req_builder = req_builder.header("content-type", "application/json");
                    let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::CheckNameAvailabilityResult =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            // Any other status is surfaced with its raw body for diagnostics.
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            Err(Error::UnexpectedResponse {
                                status_code,
                                body: rsp_body,
                            })
                        }
                    }
                })
            }
        }
    }
    /// Request builder and error type for the `CheckNamespaceAvailability` operation.
    pub mod check_name_space_availability {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrl(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequest(http::Error),
            #[error("Failed to serialize request body: {0}")]
            Serialize(serde_json::Error),
            #[error("Failed to get access token: {0}")]
            GetToken(azure_core::Error),
            #[error("Failed to execute request: {0}")]
            SendRequest(azure_core::Error),
            #[error("Failed to get response bytes: {0}")]
            ResponseBytes(azure_core::StreamError),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            Deserialize(serde_json::Error, bytes::Bytes),
        }
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) subscription_id: String,
            pub(crate) parameters: models::CheckNameAvailabilityParameter,
        }
        impl Builder {
            pub fn into_future(
                self,
            ) -> futures::future::BoxFuture<'static, std::result::Result<models::CheckNameAvailabilityResult, Error>> {
                Box::pin(async move {
                    // Note the different path segment from `check_name_availability` above:
                    // this one targets CheckNamespaceAvailability.
                    let url_str = &format!(
                        "{}/subscriptions/{}/providers/Microsoft.EventHub/CheckNamespaceAvailability",
                        self.client.endpoint(),
                        &self.subscription_id
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder =
req_builder.method(http::Method::POST); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::CheckNameAvailabilityResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod list_by_subscription { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrl(url::ParseError), #[error("Failed to build request: {0}")] BuildRequest(http::Error), #[error("Failed to serialize request body: {0}")] Serialize(serde_json::Error), #[error("Failed to get access token: {0}")] GetToken(azure_core::Error), #[error("Failed to execute request: {0}")] SendRequest(azure_core::Error), #[error("Failed to get response bytes: {0}")] ResponseBytes(azure_core::StreamError), 
#[error("Failed to deserialize response: {0}, body: {1:?}")] Deserialize(serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::NamespaceListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/providers/Microsoft.EventHub/namespaces", self.client.endpoint(), &self.subscription_id ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::NamespaceListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod list_by_resource_group { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status 
code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrl(url::ParseError), #[error("Failed to build request: {0}")] BuildRequest(http::Error), #[error("Failed to serialize request body: {0}")] Serialize(serde_json::Error), #[error("Failed to get access token: {0}")] GetToken(azure_core::Error), #[error("Failed to execute request: {0}")] SendRequest(azure_core::Error), #[error("Failed to get response bytes: {0}")] ResponseBytes(azure_core::StreamError), #[error("Failed to deserialize response: {0}, body: {1:?}")] Deserialize(serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::NamespaceListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventHub/namespaces", self.client.endpoint(), &self.subscription_id, &self.resource_group_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match 
rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::NamespaceListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod get { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Ok200(models::NamespaceResource), Created201(models::NamespaceResource), } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrl(url::ParseError), #[error("Failed to build request: {0}")] BuildRequest(http::Error), #[error("Failed to serialize request body: {0}")] Serialize(serde_json::Error), #[error("Failed to get access token: {0}")] GetToken(azure_core::Error), #[error("Failed to execute request: {0}")] SendRequest(azure_core::Error), #[error("Failed to get response bytes: {0}")] ResponseBytes(azure_core::StreamError), #[error("Failed to deserialize response: {0}, body: {1:?}")] Deserialize(serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) namespace_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventHub/namespaces/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.namespace_name ); let mut url = 
url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::NamespaceResource = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(Response::Ok200(rsp_value)) } http::StatusCode::CREATED => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::NamespaceResource = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(Response::Created201(rsp_value)) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod create_or_update { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Created201(models::NamespaceResource), Ok200(models::NamespaceResource), Accepted202, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: 
bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrl(url::ParseError), #[error("Failed to build request: {0}")] BuildRequest(http::Error), #[error("Failed to serialize request body: {0}")] Serialize(serde_json::Error), #[error("Failed to get access token: {0}")] GetToken(azure_core::Error), #[error("Failed to execute request: {0}")] SendRequest(azure_core::Error), #[error("Failed to get response bytes: {0}")] ResponseBytes(azure_core::StreamError), #[error("Failed to deserialize response: {0}, body: {1:?}")] Deserialize(serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) namespace_name: String, pub(crate) parameters: models::NamespaceCreateOrUpdateParameters, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventHub/namespaces/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.namespace_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = 
self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::CREATED => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::NamespaceResource = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(Response::Created201(rsp_value)) } http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::NamespaceResource = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(Response::Ok200(rsp_value)) } http::StatusCode::ACCEPTED => Ok(Response::Accepted202), status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod update { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { Created201(models::NamespaceResource), Ok200(models::NamespaceResource), Accepted202, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrl(url::ParseError), #[error("Failed to build request: {0}")] BuildRequest(http::Error), #[error("Failed to serialize request body: {0}")] Serialize(serde_json::Error), #[error("Failed to get access token: {0}")] GetToken(azure_core::Error), #[error("Failed to execute request: {0}")] SendRequest(azure_core::Error), #[error("Failed to get response bytes: {0}")] ResponseBytes(azure_core::StreamError), #[error("Failed to deserialize response: {0}, body: {1:?}")] Deserialize(serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: 
super::super::Client, pub(crate) resource_group_name: String, pub(crate) namespace_name: String, pub(crate) parameters: models::NamespaceUpdateParameter, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventHub/namespaces/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.namespace_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PATCH); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::CREATED => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::NamespaceResource = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(Response::Created201(rsp_value)) } http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::NamespaceResource = 
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(Response::Ok200(rsp_value))
                        }
                        // 202 carries no body the caller needs; report acceptance only.
                        http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
                        status_code => {
                            // Any other status is surfaced with its raw body for diagnostics.
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            Err(Error::UnexpectedResponse {
                                status_code,
                                body: rsp_body,
                            })
                        }
                    }
                })
            }
        }
    }
    /// Request builder, response, and error types for the namespace delete operation.
    pub mod delete {
        use super::{models, API_VERSION};
        /// The status codes the service is expected to return for a delete.
        #[derive(Debug)]
        pub enum Response {
            NoContent204,
            Ok200,
            Accepted202,
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrl(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequest(http::Error),
            #[error("Failed to serialize request body: {0}")]
            Serialize(serde_json::Error),
            #[error("Failed to get access token: {0}")]
            GetToken(azure_core::Error),
            #[error("Failed to execute request: {0}")]
            SendRequest(azure_core::Error),
            #[error("Failed to get response bytes: {0}")]
            ResponseBytes(azure_core::StreamError),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            Deserialize(serde_json::Error, bytes::Bytes),
        }
        /// Holds the operation parameters; call [`Builder::into_future`] to send the request.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) resource_group_name: String,
            pub(crate) namespace_name: String,
            pub(crate) subscription_id: String,
        }
        impl Builder {
            /// Sends the DELETE request; the body is empty and only the status code is inspected.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
                Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventHub/namespaces/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.namespace_name
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::DELETE);
                    // Acquire an AAD bearer token for the client's configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
                    // DELETE sends no payload.
                    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
                        http::StatusCode::OK => Ok(Response::Ok200),
                        http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
                        status_code => {
                            // Any other status is surfaced with its raw body for diagnostics.
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            Err(Error::UnexpectedResponse {
                                status_code,
                                body: rsp_body,
                            })
                        }
                    }
                })
            }
        }
    }
    /// Request builder and error type for listing a namespace's authorization rules.
    pub mod list_authorization_rules {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL: {0}")]
            ParseUrl(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequest(http::Error),
            #[error("Failed to serialize request body: {0}")]
            Serialize(serde_json::Error),
            #[error("Failed to get access token: {0}")]
            GetToken(azure_core::Error),
            #[error("Failed to execute request: {0}")]
            SendRequest(azure_core::Error),
            #[error("Failed to get response bytes: {0}")]
            ResponseBytes(azure_core::StreamError),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            Deserialize(serde_json::Error, bytes::Bytes),
        }
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) resource_group_name: String,
            pub(crate) namespace_name: String,
            pub(crate)
subscription_id: String, } impl Builder { pub fn into_future( self, ) -> futures::future::BoxFuture<'static, std::result::Result<models::SharedAccessAuthorizationRuleListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventHub/namespaces/{}/AuthorizationRules", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.namespace_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::SharedAccessAuthorizationRuleListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod list_post_authorization_rules { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: 
http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrl(url::ParseError), #[error("Failed to build request: {0}")] BuildRequest(http::Error), #[error("Failed to serialize request body: {0}")] Serialize(serde_json::Error), #[error("Failed to get access token: {0}")] GetToken(azure_core::Error), #[error("Failed to execute request: {0}")] SendRequest(azure_core::Error), #[error("Failed to get response bytes: {0}")] ResponseBytes(azure_core::StreamError), #[error("Failed to deserialize response: {0}, body: {1:?}")] Deserialize(serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) namespace_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future( self, ) -> futures::future::BoxFuture<'static, std::result::Result<models::SharedAccessAuthorizationRuleListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventHub/namespaces/{}/AuthorizationRules", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.namespace_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = 
self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::SharedAccessAuthorizationRuleListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod get_authorization_rule { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrl(url::ParseError), #[error("Failed to build request: {0}")] BuildRequest(http::Error), #[error("Failed to serialize request body: {0}")] Serialize(serde_json::Error), #[error("Failed to get access token: {0}")] GetToken(azure_core::Error), #[error("Failed to execute request: {0}")] SendRequest(azure_core::Error), #[error("Failed to get response bytes: {0}")] ResponseBytes(azure_core::StreamError), #[error("Failed to deserialize response: {0}, body: {1:?}")] Deserialize(serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) namespace_name: String, pub(crate) authorization_rule_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future( self, ) -> futures::future::BoxFuture<'static, std::result::Result<models::SharedAccessAuthorizationRuleResource, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventHub/namespaces/{}/AuthorizationRules/{}", 
self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.namespace_name, &self.authorization_rule_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::SharedAccessAuthorizationRuleResource = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod post_authorization_rule { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrl(url::ParseError), #[error("Failed to build request: {0}")] BuildRequest(http::Error), #[error("Failed to serialize request body: {0}")] Serialize(serde_json::Error), #[error("Failed to get access token: {0}")] 
// NOTE(review): AutoRust-generated client code (see the file header). The lines
// below are machine-wrapped, so this pass adds comments only and leaves every
// code byte untouched.
//
// Tail of the namespace-level `post_authorization_rule` operation: its Builder
// POSTs to .../namespaces/{}/AuthorizationRules/{} with an empty, zero-length
// body (Content-Length: 0), a bearer token obtained from the client credential,
// and the `api-version` query pair; HTTP 200 is deserialized into
// SharedAccessAuthorizationRulePostResource, any other status becomes
// Error::UnexpectedResponse carrying the raw body bytes. It is followed by
// `create_or_update_authorization_rule` (PUT with an application/json body
// serialized from SharedAccessAuthorizationRuleCreateOrUpdateParameters;
// 200 => SharedAccessAuthorizationRuleResource) and the head of
// `delete_authorization_rule` (Response enum maps 204/200).
GetToken(azure_core::Error), #[error("Failed to execute request: {0}")] SendRequest(azure_core::Error), #[error("Failed to get response bytes: {0}")] ResponseBytes(azure_core::StreamError), #[error("Failed to deserialize response: {0}, body: {1:?}")] Deserialize(serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) namespace_name: String, pub(crate) authorization_rule_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future( self, ) -> futures::future::BoxFuture<'static, std::result::Result<models::SharedAccessAuthorizationRulePostResource, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventHub/namespaces/{}/AuthorizationRules/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.namespace_name, &self.authorization_rule_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body =
azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::SharedAccessAuthorizationRulePostResource = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod create_or_update_authorization_rule { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrl(url::ParseError), #[error("Failed to build request: {0}")] BuildRequest(http::Error), #[error("Failed to serialize request body: {0}")] Serialize(serde_json::Error), #[error("Failed to get access token: {0}")] GetToken(azure_core::Error), #[error("Failed to execute request: {0}")] SendRequest(azure_core::Error), #[error("Failed to get response bytes: {0}")] ResponseBytes(azure_core::StreamError), #[error("Failed to deserialize response: {0}, body: {1:?}")] Deserialize(serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) namespace_name: String, pub(crate) authorization_rule_name: String, pub(crate) parameters: models::SharedAccessAuthorizationRuleCreateOrUpdateParameters, pub(crate) subscription_id: String, } impl Builder { pub fn into_future( self, ) -> futures::future::BoxFuture<'static, std::result::Result<models::SharedAccessAuthorizationRuleResource, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventHub/namespaces/{}/AuthorizationRules/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name,
&self.namespace_name, &self.authorization_rule_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::SharedAccessAuthorizationRuleResource = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod delete_authorization_rule { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { NoContent204, Ok200, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrl(url::ParseError), #[error("Failed to build request: {0}")] BuildRequest(http::Error), #[error("Failed to serialize request body: {0}")]
// Remainder of the namespace-level `delete_authorization_rule` operation: the
// Builder issues a DELETE against .../namespaces/{}/AuthorizationRules/{} with
// bearer auth and the `api-version` query pair; HTTP 204 and 200 map to
// Response::NoContent204 / Response::Ok200 respectively, any other status is
// surfaced as Error::UnexpectedResponse with the raw body bytes. The enclosing
// operation-group module is then closed and `event_hubs` begins: its Client
// newtype wraps the shared pipeline client and exposes one builder-returning
// method per REST operation (list_all, get, create_or_update, delete, ...),
// each cloning the inner client and converting its arguments via Into.
Serialize(serde_json::Error), #[error("Failed to get access token: {0}")] GetToken(azure_core::Error), #[error("Failed to execute request: {0}")] SendRequest(azure_core::Error), #[error("Failed to get response bytes: {0}")] ResponseBytes(azure_core::StreamError), #[error("Failed to deserialize response: {0}, body: {1:?}")] Deserialize(serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) namespace_name: String, pub(crate) authorization_rule_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventHub/namespaces/{}/AuthorizationRules/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.namespace_name, &self.authorization_rule_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::NO_CONTENT => Ok(Response::NoContent204), http::StatusCode::OK => Ok(Response::Ok200), status_code => { let rsp_body =
azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } } pub mod event_hubs { use super::{models, API_VERSION}; pub struct Client(pub(crate) super::Client); impl Client { pub fn list_all( &self, resource_group_name: impl Into<String>, namespace_name: impl Into<String>, subscription_id: impl Into<String>, ) -> list_all::Builder { list_all::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), namespace_name: namespace_name.into(), subscription_id: subscription_id.into(), } } pub fn get( &self, resource_group_name: impl Into<String>, namespace_name: impl Into<String>, event_hub_name: impl Into<String>, subscription_id: impl Into<String>, ) -> get::Builder { get::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), namespace_name: namespace_name.into(), event_hub_name: event_hub_name.into(), subscription_id: subscription_id.into(), } } pub fn create_or_update( &self, resource_group_name: impl Into<String>, namespace_name: impl Into<String>, event_hub_name: impl Into<String>, parameters: impl Into<models::EventHubCreateOrUpdateParameters>, subscription_id: impl Into<String>, ) -> create_or_update::Builder { create_or_update::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), namespace_name: namespace_name.into(), event_hub_name: event_hub_name.into(), parameters: parameters.into(), subscription_id: subscription_id.into(), } } pub fn delete( &self, resource_group_name: impl Into<String>, namespace_name: impl Into<String>, event_hub_name: impl Into<String>, subscription_id: impl Into<String>, ) -> delete::Builder { delete::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), namespace_name: namespace_name.into(), event_hub_name: event_hub_name.into(), subscription_id: subscription_id.into(), } } pub fn list_authorization_rules( &self,
// Continuation of the `event_hubs::Client` impl: builder-returning methods for
// the per-event-hub authorization-rule operations (list_authorization_rules,
// get_authorization_rule, post_authorization_rule,
// create_or_update_authorization_rule, delete_authorization_rule). Each method
// clones the shared inner client (self.0) and converts every argument with
// Into, so callers may pass &str or String alike. The impl is then closed and
// the `list_all` operation module begins: its thiserror-derived Error enum
// covers URL parsing, request building, body (de)serialization, token
// acquisition, transport, and body-collection failures, followed by the
// Clone-able Builder holding the request parameters.
resource_group_name: impl Into<String>, namespace_name: impl Into<String>, event_hub_name: impl Into<String>, subscription_id: impl Into<String>, ) -> list_authorization_rules::Builder { list_authorization_rules::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), namespace_name: namespace_name.into(), event_hub_name: event_hub_name.into(), subscription_id: subscription_id.into(), } } pub fn get_authorization_rule( &self, resource_group_name: impl Into<String>, namespace_name: impl Into<String>, event_hub_name: impl Into<String>, authorization_rule_name: impl Into<String>, subscription_id: impl Into<String>, ) -> get_authorization_rule::Builder { get_authorization_rule::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), namespace_name: namespace_name.into(), event_hub_name: event_hub_name.into(), authorization_rule_name: authorization_rule_name.into(), subscription_id: subscription_id.into(), } } pub fn post_authorization_rule( &self, resource_group_name: impl Into<String>, namespace_name: impl Into<String>, event_hub_name: impl Into<String>, authorization_rule_name: impl Into<String>, subscription_id: impl Into<String>, ) -> post_authorization_rule::Builder { post_authorization_rule::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), namespace_name: namespace_name.into(), event_hub_name: event_hub_name.into(), authorization_rule_name: authorization_rule_name.into(), subscription_id: subscription_id.into(), } } pub fn create_or_update_authorization_rule( &self, resource_group_name: impl Into<String>, namespace_name: impl Into<String>, event_hub_name: impl Into<String>, authorization_rule_name: impl Into<String>, parameters: impl Into<models::SharedAccessAuthorizationRuleCreateOrUpdateParameters>, subscription_id: impl Into<String>, ) -> create_or_update_authorization_rule::Builder { create_or_update_authorization_rule::Builder { client: self.0.clone(),
resource_group_name: resource_group_name.into(), namespace_name: namespace_name.into(), event_hub_name: event_hub_name.into(), authorization_rule_name: authorization_rule_name.into(), parameters: parameters.into(), subscription_id: subscription_id.into(), } } pub fn delete_authorization_rule( &self, resource_group_name: impl Into<String>, namespace_name: impl Into<String>, event_hub_name: impl Into<String>, authorization_rule_name: impl Into<String>, subscription_id: impl Into<String>, ) -> delete_authorization_rule::Builder { delete_authorization_rule::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), namespace_name: namespace_name.into(), event_hub_name: event_hub_name.into(), authorization_rule_name: authorization_rule_name.into(), subscription_id: subscription_id.into(), } } } pub mod list_all { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrl(url::ParseError), #[error("Failed to build request: {0}")] BuildRequest(http::Error), #[error("Failed to serialize request body: {0}")] Serialize(serde_json::Error), #[error("Failed to get access token: {0}")] GetToken(azure_core::Error), #[error("Failed to execute request: {0}")] SendRequest(azure_core::Error), #[error("Failed to get response bytes: {0}")] ResponseBytes(azure_core::StreamError), #[error("Failed to deserialize response: {0}, body: {1:?}")] Deserialize(serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) namespace_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::EventHubListResult, Error>> { Box::pin(async move { let url_str = &format!(
// Body of `list_all::Builder::into_future`: GET .../namespaces/{}/eventhubs
// with a bearer token from the client credential and the `api-version` query
// pair; an empty static body is sent, HTTP 200 is collected and deserialized
// into EventHubListResult, and any other status yields
// Error::UnexpectedResponse carrying the raw body bytes. The `get` module
// follows (same request shape, GET on a single event hub; 200 deserializes
// into EventHubResource), then the head of `create_or_update` (PUT). NOTE:
// the generator's line wrapping splits one error-message string literal
// across a physical line here; the bytes are preserved exactly.
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventHub/namespaces/{}/eventhubs", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.namespace_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::EventHubListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod get { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrl(url::ParseError), #[error("Failed to build request: {0}")] BuildRequest(http::Error), #[error("Failed to serialize request body: {0}")] Serialize(serde_json::Error), #[error("Failed to get access 
token: {0}")] GetToken(azure_core::Error), #[error("Failed to execute request: {0}")] SendRequest(azure_core::Error), #[error("Failed to get response bytes: {0}")] ResponseBytes(azure_core::StreamError), #[error("Failed to deserialize response: {0}, body: {1:?}")] Deserialize(serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) namespace_name: String, pub(crate) event_hub_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::EventHubResource, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventHub/namespaces/{}/eventhubs/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.namespace_name, &self.event_hub_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::EventHubResource =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod create_or_update { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrl(url::ParseError), #[error("Failed to build request: {0}")] BuildRequest(http::Error), #[error("Failed to serialize request body: {0}")] Serialize(serde_json::Error), #[error("Failed to get access token: {0}")] GetToken(azure_core::Error), #[error("Failed to execute request: {0}")] SendRequest(azure_core::Error), #[error("Failed to get response bytes: {0}")] ResponseBytes(azure_core::StreamError), #[error("Failed to deserialize response: {0}, body: {1:?}")] Deserialize(serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) namespace_name: String, pub(crate) event_hub_name: String, pub(crate) parameters: models::EventHubCreateOrUpdateParameters, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::EventHubResource, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventHub/namespaces/{}/eventhubs/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.namespace_name, &self.event_hub_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); let credential =
// Tail of `create_or_update::Builder::into_future`: PUT with an
// `application/json` body serialized from EventHubCreateOrUpdateParameters via
// azure_core::to_json; HTTP 200 deserializes into EventHubResource, any other
// status becomes Error::UnexpectedResponse. Then the `delete` module follows
// (DELETE with an empty body; 204/200 map to Response::NoContent204/Ok200) and
// the head of `list_authorization_rules` (its Error enum) begins. NOTE: the
// generator's line wrapping splits one error-message string literal across a
// physical line here; the bytes are preserved exactly.
self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::EventHubResource = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod delete { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { NoContent204, Ok200, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrl(url::ParseError), #[error("Failed to build request: {0}")] BuildRequest(http::Error), #[error("Failed to serialize request body: {0}")] Serialize(serde_json::Error), #[error("Failed to get access token: {0}")] GetToken(azure_core::Error), #[error("Failed to execute request: {0}")] SendRequest(azure_core::Error), #[error("Failed to get response bytes: {0}")] ResponseBytes(azure_core::StreamError), #[error("Failed to deserialize 
response: {0}, body: {1:?}")] Deserialize(serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) namespace_name: String, pub(crate) event_hub_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventHub/namespaces/{}/eventhubs/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.namespace_name, &self.event_hub_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::NO_CONTENT => Ok(Response::NoContent204), http::StatusCode::OK => Ok(Response::Ok200), status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod list_authorization_rules { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", 
status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrl(url::ParseError), #[error("Failed to build request: {0}")] BuildRequest(http::Error), #[error("Failed to serialize request body: {0}")] Serialize(serde_json::Error), #[error("Failed to get access token: {0}")] GetToken(azure_core::Error), #[error("Failed to execute request: {0}")] SendRequest(azure_core::Error), #[error("Failed to get response bytes: {0}")] ResponseBytes(azure_core::StreamError), #[error("Failed to deserialize response: {0}, body: {1:?}")] Deserialize(serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) namespace_name: String, pub(crate) event_hub_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future( self, ) -> futures::future::BoxFuture<'static, std::result::Result<models::SharedAccessAuthorizationRuleListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventHub/namespaces/{}/eventhubs/{}/authorizationRules", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.namespace_name, &self.event_hub_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req =
// Tail of `list_authorization_rules::Builder::into_future` (GET over an event
// hub's authorizationRules; 200 => SharedAccessAuthorizationRuleListResult),
// followed by the event-hub-level `get_authorization_rule` module (GET on a
// single rule; 200 => SharedAccessAuthorizationRuleResource) and
// `post_authorization_rule` (POST with an empty zero-length body and
// Content-Length: 0; 200 => SharedAccessAuthorizationRulePostResource), then
// the head of `create_or_update_authorization_rule`'s Error enum. NOTE: the
// generator's line wrapping splits one error-message string literal across a
// physical line here; the bytes are preserved exactly.
req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::SharedAccessAuthorizationRuleListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod get_authorization_rule { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrl(url::ParseError), #[error("Failed to build request: {0}")] BuildRequest(http::Error), #[error("Failed to serialize request body: {0}")] Serialize(serde_json::Error), #[error("Failed to get access token: {0}")] GetToken(azure_core::Error), #[error("Failed to execute request: {0}")] SendRequest(azure_core::Error), #[error("Failed to get response bytes: {0}")] ResponseBytes(azure_core::StreamError), #[error("Failed to deserialize response: {0}, body: {1:?}")] Deserialize(serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) namespace_name: String, pub(crate) event_hub_name: String, pub(crate) authorization_rule_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future( self, ) -> futures::future::BoxFuture<'static, std::result::Result<models::SharedAccessAuthorizationRuleResource, Error>> { Box::pin(async move { let url_str = & format ! 
("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventHub/namespaces/{}/eventhubs/{}/authorizationRules/{}" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . namespace_name , & self . event_hub_name , & self . authorization_rule_name) ; let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::SharedAccessAuthorizationRuleResource = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod post_authorization_rule { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrl(url::ParseError), #[error("Failed to build request: 
{0}")] BuildRequest(http::Error), #[error("Failed to serialize request body: {0}")] Serialize(serde_json::Error), #[error("Failed to get access token: {0}")] GetToken(azure_core::Error), #[error("Failed to execute request: {0}")] SendRequest(azure_core::Error), #[error("Failed to get response bytes: {0}")] ResponseBytes(azure_core::StreamError), #[error("Failed to deserialize response: {0}, body: {1:?}")] Deserialize(serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) namespace_name: String, pub(crate) event_hub_name: String, pub(crate) authorization_rule_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future( self, ) -> futures::future::BoxFuture<'static, std::result::Result<models::SharedAccessAuthorizationRulePostResource, Error>> { Box::pin(async move { let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventHub/namespaces/{}/eventhubs/{}/authorizationRules/{}" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . namespace_name , & self . event_hub_name , & self . 
authorization_rule_name) ; let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::SharedAccessAuthorizationRulePostResource = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod create_or_update_authorization_rule { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrl(url::ParseError), #[error("Failed to build request: {0}")] BuildRequest(http::Error), #[error("Failed to serialize request body: {0}")] Serialize(serde_json::Error), #[error("Failed to get access token: {0}")] GetToken(azure_core::Error),
// Remainder of the event-hub-level `create_or_update_authorization_rule`
// operation: the Builder carries a
// SharedAccessAuthorizationRuleCreateOrUpdateParameters payload and PUTs it as
// an `application/json` body to .../eventhubs/{}/authorizationRules/{}; HTTP
// 200 deserializes into SharedAccessAuthorizationRuleResource, any other
// status becomes Error::UnexpectedResponse with the raw body bytes. Followed
// by the head of `delete_authorization_rule` (Response enum: NoContent204 /
// Ok200, plus the start of its Error enum).
#[error("Failed to execute request: {0}")] SendRequest(azure_core::Error), #[error("Failed to get response bytes: {0}")] ResponseBytes(azure_core::StreamError), #[error("Failed to deserialize response: {0}, body: {1:?}")] Deserialize(serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) namespace_name: String, pub(crate) event_hub_name: String, pub(crate) authorization_rule_name: String, pub(crate) parameters: models::SharedAccessAuthorizationRuleCreateOrUpdateParameters, pub(crate) subscription_id: String, } impl Builder { pub fn into_future( self, ) -> futures::future::BoxFuture<'static, std::result::Result<models::SharedAccessAuthorizationRuleResource, Error>> { Box::pin(async move { let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventHub/namespaces/{}/eventhubs/{}/authorizationRules/{}" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . namespace_name , & self . event_hub_name , & self . 
authorization_rule_name) ; let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::SharedAccessAuthorizationRuleResource = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod delete_authorization_rule { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { NoContent204, Ok200, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrl(url::ParseError), #[error("Failed to build request: {0}")] BuildRequest(http::Error), #[error("Failed to serialize request body: {0}")] Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")] GetToken(azure_core::Error), #[error("Failed to execute request: {0}")] SendRequest(azure_core::Error), #[error("Failed to get response bytes: {0}")] ResponseBytes(azure_core::StreamError), #[error("Failed to deserialize response: {0}, body: {1:?}")] Deserialize(serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) namespace_name: String, pub(crate) event_hub_name: String, pub(crate) authorization_rule_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> { Box::pin(async move { let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventHub/namespaces/{}/eventhubs/{}/authorizationRules/{}" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . namespace_name , & self . event_hub_name , & self . 
authorization_rule_name) ; let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::NO_CONTENT => Ok(Response::NoContent204), http::StatusCode::OK => Ok(Response::Ok200), status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } } pub mod consumer_groups { use super::{models, API_VERSION}; pub struct Client(pub(crate) super::Client); impl Client { pub fn get( &self, resource_group_name: impl Into<String>, namespace_name: impl Into<String>, event_hub_name: impl Into<String>, consumer_group_name: impl Into<String>, subscription_id: impl Into<String>, ) -> get::Builder { get::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), namespace_name: namespace_name.into(), event_hub_name: event_hub_name.into(), consumer_group_name: consumer_group_name.into(), subscription_id: subscription_id.into(), } } pub fn create_or_update( &self, resource_group_name: impl Into<String>, namespace_name: impl Into<String>, event_hub_name: impl Into<String>, consumer_group_name: impl Into<String>, parameters: impl 
Into<models::ConsumerGroupCreateOrUpdateParameters>, subscription_id: impl Into<String>, ) -> create_or_update::Builder { create_or_update::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), namespace_name: namespace_name.into(), event_hub_name: event_hub_name.into(), consumer_group_name: consumer_group_name.into(), parameters: parameters.into(), subscription_id: subscription_id.into(), } } pub fn delete( &self, resource_group_name: impl Into<String>, namespace_name: impl Into<String>, event_hub_name: impl Into<String>, consumer_group_name: impl Into<String>, subscription_id: impl Into<String>, ) -> delete::Builder { delete::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), namespace_name: namespace_name.into(), event_hub_name: event_hub_name.into(), consumer_group_name: consumer_group_name.into(), subscription_id: subscription_id.into(), } } pub fn list_all( &self, resource_group_name: impl Into<String>, namespace_name: impl Into<String>, event_hub_name: impl Into<String>, subscription_id: impl Into<String>, ) -> list_all::Builder { list_all::Builder { client: self.0.clone(), resource_group_name: resource_group_name.into(), namespace_name: namespace_name.into(), event_hub_name: event_hub_name.into(), subscription_id: subscription_id.into(), } } } pub mod get { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrl(url::ParseError), #[error("Failed to build request: {0}")] BuildRequest(http::Error), #[error("Failed to serialize request body: {0}")] Serialize(serde_json::Error), #[error("Failed to get access token: {0}")] GetToken(azure_core::Error), #[error("Failed to execute request: {0}")] SendRequest(azure_core::Error), #[error("Failed to get response bytes: {0}")] 
ResponseBytes(azure_core::StreamError), #[error("Failed to deserialize response: {0}, body: {1:?}")] Deserialize(serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) namespace_name: String, pub(crate) event_hub_name: String, pub(crate) consumer_group_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::ConsumerGroupResource, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventHub/namespaces/{}/eventhubs/{}/consumergroups/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.namespace_name, &self.event_hub_name, &self.consumer_group_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ConsumerGroupResource = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } 
status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod create_or_update { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrl(url::ParseError), #[error("Failed to build request: {0}")] BuildRequest(http::Error), #[error("Failed to serialize request body: {0}")] Serialize(serde_json::Error), #[error("Failed to get access token: {0}")] GetToken(azure_core::Error), #[error("Failed to execute request: {0}")] SendRequest(azure_core::Error), #[error("Failed to get response bytes: {0}")] ResponseBytes(azure_core::StreamError), #[error("Failed to deserialize response: {0}, body: {1:?}")] Deserialize(serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) namespace_name: String, pub(crate) event_hub_name: String, pub(crate) consumer_group_name: String, pub(crate) parameters: models::ConsumerGroupCreateOrUpdateParameters, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::ConsumerGroupResource, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventHub/namespaces/{}/eventhubs/{}/consumergroups/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.namespace_name, &self.event_hub_name, &self.consumer_group_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); let credential = 
self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ConsumerGroupResource = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod delete { use super::{models, API_VERSION}; #[derive(Debug)] pub enum Response { NoContent204, Ok200, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrl(url::ParseError), #[error("Failed to build request: {0}")] BuildRequest(http::Error), #[error("Failed to serialize request body: {0}")] Serialize(serde_json::Error), #[error("Failed to get access token: {0}")] GetToken(azure_core::Error), #[error("Failed to execute request: {0}")] SendRequest(azure_core::Error), #[error("Failed to get response bytes: {0}")] ResponseBytes(azure_core::StreamError), #[error("Failed to 
deserialize response: {0}, body: {1:?}")] Deserialize(serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) namespace_name: String, pub(crate) event_hub_name: String, pub(crate) consumer_group_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventHub/namespaces/{}/eventhubs/{}/consumergroups/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.namespace_name, &self.event_hub_name, &self.consumer_group_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::NO_CONTENT => Ok(Response::NoContent204), http::StatusCode::OK => Ok(Response::Ok200), status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } pub mod list_all { use super::{models, API_VERSION}; 
#[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrl(url::ParseError), #[error("Failed to build request: {0}")] BuildRequest(http::Error), #[error("Failed to serialize request body: {0}")] Serialize(serde_json::Error), #[error("Failed to get access token: {0}")] GetToken(azure_core::Error), #[error("Failed to execute request: {0}")] SendRequest(azure_core::Error), #[error("Failed to get response bytes: {0}")] ResponseBytes(azure_core::StreamError), #[error("Failed to deserialize response: {0}, body: {1:?}")] Deserialize(serde_json::Error, bytes::Bytes), } #[derive(Clone)] pub struct Builder { pub(crate) client: super::super::Client, pub(crate) resource_group_name: String, pub(crate) namespace_name: String, pub(crate) event_hub_name: String, pub(crate) subscription_id: String, } impl Builder { pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::ConsumerGroupListResult, Error>> { Box::pin(async move { let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventHub/namespaces/{}/eventhubs/{}/consumergroups", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.namespace_name, &self.event_hub_name ); let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); let credential = self.client.token_credential(); let token_response = credential .get_token(&self.client.scopes().join(" ")) .await .map_err(Error::GetToken)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = 
req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(Error::BuildRequest)?; let rsp = self.client.send(req).await.map_err(Error::SendRequest)?; let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct(); match rsp_status { http::StatusCode::OK => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; let rsp_value: models::ConsumerGroupListResult = serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?; Err(Error::UnexpectedResponse { status_code, body: rsp_body, }) } } }) } } } }
51.995133
341
0.530253
9bc8886e9660a50ee5fbfb32b963a9b6c7a7e810
51
net.sf.jasperreports.web.servlets.ReportPageStatus
25.5
50
0.882353
d917c94cdcab12c96619e3363f184d57d531ca65
4,593
//! API types.
//
// Flat re-exports: every submodule's public items are surfaced at this
// module's root so consumers can import a single path. Declaration order
// below is preserved from the original file.
pub use allowed_update::*; pub use animation::*; pub use audio::*; pub use callback_game::*;
pub use callback_query::*; pub use chat::*; pub use chat_action::*; pub use chat_id::*;
pub use chat_member::*; pub use chat_or_inline_message::*; pub use chat_permissions::*;
pub use chat_photo::*; pub use chosen_inline_result::*; pub use contact::*; pub use document::*;
pub use encrypted_credentials::*; pub use encrypted_passport_element::*; pub use file::*;
pub use force_reply::*; pub use game::*; pub use game_high_score::*;
pub use inline_keyboard_button::*; pub use inline_keyboard_markup::*; pub use inline_query::*;
pub use inline_query_result::*; pub use inline_query_result_article::*;
pub use inline_query_result_audio::*; pub use inline_query_result_cached_audio::*;
pub use inline_query_result_cached_document::*; pub use inline_query_result_cached_gif::*;
pub use inline_query_result_cached_mpeg4_gif::*; pub use inline_query_result_cached_photo::*;
pub use inline_query_result_cached_sticker::*; pub use inline_query_result_cached_video::*;
pub use inline_query_result_cached_voice::*; pub use inline_query_result_contact::*;
pub use inline_query_result_document::*; pub use inline_query_result_game::*;
pub use inline_query_result_gif::*; pub use inline_query_result_location::*;
pub use inline_query_result_mpeg4_gif::*; pub use inline_query_result_photo::*;
pub use inline_query_result_venue::*; pub use inline_query_result_video::*;
pub use inline_query_result_voice::*; pub use input_file::*; pub use input_media::*;
pub use input_message_content::*; pub use invoice::*; pub use keyboard_button::*;
pub use keyboard_button_poll_type::*; pub use label_price::*; pub use location::*;
pub use login_url::*; pub use mask_position::*; pub use message::*; pub use message_entity::*;
pub use order_info::*; pub use parse_mode::*; pub use passport_data::*;
pub use passport_element_error::*; pub use passport_file::*; pub use photo_size::*;
pub use poll::*; pub use poll_answer::*; pub use poll_type::*; pub use pre_checkout_query::*;
pub use reply_keyboard_markup::*; pub use reply_keyboard_remove::*; pub use reply_markup::*;
pub use response_parameters::*; pub use send_invoice::*; pub use shipping_address::*;
pub use shipping_option::*; pub use shipping_query::*; pub use sticker::*; pub use sticker_set::*;
pub use successful_payment::*; pub use unit_false::*; pub use unit_true::*; pub use update::*;
pub use user::*; pub use user_profile_photos::*; pub use venue::*; pub use video::*;
pub use video_note::*; pub use voice::*; pub use webhook_info::*;
// Core message/chat/update types.
mod allowed_update; mod animation; mod audio; mod callback_game; mod callback_query; mod chat;
mod chat_action; mod chat_id; mod chat_member; mod chat_or_inline_message; mod chat_permissions;
mod chat_photo; mod chosen_inline_result; mod contact; mod document; mod file; mod force_reply;
mod game; mod game_high_score; mod inline_keyboard_button; mod inline_keyboard_markup;
mod input_file; mod input_media; mod input_message_content; mod invoice; mod keyboard_button;
mod keyboard_button_poll_type; mod label_price; mod location; mod login_url; mod mask_position;
mod message; mod message_entity; mod order_info; mod parse_mode; mod photo_size; mod poll;
mod poll_answer; mod poll_type; mod pre_checkout_query; mod reply_keyboard_markup;
mod reply_keyboard_remove; mod reply_markup; mod response_parameters; mod send_invoice;
mod shipping_address; mod shipping_option; mod shipping_query; mod sticker; mod sticker_set;
mod successful_payment; mod unit_false; mod unit_true; mod update; mod user;
mod user_profile_photos; mod venue; mod video; mod video_note; mod voice; mod webhook_info;
// Inline-mode query/result types.
mod inline_query; mod inline_query_result; mod inline_query_result_article;
mod inline_query_result_audio; mod inline_query_result_cached_audio;
mod inline_query_result_cached_document; mod inline_query_result_cached_gif;
mod inline_query_result_cached_mpeg4_gif; mod inline_query_result_cached_photo;
mod inline_query_result_cached_sticker; mod inline_query_result_cached_video;
mod inline_query_result_cached_voice; mod inline_query_result_contact;
mod inline_query_result_document; mod inline_query_result_game; mod inline_query_result_gif;
mod inline_query_result_location; mod inline_query_result_mpeg4_gif;
mod inline_query_result_photo; mod inline_query_result_venue; mod inline_query_result_video;
mod inline_query_result_voice;
// Passport types.
mod encrypted_credentials; mod encrypted_passport_element; mod passport_data;
mod passport_element_error; mod passport_file;
// Helper types that are not part of the remote API itself.
pub use non_telegram_types::*; mod non_telegram_types;
24.827027
48
0.803614
67d1714ccf407746bbe62b663a3926d168bc8b1b
758
#[allow(dead_code)] mod portfolio; mod backend; mod state; mod paths; mod error; pub type Result<T> = std::result::Result<T, error::Error>; fn main() { use backend::{ world_trading_data::WorldTradingData, aggregator::Aggregator, cache::Cache, }; let state = state::State::get().unwrap(); println!("state: {:#?}", state); state.save().unwrap(); let backend = WorldTradingData::new("demo".to_owned()); let cached_backend = Cache::new(backend, std::time::Duration::from_secs(1)); let mut aggregator = Aggregator::new(vec![cached_backend]); println!("{:#?}", aggregator.request(&state)); println!("{:#?}", aggregator.request(&state)); std::thread::sleep(std::time::Duration::from_secs(2)); println!("{:#?}", aggregator.request(&state)); }
25.266667
77
0.671504
28ccb0e5336d5818af73dbfe96da25a9a6db98d6
799
use near_sdk::collections::UnorderedSet;
use near_sdk::borsh::{self, BorshDeserialize, BorshSerialize};

/// A named participant together with the set of class ids they belong to.
/// The class set lives in NEAR contract storage under a key prefix derived
/// from `storage_id` and `id`, so each `Person` uses a distinct prefix.
#[derive(BorshDeserialize, BorshSerialize)]
pub struct Person {
    name: String,
    classes: UnorderedSet<u128>,
}

impl Person {
    /* Getters */

    /// The person's display name.
    pub fn name(&self) -> &String {
        &self.name
    }

    /// Creates a person with an empty class set.
    ///
    /// The storage-key prefix is `b"pcls"` + little-endian `storage_id` +
    /// little-endian `id`; reusing the same pair would alias another
    /// person's set.
    ///
    /// Takes `&str` instead of `&String` (clippy `ptr_arg`); existing
    /// callers passing `&String` still compile via deref coercion.
    pub fn new(name: &str, storage_id: u128, id: u32) -> Self {
        let mut key = b"pcls".to_vec();
        key.extend_from_slice(&storage_id.to_le_bytes());
        key.extend_from_slice(&id.to_le_bytes());
        Self {
            name: name.to_owned(),
            classes: UnorderedSet::new(key),
        }
    }

    /// Records membership in `class_id` (inserting an id twice is a no-op
    /// for a set).
    pub fn add_class(&mut self, class_id: &u128) {
        self.classes.insert(class_id);
    }

    /// Copies all class ids out of storage into a plain vector.
    pub fn get_classes(&self) -> Vec<u128> {
        self.classes.to_vec()
    }
}
26.633333
65
0.614518
08c791fa0cace6d0c7205d890c69625589d4f240
47,056
//! AVM2 objects. use crate::avm2::activation::Activation; use crate::avm2::array::ArrayStorage; use crate::avm2::bytearray::ByteArrayStorage; use crate::avm2::class::Class; use crate::avm2::domain::Domain; use crate::avm2::events::{DispatchList, Event}; use crate::avm2::function::Executable; use crate::avm2::names::{Multiname, Namespace, QName}; use crate::avm2::regexp::RegExp; use crate::avm2::scope::ScopeChain; use crate::avm2::traits::{Trait, TraitKind}; use crate::avm2::value::{Hint, Value}; use crate::avm2::vector::VectorStorage; use crate::avm2::Error; use crate::backend::audio::{SoundHandle, SoundInstanceHandle}; use crate::bitmap::bitmap_data::BitmapData; use crate::display_object::DisplayObject; use crate::string::AvmString; use gc_arena::{Collect, GcCell, MutationContext}; use ruffle_macros::enum_trait_object; use std::cell::{Ref, RefMut}; use std::fmt::Debug; use std::hash::{Hash, Hasher}; mod array_object; mod bitmapdata_object; mod bytearray_object; mod class_object; mod date_object; mod dictionary_object; mod dispatch_object; mod domain_object; mod event_object; mod function_object; mod loaderinfo_object; mod namespace_object; mod primitive_object; mod qname_object; mod regexp_object; mod script_object; mod sound_object; mod soundchannel_object; mod stage_object; mod vector_object; mod xml_object; pub use crate::avm2::object::array_object::{array_allocator, ArrayObject}; pub use crate::avm2::object::bitmapdata_object::{bitmapdata_allocator, BitmapDataObject}; pub use crate::avm2::object::bytearray_object::{bytearray_allocator, ByteArrayObject}; pub use crate::avm2::object::class_object::ClassObject; pub use crate::avm2::object::date_object::{date_allocator, DateObject}; pub use crate::avm2::object::dictionary_object::{dictionary_allocator, DictionaryObject}; pub use crate::avm2::object::dispatch_object::DispatchObject; pub use crate::avm2::object::domain_object::{appdomain_allocator, DomainObject}; pub use 
crate::avm2::object::event_object::{event_allocator, EventObject}; pub use crate::avm2::object::function_object::FunctionObject; pub use crate::avm2::object::loaderinfo_object::{ loaderinfo_allocator, LoaderInfoObject, LoaderStream, }; pub use crate::avm2::object::namespace_object::{namespace_allocator, NamespaceObject}; pub use crate::avm2::object::primitive_object::{primitive_allocator, PrimitiveObject}; pub use crate::avm2::object::qname_object::{qname_allocator, QNameObject}; pub use crate::avm2::object::regexp_object::{regexp_allocator, RegExpObject}; pub use crate::avm2::object::script_object::{ScriptObject, ScriptObjectData}; pub use crate::avm2::object::sound_object::{sound_allocator, SoundObject}; pub use crate::avm2::object::soundchannel_object::{soundchannel_allocator, SoundChannelObject}; pub use crate::avm2::object::stage_object::{stage_allocator, StageObject}; pub use crate::avm2::object::vector_object::{vector_allocator, VectorObject}; pub use crate::avm2::object::xml_object::{xml_allocator, XmlObject}; /// Represents an object that can be directly interacted with by the AVM2 /// runtime. 
#[enum_trait_object( #[allow(clippy::enum_variant_names)] #[derive(Clone, Collect, Debug, Copy)] #[collect(no_drop)] pub enum Object<'gc> { ScriptObject(ScriptObject<'gc>), FunctionObject(FunctionObject<'gc>), PrimitiveObject(PrimitiveObject<'gc>), NamespaceObject(NamespaceObject<'gc>), ArrayObject(ArrayObject<'gc>), StageObject(StageObject<'gc>), DomainObject(DomainObject<'gc>), EventObject(EventObject<'gc>), DispatchObject(DispatchObject<'gc>), XmlObject(XmlObject<'gc>), RegExpObject(RegExpObject<'gc>), ByteArrayObject(ByteArrayObject<'gc>), LoaderInfoObject(LoaderInfoObject<'gc>), ClassObject(ClassObject<'gc>), VectorObject(VectorObject<'gc>), SoundObject(SoundObject<'gc>), SoundChannelObject(SoundChannelObject<'gc>), BitmapDataObject(BitmapDataObject<'gc>), DateObject(DateObject<'gc>), DictionaryObject(DictionaryObject<'gc>), QNameObject(QNameObject<'gc>), } )] pub trait TObject<'gc>: 'gc + Collect + Debug + Into<Object<'gc>> + Clone + Copy { /// Get the base of this object. /// Any trait method implementations that were not overrided will forward the call to this instead. fn base(&self) -> Ref<ScriptObjectData<'gc>>; fn base_mut(&self, mc: MutationContext<'gc, '_>) -> RefMut<ScriptObjectData<'gc>>; /// Retrieve a property by QName, after multiname resolution, prototype /// lookups, and all other considerations have been taken. /// /// This required method is only intended to be called by other TObject /// methods. fn get_property_local( self, receiver: Object<'gc>, name: &QName<'gc>, activation: &mut Activation<'_, 'gc, '_>, ) -> Result<Value<'gc>, Error> { let base = self.base(); let rv = base.get_property_local(receiver, name, activation)?; drop(base); rv.resolve(activation) } /// Retrieve a property by Multiname lookup. /// /// This corresponds directly to the AVM2 operation `getproperty`, with the /// exception that it does not special-case object lookups on dictionary /// structured objects. #[allow(unused_mut)] //Not unused. 
fn get_property( mut self, receiver: Object<'gc>, multiname: &Multiname<'gc>, activation: &mut Activation<'_, 'gc, '_>, ) -> Result<Value<'gc>, Error> { let name = self.resolve_multiname(multiname)?; // Special case: Unresolvable properties on dynamic classes are treated // as dynamic properties that have not yet been set, and yield // `undefined` if name.is_none() { if !self .instance_of_class_definition() .map(|c| c.read().is_sealed()) .unwrap_or(false) { return Ok(Value::Undefined); } return Err( format!("Cannot get undefined property {:?}", multiname.local_name()).into(), ); } // At this point, the name must be a valid QName. let name = name.unwrap(); if !self.base().has_own_instantiated_property(&name) { // Initialize lazy-bound methods at this point in time. if let Some(class) = self.instance_of() { if let Some((bound_method, disp_id)) = class.bound_instance_method(activation, receiver, &name)? { self.install_method( activation.context.gc_context, name.clone(), disp_id, bound_method, ); return Ok(bound_method.into()); } } // Class methods are also lazy-bound. if let Some(class) = self.as_class_object() { if let Some((bound_method, disp_id)) = class.bound_class_method(activation, &name)? { self.install_method( activation.context.gc_context, name.clone(), disp_id, bound_method, ); return Ok(bound_method.into()); } } } let has_no_getter = self.has_own_virtual_setter(&name) && !self.has_own_virtual_getter(&name); if self.has_own_property(&name)? && !has_no_getter { return self.get_property_local(receiver, &name, activation); } if let Some(proto) = self.proto() { return proto.get_property(receiver, multiname, activation); } Ok(Value::Undefined) } /// Set a property by QName, after multiname resolution and all other /// considerations have been taken. /// /// This required method is only intended to be called by other TObject /// methods. 
fn set_property_local( self, receiver: Object<'gc>, name: &QName<'gc>, value: Value<'gc>, activation: &mut Activation<'_, 'gc, '_>, ) -> Result<(), Error> { let mut base = self.base_mut(activation.context.gc_context); let rv = base.set_property_local(receiver, name, value, activation)?; drop(base); rv.resolve(activation)?; Ok(()) } /// Set a property by Multiname lookup. /// /// This corresponds directly with the AVM2 operation `setproperty`, with /// the exception that it does not special-case object lookups on /// dictionary structured objects. fn set_property( &mut self, receiver: Object<'gc>, multiname: &Multiname<'gc>, value: Value<'gc>, activation: &mut Activation<'_, 'gc, '_>, ) -> Result<(), Error> { let mut name = self.resolve_multiname(multiname)?; // Special case: Unresolvable properties on dynamic classes are treated // as initializing a new dynamic property on namespace Public(""). if name.is_none() { if !self .instance_of_class_definition() .map(|c| c.read().is_sealed()) .unwrap_or(false) { let local_name: Result<AvmString<'gc>, Error> = multiname .local_name() .ok_or_else(|| "Cannot set undefined property using any name".into()); name = Some(QName::dynamic_name(local_name?)); } else { return Err( format!("Cannot set undefined property {:?}", multiname.local_name()).into(), ); } } // At this point, name resolution should have completed. let name = name.unwrap(); // Reject attempts to overwrite lazy-bound methods before they have // been bound. 
if let Some(class) = self.instance_of() { if class.instance_method(&name)?.is_some() { return Err(format!( "Cannot overwrite read-only property {:?}", name.local_name() ) .into()); } } if let Some(class) = self.as_class_object() { if class.class_method(&name)?.is_some() { return Err(format!( "Cannot overwrite read-only property {:?}", name.local_name() ) .into()); } } self.set_property_local(receiver, &name, value, activation) } /// Initialize a property by QName, after multiname resolution and all /// other considerations have been taken. /// /// This required method is only intended to be called by other TObject /// methods. fn init_property_local( self, receiver: Object<'gc>, name: &QName<'gc>, value: Value<'gc>, activation: &mut Activation<'_, 'gc, '_>, ) -> Result<(), Error> { let mut base = self.base_mut(activation.context.gc_context); let rv = base.init_property_local(receiver, name, value, activation)?; drop(base); rv.resolve(activation)?; Ok(()) } /// Initialize a property by Multiname lookup. /// /// This corresponds directly with the AVM2 operation `initproperty`. fn init_property( &mut self, receiver: Object<'gc>, multiname: &Multiname<'gc>, value: Value<'gc>, activation: &mut Activation<'_, 'gc, '_>, ) -> Result<(), Error> { let mut name = self.resolve_multiname(multiname)?; // Special case: Unresolvable properties on dynamic classes are treated // as initializing a new dynamic property on namespace Public(""). if name.is_none() { if !self .instance_of_class_definition() .map(|c| c.read().is_sealed()) .unwrap_or(false) { let local_name: Result<AvmString<'gc>, Error> = multiname .local_name() .ok_or_else(|| "Cannot init undefined property using any name".into()); name = Some(QName::dynamic_name(local_name?)); } else { return Err(format!( "Cannot init undefined property {:?}", multiname.local_name() ) .into()); } } // At this point, name resolution should have completed. 
let name = name.unwrap(); // Reject attempts to overwrite lazy-bound methods before they have // been bound. if let Some(class) = self.instance_of() { if class.instance_method(&name)?.is_some() { return Err(format!( "Cannot overwrite read-only property {:?}", name.local_name() ) .into()); } } if let Some(class) = self.as_class_object() { if class.class_method(&name)?.is_some() { return Err(format!( "Cannot overwrite read-only property {:?}", name.local_name() ) .into()); } } self.init_property_local(receiver, &name, value, activation) } /// Call a named property on the object. /// /// This corresponds directly to the `callproperty` operation in AVM2. fn call_property( self, multiname: &Multiname<'gc>, arguments: &[Value<'gc>], activation: &mut Activation<'_, 'gc, '_>, ) -> Result<Value<'gc>, Error> { let name = self.resolve_multiname(multiname)?; if name.is_none() { return Err(format!( "Attempted to call undefined property {:?}", multiname.local_name() ) .into()); } let name = name.unwrap(); if let Some(class) = self.instance_of() { if let Some((superclass, method_trait)) = class.instance_method(&name)? { let method = method_trait.as_method().unwrap(); if !method.needs_arguments_object() { let scope = class.instance_scope(); return Executable::from_method(method, scope, None, Some(superclass)).exec( Some(self.into()), arguments, activation, superclass.into(), //Deliberately invalid. ); } } } if let Some(class) = self.as_class_object() { if let Some(method_trait) = class.class_method(&name)? { let method = method_trait.as_method().unwrap(); if !method.needs_arguments_object() { let scope = class.class_scope(); return Executable::from_method(method, scope, None, Some(class)).exec( Some(self.into()), arguments, activation, class.into(), //Deliberately invalid. ); } } } let function = self .get_property(self.into(), multiname, activation)? 
.coerce_to_object(activation); if function.is_err() { return Err(format!( "Attempted to call undefined property {:?}", multiname.local_name() ) .into()); } function .unwrap() .call(Some(self.into()), arguments, activation) } /// Retrieve a slot by its index. fn get_slot(self, id: u32) -> Result<Value<'gc>, Error> { let base = self.base(); base.get_slot(id) } /// Set a slot by its index. fn set_slot( self, id: u32, value: Value<'gc>, mc: MutationContext<'gc, '_>, ) -> Result<(), Error> { let mut base = self.base_mut(mc); base.set_slot(id, value, mc) } /// Initialize a slot by its index. fn init_slot( self, id: u32, value: Value<'gc>, mc: MutationContext<'gc, '_>, ) -> Result<(), Error> { let mut base = self.base_mut(mc); base.init_slot(id, value, mc) } /// Call a method by its index. /// /// This directly corresponds with the AVM2 operation `callmethod`. #[allow(unused_mut)] //Not unused. fn call_method( mut self, id: u32, arguments: &[Value<'gc>], activation: &mut Activation<'_, 'gc, '_>, ) -> Result<Value<'gc>, Error> { if self.base().get_method(id).is_none() { if let Some(class) = self.instance_of() { if let Some((bound_method, name)) = class.bound_instance_method_by_id(activation, self.into(), id)? { self.install_method( activation.context.gc_context, name.clone(), id, bound_method, ); } } if let Some(class) = self.as_class_object() { if let Some((bound_method, name)) = class.bound_class_method_by_id(activation, id)? { self.install_method( activation.context.gc_context, name.clone(), id, bound_method, ); } } } if let Some(method_object) = self.base().get_method(id) { return method_object.call(Some(self.into()), arguments, activation); } Err(format!("Cannot call unknown method id {}", id).into()) } /// Resolve a multiname into a single QName, if any of the namespaces /// match. 
fn resolve_multiname(self, multiname: &Multiname<'gc>) -> Result<Option<QName<'gc>>, Error> { for ns in multiname.namespace_set() { if ns.is_any() { if let Some(name) = multiname.local_name() { let ns = self.resolve_any(name)?; return Ok(ns.map(|ns| QName::new(ns, name))); } else { return Ok(None); } } else if let Some(name) = multiname.local_name() { let qname = QName::new(ns.clone(), name); if self.has_property(&qname)? { return Ok(Some(qname)); } } else { return Ok(None); } } if let Some(proto) = self.proto() { return proto.resolve_multiname(multiname); } Ok(None) } /// Given a local name, find the namespace it resides in, if any. /// /// The `Namespace` must not be `Namespace::Any`, as this function exists /// specifically resolve names in that namespace. /// /// Trait names will be resolve on class objects and object instances, but /// not prototypes. If you want to search a prototype's provided traits you /// must walk the prototype chain using `resolve_any_trait`. fn resolve_any(self, local_name: AvmString<'gc>) -> Result<Option<Namespace<'gc>>, Error> { let base = self.base(); base.resolve_any(local_name) } /// Given a local name of a trait, find the namespace it resides in, if any. /// /// This function only works for names which are trait properties, not /// dynamic or prototype properties. Furthermore, instance prototypes *will* /// resolve trait names here, contrary to their behavior in `resolve_any.` fn resolve_any_trait( self, local_name: AvmString<'gc>, ) -> Result<Option<Namespace<'gc>>, Error> { let base = self.base(); base.resolve_any_trait(local_name) } /// Indicates whether or not a property exists on an object. fn has_property(self, name: &QName<'gc>) -> Result<bool, Error> { if self.has_own_property(name)? { Ok(true) } else if let Some(proto) = self.proto() { Ok(proto.has_own_property(name)?) } else { Ok(false) } } /// Indicates whether or not a property or trait exists on an object and is /// not part of the prototype chain. 
fn has_own_property(self, name: &QName<'gc>) -> Result<bool, Error> { let base = self.base(); base.has_own_property(name) } /// Returns true if an object has one or more traits of a given name. fn has_trait(self, name: &QName<'gc>) -> Result<bool, Error> { let base = self.base(); base.has_trait(name) } /// Check if a particular object contains a virtual getter by the given /// name. fn has_own_virtual_getter(self, name: &QName<'gc>) -> bool { let base = self.base(); base.has_own_virtual_getter(name) } /// Check if a particular object contains a virtual setter by the given /// name. fn has_own_virtual_setter(self, name: &QName<'gc>) -> bool { let base = self.base(); base.has_own_virtual_setter(name) } /// Indicates whether or not a property is overwritable. fn is_property_overwritable( self, _gc_context: MutationContext<'gc, '_>, name: &QName<'gc>, ) -> bool { let base = self.base(); base.is_property_overwritable(name) } /// Delete a named property from the object. /// /// Returns false if the property cannot be deleted. fn delete_property(&self, gc_context: MutationContext<'gc, '_>, name: &QName<'gc>) -> bool { let mut base = self.base_mut(gc_context); // Reject attempts to delete lazy-bound methods before they have // been bound. if !base.has_own_instantiated_property(name) { if let Some(class) = self.instance_of() { if class .instance_method(name) .map(|t| t.is_some()) .unwrap_or(false) { return false; } } if let Some(class) = self.as_class_object() { if class .class_method(name) .map(|t| t.is_some()) .unwrap_or(false) { return false; } } } base.delete_property(name) } /// Retrieve the `__proto__` of a given object. /// /// The proto is another object used to resolve methods across a class of /// multiple objects. It should also be accessible as `__proto__` from /// `get`. fn proto(&self) -> Option<Object<'gc>> { let base = self.base(); base.proto() } /// Change the `__proto__` on this object. 
/// /// This method primarily exists so that the global scope that player /// globals loads into can be created before its superclasses are. It /// should be used sparingly, if at all. fn set_proto(self, mc: MutationContext<'gc, '_>, proto: Object<'gc>) { let mut base = self.base_mut(mc); base.set_proto(proto) } /// Retrieve a given enumerable name by index. /// /// Enumerants are listed by index, starting from zero. A value of `None` /// indicates that no enumerant with that index, or any greater index, /// exists. (In other words, it means stop.) /// /// Objects are responsible for maintaining a consistently ordered and /// indexed list of enumerable names which can be queried by this /// mechanism. fn get_enumerant_name(&self, index: u32) -> Option<Value<'gc>> { let base = self.base(); base.get_enumerant_name(index) } /// Determine if a property is currently enumerable. /// /// Properties that do not exist are also not enumerable. fn property_is_enumerable(&self, name: &QName<'gc>) -> bool { let base = self.base(); base.property_is_enumerable(name) } /// Mark a dynamic property on this object as enumerable. fn set_local_property_is_enumerable( &self, mc: MutationContext<'gc, '_>, name: &QName<'gc>, is_enumerable: bool, ) -> Result<(), Error> { let mut base = self.base_mut(mc); base.set_local_property_is_enumerable(name, is_enumerable) } /// Install a method (or any other non-slot value) on an object. fn install_method( &mut self, mc: MutationContext<'gc, '_>, name: QName<'gc>, disp_id: u32, function: Object<'gc>, ) { let mut base = self.base_mut(mc); base.install_method(name, disp_id, function) } /// Install a getter method on an object property. fn install_getter( &mut self, mc: MutationContext<'gc, '_>, name: QName<'gc>, disp_id: u32, function: Object<'gc>, ) -> Result<(), Error> { let mut base = self.base_mut(mc); base.install_getter(name, disp_id, function) } /// Install a setter method on an object property. 
fn install_setter( &mut self, mc: MutationContext<'gc, '_>, name: QName<'gc>, disp_id: u32, function: Object<'gc>, ) -> Result<(), Error> { let mut base = self.base_mut(mc); base.install_setter(name, disp_id, function) } /// Install a dynamic or built-in value property on an object. fn install_dynamic_property( &mut self, mc: MutationContext<'gc, '_>, name: QName<'gc>, value: Value<'gc>, ) -> Result<(), Error> { let mut base = self.base_mut(mc); base.install_dynamic_property(name, value) } /// Install a slot on an object property. fn install_slot( &mut self, mc: MutationContext<'gc, '_>, name: QName<'gc>, id: u32, value: Value<'gc>, ) { let mut base = self.base_mut(mc); base.install_slot(name, id, value) } /// Install a const on an object property. fn install_const( &mut self, mc: MutationContext<'gc, '_>, name: QName<'gc>, id: u32, value: Value<'gc>, ) { let mut base = self.base_mut(mc); base.install_const(name, id, value) } /// Install all instance traits provided by a class. /// /// This method will also install superclass instance traits first. By /// calling this method with the lowest class in the chain, you will ensure /// all instance traits are installed. /// /// Read the documentation for `install_trait` to learn more about exactly /// how traits are instantiated. fn install_instance_traits( &mut self, activation: &mut Activation<'_, 'gc, '_>, from_class_object: ClassObject<'gc>, ) -> Result<(), Error> { if let Some(superclass_object) = from_class_object.superclass_object() { self.install_instance_traits(activation, superclass_object)?; } let class = from_class_object.inner_class_definition(); self.install_traits( activation, class.read().instance_traits(), from_class_object.instance_scope(), Some(from_class_object), )?; Ok(()) } /// Install a list of traits into this object. /// /// This function should be called immediately after object allocation and /// before any constructors have a chance to run. 
/// /// Read the documentation for `install_trait` to learn more about exactly /// how traits are instantiated. fn install_traits( &mut self, activation: &mut Activation<'_, 'gc, '_>, traits: &[Trait<'gc>], scope: ScopeChain<'gc>, defining_class: Option<ClassObject<'gc>>, ) -> Result<(), Error> { for trait_entry in traits { self.install_trait(activation, trait_entry, scope, defining_class)?; } Ok(()) } /// Install a single trait into this object. /// /// This function should be called immediately after object allocation and /// before any constructors have a chance to run. It should also only be /// called once per name and/or slot ID, as reinstalling a trait may unset /// already set properties. /// /// Class, function, and method traits are *not* instantiated at /// installation time. Instead, installing such traits merely installs a /// placeholder (such as an `undefined` const slot) until the trait is /// properly initialized. /// /// All traits that are instantiated at install time will be instantiated /// with this object's current scope stack and this object as a bound /// receiver. /// /// The value of the trait at the time of installation will be returned /// here, or `undefined` for classes, functions, and methods. /// /// No verification happens in `install_trait`. Instead, you must make sure /// to only install traits from validated classes. fn install_trait( &mut self, activation: &mut Activation<'_, 'gc, '_>, trait_entry: &Trait<'gc>, scope: ScopeChain<'gc>, defining_class: Option<ClassObject<'gc>>, ) -> Result<Value<'gc>, Error> { let receiver = (*self).into(); let trait_name = trait_entry.name().clone(); avm_debug!( activation.avm2(), "Installing trait {:?} of kind {:?}", trait_name, trait_entry.kind() ); match trait_entry.kind() { TraitKind::Slot { slot_id, default_value, .. } => { self.install_slot( activation.context.gc_context, trait_name, *slot_id, default_value.clone(), ); Ok(default_value.clone()) } TraitKind::Method { .. 
} => Ok(Value::Undefined), TraitKind::Getter { disp_id, method, .. } => { let function = FunctionObject::from_method( activation, method.clone(), scope, Some(receiver), defining_class, ); self.install_getter( activation.context.gc_context, trait_name, *disp_id, function, )?; Ok(function.into()) } TraitKind::Setter { disp_id, method, .. } => { let function = FunctionObject::from_method( activation, method.clone(), scope, Some(receiver), defining_class, ); self.install_setter( activation.context.gc_context, trait_name, *disp_id, function, )?; Ok(function.into()) } TraitKind::Class { slot_id, .. } => { self.install_const( activation.context.gc_context, trait_name, *slot_id, Value::Undefined, ); Ok(Value::Undefined) } TraitKind::Function { slot_id, .. } => { self.install_const( activation.context.gc_context, trait_name, *slot_id, Value::Undefined, ); Ok(Value::Undefined) } TraitKind::Const { slot_id, default_value, .. } => { self.install_const( activation.context.gc_context, trait_name, *slot_id, default_value.clone(), ); Ok(default_value.clone()) } } } /// Call the object. fn call( self, _reciever: Option<Object<'gc>>, _arguments: &[Value<'gc>], _activation: &mut Activation<'_, 'gc, '_>, ) -> Result<Value<'gc>, Error> { Err("Object is not callable".into()) } /// Construct a Class or Function and return an instance of it. /// /// As the first step in object construction, the `construct` method is /// called on the class object to produce an instance of that class. The /// constructor is then expected to perform the following steps, in order: /// /// 1. Allocate the instance object. For ES4 classes, the class's instance /// allocator is used to allocate the object. ES3-style classes use the /// prototype to derive instances. /// 2. Associate the instance object with the class's explicit `prototype`. /// 3. If the class has instance traits, install them at this time. /// 4. Call the constructor method with the newly-allocated object as /// reciever. 
For ES3 classes, this is just the function's associated /// method. /// 5. Yield the allocated object. (The return values of constructors are /// ignored.) fn construct( self, _activation: &mut Activation<'_, 'gc, '_>, _args: &[Value<'gc>], ) -> Result<Object<'gc>, Error> { Err("Object is not constructable".into()) } /// Construct a property of this object by Multiname lookup. /// /// This corresponds directly to the AVM2 operation `constructprop`. fn construct_prop( self, multiname: &Multiname<'gc>, args: &[Value<'gc>], activation: &mut Activation<'_, 'gc, '_>, ) -> Result<Object<'gc>, Error> { let ctor = self .get_property(self.into(), multiname, activation)? .coerce_to_object(activation)?; ctor.construct(activation, args) } /// Construct a host object prototype of some kind and return it. /// /// This is called specifically to allocate old-style ES3 instances. The /// returned object should have no properties upon it. fn derive(&self, activation: &mut Activation<'_, 'gc, '_>) -> Result<Object<'gc>, Error>; /// Construct a parameterization of this particular type and return it. /// /// This is called specifically to parameterize generic types, of which /// only one exists: `Vector`. When `Vector` is applied with a given /// parameter, a new type is returned which can be used to construct /// `Vector`s of that type. /// /// If the object is not a parameterized type, this yields an error. In /// practice, this means only `Vector` can use this method. Parameters must /// be class objects or `null`, which indicates any type. /// /// When a given type is parameterized with the same parameters multiple /// times, each application must return the same object. This is because /// each application has a separate prototype that accepts dynamic /// parameters. 
fn apply( &self, _activation: &mut Activation<'_, 'gc, '_>, _params: &[Value<'gc>], ) -> Result<ClassObject<'gc>, Error> { Err("Not a parameterized type".into()) } /// Determine the type of primitive coercion this object would prefer, in /// the case that there is no obvious reason to prefer one type over the /// other. /// /// All native ECMAScript objects prefer numerical coercions, except `Date`, /// which wants string coercions. fn default_hint(&self) -> Hint { Hint::Number } /// Implement the result of calling `Object.prototype.toString` on this /// object class. /// /// `toString` is a method used to request an object be coerced to a string /// value. The default implementation is stored here. User-specified string /// coercions happen by defining `toString` in a downstream class or /// prototype; this is then picked up by the VM runtime when doing /// coercions. fn to_string(&self, mc: MutationContext<'gc, '_>) -> Result<Value<'gc>, Error> { let class_name = self .instance_of_class_definition() .map(|c| c.read().name().local_name()) .unwrap_or_else(|| "Object".into()); Ok(AvmString::new(mc, format!("[object {}]", class_name)).into()) } /// Implement the result of calling `Object.prototype.toLocaleString` on this /// object class. /// /// `toLocaleString` is a method used to request an object be coerced to a /// locale-dependent string value. The default implementation appears to /// generate a debug-style string based on the name of the class this /// object is, in the format of `[object Class]` (where `Class` is the name /// of the class that created this object). fn to_locale_string(&self, mc: MutationContext<'gc, '_>) -> Result<Value<'gc>, Error> { let class_name = self .instance_of_class_definition() .map(|c| c.read().name().local_name()) .unwrap_or_else(|| "Object".into()); Ok(AvmString::new(mc, format!("[object {}]", class_name)).into()) } /// Implement the result of calling `Object.prototype.valueOf` on this /// object class. 
/// /// `valueOf` is a method used to request an object be coerced to a /// primitive value. Typically, this would be a number of some kind. fn value_of(&self, mc: MutationContext<'gc, '_>) -> Result<Value<'gc>, Error>; /// Determine if this object is an instance of a given type. /// /// This uses the ES3 definition of instance, which walks the prototype /// chain. For the ES4 definition of instance, use `is_of_type`, which uses /// the class object chain and accounts for interfaces. /// /// The given object should be the class object for the given type we are /// checking against this object. Its prototype will be extracted and /// searched in the prototype chain of this object. fn is_instance_of( &self, activation: &mut Activation<'_, 'gc, '_>, class: Object<'gc>, ) -> Result<bool, Error> { let type_proto = class .get_property(class, &QName::dynamic_name("prototype").into(), activation)? .coerce_to_object(activation)?; self.has_prototype_in_chain(type_proto) } /// Determine if this object has a given prototype in its prototype chain. /// /// The given object `type_proto` should be the prototype we are checking /// against this object. fn has_prototype_in_chain(&self, type_proto: Object<'gc>) -> Result<bool, Error> { let mut my_proto = self.proto(); //TODO: Is it a verification error to do `obj instanceof bare_object`? while let Some(proto) = my_proto { if Object::ptr_eq(proto, type_proto) { return Ok(true); } my_proto = proto.proto() } Ok(false) } /// Determine if this object is an instance of a given type. /// /// This uses the ES4 definition of instance, which walks the class object /// chain and accounts for interfaces. For the ES3 definition of instance, /// use `is_instance_of`, which uses the prototype chain. /// /// The given object should be the class object for the given type we are /// checking against this object. 
fn is_of_type( &self, test_class: ClassObject<'gc>, activation: &mut Activation<'_, 'gc, '_>, ) -> Result<bool, Error> { let my_class = self.instance_of(); // ES3 objects are not class instances but are still treated as // instances of Object, which is an ES4 class. if my_class.is_none() && Object::ptr_eq(test_class, activation.avm2().classes().object) { Ok(true) } else if let Some(my_class) = my_class { my_class.has_class_in_chain(test_class, activation) } else { Ok(false) } } /// Get a raw pointer value for this object. fn as_ptr(&self) -> *const ObjectPtr; /// Get this object's class, if it has one. fn instance_of(&self) -> Option<ClassObject<'gc>> { let base = self.base(); base.instance_of() } /// Get this object's class's `Class`, if it has one. fn instance_of_class_definition(&self) -> Option<GcCell<'gc, Class<'gc>>> { self.instance_of().map(|cls| cls.inner_class_definition()) } fn set_instance_of(&self, mc: MutationContext<'gc, '_>, instance_of: ClassObject<'gc>) { let mut base = self.base_mut(mc); base.set_instance_of(instance_of); } /// Try to corece this object into a `ClassObject`. fn as_class_object(&self) -> Option<ClassObject<'gc>> { None } /// Get this object's `Executable`, if it has one. fn as_executable(&self) -> Option<Ref<Executable<'gc>>> { None } /// Unwrap this object's `Namespace`, if the object is a boxed namespace. fn as_namespace(&self) -> Option<Ref<Namespace<'gc>>> { None } /// Unwrap this object as a `QNameObject` fn as_qname_object(self) -> Option<QNameObject<'gc>> { None } /// Unwrap this object as array storage. fn as_array_storage(&self) -> Option<Ref<ArrayStorage<'gc>>> { None } /// Unwrap this object as bytearray. fn as_bytearray(&self) -> Option<Ref<ByteArrayStorage>> { None } fn as_bytearray_mut(&self, _mc: MutationContext<'gc, '_>) -> Option<RefMut<ByteArrayStorage>> { None } fn as_bytearray_object(&self) -> Option<ByteArrayObject<'gc>> { None } /// Unwrap this object as mutable array storage. 
fn as_array_storage_mut( &self, _mc: MutationContext<'gc, '_>, ) -> Option<RefMut<ArrayStorage<'gc>>> { None } /// Unwrap this object as vector storage. fn as_vector_storage(&self) -> Option<Ref<VectorStorage<'gc>>> { None } /// Unwrap this object as mutable vector storage. fn as_vector_storage_mut( &self, _mc: MutationContext<'gc, '_>, ) -> Option<RefMut<VectorStorage<'gc>>> { None } /// Get this object's `DisplayObject`, if it has one. fn as_display_object(&self) -> Option<DisplayObject<'gc>> { None } /// Associate this object with a display object, if it can support such an /// association. /// /// If not, then this function does nothing. fn init_display_object(&self, _mc: MutationContext<'gc, '_>, _obj: DisplayObject<'gc>) {} /// Unwrap this object as an ApplicationDomain. fn as_application_domain(&self) -> Option<Domain<'gc>> { None } /// Unwrap this object as an event. fn as_event(&self) -> Option<Ref<Event<'gc>>> { None } /// Unwrap this object as a mutable event. fn as_event_mut(&self, _mc: MutationContext<'gc, '_>) -> Option<RefMut<Event<'gc>>> { None } /// Unwrap this object as a list of event handlers. fn as_dispatch(&self) -> Option<Ref<DispatchList<'gc>>> { None } /// Unwrap this object as a mutable list of event handlers. fn as_dispatch_mut(&self, _mc: MutationContext<'gc, '_>) -> Option<RefMut<DispatchList<'gc>>> { None } /// Unwrap this object as an immutable primitive value. /// /// This function should not be called in cases where a normal `Value` /// coercion would do. It *only* accounts for boxed primitives, and not /// `valueOf`. fn as_primitive(&self) -> Option<Ref<Value<'gc>>> { None } /// Unwrap this object as a mutable primitive value. fn as_primitive_mut(&self, _mc: MutationContext<'gc, '_>) -> Option<RefMut<Value<'gc>>> { None } /// Unwrap this object as a regexp. fn as_regexp(&self) -> Option<Ref<RegExp<'gc>>> { None } /// Unwrap this object as a mutable regexp. 
fn as_regexp_mut(&self, _mc: MutationContext<'gc, '_>) -> Option<RefMut<RegExp<'gc>>> { None } /// Unwrap this object's loader stream fn as_loader_stream(&self) -> Option<Ref<LoaderStream<'gc>>> { None } /// Unwrap this object's sound handle. fn as_sound(self) -> Option<SoundHandle> { None } /// Associate the object with a particular sound handle. /// /// This does nothing if the object is not a sound. fn set_sound(self, _mc: MutationContext<'gc, '_>, _sound: SoundHandle) {} /// Unwrap this object's sound instance handle. fn as_sound_channel(self) -> Option<SoundChannelObject<'gc>> { None } /// Associate the object with a particular sound instance handle. /// /// This does nothing if the object is not a sound channel. fn set_sound_instance(self, _mc: MutationContext<'gc, '_>, _sound: SoundInstanceHandle) {} /// Unwrap this object's bitmap data fn as_bitmap_data(&self) -> Option<GcCell<'gc, BitmapData<'gc>>> { None } /// Initialize the bitmap data in this object, if it's capable of /// supporting said data. /// /// This should only be called to initialize the association between an AVM /// object and it's associated bitmap data. This association should not be /// reinitialized later. fn init_bitmap_data( &self, _mc: MutationContext<'gc, '_>, _new_bitmap: GcCell<'gc, BitmapData<'gc>>, ) { } /// Get this objects `DateObject`, if it has one. fn as_date_object(&self) -> Option<DateObject<'gc>> { None } /// Get this object as a `DictionaryObject`, if it is one. fn as_dictionary_object(self) -> Option<DictionaryObject<'gc>> { None } } pub enum ObjectPtr {} impl<'gc> Object<'gc> { pub fn ptr_eq<T: TObject<'gc>>(a: T, b: T) -> bool { a.as_ptr() == b.as_ptr() } } impl<'gc> PartialEq for Object<'gc> { fn eq(&self, other: &Self) -> bool { Object::ptr_eq(*self, *other) } } impl<'gc> Eq for Object<'gc> {} impl<'gc> Hash for Object<'gc> { fn hash<H: Hasher>(&self, state: &mut H) { self.as_ptr().hash(state); } }
33.950938
103
0.572701
756e6200aeaaeb3dc2b3fc895aded3360ae347ea
10,108
// // // //! ATA IO code, handling device multiplexing and IO operations use ::prelude::*; //use ::memory::helpers::{DMABuffer}; //use ::async; use ::metadevs::storage; //use ::device_manager::IOBinding; use x86_64::instructions::port; pub const SECTOR_SIZE: usize = 128; //const MAX_DMA_SECTORS: usize = 0x2_0000 / SECTOR_SIZE; // Limited by sector count (and PRDT entries) const MAX_DMA_SECTORS: usize = 0x1F_F000 / SECTOR_SIZE; // Limited by sector count (and PRDT entries) // 512 PDRT entries, assume maximum fragmentation = 512 * 4K max = 2^21 = 2MB per transfer //const HDD_PIO_W28: u8 = 0x30, //const HDD_PIO_R28: u8 = 0x20; //const HDD_PIO_W48: u8 = 0x34; //const HDD_PIO_R48: u8 = 0x24; const HDD_IDENTIFY: u8 = 0xEC; const HDD_DMA_R28: u8 = 0xC8; const HDD_DMA_W28: u8 = 0xCA; const HDD_DMA_R48: u8 = 0x25; const HDD_DMA_W48: u8 = 0x35; const ISA_DATA : u16 = 0x00; const ISA_ERROR : u16 = 0x01; const ISA_PRECOMP : u16 = 0x01; const ISA_CTRL : u16 = 0x02; const ISA_SECCNT : u16 = 0x02; const ISA_SECTOR : u16 = 0x03; const ISA_CYL_LO : u16 = 0x04; const ISA_CYL_HI : u16 = 0x05; const ISA_SDH : u16 = 0x06; const ISA_COMMAND : u16 = 0x07; const ISA_STATUS : u16 = 0x07; const IDE_BSY : u8 = 0x80; const IDE_DRDY : u8 = 0x40; const IDE_DF : u8 = 0x20; const IDE_DRQ : u8 = 0x08; const IDE_ERR : u8 = 0x01; const IDE_CMD_READ : u8 = 0x20; const IDE_CMD_WRITE : u8 = 0x30; const IDE_CMD_IDENTIFY : u8 = 0xEC; const IDE_IDENT_SECTORS : usize = 20; const IDE_IDENT_MODEL : usize = 54; const IDE_IDENT_CAPABILITIES : usize = 98; const IDE_IDENT_CMDSETS : usize = 164; const IDE_IDENT_MAX_LBA : usize = 120; const IDE_IDENT_MAX_LBA_EXT : usize = 200; const IO_BASE0 : u16 = 0x1F0; const IO_BASE1 : u16 = 0x170; const IO_CTRL0 : u16 = 0x3F4; const IO_CTRL1 : u16 = 0x374; const MAX_IDE : usize = 4; const MAX_NSECS : usize = 128; //const MAX_DISK_NSECS 0x10000000U; //const VALID_IDE(ideno) (((ideno) >= 0) && ((ideno) < MAX_IDE) && (ide_devices[ideno].valid)) struct Channels{ base : u16, // 
I/O Base ctrl : u16, // Control Base } const channels:[(u16,u16);2] = [(IO_BASE0, IO_CTRL0),(IO_BASE1, IO_CTRL1)]; //const IO_BASE(ideno) (channels[(ideno) >> 1].base) //const IO_CTRL(ideno) (channels[(ideno) >> 1].ctrl) #[derive(Default)] pub struct DmaController { pub name: String, //pub ata_controllers: [AtaController; 2], //pub dma_base: IOBinding, } impl DmaController { /// Read ATA DMA pub fn do_dma_rd<'a>(&'a self, blockidx: u64, count: usize, dst: &'a mut [u32], disk: u8) -> Result<usize,storage::IoError> { assert_eq!(dst.len(), count * SECTOR_SIZE); let dst = if count > MAX_DMA_SECTORS { &mut dst[.. MAX_DMA_SECTORS * SECTOR_SIZE] } else { dst }; //self.do_dma(blockidx, DMABuffer::new_mut(dst, 32), disk, false); self.ide_read_secs(disk,blockidx,dst,count as u8) //Ok(233) } /// Write ATA DMA pub fn do_dma_wr<'a>(&'a self, blockidx: u64, count: usize, dst: &'a [u32], disk: u8) -> Result<usize,storage::IoError> { assert_eq!(dst.len(), count * SECTOR_SIZE); let dst = if count > MAX_DMA_SECTORS { &dst[.. MAX_DMA_SECTORS * SECTOR_SIZE] } else { dst }; //println!("ide_write_secs: disk={},blockidx={},count={}",disk,blockidx,count); self.ide_write_secs(disk,blockidx,dst,count as u8) //Ok(233) } fn ide_wait_ready(&self, iobase:u16, check_error: usize)->usize { unsafe{ let mut r= port::inb(iobase + ISA_STATUS); //println!("iobase:{} ready:{}",iobase,r); while (r & IDE_BSY) > 0{ r= port::inb(iobase + ISA_STATUS); //println!("busy"); } /* nothing */ if check_error ==1 && (r & (IDE_DF | IDE_ERR)) != 0 { return 1; } } return 0; } pub fn ide_init(&self) { //static_assert((SECTSIZE % 4) == 0); for ideno in 0 .. 
MAX_IDE { //println!("ideno:{}",ideno); /* assume that no device here */ //ide_devices[ideno].valid = 0; //let iobase = IO_BASE(ideno); let iobase = channels[if ideno >2 {1} else {0}].0; /* wait device ready */ self.ide_wait_ready(iobase, 0); //println!("ide_wait_ready"); unsafe{ /* step1: select drive */ //println!("outb"); port::outb(iobase + ISA_SDH, (0xE0 | ((ideno & 1) << 4)) as u8); self.ide_wait_ready(iobase, 0); /* step2: send ATA identify command */ //println!("outb"); port::outb(iobase + ISA_COMMAND, IDE_CMD_IDENTIFY); self.ide_wait_ready(iobase, 0); /* step3: polling */ //println!("inb"); if port::inb(iobase + ISA_STATUS) == 0 || self.ide_wait_ready(iobase, 1) != 0 { continue ; } //println!("insl"); let mut buffer:[u32;128]=[0;128]; for i in 0..buffer.len(){ buffer[i]=i as u32; if i==1 { //println!("{:#x}",&buffer[i] as *const u32 as usize - ::consts::KERNEL_OFFSET) } } //println!("insl {:#x}",&buffer as *const u32 as usize - ::consts::KERNEL_OFFSET); //println!("insl {:#x}",buffer.as_ptr() as usize - ::consts::KERNEL_OFFSET); //port::insl(iobase + ISA_DATA, &mut buffer); let port=iobase + ISA_DATA; //let buf=&mut buffer; for i in 0..buffer.len(){ asm!("insl %dx, (%edi)" :: "{dx}"(port), "{edi}"(&buffer[i]) : "edi" : "volatile"); } //println!("insl"); for i in 0..4{ println!("init:{}",buffer[i]); } } /* device is ok */ //ide_devices[ideno].valid = 1; /* read identification space of the device */ /*let buffer[128]; insl(iobase + ISA_DATA, buffer, sizeof(buffer) / sizeof(unsigned int)); unsigned char *ident = (unsigned char *)buffer; unsigned int sectors; unsigned int cmdsets = *(unsigned int *)(ident + IDE_IDENT_CMDSETS); /* device use 48-bits or 28-bits addressing */ if (cmdsets & (1 << 26)) { sectors = *(unsigned int *)(ident + IDE_IDENT_MAX_LBA_EXT); } else { sectors = *(unsigned int *)(ident + IDE_IDENT_MAX_LBA); } ide_devices[ideno].sets = cmdsets; ide_devices[ideno].size = sectors; /* check if supports LBA */ assert((*(unsigned short *)(ident + 
IDE_IDENT_CAPABILITIES) & 0x200) != 0); unsigned char *model = ide_devices[ideno].model, *data = ident + IDE_IDENT_MODEL; unsigned int i, length = 40; for (i = 0; i < length; i += 2) { model[i] = data[i + 1], model[i + 1] = data[i]; } do { model[i] = '\0'; } while (i -- > 0 && model[i] == ' '); cprintf("ide %d: %10u(sectors), '%s'.\n", ideno, ide_devices[ideno].size, ide_devices[ideno].model);*/ } // enable ide interrupt //pic_enable(IRQ_IDE1); //pic_enable(IRQ_IDE2); } fn ide_read_secs<'a>(&'a self, ideno: u8, secno:u64, dst: &'a mut [u32], nsecs:u8) -> Result<usize,storage::IoError> { //assert(nsecs <= MAX_NSECS && VALID_IDE(ideno)); //assert(secno < MAX_DISK_NSECS && secno + nsecs <= MAX_DISK_NSECS); let iobase = channels[if ideno >2 {1} else {0}].0; let ioctrl = channels[if ideno >2 {1} else {0}].1; //ide_wait_ready(iobase, 0); self.ide_wait_ready(iobase,0); let ret = 0; // generate interrupt unsafe{ port::outb(ioctrl + ISA_CTRL, 0); port::outb(iobase + ISA_SECCNT, nsecs); port::outb(iobase + ISA_SECTOR, (secno & 0xFF)as u8); port::outb(iobase + ISA_CYL_LO, ((secno >> 8) & 0xFF)as u8); port::outb(iobase + ISA_CYL_HI, ((secno >> 16) & 0xFF)as u8); port::outb(iobase + ISA_SDH, 0xE0 | ((ideno & 1) << 4) | (((secno >> 24) & 0xF)as u8)); //port::outb(iobase + ISA_SDH, (0xE0 | ((ideno & 1) << 4)) as u8); //self.ide_wait_ready(iobase, 0); port::outb(iobase + ISA_COMMAND, IDE_CMD_READ); //self.ide_wait_ready(iobase, 0); // if port::inb(iobase + ISA_STATUS) == 0 || self.ide_wait_ready(iobase, 1) != 0 { // println!("error?"); // } for i in 0 .. nsecs { //dst = dst + SECTSIZE; let tmp = &mut dst[(i as usize)*SECTOR_SIZE .. 
((i+1) as usize)*SECTOR_SIZE]; if self.ide_wait_ready(iobase, 1) != 0 { println!("wait ready error"); } //self.ide_wait_ready(iobase, 1); //port::insl(iobase, tmp); let port=iobase; //let buf=&mut buffer; for i in 0..tmp.len(){ asm!("insl %dx, (%edi)" :: "{dx}"(port), "{edi}"(&tmp[i]) : "edi" : "volatile"); } //println!("read :{}",i); } } Ok(ret) } fn ide_write_secs<'a>(&'a self, ideno: u8, secno:u64, src: &'a [u32], nsecs:u8) -> Result<usize,storage::IoError> { //assert(nsecs <= MAX_NSECS && VALID_IDE(ideno)); //assert(secno < MAX_DISK_NSECS && secno + nsecs <= MAX_DISK_NSECS); let iobase = channels[if ideno >2 {1} else {0}].0; let ioctrl = channels[if ideno >2 {1} else {0}].1; //ide_wait_ready(iobase, 0); self.ide_wait_ready(iobase,0); let ret = 0; // generate interrupt unsafe{ port::outb(ioctrl + ISA_CTRL, 0); port::outb(iobase + ISA_SECCNT, nsecs); port::outb(iobase + ISA_SECTOR, (secno & 0xFF)as u8); port::outb(iobase + ISA_CYL_LO, ((secno >> 8) & 0xFF)as u8); port::outb(iobase + ISA_CYL_HI, ((secno >> 16) & 0xFF)as u8); port::outb(iobase + ISA_SDH, 0xE0 | ((ideno & 1) << 4) | (((secno >> 24) & 0xF)as u8)); port::outb(iobase + ISA_COMMAND, IDE_CMD_WRITE); //println!("{}",nsecs); for i in 0 .. nsecs { //dst = dst + SECTSIZE; // if ((ret = ide_wait_ready(iobase, 1)) != 0) { // goto out; // } //port::insb(iobase, dst); //println!("i={}",i); let tmp = &src[(i as usize)*SECTOR_SIZE .. ((i+1) as usize)*SECTOR_SIZE]; if self.ide_wait_ready(iobase, 1) != 0 { println!("wait ready error"); } //println!("write {}:{}",i,src[i as usize]); //println!("outsl"); //port::outsl(iobase, tmp); let port=iobase; //let buf=&mut buffer; for i in 0..tmp.len(){ asm!("outsl (%esi), %dx" :: "{dx}"(port), "{esi}"(&tmp[i]) : "edi"); } //println!("write :{}",i); // for i in 0..4 { // println!("{}",src[i as usize]); // } //port::outb(iobase, src[i as usize]); } } Ok(ret) } }
32.397436
126
0.595568
67eaa716fe10ac205342ec897c297c79f1955ca2
553
#![feature(test)] extern crate test; use digest::bench_update; use ripemd::{Ripemd160, Ripemd256, Ripemd320}; use test::Bencher; bench_update!( Ripemd160::default(); ripemd160_10 10; ripemd160_100 100; ripemd160_1000 1000; ripemd160_10000 10000; ); bench_update!( Ripemd256::default(); ripemd256_10 10; ripemd256_100 100; ripemd256_1000 1000; ripemd256_10000 10000; ); bench_update!( Ripemd320::default(); ripemd320_10 10; ripemd320_100 100; ripemd320_1000 1000; ripemd320_10000 10000; );
17.83871
46
0.696203
29edf8a9e4f5c9123ea90fd8572a4b19e0d52c75
15,958
// tools for automatically formatting spwn files use crate::ast::*; pub trait SpwnFmt { fn fmt(&self, ind: Indent) -> String; } type Indent = u16; fn tabs(mut num: Indent) -> String { let mut out = String::new(); while num > 4 { out += "\t"; num -= 4; } for _ in 0..num { out += " "; } out } pub fn _format(input: Vec<Statement>) -> String { let mut out = String::new(); for s in input { out += &s.fmt(0) } out } pub fn _format2(input: &ValueBody) -> String { input.fmt(0) } fn element_list(elements: &[impl SpwnFmt], open: char, closing: char, ind: Indent) -> String { if elements.is_empty() { return format!("{}{}", open, closing); } let mut elem_text = Vec::<String>::new(); let mut sum = 0; let last = elements.len() - 1; for (_i, el) in elements.iter().enumerate() { let text = el.fmt(0); sum += text.lines().next().unwrap().len(); elem_text.push(text) } let vertical = if elements.len() == 1 { sum > 150 } else { elem_text.iter().enumerate().any(|(i, x)| { if i != last { x.len() > 50 || x.contains('\n') } else { sum > 100 } }) }; if vertical { let mut out = format!("{}\n", open); for el in &elem_text { for line in el.lines() { out += &format!("{}{}\n", tabs(ind + 4), line); } out.pop(); out += ",\n"; } /*if elements.len() == 1 { out.pop(); out.pop(); out += "\n"; }*/ out + &format!("{}{}", tabs(ind), closing) } else { let mut out = format!("{}", open); let last_elem = elem_text.pop().unwrap(); let iter = elem_text.iter(); for el in iter { out += &format!("{}, ", el); } let mut last_elem_lines = last_elem.lines(); out += last_elem_lines.next().unwrap(); for line in last_elem_lines { out += &format!("\n{}{}", tabs(ind), line); } out.push(closing); out } } impl SpwnFmt for DictDef { fn fmt(&self, ind: Indent) -> String { match self { DictDef::Def((name, expr)) => format!("{}{}: {}", tabs(ind), name, expr.fmt(ind)), DictDef::Extract(expr) => format!("{}..{}", tabs(ind), expr.fmt(ind)), } } } // fn trim_start_tabs(string: &str) -> (&str, Indent) { // 
//https://doc.rust-lang.org/src/core/str/mod.rs.html#4082-4090 // let mut ind = 0; // for (i, c) in string.chars().enumerate() { // match c { // '\t' => ind += 4, // ' ' => ind += 1, // _ => return (unsafe { string.get_unchecked(i..string.len()) }, ind), // } // } // ("", ind) // } // fn indent_comment(comment: &str, ind: Indent) -> String { // let mut in_comment = false; // let mut current_off = 0; // let mut out = String::new(); // for line in comment.lines() { // let (trimmed, ind_offset) = trim_start_tabs(line); // if !in_comment { // if trimmed.starts_with("//") { // out += &format!("{}{}\r\n", tabs(ind), trimmed); // } else if trimmed.starts_with("/*") { // in_comment = true; // current_off = ind_offset; // out += &format!("{}{}\r\n", tabs(ind), trimmed); // } // } else { // out += &format!("{}{}\r\n", tabs(ind_offset - current_off), trimmed); // } // if line.trim_end().ends_with("*/") { // in_comment = false // } // } // out // } /*#[cfg(test)] mod tests { use super::*; #[test] fn test() { println!( "{}", indent_comment( &String::from( " //hello /* a = { b = 2 c = 3 a = { b = 2 c = 3 a = { b = 2 c = 3 } } } */ //bruh //bruh //bruh " ), 0 ) ) } }*/ impl SpwnFmt for Statement { fn fmt(&self, ind: Indent) -> String { let mut out = String::new(); // if let Some(comment) = &self.comment.0 { // //out += "[stmt pre]"; // out += &indent_comment(comment, ind); // // // if !comment.ends_with('\n') { // out += "\n"; // } // out += &tabs(ind); // } out += &if self.arrow { format!("-> {}\n", self.body.fmt(ind)) } else { format!("{}\n", self.body.fmt(ind)) }; // if let Some(comment) = &self.comment.1 { // if !comment.starts_with('\n') { // out += "\n"; // } // //out += &tabs(ind); // //out += "[stmt post]"; // out += &indent_comment(comment, ind); // } out } } impl SpwnFmt for StatementBody { fn fmt(&self, ind: Indent) -> String { match self { //StatementBody::Definition(def) => format!("{}", def.fmt(ind)), StatementBody::Call(call) => call.fmt(ind), 
StatementBody::Expr(x) => x.fmt(ind), StatementBody::TypeDef(x) => format!("type {}", x), StatementBody::Return(x) => match x { Some(expr) => format!("return {}", expr.fmt(ind)), None => "return".to_string(), }, StatementBody::Definition(x) => x.fmt(ind), StatementBody::Impl(x) => x.fmt(ind), StatementBody::If(x) => x.fmt(ind), StatementBody::For(x) => x.fmt(ind), StatementBody::While(_) => "While loop lol".to_string(), StatementBody::Error(x) => x.fmt(ind), StatementBody::Extract(x) => format!("extract {}", x.fmt(ind)), StatementBody::Break => String::from("break"), StatementBody::Continue => String::from("continue"), } } } //for object def impl SpwnFmt for (Expression, Expression) { fn fmt(&self, ind: Indent) -> String { format!("{}: {}", self.0.fmt(ind), self.1.fmt(ind)) } } impl SpwnFmt for ValueBody { fn fmt(&self, ind: Indent) -> String { use ValueBody::*; match self { Id(x) => x.fmt(ind), Number(x) => format!("{}", x), CmpStmt(x) => format!("!{{\n{}\n{}}}", x.fmt(ind + 4), tabs(ind)), Dictionary(x) => element_list(x, '{', '}', ind), Array(x) => element_list(x, '[', ']', ind), Symbol(x) => x.to_string(), Bool(x) => format!("{}", x), Expression(x) => format!("({})", x.fmt(ind)), Str(x) => format!("\"{}\"", x.inner), Import(x, f) => format!("import{} {:?}", if *f { "!" 
} else { "" }, x), Obj(x) => { (match x.mode { ObjectMode::Object => "obj".to_string(), ObjectMode::Trigger => "trigger".to_string(), }) + &element_list(&x.props, '{', '}', ind) } Macro(x) => x.fmt(ind), Resolved(_) => "<val>".to_string(), TypeIndicator(x) => format!("@{}", x), Null => "null".to_string(), SelfVal => "self".to_string(), Ternary(t) => format!( "{} if {} else {}", t.if_expr.fmt(ind), t.condition.fmt(ind), t.else_expr.fmt(ind) ), ListComp(c) => format!( "{} for {} in {}", c.body.fmt(ind), c.symbol, c.iterator.fmt(ind) ), Switch(_, _) => "switch".to_string(), } } } impl SpwnFmt for ValueLiteral { fn fmt(&self, ind: Indent) -> String { self.body.fmt(ind) } } impl SpwnFmt for IdClass { fn fmt(&self, _ind: Indent) -> String { match self { IdClass::Group => "g", IdClass::Color => "c", IdClass::Item => "i", IdClass::Block => "b", } .to_string() } } impl SpwnFmt for Path { fn fmt(&self, ind: Indent) -> String { match self { Path::Member(def) => format!(".{}", def), Path::Associated(def) => format!("::{}", def), Path::NSlice(_def) => "[its a slice ok]".to_string(), Path::Constructor(dict) => format!("::{}", element_list(dict, '{', '}', ind)), Path::Index(call) => format!("[{}]", call.fmt(ind)), Path::Call(x) => element_list(x, '(', ')', ind), Path::Increment => "++".to_string(), Path::Decrement => "--".to_string(), } } } impl SpwnFmt for Argument { fn fmt(&self, ind: Indent) -> String { if let Some(symbol) = &self.symbol { format!("{} = {}", symbol, self.value.fmt(ind)) } else { self.value.fmt(ind) } } } impl SpwnFmt for Call { fn fmt(&self, ind: Indent) -> String { format!("{}!", self.function.fmt(ind)) } } impl SpwnFmt for For { fn fmt(&self, ind: Indent) -> String { format!( "for {} in {} {{\n{}\n{}}}", self.symbol, self.array.fmt(ind), CompoundStatement { statements: self.body.clone() } .fmt(ind + 4), tabs(ind) ) } } impl SpwnFmt for Variable { fn fmt(&self, ind: Indent) -> String { let mut out = String::new(); // if let Some(comment) = &self.comment.0 { 
// //out += "[var pre]"; // out += &indent_comment(comment, ind); // if comment.ends_with('\n') { // out += &tabs(ind); // } // } if let Some(op) = &self.operator { out += &op.fmt(ind); } out += &self.value.fmt(ind); for p in &self.path { out += &p.fmt(ind).to_string(); } // if let Some(comment) = &self.comment.1 { // //out += "[var post]"; // out += &indent_comment(comment, ind); // /*if comment.ends_with("\n") { // out += &tabs(ind); // }*/ // } out } } impl SpwnFmt for Expression { fn fmt(&self, ind: Indent) -> String { let mut out = String::new(); for (i, op) in self.operators.iter().enumerate() { if let Operator::Range = op { out += &format!("{}{}", self.values[i].fmt(ind), (*op).fmt(ind)); } else { out += &format!("{} {} ", self.values[i].fmt(ind), (*op).fmt(ind)); } } out += &self.values.last().unwrap().fmt(ind); out } } impl SpwnFmt for Id { fn fmt(&self, ind: Indent) -> String { if self.unspecified { format!("?{}", self.class_name.fmt(ind)) } else { format!("{}{}", self.number, self.class_name.fmt(ind)) } } } impl SpwnFmt for Operator { fn fmt(&self, _ind: Indent) -> String { match self { Operator::Or => "||", Operator::And => "&&", Operator::Equal => "==", Operator::NotEqual => "!=", Operator::Range => "..", Operator::MoreOrEqual => ">=", Operator::LessOrEqual => "<=", Operator::More => ">", Operator::Less => "<", Operator::Slash => "/", Operator::IntDividedBy => "/%", Operator::Star => "*", Operator::Power => "^", Operator::Plus => "+", Operator::Minus => "-", Operator::Modulo => "%", Operator::Assign => "=", Operator::Add => "+=", Operator::Subtract => "-=", Operator::Multiply => "*=", Operator::Divide => "/=", Operator::IntDivide => "/%=", Operator::As => "as", Operator::Has => "has", Operator::Either => "|", Operator::Exponate => "^=", Operator::Modulate => "%=", Operator::Swap => "<=>", } .to_string() } } impl SpwnFmt for UnaryOperator { fn fmt(&self, _ind: Indent) -> String { match self { UnaryOperator::Not => "!", UnaryOperator::Minus => "-", 
UnaryOperator::Range => "..", UnaryOperator::Decrement => "--", UnaryOperator::Increment => "++", } .to_string() } } impl SpwnFmt for Definition { fn fmt(&self, ind: Indent) -> String { format!( "{}{}{}", if self.mutable { "let " } else { "" }, self.symbol.fmt(ind), if let Some(value) = &self.value { format!(" = {}", value.fmt(ind)) } else { String::new() } ) } } impl SpwnFmt for Error { fn fmt(&self, ind: Indent) -> String { format!("error {}", self.message.fmt(ind)) } } fn trim_newline(s: &mut String) { if s.ends_with('\n') { s.pop(); if s.ends_with('\r') { s.pop(); } } } impl SpwnFmt for CompoundStatement { fn fmt(&self, ind: Indent) -> String { let mut out = String::new(); for s in &self.statements { out += &format!("{}{}", tabs(ind), s.fmt(ind)); } trim_newline(&mut out); out } } impl SpwnFmt for Implementation { fn fmt(&self, ind: Indent) -> String { format!("impl {} ", self.symbol.fmt(ind)) + &element_list(&self.members, '{', '}', ind) } } impl SpwnFmt for If { fn fmt(&self, ind: Indent) -> String { let mut out = format!( "if {} {{\n{}\n{}}}", self.condition.fmt(ind), CompoundStatement { statements: self.if_body.clone() } .fmt(ind + 4), tabs(ind) ); if let Some(body) = &self.else_body { out += &format!( " else {{\n{}\n{}}}", CompoundStatement { statements: body.clone() } .fmt(ind + 4), tabs(ind) ); } out } } impl SpwnFmt for ArgDef { fn fmt(&self, ind: Indent) -> String { let (name, value, tag, typ, _, _) = self; let mut out = tag.fmt(ind); out += name; if let Some(expr) = typ { out += &format!(": {}", expr.fmt(ind)); } if let Some(expr) = value { out += &format!(" = {}", expr.fmt(ind)); } out } } impl SpwnFmt for Macro { fn fmt(&self, ind: Indent) -> String { let mut out = String::new(); out += &self.properties.fmt(ind); out += &element_list(&self.args, '(', ')', ind); out += &format!(" {{\n{}\n{}}}", &self.body.fmt(ind + 4), tabs(ind)); out } } impl SpwnFmt for (String, Vec<Argument>) { fn fmt(&self, ind: Indent) -> String { self.0.clone() + 
&element_list(&self.1, '(', ')', ind) } } impl SpwnFmt for Attribute { fn fmt(&self, ind: Indent) -> String { if self.tags.is_empty() { return String::new(); } let text = String::from("#") + &element_list(&self.tags, '[', ']', ind); if text.len() > 60 { text + "\n" + &tabs(ind) } else { text + " " } } }
26.775168
95
0.423236
26f045882b53754ad0ab8861664970db12683466
5,627
// Copyright 2016 PingCAP, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // See the License for the specific language governing permissions and // limitations under the License. // Copyright 2016 The etcd Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::collections::VecDeque; use eraftpb::Message; use fxhash::{FxHashMap, FxHashSet}; /// Determines the relative safety of and consistency of read only requests. #[derive(Debug, PartialEq, Clone, Copy)] pub enum ReadOnlyOption { /// Safe guarantees the linearizability of the read only request by /// communicating with the quorum. It is the default and suggested option. Safe, /// LeaseBased ensures linearizability of the read only request by /// relying on the leader lease. It can be affected by clock drift. /// If the clock drift is unbounded, leader might keep the lease longer than it /// should (clock can move backward/pause without any bound). ReadIndex is not safe /// in that case. LeaseBased, } impl Default for ReadOnlyOption { fn default() -> ReadOnlyOption { ReadOnlyOption::Safe } } /// ReadState provides state for read only query. 
/// It's caller's responsibility to send MsgReadIndex first before getting /// this state from ready. It's also caller's duty to differentiate if this /// state is what it requests through request_ctx, e.g. given a unique id as /// request_ctx. #[derive(Default, Debug, PartialEq, Clone)] pub struct ReadState { /// The index of the read state. pub index: u64, /// A datagram consisting of context about the request. pub request_ctx: Vec<u8>, } #[derive(Default, Debug, Clone)] pub struct ReadIndexStatus { pub req: Message, pub index: u64, pub acks: FxHashSet<u64>, } #[derive(Default, Debug, Clone)] pub struct ReadOnly { pub option: ReadOnlyOption, pub pending_read_index: FxHashMap<Vec<u8>, ReadIndexStatus>, pub read_index_queue: VecDeque<Vec<u8>>, } impl ReadOnly { pub fn new(option: ReadOnlyOption) -> ReadOnly { ReadOnly { option, pending_read_index: FxHashMap::default(), read_index_queue: VecDeque::new(), } } /// Adds a read only request into readonly struct. /// /// `index` is the commit index of the raft state machine when it received /// the read only request. /// /// `m` is the original read only request message from the local or remote node. pub fn add_request(&mut self, index: u64, m: Message) { let ctx = { let key = m.get_entries()[0].get_data(); if self.pending_read_index.contains_key(key) { return; } key.to_vec() }; let status = ReadIndexStatus { req: m, index, acks: FxHashSet::default(), }; self.pending_read_index.insert(ctx.clone(), status); self.read_index_queue.push_back(ctx); } /// Notifies the ReadOnly struct that the raft state machine received /// an acknowledgment of the heartbeat that attached with the read only request /// context. 
pub fn recv_ack(&mut self, m: &Message) -> FxHashSet<u64> { match self.pending_read_index.get_mut(m.get_context()) { None => Default::default(), Some(rs) => { rs.acks.insert(m.get_from()); // add one to include an ack from local node let mut set_with_self = FxHashSet::default(); set_with_self.insert(m.get_to()); rs.acks.union(&set_with_self).cloned().collect() } } } /// Advances the read only request queue kept by the ReadOnly struct. /// It dequeues the requests until it finds the read only request that has /// the same context as the given `m`. pub fn advance(&mut self, m: &Message) -> Vec<ReadIndexStatus> { let mut rss = vec![]; if let Some(i) = self.read_index_queue.iter().position(|x| { if !self.pending_read_index.contains_key(x) { panic!("cannot find correspond read state from pending map"); } *x == m.get_context() }) { for _ in 0..=i { let rs = self.read_index_queue.pop_front().unwrap(); let status = self.pending_read_index.remove(&rs).unwrap(); rss.push(status); } } rss } /// Returns the context of the last pending read only request in ReadOnly struct. pub fn last_pending_request_ctx(&self) -> Option<Vec<u8>> { self.read_index_queue.back().cloned() } #[inline] pub fn pending_read_count(&self) -> usize { self.read_index_queue.len() } }
35.389937
87
0.643149
61200d8f2e10d731fc0b94a4eb391e866d83233b
6,000
use super::chan::{channel, Sender, Receiver}; use futures::executor::LocalPool; use futures::task::LocalSpawnExt; enum StepResult { Halt, StepIp(usize), } pub struct IntCode { pub memory: Vec<i64>, ip: usize, relative_base: i64, inputs: Receiver<i64>, outputs: Sender<i64>, } impl IntCode { pub fn new(input: &str, inputs: Receiver<i64>, outputs: Sender<i64>) -> Self { let memory = input .split("\n") .filter(|a| !a.is_empty()) .nth(0).unwrap() .split(",") .map(|a| a.parse::<i64>().unwrap()) .collect(); IntCode { memory, ip: 0, relative_base: 0, inputs, outputs, } } async fn step(&mut self) -> StepResult { let opcode = self.memory[self.ip] % 100; match opcode { 1 => { // Add let a = self.read(1); let b = self.read(2); self.write(3, a + b); return StepResult::StepIp(4); }, 2 => { // Multiply let a = self.read(1); let b = self.read(2); self.write(3, a * b); return StepResult::StepIp(4); }, 3 => { // Input let val = self.inputs.clone().await; self.write(1, val); return StepResult::StepIp(2); }, 4 => { // Output let val = self.read(1); self.outputs.send(val); return StepResult::StepIp(2); }, 5 => { // Jump If True let test = self.read(1); let dest = self.read(2); if test != 0 { self.ip = dest as usize; return StepResult::StepIp(0); } else { return StepResult::StepIp(3); } }, 6 => { // Jump If False let test = self.read(1); let dest = self.read(2); if test == 0 { self.ip = dest as usize; return StepResult::StepIp(0); } else { return StepResult::StepIp(3); } }, 7 => { // Less Than let a = self.read(1); let b = self.read(2); self.write(3, if a < b { 1 } else { 0 }); return StepResult::StepIp(4); }, 8 => { // Equal let a = self.read(1); let b = self.read(2); self.write(3, if a == b { 1 } else { 0 }); return StepResult::StepIp(4); }, 9 => { // Adjust Relative Base let a = self.read(1); self.relative_base += a; return StepResult::StepIp(2); }, 99 => { return StepResult::Halt; }, _ => { assert!(false); unreachable!(); } } } pub async fn run(&mut self) { loop { match 
self.step().await { StepResult::Halt => { return; }, StepResult::StepIp(dist) => { self.ip += dist; }, } } } fn read(&mut self, offset: usize) ->i64 { let mut factor = 10; for _ in 0..offset { factor *= 10; } let opcode = self.memory[self.ip]; let mode = (opcode / factor) % 10; let contents = self.memory[self.ip + offset]; match mode { 0 => self.read_index(contents as usize), 1 => contents, 2 => self.read_index((self.relative_base + contents) as usize), _ => { assert!(false); unreachable!(); }, } } fn read_index(&mut self, index: usize) -> i64 { if index > self.memory.len() { 0 } else { self.memory[index] } } fn write(&mut self, offset: usize, value: i64) { let mut factor = 10; for _ in 0..offset { factor *= 10; } let opcode = self.memory[self.ip]; let mode = (opcode / factor) % 10; let contents = self.memory[self.ip + offset]; match mode { 0 => self.write_index(contents as usize, value), 1 => { assert!(false); unreachable!(); }, 2 => self.write_index((self.relative_base + contents) as usize, value), _ => { assert!(false); unreachable!(); }, } } fn write_index(&mut self, index: usize, value: i64) { if index >= self.memory.len() { self.memory.resize(index + 1, 0); } self.memory[index] = value; } } pub async fn async_int_code(prog: &'static str, inputs: Receiver<i64>, outputs: Sender<i64>) { let mut comp = IntCode::new(prog, inputs, outputs.clone()); comp.run().await; } pub fn run_int_code(prog: &'static str, inputs: Vec<i64>) -> Vec<i64> { let mut pool = LocalPool::new(); let spawner = pool.spawner(); let (itx, irx) = channel("inputs".to_owned(), false); let (otx, orx) = channel("outputs".to_owned(), false); for input in inputs { itx.send(input); } spawner.spawn_local(async_int_code(prog, irx, otx)).unwrap(); pool.run(); orx.remainder() }
23.904382
92
0.403667
18f938dc1940731dbf4758dd7ee25623e1817469
16,051
use wagyu_model::no_std::*; use wagyu_model::{Amount, AmountError}; use core::fmt; use serde::Serialize; // Number of satoshis (base unit) per BTC const COIN: i64 = 1_0000_0000; // Maximum number of satoshis const MAX_COINS: i64 = 21_000_000 * COIN; /// Represents the amount of Bitcoin in satoshis #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize)] pub struct BitcoinAmount(pub i64); pub enum Denomination { // sat Satoshi, // uBTC (bit) MicroBit, // mBTC MilliBit, // cBTC CentiBit, // dBTC DeciBit, // BTC Bitcoin, } impl Denomination { /// The number of decimal places more than a satoshi. fn precision(self) -> u32 { match self { Denomination::Satoshi => 0, Denomination::MicroBit => 2, Denomination::MilliBit => 5, Denomination::CentiBit => 6, Denomination::DeciBit => 7, Denomination::Bitcoin => 8, } } } impl fmt::Display for Denomination { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "{}", match self { Denomination::Satoshi => "satoshi", Denomination::MicroBit => "uBTC", Denomination::MilliBit => "mBTC", Denomination::CentiBit => "cBTC", Denomination::DeciBit => "dBTC", Denomination::Bitcoin => "BTC", } ) } } impl Amount for BitcoinAmount {} impl BitcoinAmount { /// The zero amount. pub const ZERO: BitcoinAmount = BitcoinAmount(0); /// Exactly one satoshi. pub const ONE_SAT: BitcoinAmount = BitcoinAmount(1); /// Exactly one bitcoin. 
pub const ONE_BTC: BitcoinAmount = BitcoinAmount(COIN); pub fn from_satoshi(satoshis: i64) -> Result<Self, AmountError> { if -MAX_COINS <= satoshis && satoshis <= MAX_COINS { Ok(Self(satoshis)) } else { return Err(AmountError::AmountOutOfBounds( satoshis.to_string(), MAX_COINS.to_string(), )); } } pub fn from_ubtc(ubtc_value: i64) -> Result<Self, AmountError> { let satoshis = ubtc_value * 10_i64.pow(Denomination::MicroBit.precision()); Self::from_satoshi(satoshis) } pub fn from_mbtc(mbtc_value: i64) -> Result<Self, AmountError> { let satoshis = mbtc_value * 10_i64.pow(Denomination::MilliBit.precision()); Self::from_satoshi(satoshis) } pub fn from_cbtc(cbtc_value: i64) -> Result<Self, AmountError> { let satoshis = cbtc_value * 10_i64.pow(Denomination::CentiBit.precision()); Self::from_satoshi(satoshis) } pub fn from_dbtc(dbtc_value: i64) -> Result<Self, AmountError> { let satoshis = dbtc_value * 10_i64.pow(Denomination::DeciBit.precision()); Self::from_satoshi(satoshis) } pub fn from_btc(btc_value: i64) -> Result<Self, AmountError> { let satoshis = btc_value * 10_i64.pow(Denomination::Bitcoin.precision()); Self::from_satoshi(satoshis) } pub fn add(self, b: Self) -> Result<Self, AmountError> { Self::from_satoshi(self.0 + b.0) } pub fn sub(self, b: BitcoinAmount) -> Result<Self, AmountError> { Self::from_satoshi(self.0 - b.0) } } impl fmt::Display for BitcoinAmount { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.0.to_string()) } } #[cfg(test)] mod tests { use super::*; fn test_from_satoshi(sat_value: i64, expected_amount: BitcoinAmount) { let amount = BitcoinAmount::from_satoshi(sat_value).unwrap(); assert_eq!(expected_amount, amount) } fn test_from_ubtc(ubtc_value: i64, expected_amount: BitcoinAmount) { let amount = BitcoinAmount::from_ubtc(ubtc_value).unwrap(); assert_eq!(expected_amount, amount) } fn test_from_mbtc(mbtc_value: i64, expected_amount: BitcoinAmount) { let amount = BitcoinAmount::from_mbtc(mbtc_value).unwrap(); 
assert_eq!(expected_amount, amount) } fn test_from_cbtc(cbtc_value: i64, expected_amount: BitcoinAmount) { let amount = BitcoinAmount::from_cbtc(cbtc_value).unwrap(); assert_eq!(expected_amount, amount) } fn test_from_dbtc(dbtc_value: i64, expected_amount: BitcoinAmount) { let amount = BitcoinAmount::from_dbtc(dbtc_value).unwrap(); assert_eq!(expected_amount, amount) } fn test_from_btc(btc_value: i64, expected_amount: BitcoinAmount) { let amount = BitcoinAmount::from_btc(btc_value).unwrap(); assert_eq!(expected_amount, amount) } fn test_addition(a: &i64, b: &i64, result: &i64) { let a = BitcoinAmount::from_satoshi(*a).unwrap(); let b = BitcoinAmount::from_satoshi(*b).unwrap(); let result = BitcoinAmount::from_satoshi(*result).unwrap(); assert_eq!(result, a.add(b).unwrap()); } fn test_subtraction(a: &i64, b: &i64, result: &i64) { let a = BitcoinAmount::from_satoshi(*a).unwrap(); let b = BitcoinAmount::from_satoshi(*b).unwrap(); let result = BitcoinAmount::from_satoshi(*result).unwrap(); assert_eq!(result, a.sub(b).unwrap()); } pub struct AmountDenominationTestCase { satoshi: i64, micro_bit: i64, milli_bit: i64, centi_bit: i64, deci_bit: i64, bitcoin: i64, } mod valid_conversions { use super::*; const TEST_AMOUNTS: [AmountDenominationTestCase; 5] = [ AmountDenominationTestCase { satoshi: 0, micro_bit: 0, milli_bit: 0, centi_bit: 0, deci_bit: 0, bitcoin: 0, }, AmountDenominationTestCase { satoshi: 100000000, micro_bit: 1000000, milli_bit: 1000, centi_bit: 100, deci_bit: 10, bitcoin: 1, }, AmountDenominationTestCase { satoshi: 100000000000, micro_bit: 1000000000, milli_bit: 1000000, centi_bit: 100000, deci_bit: 10000, bitcoin: 1000, }, AmountDenominationTestCase { satoshi: 123456700000000, micro_bit: 1234567000000, milli_bit: 1234567000, centi_bit: 123456700, deci_bit: 12345670, bitcoin: 1234567, }, AmountDenominationTestCase { satoshi: 2100000000000000, micro_bit: 21000000000000, milli_bit: 21000000000, centi_bit: 2100000000, deci_bit: 210000000, bitcoin: 21000000, }, 
]; #[test] fn test_satoshi_conversion() { TEST_AMOUNTS .iter() .for_each(|amounts| test_from_satoshi(amounts.satoshi, BitcoinAmount(amounts.satoshi))); } #[test] fn test_ubtc_conversion() { TEST_AMOUNTS .iter() .for_each(|amounts| test_from_ubtc(amounts.micro_bit, BitcoinAmount(amounts.satoshi))); } #[test] fn test_mbtc_conversion() { TEST_AMOUNTS .iter() .for_each(|amounts| test_from_mbtc(amounts.milli_bit, BitcoinAmount(amounts.satoshi))); } #[test] fn test_cbtc_conversion() { TEST_AMOUNTS .iter() .for_each(|amounts| test_from_cbtc(amounts.centi_bit, BitcoinAmount(amounts.satoshi))); } #[test] fn test_dbtc_conversion() { TEST_AMOUNTS .iter() .for_each(|amounts| test_from_dbtc(amounts.deci_bit, BitcoinAmount(amounts.satoshi))); } #[test] fn test_btc_conversion() { TEST_AMOUNTS .iter() .for_each(|amounts| test_from_btc(amounts.bitcoin, BitcoinAmount(amounts.satoshi))); } } mod valid_arithmetic { use super::*; const TEST_VALUES: [(i64, i64, i64); 7] = [ (0, 0, 0), (1, 2, 3), (100000, 0, 100000), (123456789, 987654321, 1111111110), (100000000000000, 2000000000000000, 2100000000000000), (-100000000000000, -2000000000000000, -2100000000000000), (1000000, -1000000, 0), ]; #[test] fn test_valid_addition() { TEST_VALUES.iter().for_each(|(a, b, c)| test_addition(a, b, c)); } #[test] fn test_valid_subtraction() { TEST_VALUES.iter().for_each(|(a, b, c)| test_subtraction(c, b, a)); } } mod test_invalid { use super::*; mod test_out_of_bounds { use super::*; const INVALID_TEST_AMOUNTS: [AmountDenominationTestCase; 4] = [ AmountDenominationTestCase { satoshi: 2100000100000000, micro_bit: 21000001000000, milli_bit: 21000001000, centi_bit: 2100000100, deci_bit: 210000010, bitcoin: 21000001, }, AmountDenominationTestCase { satoshi: -2100000100000000, micro_bit: -21000001000000, milli_bit: -21000001000, centi_bit: -2100000100, deci_bit: -210000010, bitcoin: -21000001, }, AmountDenominationTestCase { satoshi: 1000000000000000000, micro_bit: 10000000000000000, milli_bit: 
10000000000000, centi_bit: 1000000000000, deci_bit: 100000000000, bitcoin: 10000000000, }, AmountDenominationTestCase { satoshi: -1000000000000000000, micro_bit: -10000000000000000, milli_bit: -10000000000000, centi_bit: -1000000000000, deci_bit: -100000000000, bitcoin: -10000000000, }, ]; #[should_panic(expected = "AmountOutOfBounds")] #[test] fn test_invalid_satoshi_conversion() { INVALID_TEST_AMOUNTS .iter() .for_each(|amounts| test_from_satoshi(amounts.satoshi, BitcoinAmount(amounts.satoshi))); } #[should_panic(expected = "AmountOutOfBounds")] #[test] fn test_invalid_ubtc_conversion() { INVALID_TEST_AMOUNTS .iter() .for_each(|amounts| test_from_ubtc(amounts.micro_bit, BitcoinAmount(amounts.satoshi))); } #[should_panic(expected = "AmountOutOfBounds")] #[test] fn test_invalid_mbtc_conversion() { INVALID_TEST_AMOUNTS .iter() .for_each(|amounts| test_from_mbtc(amounts.milli_bit, BitcoinAmount(amounts.satoshi))); } #[should_panic(expected = "AmountOutOfBounds")] #[test] fn test_invalid_cbtc_conversion() { INVALID_TEST_AMOUNTS .iter() .for_each(|amounts| test_from_cbtc(amounts.centi_bit, BitcoinAmount(amounts.satoshi))); } #[should_panic(expected = "AmountOutOfBounds")] #[test] fn test_invalid_dbtc_conversion() { INVALID_TEST_AMOUNTS .iter() .for_each(|amounts| test_from_dbtc(amounts.deci_bit, BitcoinAmount(amounts.satoshi))); } #[should_panic(expected = "AmountOutOfBounds")] #[test] fn test_invalid_btc_conversion() { INVALID_TEST_AMOUNTS .iter() .for_each(|amounts| test_from_btc(amounts.bitcoin, BitcoinAmount(amounts.satoshi))); } } mod test_invalid_conversion { use super::*; const INVALID_TEST_AMOUNTS: [AmountDenominationTestCase; 4] = [ AmountDenominationTestCase { satoshi: 1, micro_bit: 1, milli_bit: 1, centi_bit: 1, deci_bit: 1, bitcoin: 1, }, AmountDenominationTestCase { satoshi: 1, micro_bit: 10, milli_bit: 100, centi_bit: 1000, deci_bit: 1000000, bitcoin: 100000000, }, AmountDenominationTestCase { satoshi: 123456789, micro_bit: 1234567, milli_bit: 1234, 
centi_bit: 123, deci_bit: 12, bitcoin: 1, }, AmountDenominationTestCase { satoshi: 2100000000000000, micro_bit: 21000000000000, milli_bit: 21000000000, centi_bit: 2100000000, deci_bit: 210000000, bitcoin: 20999999, }, ]; #[should_panic] #[test] fn test_invalid_ubtc_conversion() { INVALID_TEST_AMOUNTS .iter() .for_each(|amounts| test_from_ubtc(amounts.micro_bit, BitcoinAmount(amounts.satoshi))); } #[should_panic] #[test] fn test_invalid_mbtc_conversion() { INVALID_TEST_AMOUNTS .iter() .for_each(|amounts| test_from_mbtc(amounts.milli_bit, BitcoinAmount(amounts.satoshi))); } #[should_panic] #[test] fn test_invalid_cbtc_conversion() { INVALID_TEST_AMOUNTS .iter() .for_each(|amounts| test_from_cbtc(amounts.centi_bit, BitcoinAmount(amounts.satoshi))); } #[should_panic] #[test] fn test_invalid_dbtc_conversion() { INVALID_TEST_AMOUNTS .iter() .for_each(|amounts| test_from_dbtc(amounts.deci_bit, BitcoinAmount(amounts.satoshi))); } #[should_panic] #[test] fn test_invalid_btc_conversion() { INVALID_TEST_AMOUNTS .iter() .for_each(|amounts| test_from_btc(amounts.bitcoin, BitcoinAmount(amounts.satoshi))); } } mod invalid_arithmetic { use super::*; const TEST_VALUES: [(i64, i64, i64); 8] = [ (0, 0, 1), (1, 2, 5), (100000, 1, 100000), (123456789, 123456789, 123456789), (-1000, -1000, 2000), (2100000000000000, 1, 2100000000000001), (2100000000000000, 2100000000000000, 4200000000000000), (-2100000000000000, -2100000000000000, -4200000000000000), ]; #[should_panic] #[test] fn test_invalid_addition() { TEST_VALUES.iter().for_each(|(a, b, c)| test_addition(a, b, c)); } #[should_panic] #[test] fn test_invalid_subtraction() { TEST_VALUES.iter().for_each(|(a, b, c)| test_subtraction(a, b, c)); } } } }
32.230924
108
0.522647
e22b5558ebe99157ed58210e7591ef0b3a406b9f
83,512
// Take a look at the license at the top of the repository in the LICENSE file. //! Translation between GLib/GLib-based FFI types and their Rust counterparts. //! //! This module allows library bindings authors to decouple type translation //! logic and use unified idioms at FFI boundaries. It also implements //! translation of GLib core data types. //! //! `FromGlib`, `from_glib` and `IntoGlib` translate simple types like `bool`. //! //! ```ignore //! pub fn set_accept_focus(&self, accept_focus: bool) { //! unsafe { gdk::ffi::gdk_window_set_accept_focus(self.pointer, accept_focus.into_glib()) } //! } //! //! pub fn get_accept_focus(&self) -> bool { //! unsafe { from_glib(gdk::ffi::gdk_window_get_accept_focus(self.pointer)) } //! } //! ``` //! //! Implementing [`OptionIntoGlib`] on a Rust type `T` allows specifying a sentinel to indicate //! a `None` value and auto-implementing [`FromGlib`] for `Option<T>`, which would not be //! possible in dependent crates due to the [orphan rule](https://doc.rust-lang.org/book/ch10-02-traits.html#implementing-a-trait-on-a-type). //! In the example below, [`IntoGlib`] is auto-implemented for `Option<SpecialU32>`. //! //! ``` //! # use glib::translate::*; //! struct SpecialU32(u32); //! impl IntoGlib for SpecialU32 { //! type GlibType = libc::c_uint; //! fn into_glib(self) -> libc::c_uint { //! self.0 as libc::c_uint //! } //! } //! impl OptionIntoGlib for SpecialU32 { //! const GLIB_NONE: Self::GlibType = 0xFFFFFF; //! } //! ``` //! //! In order to auto-implement [`FromGlib`] for `Option<SpecialU32>`, proceed as follows: //! //! ``` //! # use glib::translate::*; //! # struct SpecialU32(u32); //! # impl IntoGlib for SpecialU32 { //! # type GlibType = libc::c_uint; //! # fn into_glib(self) -> libc::c_uint { //! # self.0 as libc::c_uint //! # } //! # } //! # impl OptionIntoGlib for SpecialU32 { //! # const GLIB_NONE: Self::GlibType = 0xFFFFFF; //! # } //! impl TryFromGlib<libc::c_uint> for SpecialU32 { //! 
type Error = GlibNoneError; //! unsafe fn try_from_glib(val: libc::c_uint) -> Result<Self, GlibNoneError> { //! if val == SpecialU32::GLIB_NONE { //! return Err(GlibNoneError); //! } //! Ok(SpecialU32(val as u32)) //! } //! } //! ``` //! //! The [`TryFromGlib`] trait can also be implemented when the Glib type range is larger than the //! target Rust type's range. In the example below, the Rust type `U32` can be built from a signed //! [`libc::c_long`], which means that the negative range is not valid. //! //! ``` //! # use std::convert::TryFrom; //! # use std::num::TryFromIntError; //! # use glib::translate::*; //! struct U32(u32); //! impl TryFromGlib<libc::c_long> for U32 { //! type Error = TryFromIntError; //! unsafe fn try_from_glib(val: libc::c_long) -> Result<Self, TryFromIntError> { //! Ok(U32(u32::try_from(val)?)) //! } //! } //! ``` //! //! Finally, you can define [`TryFromGlib`] with both `None` and `Invalid` alternatives by setting //! the associated `type Error = GlibNoneOrInvalidError<I>` (where `I` is the `Error` type //! when the value is invalid), which results in auto-implementing [`FromGlib`] for //! `Result<Option<T>, I>`. //! //! `ToGlibPtr`, `FromGlibPtrNone`, `FromGlibPtrFull` and `FromGlibPtrBorrow` work on `gpointer`s //! and ensure correct ownership of values //! according to [Glib ownership transfer rules](https://gi.readthedocs.io/en/latest/annotations/giannotations.html). //! //! `FromGlibPtrNone` and `FromGlibPtrFull` //! must be called on values obtained from C, //! according to their `transfer` annotations. //! They acquire non-gobject types, //! as well as turning floating references to strong ones, //! which are the only ones properly handled by the Rust bindings. //! //! For more information about floating references, please refer to the "Floating references" section //! of [the gobject reference](https://developer.gnome.org/gobject/stable/gobject-The-Base-Object-Type.html). //! //! ```ignore //! 
fn get_title(&self) -> Option<String> { //! unsafe { //! let title = gtk::ffi::gtk_window_get_title(self.pointer); //! from_glib_none(title) //! } //! } //! fn create_bool(value: gboolean) -> Variant { //! unsafe { //! let variant = ffi::g_variant_new_boolean(value); //! // g_variant_new_boolean has `transfer none` //! from_glib_none(variant) //! } //! } //! ``` //! //! Letting the foreign library borrow pointers from the Rust side often //! requires having a temporary variable of an intermediate type (e.g. `CString`). //! A `Stash` contains the temporary storage and a pointer into it that //! is valid for the lifetime of the `Stash`. As the lifetime of the `Stash` returned //! from `to_glib_none` is at least the enclosing statement, you can avoid explicitly //! binding the stash in most cases and just take the pointer out of it: //! //! ```ignore //! pub fn set_icon_name(&self, name: &str) { //! unsafe { //! gdk::ffi::gdk_window_set_icon_name(self.pointer, name.to_glib_none().0) //! } //! } //! ``` use libc::{c_char, size_t}; use std::char; use std::cmp::{Eq, Ordering, PartialEq}; use std::collections::HashMap; use std::error::Error; use std::ffi::{CStr, CString}; use std::ffi::{OsStr, OsString}; use std::fmt; use std::mem; #[cfg(not(windows))] use std::os::unix::prelude::*; use std::path::{Path, PathBuf}; use std::ptr; /// A pointer pub trait Ptr: Copy + 'static { fn is_null(&self) -> bool; fn from<X>(ptr: *mut X) -> Self; fn to<X>(self) -> *mut X; } impl<T: 'static> Ptr for *const T { #[inline] fn is_null(&self) -> bool { (*self).is_null() } #[inline] fn from<X>(ptr: *mut X) -> *const T { ptr as *const T } #[inline] fn to<X>(self) -> *mut X { self as *mut X } } impl<T: 'static> Ptr for *mut T { #[inline] fn is_null(&self) -> bool { (*self).is_null() } #[inline] fn from<X>(ptr: *mut X) -> *mut T { ptr as *mut T } #[inline] fn to<X>(self) -> *mut X { self as *mut X } } /// Overrides pointer mutability. 
/// /// Use when the C API should be specifying a const pointer but doesn't. pub fn mut_override<T>(ptr: *const T) -> *mut T { ptr as *mut T } /// Overrides pointer constness. /// /// Use when the C API need const pointer, but function with `IsA<T>` constraint, /// that usaly don't have const pointer conversion. pub fn const_override<T>(ptr: *mut T) -> *const T { ptr as *const T } /// A trait for creating an uninitialized value. Handy for receiving outparams. pub trait Uninitialized { /// Returns an uninitialized value. unsafe fn uninitialized() -> Self; } /// Returns an uninitialized value. #[inline] pub unsafe fn uninitialized<T: Uninitialized>() -> T { T::uninitialized() } /// Helper type that stores temporary values used for translation. /// /// `P` is the foreign type pointer and the first element of the tuple. /// /// `T` is the Rust type that is translated. /// /// The second element of the tuple is the temporary storage defined /// by the implementation of `ToGlibPtr<P> for T` /// /// Say you want to pass a `*mut GdkWindowAttr` to a foreign function. The `Stash` /// will own a `GdkWindowAttr` and a `CString` that `GdkWindowAttr::title` points into. /// /// ```ignore /// impl <'a> ToGlibPtr<'a, *mut ffi::GdkWindowAttr> for WindowAttr { /// type Storage = (Box<ffi::GdkWindowAttr>, Stash<'a, *const c_char, Option<String>>); /// /// fn to_glib_none(&'a self) -> Stash<*mut ffi::GdkWindowAttr, WindowAttr> { /// let title = self.title.to_glib_none(); /// /// let mut attrs = Box::new(ffi::GdkWindowAttr { /// title: title.0, /// // .... /// }); /// /// Stash(&mut *attrs, (attrs, title)) /// } /// } /// ``` pub struct Stash<'a, P: Copy, T: ?Sized + ToGlibPtr<'a, P>>( pub P, pub <T as ToGlibPtr<'a, P>>::Storage, ); pub struct StashMut<'a, P: Copy, T: ?Sized>(pub P, pub <T as ToGlibPtrMut<'a, P>>::Storage) where T: ToGlibPtrMut<'a, P>; /// Wrapper around values representing borrowed C memory. 
/// /// This is returned by `from_glib_borrow()` and ensures that the wrapped value /// is never dropped when going out of scope. /// /// Borrowed values must never be passed by value or mutable reference to safe Rust code and must /// not leave the C scope in which they are valid. #[derive(Debug)] pub struct Borrowed<T>(mem::ManuallyDrop<T>); impl<T> Borrowed<T> { /// Creates a new borrowed value. pub fn new(val: T) -> Self { Self(mem::ManuallyDrop::new(val)) } /// Extracts the contained value. /// /// # Safety /// /// The returned value must never be dropped and instead has to be passed to `mem::forget()` or /// be directly wrapped in `mem::ManuallyDrop` or another `Borrowed` wrapper. pub unsafe fn into_inner(self) -> T { mem::ManuallyDrop::into_inner(self.0) } } impl<T> AsRef<T> for Borrowed<T> { fn as_ref(&self) -> &T { &*self.0 } } impl<T> std::ops::Deref for Borrowed<T> { type Target = T; fn deref(&self) -> &T { &*self.0 } } /// Translate a simple type. pub trait IntoGlib { type GlibType: Copy; fn into_glib(self) -> Self::GlibType; } impl IntoGlib for bool { type GlibType = ffi::gboolean; #[inline] fn into_glib(self) -> ffi::gboolean { if self { ffi::GTRUE } else { ffi::GFALSE } } } impl IntoGlib for char { type GlibType = u32; #[inline] fn into_glib(self) -> u32 { self as u32 } } impl IntoGlib for Option<char> { type GlibType = u32; #[inline] fn into_glib(self) -> u32 { self.as_ref().map(|&c| c as u32).unwrap_or(0) } } impl IntoGlib for Ordering { type GlibType = i32; #[inline] fn into_glib(self) -> i32 { match self { Ordering::Less => -1, Ordering::Equal => 0, Ordering::Greater => 1, } } } impl<O, E, G> IntoGlib for Result<O, E> where G: Copy, O: IntoGlib<GlibType = G> + TryFromGlib<G, Error = E>, E: IntoGlib<GlibType = G>, { type GlibType = G; #[inline] fn into_glib(self) -> Self::GlibType { match self { Ok(ok) => ok.into_glib(), Err(err) => err.into_glib(), } } } /// A Rust type `T` for which `Option<T>` translates to the same glib type as T. 
pub trait OptionIntoGlib: IntoGlib { const GLIB_NONE: Self::GlibType; } impl<T: OptionIntoGlib> IntoGlib for Option<T> { type GlibType = T::GlibType; #[inline] fn into_glib(self) -> Self::GlibType { match self { Some(t) => t.into_glib(), None => T::GLIB_NONE, } } } /// Provides the default pointer type to be used in some container conversions. /// /// It's `*mut c_char` for `String`, `*mut GtkButton` for `gtk::Button`, etc. pub trait GlibPtrDefault { type GlibType: Ptr; } impl<'a, T: ?Sized + GlibPtrDefault> GlibPtrDefault for &'a T { type GlibType = <T as GlibPtrDefault>::GlibType; } /// Translate to a pointer. pub trait ToGlibPtr<'a, P: Copy> { type Storage; /// Transfer: none. /// /// The pointer in the `Stash` is only valid for the lifetime of the `Stash`. fn to_glib_none(&'a self) -> Stash<'a, P, Self>; /// Transfer: container. /// /// We transfer the container ownership to the foreign library retaining /// the elements ownership. fn to_glib_container(&'a self) -> Stash<'a, P, Self> { unimplemented!(); } /// Transfer: full. /// /// We transfer the ownership to the foreign library. fn to_glib_full(&self) -> P { unimplemented!(); } } /// /// Translate to a pointer with a mutable borrow. pub trait ToGlibPtrMut<'a, P: Copy> { type Storage; /// Transfer: none. /// /// The pointer in the `Stash` is only valid for the lifetime of the `Stash`. 
#[allow(clippy::wrong_self_convention)] fn to_glib_none_mut(&'a mut self) -> StashMut<P, Self>; } impl<'a, P: Ptr, T: ToGlibPtr<'a, P>> ToGlibPtr<'a, P> for Option<T> { type Storage = Option<<T as ToGlibPtr<'a, P>>::Storage>; #[inline] fn to_glib_none(&'a self) -> Stash<'a, P, Option<T>> { self.as_ref() .map_or(Stash(Ptr::from::<()>(ptr::null_mut()), None), |s| { let s = s.to_glib_none(); Stash(s.0, Some(s.1)) }) } #[inline] fn to_glib_full(&self) -> P { self.as_ref() .map_or(Ptr::from::<()>(ptr::null_mut()), ToGlibPtr::to_glib_full) } } impl<'a, 'opt: 'a, P: Ptr, T: ToGlibPtrMut<'a, P>> ToGlibPtrMut<'a, P> for Option<&'opt mut T> { type Storage = Option<<T as ToGlibPtrMut<'a, P>>::Storage>; #[inline] fn to_glib_none_mut(&'a mut self) -> StashMut<'a, P, Option<&'opt mut T>> { self.as_mut() .map_or(StashMut(Ptr::from::<()>(ptr::null_mut()), None), |s| { let s = s.to_glib_none_mut(); StashMut(s.0, Some(s.1)) }) } } impl<'a, P: Ptr, T: ?Sized + ToGlibPtr<'a, P>> ToGlibPtr<'a, P> for &'a T { type Storage = <T as ToGlibPtr<'a, P>>::Storage; #[inline] fn to_glib_none(&'a self) -> Stash<'a, P, Self> { let s = (*self).to_glib_none(); Stash(s.0, s.1) } #[inline] fn to_glib_full(&self) -> P { (*self).to_glib_full() } } impl<'a> ToGlibPtr<'a, *const c_char> for str { type Storage = CString; #[inline] fn to_glib_none(&'a self) -> Stash<'a, *const c_char, Self> { let tmp = CString::new(self).expect("str::ToGlibPtr<*const c_char>: unexpected '\0' character"); Stash(tmp.as_ptr(), tmp) } #[inline] fn to_glib_full(&self) -> *const c_char { unsafe { ffi::g_strndup(self.as_ptr() as *const c_char, self.len() as size_t) as *const c_char } } } impl<'a> ToGlibPtr<'a, *mut c_char> for str { type Storage = CString; #[inline] fn to_glib_none(&'a self) -> Stash<'a, *mut c_char, Self> { let tmp = CString::new(self).expect("str::ToGlibPtr<*mut c_char>: unexpected '\0' character"); Stash(tmp.as_ptr() as *mut c_char, tmp) } #[inline] fn to_glib_full(&self) -> *mut c_char { unsafe { 
ffi::g_strndup(self.as_ptr() as *mut c_char, self.len() as size_t) } } } impl<'a> ToGlibPtr<'a, *const c_char> for String { type Storage = CString; #[inline] fn to_glib_none(&self) -> Stash<'a, *const c_char, String> { let tmp = CString::new(&self[..]) .expect("String::ToGlibPtr<*const c_char>: unexpected '\0' character"); Stash(tmp.as_ptr(), tmp) } #[inline] fn to_glib_full(&self) -> *const c_char { unsafe { ffi::g_strndup(self.as_ptr() as *const c_char, self.len() as size_t) as *const c_char } } } impl<'a> ToGlibPtr<'a, *mut c_char> for String { type Storage = CString; #[inline] fn to_glib_none(&self) -> Stash<'a, *mut c_char, String> { let tmp = CString::new(&self[..]) .expect("String::ToGlibPtr<*mut c_char>: unexpected '\0' character"); Stash(tmp.as_ptr() as *mut c_char, tmp) } #[inline] fn to_glib_full(&self) -> *mut c_char { unsafe { ffi::g_strndup(self.as_ptr() as *const c_char, self.len() as size_t) as *mut c_char } } } impl GlibPtrDefault for str { type GlibType = *mut c_char; } impl GlibPtrDefault for String { type GlibType = *mut c_char; } #[cfg(not(windows))] fn path_to_c(path: &Path) -> CString { // GLib paths on UNIX are always in the local encoding, just like in Rust // // Paths on UNIX must not contain NUL bytes, in which case the conversion // to a CString would fail. The only thing we can do then is to panic, as passing // NULL or the empty string to GLib would cause undefined behaviour. CString::new(path.as_os_str().as_bytes()).expect("Invalid path with NUL bytes") } #[cfg(windows)] fn path_to_c(path: &Path) -> CString { // GLib paths are always UTF-8 strings on Windows, while in Rust they are // WTF-8. As such, we need to convert to a UTF-8 string. This conversion can // fail, see https://simonsapin.github.io/wtf-8/#converting-wtf-8-utf-8 // // It's not clear what we're supposed to do if it fails: the path is not // representable in UTF-8 and thus can't possibly be passed to GLib. 
    // Passing NULL or the empty string to GLib can lead to undefined behaviour, so
    // the only safe option seems to be to simply panic here.
    let path_str = path
        .to_str()
        .expect("Path can't be represented as UTF-8")
        .to_owned();

    // On Windows, paths can have \\?\ prepended for long-path support. See
    // MSDN documentation about CreateFile
    //
    // We have to get rid of this and let GLib take care of all these
    // weirdnesses later
    if path_str.starts_with("\\\\?\\") {
        CString::new(path_str[4..].as_bytes())
    } else {
        CString::new(path_str.as_bytes())
    }
    .expect("Invalid path with NUL bytes")
}

#[cfg(not(windows))]
fn os_str_to_c(s: &OsStr) -> CString {
    // GLib OS string (environment strings) on UNIX are always in the local encoding,
    // just like in Rust
    //
    // OS string on UNIX must not contain NUL bytes, in which case the conversion
    // to a CString would fail. The only thing we can do then is to panic, as passing
    // NULL or the empty string to GLib would cause undefined behaviour.
    CString::new(s.as_bytes()).expect("Invalid OS String with NUL bytes")
}

#[cfg(windows)]
fn os_str_to_c(s: &OsStr) -> CString {
    // GLib OS string (environment strings) are always UTF-8 strings on Windows,
    // while in Rust they are WTF-8. As such, we need to convert to a UTF-8 string.
    // This conversion can fail, see https://simonsapin.github.io/wtf-8/#converting-wtf-8-utf-8
    //
    // It's not clear what we're supposed to do if it fails: the OS string is not
    // representable in UTF-8 and thus can't possibly be passed to GLib.
    // Passing NULL or the empty string to GLib can lead to undefined behaviour, so
    // the only safe option seems to be to simply panic here.
    let os_str = s
        .to_str()
        .expect("OS String can't be represented as UTF-8")
        .to_owned();

    CString::new(os_str.as_bytes()).expect("Invalid OS string with NUL bytes")
}

// Borrowed (`transfer none`) conversions from path / OS-string types to C
// strings. The temporary `CString` stored in the `Stash` owns the buffer.
impl<'a> ToGlibPtr<'a, *const c_char> for Path {
    type Storage = CString;

    #[inline]
    fn to_glib_none(&'a self) -> Stash<'a, *const c_char, Self> {
        let tmp = path_to_c(self);
        Stash(tmp.as_ptr(), tmp)
    }
}

impl<'a> ToGlibPtr<'a, *mut c_char> for Path {
    type Storage = CString;

    #[inline]
    fn to_glib_none(&'a self) -> Stash<'a, *mut c_char, Self> {
        let tmp = path_to_c(self);
        Stash(tmp.as_ptr() as *mut c_char, tmp)
    }
}

impl<'a> ToGlibPtr<'a, *const c_char> for PathBuf {
    type Storage = CString;

    #[inline]
    fn to_glib_none(&'a self) -> Stash<'a, *const c_char, Self> {
        let tmp = path_to_c(self);
        Stash(tmp.as_ptr(), tmp)
    }
}

impl<'a> ToGlibPtr<'a, *mut c_char> for PathBuf {
    type Storage = CString;

    #[inline]
    fn to_glib_none(&'a self) -> Stash<'a, *mut c_char, Self> {
        let tmp = path_to_c(self);
        Stash(tmp.as_ptr() as *mut c_char, tmp)
    }
}

impl GlibPtrDefault for Path {
    type GlibType = *mut c_char;
}

impl GlibPtrDefault for PathBuf {
    type GlibType = *mut c_char;
}

impl<'a> ToGlibPtr<'a, *const c_char> for OsStr {
    type Storage = CString;

    #[inline]
    fn to_glib_none(&'a self) -> Stash<'a, *const c_char, Self> {
        let tmp = os_str_to_c(self);
        Stash(tmp.as_ptr(), tmp)
    }
}

impl<'a> ToGlibPtr<'a, *mut c_char> for OsStr {
    type Storage = CString;

    #[inline]
    fn to_glib_none(&'a self) -> Stash<'a, *mut c_char, Self> {
        let tmp = os_str_to_c(self);
        Stash(tmp.as_ptr() as *mut c_char, tmp)
    }
}

impl<'a> ToGlibPtr<'a, *const c_char> for OsString {
    type Storage = CString;

    #[inline]
    fn to_glib_none(&'a self) -> Stash<'a, *const c_char, Self> {
        let tmp = os_str_to_c(self);
        Stash(tmp.as_ptr(), tmp)
    }
}

impl<'a> ToGlibPtr<'a, *mut c_char> for OsString {
    type Storage = CString;

    #[inline]
    fn to_glib_none(&'a self) -> Stash<'a, *mut c_char, Self> {
        let tmp = os_str_to_c(self);
        Stash(tmp.as_ptr() as *mut c_char, tmp)
    }
}

impl GlibPtrDefault for OsStr
{
    // (completes `impl GlibPtrDefault for OsStr`, whose header is on the previous line)
    type GlibType = *mut c_char;
}

impl GlibPtrDefault for OsString {
    type GlibType = *mut c_char;
}

/// Translate a Rust slice into a C container (flat array, string array, GList, …).
///
/// `none` borrows, `container` transfers ownership of the container only,
/// `full` transfers ownership of both the container and its elements.
pub trait ToGlibContainerFromSlice<'a, P>
where
    Self: Sized,
{
    type Storage;

    #[allow(clippy::wrong_self_convention)]
    fn to_glib_none_from_slice(t: &'a [Self]) -> (P, Self::Storage);
    #[allow(clippy::wrong_self_convention)]
    fn to_glib_container_from_slice(t: &'a [Self]) -> (P, Self::Storage);
    #[allow(clippy::wrong_self_convention)]
    fn to_glib_full_from_slice(t: &[Self]) -> P;
}

// Fundamental (Copy, C-layout-compatible) types: a slice can be handed out
// directly for `none`, or bitwise-copied into a g_malloc'd buffer otherwise.
macro_rules! impl_to_glib_container_from_slice_fundamental {
    ($name:ty) => {
        impl<'a> ToGlibContainerFromSlice<'a, *mut $name> for $name {
            type Storage = &'a [$name];

            fn to_glib_none_from_slice(t: &'a [$name]) -> (*mut $name, &'a [$name]) {
                (t.as_ptr() as *mut $name, t)
            }

            fn to_glib_container_from_slice(t: &'a [$name]) -> (*mut $name, &'a [$name]) {
                (ToGlibContainerFromSlice::to_glib_full_from_slice(t), t)
            }

            fn to_glib_full_from_slice(t: &[$name]) -> *mut $name {
                // idiom fix (clippy::len_zero): was `t.len() == 0`
                if t.is_empty() {
                    return ptr::null_mut();
                }

                unsafe {
                    let res = ffi::g_malloc(mem::size_of::<$name>() * t.len()) as *mut $name;
                    ptr::copy_nonoverlapping(t.as_ptr(), res, t.len());
                    res
                }
            }
        }
    };
}

impl_to_glib_container_from_slice_fundamental!(u8);
impl_to_glib_container_from_slice_fundamental!(i8);
impl_to_glib_container_from_slice_fundamental!(u16);
impl_to_glib_container_from_slice_fundamental!(i16);
impl_to_glib_container_from_slice_fundamental!(u32);
impl_to_glib_container_from_slice_fundamental!(i32);
impl_to_glib_container_from_slice_fundamental!(u64);
impl_to_glib_container_from_slice_fundamental!(i64);
impl_to_glib_container_from_slice_fundamental!(f32);
impl_to_glib_container_from_slice_fundamental!(f64);

// String-like types: converted element-by-element into a NULL-terminated
// `gchar*` array (for `none`, the pointer vec itself is kept in Storage).
macro_rules! impl_to_glib_container_from_slice_string {
    ($name:ty, $ffi_name:ty) => {
        impl<'a> ToGlibContainerFromSlice<'a, *mut $ffi_name> for $name {
            type Storage = (Vec<Stash<'a, $ffi_name, $name>>, Option<Vec<$ffi_name>>);

            fn to_glib_none_from_slice(t: &'a [$name]) -> (*mut $ffi_name, Self::Storage) {
                let v: Vec<_> = t.iter().map(ToGlibPtr::to_glib_none).collect();
                let mut v_ptr: Vec<_> = v.iter().map(|s| s.0).collect();
                // NULL terminator expected by GLib string arrays.
                v_ptr.push(ptr::null_mut() as $ffi_name);
                (v_ptr.as_ptr() as *mut $ffi_name, (v, Some(v_ptr)))
            }

            fn to_glib_container_from_slice(t: &'a [$name]) -> (*mut $ffi_name, Self::Storage) {
                let v: Vec<_> = t.iter().map(ToGlibPtr::to_glib_none).collect();

                let v_ptr = unsafe {
                    // g_malloc0 zero-fills, so slot `len` is the NULL terminator.
                    let v_ptr = ffi::g_malloc0(mem::size_of::<$ffi_name>() * (t.len() + 1))
                        as *mut $ffi_name;

                    for (i, s) in v.iter().enumerate() {
                        ptr::write(v_ptr.add(i), s.0);
                    }

                    v_ptr
                };

                (v_ptr, (v, None))
            }

            fn to_glib_full_from_slice(t: &[$name]) -> *mut $ffi_name {
                unsafe {
                    let v_ptr = ffi::g_malloc0(mem::size_of::<$ffi_name>() * (t.len() + 1))
                        as *mut $ffi_name;

                    for (i, s) in t.iter().enumerate() {
                        ptr::write(v_ptr.add(i), s.to_glib_full());
                    }

                    v_ptr
                }
            }
        }

        impl<'a> ToGlibContainerFromSlice<'a, *const $ffi_name> for $name {
            type Storage = (Vec<Stash<'a, $ffi_name, $name>>, Option<Vec<$ffi_name>>);

            fn to_glib_none_from_slice(t: &'a [$name]) -> (*const $ffi_name, Self::Storage) {
                let v: Vec<_> = t.iter().map(ToGlibPtr::to_glib_none).collect();
                let mut v_ptr: Vec<_> = v.iter().map(|s| s.0).collect();
                v_ptr.push(ptr::null_mut() as $ffi_name);
                (v_ptr.as_ptr() as *const $ffi_name, (v, Some(v_ptr)))
            }

            fn to_glib_container_from_slice(t: &'a [$name]) -> (*const $ffi_name, Self::Storage) {
                let v: Vec<_> = t.iter().map(ToGlibPtr::to_glib_none).collect();

                let v_ptr = unsafe {
                    let v_ptr = ffi::g_malloc0(mem::size_of::<$ffi_name>() * (t.len() + 1))
                        as *mut $ffi_name;

                    for (i, s) in v.iter().enumerate() {
                        ptr::write(v_ptr.add(i), s.0);
                    }

                    v_ptr as *const $ffi_name
                };

                (v_ptr, (v, None))
            }

            fn to_glib_full_from_slice(t: &[$name]) -> *const $ffi_name {
                unsafe {
                    let v_ptr = ffi::g_malloc0(mem::size_of::<$ffi_name>() * (t.len() + 1))
                        as *mut $ffi_name;

                    for (i, s) in t.iter().enumerate() {
                        ptr::write(v_ptr.add(i), s.to_glib_full());
                    }

                    v_ptr as *const $ffi_name
                }
            }
        }
    };
}

impl_to_glib_container_from_slice_string!(&'a str, *mut c_char);
impl_to_glib_container_from_slice_string!(&'a str, *const c_char);
impl_to_glib_container_from_slice_string!(String, *mut c_char);
impl_to_glib_container_from_slice_string!(String, *const c_char);
impl_to_glib_container_from_slice_string!(&'a Path, *mut c_char);
impl_to_glib_container_from_slice_string!(&'a Path, *const c_char);
impl_to_glib_container_from_slice_string!(PathBuf, *mut c_char);
impl_to_glib_container_from_slice_string!(PathBuf, *const c_char);
impl_to_glib_container_from_slice_string!(&'a OsStr, *mut c_char);
impl_to_glib_container_from_slice_string!(&'a OsStr, *const c_char);
impl_to_glib_container_from_slice_string!(OsString, *mut c_char);
impl_to_glib_container_from_slice_string!(OsString, *const c_char);
impl_to_glib_container_from_slice_string!(crate::GString, *mut c_char);
impl_to_glib_container_from_slice_string!(crate::GString, *const c_char);

// Slices into GLib doubly-linked lists. Elements are prepended in reverse
// iteration order so the resulting list preserves the slice order.
impl<'a, T> ToGlibContainerFromSlice<'a, *mut ffi::GList> for T
where
    T: GlibPtrDefault + ToGlibPtr<'a, <T as GlibPtrDefault>::GlibType>,
{
    type Storage = (
        Option<List>,
        Vec<Stash<'a, <T as GlibPtrDefault>::GlibType, T>>,
    );

    #[inline]
    fn to_glib_none_from_slice(t: &'a [T]) -> (*mut ffi::GList, Self::Storage) {
        let stash_vec: Vec<_> = t.iter().rev().map(ToGlibPtr::to_glib_none).collect();
        let mut list: *mut ffi::GList = ptr::null_mut();
        unsafe {
            for stash in &stash_vec {
                list = ffi::g_list_prepend(list, Ptr::to(stash.0));
            }
        }
        (list, (Some(List(list)), stash_vec))
    }

    #[inline]
    fn to_glib_container_from_slice(t: &'a [T]) -> (*mut ffi::GList, Self::Storage) {
        let stash_vec: Vec<_> = t.iter().rev().map(ToGlibPtr::to_glib_none).collect();
        let mut list: *mut ffi::GList = ptr::null_mut();
        unsafe {
            for stash in
&stash_vec {
                // (continuation of the `for` begun on the previous line)
                list = ffi::g_list_prepend(list, Ptr::to(stash.0));
            }
        }
        (list, (None, stash_vec))
    }

    #[inline]
    fn to_glib_full_from_slice(t: &[T]) -> *mut ffi::GList {
        let mut list: *mut ffi::GList = ptr::null_mut();
        unsafe {
            // Prepend in reverse iteration order so the list matches the slice order.
            for ptr in t.iter().rev().map(ToGlibPtr::to_glib_full) {
                list = ffi::g_list_prepend(list, Ptr::to(ptr));
            }
        }
        list
    }
}

impl<'a, T> ToGlibContainerFromSlice<'a, *const ffi::GList> for T
where
    T: GlibPtrDefault + ToGlibPtr<'a, <T as GlibPtrDefault>::GlibType>,
{
    type Storage = (
        Option<List>,
        Vec<Stash<'a, <T as GlibPtrDefault>::GlibType, T>>,
    );

    #[inline]
    fn to_glib_none_from_slice(t: &'a [T]) -> (*const ffi::GList, Self::Storage) {
        // Delegate to the *mut implementation; only the pointer constness differs.
        let (list, stash) = ToGlibContainerFromSlice::<*mut ffi::GList>::to_glib_none_from_slice(t);
        (list as *const ffi::GList, stash)
    }

    #[inline]
    fn to_glib_container_from_slice(_t: &'a [T]) -> (*const ffi::GList, Self::Storage) {
        // A *const list cannot be handed over for the callee to free.
        unimplemented!()
    }

    #[inline]
    fn to_glib_full_from_slice(_t: &[T]) -> *const ffi::GList {
        unimplemented!()
    }
}

// Owned wrapper that frees the list structure (not its elements) on drop.
pub struct List(*mut ffi::GList);

impl Drop for List {
    fn drop(&mut self) {
        unsafe { ffi::g_list_free(self.0) }
    }
}

impl<'a, T> ToGlibContainerFromSlice<'a, *mut ffi::GSList> for &'a T
where
    T: GlibPtrDefault + ToGlibPtr<'a, <T as GlibPtrDefault>::GlibType>,
{
    type Storage = (
        Option<SList>,
        Vec<Stash<'a, <T as GlibPtrDefault>::GlibType, &'a T>>,
    );

    #[inline]
    fn to_glib_none_from_slice(t: &'a [&'a T]) -> (*mut ffi::GSList, Self::Storage) {
        let stash_vec: Vec<_> = t.iter().rev().map(ToGlibPtr::to_glib_none).collect();
        let mut list: *mut ffi::GSList = ptr::null_mut();
        unsafe {
            for stash in &stash_vec {
                list = ffi::g_slist_prepend(list, Ptr::to(stash.0));
            }
        }
        (list, (Some(SList(list)), stash_vec))
    }

    #[inline]
    fn to_glib_container_from_slice(t: &'a [&'a T]) -> (*mut ffi::GSList, Self::Storage) {
        let stash_vec: Vec<_> = t.iter().rev().map(ToGlibPtr::to_glib_none).collect();
        let mut list: *mut ffi::GSList = ptr::null_mut();
        unsafe {
            for stash in &stash_vec {
                list = ffi::g_slist_prepend(list,
Ptr::to(stash.0));
            }
        }
        (list, (None, stash_vec))
    }

    #[inline]
    fn to_glib_full_from_slice(t: &[&'a T]) -> *mut ffi::GSList {
        let mut list: *mut ffi::GSList = ptr::null_mut();
        unsafe {
            for ptr in t.iter().rev().map(ToGlibPtr::to_glib_full) {
                list = ffi::g_slist_prepend(list, Ptr::to(ptr));
            }
        }
        list
    }
}

impl<'a, T> ToGlibContainerFromSlice<'a, *const ffi::GSList> for &'a T
where
    T: GlibPtrDefault + ToGlibPtr<'a, <T as GlibPtrDefault>::GlibType>,
{
    type Storage = (
        Option<SList>,
        Vec<Stash<'a, <T as GlibPtrDefault>::GlibType, &'a T>>,
    );

    #[inline]
    fn to_glib_none_from_slice(t: &'a [&'a T]) -> (*const ffi::GSList, Self::Storage) {
        let (list, stash) = ToGlibContainerFromSlice::<*mut ffi::GSList>::to_glib_none_from_slice(t);
        (list as *const ffi::GSList, stash)
    }

    #[inline]
    fn to_glib_container_from_slice(_t: &'a [&'a T]) -> (*const ffi::GSList, Self::Storage) {
        unimplemented!()
    }

    #[inline]
    fn to_glib_full_from_slice(_t: &[&'a T]) -> *const ffi::GSList {
        unimplemented!()
    }
}

// Owned wrapper that frees the singly-linked list structure on drop.
pub struct SList(*mut ffi::GSList);

impl Drop for SList {
    fn drop(&mut self) {
        unsafe { ffi::g_slist_free(self.0) }
    }
}

// Any slice whose element type knows how to convert itself gets `ToGlibPtr`
// for free by delegating to `ToGlibContainerFromSlice`.
impl<'a, P: Ptr, T: ToGlibContainerFromSlice<'a, P>> ToGlibPtr<'a, P> for [T] {
    type Storage = T::Storage;

    #[inline]
    fn to_glib_none(&'a self) -> Stash<'a, P, Self> {
        let result = ToGlibContainerFromSlice::to_glib_none_from_slice(self);
        Stash(result.0, result.1)
    }

    #[inline]
    fn to_glib_container(&'a self) -> Stash<'a, P, Self> {
        let result = ToGlibContainerFromSlice::to_glib_container_from_slice(self);
        Stash(result.0, result.1)
    }

    #[inline]
    fn to_glib_full(&self) -> P {
        ToGlibContainerFromSlice::to_glib_full_from_slice(self)
    }
}

#[allow(clippy::implicit_hasher)]
impl<'a> ToGlibPtr<'a, *mut ffi::GHashTable> for HashMap<String, String> {
    type Storage = HashTable;

    #[inline]
    fn to_glib_none(&self) -> Stash<'a, *mut ffi::GHashTable, Self> {
        // Build a full copy and let the HashTable wrapper in the Stash unref it.
        let ptr = self.to_glib_full();
        Stash(ptr, HashTable(ptr))
    }

    #[inline]
    fn to_glib_full(&self) -> *mut ffi::GHashTable {
        unsafe {
            // Keys and values are g_strndup'd copies; the table frees them
            // with g_free when entries are removed or the table is destroyed.
            let ptr = ffi::g_hash_table_new_full(
                Some(ffi::g_str_hash),
                Some(ffi::g_str_equal),
                Some(ffi::g_free),
                Some(ffi::g_free),
            );
            for (k, v) in self {
                let k: *mut c_char = k.to_glib_full();
                let v: *mut c_char = v.to_glib_full();
                ffi::g_hash_table_insert(ptr, k as *mut _, v as *mut _);
            }
            ptr
        }
    }
}

// Owned wrapper that drops one reference on the hash table on drop.
pub struct HashTable(*mut ffi::GHashTable);

impl Drop for HashTable {
    fn drop(&mut self) {
        unsafe { ffi::g_hash_table_unref(self.0) }
    }
}

// Owned wrapper that drops one reference on the pointer array on drop.
pub struct PtrArray(*mut ffi::GPtrArray);

impl Drop for PtrArray {
    fn drop(&mut self) {
        unsafe {
            ffi::g_ptr_array_unref(self.0);
        }
    }
}

impl<'a, T> ToGlibContainerFromSlice<'a, *mut ffi::GPtrArray> for T
where
    T: GlibPtrDefault + ToGlibPtr<'a, <T as GlibPtrDefault>::GlibType>,
{
    type Storage = (
        Option<PtrArray>,
        Vec<Stash<'a, <T as GlibPtrDefault>::GlibType, T>>,
    );

    #[inline]
    fn to_glib_none_from_slice(t: &'a [T]) -> (*mut ffi::GPtrArray, Self::Storage) {
        let stash_vec: Vec<_> = t.iter().map(ToGlibPtr::to_glib_none).collect();
        let arr = unsafe { ffi::g_ptr_array_sized_new(t.len() as _) };
        unsafe {
            for stash in &stash_vec {
                ffi::g_ptr_array_add(arr, Ptr::to(stash.0));
            }
        }
        (arr, (Some(PtrArray(arr)), stash_vec))
    }

    #[inline]
    fn to_glib_container_from_slice(t: &'a [T]) -> (*mut ffi::GPtrArray, Self::Storage) {
        let stash_vec: Vec<_> = t.iter().map(ToGlibPtr::to_glib_none).collect();
        let arr = unsafe { ffi::g_ptr_array_sized_new(t.len() as _) };
        unsafe {
            for stash in &stash_vec {
                ffi::g_ptr_array_add(arr, Ptr::to(stash.0));
            }
        }
        (arr, (None, stash_vec))
    }

    #[inline]
    fn to_glib_full_from_slice(t: &[T]) -> *mut ffi::GPtrArray {
        let arr = unsafe { ffi::g_ptr_array_sized_new(t.len() as _) };
        unsafe {
            for ptr in t.iter().map(ToGlibPtr::to_glib_full) {
                ffi::g_ptr_array_add(arr, Ptr::to(ptr));
            }
        }
        arr
    }
}

impl<'a, T> ToGlibContainerFromSlice<'a, *const ffi::GPtrArray> for T
where
    T: GlibPtrDefault + ToGlibPtr<'a, <T as GlibPtrDefault>::GlibType>,
{
    type Storage = (
        Option<PtrArray>,
        Vec<Stash<'a, <T as GlibPtrDefault>::GlibType, T>>,
    );

    #[inline]
    fn
to_glib_none_from_slice(t: &'a [T]) -> (*const ffi::GPtrArray, Self::Storage) {
        // (continuation of the `fn` keyword on the previous line)
        let (arr, stash) =
            ToGlibContainerFromSlice::<*mut ffi::GPtrArray>::to_glib_none_from_slice(t);
        (arr as *const ffi::GPtrArray, stash)
    }

    #[inline]
    fn to_glib_container_from_slice(_t: &'a [T]) -> (*const ffi::GPtrArray, Self::Storage) {
        unimplemented!()
    }

    #[inline]
    fn to_glib_full_from_slice(_t: &[T]) -> *const ffi::GPtrArray {
        unimplemented!()
    }
}

/// Translate a simple type.
pub trait FromGlib<G: Copy>: Sized {
    unsafe fn from_glib(val: G) -> Self;
}

/// Translate a simple type.
#[inline]
pub unsafe fn from_glib<G: Copy, T: FromGlib<G>>(val: G) -> T {
    FromGlib::from_glib(val)
}

impl FromGlib<ffi::gboolean> for bool {
    #[inline]
    unsafe fn from_glib(val: ffi::gboolean) -> Self {
        // Any non-FALSE value counts as true, matching C semantics.
        val != ffi::GFALSE
    }
}

impl FromGlib<i32> for Ordering {
    #[inline]
    unsafe fn from_glib(val: i32) -> Self {
        // GLib comparison functions return negative/zero/positive.
        val.cmp(&0)
    }
}

/// Translate from a Glib type which can result in an undefined and/or invalid value.
pub trait TryFromGlib<G: Copy>: Sized {
    type Error;
    unsafe fn try_from_glib(val: G) -> Result<Self, Self::Error>;
}

/// Translate from a Glib type which can result in an undefined and/or invalid value.
#[inline]
pub unsafe fn try_from_glib<G: Copy, T: TryFromGlib<G>>(
    val: G,
) -> Result<T, <T as TryFromGlib<G>>::Error> {
    TryFromGlib::try_from_glib(val)
}

/// Error type for [`TryFromGlib`] when the Glib value is None.
#[derive(Debug, PartialEq, Eq)]
pub struct GlibNoneError;

impl fmt::Display for GlibNoneError {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(fmt, "glib value is None")
    }
}

impl std::error::Error for GlibNoneError {}

impl<G: Copy, T: TryFromGlib<G, Error = GlibNoneError>> FromGlib<G> for Option<T> {
    #[inline]
    unsafe fn from_glib(val: G) -> Self {
        // A "None" glib value simply maps to Option::None.
        T::try_from_glib(val).ok()
    }
}

/// Error type for [`TryFromGlib`] when the Glib value can be None or invalid.
#[derive(Debug, Eq, PartialEq)]
pub enum GlibNoneOrInvalidError<I: Error> {
    Invalid(I),
    None,
}

impl<I: Error> GlibNoneOrInvalidError<I> {
    /// Builds the `None` variant.
    pub fn none() -> Self {
        Self::None
    }

    /// Returns `true` if `self` is the `None` variant.
    pub fn is_none(&self) -> bool {
        matches!(self, Self::None)
    }

    /// Returns `true` if `self` is the `Invalid` variant.
    pub fn is_invalid(&self) -> bool {
        matches!(self, Self::Invalid(_))
    }
}

impl<I: Error> From<I> for GlibNoneOrInvalidError<I> {
    fn from(invalid: I) -> Self {
        Self::Invalid(invalid)
    }
}

impl<I: Error> fmt::Display for GlibNoneOrInvalidError<I> {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Invalid(err) => {
                write!(fmt, "glib value is invalid: ")?;
                fmt::Display::fmt(err, fmt)
            }
            Self::None => write!(fmt, "glib value is None"),
        }
    }
}

impl<I: Error> Error for GlibNoneOrInvalidError<I> {}

impl<G: Copy, I: Error, T: TryFromGlib<G, Error = GlibNoneOrInvalidError<I>>> FromGlib<G>
    for Result<Option<T>, I>
{
    #[inline]
    unsafe fn from_glib(val: G) -> Self {
        match T::try_from_glib(val) {
            Ok(value) => Ok(Some(value)),
            Err(GlibNoneOrInvalidError::None) => Ok(None),
            Err(GlibNoneOrInvalidError::Invalid(err)) => Err(err),
        }
    }
}

/// Translate from a pointer type which is annotated with `transfer none`.
/// The resulting value is referenced at least once, by the bindings.
///
/// This is suitable for floating references, which become strong references.
/// It is also suitable for acquiring non-gobject values, like `gchar*`.
///
/// <a name="safety_points"></a>
/// # Safety
///
/// The implementation of this trait should acquire a reference to the value
/// in a way appropriate to the type,
/// e.g. by increasing the reference count or copying.
/// Values obtained using this trait must be properly released on `drop()`
/// by the implementing type.
///
/// For more information, refer to module level documentation.
pub trait FromGlibPtrNone<P: Ptr>: Sized {
    /// # Safety
    ///
    /// See trait level [notes on safety](#safety_points)
    unsafe fn from_glib_none(ptr: P) -> Self;
}

/// Translate from a pointer type which is annotated with `transfer full`.
/// This transfers the ownership of the value to the Rust side.
///
/// Because ownership can only be transferred if something is already referenced,
/// this is unsuitable for floating references.
///
/// <a name="safety_points"></a>
/// # Safety
///
/// The implementation of this trait should not alter the reference count
/// or make copies of the underlying value.
/// Values obtained using this trait must be properly released on `drop()`
/// by the implementing type.
///
/// For more information, refer to module level documentation.
pub trait FromGlibPtrFull<P: Ptr>: Sized {
    /// # Safety
    ///
    /// See trait level [notes on safety](#safety_points)
    unsafe fn from_glib_full(ptr: P) -> Self;
}

/// Translate from a pointer type by borrowing, without affecting the refcount.
///
/// The purpose of this trait is to access values inside callbacks
/// without changing their reference status.
/// The obtained borrow must not be accessed outside of the scope of the callback,
/// and called procedures must not store any references to the underlying data.
/// Safe Rust code must never obtain a mutable Rust reference.
///
/// <a name="safety_points"></a>
/// # Safety
///
/// The implementation of this trait as well as the returned type
/// must satisfy the same constraints together.
/// They must not take ownership of the underlying value, copy it,
/// and should not change its reference count.
/// If it does, it must properly release obtained references.
///
/// The returned value, when dropped,
/// must leave the underlying value in the same state
/// as before from_glib_borrow was called:
/// - it must not be dropped,
/// - it must be the same type of reference, e.g. still floating.
///
/// For more information, refer to module level documentation.
pub trait FromGlibPtrBorrow<P: Ptr>: Sized {
    /// # Safety
    ///
    /// See trait level [notes on safety](#safety_points)
    unsafe fn from_glib_borrow(_ptr: P) -> Borrowed<Self> {
        unimplemented!();
    }
}

/// Translate from a pointer type, transfer: none.
///
/// See [`FromGlibPtrNone`](trait.FromGlibPtrNone.html).
#[inline]
pub unsafe fn from_glib_none<P: Ptr, T: FromGlibPtrNone<P>>(ptr: P) -> T {
    FromGlibPtrNone::from_glib_none(ptr)
}

/// Translate from a pointer type, transfer: full (assume ownership).
///
/// See [`FromGlibPtrFull`](trait.FromGlibPtrFull.html).
#[inline]
pub unsafe fn from_glib_full<P: Ptr, T: FromGlibPtrFull<P>>(ptr: P) -> T {
    FromGlibPtrFull::from_glib_full(ptr)
}

/// Translate from a pointer type, borrowing the pointer.
///
/// See [`FromGlibPtrBorrow`](trait.FromGlibPtrBorrow.html).
#[inline]
pub unsafe fn from_glib_borrow<P: Ptr, T: FromGlibPtrBorrow<P>>(ptr: P) -> Borrowed<T> {
    FromGlibPtrBorrow::from_glib_borrow(ptr)
}

// NULL pointers map to `None` for all three transfer modes.
impl<P: Ptr, T: FromGlibPtrNone<P>> FromGlibPtrNone<P> for Option<T> {
    #[inline]
    unsafe fn from_glib_none(ptr: P) -> Option<T> {
        if ptr.is_null() {
            None
        } else {
            Some(from_glib_none(ptr))
        }
    }
}

impl<P: Ptr, T: FromGlibPtrBorrow<P>> FromGlibPtrBorrow<P> for Option<T> {
    #[inline]
    unsafe fn from_glib_borrow(ptr: P) -> Borrowed<Option<T>> {
        if ptr.is_null() {
            Borrowed::new(None)
        } else {
            let val = T::from_glib_borrow(ptr);
            Borrowed::new(Some(val.into_inner()))
        }
    }
}

impl<P: Ptr, T: FromGlibPtrFull<P>> FromGlibPtrFull<P> for Option<T> {
    #[inline]
    unsafe fn from_glib_full(ptr: P) -> Option<T> {
        if ptr.is_null() {
            None
        } else {
            Some(from_glib_full(ptr))
        }
    }
}

impl FromGlibPtrNone<*const c_char> for String {
    #[inline]
    unsafe fn from_glib_none(ptr: *const c_char) -> Self {
        assert!(!ptr.is_null());
        // Lossy conversion: invalid UTF-8 becomes U+FFFD rather than panicking.
        Self::from_utf8_lossy(CStr::from_ptr(ptr).to_bytes()).into_owned()
    }
}

// TODO: Deprecate this
impl FromGlibPtrFull<*const c_char> for String {
    #[inline]
    unsafe fn
from_glib_full(ptr: *const c_char) -> Self {
        // (continuation of `unsafe fn` on the previous line)
        // Copy the string, then release the C allocation we now own.
        let res = from_glib_none(ptr);
        ffi::g_free(ptr as *mut _);
        res
    }
}

// TODO: Deprecate this
impl FromGlibPtrNone<*mut c_char> for String {
    #[inline]
    unsafe fn from_glib_none(ptr: *mut c_char) -> Self {
        assert!(!ptr.is_null());
        Self::from_utf8_lossy(CStr::from_ptr(ptr).to_bytes()).into_owned()
    }
}

// TODO: Deprecate this
impl FromGlibPtrFull<*mut c_char> for String {
    #[inline]
    unsafe fn from_glib_full(ptr: *mut c_char) -> Self {
        let res = from_glib_none(ptr);
        ffi::g_free(ptr as *mut _);
        res
    }
}

#[cfg(not(windows))]
unsafe fn c_to_path_buf(ptr: *const c_char) -> PathBuf {
    assert!(!ptr.is_null());

    // GLib paths on UNIX are always in the local encoding, which can be
    // UTF-8 or anything else really, but is always a NUL-terminated string
    // and must not contain any other NUL bytes
    OsString::from_vec(CStr::from_ptr(ptr).to_bytes().to_vec()).into()
}

#[cfg(windows)]
unsafe fn c_to_path_buf(ptr: *const c_char) -> PathBuf {
    assert!(!ptr.is_null());

    // GLib paths on Windows are always UTF-8, as such we can convert to a String
    // first and then go to a PathBuf from there. Unless there is a bug
    // in the C library, the conversion from UTF-8 can never fail so we can
    // safely panic here if that ever happens
    String::from_utf8(CStr::from_ptr(ptr).to_bytes().into())
        .expect("Invalid, non-UTF8 path")
        .into()
}

#[cfg(not(windows))]
unsafe fn c_to_os_string(ptr: *const c_char) -> OsString {
    assert!(!ptr.is_null());

    // GLib OS string (environment strings) on UNIX are always in the local encoding,
    // which can be UTF-8 or anything else really, but is always a NUL-terminated string
    // and must not contain any other NUL bytes
    OsString::from_vec(CStr::from_ptr(ptr).to_bytes().to_vec())
}

#[cfg(windows)]
unsafe fn c_to_os_string(ptr: *const c_char) -> OsString {
    assert!(!ptr.is_null());

    // GLib OS string (environment strings) on Windows are always UTF-8,
    // as such we can convert to a String
    // first and then go to a OsString from there. Unless there is a bug
    // in the C library, the conversion from UTF-8 can never fail so we can
    // safely panic here if that ever happens
    String::from_utf8(CStr::from_ptr(ptr).to_bytes().into())
        .expect("Invalid, non-UTF8 path")
        .into()
}

impl FromGlibPtrNone<*const c_char> for PathBuf {
    #[inline]
    unsafe fn from_glib_none(ptr: *const c_char) -> Self {
        assert!(!ptr.is_null());
        c_to_path_buf(ptr)
    }
}

impl FromGlibPtrFull<*const c_char> for PathBuf {
    #[inline]
    unsafe fn from_glib_full(ptr: *const c_char) -> Self {
        let res = from_glib_none(ptr);
        ffi::g_free(ptr as *mut _);
        res
    }
}

impl FromGlibPtrNone<*mut c_char> for PathBuf {
    #[inline]
    unsafe fn from_glib_none(ptr: *mut c_char) -> Self {
        assert!(!ptr.is_null());
        c_to_path_buf(ptr)
    }
}

impl FromGlibPtrFull<*mut c_char> for PathBuf {
    #[inline]
    unsafe fn from_glib_full(ptr: *mut c_char) -> Self {
        let res = from_glib_none(ptr);
        ffi::g_free(ptr as *mut _);
        res
    }
}

impl FromGlibPtrNone<*const c_char> for OsString {
    #[inline]
    unsafe fn from_glib_none(ptr: *const c_char) -> Self {
        assert!(!ptr.is_null());
        c_to_os_string(ptr)
    }
}

impl FromGlibPtrFull<*const c_char> for OsString {
    #[inline]
    unsafe fn from_glib_full(ptr: *const c_char) -> Self {
        let res = from_glib_none(ptr);
        ffi::g_free(ptr as *mut _);
        res
    }
}

impl FromGlibPtrNone<*mut c_char> for OsString {
    #[inline]
    unsafe fn from_glib_none(ptr: *mut c_char) -> Self {
        assert!(!ptr.is_null());
        c_to_os_string(ptr)
    }
}

impl FromGlibPtrFull<*mut c_char> for OsString {
    #[inline]
    unsafe fn from_glib_full(ptr: *mut c_char) -> Self {
        let res = from_glib_none(ptr);
        ffi::g_free(ptr as *mut _);
        res
    }
}

/// Translate from a container.
pub trait FromGlibContainer<T, P: Ptr>: Sized {
    /// Transfer: none.
    ///
    /// `num` is the advised number of elements.
    unsafe fn from_glib_none_num(ptr: P, num: usize) -> Self;

    /// Transfer: container.
    ///
    /// `num` is the advised number of elements.
    unsafe fn from_glib_container_num(ptr: P, num: usize) -> Self;

    /// Transfer: full.
    ///
    /// `num` is the advised number of elements.
    unsafe fn from_glib_full_num(ptr: P, num: usize) -> Self;
}

/// Translate from a container of pointers.
pub trait FromGlibPtrContainer<P: Ptr, PP: Ptr>: FromGlibContainer<P, PP> + Sized {
    /// Transfer: none.
    unsafe fn from_glib_none(ptr: PP) -> Self;

    /// Transfer: container.
    unsafe fn from_glib_container(ptr: PP) -> Self;

    /// Transfer: full.
    unsafe fn from_glib_full(ptr: PP) -> Self;
}

// Counts the elements of a NULL-terminated pointer array; NULL input counts as 0.
pub unsafe fn c_ptr_array_len<P: Ptr>(mut ptr: *const P) -> usize {
    let mut len = 0;

    if !ptr.is_null() {
        while !(*ptr).is_null() {
            len += 1;
            ptr = ptr.offset(1);
        }
    }
    len
}

pub trait FromGlibContainerAsVec<T, P: Ptr>
where
    Self: Sized,
{
    unsafe fn from_glib_none_num_as_vec(ptr: P, num: usize) -> Vec<Self>;
    unsafe fn from_glib_container_num_as_vec(ptr: P, num: usize) -> Vec<Self>;
    unsafe fn from_glib_full_num_as_vec(ptr: P, num: usize) -> Vec<Self>;
}

pub trait FromGlibPtrArrayContainerAsVec<P: Ptr, PP: Ptr>: FromGlibContainerAsVec<P, PP>
where
    Self: Sized,
{
    unsafe fn from_glib_none_as_vec(ptr: PP) -> Vec<Self>;
    unsafe fn from_glib_container_as_vec(ptr: PP) -> Vec<Self>;
    unsafe fn from_glib_full_as_vec(ptr: PP) -> Vec<Self>;
}

impl FromGlibContainerAsVec<bool, *const ffi::gboolean> for bool {
    unsafe fn from_glib_none_num_as_vec(ptr: *const ffi::gboolean, num: usize) -> Vec<Self> {
        if num == 0 || ptr.is_null() {
            return Vec::new();
        }
        let mut res = Vec::with_capacity(num);
        for i in 0..num {
            res.push(from_glib(ptr::read(ptr.add(i))));
        }
        res
    }

    unsafe fn from_glib_container_num_as_vec(_: *const ffi::gboolean, _: usize) -> Vec<Self> {
        // Can't really free a *const
        unimplemented!();
    }

    unsafe fn from_glib_full_num_as_vec(_: *const ffi::gboolean, _: usize) -> Vec<Self> {
        // Can't really free a *const
        unimplemented!();
    }
}

impl FromGlibContainerAsVec<bool, *mut ffi::gboolean> for bool {
    unsafe fn from_glib_none_num_as_vec(ptr: *mut ffi::gboolean, num: usize) -> Vec<Self> {
        FromGlibContainerAsVec::from_glib_none_num_as_vec(ptr as *const _, num)
    }

    unsafe fn from_glib_container_num_as_vec(ptr: *mut ffi::gboolean, num: usize) -> Vec<Self> {
        // Copy out, then free the container allocation we now own.
        let res = FromGlibContainerAsVec::from_glib_none_num_as_vec(ptr, num);
        ffi::g_free(ptr as *mut _);
        res
    }

    unsafe fn from_glib_full_num_as_vec(ptr: *mut ffi::gboolean, num: usize) -> Vec<Self> {
        // gboolean elements own nothing, so `full` is the same as `container`.
        FromGlibContainerAsVec::from_glib_container_num_as_vec(ptr, num)
    }
}

// Fundamental element types: read `num` elements out of the C buffer;
// `container`/`full` additionally free the (mutable) buffer with g_free.
macro_rules! impl_from_glib_container_as_vec_fundamental {
    ($name:ty) => {
        impl FromGlibContainerAsVec<$name, *const $name> for $name {
            unsafe fn from_glib_none_num_as_vec(ptr: *const $name, num: usize) -> Vec<Self> {
                if num == 0 || ptr.is_null() {
                    return Vec::new();
                }
                let mut res = Vec::with_capacity(num);
                for i in 0..num {
                    res.push(ptr::read(ptr.add(i)));
                }
                res
            }

            unsafe fn from_glib_container_num_as_vec(_: *const $name, _: usize) -> Vec<Self> {
                // Can't really free a *const
                unimplemented!();
            }

            unsafe fn from_glib_full_num_as_vec(_: *const $name, _: usize) -> Vec<Self> {
                // Can't really free a *const
                unimplemented!();
            }
        }

        impl FromGlibContainerAsVec<$name, *mut $name> for $name {
            unsafe fn from_glib_none_num_as_vec(ptr: *mut $name, num: usize) -> Vec<Self> {
                FromGlibContainerAsVec::from_glib_none_num_as_vec(ptr as *const _, num)
            }

            unsafe fn from_glib_container_num_as_vec(ptr: *mut $name, num: usize) -> Vec<Self> {
                let res = FromGlibContainerAsVec::from_glib_none_num_as_vec(ptr, num);
                ffi::g_free(ptr as *mut _);
                res
            }

            unsafe fn from_glib_full_num_as_vec(ptr: *mut $name, num: usize) -> Vec<Self> {
                FromGlibContainerAsVec::from_glib_container_num_as_vec(ptr, num)
            }
        }
    };
}

impl_from_glib_container_as_vec_fundamental!(u8);
impl_from_glib_container_as_vec_fundamental!(i8);
impl_from_glib_container_as_vec_fundamental!(u16);
impl_from_glib_container_as_vec_fundamental!(i16);
impl_from_glib_container_as_vec_fundamental!(u32);
impl_from_glib_container_as_vec_fundamental!(i32);
impl_from_glib_container_as_vec_fundamental!(u64);
impl_from_glib_container_as_vec_fundamental!(i64);
impl_from_glib_container_as_vec_fundamental!(f32);
impl_from_glib_container_as_vec_fundamental!(f64); macro_rules! impl_from_glib_container_as_vec_string { ($name:ty, $ffi_name:ty) => { impl FromGlibContainerAsVec<$ffi_name, *const $ffi_name> for $name { unsafe fn from_glib_none_num_as_vec(ptr: *const $ffi_name, num: usize) -> Vec<Self> { if num == 0 || ptr.is_null() { return Vec::new(); } let mut res = Vec::with_capacity(num); for i in 0..num { res.push(from_glib_none(ptr::read(ptr.add(i)) as $ffi_name)); } res } unsafe fn from_glib_container_num_as_vec(_: *const $ffi_name, _: usize) -> Vec<Self> { // Can't really free a *const unimplemented!(); } unsafe fn from_glib_full_num_as_vec(_: *const $ffi_name, _: usize) -> Vec<Self> { // Can't really free a *const unimplemented!(); } } impl FromGlibContainerAsVec<$ffi_name, *mut $ffi_name> for $name { unsafe fn from_glib_none_num_as_vec(ptr: *mut $ffi_name, num: usize) -> Vec<Self> { FromGlibContainerAsVec::from_glib_none_num_as_vec(ptr as *const _, num) } unsafe fn from_glib_container_num_as_vec(ptr: *mut $ffi_name, num: usize) -> Vec<Self> { let res = FromGlibContainerAsVec::from_glib_none_num_as_vec(ptr, num); ffi::g_free(ptr as *mut _); res } unsafe fn from_glib_full_num_as_vec(ptr: *mut $ffi_name, num: usize) -> Vec<Self> { if num == 0 || ptr.is_null() { return Vec::new(); } let mut res = Vec::with_capacity(num); for i in 0..num { res.push(from_glib_full(ptr::read(ptr.add(i)))); } ffi::g_free(ptr as *mut _); res } } impl FromGlibPtrArrayContainerAsVec<$ffi_name, *mut $ffi_name> for $name { unsafe fn from_glib_none_as_vec(ptr: *mut $ffi_name) -> Vec<Self> { FromGlibContainerAsVec::from_glib_none_num_as_vec(ptr, c_ptr_array_len(ptr)) } unsafe fn from_glib_container_as_vec(ptr: *mut $ffi_name) -> Vec<Self> { FromGlibContainerAsVec::from_glib_container_num_as_vec(ptr, c_ptr_array_len(ptr)) } unsafe fn from_glib_full_as_vec(ptr: *mut $ffi_name) -> Vec<Self> { FromGlibContainerAsVec::from_glib_full_num_as_vec(ptr, c_ptr_array_len(ptr)) } } impl 
FromGlibPtrArrayContainerAsVec<$ffi_name, *const $ffi_name> for $name { unsafe fn from_glib_none_as_vec(ptr: *const $ffi_name) -> Vec<Self> { FromGlibContainerAsVec::from_glib_none_num_as_vec(ptr, c_ptr_array_len(ptr)) } unsafe fn from_glib_container_as_vec(ptr: *const $ffi_name) -> Vec<Self> { FromGlibContainerAsVec::from_glib_container_num_as_vec(ptr, c_ptr_array_len(ptr)) } unsafe fn from_glib_full_as_vec(ptr: *const $ffi_name) -> Vec<Self> { FromGlibContainerAsVec::from_glib_full_num_as_vec(ptr, c_ptr_array_len(ptr)) } } }; } // TODO: Deprecate this impl_from_glib_container_as_vec_string!(String, *const c_char); impl_from_glib_container_as_vec_string!(String, *mut c_char); impl_from_glib_container_as_vec_string!(PathBuf, *const c_char); impl_from_glib_container_as_vec_string!(PathBuf, *mut c_char); impl_from_glib_container_as_vec_string!(OsString, *const c_char); impl_from_glib_container_as_vec_string!(OsString, *mut c_char); impl<P, PP: Ptr, T: FromGlibContainerAsVec<P, PP>> FromGlibContainer<P, PP> for Vec<T> { unsafe fn from_glib_none_num(ptr: PP, num: usize) -> Vec<T> { FromGlibContainerAsVec::from_glib_none_num_as_vec(ptr, num) } unsafe fn from_glib_container_num(ptr: PP, num: usize) -> Vec<T> { FromGlibContainerAsVec::from_glib_container_num_as_vec(ptr, num) } unsafe fn from_glib_full_num(ptr: PP, num: usize) -> Vec<T> { FromGlibContainerAsVec::from_glib_full_num_as_vec(ptr, num) } } impl<P: Ptr, PP: Ptr, T: FromGlibPtrArrayContainerAsVec<P, PP>> FromGlibPtrContainer<P, PP> for Vec<T> { unsafe fn from_glib_none(ptr: PP) -> Vec<T> { FromGlibPtrArrayContainerAsVec::from_glib_none_as_vec(ptr) } unsafe fn from_glib_container(ptr: PP) -> Vec<T> { FromGlibPtrArrayContainerAsVec::from_glib_container_as_vec(ptr) } unsafe fn from_glib_full(ptr: PP) -> Vec<T> { FromGlibPtrArrayContainerAsVec::from_glib_full_as_vec(ptr) } } impl<T> FromGlibContainerAsVec<<T as GlibPtrDefault>::GlibType, *mut ffi::GSList> for T where T: GlibPtrDefault + FromGlibPtrNone<<T as 
GlibPtrDefault>::GlibType> + FromGlibPtrFull<<T as GlibPtrDefault>::GlibType>, { unsafe fn from_glib_none_num_as_vec(mut ptr: *mut ffi::GSList, num: usize) -> Vec<T> { if num == 0 || ptr.is_null() { return Vec::new(); } let mut res = Vec::with_capacity(num); for _ in 0..num { if ptr.is_null() { break; } let item_ptr: <T as GlibPtrDefault>::GlibType = Ptr::from((*ptr).data); if !item_ptr.is_null() { res.push(from_glib_none(item_ptr)); } ptr = (*ptr).next; } res } unsafe fn from_glib_container_num_as_vec(ptr: *mut ffi::GSList, num: usize) -> Vec<T> { let res = FromGlibContainerAsVec::from_glib_none_num_as_vec(ptr, num); ffi::g_slist_free(ptr); res } unsafe fn from_glib_full_num_as_vec(mut ptr: *mut ffi::GSList, num: usize) -> Vec<T> { if num == 0 || ptr.is_null() { return Vec::new(); } let orig_ptr = ptr; let mut res = Vec::with_capacity(num); for _ in 0..num { if ptr.is_null() { break; } let item_ptr: <T as GlibPtrDefault>::GlibType = Ptr::from((*ptr).data); if !item_ptr.is_null() { res.push(from_glib_full(item_ptr)); } ptr = (*ptr).next; } ffi::g_slist_free(orig_ptr); res } } impl<T> FromGlibPtrArrayContainerAsVec<<T as GlibPtrDefault>::GlibType, *mut ffi::GSList> for T where T: GlibPtrDefault + FromGlibPtrNone<<T as GlibPtrDefault>::GlibType> + FromGlibPtrFull<<T as GlibPtrDefault>::GlibType>, { unsafe fn from_glib_none_as_vec(mut ptr: *mut ffi::GSList) -> Vec<T> { let mut res = Vec::new(); while !ptr.is_null() { let item_ptr: <T as GlibPtrDefault>::GlibType = Ptr::from((*ptr).data); if !item_ptr.is_null() { res.push(from_glib_none(item_ptr)); } ptr = (*ptr).next; } res } unsafe fn from_glib_container_as_vec(ptr: *mut ffi::GSList) -> Vec<T> { let res = FromGlibPtrArrayContainerAsVec::from_glib_none_as_vec(ptr); ffi::g_slist_free(ptr); res } unsafe fn from_glib_full_as_vec(mut ptr: *mut ffi::GSList) -> Vec<T> { let orig_ptr = ptr; let mut res = Vec::new(); while !ptr.is_null() { let item_ptr: <T as GlibPtrDefault>::GlibType = Ptr::from((*ptr).data); if 
!item_ptr.is_null() { res.push(from_glib_full(item_ptr)); } ptr = (*ptr).next; } ffi::g_slist_free(orig_ptr); res } } impl<T> FromGlibContainerAsVec<<T as GlibPtrDefault>::GlibType, *mut ffi::GList> for T where T: GlibPtrDefault + FromGlibPtrNone<<T as GlibPtrDefault>::GlibType> + FromGlibPtrFull<<T as GlibPtrDefault>::GlibType>, { unsafe fn from_glib_none_num_as_vec(mut ptr: *mut ffi::GList, num: usize) -> Vec<T> { if num == 0 || ptr.is_null() { return Vec::new(); } let mut res = Vec::with_capacity(num); for _ in 0..num { if ptr.is_null() { break; } let item_ptr: <T as GlibPtrDefault>::GlibType = Ptr::from((*ptr).data); if !item_ptr.is_null() { res.push(from_glib_none(item_ptr)); } ptr = (*ptr).next; } res } unsafe fn from_glib_container_num_as_vec(ptr: *mut ffi::GList, num: usize) -> Vec<T> { let res = FromGlibContainerAsVec::from_glib_none_num_as_vec(ptr, num); ffi::g_list_free(ptr); res } unsafe fn from_glib_full_num_as_vec(mut ptr: *mut ffi::GList, num: usize) -> Vec<T> { if num == 0 || ptr.is_null() { return Vec::new(); } let orig_ptr = ptr; let mut res = Vec::with_capacity(num); for _ in 0..num { if ptr.is_null() { break; } let item_ptr: <T as GlibPtrDefault>::GlibType = Ptr::from((*ptr).data); if !item_ptr.is_null() { res.push(from_glib_full(item_ptr)); } ptr = (*ptr).next; } ffi::g_list_free(orig_ptr); res } } impl<T> FromGlibPtrArrayContainerAsVec<<T as GlibPtrDefault>::GlibType, *mut ffi::GList> for T where T: GlibPtrDefault + FromGlibPtrNone<<T as GlibPtrDefault>::GlibType> + FromGlibPtrFull<<T as GlibPtrDefault>::GlibType>, { unsafe fn from_glib_none_as_vec(mut ptr: *mut ffi::GList) -> Vec<T> { let mut res = Vec::new(); while !ptr.is_null() { let item_ptr: <T as GlibPtrDefault>::GlibType = Ptr::from((*ptr).data); if !item_ptr.is_null() { res.push(from_glib_none(item_ptr)); } ptr = (*ptr).next; } res } unsafe fn from_glib_container_as_vec(ptr: *mut ffi::GList) -> Vec<T> { let res = FromGlibPtrArrayContainerAsVec::from_glib_none_as_vec(ptr); 
ffi::g_list_free(ptr); res } unsafe fn from_glib_full_as_vec(mut ptr: *mut ffi::GList) -> Vec<T> { let orig_ptr = ptr; let mut res = Vec::new(); while !ptr.is_null() { let item_ptr: <T as GlibPtrDefault>::GlibType = Ptr::from((*ptr).data); if !item_ptr.is_null() { res.push(from_glib_full(item_ptr)); } ptr = (*ptr).next; } ffi::g_list_free(orig_ptr); res } } impl<T> FromGlibContainerAsVec<<T as GlibPtrDefault>::GlibType, *const ffi::GList> for T where T: GlibPtrDefault + FromGlibPtrNone<<T as GlibPtrDefault>::GlibType> + FromGlibPtrFull<<T as GlibPtrDefault>::GlibType>, { unsafe fn from_glib_none_num_as_vec(ptr: *const ffi::GList, num: usize) -> Vec<T> { FromGlibContainerAsVec::from_glib_none_num_as_vec(mut_override(ptr), num) } unsafe fn from_glib_container_num_as_vec(_: *const ffi::GList, _: usize) -> Vec<T> { // Can't really free a *const unimplemented!() } unsafe fn from_glib_full_num_as_vec(_: *const ffi::GList, _: usize) -> Vec<T> { // Can't really free a *const unimplemented!() } } impl<T> FromGlibPtrArrayContainerAsVec<<T as GlibPtrDefault>::GlibType, *const ffi::GList> for T where T: GlibPtrDefault + FromGlibPtrNone<<T as GlibPtrDefault>::GlibType> + FromGlibPtrFull<<T as GlibPtrDefault>::GlibType>, { unsafe fn from_glib_none_as_vec(ptr: *const ffi::GList) -> Vec<T> { FromGlibPtrArrayContainerAsVec::from_glib_none_as_vec(mut_override(ptr)) } unsafe fn from_glib_container_as_vec(_: *const ffi::GList) -> Vec<T> { // Can't really free a *const unimplemented!() } unsafe fn from_glib_full_as_vec(_: *const ffi::GList) -> Vec<T> { // Can't really free a *const unimplemented!() } } impl<T> FromGlibContainerAsVec<<T as GlibPtrDefault>::GlibType, *const ffi::GSList> for T where T: GlibPtrDefault + FromGlibPtrNone<<T as GlibPtrDefault>::GlibType> + FromGlibPtrFull<<T as GlibPtrDefault>::GlibType>, { unsafe fn from_glib_none_num_as_vec(ptr: *const ffi::GSList, num: usize) -> Vec<T> { FromGlibContainerAsVec::from_glib_none_num_as_vec(mut_override(ptr), num) } unsafe fn 
from_glib_container_num_as_vec(_: *const ffi::GSList, _: usize) -> Vec<T> { // Can't really free a *const unimplemented!() } unsafe fn from_glib_full_num_as_vec(_: *const ffi::GSList, _: usize) -> Vec<T> { // Can't really free a *const unimplemented!() } } impl<T> FromGlibPtrArrayContainerAsVec<<T as GlibPtrDefault>::GlibType, *const ffi::GSList> for T where T: GlibPtrDefault + FromGlibPtrNone<<T as GlibPtrDefault>::GlibType> + FromGlibPtrFull<<T as GlibPtrDefault>::GlibType>, { unsafe fn from_glib_none_as_vec(ptr: *const ffi::GSList) -> Vec<T> { FromGlibPtrArrayContainerAsVec::from_glib_none_as_vec(mut_override(ptr)) } unsafe fn from_glib_container_as_vec(_: *const ffi::GSList) -> Vec<T> { // Can't really free a *const unimplemented!() } unsafe fn from_glib_full_as_vec(_: *const ffi::GSList) -> Vec<T> { // Can't really free a *const unimplemented!() } } #[allow(clippy::implicit_hasher)] impl FromGlibContainer<*const c_char, *mut ffi::GHashTable> for HashMap<String, String> { unsafe fn from_glib_none_num(ptr: *mut ffi::GHashTable, _: usize) -> Self { FromGlibPtrContainer::from_glib_none(ptr) } unsafe fn from_glib_container_num(ptr: *mut ffi::GHashTable, _: usize) -> Self { FromGlibPtrContainer::from_glib_full(ptr) } unsafe fn from_glib_full_num(ptr: *mut ffi::GHashTable, _: usize) -> Self { FromGlibPtrContainer::from_glib_full(ptr) } } #[allow(clippy::implicit_hasher)] impl FromGlibPtrContainer<*const c_char, *mut ffi::GHashTable> for HashMap<String, String> { unsafe fn from_glib_none(ptr: *mut ffi::GHashTable) -> Self { unsafe extern "C" fn read_string_hash_table( key: ffi::gpointer, value: ffi::gpointer, hash_map: ffi::gpointer, ) { let key: String = from_glib_none(key as *const c_char); let value: String = from_glib_none(value as *const c_char); let hash_map: &mut HashMap<String, String> = &mut *(hash_map as *mut HashMap<String, String>); hash_map.insert(key, value); } let mut map = HashMap::new(); ffi::g_hash_table_foreach( ptr, Some(read_string_hash_table), 
&mut map as *mut HashMap<String, String> as *mut _, ); map } unsafe fn from_glib_container(ptr: *mut ffi::GHashTable) -> Self { FromGlibPtrContainer::from_glib_full(ptr) } unsafe fn from_glib_full(ptr: *mut ffi::GHashTable) -> Self { let map = FromGlibPtrContainer::from_glib_none(ptr); ffi::g_hash_table_unref(ptr); map } } impl<T> FromGlibContainerAsVec<<T as GlibPtrDefault>::GlibType, *mut ffi::GPtrArray> for T where T: GlibPtrDefault + FromGlibPtrNone<<T as GlibPtrDefault>::GlibType> + FromGlibPtrFull<<T as GlibPtrDefault>::GlibType>, { unsafe fn from_glib_none_num_as_vec(ptr: *mut ffi::GPtrArray, num: usize) -> Vec<T> { if num == 0 || ptr.is_null() { return Vec::new(); } let pdata = (*ptr).pdata; assert!((*ptr).len as usize >= num); let mut res = Vec::with_capacity(num); for i in 0..num { let item_ptr: <T as GlibPtrDefault>::GlibType = Ptr::from(ptr::read(pdata.add(i))); if !item_ptr.is_null() { res.push(from_glib_none(item_ptr)); } } res } unsafe fn from_glib_container_num_as_vec(ptr: *mut ffi::GPtrArray, num: usize) -> Vec<T> { let res = FromGlibContainer::from_glib_none_num(ptr, num); if !ptr.is_null() { ffi::g_ptr_array_unref(ptr); } res } unsafe fn from_glib_full_num_as_vec(ptr: *mut ffi::GPtrArray, num: usize) -> Vec<T> { if num == 0 || ptr.is_null() { return Vec::new(); } let pdata = (*ptr).pdata; assert!((*ptr).len as usize >= num); let mut res = Vec::with_capacity(num); for i in 0..num { let item_ptr: <T as GlibPtrDefault>::GlibType = Ptr::from(ptr::read(pdata.add(i))); if !item_ptr.is_null() { res.push(from_glib_none(item_ptr)); } } ffi::g_ptr_array_unref(ptr); res } } impl<T> FromGlibPtrArrayContainerAsVec<<T as GlibPtrDefault>::GlibType, *mut ffi::GPtrArray> for T where T: GlibPtrDefault + FromGlibPtrNone<<T as GlibPtrDefault>::GlibType> + FromGlibPtrFull<<T as GlibPtrDefault>::GlibType>, { unsafe fn from_glib_none_as_vec(ptr: *mut ffi::GPtrArray) -> Vec<T> { let num = (*ptr).len as usize; FromGlibContainer::from_glib_none_num(ptr, num) } unsafe fn 
from_glib_container_as_vec(ptr: *mut ffi::GPtrArray) -> Vec<T> { let num = (*ptr).len as usize; FromGlibContainer::from_glib_container_num(ptr, num) } unsafe fn from_glib_full_as_vec(ptr: *mut ffi::GPtrArray) -> Vec<T> { let num = (*ptr).len as usize; FromGlibContainer::from_glib_full_num(ptr, num) } } impl<T> FromGlibContainerAsVec<<T as GlibPtrDefault>::GlibType, *const ffi::GPtrArray> for T where T: GlibPtrDefault + FromGlibPtrNone<<T as GlibPtrDefault>::GlibType> + FromGlibPtrFull<<T as GlibPtrDefault>::GlibType>, { unsafe fn from_glib_none_num_as_vec(ptr: *const ffi::GPtrArray, num: usize) -> Vec<T> { FromGlibContainerAsVec::from_glib_none_num_as_vec(mut_override(ptr), num) } unsafe fn from_glib_container_num_as_vec(_: *const ffi::GPtrArray, _: usize) -> Vec<T> { // Can't really free a *const unimplemented!() } unsafe fn from_glib_full_num_as_vec(_: *const ffi::GPtrArray, _: usize) -> Vec<T> { // Can't really free a *const unimplemented!() } } impl<T> FromGlibPtrArrayContainerAsVec<<T as GlibPtrDefault>::GlibType, *const ffi::GPtrArray> for T where T: GlibPtrDefault + FromGlibPtrNone<<T as GlibPtrDefault>::GlibType> + FromGlibPtrFull<<T as GlibPtrDefault>::GlibType>, { unsafe fn from_glib_none_as_vec(ptr: *const ffi::GPtrArray) -> Vec<T> { FromGlibPtrArrayContainerAsVec::from_glib_none_as_vec(mut_override(ptr)) } unsafe fn from_glib_container_as_vec(_: *const ffi::GPtrArray) -> Vec<T> { // Can't really free a *const unimplemented!() } unsafe fn from_glib_full_as_vec(_: *const ffi::GPtrArray) -> Vec<T> { // Can't really free a *const unimplemented!() } } #[cfg(test)] mod tests { use std::fs; use tempfile::tempdir; use super::*; use crate::GString; use std::collections::HashMap; #[test] fn boolean() { assert_eq!(true.into_glib(), ffi::GTRUE); assert_eq!(false.into_glib(), ffi::GFALSE); assert_eq!(true, unsafe { bool::from_glib(ffi::GTRUE) }); assert_eq!(false, unsafe { bool::from_glib(ffi::GFALSE) }); assert_eq!(true, unsafe { bool::from_glib(42) }); } #[test] 
fn ordering() { assert_eq!(Ordering::Less.into_glib(), -1); assert_eq!(Ordering::Equal.into_glib(), 0); assert_eq!(Ordering::Greater.into_glib(), 1); assert_eq!(Ordering::Less, unsafe { Ordering::from_glib(-42) }); assert_eq!(Ordering::Less, unsafe { Ordering::from_glib(-1) }); assert_eq!(Ordering::Equal, unsafe { Ordering::from_glib(0) }); assert_eq!(Ordering::Greater, unsafe { Ordering::from_glib(1) }); assert_eq!(Ordering::Greater, unsafe { Ordering::from_glib(42) }); } #[test] fn string() { let s = "ABC"; let owned = "ABC".to_string(); let cstring = CString::new("ABC").unwrap(); let stash = s.to_glib_none(); assert_eq!(unsafe { CStr::from_ptr(stash.0) }, cstring.as_c_str()); let stash = owned.to_glib_none(); assert_eq!(unsafe { CStr::from_ptr(stash.0) }, cstring.as_c_str()); let ptr: *mut c_char = s.to_glib_full(); assert_eq!(unsafe { CStr::from_ptr(ptr) }, cstring.as_c_str()); unsafe { ffi::g_free(ptr as *mut _); } let ptr: *mut c_char = owned.to_glib_full(); assert_eq!(unsafe { CStr::from_ptr(ptr) }, cstring.as_c_str()); assert_eq!(s, unsafe { String::from_glib_none(ptr) }); assert_eq!(owned, unsafe { String::from_glib_full(ptr) }); } #[test] fn string_hash_map() { let mut map = HashMap::new(); map.insert("A".into(), "1".into()); map.insert("B".into(), "2".into()); map.insert("C".into(), "3".into()); let ptr: *mut ffi::GHashTable = map.to_glib_full(); let map = unsafe { HashMap::from_glib_full(ptr) }; assert_eq!(map.get("A"), Some(&"1".into())); assert_eq!(map.get("B"), Some(&"2".into())); assert_eq!(map.get("C"), Some(&"3".into())); } #[test] fn string_array() { let v = vec!["A".to_string(), "B".to_string(), "C".to_string()]; let stash = v.to_glib_none(); let ptr: *mut *mut c_char = stash.0; let ptr_copy = unsafe { ffi::g_strdupv(ptr) }; let actual: Vec<String> = unsafe { FromGlibPtrContainer::from_glib_full(ptr_copy) }; assert_eq!(v, actual); } #[test] fn gstring_array() { let v = vec!["A".to_string(), "B".to_string(), "C".to_string()]; let stash = 
v.to_glib_none(); let ptr: *mut *mut c_char = stash.0; let ptr_copy = unsafe { ffi::g_strdupv(ptr) }; let actual: Vec<GString> = unsafe { FromGlibPtrContainer::from_glib_full(ptr_copy) }; assert_eq!(v, actual); } #[test] fn ptr_array() { let strings = &["A", "B", "C"]; let (ptr, _stash) = ToGlibContainerFromSlice::<*mut ffi::GPtrArray>::to_glib_none_from_slice(strings); let v: Vec<GString> = unsafe { FromGlibPtrArrayContainerAsVec::from_glib_none_as_vec(ptr) }; assert_eq!(&v, strings); } #[test] #[cfg(not(target_os = "macos"))] fn test_paths() { let tmp_dir = tempdir().unwrap(); // Test if passing paths to GLib and getting them back // gives us useful results let dir_1 = tmp_dir.path().join("abcd"); fs::create_dir(&dir_1).unwrap(); assert_eq!(crate::path_get_basename(&dir_1), Path::new("abcd")); assert_eq!( crate::path_get_basename(dir_1.canonicalize().unwrap()), Path::new("abcd") ); // This currently fails on Windows because C:\\Users\\runneradmin // gets shortened to C:\\Users\\RUNNER~1 #[cfg(not(windows))] assert_eq!( crate::path_get_dirname(dir_1.canonicalize().unwrap()), tmp_dir.path() ); assert!(crate::file_test( &dir_1, crate::FileTest::EXISTS | crate::FileTest::IS_DIR )); assert!(crate::file_test( &dir_1.canonicalize().unwrap(), crate::FileTest::EXISTS | crate::FileTest::IS_DIR )); // And test with some non-ASCII characters let dir_2 = tmp_dir.as_ref().join("øäöü"); fs::create_dir(&dir_2).unwrap(); assert_eq!(crate::path_get_basename(&dir_2), Path::new("øäöü")); assert_eq!( crate::path_get_basename(dir_2.canonicalize().unwrap()), Path::new("øäöü") ); // This currently fails on Windows because C:\\Users\\runneradmin // gets shortened to C:\\Users\\RUNNER~1 #[cfg(not(windows))] assert_eq!( crate::path_get_dirname(dir_2.canonicalize().unwrap()), tmp_dir.path() ); assert!(crate::file_test( &dir_2, crate::FileTest::EXISTS | crate::FileTest::IS_DIR )); assert!(crate::file_test( &dir_2.canonicalize().unwrap(), crate::FileTest::EXISTS | crate::FileTest::IS_DIR )); 
} #[test] #[cfg(target_os = "macos")] fn test_paths() { let t_dir = tempdir().unwrap(); let tmp_dir = t_dir.path().canonicalize().unwrap(); // Test if passing paths to GLib and getting them back // gives us useful results let dir_1 = tmp_dir.join("abcd"); fs::create_dir(&dir_1).unwrap(); assert_eq!(crate::path_get_basename(&dir_1), Path::from("abcd")); assert_eq!( crate::path_get_basename(dir_1.canonicalize().unwrap()), Path::from("abcd") ); assert_eq!( crate::path_get_dirname(dir_1.canonicalize().unwrap()), tmp_dir ); assert!(crate::file_test( &dir_1, crate::FileTest::EXISTS | crate::FileTest::IS_DIR )); assert!(crate::file_test( &dir_1.canonicalize().unwrap(), crate::FileTest::EXISTS | crate::FileTest::IS_DIR )); } #[test] fn none_value() { const CLONG_NONE: libc::c_long = -1; #[derive(Debug, PartialEq, Eq)] struct SpecialU32(u32); impl IntoGlib for SpecialU32 { type GlibType = libc::c_uint; fn into_glib(self) -> libc::c_uint { self.0 as libc::c_uint } } impl OptionIntoGlib for SpecialU32 { const GLIB_NONE: Self::GlibType = CLONG_NONE as libc::c_uint; } assert_eq!(SpecialU32(0).into_glib(), 0); assert_eq!(SpecialU32(42).into_glib(), 42); assert_eq!(Some(SpecialU32(0)).into_glib(), 0); assert_eq!(Some(SpecialU32(42)).into_glib(), 42); assert_eq!( Option::None::<SpecialU32>.into_glib(), SpecialU32::GLIB_NONE ); impl TryFromGlib<libc::c_uint> for SpecialU32 { type Error = GlibNoneError; unsafe fn try_from_glib(val: libc::c_uint) -> Result<Self, GlibNoneError> { if val == SpecialU32::GLIB_NONE { return Err(GlibNoneError); } Ok(SpecialU32(val as u32)) } } assert_eq!(unsafe { SpecialU32::try_from_glib(0) }, Ok(SpecialU32(0))); assert_eq!(unsafe { SpecialU32::try_from_glib(42) }, Ok(SpecialU32(42))); assert_eq!( unsafe { SpecialU32::try_from_glib(SpecialU32::GLIB_NONE) }, Err(GlibNoneError) ); assert_eq!( unsafe { Option::<SpecialU32>::from_glib(0) }, Some(SpecialU32(0)) ); assert_eq!( unsafe { Option::<SpecialU32>::from_glib(42) }, Some(SpecialU32(42)) ); 
assert!(unsafe { Option::<SpecialU32>::from_glib(SpecialU32::GLIB_NONE) }.is_none()); } #[test] fn invalid_value() { use std::convert::TryFrom; use std::num::TryFromIntError; #[derive(Debug, PartialEq, Eq)] struct U32(u32); impl TryFromGlib<libc::c_long> for U32 { type Error = TryFromIntError; unsafe fn try_from_glib(val: libc::c_long) -> Result<Self, TryFromIntError> { Ok(U32(u32::try_from(val)?)) } } assert_eq!(unsafe { U32::try_from_glib(0) }, Ok(U32(0))); assert_eq!(unsafe { U32::try_from_glib(42) }, Ok(U32(42))); assert!(unsafe { U32::try_from_glib(-1) }.is_err()); assert!(unsafe { U32::try_from_glib(-42) }.is_err()); } #[test] fn none_or_invalid_value() { use std::convert::TryFrom; use std::num::TryFromIntError; #[derive(Debug, PartialEq, Eq)] struct SpecialU32(u32); impl IntoGlib for SpecialU32 { type GlibType = libc::c_long; fn into_glib(self) -> libc::c_long { self.0 as libc::c_long } } impl OptionIntoGlib for SpecialU32 { const GLIB_NONE: Self::GlibType = -1; } assert_eq!(SpecialU32(0).into_glib(), 0); assert_eq!(SpecialU32(42).into_glib(), 42); assert_eq!(Some(SpecialU32(42)).into_glib(), 42); assert_eq!( Option::None::<SpecialU32>.into_glib(), SpecialU32::GLIB_NONE ); impl TryFromGlib<libc::c_long> for SpecialU32 { type Error = GlibNoneOrInvalidError<TryFromIntError>; unsafe fn try_from_glib( val: libc::c_long, ) -> Result<Self, GlibNoneOrInvalidError<TryFromIntError>> { if val == SpecialU32::GLIB_NONE { return Err(GlibNoneOrInvalidError::None); } Ok(SpecialU32(u32::try_from(val)?)) } } assert_eq!(unsafe { SpecialU32::try_from_glib(0) }, Ok(SpecialU32(0))); assert_eq!(unsafe { SpecialU32::try_from_glib(42) }, Ok(SpecialU32(42))); assert!(unsafe { SpecialU32::try_from_glib(SpecialU32::GLIB_NONE) } .unwrap_err() .is_none()); assert!(unsafe { SpecialU32::try_from_glib(-42) } .unwrap_err() .is_invalid()); assert_eq!( unsafe { Result::<Option<SpecialU32>, _>::from_glib(0) }, Ok(Some(SpecialU32(0))) ); assert_eq!( unsafe { Result::<Option<SpecialU32>, 
_>::from_glib(42) }, Ok(Some(SpecialU32(42))) ); assert_eq!( unsafe { Result::<Option<SpecialU32>, _>::from_glib(SpecialU32::GLIB_NONE) }, Ok(None) ); assert!(unsafe { Result::<Option<SpecialU32>, _>::from_glib(-42) }.is_err()); } }
32.194295
141
0.593651
2f3514d20945ab84f764956f33a0f5162901d214
6,417
//! Koleksi macro library internal macro_rules! implement_crypto_wrapper { ( $(#[$attr:meta])* struct $name:ident, $size:expr) => { implement_crypto_wrapper!( $(#[$attr])* struct $name, $crate::crypto::ds::$name, $name, $size ); }; ( $(#[$attr:meta])* struct $name:ident, $source:path, $source_name:ident, $size:expr) => { /// Crypto object wrapper #[derive(Clone)] $(#[$attr])* pub struct $name([u8; $size]); impl $name { #[doc(hidden)] pub fn new(bytes_array: [u8; $size]) -> Self { let a = { use $source; $source_name::from_bytes(&bytes_array).expect("from bytes") }; $name(a.to_bytes()) } /// Creates new instance from bytes slice. #[inline] pub fn from_slice(bytes: &[u8]) -> Option<Self> { // kode ini kelihatan aneh, tapi hanya dengan cara inilah // kode bagian ini bisa dicompile di Rust stable. // kemungkinan kalau nanti Rust stable sudah bisa menghandle // macro type path agar bisa langsung digunakan untuk memanggil // fungsi statis-nya kode ini akan dirubah. let a = { use $source; $source_name::from_bytes(bytes) }; a.map(|a| $name(a.to_bytes())).ok() } /// Convert to hex string #[inline] pub fn to_hex(&self) -> String { hex::encode(&self.0[..]) } } impl ::std::fmt::Debug for $name { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { write!(f, "{}({}..)", stringify!($name), &self.to_hex()[..8]) } } impl ::std::fmt::Display for $name { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { write!(f, "{}({}..)", stringify!($name), &self.to_hex()[..8]) } } impl ::hex::FromHex for $name { type Error = ::hex::FromHexError; fn from_hex<T: AsRef<[u8]>>(v: T) -> Result<Self, Self::Error> { let bytes = Vec::<u8>::from_hex(v)?; if let Some(self_value) = Self::from_slice(bytes.as_ref()) { Ok(self_value) } else { Err(::hex::FromHexError::InvalidStringLength) } } } impl ::std::str::FromStr for $name { type Err = ::hex::FromHexError; fn from_str(s: &str) -> Result<Self, Self::Err> { use hex::FromHex; $name::from_hex(s) } } }; } macro_rules! 
impl_event_listener { ($name:ident) => { impl $name { pub fn new() -> Arc<Self> { Arc::new(Self { db: db::clone() }) } fn db(&self) -> db::DbConn { self.db.get().expect(concat!( "Cannot get db connection from poll in ", stringify!($name) )) } } }; } macro_rules! impl_dao { ( $(#[$meta:meta])* $name:ident) => { $(#[$meta])* #[derive(Dao)] pub struct $name<'a> { db: &'a PgConnection, } }; ( $(#[$meta:meta])* $name:ident, $id_type:literal) => { $(#[$meta])* #[derive(Dao)] #[id_type = $id_type] pub struct $name<'a> { db: &'a PgConnection, } }; } /// Macro to generate Dao implementation. /// /// Example use of this macro: /// /// ``` /// impl_daos!( /// /// DAO for Comment /// CommentDao /// ); /// ``` /// /// For custom ID type /// /// ``` /// impl_daos!( /// /// DAO for Role /// (RoleDao, "i32"), /// ); /// ``` macro_rules! impl_daos { ( $( $(#[$meta:meta])* $name:ident, )* ) => { $( impl_dao!( $(#[$meta])* $name ); )* }; ( $( $(#[$meta:meta])* $name:ident ),* ) => { impl_daos!( $( $(#[$meta])* $name, )* ); }; ( $( $(#[$meta:meta])* ( $name:ident, $id_type:literal) ,)* ) => { $( impl_dao!( $(#[$meta])* $name, $id_type ); )* }; ( $( $(#[$meta:meta])* ( $name:ident, $id_type:literal)),* ) => { impl_daos!( $( $(#[$meta])* ( $name, $id_type), )* ); }; } macro_rules! meta_value_i32 { ($s:ident, $key:literal) => { $s.meta .iter() .find(|a| a.starts_with(concat!($key, ":"))) .and_then(|a| a.splitn(2, ':').last()) .and_then(|a| a.parse::<i32>().ok()) .unwrap_or(0) }; } macro_rules! meta_value_i64 { ($s:ident, $key:literal) => { $s.meta .iter() .find(|a| a.starts_with(concat!($key, "="))) .and_then(|a| a.splitn(2, '=').last()) .and_then(|a| a.parse::<i64>().ok()) }; } macro_rules! 
meta_value_str { ($s:ident, $key:literal) => { // $s.meta // .iter() // .find(|a| a.starts_with(concat!($key, ":"))) // .and_then(|a| a.splitn(2, ':').last()) // .unwrap_or("") meta_value_str!($s, $key, ":") }; ($s:ident, $key:literal, $ass:literal) => { $s.meta .iter() .find(|a| a.starts_with(concat!($key, $ass))) .and_then(|a| a.splitn(2, $ass).last()) .unwrap_or("") }; } macro_rules! value_str_opt { ($s:ident, $key:literal) => { $s.iter() .find(|a| a.starts_with(concat!($key, ":"))) .and_then(|a| a.splitn(2, ":").last()) }; } macro_rules! list_has_flag { ($s:expr, $key:literal) => { $s.iter() .find(|a| a.as_str() == concat!(":", $key, ":")) .is_some() }; } macro_rules! param_fail { ($msg:expr) => { return Err(crate::error::Error::InvalidParameter(format!("{}", $msg))); }; } macro_rules! fail { ($msg:expr) => { return Err(crate::error::Error::InternalError(format_err!("{}", $msg))); }; } macro_rules! param_err { ($msg:expr) => { crate::error::Error::InvalidParameter(format!("{}", $msg)) }; }
27.306383
104
0.433224
481963006241c94c914d4ee114b781dc06d17863
3,041
#![cfg(feature = "macros")] //! Test slf: PyRef/PyMutRef<Self>(especially, slf.into::<Py>) works use pyo3::prelude::*; use pyo3::types::{PyBytes, PyString}; use pyo3::PyCell; use std::collections::HashMap; mod common; /// Assumes it's a file reader or so. /// Inspired by https://github.com/jothan/cordoba, thanks. #[pyclass] #[derive(Clone, Debug)] struct Reader { inner: HashMap<u8, String>, } #[pymethods] impl Reader { fn clone_ref(slf: &PyCell<Self>) -> &PyCell<Self> { slf } fn clone_ref_with_py<'py>(slf: &'py PyCell<Self>, _py: Python<'py>) -> &'py PyCell<Self> { slf } fn get_iter(slf: &PyCell<Self>, keys: Py<PyBytes>) -> Iter { Iter { reader: slf.into(), keys, idx: 0, } } fn get_iter_and_reset( mut slf: PyRefMut<Self>, keys: Py<PyBytes>, py: Python, ) -> PyResult<Iter> { let reader = Py::new(py, slf.clone())?; slf.inner.clear(); Ok(Iter { reader, keys, idx: 0, }) } } #[pyclass] #[derive(Debug)] struct Iter { reader: Py<Reader>, keys: Py<PyBytes>, idx: usize, } #[pymethods] impl Iter { #[allow(clippy::self_named_constructors)] fn __iter__(slf: PyRef<Self>) -> PyRef<Self> { slf } fn __next__(mut slf: PyRefMut<Self>) -> PyResult<Option<PyObject>> { let bytes = slf.keys.as_ref(slf.py()).as_bytes(); match bytes.get(slf.idx) { Some(&b) => { slf.idx += 1; let py = slf.py(); let reader = slf.reader.as_ref(py); let reader_ref = reader.try_borrow()?; let res = reader_ref .inner .get(&b) .map(|s| PyString::new(py, s).into()); Ok(res) } None => Ok(None), } } } fn reader() -> Reader { let reader = [(1, "a"), (2, "b"), (3, "c"), (4, "d"), (5, "e")]; Reader { inner: reader.iter().map(|(k, v)| (*k, (*v).to_string())).collect(), } } #[test] fn test_nested_iter() { let gil = Python::acquire_gil(); let py = gil.python(); let reader: PyObject = reader().into_py(py); py_assert!( py, reader, "list(reader.get_iter(bytes([3, 5, 2]))) == ['c', 'e', 'b']" ); } #[test] fn test_clone_ref() { let gil = Python::acquire_gil(); let py = gil.python(); let reader: PyObject = 
reader().into_py(py); py_assert!(py, reader, "reader == reader.clone_ref()"); py_assert!(py, reader, "reader == reader.clone_ref_with_py()"); } #[test] fn test_nested_iter_reset() { let gil = Python::acquire_gil(); let py = gil.python(); let reader = PyCell::new(py, reader()).unwrap(); py_assert!( py, reader, "list(reader.get_iter_and_reset(bytes([3, 5, 2]))) == ['c', 'e', 'b']" ); let reader_ref = reader.borrow(); assert!(reader_ref.inner.is_empty()); }
24.524194
94
0.515949
71e4db64385d1014e61abecf841be6b081794769
3,097
use anyhow::{anyhow, Result}; use code_writer::{write_bootstrap, write_code}; use parser::parse; use std::io::{BufWriter, Write}; use std::{env, ffi::OsStr}; use std::{fs::File, path::Path}; mod arithmetic_command; mod code_writer; mod command; mod parser; mod segment; fn main() -> Result<()> { let args: Vec<String> = env::args().collect(); if args.len() < 2 { return Err(anyhow!("please enter file name")); } let path_dir = Path::new(&args[1]); if path_dir.is_dir() { let file_name = path_dir.file_stem().unwrap().to_str().unwrap(); let new_file_path = path_dir.join(Path::new(file_name).with_extension("asm")); let new_file = File::create(new_file_path)?; let mut writer = BufWriter::new(new_file); let file_names = path_dir .read_dir()? .filter_map(|entry| { entry.ok().and_then(|e| { e.path() .file_name() .and_then(|n| n.to_str().map(|s| s.to_string())) }) }) .collect::<Vec<String>>(); if file_names.contains(&"Sys.vm".to_string()) { writer.write(write_bootstrap().as_bytes())?; writer.write(b"\n\n")?; } let dirs = path_dir.read_dir()?; for dir in dirs { let dir = dir?; if let Some(extension) = dir.path().extension() { if extension == OsStr::new("vm") { let commands = parse(&dir.path())?; let mut id: i32 = 0; for command in commands { writer.write( write_code( dir.path().file_stem().unwrap().to_str().unwrap(), &command, &id, ) .as_bytes(), )?; writer.write(b"\n\n")?; id += 1; } } } } } else { if let Some(extension) = path_dir.extension() { if extension == OsStr::new("vm") { let commands = parse(&path_dir.to_path_buf())?; let new_file_path = Path::new(path_dir.parent().unwrap()) .join(Path::new(path_dir.file_stem().unwrap()).with_extension("asm")); let new_file = File::create(new_file_path)?; let mut writer = BufWriter::new(new_file); let mut id: i32 = 0; for command in commands { writer.write( write_code( &path_dir.file_stem().unwrap().to_str().unwrap(), &command, &id, ) .as_bytes(), )?; writer.write(b"\n\n")?; id += 1; } } } } Ok(()) }
32.946809
90
0.426219
69c63c7ee28f13bf5387e3840c7ca7203b866940
25,700
use std::{collections::HashMap, hash::Hash, marker::PhantomData, time::Duration}; use amethyst_assets::{AssetStorage, Handle}; use amethyst_core::ecs::{ CommandBuffer, Entity, EntityStore, IntoQuery, ParallelRunnable, SubWorld, System, SystemBuilder, TryRead, Write, }; use derivative::Derivative; use fnv::FnvHashMap; use log::{debug, error}; use minterpolate::InterpolationPrimitive; #[cfg(feature = "profiler")] use thread_profiler::profile_scope; use crate::resources::{ Animation, AnimationCommand, AnimationControl, AnimationControlSet, AnimationHierarchy, AnimationSampling, ControlState, DeferStartRelation, RestState, Sampler, SamplerControl, SamplerControlSet, StepDirection, }; /// System for setting up animations, should run before `SamplerInterpolationSystem`. /// /// Will process all active `AnimationControl` + `AnimationHierarchy`, and do processing of the /// animations they describe. If an animation only targets a single node/entity, there is no need /// for `AnimationHierarchy`. 
/// /// ### Type parameters: /// /// - `I`: identifier type for running animations, only one animation can be run at the same time /// with the same id /// - `T`: the component type that the animation should be applied to #[derive(Derivative)] #[derivative(Default)] pub(crate) struct AnimationControlSystem< I: PartialEq + Eq + Hash + Copy + Send + Sync + 'static, T: AnimationSampling + Clone, > { _marker: PhantomData<T>, _marker2: PhantomData<I>, next_id: u64, } impl<I, T> System for AnimationControlSystem<I, T> where I: std::fmt::Debug + PartialEq + Eq + Hash + Copy + Send + Sync + 'static, T: AnimationSampling + Clone + std::fmt::Debug, { fn build(mut self) -> Box<dyn ParallelRunnable> { self.next_id = 1; let mut remove_sets = Vec::default(); let mut remove_ids = Vec::default(); let mut state_set = FnvHashMap::default(); let mut deferred_start = Vec::default(); Box::new( SystemBuilder::new("AnimationControlSystem") .read_resource::<AssetStorage<Animation<T>>>() .read_resource::<AssetStorage<Sampler<T::Primitive>>>() .read_component::<T>() .write_component::<SamplerControlSet<T>>() .write_component::<RestState<T>>() .with_query(<(Entity, Write<AnimationControlSet<I, T>>, TryRead<AnimationHierarchy<T>>)>::query()) .build(move |mut buffer, world, (animation_storage, sampler_storage), query| { #[cfg(feature = "profiler")] profile_scope!("animation_control_system"); remove_sets.clear(); let (mut query_world, mut world) = world.split_for_query(query); for (entity, control_set, hierarchy) in query.iter_mut(&mut query_world) { remove_ids.clear(); state_set.clear(); // process each animation in control set for (ref id, ref mut control) in &mut control_set.animations { let mut remove = false; if let Some(state) = animation_storage .get(&control.animation) .and_then(|animation| { process_animation_control( *entity, &mut world, animation, control, hierarchy, &*sampler_storage, buffer, &mut remove, &mut self.next_id, ) }) { control.state = state; } // update command for next 
iteration if let AnimationCommand::Step(_) = control.command { control.command = AnimationCommand::Start; } if let AnimationCommand::SetInputValue(_) = control.command { control.command = AnimationCommand::Start; } // remove completed animations if remove { remove_ids.push(*id); } else { // record current position of running animations to know when to trigger deferred state_set.insert( *id, match control.state { ControlState::Running(_) => { let val = *hierarchy .and_then(|h| h.nodes.values().next()) .unwrap_or(entity); find_max_duration( control.id, world .entry_ref( val, ) .expect("Retrieve hierarchy node entry ref") .get_component::<SamplerControlSet<T>>() .ok()) } _ => -1.0, }, ); } } // record deferred animation as not started for deferred_animation in &control_set.deferred_animations { state_set.insert(deferred_animation.animation_id, -1.0); } deferred_start.clear(); for deferred_animation in &control_set.deferred_animations { let (start, start_dur) = if let Some(dur) = state_set.get(&deferred_animation.relation.0) { if *dur < 0. { (false, 0.) } else if let DeferStartRelation::Start(start_dur) = deferred_animation.relation.1 { let remain_dur = dur - start_dur; (remain_dur >= 0., remain_dur) } else { (false, 0.) } } else { (true, 0.) 
}; if start { deferred_start .push((deferred_animation.animation_id, start_dur)); state_set .insert(deferred_animation.animation_id, start_dur); } } let mut next_id = self.next_id; for &(id, start_dur) in &deferred_start { debug!("Processing Deferred Animation {:?}", id); let index = control_set .deferred_animations .iter() .position(|a| a.animation_id == id) .expect("Unreachable: Id of current `deferred_start` was taken from previous loop over `deferred_animations`"); let mut def = control_set.deferred_animations.remove(index); def.control.state = ControlState::Deferred(Duration::from_secs_f32(start_dur)); def.control.command = AnimationCommand::Start; let mut remove = false; if let Some(state) = animation_storage .get(&def.control.animation) .and_then(|animation| { process_animation_control( *entity, &mut world, animation, &mut def.control, hierarchy, &*sampler_storage, &mut buffer, &mut remove, &mut next_id, ) }) { def.control.state = state; } control_set.insert(id, def.control); } self.next_id = next_id; for id in &remove_ids { debug!("Removing AnimationControlSet {:?}", id); control_set.remove(&*id); if control_set.is_empty() { remove_sets.push(*entity); } } } for entity in &remove_sets { buffer.remove_component::<AnimationControlSet<I, T>>(*entity); } } )) } } fn find_max_duration<T>(control_id: u64, samplers: Option<&SamplerControlSet<T>>) -> f32 where T: AnimationSampling, { samplers .and_then(|set| set.get_running_duration(control_id)) .unwrap_or(0.) } /// Check if the given animation list is for a single node. If so, we don't need an /// `AnimationHierarchy`. fn only_one_index<C, P>(nodes: &[(usize, C, Handle<Sampler<P>>)]) -> bool where P: InterpolationPrimitive, { if nodes.is_empty() { true } else { let first = nodes[0].0; nodes.iter().all(|&(ref i, _, _)| *i == first) } } /// Process a single animation control object. 
/// /// ## Parameters: /// /// - `entity`: the entity the control object is active for /// - `animation`: the animation the control is for /// - `control`: animation control object /// - `hierarchy`: the animation node hierarchy for the entity hierarchy the animation instance is /// active for, if this is None the animation must be for a single node, which is the /// local entity. If the animation contains more than a single node index, the /// animation will be silently dropped. /// - `sampler_storage`: `AssetStorage` for all `Sampler`s /// - `samplers`: the active sampler sets /// - `targets`: Target components, used to retrieve the rest pose before animation starts. /// - `remove`: all entities pushed here will have the control object removed at the end of the system execution /// - `next_id`: next id to use for the animation control id /// /// ## /// /// Optionally returns a new `ControlState` for the animation. This will be the new state of the /// control object. fn process_animation_control<T>( entity: Entity, world: &mut SubWorld<'_>, animation: &Animation<T>, control: &mut AnimationControl<T>, hierarchy: Option<&AnimationHierarchy<T>>, sampler_storage: &AssetStorage<Sampler<T::Primitive>>, buffer: &mut CommandBuffer, remove: &mut bool, next_id: &mut u64, ) -> Option<ControlState> where T: AnimationSampling + Clone, { // Checking hierarchy let h_fallback = AnimationHierarchy::new_single(animation.nodes[0].0, entity); let hierarchy = match hierarchy { Some(h) => h, None => { if only_one_index(&animation.nodes) { debug!("Creating fallback hierarchy for Entity"); &h_fallback } else { error!( "Animation control which target multiple nodes without a hierarchy detected, dropping" ); *remove = true; return None; } } }; match (&control.state, &control.command) { // Check for aborted or done animation (_, &AnimationCommand::Abort) | (&(ControlState::Abort | ControlState::Done), _) => { debug!("Aborting Animation"); // signal samplers to abort, and remove control 
object if all samplers are done and removed if check_and_terminate_animation(control.id, hierarchy, world, buffer) { *remove = true; } Some(ControlState::Abort) } // Animation was just requested, start it // We ignore the command here because we need the animation to be // started before we can pause it, and to avoid a lot of checks for // abort. The command will be processed next frame. (&(ControlState::Requested | ControlState::Deferred(..)), &AnimationCommand::Start) => { debug!("Starting animation!"); control.id = *next_id; *next_id += 1; if start_animation( animation, sampler_storage, control, world, buffer, hierarchy, ) { Some(ControlState::Running(Duration::from_secs(0))) } else { None // Try again next frame, might just be that samplers haven't finished loading } } // If pause was requested on a running animation, pause it (&ControlState::Running(..), &AnimationCommand::Pause) => { pause_animation(control.id, hierarchy, world); Some(ControlState::Paused(Duration::from_secs(0))) } // If start was requested on a paused animation, unpause it (&ControlState::Paused(_), &AnimationCommand::Start) => { unpause_animation(control.id, hierarchy, world); Some(ControlState::Running(Duration::from_secs(0))) } (&ControlState::Running(..), &AnimationCommand::Step(ref dir)) => { step_animation(control.id, hierarchy, world, sampler_storage, dir); None } (&ControlState::Running(..), &AnimationCommand::SetInputValue(value)) => { set_animation_input(control.id, hierarchy, world, value); None } (&ControlState::Running(..), &AnimationCommand::SetBlendWeights(ref weights)) => { set_blend_weights(control.id, hierarchy, world, weights); None } // check for finished/aborted animations, wait for samplers to signal done, // then remove control objects (&ControlState::Running(..), _) => { if check_termination(control.id, hierarchy, world) { debug!("Animation finished, terminating."); // Do termination for node_entity in hierarchy.nodes.values() { let empty = world .entry_mut(entity) 
.expect("Failed to get hierarchy entity entry") .get_component_mut::<SamplerControlSet<T>>() .map(|sampler| { sampler.clear(control.id); sampler.is_empty() }) .unwrap_or(false); if empty { buffer.remove_component::<SamplerControlSet<T>>(*node_entity); } } *remove = true; } else { debug!("Animation Playing: {:?}", control.id); update_animation_rate(control.id, hierarchy, world, control.rate_multiplier); } None } _ => None, } } /// Process animation creation request. /// Will build `SamplerControlSet`s for the `AnimationHierarchy` given, based on the `Sampler`s in /// the given `Animation`. /// /// ## Parameters /// /// - `animation`: the animation to start /// - `sampler_storage`: all samplers /// - `control`: the control object for the animation instance /// - `hierarchy`: the animation node hierarchy for the entity hierarchy the animation instance is active for /// - `samplers`: the active sampler sets /// - `targets`: Target components, used to retrieve the rest pose before animation starts. /// /// ## Returns /// /// True if the animation was started, false if it wasn't. 
fn start_animation<T>( animation: &Animation<T>, sampler_storage: &AssetStorage<Sampler<T::Primitive>>, control: &AnimationControl<T>, world: &mut SubWorld<'_>, buffer: &mut CommandBuffer, hierarchy: &AnimationHierarchy<T>, ) -> bool where T: AnimationSampling + Clone, { // check that hierarchy is valid, and all samplers exist let valid = animation .nodes .iter() .all(|&(ref node_index, _, ref sampler_handle)| { hierarchy.nodes.contains_key(node_index) && sampler_storage.get(sampler_handle).is_some() }); if !valid { error!("Animation hierarchy is not valid for sampler!"); return false; } debug!("Creating rest state for this AnimationHierarchy"); hierarchy.rest_state(world, buffer); let start_state = if let ControlState::Deferred(dur) = control.state { ControlState::Deferred(dur) } else { ControlState::Requested }; let mut dirty_sampler_control_set_cache = HashMap::new(); // setup sampler tree for &(ref node_index, ref channel, ref sampler_handle) in &animation.nodes { let node_entity = hierarchy.nodes.get(node_index).expect( "Unreachable: Existence of all nodes are checked in validation of hierarchy above", ); debug!(" index: {:?} entity {:?}", node_index, node_entity); if let Ok(mut entry) = world.entry_mut(*node_entity) { if let Ok(component) = entry .get_component::<RestState<T>>() .map(RestState::state) .or_else(|_| entry.get_component::<T>()) { debug!("Creating SamplerControl for {:?}", entry.archetype()); let sampler_control = SamplerControl::<T> { control_id: control.id, channel: channel.clone(), state: start_state.clone(), sampler: sampler_handle.clone(), end: control.end.clone(), after: component.current_sample(channel), rate_multiplier: control.rate_multiplier, blend_weight: 1.0, }; if let Ok(set) = entry.get_component_mut::<SamplerControlSet<T>>() { debug!("Adding SamplerControl to existing SamplerControlSet"); set.add_control(sampler_control); } else { let mut set = SamplerControlSet::default(); // try to retrieve the component from the dirty cache if 
dirty_sampler_control_set_cache.contains_key(node_entity) { set = dirty_sampler_control_set_cache .remove(node_entity) .expect("Unreachable, we just checked"); } debug!("Adding SamplerControl to new SamplerControlSet"); set.add_control(sampler_control); dirty_sampler_control_set_cache.insert(*node_entity, set.clone()); buffer.add_component(*node_entity, set); } } else { error!( "Failed to acquire animated component. Is the component you are trying to animate present on the target entity: {:?}", node_entity ); return false; } } } true } fn pause_animation<T>(control_id: u64, hierarchy: &AnimationHierarchy<T>, world: &mut SubWorld<'_>) where T: AnimationSampling, { for node_entity in hierarchy.nodes.values() { if let Ok(mut entry) = world.entry_mut(*node_entity) { if let Ok(ref mut s) = entry.get_component_mut::<SamplerControlSet<T>>() { s.pause(control_id); } } } } fn unpause_animation<T>( control_id: u64, hierarchy: &AnimationHierarchy<T>, world: &mut SubWorld<'_>, ) where T: AnimationSampling, { for node_entity in hierarchy.nodes.values() { if let Ok(mut entry) = world.entry_mut(*node_entity) { if let Ok(ref mut s) = entry.get_component_mut::<SamplerControlSet<T>>() { s.unpause(control_id); } } } } fn step_animation<T>( control_id: u64, hierarchy: &AnimationHierarchy<T>, world: &mut SubWorld<'_>, sampler_storage: &AssetStorage<Sampler<T::Primitive>>, direction: &StepDirection, ) where T: AnimationSampling, { for node_entity in hierarchy.nodes.values() { if let Ok(mut entry) = world.entry_mut(*node_entity) { if let Ok(ref mut s) = entry.get_component_mut::<SamplerControlSet<T>>() { s.step(control_id, sampler_storage, direction); } } } } fn set_animation_input<T>( control_id: u64, hierarchy: &AnimationHierarchy<T>, world: &mut SubWorld<'_>, input: f32, ) where T: AnimationSampling, { for node_entity in hierarchy.nodes.values() { if let Ok(mut entry) = world.entry_mut(*node_entity) { if let Ok(ref mut s) = entry.get_component_mut::<SamplerControlSet<T>>() { 
s.set_input(control_id, input); } } } } fn set_blend_weights<T>( control_id: u64, hierarchy: &AnimationHierarchy<T>, world: &mut SubWorld<'_>, weights: &[(usize, T::Channel, f32)], ) where T: AnimationSampling, { for &(node_index, ref channel, weight) in weights { if let Some(node_entity) = hierarchy.nodes.get(&node_index) { if let Ok(mut entry) = world.entry_mut(*node_entity) { if let Ok(ref mut s) = entry.get_component_mut::<SamplerControlSet<T>>() { s.set_blend_weight(control_id, channel, weight); } } } } } fn update_animation_rate<T>( control_id: u64, hierarchy: &AnimationHierarchy<T>, world: &mut SubWorld<'_>, rate_multiplier: f32, ) where T: AnimationSampling, { for node_entity in hierarchy.nodes.values() { if let Ok(mut entry) = world.entry_mut(*node_entity) { if let Ok(ref mut s) = entry.get_component_mut::<SamplerControlSet<T>>() { s.set_rate_multiplier(control_id, rate_multiplier); } } } } /// Check if all nodes in an `AnimationHierarchy` are ready for termination, if so remove all /// `SamplerControlSet`s for the hierarchy, if not request termination on all sampler controls fn check_and_terminate_animation<T>( control_id: u64, hierarchy: &AnimationHierarchy<T>, world: &mut SubWorld<'_>, buffer: &mut CommandBuffer, ) -> bool where T: AnimationSampling, { // Check for termination if check_termination(control_id, hierarchy, world) { // Do termination for node_entity in hierarchy.nodes.values() { let empty = world .entry_mut(*node_entity) .expect("get hierarchy node entry_mut") .get_component_mut::<SamplerControlSet<T>>() .map(|sampler| { sampler.clear(control_id); sampler.is_empty() }) .unwrap_or(false); if empty { buffer.remove_component::<SamplerControlSet<T>>(*node_entity); } } true } else { // Request termination of samplers for node_entity in hierarchy.nodes.values() { if let Ok(mut entry) = world.entry_mut(*node_entity) { if let Ok(ref mut s) = entry.get_component_mut::<SamplerControlSet<T>>() { s.abort(control_id); } } } false } } /// Check if all 
nodes in an `AnimationHierarchy` are ready for termination. fn check_termination<T>( control_id: u64, hierarchy: &AnimationHierarchy<T>, world: &mut SubWorld<'_>, ) -> bool where T: AnimationSampling, { hierarchy.nodes.iter().all(|(_, node_entity)| { world .entry_ref(*node_entity) .expect("node entry ref") .get_component::<SamplerControlSet<T>>() .map_or(true, |acs| acs.check_termination(control_id)) }) }
39.296636
143
0.507315
f5f5d4ed3804fe8c008f4d21e95d1baf09858813
582
// aux-build:overlapping_pub_trait_source.rs /* * This crate declares two public paths, `m::Tr` and `prelude::_`. Make sure we prefer the former. */ extern crate overlapping_pub_trait_source; fn main() { //~^ HELP the following trait is implemented but not in scope; perhaps add a `use` for it: //~| SUGGESTION overlapping_pub_trait_source::m::Tr use overlapping_pub_trait_source::S; S.method(); //~^ ERROR no method named `method` found for struct `S` in the current scope [E0599] //~| HELP items from traits can only be used if the trait is in scope }
36.375
98
0.706186
d79e35bf33ec91387ab6b79e3af123927028b181
147
use tower_lsp::lsp_types::*; pub fn symb(name: &str) -> CompletionItem { CompletionItem { label: name.to_string(), ..Default::default() } }
16.333333
43
0.659864
875d76a2d80d9e314682410236a2e68a3f8876d0
2,434
// Copyright (c) 2020 DarkWeb Design // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. /// Apply a user supplied function to every member of a vector /// /// # Description /// /// Applies the user-defined callback function to each element of the vector. /// /// **callback** /// /// Callback takes on two parameters. The vector parameter's value being the first, and the index /// second. /// /// Only the values of the vector may potentially be changed, i.e., the programmer cannot add, unset /// or reorder elements. 
/// /// # Examples /// /// Example #1 array_walk() example /// /// ``` /// use phpify::array::array_walk; /// /// let mut fruits = vec![ /// "lemon".to_string(), /// "orange".to_string(), /// "banana".to_string(), /// "apple".to_string(), /// ]; /// /// fn test_alter(item: &mut String, index: usize) { /// *item = format!("fruit: {}", *item); /// } /// /// array_walk(&mut fruits, test_alter); /// /// assert_eq!(fruits[0], "fruit: lemon"); /// assert_eq!(fruits[1], "fruit: orange"); /// assert_eq!(fruits[2], "fruit: banana"); /// assert_eq!(fruits[3], "fruit: apple"); /// ``` pub fn array_walk<T>(array: &mut Vec<T>, callback: impl Fn(&mut T, usize) + 'static) { for (index, value) in array.iter_mut().enumerate() { callback(value, index); } } #[cfg(test)] mod tests { use crate::array::array_walk; #[test] fn test() { let mut vec = vec![1, 2, 3]; array_walk(&mut vec, |value, index| *value = *value * index); assert_eq!(vec, [0, 2, 6]); } }
32.891892
100
0.657354
1acc8fbaecf92c56f7e468a9333e8f9107000f8d
3,338
use std::path::PathBuf; use crate::prelude::*; use nu_engine::filesystem::path::canonicalize; use nu_engine::WholeStreamCommand; use nu_errors::ShellError; use nu_protocol::{CommandAction, ReturnSuccess, Signature, SyntaxShape, UntaggedValue}; use nu_source::Tagged; #[cfg(unix)] use std::os::unix::fs::PermissionsExt; pub struct SubCommand; #[derive(Deserialize)] pub struct Arguments { #[serde(rename = "load")] pub load_path: Option<Tagged<PathBuf>>, } impl WholeStreamCommand for SubCommand { fn name(&self) -> &str { "nu plugin" } fn signature(&self) -> Signature { Signature::build("nu plugin").named( "load", SyntaxShape::FilePath, "a path to load the plugins from", Some('l'), ) } fn usage(&self) -> &str { "Nu Plugin" } fn examples(&self) -> Vec<Example> { vec![Example { description: "Load all plugins in the current directory", example: "nu plugin --load .", result: None, }] } fn run_with_actions(&self, args: CommandArgs) -> Result<ActionStream, ShellError> { let scope = args.scope().clone(); let shell_manager = args.shell_manager(); let (Arguments { load_path }, _) = args.process()?; if let Some(Tagged { item: load_path, tag, }) = load_path { let path = canonicalize(shell_manager.path(), load_path).map_err(|_| { ShellError::labeled_error( "Cannot load plugins from directory", "directory not found", &tag, ) })?; if !path.is_dir() { return Err(ShellError::labeled_error( "Cannot load plugins from directory", "is not a directory", &tag, )); } #[cfg(unix)] { let has_exec = path .metadata() .map(|m| umask::Mode::from(m.permissions().mode()).has(umask::USER_READ)) .map_err(|e| { ShellError::labeled_error( "Cannot load plugins from directory", format!("cannot stat ({})", e), &tag, ) })?; if !has_exec { return Err(ShellError::labeled_error( "Cannot load plugins from directory", "permission denied", &tag, )); } } return Ok(vec![ReturnSuccess::action(CommandAction::AddPlugins( path.to_string_lossy().to_string(), ))] .into()); } Ok(ActionStream::one(ReturnSuccess::value( 
UntaggedValue::string(get_full_help(&SubCommand, &scope)).into_value(Tag::unknown()), ))) } } #[cfg(test)] mod tests { use super::ShellError; use super::SubCommand; #[test] fn examples_work_as_expected() -> Result<(), ShellError> { use crate::examples::test as test_examples; test_examples(SubCommand {}) } }
27.816667
97
0.49281
bbb8603b3ed1ebe863cf8749d3db0b2002a50795
811
extern crate httpmock; use isahc::get; use httpmock::Method::GET; use httpmock::MockServer; #[test] fn explicit_delete_mock_test() { // Arrange let server = MockServer::start(); let mut m = server.mock(|when, then| { when.method(GET).path("/health"); then.status(205); }); // Act: Send the HTTP request let response = get(&format!( "http://{}:{}/health", server.host(), server.port() )) .unwrap(); // Assert m.assert(); assert_eq!(response.status(), 205); // Delete the mock and send the request again m.delete(); let response = get(&format!("http://{}/health", server.address())).unwrap(); // Assert that the request failed, because the mock has been deleted assert_eq!(response.status(), 404); }
21.342105
80
0.593095
f48c058b196b4a0d40c93178d8fd664b82b26b95
108
pub mod audio; pub mod locale; pub mod log; pub mod navigator; pub mod render; pub mod storage; pub mod ui;
13.5
18
0.740741
294b89acc2cb5f94e77b3b8cf5e9398f22f76204
1,073
extern crate clap; extern crate fibers; extern crate fibers_inotify; extern crate futures; #[macro_use] extern crate trackable; use clap::{App, Arg}; use fibers::{Executor, InPlaceExecutor, Spawn}; use fibers_inotify::{Error, InotifyService, WatchMask}; use futures::{Future, Stream}; fn main() { let matches = App::new("watch") .arg(Arg::with_name("PATH").index(1).required(true)) .get_matches(); let path = matches.value_of("PATH").unwrap(); let mask = WatchMask::CREATE | WatchMask::MODIFY | WatchMask::DELETE_SELF | WatchMask::DELETE | WatchMask::MOVE | WatchMask::MOVE_SELF; let inotify_service = InotifyService::new(); let inotify_handle = inotify_service.handle(); let mut executor = InPlaceExecutor::new().unwrap(); executor.spawn(inotify_service.map_err(|e| panic!("{}", e))); let fiber = executor.spawn_monitor(inotify_handle.watch(path, mask).for_each(|event| { println!("{:?}", event); Ok(()) })); track_try_unwrap!(executor.run_fiber(fiber).unwrap().map_err(Error::from)); }
32.515152
97
0.67754
dd69ee36ea93f05d70d3c8414f181ef1ecccf711
1,352
use std::fs::File; use std::io::Read; use clap::Parser; #[derive(Parser)] pub struct Options { /// Path to a wgsl shader program (use '-' for stdin). #[clap(default_value = "-")] pub input: String, /// Path at which to write output (use '-' for stdout). #[clap(short, long, default_value = "-")] pub output: String, } pub fn run(options: Options) -> eyre::Result<()> { let source = read_shader_from_path(&options.input)?; let ast = parser::parse(&source); struct Output(Box<dyn std::io::Write>); impl std::fmt::Write for Output { fn write_str(&mut self, s: &str) -> std::fmt::Result { use std::io::Write; self.0.write_all(s.as_bytes()).unwrap(); Ok(()) } } let output: Box<dyn std::io::Write> = match options.output.as_str() { "-" => Box::new(std::io::stdout()), path => Box::new(File::create(path)?), }; ast::writer::Writer::default() .write_module(&mut Output(output), &ast) .unwrap(); Ok(()) } fn read_shader_from_path(path: &str) -> eyre::Result<String> { let mut input: Box<dyn Read> = match path { "-" => Box::new(std::io::stdin()), path => Box::new(File::open(path)?), }; let mut shader = String::new(); input.read_to_string(&mut shader)?; Ok(shader) }
25.037037
73
0.559172
f948c9eb2f68db6cc6e148b69a93a91d570ad4bf
48,229
// Copyright 2018 PingCAP, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // See the License for the specific language governing permissions and // limitations under the License. use std::time::Duration; use futures::sync::mpsc; use futures::{future, stream, Future, Stream}; use protobuf::{CodedInputStream, Message}; use kvproto::{coprocessor as coppb, errorpb, kvrpcpb}; use tipb::analyze::{AnalyzeReq, AnalyzeType}; use tipb::checksum::{ChecksumRequest, ChecksumScanOn}; use tipb::executor::ExecType; use tipb::select::DAGRequest; use crate::server::readpool::{self, ReadPool}; use crate::server::Config; use crate::storage::{self, Engine, SnapshotStore}; use crate::util::Either; use crate::coprocessor::dag::executor::ExecutorMetrics; use crate::coprocessor::metrics::*; use crate::coprocessor::tracker::Tracker; use crate::coprocessor::*; const OUTDATED_ERROR_MSG: &str = "request outdated."; const BUSY_ERROR_MSG: &str = "server is busy (coprocessor full)."; /// A pool to build and run Coprocessor request handlers. pub struct Endpoint<E: Engine> { /// The storage engine to build Coprocessor request handlers. engine: E, /// The thread pool to run Coprocessor requests. read_pool: ReadPool<ReadPoolContext>, /// The recursion limit when parsing Coprocessor Protobuf requests. recursion_limit: u32, batch_row_limit: usize, stream_batch_row_limit: usize, stream_channel_size: usize, /// The soft time limit of handling Coprocessor requests. 
max_handle_duration: Duration, } impl<E: Engine> Clone for Endpoint<E> { fn clone(&self) -> Self { Self { engine: self.engine.clone(), read_pool: self.read_pool.clone(), ..*self } } } impl<E: Engine> crate::util::AssertSend for Endpoint<E> {} impl<E: Engine> Endpoint<E> { pub fn new(cfg: &Config, engine: E, read_pool: ReadPool<ReadPoolContext>) -> Self { Self { engine, read_pool, recursion_limit: cfg.end_point_recursion_limit, batch_row_limit: cfg.end_point_batch_row_limit, stream_batch_row_limit: cfg.end_point_stream_batch_row_limit, stream_channel_size: cfg.end_point_stream_channel_size, max_handle_duration: cfg.end_point_request_max_handle_duration.0, } } /// Parse the raw `Request` to create `RequestHandlerBuilder` and `ReqContext`. /// Returns `Err` if fails. fn parse_request( &self, mut req: coppb::Request, peer: Option<String>, is_streaming: bool, ) -> Result<(RequestHandlerBuilder<E::Snap>, ReqContext)> { fail_point!("coprocessor_parse_request", |_| Err(box_err!( "unsupported tp (failpoint)" ))); let (context, data, ranges) = ( req.take_context(), req.take_data(), req.take_ranges().to_vec(), ); let mut is = CodedInputStream::from_bytes(&data); is.set_recursion_limit(self.recursion_limit); let req_ctx: ReqContext; let builder: RequestHandlerBuilder<E::Snap>; match req.get_tp() { REQ_TYPE_DAG => { let mut dag = DAGRequest::new(); box_try!(dag.merge_from(&mut is)); let mut table_scan = false; let mut is_desc_scan = false; if let Some(scan) = dag.get_executors().iter().next() { table_scan = scan.get_tp() == ExecType::TypeTableScan; if table_scan { is_desc_scan = scan.get_tbl_scan().get_desc(); } else { is_desc_scan = scan.get_idx_scan().get_desc(); } } req_ctx = ReqContext::new( make_tag(table_scan), context, ranges.as_slice(), self.max_handle_duration, peer, Some(is_desc_scan), Some(dag.get_start_ts()), ); let batch_row_limit = self.get_batch_row_limit(is_streaming); builder = Box::new(move |snap, req_ctx: &ReqContext| { // TODO: Remove explicit type once 
rust-lang#41078 is resolved let store = SnapshotStore::new( snap, dag.get_start_ts(), req_ctx.context.get_isolation_level(), !req_ctx.context.get_not_fill_cache(), ); dag::DAGRequestHandler::build( dag, ranges, store, req_ctx.deadline, batch_row_limit, is_streaming, true, ) }); } REQ_TYPE_ANALYZE => { let mut analyze = AnalyzeReq::new(); box_try!(analyze.merge_from(&mut is)); let table_scan = analyze.get_tp() == AnalyzeType::TypeColumn; req_ctx = ReqContext::new( make_tag(table_scan), context, ranges.as_slice(), self.max_handle_duration, peer, None, Some(analyze.get_start_ts()), ); builder = Box::new(move |snap, req_ctx: &_| { // TODO: Remove explicit type once rust-lang#41078 is resolved statistics::analyze::AnalyzeContext::new(analyze, ranges, snap, req_ctx) .map(|h| h.into_boxed()) }); } REQ_TYPE_CHECKSUM => { let mut checksum = ChecksumRequest::new(); box_try!(checksum.merge_from(&mut is)); let table_scan = checksum.get_scan_on() == ChecksumScanOn::Table; req_ctx = ReqContext::new( make_tag(table_scan), context, ranges.as_slice(), self.max_handle_duration, peer, None, Some(checksum.get_start_ts()), ); builder = Box::new(move |snap, req_ctx: &_| { // TODO: Remove explicit type once rust-lang#41078 is resolved checksum::ChecksumContext::new(checksum, ranges, snap, req_ctx) .map(|h| h.into_boxed()) }); } tp => return Err(box_err!("unsupported tp {}", tp)), }; Ok((builder, req_ctx)) } /// Get the batch row limit configuration. 
#[inline] fn get_batch_row_limit(&self, is_streaming: bool) -> usize { if is_streaming { self.stream_batch_row_limit } else { self.batch_row_limit } } #[inline] fn async_snapshot( engine: E, ctx: &kvrpcpb::Context, ) -> impl Future<Item = E::Snap, Error = Error> { let (callback, future) = crate::util::future::paired_future_callback(); let val = engine.async_snapshot(ctx, callback); future::result(val) .and_then(|_| future.map_err(|cancel| storage::engine::Error::Other(box_err!(cancel)))) .and_then(|(_ctx, result)| result) // map engine::Error -> coprocessor::Error .map_err(Error::from) } /// The real implementation of handling a unary request. /// /// It first retrieves a snapshot, then builds the `RequestHandler` over the snapshot and /// the given `handler_builder`. Finally, it calls the unary request interface of the /// `RequestHandler` to process the request and produce a result. // TODO: Convert to use async / await. fn handle_unary_request_impl( engine: E, tracker: Box<Tracker>, handler_builder: RequestHandlerBuilder<E::Snap>, ) -> impl Future<Item = coppb::Response, Error = Error> { // When this function is being executed, it may be queued for a long time, so that // deadline may exceed. future::result(tracker.req_ctx.deadline.check_if_exceeded()) .and_then(move |_| { Self::async_snapshot(engine, &tracker.req_ctx.context) .map(|snapshot| (tracker, snapshot)) }) .and_then(move |(tracker, snapshot)| { // When snapshot is retrieved, deadline may exceed. future::result(tracker.req_ctx.deadline.check_if_exceeded()) .map(|_| (tracker, snapshot)) }) .and_then(move |(tracker, snapshot)| { future::result(handler_builder.call_box((snapshot, &tracker.req_ctx))) .map(|handler| (tracker, handler)) }) .and_then(|(mut tracker, mut handler)| { tracker.on_begin_all_items(); tracker.on_begin_item(); // There might be errors when handling requests. In this case, we still need its // execution metrics. 
let result = handler.handle_request(); let exec_metrics = { let mut metrics = ExecutorMetrics::default(); handler.collect_metrics_into(&mut metrics); metrics }; tracker.on_finish_item(Some(exec_metrics)); let exec_details = tracker.get_item_exec_details(); tracker.on_finish_all_items(); future::result(result) .or_else(|e| Ok::<_, Error>(make_error_response(e))) .map(|mut resp| { resp.set_exec_details(exec_details); resp }) }) } /// Handle a unary request and run on the read pool. /// /// Returns `Err(err)` if the read pool is full. Returns `Ok(future)` in other cases. /// The future inside may be an error however. fn handle_unary_request( &self, req_ctx: ReqContext, handler_builder: RequestHandlerBuilder<E::Snap>, ) -> Result<impl Future<Item = coppb::Response, Error = Error>> { let engine = self.engine.clone(); let priority = readpool::Priority::from(req_ctx.context.get_priority()); // box the tracker so that moving it is cheap. let mut tracker = Box::new(Tracker::new(req_ctx)); self.read_pool .future_execute(priority, move |ctxd| { tracker.attach_ctxd(ctxd); Self::handle_unary_request_impl(engine, tracker, handler_builder) }) .map_err(|_| Error::Full) } /// Parses and handles a unary request. Returns a future that will never fail. If there are /// errors during parsing or handling, they will be converted into a `Response` as the success /// result of the future. #[inline] pub fn parse_and_handle_unary_request( &self, req: coppb::Request, peer: Option<String>, ) -> impl Future<Item = coppb::Response, Error = ()> { let result_of_future = self.parse_request(req, peer, false) .and_then(|(handler_builder, req_ctx)| { self.handle_unary_request(req_ctx, handler_builder) }); future::result(result_of_future) .flatten() .or_else(|e| Ok(make_error_response(e))) } /// The real implementation of handling a stream request. /// /// It first retrieves a snapshot, then builds the `RequestHandler` over the snapshot and /// the given `handler_builder`. 
Finally, it calls the stream request interface of the /// `RequestHandler` multiple times to process the request and produce multiple results. // TODO: Convert to use async / await. fn handle_stream_request_impl( engine: E, tracker: Box<Tracker>, handler_builder: RequestHandlerBuilder<E::Snap>, ) -> impl Stream<Item = coppb::Response, Error = Error> { // When this function is being executed, it may be queued for a long time, so that // deadline may exceed. let tracker_and_handler_future = future::result(tracker.req_ctx.deadline.check_if_exceeded()) .and_then(move |_| { Self::async_snapshot(engine, &tracker.req_ctx.context) .map(|snapshot| (tracker, snapshot)) }) .and_then(move |(tracker, snapshot)| { // When snapshot is retrieved, deadline may exceed. future::result(tracker.req_ctx.deadline.check_if_exceeded()) .map(|_| (tracker, snapshot)) }) .and_then(move |(tracker, snapshot)| { future::result(handler_builder.call_box((snapshot, &tracker.req_ctx))) .map(|handler| (tracker, handler)) }); tracker_and_handler_future .map(|(mut tracker, handler)| { tracker.on_begin_all_items(); // The state is `Option<(tracker, handler, finished)>`, `None` indicates finished. // For every stream item except the last one, the type is `Either::Left(Response)`. // For last stream item, the type is `Either::Right(Tracker)` so that we can do // more things for tracker later. let initial_state = Some((tracker, handler, false)); stream::unfold(initial_state, |state| { match state { Some((mut tracker, mut handler, finished)) => { if finished { // Emit tracker as the last item. 
let yielded = Either::Right(tracker); let next_state = None; return Some(Ok((yielded, next_state))); } // There are future items tracker.on_begin_item(); let result = handler.handle_streaming_request(); let exec_metrics = { let mut metrics = ExecutorMetrics::default(); handler.collect_metrics_into(&mut metrics); metrics }; tracker.on_finish_item(Some(exec_metrics)); let exec_details = tracker.get_item_exec_details(); let (mut resp, finished) = match result { Err(e) => (make_error_response(e), true), Ok((None, _)) => { let yielded = Either::Right(tracker); let next_state = None; return Some(Ok((yielded, next_state))); } Ok((Some(resp), finished)) => (resp, finished), }; resp.set_exec_details(exec_details); let yielded = Either::Left(resp); let next_state = Some((tracker, handler, finished)); Some(Ok((yielded, next_state))) } None => { // Finished None } } }) .filter_map(|resp_or_tracker| match resp_or_tracker { Either::Left(resp) => Some(resp), Either::Right(mut tracker) => { tracker.on_finish_all_items(); None } }) }) .flatten_stream() } /// Handle a stream request and run on the read pool. /// /// Returns `Err(err)` if the read pool is full. Returns `Ok(stream)` in other cases. /// The stream inside may produce errors however. fn handle_stream_request( &self, req_ctx: ReqContext, handler_builder: RequestHandlerBuilder<E::Snap>, ) -> Result<impl Stream<Item = coppb::Response, Error = Error>> { let (tx, rx) = mpsc::channel::<Result<coppb::Response>>(self.stream_channel_size); let engine = self.engine.clone(); let priority = readpool::Priority::from(req_ctx.context.get_priority()); // Must be created befure `future_execute`, otherwise wait time is not tracked. 
let mut tracker = Box::new(Tracker::new(req_ctx)); self.read_pool .future_execute(priority, move |ctxd| { tracker.attach_ctxd(ctxd); Self::handle_stream_request_impl(engine, tracker, handler_builder) // Stream<Resp, Error> .then(Ok::<_, mpsc::SendError<_>>) // Stream<Result<Resp, Error>, MpscError> .forward(tx) }) .map_err(|_| Error::Full) .and_then(move |cpu_future| { // Keep running stream producer cpu_future.forget(); // Returns the stream instead of a future Ok(rx.then(|r| r.unwrap())) }) } /// Parses and handles a stream request. Returns a stream that produce each result in a /// `Response` and will never fail. If there are errors during parsing or handling, they will /// be converted into a `Response` as the only stream item. #[inline] pub fn parse_and_handle_stream_request( &self, req: coppb::Request, peer: Option<String>, ) -> impl Stream<Item = coppb::Response, Error = ()> { let result_of_stream = self.parse_request(req, peer, true) .and_then(|(handler_builder, req_ctx)| { self.handle_stream_request(req_ctx, handler_builder) }); // Result<Stream<Resp, Error>, Error> stream::once(result_of_stream) // Stream<Stream<Resp, Error>, Error> .flatten() // Stream<Resp, Error> .or_else(|e| Ok(make_error_response(e))) // Stream<Resp, ()> } } fn make_tag(is_table_scan: bool) -> &'static str { if is_table_scan { "select" } else { "index" } } fn make_error_response(e: Error) -> coppb::Response { error!( "error-response"; "err" => %e ); let mut resp = coppb::Response::new(); let tag; match e { Error::Region(e) => { tag = storage::get_tag_from_header(&e); resp.set_region_error(e); } Error::Locked(info) => { tag = "lock"; resp.set_locked(info); } Error::Outdated(elapsed, scan_tag) => { tag = "outdated"; OUTDATED_REQ_WAIT_TIME .with_label_values(&[scan_tag]) .observe(elapsed.as_secs() as f64); resp.set_other_error(OUTDATED_ERROR_MSG.to_owned()); } Error::Full => { tag = "full"; let mut errorpb = errorpb::Error::new(); errorpb.set_message("Coprocessor end-point is 
full".to_owned()); let mut server_is_busy_err = errorpb::ServerIsBusy::new(); server_is_busy_err.set_reason(BUSY_ERROR_MSG.to_owned()); errorpb.set_server_is_busy(server_is_busy_err); resp.set_region_error(errorpb); } Error::Other(_) | Error::Eval(_) => { tag = "other"; resp.set_other_error(format!("{}", e)); } }; COPR_REQ_ERROR.with_label_values(&[tag]).inc(); resp } #[cfg(test)] mod tests { use super::*; use std::sync::{atomic, mpsc, Arc}; use std::thread; use std::vec; use tipb::executor::Executor; use tipb::expression::Expr; use crate::storage::TestEngineBuilder; use crate::util::worker::FutureWorker; /// A unary `RequestHandler` that always produces a fixture. struct UnaryFixture { handle_duration_millis: u64, result: Option<Result<coppb::Response>>, } impl UnaryFixture { pub fn new(result: Result<coppb::Response>) -> UnaryFixture { UnaryFixture { handle_duration_millis: 0, result: Some(result), } } pub fn new_with_duration( result: Result<coppb::Response>, handle_duration_millis: u64, ) -> UnaryFixture { UnaryFixture { handle_duration_millis, result: Some(result), } } } impl RequestHandler for UnaryFixture { fn handle_request(&mut self) -> Result<coppb::Response> { thread::sleep(Duration::from_millis(self.handle_duration_millis)); self.result.take().unwrap() } } /// A streaming `RequestHandler` that always produces a fixture. 
struct StreamFixture { result_len: usize, result_iter: vec::IntoIter<Result<coppb::Response>>, handle_durations_millis: vec::IntoIter<u64>, nth: usize, } impl StreamFixture { pub fn new(result: Vec<Result<coppb::Response>>) -> StreamFixture { let len = result.len(); StreamFixture { result_len: len, result_iter: result.into_iter(), handle_durations_millis: vec![0; len].into_iter(), nth: 0, } } pub fn new_with_duration( result: Vec<Result<coppb::Response>>, handle_durations_millis: Vec<u64>, ) -> StreamFixture { assert_eq!(result.len(), handle_durations_millis.len()); StreamFixture { result_len: result.len(), result_iter: result.into_iter(), handle_durations_millis: handle_durations_millis.into_iter(), nth: 0, } } } impl RequestHandler for StreamFixture { fn handle_streaming_request(&mut self) -> Result<(Option<coppb::Response>, bool)> { let is_finished = if self.result_len == 0 { true } else { self.nth >= (self.result_len - 1) }; let ret = match self.result_iter.next() { None => { assert!(is_finished); Ok((None, is_finished)) } Some(val) => { let handle_duration_ms = self.handle_durations_millis.next().unwrap(); thread::sleep(Duration::from_millis(handle_duration_ms)); match val { Ok(resp) => Ok((Some(resp), is_finished)), Err(e) => Err(e), } } }; self.nth += 1; ret } } /// A streaming `RequestHandler` that produces values according a closure. 
struct StreamFromClosure { result_generator: Box<dyn Fn(usize) -> HandlerStreamStepResult + Send>, nth: usize, } impl StreamFromClosure { pub fn new<F>(result_generator: F) -> StreamFromClosure where F: Fn(usize) -> HandlerStreamStepResult + Send + 'static, { StreamFromClosure { result_generator: Box::new(result_generator), nth: 0, } } } impl RequestHandler for StreamFromClosure { fn handle_streaming_request(&mut self) -> Result<(Option<coppb::Response>, bool)> { let result = (self.result_generator)(self.nth); self.nth += 1; result } } #[test] fn test_outdated_request() { let pd_worker = FutureWorker::new("test-pd-worker"); let engine = TestEngineBuilder::new().build().unwrap(); let read_pool = ReadPool::new("readpool", &readpool::Config::default_for_test(), || { ReadPoolContext::new(pd_worker.scheduler()) }); let cop = Endpoint::new(&Config::default(), engine, read_pool); // a normal request let handler_builder = Box::new(|_, _: &_| Ok(UnaryFixture::new(Ok(coppb::Response::new())).into_boxed())); let resp = cop .handle_unary_request(ReqContext::default_for_test(), handler_builder) .unwrap() .wait() .unwrap(); assert!(resp.get_other_error().is_empty()); // an outdated request let handler_builder = Box::new(|_, _: &_| Ok(UnaryFixture::new(Ok(coppb::Response::new())).into_boxed())); let outdated_req_ctx = ReqContext::new( "test", kvrpcpb::Context::new(), &[], Duration::from_secs(0), None, None, None, ); assert!(cop .handle_unary_request(outdated_req_ctx, handler_builder) .unwrap() .wait() .is_err()); } #[test] fn test_stack_guard() { let pd_worker = FutureWorker::new("test-pd-worker"); let engine = TestEngineBuilder::new().build().unwrap(); let read_pool = ReadPool::new("readpool", &readpool::Config::default_for_test(), || { ReadPoolContext::new(pd_worker.scheduler()) }); let cop = Endpoint::new( &Config { end_point_recursion_limit: 5, ..Config::default() }, engine, read_pool, ); let req = { let mut expr = Expr::new(); for _ in 0..10 { let mut e = Expr::new(); 
e.mut_children().push(expr); expr = e; } let mut e = Executor::new(); e.mut_selection().mut_conditions().push(expr); let mut dag = DAGRequest::new(); dag.mut_executors().push(e); let mut req = coppb::Request::new(); req.set_tp(REQ_TYPE_DAG); req.set_data(dag.write_to_bytes().unwrap()); req }; let resp: coppb::Response = cop .parse_and_handle_unary_request(req, None) .wait() .unwrap(); assert!(!resp.get_other_error().is_empty()); } #[test] fn test_invalid_req_type() { let pd_worker = FutureWorker::new("test-pd-worker"); let engine = TestEngineBuilder::new().build().unwrap(); let read_pool = ReadPool::new("readpool", &readpool::Config::default_for_test(), || { ReadPoolContext::new(pd_worker.scheduler()) }); let cop = Endpoint::new(&Config::default(), engine, read_pool); let mut req = coppb::Request::new(); req.set_tp(9999); let resp: coppb::Response = cop .parse_and_handle_unary_request(req, None) .wait() .unwrap(); assert!(!resp.get_other_error().is_empty()); } #[test] fn test_invalid_req_body() { let pd_worker = FutureWorker::new("test-pd-worker"); let engine = TestEngineBuilder::new().build().unwrap(); let read_pool = ReadPool::new("readpool", &readpool::Config::default_for_test(), || { ReadPoolContext::new(pd_worker.scheduler()) }); let cop = Endpoint::new(&Config::default(), engine, read_pool); let mut req = coppb::Request::new(); req.set_tp(REQ_TYPE_DAG); req.set_data(vec![1, 2, 3]); let resp = cop .parse_and_handle_unary_request(req, None) .wait() .unwrap(); assert!(!resp.get_other_error().is_empty()); } #[test] fn test_full() { let pd_worker = FutureWorker::new("test-pd-worker"); let engine = TestEngineBuilder::new().build().unwrap(); let read_pool = ReadPool::new( "readpool", &readpool::Config { normal_concurrency: 1, max_tasks_per_worker_normal: 2, ..readpool::Config::default_for_test() }, || ReadPoolContext::new(pd_worker.scheduler()), ); let cop = Endpoint::new(&Config::default(), engine, read_pool); let (tx, rx) = mpsc::channel(); // first 2 requests are 
processed as normal and laters are returned as errors for i in 0..5 { let mut response = coppb::Response::new(); response.set_data(vec![1, 2, i]); let mut context = kvrpcpb::Context::new(); context.set_priority(kvrpcpb::CommandPri::Normal); let handler_builder = Box::new(|_, _: &_| { Ok(UnaryFixture::new_with_duration(Ok(response), 1000).into_boxed()) }); let result_of_future = cop.handle_unary_request(ReqContext::default_for_test(), handler_builder); match result_of_future { Err(full_error) => { tx.send(Err(full_error)).unwrap(); } Ok(future) => { let tx = tx.clone(); thread::spawn(move || { tx.send(future.wait()).unwrap(); }); } } thread::sleep(Duration::from_millis(100)); } // verify for _ in 2..5 { assert!(rx.recv().unwrap().is_err()); } for i in 0..2 { let resp = rx.recv().unwrap().unwrap(); assert_eq!(resp.get_data(), [1, 2, i]); assert!(!resp.has_region_error()); } } #[test] fn test_error_unary_response() { let pd_worker = FutureWorker::new("test-pd-worker"); let engine = TestEngineBuilder::new().build().unwrap(); let read_pool = ReadPool::new("readpool", &readpool::Config::default_for_test(), || { ReadPoolContext::new(pd_worker.scheduler()) }); let cop = Endpoint::new(&Config::default(), engine, read_pool); let handler_builder = Box::new(|_, _: &_| { Ok(UnaryFixture::new(Err(Error::Other(box_err!("foo")))).into_boxed()) }); let resp = cop .handle_unary_request(ReqContext::default_for_test(), handler_builder) .unwrap() .wait() .unwrap(); assert_eq!(resp.get_data().len(), 0); assert!(!resp.get_other_error().is_empty()); } #[test] fn test_error_streaming_response() { let pd_worker = FutureWorker::new("test-pd-worker"); let engine = TestEngineBuilder::new().build().unwrap(); let read_pool = ReadPool::new("readpool", &readpool::Config::default_for_test(), || { ReadPoolContext::new(pd_worker.scheduler()) }); let cop = Endpoint::new(&Config::default(), engine, read_pool); // Fail immediately let handler_builder = Box::new(|_, _: &_| { 
Ok(StreamFixture::new(vec![Err(Error::Other(box_err!("foo")))]).into_boxed()) }); let resp_vec = cop .handle_stream_request(ReqContext::default_for_test(), handler_builder) .unwrap() .collect() .wait() .unwrap(); assert_eq!(resp_vec.len(), 1); assert_eq!(resp_vec[0].get_data().len(), 0); assert!(!resp_vec[0].get_other_error().is_empty()); // Fail after some success responses let mut responses = Vec::new(); for i in 0..5 { let mut resp = coppb::Response::new(); resp.set_data(vec![1, 2, i]); responses.push(Ok(resp)); } responses.push(Err(Error::Other(box_err!("foo")))); let handler_builder = Box::new(|_, _: &_| Ok(StreamFixture::new(responses).into_boxed())); let resp_vec = cop .handle_stream_request(ReqContext::default_for_test(), handler_builder) .unwrap() .collect() .wait() .unwrap(); assert_eq!(resp_vec.len(), 6); for i in 0..5 { assert_eq!(resp_vec[i].get_data(), [1, 2, i as u8]); } assert_eq!(resp_vec[5].get_data().len(), 0); assert!(!resp_vec[5].get_other_error().is_empty()); } #[test] fn test_empty_streaming_response() { let pd_worker = FutureWorker::new("test-pd-worker"); let engine = TestEngineBuilder::new().build().unwrap(); let read_pool = ReadPool::new("readpool", &readpool::Config::default_for_test(), || { ReadPoolContext::new(pd_worker.scheduler()) }); let cop = Endpoint::new(&Config::default(), engine, read_pool); let handler_builder = Box::new(|_, _: &_| Ok(StreamFixture::new(vec![]).into_boxed())); let resp_vec = cop .handle_stream_request(ReqContext::default_for_test(), handler_builder) .unwrap() .collect() .wait() .unwrap(); assert_eq!(resp_vec.len(), 0); } // TODO: Test panic? 
#[test] fn test_special_streaming_handlers() { let pd_worker = FutureWorker::new("test-pd-worker"); let engine = TestEngineBuilder::new().build().unwrap(); let read_pool = ReadPool::new("readpool", &readpool::Config::default_for_test(), || { ReadPoolContext::new(pd_worker.scheduler()) }); let cop = Endpoint::new(&Config::default(), engine, read_pool); // handler returns `finished == true` should not be called again. let counter = Arc::new(atomic::AtomicIsize::new(0)); let counter_clone = Arc::clone(&counter); let handler = StreamFromClosure::new(move |nth| match nth { 0 => { let mut resp = coppb::Response::new(); resp.set_data(vec![1, 2, 7]); Ok((Some(resp), true)) } _ => { // we cannot use `unreachable!()` here because CpuPool catches panic. counter_clone.store(1, atomic::Ordering::SeqCst); Err(box_err!("unreachable")) } }); let handler_builder = Box::new(move |_, _: &_| Ok(handler.into_boxed())); let resp_vec = cop .handle_stream_request(ReqContext::default_for_test(), handler_builder) .unwrap() .collect() .wait() .unwrap(); assert_eq!(resp_vec.len(), 1); assert_eq!(resp_vec[0].get_data(), [1, 2, 7]); assert_eq!(counter.load(atomic::Ordering::SeqCst), 0); // handler returns `None` but `finished == false` should not be called again. 
let counter = Arc::new(atomic::AtomicIsize::new(0)); let counter_clone = Arc::clone(&counter); let handler = StreamFromClosure::new(move |nth| match nth { 0 => { let mut resp = coppb::Response::new(); resp.set_data(vec![1, 2, 13]); Ok((Some(resp), false)) } 1 => Ok((None, false)), _ => { counter_clone.store(1, atomic::Ordering::SeqCst); Err(box_err!("unreachable")) } }); let handler_builder = Box::new(move |_, _: &_| Ok(handler.into_boxed())); let resp_vec = cop .handle_stream_request(ReqContext::default_for_test(), handler_builder) .unwrap() .collect() .wait() .unwrap(); assert_eq!(resp_vec.len(), 1); assert_eq!(resp_vec[0].get_data(), [1, 2, 13]); assert_eq!(counter.load(atomic::Ordering::SeqCst), 0); // handler returns `Err(..)` should not be called again. let counter = Arc::new(atomic::AtomicIsize::new(0)); let counter_clone = Arc::clone(&counter); let handler = StreamFromClosure::new(move |nth| match nth { 0 => { let mut resp = coppb::Response::new(); resp.set_data(vec![1, 2, 23]); Ok((Some(resp), false)) } 1 => Err(box_err!("foo")), _ => { counter_clone.store(1, atomic::Ordering::SeqCst); Err(box_err!("unreachable")) } }); let handler_builder = Box::new(move |_, _: &_| Ok(handler.into_boxed())); let resp_vec = cop .handle_stream_request(ReqContext::default_for_test(), handler_builder) .unwrap() .collect() .wait() .unwrap(); assert_eq!(resp_vec.len(), 2); assert_eq!(resp_vec[0].get_data(), [1, 2, 23]); assert!(!resp_vec[1].get_other_error().is_empty()); assert_eq!(counter.load(atomic::Ordering::SeqCst), 0); } #[test] fn test_channel_size() { let pd_worker = FutureWorker::new("test-pd-worker"); let engine = TestEngineBuilder::new().build().unwrap(); let read_pool = ReadPool::new("readpool", &readpool::Config::default_for_test(), || { ReadPoolContext::new(pd_worker.scheduler()) }); let cop = Endpoint::new( &Config { end_point_stream_channel_size: 3, ..Config::default() }, engine, read_pool, ); let counter = Arc::new(atomic::AtomicIsize::new(0)); let 
counter_clone = Arc::clone(&counter); let handler = StreamFromClosure::new(move |nth| { // produce an infinite stream let mut resp = coppb::Response::new(); resp.set_data(vec![1, 2, nth as u8]); counter_clone.fetch_add(1, atomic::Ordering::SeqCst); Ok((Some(resp), false)) }); let handler_builder = Box::new(move |_, _: &_| Ok(handler.into_boxed())); let resp_vec = cop .handle_stream_request(ReqContext::default_for_test(), handler_builder) .unwrap() .take(7) .collect() .wait() .unwrap(); assert_eq!(resp_vec.len(), 7); assert!(counter.load(atomic::Ordering::SeqCst) < 14); } #[test] fn test_handle_time() { use crate::util::config::ReadableDuration; /// Asserted that the snapshot can be retrieved in 500ms. const SNAPSHOT_DURATION_MS: i64 = 500; /// Asserted that the delay caused by OS scheduling other tasks is smaller than 200ms. /// This is mostly for CI. const HANDLE_ERROR_MS: i64 = 200; /// The acceptable error range for a coarse timer. Note that we use CLOCK_MONOTONIC_COARSE /// which can be slewed by time adjustment code (e.g., NTP, PTP). const COARSE_ERROR_MS: i64 = 50; /// The duration that payload executes. const PAYLOAD_SMALL: i64 = 3000; const PAYLOAD_LARGE: i64 = 6000; let pd_worker = FutureWorker::new("test-pd-worker"); let engine = TestEngineBuilder::new().build().unwrap(); let read_pool = ReadPool::new( "readpool", &readpool::Config::default_with_concurrency(1), || ReadPoolContext::new(pd_worker.scheduler()), ); let mut config = Config::default(); config.end_point_request_max_handle_duration = ReadableDuration::millis((PAYLOAD_SMALL + PAYLOAD_LARGE) as u64 * 2); let cop = Endpoint::new(&config, engine, read_pool); let (tx, rx) = std::sync::mpsc::channel(); // A request that requests execution details. let mut req_with_exec_detail = ReqContext::default_for_test(); req_with_exec_detail.context.set_handle_time(true); { let mut wait_time: i64 = 0; // Request 1: Unary, success response. 
let handler_builder = Box::new(|_, _: &_| { Ok(UnaryFixture::new_with_duration( Ok(coppb::Response::new()), PAYLOAD_SMALL as u64, ) .into_boxed()) }); let resp_future_1 = cop .handle_unary_request(req_with_exec_detail.clone(), handler_builder) .unwrap(); let sender = tx.clone(); thread::spawn(move || sender.send(vec![resp_future_1.wait().unwrap()]).unwrap()); // Sleep a while to make sure that thread is spawn and snapshot is taken. thread::sleep(Duration::from_millis(SNAPSHOT_DURATION_MS as u64)); // Request 2: Unary, error response. let handler_builder = Box::new(|_, _: &_| { Ok( UnaryFixture::new_with_duration(Err(box_err!("foo")), PAYLOAD_LARGE as u64) .into_boxed(), ) }); let resp_future_2 = cop .handle_unary_request(req_with_exec_detail.clone(), handler_builder) .unwrap(); let sender = tx.clone(); thread::spawn(move || sender.send(vec![resp_future_2.wait().unwrap()]).unwrap()); thread::sleep(Duration::from_millis(SNAPSHOT_DURATION_MS as u64)); // Response 1 let resp = &rx.recv().unwrap()[0]; assert!(resp.get_other_error().is_empty()); assert_ge!( resp.get_exec_details().get_handle_time().get_process_ms(), PAYLOAD_SMALL - COARSE_ERROR_MS ); assert_lt!( resp.get_exec_details().get_handle_time().get_process_ms(), PAYLOAD_SMALL + HANDLE_ERROR_MS + COARSE_ERROR_MS ); assert_ge!( resp.get_exec_details().get_handle_time().get_wait_ms(), wait_time - HANDLE_ERROR_MS - COARSE_ERROR_MS ); assert_lt!( resp.get_exec_details().get_handle_time().get_wait_ms(), wait_time + HANDLE_ERROR_MS + COARSE_ERROR_MS ); wait_time += PAYLOAD_SMALL - SNAPSHOT_DURATION_MS; // Response 2 let resp = &rx.recv().unwrap()[0]; assert!(!resp.get_other_error().is_empty()); assert_ge!( resp.get_exec_details().get_handle_time().get_process_ms(), PAYLOAD_LARGE - COARSE_ERROR_MS ); assert_lt!( resp.get_exec_details().get_handle_time().get_process_ms(), PAYLOAD_LARGE + HANDLE_ERROR_MS + COARSE_ERROR_MS ); assert_ge!( resp.get_exec_details().get_handle_time().get_wait_ms(), wait_time - HANDLE_ERROR_MS - 
COARSE_ERROR_MS ); assert_lt!( resp.get_exec_details().get_handle_time().get_wait_ms(), wait_time + HANDLE_ERROR_MS + COARSE_ERROR_MS ); } { let mut wait_time: i64 = 0; // Request 1: Unary, success response. let handler_builder = Box::new(|_, _: &_| { Ok(UnaryFixture::new_with_duration( Ok(coppb::Response::new()), PAYLOAD_LARGE as u64, ) .into_boxed()) }); let resp_future_1 = cop .handle_unary_request(req_with_exec_detail.clone(), handler_builder) .unwrap(); let sender = tx.clone(); thread::spawn(move || sender.send(vec![resp_future_1.wait().unwrap()]).unwrap()); // Sleep a while to make sure that thread is spawn and snapshot is taken. thread::sleep(Duration::from_millis(SNAPSHOT_DURATION_MS as u64)); // Request 2: Stream. let handler_builder = Box::new(|_, _: &_| { Ok(StreamFixture::new_with_duration( vec![ Ok(coppb::Response::new()), Err(box_err!("foo")), Ok(coppb::Response::new()), ], vec![ PAYLOAD_SMALL as u64, PAYLOAD_LARGE as u64, PAYLOAD_SMALL as u64, ], ) .into_boxed()) }); let resp_future_3 = cop .handle_stream_request(req_with_exec_detail.clone(), handler_builder) .unwrap(); let sender = tx.clone(); thread::spawn(move || { sender .send(resp_future_3.collect().wait().unwrap()) .unwrap() }); // Response 1 let resp = &rx.recv().unwrap()[0]; assert!(resp.get_other_error().is_empty()); assert_ge!( resp.get_exec_details().get_handle_time().get_process_ms(), PAYLOAD_LARGE - COARSE_ERROR_MS ); assert_lt!( resp.get_exec_details().get_handle_time().get_process_ms(), PAYLOAD_LARGE + HANDLE_ERROR_MS + COARSE_ERROR_MS ); assert_ge!( resp.get_exec_details().get_handle_time().get_wait_ms(), wait_time - HANDLE_ERROR_MS - COARSE_ERROR_MS ); assert_lt!( resp.get_exec_details().get_handle_time().get_wait_ms(), wait_time + HANDLE_ERROR_MS + COARSE_ERROR_MS ); wait_time += PAYLOAD_LARGE - SNAPSHOT_DURATION_MS; // Response 2 let resp = &rx.recv().unwrap(); assert_eq!(resp.len(), 2); assert!(resp[0].get_other_error().is_empty()); assert_ge!( resp[0] .get_exec_details() 
.get_handle_time() .get_process_ms(), PAYLOAD_SMALL - COARSE_ERROR_MS ); assert_lt!( resp[0] .get_exec_details() .get_handle_time() .get_process_ms(), PAYLOAD_SMALL + HANDLE_ERROR_MS + COARSE_ERROR_MS ); assert_ge!( resp[0].get_exec_details().get_handle_time().get_wait_ms(), wait_time - HANDLE_ERROR_MS - COARSE_ERROR_MS ); assert_lt!( resp[0].get_exec_details().get_handle_time().get_wait_ms(), wait_time + HANDLE_ERROR_MS + COARSE_ERROR_MS ); assert!(!resp[1].get_other_error().is_empty()); assert_ge!( resp[1] .get_exec_details() .get_handle_time() .get_process_ms(), PAYLOAD_LARGE - COARSE_ERROR_MS ); assert_lt!( resp[1] .get_exec_details() .get_handle_time() .get_process_ms(), PAYLOAD_LARGE + HANDLE_ERROR_MS + COARSE_ERROR_MS ); assert_ge!( resp[1].get_exec_details().get_handle_time().get_wait_ms(), wait_time - HANDLE_ERROR_MS - COARSE_ERROR_MS ); assert_lt!( resp[1].get_exec_details().get_handle_time().get_wait_ms(), wait_time + HANDLE_ERROR_MS + COARSE_ERROR_MS ); } } }
38.24663
105
0.528375
759f2cc86572bb113be37cf5d4f875a3fd045adf
41,166
//! Transforms `ast::Expr` into an equivalent `hir_def::expr::Expr` //! representation. use std::{mem, sync::Arc}; use either::Either; use hir_expand::{ ast_id_map::{AstIdMap, FileAstId}, hygiene::Hygiene, name::{name, AsName, Name}, ExpandError, HirFileId, InFile, }; use la_arena::Arena; use profile::Count; use syntax::{ ast::{ self, ArgListOwner, ArrayExprKind, AstChildren, LiteralKind, LoopBodyOwner, NameOwner, SlicePatComponents, }, AstNode, AstPtr, SyntaxNodePtr, }; use crate::{ adt::StructKind, body::{Body, BodySourceMap, Expander, LabelSource, PatPtr, SyntheticSyntax}, body::{BodyDiagnostic, ExprSource, PatSource}, builtin_type::{BuiltinFloat, BuiltinInt, BuiltinUint}, db::DefDatabase, expr::{ dummy_expr_id, Array, BindingAnnotation, Expr, ExprId, Label, LabelId, Literal, MatchArm, MatchGuard, Pat, PatId, RecordFieldPat, RecordLitField, Statement, }, intern::Interned, item_scope::BuiltinShadowMode, path::{GenericArgs, Path}, type_ref::{Mutability, Rawness, TypeRef}, AdtId, BlockLoc, ModuleDefId, UnresolvedMacro, }; pub struct LowerCtx<'a> { pub db: &'a dyn DefDatabase, hygiene: Hygiene, file_id: Option<HirFileId>, source_ast_id_map: Option<Arc<AstIdMap>>, } impl<'a> LowerCtx<'a> { pub fn new(db: &'a dyn DefDatabase, file_id: HirFileId) -> Self { LowerCtx { db, hygiene: Hygiene::new(db.upcast(), file_id), file_id: Some(file_id), source_ast_id_map: Some(db.ast_id_map(file_id)), } } pub fn with_hygiene(db: &'a dyn DefDatabase, hygiene: &Hygiene) -> Self { LowerCtx { db, hygiene: hygiene.clone(), file_id: None, source_ast_id_map: None } } pub(crate) fn hygiene(&self) -> &Hygiene { &self.hygiene } pub(crate) fn file_id(&self) -> HirFileId { self.file_id.unwrap() } pub(crate) fn lower_path(&self, ast: ast::Path) -> Option<Path> { Path::from_src(ast, self) } pub(crate) fn ast_id<N: AstNode>(&self, item: &N) -> Option<FileAstId<N>> { self.source_ast_id_map.as_ref().map(|ast_id_map| ast_id_map.ast_id(item)) } } pub(super) fn lower( db: &dyn DefDatabase, expander: 
Expander, params: Option<ast::ParamList>, body: Option<ast::Expr>, ) -> (Body, BodySourceMap) { ExprCollector { db, source_map: BodySourceMap::default(), body: Body { exprs: Arena::default(), pats: Arena::default(), labels: Arena::default(), params: Vec::new(), body_expr: dummy_expr_id(), block_scopes: Vec::new(), _c: Count::new(), }, expander, statements_in_scope: Vec::new(), } .collect(params, body) } struct ExprCollector<'a> { db: &'a dyn DefDatabase, expander: Expander, body: Body, source_map: BodySourceMap, statements_in_scope: Vec<Statement>, } impl ExprCollector<'_> { fn collect( mut self, param_list: Option<ast::ParamList>, body: Option<ast::Expr>, ) -> (Body, BodySourceMap) { if let Some(param_list) = param_list { if let Some(self_param) = param_list.self_param() { let ptr = AstPtr::new(&self_param); let param_pat = self.alloc_pat( Pat::Bind { name: name![self], mode: BindingAnnotation::new( self_param.mut_token().is_some() && self_param.amp_token().is_none(), false, ), subpat: None, }, Either::Right(ptr), ); self.body.params.push(param_pat); } for param in param_list.params() { let pat = match param.pat() { None => continue, Some(pat) => pat, }; let param_pat = self.collect_pat(pat); self.body.params.push(param_pat); } }; self.body.body_expr = self.collect_expr_opt(body); (self.body, self.source_map) } fn ctx(&self) -> LowerCtx<'_> { LowerCtx::new(self.db, self.expander.current_file_id) } fn alloc_expr(&mut self, expr: Expr, ptr: AstPtr<ast::Expr>) -> ExprId { let src = self.expander.to_source(ptr); let id = self.make_expr(expr, Ok(src.clone())); self.source_map.expr_map.insert(src, id); id } // desugared exprs don't have ptr, that's wrong and should be fixed // somehow. 
fn alloc_expr_desugared(&mut self, expr: Expr) -> ExprId { self.make_expr(expr, Err(SyntheticSyntax)) } fn unit(&mut self) -> ExprId { self.alloc_expr_desugared(Expr::Tuple { exprs: Vec::new() }) } fn missing_expr(&mut self) -> ExprId { self.alloc_expr_desugared(Expr::Missing) } fn make_expr(&mut self, expr: Expr, src: Result<ExprSource, SyntheticSyntax>) -> ExprId { let id = self.body.exprs.alloc(expr); self.source_map.expr_map_back.insert(id, src); id } fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId { let src = self.expander.to_source(ptr); let id = self.make_pat(pat, Ok(src.clone())); self.source_map.pat_map.insert(src, id); id } fn missing_pat(&mut self) -> PatId { self.make_pat(Pat::Missing, Err(SyntheticSyntax)) } fn make_pat(&mut self, pat: Pat, src: Result<PatSource, SyntheticSyntax>) -> PatId { let id = self.body.pats.alloc(pat); self.source_map.pat_map_back.insert(id, src); id } fn alloc_label(&mut self, label: Label, ptr: AstPtr<ast::Label>) -> LabelId { let src = self.expander.to_source(ptr); let id = self.make_label(label, src.clone()); self.source_map.label_map.insert(src, id); id } fn make_label(&mut self, label: Label, src: LabelSource) -> LabelId { let id = self.body.labels.alloc(label); self.source_map.label_map_back.insert(id, src); id } fn collect_expr(&mut self, expr: ast::Expr) -> ExprId { self.maybe_collect_expr(expr).unwrap_or_else(|| self.missing_expr()) } /// Returns `None` if and only if the expression is `#[cfg]`d out. 
fn maybe_collect_expr(&mut self, expr: ast::Expr) -> Option<ExprId> { let syntax_ptr = AstPtr::new(&expr); self.check_cfg(&expr)?; Some(match expr { ast::Expr::IfExpr(e) => { let then_branch = self.collect_block_opt(e.then_branch()); let else_branch = e.else_branch().map(|b| match b { ast::ElseBranch::Block(it) => self.collect_block(it), ast::ElseBranch::IfExpr(elif) => { let expr: ast::Expr = ast::Expr::cast(elif.syntax().clone()).unwrap(); self.collect_expr(expr) } }); let condition = match e.condition() { None => self.missing_expr(), Some(condition) => match condition.pat() { None => self.collect_expr_opt(condition.expr()), // if let -- desugar to match Some(pat) => { let pat = self.collect_pat(pat); let match_expr = self.collect_expr_opt(condition.expr()); let placeholder_pat = self.missing_pat(); let arms = vec![ MatchArm { pat, expr: then_branch, guard: None }, MatchArm { pat: placeholder_pat, expr: else_branch.unwrap_or_else(|| self.unit()), guard: None, }, ]; return Some( self.alloc_expr(Expr::Match { expr: match_expr, arms }, syntax_ptr), ); } }, }; self.alloc_expr(Expr::If { condition, then_branch, else_branch }, syntax_ptr) } ast::Expr::EffectExpr(e) => match e.effect() { ast::Effect::Try(_) => { let body = self.collect_block_opt(e.block_expr()); self.alloc_expr(Expr::TryBlock { body }, syntax_ptr) } ast::Effect::Unsafe(_) => { let body = self.collect_block_opt(e.block_expr()); self.alloc_expr(Expr::Unsafe { body }, syntax_ptr) } // FIXME: we need to record these effects somewhere... ast::Effect::Label(label) => { let label = self.collect_label(label); match e.block_expr() { Some(block) => { let res = self.collect_block(block); match &mut self.body.exprs[res] { Expr::Block { label: block_label, .. } => { *block_label = Some(label); } _ => unreachable!(), } res } None => self.missing_expr(), } } // FIXME: we need to record these effects somewhere... 
ast::Effect::Async(_) => { let body = self.collect_block_opt(e.block_expr()); self.alloc_expr(Expr::Async { body }, syntax_ptr) } ast::Effect::Const(_) => { let body = self.collect_block_opt(e.block_expr()); self.alloc_expr(Expr::Const { body }, syntax_ptr) } }, ast::Expr::BlockExpr(e) => self.collect_block(e), ast::Expr::LoopExpr(e) => { let label = e.label().map(|label| self.collect_label(label)); let body = self.collect_block_opt(e.loop_body()); self.alloc_expr(Expr::Loop { body, label }, syntax_ptr) } ast::Expr::WhileExpr(e) => { let label = e.label().map(|label| self.collect_label(label)); let body = self.collect_block_opt(e.loop_body()); let condition = match e.condition() { None => self.missing_expr(), Some(condition) => match condition.pat() { None => self.collect_expr_opt(condition.expr()), // if let -- desugar to match Some(pat) => { cov_mark::hit!(infer_resolve_while_let); let pat = self.collect_pat(pat); let match_expr = self.collect_expr_opt(condition.expr()); let placeholder_pat = self.missing_pat(); let break_ = self.alloc_expr_desugared(Expr::Break { expr: None, label: None }); let arms = vec![ MatchArm { pat, expr: body, guard: None }, MatchArm { pat: placeholder_pat, expr: break_, guard: None }, ]; let match_expr = self.alloc_expr_desugared(Expr::Match { expr: match_expr, arms }); return Some( self.alloc_expr(Expr::Loop { body: match_expr, label }, syntax_ptr), ); } }, }; self.alloc_expr(Expr::While { condition, body, label }, syntax_ptr) } ast::Expr::ForExpr(e) => { let label = e.label().map(|label| self.collect_label(label)); let iterable = self.collect_expr_opt(e.iterable()); let pat = self.collect_pat_opt(e.pat()); let body = self.collect_block_opt(e.loop_body()); self.alloc_expr(Expr::For { iterable, pat, body, label }, syntax_ptr) } ast::Expr::CallExpr(e) => { let callee = self.collect_expr_opt(e.expr()); let args = if let Some(arg_list) = e.arg_list() { arg_list.args().filter_map(|e| self.maybe_collect_expr(e)).collect() } else { Vec::new() 
}; self.alloc_expr(Expr::Call { callee, args }, syntax_ptr) } ast::Expr::MethodCallExpr(e) => { let receiver = self.collect_expr_opt(e.receiver()); let args = if let Some(arg_list) = e.arg_list() { arg_list.args().filter_map(|e| self.maybe_collect_expr(e)).collect() } else { Vec::new() }; let method_name = e.name_ref().map(|nr| nr.as_name()).unwrap_or_else(Name::missing); let generic_args = e .generic_arg_list() .and_then(|it| GenericArgs::from_ast(&self.ctx(), it)) .map(Box::new); self.alloc_expr( Expr::MethodCall { receiver, method_name, args, generic_args }, syntax_ptr, ) } ast::Expr::MatchExpr(e) => { let expr = self.collect_expr_opt(e.expr()); let arms = if let Some(match_arm_list) = e.match_arm_list() { match_arm_list .arms() .filter_map(|arm| { self.check_cfg(&arm).map(|()| MatchArm { pat: self.collect_pat_opt(arm.pat()), expr: self.collect_expr_opt(arm.expr()), guard: arm.guard().map(|guard| match guard.pat() { Some(pat) => MatchGuard::IfLet { pat: self.collect_pat(pat), expr: self.collect_expr_opt(guard.expr()), }, None => { MatchGuard::If { expr: self.collect_expr_opt(guard.expr()) } } }), }) }) .collect() } else { Vec::new() }; self.alloc_expr(Expr::Match { expr, arms }, syntax_ptr) } ast::Expr::PathExpr(e) => { let path = e .path() .and_then(|path| self.expander.parse_path(self.db, path)) .map(Expr::Path) .unwrap_or(Expr::Missing); self.alloc_expr(path, syntax_ptr) } ast::Expr::ContinueExpr(e) => self.alloc_expr( Expr::Continue { label: e.lifetime().map(|l| Name::new_lifetime(&l)) }, syntax_ptr, ), ast::Expr::BreakExpr(e) => { let expr = e.expr().map(|e| self.collect_expr(e)); self.alloc_expr( Expr::Break { expr, label: e.lifetime().map(|l| Name::new_lifetime(&l)) }, syntax_ptr, ) } ast::Expr::ParenExpr(e) => { let inner = self.collect_expr_opt(e.expr()); // make the paren expr point to the inner expression as well let src = self.expander.to_source(syntax_ptr); self.source_map.expr_map.insert(src, inner); inner } ast::Expr::ReturnExpr(e) => { let expr = 
e.expr().map(|e| self.collect_expr(e)); self.alloc_expr(Expr::Return { expr }, syntax_ptr) } ast::Expr::YieldExpr(e) => { let expr = e.expr().map(|e| self.collect_expr(e)); self.alloc_expr(Expr::Yield { expr }, syntax_ptr) } ast::Expr::RecordExpr(e) => { let path = e.path().and_then(|path| self.expander.parse_path(self.db, path)).map(Box::new); let record_lit = if let Some(nfl) = e.record_expr_field_list() { let fields = nfl .fields() .filter_map(|field| { self.check_cfg(&field)?; let name = field.field_name()?.as_name(); let expr = match field.expr() { Some(e) => self.collect_expr(e), None => self.missing_expr(), }; let src = self.expander.to_source(AstPtr::new(&field)); self.source_map.field_map.insert(src.clone(), expr); self.source_map.field_map_back.insert(expr, src); Some(RecordLitField { name, expr }) }) .collect(); let spread = nfl.spread().map(|s| self.collect_expr(s)); Expr::RecordLit { path, fields, spread } } else { Expr::RecordLit { path, fields: Vec::new(), spread: None } }; self.alloc_expr(record_lit, syntax_ptr) } ast::Expr::FieldExpr(e) => { let expr = self.collect_expr_opt(e.expr()); let name = match e.field_access() { Some(kind) => kind.as_name(), _ => Name::missing(), }; self.alloc_expr(Expr::Field { expr, name }, syntax_ptr) } ast::Expr::AwaitExpr(e) => { let expr = self.collect_expr_opt(e.expr()); self.alloc_expr(Expr::Await { expr }, syntax_ptr) } ast::Expr::TryExpr(e) => { let expr = self.collect_expr_opt(e.expr()); self.alloc_expr(Expr::Try { expr }, syntax_ptr) } ast::Expr::CastExpr(e) => { let expr = self.collect_expr_opt(e.expr()); let type_ref = Interned::new(TypeRef::from_ast_opt(&self.ctx(), e.ty())); self.alloc_expr(Expr::Cast { expr, type_ref }, syntax_ptr) } ast::Expr::RefExpr(e) => { let expr = self.collect_expr_opt(e.expr()); let raw_tok = e.raw_token().is_some(); let mutability = if raw_tok { if e.mut_token().is_some() { Mutability::Mut } else if e.const_token().is_some() { Mutability::Shared } else { unreachable!("parser only 
remaps to raw_token() if matching mutability token follows") } } else { Mutability::from_mutable(e.mut_token().is_some()) }; let rawness = Rawness::from_raw(raw_tok); self.alloc_expr(Expr::Ref { expr, rawness, mutability }, syntax_ptr) } ast::Expr::PrefixExpr(e) => { let expr = self.collect_expr_opt(e.expr()); if let Some(op) = e.op_kind() { self.alloc_expr(Expr::UnaryOp { expr, op }, syntax_ptr) } else { self.alloc_expr(Expr::Missing, syntax_ptr) } } ast::Expr::ClosureExpr(e) => { let mut args = Vec::new(); let mut arg_types = Vec::new(); if let Some(pl) = e.param_list() { for param in pl.params() { let pat = self.collect_pat_opt(param.pat()); let type_ref = param.ty().map(|it| Interned::new(TypeRef::from_ast(&self.ctx(), it))); args.push(pat); arg_types.push(type_ref); } } let ret_type = e .ret_type() .and_then(|r| r.ty()) .map(|it| Interned::new(TypeRef::from_ast(&self.ctx(), it))); let body = self.collect_expr_opt(e.body()); self.alloc_expr(Expr::Lambda { args, arg_types, ret_type, body }, syntax_ptr) } ast::Expr::BinExpr(e) => { let lhs = self.collect_expr_opt(e.lhs()); let rhs = self.collect_expr_opt(e.rhs()); let op = e.op_kind(); self.alloc_expr(Expr::BinaryOp { lhs, rhs, op }, syntax_ptr) } ast::Expr::TupleExpr(e) => { let exprs = e.fields().map(|expr| self.collect_expr(expr)).collect(); self.alloc_expr(Expr::Tuple { exprs }, syntax_ptr) } ast::Expr::BoxExpr(e) => { let expr = self.collect_expr_opt(e.expr()); self.alloc_expr(Expr::Box { expr }, syntax_ptr) } ast::Expr::ArrayExpr(e) => { let kind = e.kind(); match kind { ArrayExprKind::ElementList(e) => { let exprs = e.map(|expr| self.collect_expr(expr)).collect(); self.alloc_expr(Expr::Array(Array::ElementList(exprs)), syntax_ptr) } ArrayExprKind::Repeat { initializer, repeat } => { let initializer = self.collect_expr_opt(initializer); let repeat = self.collect_expr_opt(repeat); self.alloc_expr( Expr::Array(Array::Repeat { initializer, repeat }), syntax_ptr, ) } } } ast::Expr::Literal(e) => 
self.alloc_expr(Expr::Literal(e.kind().into()), syntax_ptr), ast::Expr::IndexExpr(e) => { let base = self.collect_expr_opt(e.base()); let index = self.collect_expr_opt(e.index()); self.alloc_expr(Expr::Index { base, index }, syntax_ptr) } ast::Expr::RangeExpr(e) => { let lhs = e.start().map(|lhs| self.collect_expr(lhs)); let rhs = e.end().map(|rhs| self.collect_expr(rhs)); match e.op_kind() { Some(range_type) => { self.alloc_expr(Expr::Range { lhs, rhs, range_type }, syntax_ptr) } None => self.alloc_expr(Expr::Missing, syntax_ptr), } } ast::Expr::MacroCall(e) => { let macro_ptr = AstPtr::new(&e); let mut ids = vec![]; self.collect_macro_call(e, macro_ptr, |this, expansion| { ids.push(match expansion { Some(it) => this.collect_expr(it), None => this.alloc_expr(Expr::Missing, syntax_ptr.clone()), }) }); ids[0] } ast::Expr::MacroStmts(e) => { e.statements().for_each(|s| self.collect_stmt(s)); let tail = e .expr() .map(|e| self.collect_expr(e)) .unwrap_or_else(|| self.alloc_expr(Expr::Missing, syntax_ptr.clone())); self.alloc_expr(Expr::MacroStmts { tail }, syntax_ptr) } }) } fn collect_macro_call<F: FnMut(&mut Self, Option<T>), T: ast::AstNode>( &mut self, e: ast::MacroCall, syntax_ptr: AstPtr<ast::MacroCall>, mut collector: F, ) { // File containing the macro call. Expansion errors will be attached here. 
let outer_file = self.expander.current_file_id; let macro_call = self.expander.to_source(AstPtr::new(&e)); let res = self.expander.enter_expand(self.db, e); let res = match res { Ok(res) => res, Err(UnresolvedMacro { path }) => { self.source_map.diagnostics.push(BodyDiagnostic::UnresolvedMacroCall { node: InFile::new(outer_file, syntax_ptr), path, }); collector(self, None); return; } }; match &res.err { Some(ExpandError::UnresolvedProcMacro) => { self.source_map.diagnostics.push(BodyDiagnostic::UnresolvedProcMacro { node: InFile::new(outer_file, syntax_ptr), }); } Some(err) => { self.source_map.diagnostics.push(BodyDiagnostic::MacroError { node: InFile::new(outer_file, syntax_ptr), message: err.to_string(), }); } None => {} } match res.value { Some((mark, expansion)) => { self.source_map.expansions.insert(macro_call, self.expander.current_file_id); let id = collector(self, Some(expansion)); self.expander.exit(self.db, mark); id } None => collector(self, None), } } fn collect_expr_opt(&mut self, expr: Option<ast::Expr>) -> ExprId { if let Some(expr) = expr { self.collect_expr(expr) } else { self.missing_expr() } } fn collect_stmt(&mut self, s: ast::Stmt) { match s { ast::Stmt::LetStmt(stmt) => { if self.check_cfg(&stmt).is_none() { return; } let pat = self.collect_pat_opt(stmt.pat()); let type_ref = stmt.ty().map(|it| Interned::new(TypeRef::from_ast(&self.ctx(), it))); let initializer = stmt.initializer().map(|e| self.collect_expr(e)); self.statements_in_scope.push(Statement::Let { pat, type_ref, initializer }); } ast::Stmt::ExprStmt(stmt) => { if self.check_cfg(&stmt).is_none() { return; } let has_semi = stmt.semicolon_token().is_some(); // Note that macro could be expended to multiple statements if let Some(ast::Expr::MacroCall(m)) = stmt.expr() { let macro_ptr = AstPtr::new(&m); let syntax_ptr = AstPtr::new(&stmt.expr().unwrap()); self.collect_macro_call(m, macro_ptr, |this, expansion| match expansion { Some(expansion) => { let statements: ast::MacroStmts = 
expansion; statements.statements().for_each(|stmt| this.collect_stmt(stmt)); if let Some(expr) = statements.expr() { let expr = this.collect_expr(expr); this.statements_in_scope.push(Statement::Expr { expr, has_semi }); } } None => { let expr = this.alloc_expr(Expr::Missing, syntax_ptr.clone()); this.statements_in_scope.push(Statement::Expr { expr, has_semi }); } }); } else { let expr = self.collect_expr_opt(stmt.expr()); self.statements_in_scope.push(Statement::Expr { expr, has_semi }); } } ast::Stmt::Item(item) => { self.check_cfg(&item); } } } fn collect_block(&mut self, block: ast::BlockExpr) -> ExprId { let ast_id = self.expander.ast_id(&block); let block_loc = BlockLoc { ast_id, module: self.expander.def_map.module_id(self.expander.module) }; let block_id = self.db.intern_block(block_loc); let (module, def_map) = match self.db.block_def_map(block_id) { Some(def_map) => { self.body.block_scopes.push(block_id); (def_map.root(), def_map) } None => (self.expander.module, self.expander.def_map.clone()), }; let prev_def_map = mem::replace(&mut self.expander.def_map, def_map); let prev_local_module = mem::replace(&mut self.expander.module, module); let prev_statements = std::mem::take(&mut self.statements_in_scope); block.statements().for_each(|s| self.collect_stmt(s)); block.tail_expr().and_then(|e| { let expr = self.maybe_collect_expr(e)?; self.statements_in_scope.push(Statement::Expr { expr, has_semi: false }); Some(()) }); let mut tail = None; if let Some(Statement::Expr { expr, has_semi: false }) = self.statements_in_scope.last() { tail = Some(*expr); self.statements_in_scope.pop(); } let tail = tail; let statements = std::mem::replace(&mut self.statements_in_scope, prev_statements); let syntax_node_ptr = AstPtr::new(&block.into()); let expr_id = self.alloc_expr( Expr::Block { id: block_id, statements, tail, label: None }, syntax_node_ptr, ); self.expander.def_map = prev_def_map; self.expander.module = prev_local_module; expr_id } fn collect_block_opt(&mut 
self, expr: Option<ast::BlockExpr>) -> ExprId { if let Some(block) = expr { self.collect_block(block) } else { self.missing_expr() } } fn collect_label(&mut self, ast_label: ast::Label) -> LabelId { let label = Label { name: ast_label.lifetime().as_ref().map_or_else(Name::missing, Name::new_lifetime), }; self.alloc_label(label, AstPtr::new(&ast_label)) } fn collect_pat(&mut self, pat: ast::Pat) -> PatId { let pattern = match &pat { ast::Pat::IdentPat(bp) => { let name = bp.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing); let annotation = BindingAnnotation::new(bp.mut_token().is_some(), bp.ref_token().is_some()); let subpat = bp.pat().map(|subpat| self.collect_pat(subpat)); if annotation == BindingAnnotation::Unannotated && subpat.is_none() { // This could also be a single-segment path pattern. To // decide that, we need to try resolving the name. let (resolved, _) = self.expander.def_map.resolve_path( self.db, self.expander.module, &name.clone().into(), BuiltinShadowMode::Other, ); match resolved.take_values() { Some(ModuleDefId::ConstId(_)) => Pat::Path(name.into()), Some(ModuleDefId::EnumVariantId(_)) => { // this is only really valid for unit variants, but // shadowing other enum variants with a pattern is // an error anyway Pat::Path(name.into()) } Some(ModuleDefId::AdtId(AdtId::StructId(s))) if self.db.struct_data(s).variant_data.kind() != StructKind::Record => { // Funnily enough, record structs *can* be shadowed // by pattern bindings (but unit or tuple structs // can't). 
Pat::Path(name.into()) } // shadowing statics is an error as well, so we just ignore that case here _ => Pat::Bind { name, mode: annotation, subpat }, } } else { Pat::Bind { name, mode: annotation, subpat } } } ast::Pat::TupleStructPat(p) => { let path = p.path().and_then(|path| self.expander.parse_path(self.db, path)).map(Box::new); let (args, ellipsis) = self.collect_tuple_pat(p.fields()); Pat::TupleStruct { path, args, ellipsis } } ast::Pat::RefPat(p) => { let pat = self.collect_pat_opt(p.pat()); let mutability = Mutability::from_mutable(p.mut_token().is_some()); Pat::Ref { pat, mutability } } ast::Pat::PathPat(p) => { let path = p.path().and_then(|path| self.expander.parse_path(self.db, path)).map(Box::new); path.map(Pat::Path).unwrap_or(Pat::Missing) } ast::Pat::OrPat(p) => { let pats = p.pats().map(|p| self.collect_pat(p)).collect(); Pat::Or(pats) } ast::Pat::ParenPat(p) => return self.collect_pat_opt(p.pat()), ast::Pat::TuplePat(p) => { let (args, ellipsis) = self.collect_tuple_pat(p.fields()); Pat::Tuple { args, ellipsis } } ast::Pat::WildcardPat(_) => Pat::Wild, ast::Pat::RecordPat(p) => { let path = p.path().and_then(|path| self.expander.parse_path(self.db, path)).map(Box::new); let args: Vec<_> = p .record_pat_field_list() .expect("every struct should have a field list") .fields() .filter_map(|f| { let ast_pat = f.pat()?; let pat = self.collect_pat(ast_pat); let name = f.field_name()?.as_name(); Some(RecordFieldPat { name, pat }) }) .collect(); let ellipsis = p .record_pat_field_list() .expect("every struct should have a field list") .dotdot_token() .is_some(); Pat::Record { path, args, ellipsis } } ast::Pat::SlicePat(p) => { let SlicePatComponents { prefix, slice, suffix } = p.components(); // FIXME properly handle `RestPat` Pat::Slice { prefix: prefix.into_iter().map(|p| self.collect_pat(p)).collect(), slice: slice.map(|p| self.collect_pat(p)), suffix: suffix.into_iter().map(|p| self.collect_pat(p)).collect(), } } ast::Pat::LiteralPat(lit) => { if let 
Some(ast_lit) = lit.literal() { let expr = Expr::Literal(ast_lit.kind().into()); let expr_ptr = AstPtr::new(&ast::Expr::Literal(ast_lit)); let expr_id = self.alloc_expr(expr, expr_ptr); Pat::Lit(expr_id) } else { Pat::Missing } } ast::Pat::RestPat(_) => { // `RestPat` requires special handling and should not be mapped // to a Pat. Here we are using `Pat::Missing` as a fallback for // when `RestPat` is mapped to `Pat`, which can easily happen // when the source code being analyzed has a malformed pattern // which includes `..` in a place where it isn't valid. Pat::Missing } ast::Pat::BoxPat(boxpat) => { let inner = self.collect_pat_opt(boxpat.pat()); Pat::Box { inner } } ast::Pat::ConstBlockPat(const_block_pat) => { if let Some(expr) = const_block_pat.block_expr() { let expr_id = self.collect_block(expr); Pat::ConstBlock(expr_id) } else { Pat::Missing } } ast::Pat::MacroPat(mac) => match mac.macro_call() { Some(call) => { let macro_ptr = AstPtr::new(&call); let mut pat = None; self.collect_macro_call(call, macro_ptr, |this, expanded_pat| { pat = Some(this.collect_pat_opt(expanded_pat)); }); match pat { Some(pat) => return pat, None => Pat::Missing, } } None => Pat::Missing, }, // FIXME: implement ast::Pat::RangePat(_) => Pat::Missing, }; let ptr = AstPtr::new(&pat); self.alloc_pat(pattern, Either::Left(ptr)) } fn collect_pat_opt(&mut self, pat: Option<ast::Pat>) -> PatId { if let Some(pat) = pat { self.collect_pat(pat) } else { self.missing_pat() } } fn collect_tuple_pat(&mut self, args: AstChildren<ast::Pat>) -> (Vec<PatId>, Option<usize>) { // Find the location of the `..`, if there is one. Note that we do not // consider the possibility of there being multiple `..` here. let ellipsis = args.clone().position(|p| matches!(p, ast::Pat::RestPat(_))); // We want to skip the `..` pattern here, since we account for it above. 
let args = args .filter(|p| !matches!(p, ast::Pat::RestPat(_))) .map(|p| self.collect_pat(p)) .collect(); (args, ellipsis) } /// Returns `None` (and emits diagnostics) when `owner` if `#[cfg]`d out, and `Some(())` when /// not. fn check_cfg(&mut self, owner: &dyn ast::AttrsOwner) -> Option<()> { match self.expander.parse_attrs(self.db, owner).cfg() { Some(cfg) => { if self.expander.cfg_options().check(&cfg) != Some(false) { return Some(()); } self.source_map.diagnostics.push(BodyDiagnostic::InactiveCode { node: InFile::new( self.expander.current_file_id, SyntaxNodePtr::new(owner.syntax()), ), cfg, opts: self.expander.cfg_options().clone(), }); None } None => Some(()), } } } impl From<ast::LiteralKind> for Literal { fn from(ast_lit_kind: ast::LiteralKind) -> Self { match ast_lit_kind { // FIXME: these should have actual values filled in, but unsure on perf impact LiteralKind::IntNumber(lit) => { if let builtin @ Some(_) = lit.suffix().and_then(BuiltinFloat::from_suffix) { Literal::Float(Default::default(), builtin) } else if let builtin @ Some(_) = lit.suffix().and_then(|it| BuiltinInt::from_suffix(it)) { Literal::Int(lit.value().unwrap_or(0) as i128, builtin) } else { let builtin = lit.suffix().and_then(|it| BuiltinUint::from_suffix(it)); Literal::Uint(lit.value().unwrap_or(0), builtin) } } LiteralKind::FloatNumber(lit) => { let ty = lit.suffix().and_then(|it| BuiltinFloat::from_suffix(it)); Literal::Float(Default::default(), ty) } LiteralKind::ByteString(bs) => { let text = bs.value().map(Vec::from).unwrap_or_else(Default::default); Literal::ByteString(text) } LiteralKind::String(_) => Literal::String(Default::default()), LiteralKind::Byte => Literal::Uint(Default::default(), Some(BuiltinUint::U8)), LiteralKind::Bool(val) => Literal::Bool(val), LiteralKind::Char => Literal::Char(Default::default()), } } }
42.308325
110
0.472356
e8f794038e41f049078d4ddc5777a627af5443ba
12,044
use std::fmt::Debug; use serde::{de::DeserializeOwned, Serialize}; use transmog::{Format, OwnedDeserializer}; use transmog_pot::Pot; use crate::{ document::{BorrowedDocument, CollectionDocument}, key::Key, schema::{ view::map::{Mappings, ViewMappedValue}, Collection, CollectionName, Name, SerializedCollection, ViewName, }, AnyError, }; /// Types for defining a `Map` within a `View`. pub mod map; /// Errors that arise when interacting with views. #[derive(thiserror::Error, Debug)] // TODO add which view name and collection pub enum Error { /// An error occurred while serializing or deserializing keys emitted in a view. #[error("error serializing view keys {0}")] KeySerialization(Box<dyn AnyError>), /// An error unrelated to views. #[error("core error: {0}")] Core(#[from] crate::Error), } impl Error { /// Returns a [`Self::KeySerialization`] instance after boxing the error. pub fn key_serialization<E: AnyError>(error: E) -> Self { Self::KeySerialization(Box::new(error)) } } impl From<pot::Error> for Error { fn from(err: pot::Error) -> Self { Self::Core(crate::Error::from(err)) } } /// A type alias for the result of `ViewSchema::map()`. #[allow(type_alias_bounds)] // False positive, required for associated types pub type ViewMapResult<V: View> = Result<Mappings<V::Key, V::Value>, crate::Error>; /// A type alias for the result of `ViewSchema::reduce()`. #[allow(type_alias_bounds)] // False positive, required for associated types pub type ReduceResult<V: View> = Result<V::Value, crate::Error>; /// A mechanism for accessing mapped and/or reduced data from a [`Collection`]. #[doc = "\n"] #[doc = include_str!("./view-overview.md")] pub trait View: Send + Sync + Debug + 'static { /// The collection this view belongs to type Collection: Collection; /// The key for this view. type Key: for<'a> Key<'a> + 'static; /// An associated type that can be stored with each entry in the view. type Value: Send + Sync; /// The name of the view. Must be unique per collection. 
fn name(&self) -> Name; /// The namespaced name of the view. fn view_name(&self) -> ViewName { ViewName { collection: Self::Collection::collection_name(), name: self.name(), } } } /// The implementation of Map/Reduce for a [`View`]. #[doc = "\n"] #[doc = include_str!("./view-overview.md")] pub trait ViewSchema: Send + Sync + Debug + 'static { /// The view this schema is defined for. type View: SerializedView; /// If true, no two documents may emit the same key. Unique views are /// updated when the document is saved, allowing for this check to be done /// atomically. When a document is updated, all unique views will be /// updated, and if any of them fail, the document will not be allowed to /// update and an /// [`Error::UniqueKeyViolation`](crate::Error::UniqueKeyViolation) will be /// returned. fn unique(&self) -> bool { false } /// The version of the view. Changing this value will cause indexes to be rebuilt. fn version(&self) -> u64 { 0 } /// The map function for this view. This function is responsible for /// emitting entries for any documents that should be contained in this /// View. If None is returned, the View will not include the document. See [the user guide's chapter on /// views for more information on how map /// works](https://dev.bonsaidb.io/main/guide/about/concepts/view.html#map). fn map(&self, document: &BorrowedDocument<'_>) -> ViewMapResult<Self::View>; /// Returns a value that is produced by reducing a list of `mappings` into a /// single value. If `rereduce` is true, the values contained in the /// mappings have already been reduced at least one time. If an error of /// [`ReduceUnimplemented`](crate::Error::ReduceUnimplemented) is returned, /// queries that ask for a reduce operation will return an error. See [the /// user guide's chapter on views for more information on how reduce /// works](https://dev.bonsaidb.io/main/guide/about/concepts/view.html#reduce). 
#[allow(unused_variables)] fn reduce( &self, mappings: &[ViewMappedValue<Self::View>], rereduce: bool, ) -> Result<<Self::View as View>::Value, crate::Error> { Err(crate::Error::ReduceUnimplemented) } } /// A [`View`] with additional tyes and logic to handle serializing view values. pub trait SerializedView: View { /// The serialization format for this view. type Format: OwnedDeserializer<Self::Value>; /// Returns the configured instance of [`Self::Format`]. // TODO allow configuration to be passed here, such as max allocation bytes. fn format() -> Self::Format; /// Deserialize `data` as `Self::Value` using this views's format. fn deserialize(data: &[u8]) -> Result<Self::Value, crate::Error> { Self::format() .deserialize_owned(data) .map_err(|err| crate::Error::Serialization(err.to_string())) } /// Serialize `item` using this views's format. fn serialize(item: &Self::Value) -> Result<Vec<u8>, crate::Error> { Self::format() .serialize(item) .map_err(|err| crate::Error::Serialization(err.to_string())) } } /// A default serialization strategy for views. Uses equivalent settings as /// [`DefaultSerialization`](crate::schema::DefaultSerialization). pub trait DefaultViewSerialization: View {} impl<T> SerializedView for T where T: DefaultViewSerialization, T::Value: Serialize + DeserializeOwned, { type Format = Pot; fn format() -> Self::Format { Pot::default() } } /// A [`View`] for a [`Collection`] that stores Serde-compatible documents. The /// only difference between implmementing this and [`View`] is that the `map` /// function receives a [`CollectionDocument`] instead of a [`BorrowedDocument`]. pub trait CollectionViewSchema: Send + Sync + Debug + 'static where <Self::View as View>::Collection: SerializedCollection, { /// The view this schema is an implementation of. type View: SerializedView; /// If true, no two documents may emit the same key. Unique views are /// updated when the document is saved, allowing for this check to be done /// atomically. 
When a document is updated, all unique views will be /// updated, and if any of them fail, the document will not be allowed to /// update and an /// [`Error::UniqueKeyViolation`](crate::Error::UniqueKeyViolation) will be /// returned. fn unique(&self) -> bool { false } /// The version of the view. Changing this value will cause indexes to be rebuilt. fn version(&self) -> u64 { 0 } /// The map function for this view. This function is responsible for /// emitting entries for any documents that should be contained in this /// View. If None is returned, the View will not include the document. fn map( &self, document: CollectionDocument<<Self::View as View>::Collection>, ) -> ViewMapResult<Self::View>; /// The reduce function for this view. If `Err(Error::ReduceUnimplemented)` /// is returned, queries that ask for a reduce operation will return an /// error. See [`CouchDB`'s Reduce/Rereduce /// documentation](https://docs.couchdb.org/en/stable/ddocs/views/intro.html#reduce-rereduce) /// for the design this implementation will be inspired by #[allow(unused_variables)] fn reduce( &self, mappings: &[ViewMappedValue<Self::View>], rereduce: bool, ) -> ReduceResult<Self::View> { Err(crate::Error::ReduceUnimplemented) } } impl<T> ViewSchema for T where T: CollectionViewSchema, T::View: SerializedView, <T::View as View>::Collection: SerializedCollection, { type View = T::View; fn version(&self) -> u64 { T::version(self) } fn map(&self, document: &BorrowedDocument<'_>) -> ViewMapResult<Self::View> { T::map(self, CollectionDocument::try_from(document)?) 
} fn reduce( &self, mappings: &[ViewMappedValue<Self::View>], rereduce: bool, ) -> Result<<Self::View as View>::Value, crate::Error> { T::reduce(self, mappings, rereduce) } fn unique(&self) -> bool { T::unique(self) } } /// Wraps a [`View`] with serialization to erase the associated types pub trait Serialized: Send + Sync + Debug { /// Wraps returing [`<View::Collection as Collection>::collection_name()`](crate::schema::Collection::collection_name) fn collection(&self) -> CollectionName; /// Wraps [`ViewSchema::unique`] fn unique(&self) -> bool; /// Wraps [`ViewSchema::version`] fn version(&self) -> u64; /// Wraps [`View::view_name`] fn view_name(&self) -> ViewName; /// Wraps [`ViewSchema::map`] fn map(&self, document: &BorrowedDocument<'_>) -> Result<Vec<map::Serialized>, Error>; /// Wraps [`ViewSchema::reduce`] fn reduce(&self, mappings: &[(&[u8], &[u8])], rereduce: bool) -> Result<Vec<u8>, Error>; } /// Defines an unique view named `$view_name` for `$collection` with the /// mapping provided. #[macro_export(local_inner_macros)] macro_rules! define_basic_unique_mapped_view { ($view_name:ident, $collection:ty, $version:literal, $name:literal, $key:ty, $mapping:expr $(,)?) => { define_mapped_view!( $view_name, $collection, $version, $name, $key, (), true, $mapping ); }; ($view_name:ident, $collection:ty, $version:literal, $name:literal, $key:ty, $value:ty, $mapping:expr $(,)?) => { define_mapped_view!( $view_name, $collection, $version, $name, $key, $value, true, $mapping ); }; } /// Defines a non-unique view named `$view_name` for `$collection` with the /// mapping provided. #[macro_export(local_inner_macros)] macro_rules! define_basic_mapped_view { ($view_name:ident, $collection:ty, $version:literal, $name:literal, $key:ty, $mapping:expr $(,)?) => { define_mapped_view!( $view_name, $collection, $version, $name, $key, (), false, $mapping ); }; ($view_name:ident, $collection:ty, $version:literal, $name:literal, $key:ty, $value:ty, $mapping:expr $(,)?) 
=> { define_mapped_view!( $view_name, $collection, $version, $name, $key, $value, false, $mapping ); }; } /// Defines a view using the mapping provided. #[macro_export] macro_rules! define_mapped_view { ($view_name:ident, $collection:ty, $version:literal, $name:literal, $key:ty, $value:ty, $unique:literal, $mapping:expr) => { #[derive(Debug, Clone)] pub struct $view_name; impl $crate::schema::View for $view_name { type Collection = $collection; type Key = $key; type Value = $value; fn name(&self) -> $crate::schema::Name { $crate::schema::Name::new($name) } } impl $crate::schema::CollectionViewSchema for $view_name { type View = Self; fn unique(&self) -> bool { $unique } fn version(&self) -> u64 { $version } fn map( &self, document: $crate::document::CollectionDocument<$collection>, ) -> $crate::schema::ViewMapResult<Self::View> { $mapping(document) } } impl $crate::schema::view::DefaultViewSerialization for $view_name {} }; }
33.736695
128
0.616323
e2f0484b2dccd2e50c7e81c4319595ee9a91ab51
5,552
mod postgres; use nettu_scheduler_domain::{SyncedCalendarEvent, ID}; pub use postgres::PostgresEventSyncedRepo; #[async_trait::async_trait] pub trait IEventSyncedRepo: Send + Sync { async fn insert(&self, e: &SyncedCalendarEvent) -> anyhow::Result<()>; async fn find_by_event(&self, event_id: &ID) -> anyhow::Result<Vec<SyncedCalendarEvent>>; } #[cfg(test)] mod tests { use crate::setup_context; use nettu_scheduler_domain::{ Account, AccountIntegration, Calendar, CalendarEvent, IntegrationProvider, SyncedCalendar, SyncedCalendarEvent, User, UserIntegration, }; #[tokio::test] async fn test_event_synced_repo() { let ctx = setup_context().await; let account = Account::new(); ctx.repos .accounts .insert(&account) .await .expect("To insert account"); let user = User::new(account.id.clone()); ctx.repos.users.insert(&user).await.expect("To insert user"); for provider in [IntegrationProvider::Google, IntegrationProvider::Outlook] { let acc_integration = AccountIntegration { account_id: account.id.clone(), client_id: "".into(), client_secret: "".into(), redirect_uri: "".into(), provider: provider.clone(), }; ctx.repos .account_integrations .insert(&acc_integration) .await .expect("To insert account integration"); let user_integration = UserIntegration { access_token: "".into(), access_token_expires_ts: 0, refresh_token: "".into(), account_id: account.id.clone(), user_id: user.id.clone(), provider, }; ctx.repos .user_integrations .insert(&user_integration) .await .expect("To insert user integration"); } let calendar = Calendar::new(&user.id, &account.id); ctx.repos .calendars .insert(&calendar) .await .expect("To insert calendar"); for provider in [IntegrationProvider::Google, IntegrationProvider::Outlook] { let sync_calendar = SyncedCalendar { calendar_id: calendar.id.clone(), ext_calendar_id: "".into(), provider, user_id: user.id.clone(), }; assert!(ctx .repos .calendar_synced .insert(&sync_calendar) .await .is_ok()); } let e = CalendarEvent { account_id: account.id.clone(), 
calendar_id: calendar.id.clone(), user_id: user.id.clone(), ..Default::default() }; assert!(ctx.repos.events.insert(&e).await.is_ok()); for provider in [IntegrationProvider::Google, IntegrationProvider::Outlook] { let sync_event = SyncedCalendarEvent { calendar_id: calendar.id.clone(), event_id: e.id.clone(), ext_calendar_id: "".into(), ext_event_id: "".into(), provider, user_id: user.id.clone(), }; assert!(ctx.repos.event_synced.insert(&sync_event).await.is_ok()); } let synced_events = ctx .repos .event_synced .find_by_event(&e.id) .await .expect("To find synced calendar event"); assert_eq!(synced_events.len(), 2); assert_eq!(synced_events[0].event_id, e.id); assert_eq!(synced_events[1].event_id, e.id); assert!(synced_events .iter() .find(|c| c.provider == IntegrationProvider::Google) .is_some()); assert!(synced_events .iter() .find(|c| c.provider == IntegrationProvider::Outlook) .is_some()); // Deleting the sync calendar also deletes all the corresponding sync events for that calendar let sync_calendar = SyncedCalendar { calendar_id: calendar.id.clone(), ext_calendar_id: "".into(), provider: IntegrationProvider::Google, user_id: user.id.clone(), }; assert!(ctx .repos .calendar_synced .delete(&sync_calendar) .await .is_ok()); let synced_events = ctx .repos .event_synced .find_by_event(&e.id) .await .expect("To find synced calendar event"); assert_eq!(synced_events.len(), 1); assert_eq!(synced_events[0].provider, IntegrationProvider::Outlook); // And now delete outlook calendar sync let sync_calendar = SyncedCalendar { calendar_id: calendar.id.clone(), ext_calendar_id: "".into(), provider: IntegrationProvider::Outlook, user_id: user.id.clone(), }; assert!(ctx .repos .calendar_synced .delete(&sync_calendar) .await .is_ok()); let synced_events = ctx .repos .event_synced .find_by_event(&e.id) .await .expect("To find synced calendar event"); assert!(synced_events.is_empty()); } }
33.047619
102
0.530079
e463677f799e763d5f2ce51335d3d7227da7517e
2,472
use super::vec_ext::*; /// A Range type (i.e. something which has a minimum and a maximum). /// This is commonly used as `Range<TV>` and `Range<IV>` to represent a box, though can be used with /// other numeric types as well. /// /// Note that, unlike the rest of the library, we do not use the 2d and 3d feature flags to allow /// `Range<T>` to be dimension independent. This is because `Range` can be generic over different /// types (not just f32 and f64), and potentially even works for types that aren't vectors. /// /// Finally, note that we also do not use the standard library's [`std::ops::Range`] because it /// would provide little benefit other than the `..` syntax, since none of the methods would carry /// over, while creating additional hassle with the orphan rules (requiring us to create extension /// traits). #[derive(Debug, Default, Copy, Clone, serde::Serialize, serde::Deserialize, PartialEq, Eq)] pub struct Range<T> { pub min: T, pub max: T, } impl<T> Range<T> { /// Creates a new `Range` given a `min` and `max` value. pub fn new(min: T, max: T) -> Self { Self { min, max } } } impl<T: na::Scalar + na::ClosedSub, const D: usize> Range<na::SVector<T, D>> { /// Calculates the size of a `Range` as a vector. pub fn size(&self) -> na::SVector<T, D> { &self.max - &self.min } } impl<T: na::Scalar + na::ClosedAdd + na::ClosedSub + Copy, const D: usize> Range<na::SVector<T, D>> { pub fn thickened(&self, thickness_each_side: T) -> Self { let min = self.min - na::SVector::from_element(thickness_each_side); let max = self.max + na::SVector::from_element(thickness_each_side); Range::new(min, max) } } impl<T: na::Scalar + PartialOrd, const D: usize> Range<na::SVector<T, D>> { /// Whether `a` is within the `Range` (returns `true` at the endpoints). pub fn contains(&self, a: na::SVector<T, D>) -> bool { self.min.all_le(&a) && self.max.all_ge(&a) } /// Whether `a` is within the `Range`, but treating it as a half-open interval /// (i.e. 
returns true if the components are equal to the `min` but not the `max`). pub fn contains_half_open(&self, a: na::SVector<T, D>) -> bool { self.min.all_le(&a) && self.max.all_gt(&a) } } impl<T: std::fmt::Display> std::fmt::Display for Range<T> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}..{}", self.min, self.max) } }
39.238095
100
0.633495
c1cfd436708de8058dfb9ab8262ae2c79bb996bf
2,446
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license. use deno_core::error::AnyError; use deno_core::FsModuleLoader; use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel; use deno_runtime::deno_web::BlobStore; use deno_runtime::permissions::Permissions; use deno_runtime::worker::MainWorker; use deno_runtime::worker::WorkerOptions; use deno_runtime::BootstrapOptions; use locale_config::Locale; use std::path::Path; use std::rc::Rc; use std::sync::Arc; fn get_error_class_name(e: &AnyError) -> &'static str { deno_runtime::errors::get_error_class_name(e).unwrap_or("Error") } #[tokio::main] async fn main() -> Result<(), AnyError> { let module_loader = Rc::new(FsModuleLoader); let create_web_worker_cb = Arc::new(|_| { todo!("Web workers are not supported in the example"); }); let web_worker_preload_module_cb = Arc::new(|_| { todo!("Web workers are not supported in the example"); }); let options = WorkerOptions { bootstrap: BootstrapOptions { apply_source_maps: false, args: vec![], cpu_count: 1, debug_flag: false, enable_testing_features: false, locale: Locale::user_default() .tags() .map(|(_, l)| l.to_string()) .collect(), location: None, no_color: false, runtime_version: "x".to_string(), ts_version: "x".to_string(), unstable: false, }, extensions: vec![], unsafely_ignore_certificate_errors: None, root_cert_store: None, user_agent: "hello_runtime".to_string(), seed: None, js_error_create_fn: None, web_worker_preload_module_cb, create_web_worker_cb, maybe_inspector_server: None, should_break_on_first_statement: false, module_loader, get_error_class_fn: Some(&get_error_class_name), origin_storage_dir: None, blob_store: BlobStore::default(), broadcast_channel: InMemoryBroadcastChannel::default(), shared_array_buffer_store: None, compiled_wasm_module_store: None, }; let js_path = Path::new(env!("CARGO_MANIFEST_DIR")).join("examples/hello_runtime.js"); let main_module = deno_core::resolve_path(&js_path.to_string_lossy())?; let permissions = 
Permissions::allow_all(); let mut worker = MainWorker::bootstrap_from_options( main_module.clone(), permissions, options, ); worker.execute_main_module(&main_module).await?; worker.run_event_loop(false).await?; Ok(()) }
30.575
76
0.70646
8fdf24a62777595ab59a7c1fc51dad8305c04719
4,160
#![allow(non_snake_case, non_upper_case_globals)] #![allow(non_camel_case_types)] //! Nested Vectored Interrupt Controller //! //! Used by: stm32l100, stm32l151, stm32l162 #[cfg(not(feature = "nosync"))] pub use crate::stm32l1::peripherals::nvic::Instance; pub use crate::stm32l1::peripherals::nvic::{RegisterBlock, ResetValues}; pub use crate::stm32l1::peripherals::nvic::{ IABR0, IABR1, ICER0, ICER1, ICPR0, ICPR1, IPR0, IPR1, IPR10, IPR11, IPR12, IPR13, IPR2, IPR3, IPR4, IPR5, IPR6, IPR7, IPR8, IPR9, ISER0, ISER1, ISPR0, ISPR1, }; /// Access functions for the NVIC peripheral instance pub mod NVIC { use super::ResetValues; #[cfg(not(feature = "nosync"))] use super::Instance; #[cfg(not(feature = "nosync"))] const INSTANCE: Instance = Instance { addr: 0xe000e100, _marker: ::core::marker::PhantomData, }; /// Reset values for each field in NVIC pub const reset: ResetValues = ResetValues { ISER0: 0x00000000, ISER1: 0x00000000, ICER0: 0x00000000, ICER1: 0x00000000, ISPR0: 0x00000000, ISPR1: 0x00000000, ICPR0: 0x00000000, ICPR1: 0x00000000, IABR0: 0x00000000, IABR1: 0x00000000, IPR0: 0x00000000, IPR1: 0x00000000, IPR2: 0x00000000, IPR3: 0x00000000, IPR4: 0x00000000, IPR5: 0x00000000, IPR6: 0x00000000, IPR7: 0x00000000, IPR8: 0x00000000, IPR9: 0x00000000, IPR10: 0x00000000, IPR11: 0x00000000, IPR12: 0x00000000, IPR13: 0x00000000, }; #[cfg(not(feature = "nosync"))] #[allow(renamed_and_removed_lints)] #[allow(private_no_mangle_statics)] #[no_mangle] static mut NVIC_TAKEN: bool = false; /// Safe access to NVIC /// /// This function returns `Some(Instance)` if this instance is not /// currently taken, and `None` if it is. This ensures that if you /// do get `Some(Instance)`, you are ensured unique access to /// the peripheral and there cannot be data races (unless other /// code uses `unsafe`, of course). You can then pass the /// `Instance` around to other functions as required. When you're /// done with it, you can call `release(instance)` to return it. 
/// /// `Instance` itself dereferences to a `RegisterBlock`, which /// provides access to the peripheral's registers. #[cfg(not(feature = "nosync"))] #[inline] pub fn take() -> Option<Instance> { external_cortex_m::interrupt::free(|_| unsafe { if NVIC_TAKEN { None } else { NVIC_TAKEN = true; Some(INSTANCE) } }) } /// Release exclusive access to NVIC /// /// This function allows you to return an `Instance` so that it /// is available to `take()` again. This function will panic if /// you return a different `Instance` or if this instance is not /// already taken. #[cfg(not(feature = "nosync"))] #[inline] pub fn release(inst: Instance) { external_cortex_m::interrupt::free(|_| unsafe { if NVIC_TAKEN && inst.addr == INSTANCE.addr { NVIC_TAKEN = false; } else { panic!("Released a peripheral which was not taken"); } }); } /// Unsafely steal NVIC /// /// This function is similar to take() but forcibly takes the /// Instance, marking it as taken irregardless of its previous /// state. #[cfg(not(feature = "nosync"))] #[inline] pub unsafe fn steal() -> Instance { NVIC_TAKEN = true; INSTANCE } } /// Raw pointer to NVIC /// /// Dereferencing this is unsafe because you are not ensured unique /// access to the peripheral, so you may encounter data races with /// other users of this peripheral. It is up to you to ensure you /// will not cause data races. /// /// This constant is provided for ease of use in unsafe code: you can /// simply call for example `write_reg!(gpio, GPIOA, ODR, 1);`. pub const NVIC: *const RegisterBlock = 0xe000e100 as *const _;
32.5
97
0.613462
28b97b2a58014cf5ee1f9cebf9196692c7191530
6,882
#[cfg(feature="safe")] use custos::{libs::cpu::CPU, Matrix, AsDev, range, VecRead}; #[cfg(feature="safe")] #[cfg(feature="opencl")] use custos::libs::opencl::CLDevice; #[cfg(feature="safe")] #[test] fn test_threading_cpu() { let device = CPU::new().select(); let th1_cl = std::thread::spawn(|| { let device = CPU::new().select(); let a = Matrix::from( ( &device, (3, 2), [3f32, 2., 1., 5., 6., 4.]) ); let b = Matrix::from( ( &device, (2, 3), [1., 3., 2., 6., 5., 4.]) ); for _ in range(500) { let c = &a * &b; assert_eq!(device.read(c.data()), vec![3., 6., 2., 30., 30., 16.]); } //CPU_CACHE.with(|f| assert!(f.borrow().nodes.len() == 1)); for _ in range(500) { let c = &a - &b; let d = &a + &b + &c; let e = &a * &b - &c + &d * &d - &a; assert_eq!(34., e.read()[0]); } // CPU_CACHE.with(|f| assert!(f.borrow().nodes.len() == 8)); let c = &a - &b; let d = &a + &b + &c; let e = &a * &b - &c + &d * &d - &a; assert_eq!(34., e.read()[0]); // CPU_CACHE.with(|f| assert!(f.borrow().nodes.len() == 8)); }); let th1_cpu = std::thread::spawn(|| { let device = CPU::new().select(); let a = Matrix::from( ( &device, (3, 2), [3f32, 2., 1., 5., 6., 4.]) ); let b = Matrix::from( ( &device, (2, 3), [1., 3., 2., 6., 5., 4.]) ); for _ in range(500) { let c = &a * &b; assert_eq!(device.read(c.data()), vec![3., 6., 2., 30., 30., 16.]); } // CPU_CACHE.with(|f| assert!(f.borrow().nodes.len() == 1)); for _ in range(500) { let c = &a - &b; let d = &a + &b + &c; let e = &a * &b - &c + &d * &d - &a; assert_eq!(34., e.read()[0]); } //CPU_CACHE.with(|f| assert!(f.borrow().nodes.len() == 8)); let c = &a - &b; let d = &a + &b + &c; let e = &a * &b - &c + &d * &d - &a; assert_eq!(34., e.read()[0]); // CPU_CACHE.with(|f| assert!(f.borrow().nodes.len() == 8)); }); let th2 = std::thread::spawn(|| { { let device = CPU::new().select(); let a = Matrix::from( ( &device, (3, 2), [3f32, 2., 1., 5., 6., 4.]) ); let b = Matrix::from( ( &device, (2, 3), [1., 3., 2., 6., 5., 4.]) ); for _ in range(500) { let c = &a + &b; 
assert_eq!(device.read(c.data()), vec![4., 5., 3., 11., 11., 8.]); for _ in range(5) { let d = &a * &b * &c; let _ = &d + &c - ( &b + &a * &d); } // CPU_CACHE.with(|f| assert!(f.borrow().nodes.len() == 7)); } } //'device' is dropped // CPU_CACHE.with(|f| assert!(f.borrow().nodes.len() == 0)); }); let a = Matrix::from( ( &device, (3, 2), [3f32, 2., 1., 5., 6., 4.]) ); let b = Matrix::from( ( &device, (2, 3), [1., 3., 2., 6., 5., 4.]) ); for _ in range(500) { let c = &a - &b; assert_eq!(c.read(), vec![2., -1., -1., -1., 1., 0.]); } // CPU_CACHE.with(|f| assert!(f.borrow().nodes.len() == 1)); th1_cl.join().unwrap(); th1_cpu.join().unwrap(); th2.join().unwrap(); } #[cfg(feature="safe")] #[cfg(feature="opencl")] #[test] fn test_threading_cl_a() { let device = CLDevice::get(0).unwrap().select(); let th1_cl = std::thread::spawn(|| { let device = CLDevice::get(0).unwrap().select(); let a = Matrix::from( ( &device, (3, 2), [3f32, 2., 1., 5., 6., 4.]) ); let b = Matrix::from( ( &device, (2, 3), [1., 3., 2., 6., 5., 4.]) ); for _ in range(500) { let c = &a * &b; assert_eq!(device.read(c.data()), vec![3., 6., 2., 30., 30., 16.]); } // CL_CACHE.with(|f| assert!(f.borrow().output_nodes.len() == 1)); for _ in range(500) { let c = &a - &b; let d = &a + &b + &c; let e = &a * &b - c + &d * &d - &a; assert_eq!(34., e.read()[0]); } // CL_CACHE.with(|f| assert!(f.borrow().output_nodes.len() == 8)); let c = &a - &b; let d = &a + &b + &c; let e = &a * &b - &c + &d * &d - &a; assert_eq!(34., e.read()[0]); // CL_CACHE.with(|f| assert!(f.borrow().output_nodes.len() == 8)); }); let th1_cpu = std::thread::spawn(|| { let device = CPU::new().select(); let a = Matrix::from( ( &device, (3, 2), [3f32, 2., 1., 5., 6., 4.]) ); let b = Matrix::from( ( &device, (2, 3), [1., 3., 2., 6., 5., 4.]) ); for _ in range(500) { let c = &a * &b; assert_eq!(device.read(c.data()), vec![3., 6., 2., 30., 30., 16.]); } // CPU_CACHE.with(|f| assert!(f.borrow().nodes.len() == 1)); for _ in range(500) { let c = &a - 
&b; let d = &a + &b + &c; let e = &a * &b - &c + &d * &d - &a; assert_eq!(34., e.read()[0]); } // CPU_CACHE.with(|f| assert!(f.borrow().nodes.len() == 8)); let c = &a - &b; let d = &a + &b + &c; let e = &a * &b - &c + &d * &d - &a; assert_eq!(34., e.read()[0]); // CPU_CACHE.with(|f| assert!(f.borrow().nodes.len() == 8)); }); let th2 = std::thread::spawn(|| { { let device = CPU::new().select(); let a = Matrix::from( ( &device, (3, 2), [3f32, 2., 1., 5., 6., 4.]) ); let b = Matrix::from( ( &device, (2, 3), [1., 3., 2., 6., 5., 4.]) ); for _ in range(500) { let c = &a + &b; assert_eq!(device.read(c.data()), vec![4., 5., 3., 11., 11., 8.]); for _ in range(5) { let d = &a * &b * &c; let _ = &d + &c - (&b + &a * &d); } // CPU_CACHE.with(|f| assert!(f.borrow().nodes.len() == 7)); } } //'device' is dropped // CPU_CACHE.with(|f| assert!(f.borrow().nodes.len() == 0)); }); let a = Matrix::from( ( &device, (3, 2), [3f32, 2., 1., 5., 6., 4.]) ); let b = Matrix::from( ( &device, (2, 3), [1., 3., 2., 6., 5., 4.]) ); for _ in range(500) { let c = &a - &b; assert_eq!(c.read(), vec![2., -1., -1., -1., 1., 0.]); } // CL_CACHE.with(|f| assert!(f.borrow().output_nodes.len() == 1)); th1_cl.join().unwrap(); th1_cpu.join().unwrap(); th2.join().unwrap(); }
31.140271
83
0.385062
4b0b97fecb8c6ebeb64f1d42bc30e2f22960a3b6
1,626
use std::error; use std::fmt; use std::io; use std::convert; /// Contains error options that can be encountered while performing the decoding /// operations. #[derive(Debug, PartialEq)] pub enum DecoderError { /// Indicates that the decoder received an invalid stream of bytes that can /// not be decoded. InvalidInput, /// Indicates that the decoder encountered an I/O interruption. Interrupted /// operations can typically be retried. Interrupted, /// Indicates that the buffer from which an item was supposed to be decoded /// does not contain enough octets to complete the decoding. InputUnderflow, /// Indicates that the decoder encountered an invalid tag number of a key. /// A tag number must be unique per message and the value can be between `1` /// and `2^29 - 1`. InvalidTag, } impl From<io::Error> for DecoderError { fn from(_err: io::Error) -> Self { Self::Interrupted } } impl From<convert::Infallible> for DecoderError { // until solved: https://github.com/rust-lang/rust/issues/64715 fn from(_: convert::Infallible) -> Self { unreachable!() } } impl fmt::Display for DecoderError { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { match self { Self::InvalidInput => write!(fmt, "Invalid byte stream."), Self::Interrupted => write!(fmt, "Read operation interrupted."), Self::InputUnderflow => write!(fmt, "Not enough bytes."), Self::InvalidTag => write!(fmt, "Found tag with invalid number."), } } } impl error::Error for DecoderError {}
31.269231
113
0.654982
2290e695a1b4f8de1a50fdb88bba5de56077d477
2,557
//! Types for the [`m.space.parent`] event. //! //! [`m.space.parent`]: https://spec.matrix.org/v1.2/client-server-api/#mspaceparent use ruma_events_macros::EventContent; use ruma_identifiers::ServerName; use serde::{Deserialize, Serialize}; /// The content of an `m.space.parent` event. /// /// Rooms can claim parents via the `m.space.parent` state event. /// /// Similar to `m.space.child`, the `state_key` is the ID of the parent space, and the content must /// contain a `via` key which gives a list of candidate servers that can be used to join the /// parent. #[derive(Clone, Debug, Deserialize, Serialize, EventContent)] #[cfg_attr(not(feature = "unstable-exhaustive-types"), non_exhaustive)] #[ruma_event(type = "m.space.parent", kind = State)] pub struct SpaceParentEventContent { /// List of candidate servers that can be used to join the room. #[serde(skip_serializing_if = "Option::is_none")] pub via: Option<Vec<Box<ServerName>>>, /// Determines whether this is the main parent for the space. /// /// When a user joins a room with a canonical parent, clients may switch to view the room in /// the context of that space, peeking into it in order to find other rooms and group them /// together. In practice, well behaved rooms should only have one `canonical` parent, but /// given this is not enforced: if multiple are present the client should select the one with /// the lowest room ID, as determined via a lexicographic ordering of the Unicode code-points. pub canonical: bool, } impl SpaceParentEventContent { /// Creates a new `ParentEventContent` with the given canonical flag. 
pub fn new(canonical: bool) -> Self { Self { via: None, canonical } } } #[cfg(test)] mod tests { use ruma_identifiers::server_name; use serde_json::{json, to_value as to_json_value}; use super::SpaceParentEventContent; #[test] fn space_parent_serialization() { let content = SpaceParentEventContent { via: Some(vec![server_name!("example.com").to_owned()]), canonical: true, }; let json = json!({ "via": ["example.com"], "canonical": true, }); assert_eq!(to_json_value(&content).unwrap(), json); } #[test] fn space_parent_empty_serialization() { let content = SpaceParentEventContent { via: None, canonical: true }; let json = json!({ "canonical": true, }); assert_eq!(to_json_value(&content).unwrap(), json); } }
34.554054
99
0.662495
eb67b718dbd3d4fcf71f3221883c89d68b387898
23,016
//! Temporal quantification. //! //! Example: //! //! ``` //! use std::time::Duration; //! //! let five_seconds = Duration::new(5, 0); //! // both declarations are equivalent //! assert_eq!(Duration::new(5, 0), Duration::from_secs(5)); //! ``` #![stable(feature = "time", since = "1.3.0")] use error::Error; use fmt; use ops::{Add, Sub, AddAssign, SubAssign}; use sys::time; use sys_common::FromInner; #[stable(feature = "time", since = "1.3.0")] pub use core::time::Duration; /// A measurement of a monotonically nondecreasing clock. /// Opaque and useful only with `Duration`. /// /// Instants are always guaranteed to be no less than any previously measured /// instant when created, and are often useful for tasks such as measuring /// benchmarks or timing how long an operation takes. /// /// Note, however, that instants are not guaranteed to be **steady**. In other /// words, each tick of the underlying clock may not be the same length (e.g. /// some seconds may be longer than others). An instant may jump forwards or /// experience time dilation (slow down or speed up), but it will never go /// backwards. /// /// Instants are opaque types that can only be compared to one another. There is /// no method to get "the number of seconds" from an instant. Instead, it only /// allows measuring the duration between two instants (or comparing two /// instants). /// /// The size of an `Instant` struct may vary depending on the target operating /// system. 
/// /// Example: /// /// ```no_run /// use std::time::{Duration, Instant}; /// use std::thread::sleep; /// /// fn main() { /// let now = Instant::now(); /// /// // we sleep for 2 seconds /// sleep(Duration::new(2, 0)); /// // it prints '2' /// println!("{}", now.elapsed().as_secs()); /// } /// ``` #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] #[stable(feature = "time2", since = "1.8.0")] pub struct Instant(time::Instant); /// A measurement of the system clock, useful for talking to /// external entities like the file system or other processes. /// /// Distinct from the [`Instant`] type, this time measurement **is not /// monotonic**. This means that you can save a file to the file system, then /// save another file to the file system, **and the second file has a /// `SystemTime` measurement earlier than the first**. In other words, an /// operation that happens after another operation in real time may have an /// earlier `SystemTime`! /// /// Consequently, comparing two `SystemTime` instances to learn about the /// duration between them returns a [`Result`] instead of an infallible [`Duration`] /// to indicate that this sort of time drift may happen and needs to be handled. /// /// Although a `SystemTime` cannot be directly inspected, the [`UNIX_EPOCH`] /// constant is provided in this module as an anchor in time to learn /// information about a `SystemTime`. By calculating the duration from this /// fixed point in time, a `SystemTime` can be converted to a human-readable time, /// or perhaps some other string representation. /// /// The size of a `SystemTime` struct may vary depending on the target operating /// system. 
/// /// [`Instant`]: ../../std/time/struct.Instant.html /// [`Result`]: ../../std/result/enum.Result.html /// [`Duration`]: ../../std/time/struct.Duration.html /// [`UNIX_EPOCH`]: ../../std/time/constant.UNIX_EPOCH.html /// /// Example: /// /// ```no_run /// use std::time::{Duration, SystemTime}; /// use std::thread::sleep; /// /// fn main() { /// let now = SystemTime::now(); /// /// // we sleep for 2 seconds /// sleep(Duration::new(2, 0)); /// match now.elapsed() { /// Ok(elapsed) => { /// // it prints '2' /// println!("{}", elapsed.as_secs()); /// } /// Err(e) => { /// // an error occurred! /// println!("Error: {:?}", e); /// } /// } /// } /// ``` #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] #[stable(feature = "time2", since = "1.8.0")] pub struct SystemTime(time::SystemTime); /// An error returned from the `duration_since` and `elapsed` methods on /// `SystemTime`, used to learn how far in the opposite direction a system time /// lies. /// /// # Examples /// /// ```no_run /// use std::thread::sleep; /// use std::time::{Duration, SystemTime}; /// /// let sys_time = SystemTime::now(); /// sleep(Duration::from_secs(1)); /// let new_sys_time = SystemTime::now(); /// match sys_time.duration_since(new_sys_time) { /// Ok(_) => {} /// Err(e) => println!("SystemTimeError difference: {:?}", e.duration()), /// } /// ``` #[derive(Clone, Debug)] #[stable(feature = "time2", since = "1.8.0")] pub struct SystemTimeError(Duration); impl Instant { /// Returns an instant corresponding to "now". /// /// # Examples /// /// ``` /// use std::time::Instant; /// /// let now = Instant::now(); /// ``` #[stable(feature = "time2", since = "1.8.0")] pub fn now() -> Instant { Instant(time::Instant::now()) } /// Returns the amount of time elapsed from another instant to this one. /// /// # Panics /// /// This function will panic if `earlier` is later than `self`. 
/// /// # Examples /// /// ```no_run /// use std::time::{Duration, Instant}; /// use std::thread::sleep; /// /// let now = Instant::now(); /// sleep(Duration::new(1, 0)); /// let new_now = Instant::now(); /// println!("{:?}", new_now.duration_since(now)); /// ``` #[stable(feature = "time2", since = "1.8.0")] pub fn duration_since(&self, earlier: Instant) -> Duration { self.0.sub_instant(&earlier.0) } /// Returns the amount of time elapsed since this instant was created. /// /// # Panics /// /// This function may panic if the current time is earlier than this /// instant, which is something that can happen if an `Instant` is /// produced synthetically. /// /// # Examples /// /// ```no_run /// use std::thread::sleep; /// use std::time::{Duration, Instant}; /// /// let instant = Instant::now(); /// let three_secs = Duration::from_secs(3); /// sleep(three_secs); /// assert!(instant.elapsed() >= three_secs); /// ``` #[stable(feature = "time2", since = "1.8.0")] pub fn elapsed(&self) -> Duration { Instant::now() - *self } /// Returns `Some(t)` where `t` is the time `self + duration` if `t` can be represented as /// `Instant` (which means it's inside the bounds of the underlying data structure), `None` /// otherwise. #[unstable(feature = "time_checked_add", issue = "55940")] pub fn checked_add(&self, duration: Duration) -> Option<Instant> { self.0.checked_add_duration(&duration).map(|t| Instant(t)) } /// Returns `Some(t)` where `t` is the time `self - duration` if `t` can be represented as /// `Instant` (which means it's inside the bounds of the underlying data structure), `None` /// otherwise. 
#[unstable(feature = "time_checked_add", issue = "55940")] pub fn checked_sub(&self, duration: Duration) -> Option<Instant> { self.0.checked_sub_duration(&duration).map(|t| Instant(t)) } } #[stable(feature = "time2", since = "1.8.0")] impl Add<Duration> for Instant { type Output = Instant; /// # Panics /// /// This function may panic if the resulting point in time cannot be represented by the /// underlying data structure. See [`checked_add`] for a version without panic. /// /// [`checked_add`]: ../../std/time/struct.Instant.html#method.checked_add fn add(self, other: Duration) -> Instant { self.checked_add(other) .expect("overflow when adding duration to instant") } } #[stable(feature = "time_augmented_assignment", since = "1.9.0")] impl AddAssign<Duration> for Instant { fn add_assign(&mut self, other: Duration) { *self = *self + other; } } #[stable(feature = "time2", since = "1.8.0")] impl Sub<Duration> for Instant { type Output = Instant; fn sub(self, other: Duration) -> Instant { self.checked_sub(other) .expect("overflow when subtracting duration from instant") } } #[stable(feature = "time_augmented_assignment", since = "1.9.0")] impl SubAssign<Duration> for Instant { fn sub_assign(&mut self, other: Duration) { *self = *self - other; } } #[stable(feature = "time2", since = "1.8.0")] impl Sub<Instant> for Instant { type Output = Duration; fn sub(self, other: Instant) -> Duration { self.duration_since(other) } } #[stable(feature = "time2", since = "1.8.0")] impl fmt::Debug for Instant { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.0.fmt(f) } } impl SystemTime { /// An anchor in time which can be used to create new `SystemTime` instances or /// learn about where in time a `SystemTime` lies. /// /// This constant is defined to be "1970-01-01 00:00:00 UTC" on all systems with /// respect to the system clock. 
Using `duration_since` on an existing /// `SystemTime` instance can tell how far away from this point in time a /// measurement lies, and using `UNIX_EPOCH + duration` can be used to create a /// `SystemTime` instance to represent another fixed point in time. /// /// # Examples /// /// ```no_run /// use std::time::SystemTime; /// /// match SystemTime::now().duration_since(SystemTime::UNIX_EPOCH) { /// Ok(n) => println!("1970-01-01 00:00:00 UTC was {} seconds ago!", n.as_secs()), /// Err(_) => panic!("SystemTime before UNIX EPOCH!"), /// } /// ``` #[stable(feature = "assoc_unix_epoch", since = "1.28.0")] pub const UNIX_EPOCH: SystemTime = UNIX_EPOCH; /// Returns the system time corresponding to "now". /// /// # Examples /// /// ``` /// use std::time::SystemTime; /// /// let sys_time = SystemTime::now(); /// ``` #[stable(feature = "time2", since = "1.8.0")] pub fn now() -> SystemTime { SystemTime(time::SystemTime::now()) } /// Returns the amount of time elapsed from an earlier point in time. /// /// This function may fail because measurements taken earlier are not /// guaranteed to always be before later measurements (due to anomalies such /// as the system clock being adjusted either forwards or backwards). /// /// If successful, [`Ok`]`(`[`Duration`]`)` is returned where the duration represents /// the amount of time elapsed from the specified measurement to this one. /// /// Returns an [`Err`] if `earlier` is later than `self`, and the error /// contains how far from `self` the time is. 
/// /// [`Ok`]: ../../std/result/enum.Result.html#variant.Ok /// [`Duration`]: ../../std/time/struct.Duration.html /// [`Err`]: ../../std/result/enum.Result.html#variant.Err /// /// # Examples /// /// ``` /// use std::time::SystemTime; /// /// let sys_time = SystemTime::now(); /// let difference = sys_time.duration_since(sys_time) /// .expect("SystemTime::duration_since failed"); /// println!("{:?}", difference); /// ``` #[stable(feature = "time2", since = "1.8.0")] pub fn duration_since(&self, earlier: SystemTime) -> Result<Duration, SystemTimeError> { self.0.sub_time(&earlier.0).map_err(SystemTimeError) } /// Returns the amount of time elapsed since this system time was created. /// /// This function may fail as the underlying system clock is susceptible to /// drift and updates (e.g., the system clock could go backwards), so this /// function may not always succeed. If successful, [`Ok`]`(`[`Duration`]`)` is /// returned where the duration represents the amount of time elapsed from /// this time measurement to the current time. /// /// Returns an [`Err`] if `self` is later than the current system time, and /// the error contains how far from the current system time `self` is. /// /// [`Ok`]: ../../std/result/enum.Result.html#variant.Ok /// [`Duration`]: ../../std/time/struct.Duration.html /// [`Err`]: ../../std/result/enum.Result.html#variant.Err /// /// # Examples /// /// ```no_run /// use std::thread::sleep; /// use std::time::{Duration, SystemTime}; /// /// let sys_time = SystemTime::now(); /// let one_sec = Duration::from_secs(1); /// sleep(one_sec); /// assert!(sys_time.elapsed().unwrap() >= one_sec); /// ``` #[stable(feature = "time2", since = "1.8.0")] pub fn elapsed(&self) -> Result<Duration, SystemTimeError> { SystemTime::now().duration_since(*self) } /// Returns `Some(t)` where `t` is the time `self + duration` if `t` can be represented as /// `SystemTime` (which means it's inside the bounds of the underlying data structure), `None` /// otherwise. 
#[unstable(feature = "time_checked_add", issue = "55940")] pub fn checked_add(&self, duration: Duration) -> Option<SystemTime> { self.0.checked_add_duration(&duration).map(|t| SystemTime(t)) } /// Returns `Some(t)` where `t` is the time `self - duration` if `t` can be represented as /// `SystemTime` (which means it's inside the bounds of the underlying data structure), `None` /// otherwise. #[unstable(feature = "time_checked_add", issue = "55940")] pub fn checked_sub(&self, duration: Duration) -> Option<SystemTime> { self.0.checked_sub_duration(&duration).map(|t| SystemTime(t)) } } #[stable(feature = "time2", since = "1.8.0")] impl Add<Duration> for SystemTime { type Output = SystemTime; /// # Panics /// /// This function may panic if the resulting point in time cannot be represented by the /// underlying data structure. See [`checked_add`] for a version without panic. /// /// [`checked_add`]: ../../std/time/struct.SystemTime.html#method.checked_add fn add(self, dur: Duration) -> SystemTime { self.checked_add(dur) .expect("overflow when adding duration to instant") } } #[stable(feature = "time_augmented_assignment", since = "1.9.0")] impl AddAssign<Duration> for SystemTime { fn add_assign(&mut self, other: Duration) { *self = *self + other; } } #[stable(feature = "time2", since = "1.8.0")] impl Sub<Duration> for SystemTime { type Output = SystemTime; fn sub(self, dur: Duration) -> SystemTime { self.checked_sub(dur) .expect("overflow when subtracting duration from instant") } } #[stable(feature = "time_augmented_assignment", since = "1.9.0")] impl SubAssign<Duration> for SystemTime { fn sub_assign(&mut self, other: Duration) { *self = *self - other; } } #[stable(feature = "time2", since = "1.8.0")] impl fmt::Debug for SystemTime { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.0.fmt(f) } } /// An anchor in time which can be used to create new `SystemTime` instances or /// learn about where in time a `SystemTime` lies. 
/// /// This constant is defined to be "1970-01-01 00:00:00 UTC" on all systems with /// respect to the system clock. Using `duration_since` on an existing /// [`SystemTime`] instance can tell how far away from this point in time a /// measurement lies, and using `UNIX_EPOCH + duration` can be used to create a /// [`SystemTime`] instance to represent another fixed point in time. /// /// [`SystemTime`]: ../../std/time/struct.SystemTime.html /// /// # Examples /// /// ```no_run /// use std::time::{SystemTime, UNIX_EPOCH}; /// /// match SystemTime::now().duration_since(UNIX_EPOCH) { /// Ok(n) => println!("1970-01-01 00:00:00 UTC was {} seconds ago!", n.as_secs()), /// Err(_) => panic!("SystemTime before UNIX EPOCH!"), /// } /// ``` #[stable(feature = "time2", since = "1.8.0")] pub const UNIX_EPOCH: SystemTime = SystemTime(time::UNIX_EPOCH); impl SystemTimeError { /// Returns the positive duration which represents how far forward the /// second system time was from the first. /// /// A `SystemTimeError` is returned from the [`duration_since`] and [`elapsed`] /// methods of [`SystemTime`] whenever the second system time represents a point later /// in time than the `self` of the method call. 
/// /// [`duration_since`]: ../../std/time/struct.SystemTime.html#method.duration_since /// [`elapsed`]: ../../std/time/struct.SystemTime.html#method.elapsed /// [`SystemTime`]: ../../std/time/struct.SystemTime.html /// /// # Examples /// /// ```no_run /// use std::thread::sleep; /// use std::time::{Duration, SystemTime}; /// /// let sys_time = SystemTime::now(); /// sleep(Duration::from_secs(1)); /// let new_sys_time = SystemTime::now(); /// match sys_time.duration_since(new_sys_time) { /// Ok(_) => {} /// Err(e) => println!("SystemTimeError difference: {:?}", e.duration()), /// } /// ``` #[stable(feature = "time2", since = "1.8.0")] pub fn duration(&self) -> Duration { self.0 } } #[stable(feature = "time2", since = "1.8.0")] impl Error for SystemTimeError { fn description(&self) -> &str { "other time was not earlier than self" } } #[stable(feature = "time2", since = "1.8.0")] impl fmt::Display for SystemTimeError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "second time provided was later than self") } } impl FromInner<time::SystemTime> for SystemTime { fn from_inner(time: time::SystemTime) -> SystemTime { SystemTime(time) } } #[cfg(test)] mod tests { use super::{Instant, SystemTime, Duration, UNIX_EPOCH}; macro_rules! 
assert_almost_eq { ($a:expr, $b:expr) => ({ let (a, b) = ($a, $b); if a != b { let (a, b) = if a > b {(a, b)} else {(b, a)}; assert!(a - Duration::new(0, 1000) <= b, "{:?} is not almost equal to {:?}", a, b); } }) } #[test] fn instant_monotonic() { let a = Instant::now(); let b = Instant::now(); assert!(b >= a); } #[test] fn instant_elapsed() { let a = Instant::now(); a.elapsed(); } #[test] fn instant_math() { let a = Instant::now(); let b = Instant::now(); println!("a: {:?}", a); println!("b: {:?}", b); let dur = b.duration_since(a); println!("dur: {:?}", dur); assert_almost_eq!(b - dur, a); assert_almost_eq!(a + dur, b); let second = Duration::new(1, 0); assert_almost_eq!(a - second + second, a); assert_almost_eq!(a.checked_sub(second).unwrap().checked_add(second).unwrap(), a); // checked_add_duration will not panic on overflow let mut maybe_t = Some(Instant::now()); let max_duration = Duration::from_secs(u64::max_value()); // in case `Instant` can store `>= now + max_duration`. for _ in 0..2 { maybe_t = maybe_t.and_then(|t| t.checked_add(max_duration)); } assert_eq!(maybe_t, None); // checked_add_duration calculates the right time and will work for another year let year = Duration::from_secs(60 * 60 * 24 * 365); assert_eq!(a + year, a.checked_add(year).unwrap()); } #[test] #[should_panic] fn instant_duration_panic() { let a = Instant::now(); (a - Duration::new(1, 0)).duration_since(a); } #[test] fn system_time_math() { let a = SystemTime::now(); let b = SystemTime::now(); match b.duration_since(a) { Ok(dur) if dur == Duration::new(0, 0) => { assert_almost_eq!(a, b); } Ok(dur) => { assert!(b > a); assert_almost_eq!(b - dur, a); assert_almost_eq!(a + dur, b); } Err(dur) => { let dur = dur.duration(); assert!(a > b); assert_almost_eq!(b + dur, a); assert_almost_eq!(a - dur, b); } } let second = Duration::new(1, 0); assert_almost_eq!(a.duration_since(a - second).unwrap(), second); assert_almost_eq!(a.duration_since(a + second).unwrap_err() .duration(), second); 
assert_almost_eq!(a - second + second, a); assert_almost_eq!(a.checked_sub(second).unwrap().checked_add(second).unwrap(), a); // A difference of 80 and 800 years cannot fit inside a 32-bit time_t if !(cfg!(unix) && ::mem::size_of::<::libc::time_t>() <= 4) { let eighty_years = second * 60 * 60 * 24 * 365 * 80; assert_almost_eq!(a - eighty_years + eighty_years, a); assert_almost_eq!(a - (eighty_years * 10) + (eighty_years * 10), a); } let one_second_from_epoch = UNIX_EPOCH + Duration::new(1, 0); let one_second_from_epoch2 = UNIX_EPOCH + Duration::new(0, 500_000_000) + Duration::new(0, 500_000_000); assert_eq!(one_second_from_epoch, one_second_from_epoch2); // checked_add_duration will not panic on overflow let mut maybe_t = Some(SystemTime::UNIX_EPOCH); let max_duration = Duration::from_secs(u64::max_value()); // in case `SystemTime` can store `>= UNIX_EPOCH + max_duration`. for _ in 0..2 { maybe_t = maybe_t.and_then(|t| t.checked_add(max_duration)); } assert_eq!(maybe_t, None); // checked_add_duration calculates the right time and will work for another year let year = Duration::from_secs(60 * 60 * 24 * 365); assert_eq!(a + year, a.checked_add(year).unwrap()); } #[test] fn system_time_elapsed() { let a = SystemTime::now(); drop(a.elapsed()); } #[test] fn since_epoch() { let ts = SystemTime::now(); let a = ts.duration_since(UNIX_EPOCH).unwrap(); let b = ts.duration_since(UNIX_EPOCH - Duration::new(1, 0)).unwrap(); assert!(b > a); assert_eq!(b - a, Duration::new(1, 0)); let thirty_years = Duration::new(1, 0) * 60 * 60 * 24 * 365 * 30; // Right now for CI this test is run in an emulator, and apparently the // aarch64 emulator's sense of time is that we're still living in the // 70s. // // Otherwise let's assume that we're all running computers later than // 2000. if !cfg!(target_arch = "aarch64") { assert!(a > thirty_years); } // let's assume that we're all running computers earlier than 2090. // Should give us ~70 years to fix this! 
let hundred_twenty_years = thirty_years * 4; assert!(a < hundred_twenty_years); } }
34.662651
98
0.595151
38bc20cc8ae50469246ca1ce80aef8f95d5d6838
2,918
use crate::vec2::Vec2; #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub struct Rect { start: Vec2, size: Vec2, } impl Rect { pub fn new( start: impl Into<Vec2>, size: impl Into<Vec2>, ) -> Self { Self { start: start.into(), size: size.into(), } } pub fn contains( self, p: impl Into<Vec2>, ) -> bool { let p = p.into(); p.x >= self.x() && p.x < (self.x() + self.w()) && p.y >= self.y() && p.y < (self.y() + self.h()) } pub fn contains_inclusive( self, p: impl Into<Vec2>, ) -> bool { let p = p.into(); p.x >= self.x() && p.x <= (self.x() + self.w()) && p.y >= self.y() && p.y <= (self.y() + self.h()) } pub fn add_start( self, add: impl Into<Vec2>, ) -> Self { let add = add.into(); Self { start: self.start + add, size: self.size - add, } } pub fn sub_size( self, sub: impl Into<Vec2>, ) -> Self { let sub = sub.into(); Self { start: self.start, size: self.size - sub, } } pub fn with_margin( self, margin: u16, ) -> Self { self.add_start((margin, margin)).sub_size((margin, margin)) } pub fn split_vertical( self, pos: u16, ) -> (Self, Self) { let up = Self { start: self.start, size: Vec2 { y: pos, ..self.size }, }; let down = Self { start: self.start.add_y(pos), size: Vec2 { y: self.size.y - pos, ..self.size }, }; debug_assert!(self.contains(down.start())); (up, down) } #[inline] pub fn end(self) -> Vec2 { self.start + self.size } #[inline] pub const fn start(self) -> Vec2 { self.start } #[inline] pub const fn size(self) -> Vec2 { self.size } #[inline] pub const fn x(self) -> u16 { self.start.x } #[inline] pub const fn y(self) -> u16 { self.start.y } #[inline] pub const fn w(self) -> u16 { self.size.x } #[inline] pub const fn h(self) -> u16 { self.size.y } } #[test] fn zero_contains_test() { let rect = Rect::new((0, 0), (0, 0)); assert!(!rect.contains((0, 0))); } #[test] fn contains_test() { let rect = Rect::new((0, 0), (2, 2)); assert!(rect.contains((0, 0))); assert!(rect.contains((0, 1))); assert!(rect.contains((1, 0))); assert!(rect.contains((1, 1))); 
assert!(!rect.contains((0, 2))); assert!(!rect.contains((2, 0))); assert!(!rect.contains((2, 2))); }
19.716216
67
0.426662
ddfd4e55a85c3594be560e1bf9505c962e4bd18f
3,239
// Bitcoin Dev Kit // Written in 2020 by Alekos Filini <alekos.filini@gmail.com> // // Copyright (c) 2020-2021 Bitcoin Dev Kit Developers // // This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE // or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option. // You may not use this file except in accordance with one or both of these // licenses. extern crate bdk; extern crate bitcoin; extern crate clap; extern crate log; extern crate miniscript; extern crate serde_json; use std::error::Error; use std::str::FromStr; use log::info; use clap::{App, Arg}; use dogecoin::Network; use miniscript_doge::policy::Concrete; use miniscript_doge::Descriptor; use bdk_doge::database::memory::MemoryDatabase; use bdk_doge::wallet::AddressIndex::New; use bdk_doge::{KeychainKind, Wallet}; fn main() -> Result<(), Box<dyn Error>> { env_logger::init_from_env( env_logger::Env::default().filter_or(env_logger::DEFAULT_FILTER_ENV, "info"), ); let matches = App::new("Miniscript Compiler") .arg( Arg::with_name("POLICY") .help("Sets the spending policy to compile") .required(true) .index(1), ) .arg( Arg::with_name("TYPE") .help("Sets the script type used to embed the compiled policy") .required(true) .index(2) .possible_values(&["sh", "wsh", "sh-wsh"]), ) .arg( Arg::with_name("parsed_policy") .long("parsed_policy") .short("p") .help("Also return the parsed spending policy in JSON format"), ) .arg( Arg::with_name("network") .short("n") .long("network") .help("Sets the network") .takes_value(true) .default_value("testnet") .possible_values(&["testnet", "regtest", "bitcoin", "signet"]), ) .get_matches(); let policy_str = matches.value_of("POLICY").unwrap(); info!("Compiling policy: {}", policy_str); let policy = Concrete::<String>::from_str(&policy_str)?; let descriptor = match matches.value_of("TYPE").unwrap() { "sh" => Descriptor::new_sh(policy.compile()?)?, "wsh" => 
Descriptor::new_wsh(policy.compile()?)?, "sh-wsh" => Descriptor::new_sh_wsh(policy.compile()?)?, _ => panic!("Invalid type"), }; info!("... Descriptor: {}", descriptor); let database = MemoryDatabase::new(); let network = matches .value_of("network") .map(|n| Network::from_str(n)) .transpose() .unwrap() .unwrap_or(Network::Testnet); let wallet = Wallet::new_offline(&format!("{}", descriptor), None, network, database)?; info!("... First address: {}", wallet.get_address(New)?); if matches.is_present("parsed_policy") { let spending_policy = wallet.policies(KeychainKind::External)?; info!( "... Spending policy:\n{}", serde_json::to_string_pretty(&spending_policy)? ); } Ok(()) }
30.556604
91
0.58444
5bbd46c585896856c82092ad8131f04ffdc6599f
4,063
use errors::*; use goblin::elf::Elf; use goblin::mach::{self, Mach, MachO}; use goblin::Object; use patcher::BUILTIN_PREFIX; use std::fs::File; use std::io::Read; use std::path::Path; #[derive(Clone, Debug, Default)] pub struct ExtractedSymbol { pub name: String, } #[derive(Clone, Debug, Default)] pub struct ExtractedSymbols { pub symbols: Vec<ExtractedSymbol>, } impl From<Vec<ExtractedSymbol>> for ExtractedSymbols { fn from(symbols: Vec<ExtractedSymbol>) -> Self { ExtractedSymbols { symbols } } } impl ExtractedSymbols { pub fn builtins_names(&self) -> Vec<&str> { let builtins_names: Vec<&str> = self.symbols .iter() .filter(|symbol| symbol.name.starts_with(BUILTIN_PREFIX)) .map(|symbol| &symbol.name[BUILTIN_PREFIX.len()..]) .collect(); builtins_names } pub fn merge_additional(mut self, additional_names: &[String]) -> Self { let mut additional_symbols: Vec<_> = additional_names .into_iter() .map(|name| ExtractedSymbol { name: name.to_string(), }) .collect(); self.symbols.append(&mut additional_symbols); self.symbols.dedup_by(|a, b| a.name == b.name); self } } fn parse_elf(elf: &Elf) -> Result<ExtractedSymbols, WError> { let mut symbols = vec![]; for symbol in elf.dynsyms .iter() .filter(|symbol| symbol.st_info == 0x12 || symbol.st_info == 0x22) { let name = elf.dynstrtab .get(symbol.st_name) .ok_or(WError::ParseError)? .map_err(|_| WError::ParseError)? .to_string(); let extracted_symbol = ExtractedSymbol { name }; symbols.push(extracted_symbol); } Ok(symbols.into()) } fn parse_macho(macho: &MachO) -> Result<ExtractedSymbols, WError> { let mut symbols = vec![]; // Start by finding the boundaries of the text section let mut text_offset = None; let mut text_size = None; for section in macho.segments.sections() { for segment in section { if let Ok(( mach::segment::Section { sectname: [b'_', b'_', b't', b'e', b'x', b't', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], segname: [b'_', b'_', b'T', b'E', b'X', b'T', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], size, offset, .. 
}, _, )) = segment { text_offset = Some(offset as usize); text_size = Some(size as usize); } } } let text_offset = text_offset.ok_or(WError::ParseError)?; let text_size = text_size.ok_or(WError::ParseError)?; // Extract the symbols we are interested in for symbol in macho.symbols.as_ref().ok_or(WError::ParseError)?.iter() { match symbol { Ok(( name, mach::symbols::Nlist { n_type: 0xf, n_sect: 1, n_value, .. }, )) if name.len() > 1 && name.starts_with('_') => { let extracted_symbol = ExtractedSymbol { name: name[1..].to_string(), }; let offset = n_value as usize; if offset < text_offset || offset >= text_offset + text_size { continue; } symbols.push(extracted_symbol); } _ => {} } } Ok(symbols.into()) } pub fn extract_symbols<P: AsRef<Path>>(path: P) -> Result<ExtractedSymbols, WError> { let mut buffer = Vec::new(); File::open(path)?.read_to_end(&mut buffer)?; let symbols = match Object::parse(&buffer).map_err(|_| WError::ParseError)? { Object::Mach(Mach::Binary(macho)) => parse_macho(&macho), Object::Elf(elf) => parse_elf(&elf), _ => xbail!(WError::Unsupported), }?; Ok(symbols) }
30.780303
97
0.524489
5b73af10b4d1d2ca903f885b39579e166d047637
1,512
use notify::DebouncedEvent; use std::path::PathBuf; use std::sync::mpsc::Receiver; use std::time::Duration; use failure::{format_err, Error}; use crate::terminal::message; use log::info; // Add cooldown for all types of events to watching logic pub fn wait_for_changes( rx: &Receiver<DebouncedEvent>, cooldown: Duration, ) -> Result<PathBuf, Error> { loop { let event = rx.recv()?; match get_changed_path_from_event(event) { Ok(Some(path)) => { message::working("Detected changes..."); // wait for cooldown while rx.recv_timeout(cooldown).is_ok() {} return Ok(path); } Ok(None) => { continue; // was an event type we don't care about, continue } Err(error) => { message::user_error(&format!("WatchError {:?}", error)); continue; } }; } } fn get_changed_path_from_event(event: DebouncedEvent) -> Result<Option<PathBuf>, Error> { info!("Detected Event {:?}", event); match event { DebouncedEvent::Error(error, _) => Err(format_err!("{:?}", error)), DebouncedEvent::NoticeWrite(path) => Ok(Some(path)), DebouncedEvent::Write(path) => Ok(Some(path)), DebouncedEvent::NoticeRemove(path) => Ok(Some(path)), DebouncedEvent::Remove(path) => Ok(Some(path)), DebouncedEvent::Create(path) => Ok(Some(path)), _ => Ok(None), } }
31.5
89
0.564815
167317f426b7160b853cf955bc41847b16ca9d30
27,091
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. /*! Managing the scope stack. The scopes are tied to lexical scopes, so as we descend the HAIR, we push a scope on the stack, translate ite contents, and then pop it off. Every scope is named by a `CodeExtent`. ### SEME Regions When pushing a new scope, we record the current point in the graph (a basic block); this marks the entry to the scope. We then generate more stuff in the control-flow graph. Whenever the scope is exited, either via a `break` or `return` or just by fallthrough, that marks an exit from the scope. Each lexical scope thus corresponds to a single-entry, multiple-exit (SEME) region in the control-flow graph. For now, we keep a mapping from each `CodeExtent` to its corresponding SEME region for later reference (see caveat in next paragraph). This is because region scopes are tied to them. Eventually, when we shift to non-lexical lifetimes, three should be no need to remember this mapping. There is one additional wrinkle, actually, that I wanted to hide from you but duty compels me to mention. In the course of translating matches, it sometimes happen that certain code (namely guards) gets executed multiple times. This means that the scope lexical scope may in fact correspond to multiple, disjoint SEME regions. So in fact our mapping is from one scope to a vector of SEME regions. ### Drops The primary purpose for scopes is to insert drops: while translating the contents, we also accumulate lvalues that need to be dropped upon exit from each scope. This is done by calling `schedule_drop`. 
Once a drop is scheduled, whenever we branch out we will insert drops of all those lvalues onto the outgoing edge. Note that we don't know the full set of scheduled drops up front, and so whenever we exit from the scope we only drop the values scheduled thus far. For example, consider the scope S corresponding to this loop: ``` loop { let x = ...; if cond { break; } let y = ...; } ``` When processing the `let x`, we will add one drop to the scope for `x`. The break will then insert a drop for `x`. When we process `let y`, we will add another drop (in fact, to a subscope, but let's ignore that for now); any later drops would also drop `y`. ### Early exit There are numerous "normal" ways to early exit a scope: `break`, `continue`, `return` (panics are handled separately). Whenever an early exit occurs, the method `exit_scope` is called. It is given the current point in execution where the early exit occurs, as well as the scope you want to branch to (note that all early exits from to some other enclosing scope). `exit_scope` will record thid exit point and also add all drops. Panics are handled in a similar fashion, except that a panic always returns out to the `DIVERGE_BLOCK`. To trigger a panic, simply call `panic(p)` with the current point `p`. Or else you can call `diverge_cleanup`, which will produce a block that you can branch to which does the appropriate cleanup and then diverges. `panic(p)` simply calls `diverge_cleanup()` and adds an edge from `p` to the result. ### Loop scopes In addition to the normal scope stack, we track a loop scope stack that contains only loops. It tracks where a `break` and `continue` should go to. 
*/ use build::{BlockAnd, BlockAndExtension, Builder, CFG}; use rustc::middle::region::CodeExtent; use rustc::middle::lang_items; use rustc::middle::subst::{Substs, Subst, VecPerParamSpace}; use rustc::middle::ty::{self, Ty}; use rustc::mir::repr::*; use syntax::codemap::{Span, DUMMY_SP}; use syntax::parse::token::intern_and_get_ident; pub struct Scope<'tcx> { extent: CodeExtent, drops: Vec<DropData<'tcx>>, // A scope may only have one associated free, because: // 1. We require a `free` to only be scheduled in the scope of `EXPR` in `box EXPR`; // 2. It only makes sense to have it translated into the diverge-path. // // This kind of drop will be run *after* all the regular drops scheduled onto this scope, // because drops may have dependencies on the allocated memory. // // This is expected to go away once `box EXPR` becomes a sugar for placement protocol and gets // desugared in some earlier stage. free: Option<FreeData<'tcx>>, } struct DropData<'tcx> { value: Lvalue<'tcx>, // NB: per-drop “cache” is necessary for the build_scope_drops function below. /// The cached block for the cleanups-on-diverge path. This block contains code to run the /// current drop and all the preceding drops (i.e. those having lower index in Drop’s /// Scope drop array) cached_block: Option<BasicBlock> } struct FreeData<'tcx> { span: Span, /// Lvalue containing the allocated box. value: Lvalue<'tcx>, /// type of item for which the box was allocated for (i.e. the T in Box<T>). item_ty: Ty<'tcx>, /// The cached block containing code to run the free. The block will also execute all the drops /// in the scope. 
cached_block: Option<BasicBlock> } #[derive(Clone, Debug)] pub struct LoopScope { /// Extent of the loop pub extent: CodeExtent, /// Where the body of the loop begins pub continue_block: BasicBlock, /// Block to branch into when the loop terminates (either by being `break`-en out from, or by /// having its condition to become false) pub break_block: BasicBlock, // where to go on a `break /// Indicates the reachability of the break_block for this loop pub might_break: bool } impl<'tcx> Scope<'tcx> { /// Invalidate all the cached blocks in the scope. /// /// Should always be run for all inner scopes when a drop is pushed into some scope enclosing a /// larger extent of code. fn invalidate_cache(&mut self) { for dropdata in &mut self.drops { dropdata.cached_block = None; } if let Some(ref mut freedata) = self.free { freedata.cached_block = None; } } /// Returns the cached block for this scope. /// /// Precondition: the caches must be fully filled (i.e. diverge_cleanup is called) in order for /// this method to work correctly. fn cached_block(&self) -> Option<BasicBlock> { if let Some(data) = self.drops.last() { Some(data.cached_block.expect("drop cache is not filled")) } else if let Some(ref data) = self.free { Some(data.cached_block.expect("free cache is not filled")) } else { None } } } impl<'a,'tcx> Builder<'a,'tcx> { // Adding and removing scopes // ========================== /// Start a loop scope, which tracks where `continue` and `break` /// should branch to. See module comment for more details. /// /// Returns the might_break attribute of the LoopScope used. 
pub fn in_loop_scope<F>(&mut self, loop_block: BasicBlock, break_block: BasicBlock, f: F) -> bool where F: FnOnce(&mut Builder<'a, 'tcx>) { let extent = self.extent_of_innermost_scope(); let loop_scope = LoopScope { extent: extent.clone(), continue_block: loop_block, break_block: break_block, might_break: false }; self.loop_scopes.push(loop_scope); f(self); let loop_scope = self.loop_scopes.pop().unwrap(); assert!(loop_scope.extent == extent); loop_scope.might_break } /// Convenience wrapper that pushes a scope and then executes `f` /// to build its contents, popping the scope afterwards. pub fn in_scope<F, R>(&mut self, extent: CodeExtent, mut block: BasicBlock, f: F) -> BlockAnd<R> where F: FnOnce(&mut Builder<'a, 'tcx>) -> BlockAnd<R> { debug!("in_scope(extent={:?}, block={:?})", extent, block); self.push_scope(extent); let rv = unpack!(block = f(self)); unpack!(block = self.pop_scope(extent, block)); debug!("in_scope: exiting extent={:?} block={:?}", extent, block); block.and(rv) } /// Push a scope onto the stack. You can then build code in this /// scope and call `pop_scope` afterwards. Note that these two /// calls must be paired; using `in_scope` as a convenience /// wrapper maybe preferable. pub fn push_scope(&mut self, extent: CodeExtent) { debug!("push_scope({:?})", extent); self.scopes.push(Scope { extent: extent.clone(), drops: vec![], free: None }); } /// Pops a scope, which should have extent `extent`, adding any /// drops onto the end of `block` that are needed. This must /// match 1-to-1 with `push_scope`. pub fn pop_scope(&mut self, extent: CodeExtent, block: BasicBlock) -> BlockAnd<()> { debug!("pop_scope({:?}, {:?})", extent, block); // We need to have `cached_block`s available for all the drops, so we call diverge_cleanup // to make sure all the `cached_block`s are filled in. 
self.diverge_cleanup(); let scope = self.scopes.pop().unwrap(); assert_eq!(scope.extent, extent); build_scope_drops(&mut self.cfg, &scope, &self.scopes[..], block) } /// Branch out of `block` to `target`, exiting all scopes up to /// and including `extent`. This will insert whatever drops are /// needed, as well as tracking this exit for the SEME region. See /// module comment for details. pub fn exit_scope(&mut self, span: Span, extent: CodeExtent, mut block: BasicBlock, target: BasicBlock) { let scope_count = 1 + self.scopes.iter().rev().position(|scope| scope.extent == extent) .unwrap_or_else(||{ self.hir.span_bug(span, &format!("extent {:?} does not enclose", extent)) }); let tmp = self.get_unit_temp(); for (idx, ref scope) in self.scopes.iter().enumerate().rev().take(scope_count) { unpack!(block = build_scope_drops(&mut self.cfg, scope, &self.scopes[..idx], block)); if let Some(ref free_data) = scope.free { let next = self.cfg.start_new_block(); let free = build_free(self.hir.tcx(), tmp.clone(), free_data, next); self.cfg.terminate(block, free); block = next; } } self.cfg.terminate(block, Terminator::Goto { target: target }); } // Finding scopes // ============== /// Finds the loop scope for a given label. This is used for /// resolving `break` and `continue`. pub fn find_loop_scope(&mut self, span: Span, label: Option<CodeExtent>) -> &mut LoopScope { let Builder { ref mut loop_scopes, ref mut hir, .. } = *self; match label { None => { // no label? 
return the innermost loop scope loop_scopes.iter_mut().rev().next() } Some(label) => { // otherwise, find the loop-scope with the correct id loop_scopes.iter_mut() .rev() .filter(|loop_scope| loop_scope.extent == label) .next() } }.unwrap_or_else(|| hir.span_bug(span, "no enclosing loop scope found?")) } pub fn extent_of_innermost_scope(&self) -> CodeExtent { self.scopes.last().map(|scope| scope.extent).unwrap() } pub fn extent_of_outermost_scope(&self) -> CodeExtent { self.scopes.first().map(|scope| scope.extent).unwrap() } // Scheduling drops // ================ /// Indicates that `lvalue` should be dropped on exit from /// `extent`. pub fn schedule_drop(&mut self, span: Span, extent: CodeExtent, lvalue: &Lvalue<'tcx>, lvalue_ty: Ty<'tcx>) { if !self.hir.needs_drop(lvalue_ty) { return } for scope in self.scopes.iter_mut().rev() { if scope.extent == extent { // No need to invalidate any caches here. The just-scheduled drop will branch into // the drop that comes before it in the vector. scope.drops.push(DropData { value: lvalue.clone(), cached_block: None }); return; } else { // We must invalidate all the cached_blocks leading up to the scope we’re // looking for, because all of the blocks in the chain will become incorrect. scope.invalidate_cache() } } self.hir.span_bug(span, &format!("extent {:?} not in scope to drop {:?}", extent, lvalue)); } /// Schedule dropping of a not-yet-fully-initialised box. /// /// This cleanup will only be translated into unwind branch. /// The extent should be for the `EXPR` inside `box EXPR`. /// There may only be one “free” scheduled in any given scope. 
pub fn schedule_box_free(&mut self, span: Span, extent: CodeExtent, value: &Lvalue<'tcx>, item_ty: Ty<'tcx>) { for scope in self.scopes.iter_mut().rev() { if scope.extent == extent { assert!(scope.free.is_none(), "scope already has a scheduled free!"); // We also must invalidate the caches in the scope for which the free is scheduled // because the drops must branch into the free we schedule here. scope.invalidate_cache(); scope.free = Some(FreeData { span: span, value: value.clone(), item_ty: item_ty, cached_block: None }); return; } else { // We must invalidate all the cached_blocks leading up to the scope we’re looking // for, because otherwise some/most of the blocks in the chain will become // incorrect. scope.invalidate_cache(); } } self.hir.span_bug(span, &format!("extent {:?} not in scope to free {:?}", extent, value)); } // Other // ===== /// Creates a path that performs all required cleanup for unwinding. /// /// This path terminates in Resume. Returns the start of the path. /// See module comment for more details. None indicates there’s no /// cleanup to do at this point. pub fn diverge_cleanup(&mut self) -> Option<BasicBlock> { if self.scopes.is_empty() { return None; } let unit_temp = self.get_unit_temp(); let Builder { ref mut hir, ref mut cfg, ref mut scopes, .. } = *self; let mut next_block = None; // Given an array of scopes, we generate these from the outermost scope to the innermost // one. Thus for array [S0, S1, S2] with corresponding cleanup blocks [B0, B1, B2], we will // generate B0 <- B1 <- B2 in left-to-right order. Control flow of the generated blocks // always ends up at a block with the Resume terminator. 
for scope in scopes.iter_mut().filter(|s| !s.drops.is_empty() || s.free.is_some()) { next_block = Some(build_diverge_scope(hir.tcx(), cfg, unit_temp.clone(), scope, next_block)); } scopes.iter().rev().flat_map(|x| x.cached_block()).next() } /// Utility function for *non*-scope code to build their own drops pub fn build_drop(&mut self, block: BasicBlock, value: Lvalue<'tcx>) -> BlockAnd<()> { let next_target = self.cfg.start_new_block(); let diverge_target = self.diverge_cleanup(); self.cfg.terminate(block, Terminator::Drop { value: value, target: next_target, unwind: diverge_target, }); next_target.unit() } // Panicking // ========= // FIXME: should be moved into their own module pub fn panic_bounds_check(&mut self, block: BasicBlock, index: Operand<'tcx>, len: Operand<'tcx>, span: Span) { // fn(&(filename: &'static str, line: u32), index: usize, length: usize) -> ! let func = self.lang_function(lang_items::PanicBoundsCheckFnLangItem); let args = func.ty.fn_args(); let ref_ty = args.skip_binder()[0]; let (region, tup_ty) = if let ty::TyRef(region, tyandmut) = ref_ty.sty { (region, tyandmut.ty) } else { self.hir.span_bug(span, &format!("unexpected panic_bound_check type: {:?}", func.ty)); }; let (tuple, tuple_ref) = (self.temp(tup_ty), self.temp(ref_ty)); let (file, line) = self.span_to_fileline_args(span); let elems = vec![Operand::Constant(file), Operand::Constant(line)]; // FIXME: We should have this as a constant, rather than a stack variable (to not pollute // icache with cold branch code), however to achieve that we either have to rely on rvalue // promotion or have some way, in MIR, to create constants. self.cfg.push_assign(block, DUMMY_SP, &tuple, // tuple = (file_arg, line_arg); Rvalue::Aggregate(AggregateKind::Tuple, elems)); // FIXME: is this region really correct here? 
self.cfg.push_assign(block, DUMMY_SP, &tuple_ref, // tuple_ref = &tuple; Rvalue::Ref(*region, BorrowKind::Unique, tuple)); let cleanup = self.diverge_cleanup(); self.cfg.terminate(block, Terminator::Call { func: Operand::Constant(func), args: vec![Operand::Consume(tuple_ref), index, len], destination: None, cleanup: cleanup, }); } /// Create diverge cleanup and branch to it from `block`. pub fn panic(&mut self, block: BasicBlock, msg: &'static str, span: Span) { // fn(&(msg: &'static str filename: &'static str, line: u32)) -> ! let func = self.lang_function(lang_items::PanicFnLangItem); let args = func.ty.fn_args(); let ref_ty = args.skip_binder()[0]; let (region, tup_ty) = if let ty::TyRef(region, tyandmut) = ref_ty.sty { (region, tyandmut.ty) } else { self.hir.span_bug(span, &format!("unexpected panic type: {:?}", func.ty)); }; let (tuple, tuple_ref) = (self.temp(tup_ty), self.temp(ref_ty)); let (file, line) = self.span_to_fileline_args(span); let message = Constant { span: DUMMY_SP, ty: self.hir.tcx().mk_static_str(), literal: self.hir.str_literal(intern_and_get_ident(msg)) }; let elems = vec![Operand::Constant(message), Operand::Constant(file), Operand::Constant(line)]; // FIXME: We should have this as a constant, rather than a stack variable (to not pollute // icache with cold branch code), however to achieve that we either have to rely on rvalue // promotion or have some way, in MIR, to create constants. self.cfg.push_assign(block, DUMMY_SP, &tuple, // tuple = (message_arg, file_arg, line_arg); Rvalue::Aggregate(AggregateKind::Tuple, elems)); // FIXME: is this region really correct here? 
self.cfg.push_assign(block, DUMMY_SP, &tuple_ref, // tuple_ref = &tuple; Rvalue::Ref(*region, BorrowKind::Unique, tuple)); let cleanup = self.diverge_cleanup(); self.cfg.terminate(block, Terminator::Call { func: Operand::Constant(func), args: vec![Operand::Consume(tuple_ref)], cleanup: cleanup, destination: None, }); } fn lang_function(&mut self, lang_item: lang_items::LangItem) -> Constant<'tcx> { let funcdid = match self.hir.tcx().lang_items.require(lang_item) { Ok(d) => d, Err(m) => { self.hir.tcx().sess.fatal(&m) } }; Constant { span: DUMMY_SP, ty: self.hir.tcx().lookup_item_type(funcdid).ty, literal: Literal::Item { def_id: funcdid, kind: ItemKind::Function, substs: self.hir.tcx().mk_substs(Substs::empty()) } } } fn span_to_fileline_args(&mut self, span: Span) -> (Constant<'tcx>, Constant<'tcx>) { let span_lines = self.hir.tcx().sess.codemap().lookup_char_pos(span.lo); (Constant { span: DUMMY_SP, ty: self.hir.tcx().mk_static_str(), literal: self.hir.str_literal(intern_and_get_ident(&span_lines.file.name)) }, Constant { span: DUMMY_SP, ty: self.hir.tcx().types.u32, literal: self.hir.usize_literal(span_lines.line) }) } } /// Builds drops for pop_scope and exit_scope. fn build_scope_drops<'tcx>(cfg: &mut CFG<'tcx>, scope: &Scope<'tcx>, earlier_scopes: &[Scope<'tcx>], mut block: BasicBlock) -> BlockAnd<()> { let mut iter = scope.drops.iter().rev().peekable(); while let Some(drop_data) = iter.next() { // Try to find the next block with its cached block for us to diverge into in case the // drop panics. let on_diverge = iter.peek().iter().flat_map(|dd| dd.cached_block.into_iter()).next(); // If there’s no `cached_block`s within current scope, we must look for one in the // enclosing scope. 
let on_diverge = on_diverge.or_else(||{ earlier_scopes.iter().rev().flat_map(|s| s.cached_block()).next() }); let next = cfg.start_new_block(); cfg.terminate(block, Terminator::Drop { value: drop_data.value.clone(), target: next, unwind: on_diverge }); block = next; } block.unit() } fn build_diverge_scope<'tcx>(tcx: &ty::ctxt<'tcx>, cfg: &mut CFG<'tcx>, unit_temp: Lvalue<'tcx>, scope: &mut Scope<'tcx>, target: Option<BasicBlock>) -> BasicBlock { debug_assert!(!scope.drops.is_empty() || scope.free.is_some()); // First, we build the drops, iterating the drops array in reverse. We do that so that as soon // as we find a `cached_block`, we know that we’re finished and don’t need to do anything else. let mut previous = None; let mut last_drop_block = None; for drop_data in scope.drops.iter_mut().rev() { if let Some(cached_block) = drop_data.cached_block { if let Some((previous_block, previous_value)) = previous { cfg.terminate(previous_block, Terminator::Drop { value: previous_value, target: cached_block, unwind: None }); return last_drop_block.unwrap(); } else { return cached_block; } } else { let block = cfg.start_new_cleanup_block(); drop_data.cached_block = Some(block); if let Some((previous_block, previous_value)) = previous { cfg.terminate(previous_block, Terminator::Drop { value: previous_value, target: block, unwind: None }); } else { last_drop_block = Some(block); } previous = Some((block, drop_data.value.clone())); } } // Prepare the end target for this chain. let mut target = target.unwrap_or_else(||{ let b = cfg.start_new_cleanup_block(); cfg.terminate(b, Terminator::Resume); b }); // Then, build the free branching into the prepared target. 
if let Some(ref mut free_data) = scope.free { target = if let Some(cached_block) = free_data.cached_block { cached_block } else { let into = cfg.start_new_cleanup_block(); cfg.terminate(into, build_free(tcx, unit_temp, free_data, target)); free_data.cached_block = Some(into); into } }; if let Some((previous_block, previous_value)) = previous { // Finally, branch into that just-built `target` from the `previous_block`. cfg.terminate(previous_block, Terminator::Drop { value: previous_value, target: target, unwind: None }); last_drop_block.unwrap() } else { // If `previous.is_none()`, there were no drops in this scope – we return the // target. target } } fn build_free<'tcx>(tcx: &ty::ctxt<'tcx>, unit_temp: Lvalue<'tcx>, data: &FreeData<'tcx>, target: BasicBlock) -> Terminator<'tcx> { let free_func = tcx.lang_items.require(lang_items::BoxFreeFnLangItem) .unwrap_or_else(|e| tcx.sess.fatal(&e)); let substs = tcx.mk_substs(Substs::new( VecPerParamSpace::new(vec![], vec![], vec![data.item_ty]), VecPerParamSpace::new(vec![], vec![], vec![]) )); Terminator::Call { func: Operand::Constant(Constant { span: data.span, ty: tcx.lookup_item_type(free_func).ty.subst(tcx, substs), literal: Literal::Item { def_id: free_func, kind: ItemKind::Function, substs: substs } }), args: vec![Operand::Consume(data.value.clone())], destination: Some((unit_temp, target)), cleanup: None } }
41.936533
100
0.579971
56bc5115f99dc8b0214432b0de9199714acef72f
4,787
use either::Either; use agda_mode::agda::{preprint_agda_result, ReplState}; use agda_mode::base::ComputeMode; use agda_mode::cmd::{Cmd, GoalInput}; use agda_mode::pos::InteractionId; use agda_mode::resp::GoalInfo; use crate::file_io::{Monad, Repl}; use crate::input::UserInput; use crate::interact::help; use agda_mode::debug::{toggle_debug_command, toggle_debug_response}; pub async fn line(agda: &mut Repl, line: &str) -> Monad<bool> { line_impl(agda, UserInput::from(line)).await } async fn line_impl<'a>(agda: &mut Repl, line: UserInput<'a>) -> Monad<bool> { use UserInput::*; match line { Define(function_name) => { agda.append(&format!("{} : ?\n", function_name))?; agda.append(&format!("{} = ?\n", function_name))?; reload(agda).await?; } RawLine(code) => { agda.append(code)?; agda.append("\n")?; reload(agda).await?; } Give(i, new) => { let command = Cmd::give(GoalInput::no_range(i, new.to_owned())); agda.agda.command(command).await?; if let Some(gs) = preprint_agda_result(agda.agda.next_give_action().await?) { match gs.give_result.into_either() { Either::Left(s) => agda.fill_goal_buffer(gs.interaction_point, &s), // Don't know yet what to do Either::Right(b) => unimplemented!(), } agda.sync_buffer()?; // Poll the goals' information agda.agda.next_goals().await?; } } Infer(i, new) => { let command = Cmd::infer(GoalInput::no_range(i, new.to_owned())); agda.agda.command(command).await?; if let Some(gs) = preprint_agda_result(agda.agda.next_goal_specific().await?) { match gs.goal_info { GoalInfo::InferredType { expr } => println!("{} : {}", new, expr), _ => unreachable!(), } } } Simplify(i, new) => norm(agda, i, new, ComputeMode::DefaultCompute).await?, Normalize(i, new) => norm(agda, i, new, ComputeMode::UseShowInstance).await?, Type(i) => { let command = Cmd::goal_type(GoalInput::simple(i)); agda.agda.command(command).await?; if let Some(gs) = preprint_agda_result(agda.agda.next_goal_specific().await?) { match gs.goal_info { GoalInfo::CurrentGoal { the_type, .. 
} => println!("{}", the_type), _ => unreachable!(), } } } Reload => reload(agda).await?, Help => { println!("{}", help(agda.is_plain)); // TODO: info for commands. } ToggleDebugCommand => unsafe { toggle_debug_command() }, ToggleDebugResponse => unsafe { toggle_debug_response() }, Unknown(Some(err)) => println!("Wait, {}", err), Unknown(None) => println!("Sorry, I don't understand."), Exit => { finish(&mut agda.agda).await?; return Ok(true); } } Ok(false) } async fn norm(agda: &mut Repl, i: InteractionId, new: &str, mode: ComputeMode) -> Monad { let command = Cmd::Compute { compute_mode: mode, input: GoalInput::no_range(i, new.to_owned()), }; agda.agda.command(command).await?; if let Some(gs) = preprint_agda_result(agda.agda.next_goal_specific().await?) { match gs.goal_info { GoalInfo::NormalForm { expr, .. } => println!("{} --> {}", new, expr), _ => unreachable!(), } } Ok(()) } pub async fn reload(agda: &mut Repl) -> Monad { let da = &mut agda.agda; da.reload_file().await?; poll_goals(da).await } pub async fn poll_goals(agda: &mut ReplState) -> Monad { if let Some(agw) = preprint_agda_result(agda.next_all_goals_warnings().await?) { if agw.visible_goals.is_empty() { println!("No goals."); } else { println!("Goals:"); } for goal in agw.visible_goals { // I believe `goal` will always be `OfType`. match goal.try_as_of_type() { Ok(ok) => println!("?{} : {}", ok.constraint_obj, ok.of_type), Err(bad) => eprintln!("[WARN]: unexpected goal: {:?}", bad), } } if !agw.invisible_goals.is_empty() { println!("Unsolved metas:"); } for meta in agw.invisible_goals { println!("{}", meta); } agda.next_goals().await?; } Ok(()) } async fn finish(agda: &mut ReplState) -> Monad { agda.command(Cmd::Abort).await?; agda.shutdown().await }
35.723881
91
0.535199
268fdb25fcb38c41a699597f160994ace0ef867d
16,395
use fawkes_crypto_derive::Signal; use crate::{ circuit::bitify::c_into_bits_le_strict, circuit::bool::CBool, circuit::mux::c_mux3, circuit::num::CNum, core::cs::ConstraintSystem, core::field::Field, core::signal::Signal, native::ecc::{EdwardsPoint, EdwardsPointEx, JubJubParams, MontgomeryPoint}, native::num::Num, }; #[derive(Clone, Signal)] #[Value = "EdwardsPoint<CS::F>"] pub struct CEdwardsPoint<'a, CS: ConstraintSystem> { pub x: CNum<'a, CS>, pub y: CNum<'a, CS>, } #[derive(Clone, Signal)] #[Value = "MontgomeryPoint<CS::F>"] pub struct CMontgomeryPoint<'a, CS: ConstraintSystem> { pub x: CNum<'a, CS>, pub y: CNum<'a, CS>, } impl<'a, CS: ConstraintSystem> CEdwardsPoint<'a, CS> { pub fn double<J: JubJubParams<Fr = CS::F>>(&self, params: &J) -> Self { let v = &self.x * &self.y; let v2 = v.square(); let u = (&self.x + &self.y).square(); Self { x: &v * num!(2) / (Num::one() + params.edwards_d() * &v2), y: (&u - &v * num!(2)) / (Num::one() - params.edwards_d() * &v2), } } pub fn mul_by_cofactor<J: JubJubParams<Fr = CS::F>>(&self, params: &J) -> Self { self.double(params).double(params).double(params) } pub fn add<J: JubJubParams<Fr = CS::F>>(&self, p: &Self, params: &J) -> Self { let v1 = &self.x * &p.y; let v2 = &p.x * &self.y; let v12 = &v1 * &v2; let u = (&self.x + &self.y) * (&p.x + &p.y); Self { x: (&v1 + &v2) / (Num::one() + params.edwards_d() * &v12), y: (&u - &v1 - &v2) / (Num::one() - params.edwards_d() * &v12), } } pub fn assert_in_curve<J: JubJubParams<Fr = CS::F>>(&self, params: &J) { let x2 = self.x.square(); let y2 = self.y.square(); x2.cs .enforce(&(params.edwards_d() * &x2), &y2, &(&y2 - &x2 - Num::one())); } pub fn assert_in_subgroup<J: JubJubParams<Fr = CS::F>>(&self, params: &J) { let preimage_value = self.get_value().map(|p| p.mul(num!(8).inverse(), params)); let preimage = self.derive_alloc::<Self>(preimage_value.as_ref()); preimage.assert_in_curve(params); let preimage8 = preimage.mul_by_cofactor(params); (&self.x - &preimage8.x).assert_zero(); 
(&self.y - &preimage8.y).assert_zero(); } pub fn subgroup_decompress<J: JubJubParams<Fr = CS::F>>(x: &CNum<'a, CS>, params: &J) -> Self { let preimage_value = x.get_value().map(|x| { EdwardsPoint::subgroup_decompress(x, params) .unwrap_or(params.edwards_g().clone()) .mul(num!(8).inverse(), params) }); let preimage = CEdwardsPoint::alloc(x.get_cs(), preimage_value.as_ref()); preimage.assert_in_curve(params); let preimage8 = preimage.mul_by_cofactor(params); (x - &preimage8.x).assert_zero(); preimage8 } // assume nonzero subgroup point pub fn into_montgomery(&self) -> CMontgomeryPoint<'a, CS> { let x = (Num::one() + &self.y) / (Num::one() - &self.y); let y = &x / &self.x; CMontgomeryPoint { x, y } } // assume subgroup point, bits pub fn mul<J: JubJubParams<Fr = CS::F>>(&self, bits: &[CBool<'a, CS>], params: &J) -> Self { fn gen_table<F: Field, J: JubJubParams<Fr = F>>( p: &EdwardsPointEx<F>, params: &J, ) -> Vec<Vec<Num<F>>> { let mut x_col = vec![]; let mut y_col = vec![]; let mut q = p.clone(); for _ in 0..8 { let MontgomeryPoint { x, y } = q.into_montgomery().unwrap(); x_col.push(x); y_col.push(y); q = q.add(&p, params); } vec![x_col, y_col] } let cs = self.get_cs(); match self.as_const() { Some(c_base) => { let c_base = c_base.into_extended(); let mut base = c_base; if base.is_zero() { self.derive_const(&EdwardsPoint::zero()) } else { let bits_len = bits.len(); let zeros_len = (3 - (bits_len % 3)) % 3; let zero_bits = vec![CBool::c_false(cs); zeros_len]; let all_bits = [bits, &zero_bits].concat(); let all_bits_len = all_bits.len(); let nwindows = all_bits_len / 3; let mut acc = EdwardsPoint { x: Num::zero(), y: -Num::one(), } .into_extended(); for _ in 0..nwindows { acc = acc.add(&base, params); base = base.double().double().double(); } let mp = acc.negate().into_montgomery().unwrap(); let mut acc = CMontgomeryPoint::from_const(cs, &mp); let mut base = c_base; for i in 0..nwindows { let table = gen_table(&base, params); let res = c_mux3(&all_bits[3 * i..3 * (i + 
1)], &table); let p = CMontgomeryPoint { x: res[0].clone(), y: res[1].clone(), }; acc = acc.add(&p, params); base = base.double().double().double(); } let res = acc.into_edwards(); CEdwardsPoint { x: -res.x, y: -res.y, } } } _ => { let base_is_zero = self.x.is_zero(); let dummy_point = CEdwardsPoint::from_const(cs, params.edwards_g()); let base_point = dummy_point.switch(&base_is_zero, self); let mut base_point = base_point.into_montgomery(); let mut exponents = vec![base_point.clone()]; for _ in 1..bits.len() { base_point = base_point.double(params); exponents.push(base_point.clone()); } let empty_acc = CMontgomeryPoint { x: CNum::zero(cs), y: CNum::zero(cs), }; let mut acc = empty_acc.clone(); for i in 0..bits.len() { let inc_acc = acc.add(&exponents[i], params); acc = inc_acc.switch(&bits[i], &acc); } acc = empty_acc.switch(&base_is_zero, &acc); let res = acc.into_edwards(); CEdwardsPoint { x: -res.x, y: -res.y, } } } } // assuming t!=-1 pub fn from_scalar<J: JubJubParams<Fr = CS::F>>(t: &CNum<'a, CS>, params: &J) -> Self { fn filter_even<F: Field>(x: Num<F>) -> Num<F> { if x.is_even() { x } else { -x } } fn check_and_get_y<'a, CS: ConstraintSystem, J: JubJubParams<Fr = CS::F>>( x: &CNum<'a, CS>, params: &J, ) -> (CBool<'a, CS>, CNum<'a, CS>) { let g = (x.square() * (x + params.montgomery_a()) + x) / params.montgomery_b(); let preimage_value = g.get_value().map(|g| match g.sqrt() { Some(g_sqrt) => filter_even(g_sqrt), _ => filter_even((g * params.montgomery_u()).sqrt().unwrap()), }); let preimage = x.derive_alloc(preimage_value.as_ref()); let preimage_bits = c_into_bits_le_strict(&preimage); preimage_bits[0].assert_false(); let preimage_square = preimage.square(); let is_square = (&g - &preimage_square).is_zero(); let isnot_square = (&g * params.montgomery_u() - &preimage_square).is_zero(); (&is_square.0 + isnot_square.0 - Num::one()).assert_zero(); (is_square, preimage) } let t = t + Num::one(); let t2g1 = t.square() * params.montgomery_u(); let x3 = 
-Num::one() / params.montgomery_a() * (&t2g1 + Num::one()); let x2 = &x3 / &t2g1; let (is_valid, y2) = check_and_get_y(&x2, params); let (_, y3) = check_and_get_y(&x3, params); let x = x2.switch(&is_valid, &x3); let y = y2.switch(&is_valid, &y3); CMontgomeryPoint { x, y } .into_edwards() .mul_by_cofactor(params) } } impl<'a, CS: ConstraintSystem> CMontgomeryPoint<'a, CS> { // assume self != (0, 0) pub fn double<J: JubJubParams<Fr = CS::F>>(&self, params: &J) -> Self { let x2 = self.x.square(); let l = (num!(3) * &x2 + num!(2) * params.montgomery_a() * &self.x + Num::one()) / (num!(2) * params.montgomery_b() * &self.y); let b_l2 = params.montgomery_b() * &l.square(); let a = params.montgomery_a(); Self { x: &b_l2 - &a - num!(2) * &self.x, y: l * (num!(3) * &self.x + a - &b_l2) - &self.y, } } // assume self != p pub fn add<J: JubJubParams<Fr = CS::F>>(&self, p: &Self, params: &J) -> Self { let l = (&p.y - &self.y) / (&p.x - &self.x); let b_l2 = params.montgomery_b() * &l.square(); let a = params.montgomery_a(); Self { x: &b_l2 - &a - &self.x - &p.x, y: l * (num!(2) * &self.x + &p.x + a - &b_l2) - &self.y, } } // assume any nonzero point pub fn into_edwards(&self) -> CEdwardsPoint<'a, CS> { let y_is_zero = self.y.is_zero(); CEdwardsPoint { x: &self.x / (&self.y + y_is_zero.0), y: (&self.x - Num::one()) / (&self.x + Num::one()), } } } #[cfg(test)] mod ecc_test { use rand::{thread_rng, Rng}; use super::*; use crate::{ circuit::bitify::c_into_bits_le_strict, core::cs::TestCS, native::bn256::{Fr, JubJubBN256}, }; #[test] fn test_scalar_point_picker() { let mut rng = thread_rng(); let jubjub_params = JubJubBN256::new(); let t = rng.gen(); let ref mut cs = TestCS::<Fr>::new(); let signal_t = CNum::alloc(cs, Some(&t)); let signal_p = CEdwardsPoint::from_scalar(&signal_t, &jubjub_params); let p = EdwardsPoint::from_scalar(t, &jubjub_params); signal_p.assert_const(&p); } #[test] fn test_circuit_subgroup_decompress() { let mut rng = thread_rng(); let jubjub_params = 
JubJubBN256::new(); let p = EdwardsPoint::<Fr>::rand(&mut rng, &jubjub_params).mul(num!(8), &jubjub_params); let ref mut cs = TestCS::<Fr>::new(); let signal_x = CNum::alloc(cs, Some(&p.x)); let mut n_constraints = cs.num_constraints(); let res = CEdwardsPoint::subgroup_decompress(&signal_x, &jubjub_params); n_constraints = cs.num_constraints() - n_constraints; res.y.assert_const(&p.y); println!("subgroup_decompress constraints = {}", n_constraints); assert!(res.y.get_value().unwrap() == p.y); } #[test] fn test_circuit_edwards_add() { let mut rng = thread_rng(); let jubjub_params = JubJubBN256::new(); let p1 = EdwardsPoint::<Fr>::rand(&mut rng, &jubjub_params); let p2 = EdwardsPoint::<Fr>::rand(&mut rng, &jubjub_params); let p3 = p1.add(&p2, &jubjub_params); let ref mut cs = TestCS::<Fr>::new(); let signal_p1 = CEdwardsPoint::alloc(cs, Some(&p1)); let signal_p2 = CEdwardsPoint::alloc(cs, Some(&p2)); let signal_p3 = signal_p1.add(&signal_p2, &jubjub_params); signal_p3.assert_const(&p3); } #[test] fn test_circuit_edwards_double() { let mut rng = thread_rng(); let jubjub_params = JubJubBN256::new(); let p = EdwardsPoint::<Fr>::rand(&mut rng, &jubjub_params); let p3 = p.double(); let ref mut cs = TestCS::<Fr>::new(); let signal_p = CEdwardsPoint::alloc(cs, Some(&p)); let signal_p3 = signal_p.double(&jubjub_params); signal_p3.assert_const(&p3); } #[test] fn test_circuit_edwards_into_montgomery() { let mut rng = thread_rng(); let jubjub_params = JubJubBN256::new(); let p = EdwardsPoint::<Fr>::rand(&mut rng, &jubjub_params); let mp = p.into_montgomery().unwrap(); let ref mut cs = TestCS::<Fr>::new(); let signal_p = CEdwardsPoint::alloc(cs, Some(&p)); let signal_mp = signal_p.into_montgomery(); signal_mp.assert_const(&mp); } #[test] fn test_circuit_montgomery_into_edwards() { let mut rng = thread_rng(); let jubjub_params = JubJubBN256::new(); let p = EdwardsPoint::<Fr>::rand(&mut rng, &jubjub_params); let mp = p.into_montgomery().unwrap(); let ref mut cs = 
TestCS::<Fr>::new(); let signal_mp = CMontgomeryPoint::alloc(cs, Some(&mp)); let signal_p = signal_mp.into_edwards(); signal_p.assert_const(&p); } #[test] fn test_circuit_montgomery_add() { let mut rng = thread_rng(); let jubjub_params = JubJubBN256::new(); let p1 = EdwardsPoint::<Fr>::rand(&mut rng, &jubjub_params); let p2 = EdwardsPoint::<Fr>::rand(&mut rng, &jubjub_params); let p3 = p1.add(&p2, &jubjub_params); let ref mut cs = TestCS::<Fr>::new(); let signal_p1 = CEdwardsPoint::alloc(cs, Some(&p1)); let signal_p2 = CEdwardsPoint::alloc(cs, Some(&p2)); let signal_mp1 = signal_p1.into_montgomery(); let signal_mp2 = signal_p2.into_montgomery(); let signal_mp3 = signal_mp1.add(&signal_mp2, &jubjub_params); let signal_p3 = signal_mp3.into_edwards(); signal_p3.assert_const(&p3); } #[test] fn test_circuit_montgomery_double() { let mut rng = thread_rng(); let jubjub_params = JubJubBN256::new(); let p = EdwardsPoint::<Fr>::rand(&mut rng, &jubjub_params); let p3 = p.double(); let ref mut cs = TestCS::<Fr>::new(); let signal_p = CEdwardsPoint::alloc(cs, Some(&p)); let signal_mp = signal_p.into_montgomery(); let signal_mp3 = signal_mp.double(&jubjub_params); let signal_p3 = signal_mp3.into_edwards(); signal_p3.assert_const(&p3); } #[test] fn test_circuit_edwards_mul() { let mut rng = thread_rng(); let jubjub_params = JubJubBN256::new(); let p = crate::native::ecc::EdwardsPoint::<Fr>::rand(&mut rng, &jubjub_params) .mul(num!(8), &jubjub_params); let n: Num<Fr> = rng.gen(); let p3 = p.mul(n.into_other(), &jubjub_params); let ref mut cs = TestCS::<Fr>::new(); let signal_p = CEdwardsPoint::alloc(cs, Some(&p)); let signal_n = CNum::alloc(cs, Some(&n)); let signal_n_bits = c_into_bits_le_strict(&signal_n); let mut n_constraints = cs.num_constraints(); let signal_p3 = signal_p.mul(&signal_n_bits, &jubjub_params); n_constraints = cs.num_constraints() - n_constraints; signal_p3.assert_const(&p3); println!("edwards_mul constraints = {}", n_constraints); } #[test] fn 
test_circuit_edwards_mul_const() { let mut rng = thread_rng(); let jubjub_params = JubJubBN256::new(); let p = crate::native::ecc::EdwardsPoint::<Fr>::rand(&mut rng, &jubjub_params) .mul(num!(8), &jubjub_params); let n: Num<Fr> = rng.gen(); let p3 = p.mul(n.into_other(), &jubjub_params); let ref mut cs = TestCS::<Fr>::new(); let signal_p = CEdwardsPoint::from_const(cs, &p); let signal_n = CNum::alloc(cs, Some(&n)); let signal_n_bits = c_into_bits_le_strict(&signal_n); let mut n_constraints = cs.num_constraints(); let signal_p3 = signal_p.mul(&signal_n_bits, &jubjub_params); n_constraints = cs.num_constraints() - n_constraints; signal_p3.assert_const(&p3); println!("edwards_mul_const constraints = {}", n_constraints); } }
33.665298
99
0.520586
9c50344d9768378d5af20a3bb3d27ef726eac51b
9,214
//! Groestlcoin serde utilities. //! //! This module is for special serde serializations. //! pub mod btreemap_byte_values { //! Module for serialization of BTreeMaps with hex byte values. #![allow(missing_docs)] // NOTE: This module can be exactly copied to use with HashMap. use crate::prelude::*; use crate::hashes::hex::{FromHex, ToHex}; use serde; pub fn serialize<S, T>(v: &BTreeMap<T, Vec<u8>>, s: S) -> Result<S::Ok, S::Error> where S: serde::Serializer, T: serde::Serialize + core::hash::Hash + Eq + Ord, { use serde::ser::SerializeMap; // Don't do anything special when not human readable. if !s.is_human_readable() { serde::Serialize::serialize(v, s) } else { let mut map = s.serialize_map(Some(v.len()))?; for (key, value) in v.iter() { map.serialize_entry(key, &value.to_hex())?; } map.end() } } pub fn deserialize<'de, D, T>(d: D) -> Result<BTreeMap<T, Vec<u8>>, D::Error> where D: serde::Deserializer<'de>, T: serde::Deserialize<'de> + core::hash::Hash + Eq + Ord, { use core::marker::PhantomData; struct Visitor<T>(PhantomData<T>); impl<'de, T> serde::de::Visitor<'de> for Visitor<T> where T: serde::Deserialize<'de> + core::hash::Hash + Eq + Ord, { type Value = BTreeMap<T, Vec<u8>>; fn expecting(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { write!(f, "a map with hexadecimal values") } fn visit_map<A: serde::de::MapAccess<'de>>(self, mut a: A) -> Result<Self::Value, A::Error> { let mut ret = BTreeMap::new(); while let Some((key, value)) = a.next_entry()? { ret.insert(key, FromHex::from_hex(value).map_err(serde::de::Error::custom)?); } Ok(ret) } } // Don't do anything special when not human readable. if !d.is_human_readable() { serde::Deserialize::deserialize(d) } else { d.deserialize_map(Visitor(PhantomData)) } } } pub mod btreemap_as_seq { //! Module for serialization of BTreeMaps as lists of sequences because //! serde_json will not serialize hashmaps with non-string keys be default. 
#![allow(missing_docs)] // NOTE: This module can be exactly copied to use with HashMap. use crate::prelude::*; use serde; pub fn serialize<S, T, U>(v: &BTreeMap<T, U>, s: S) -> Result<S::Ok, S::Error> where S: serde::Serializer, T: serde::Serialize + core::hash::Hash + Eq + Ord, U: serde::Serialize, { use serde::ser::SerializeSeq; // Don't do anything special when not human readable. if !s.is_human_readable() { serde::Serialize::serialize(v, s) } else { let mut seq = s.serialize_seq(Some(v.len()))?; for pair in v.iter() { seq.serialize_element(&pair)?; } seq.end() } } pub fn deserialize<'de, D, T, U>(d: D) -> Result<BTreeMap<T, U>, D::Error> where D: serde::Deserializer<'de>, T: serde::Deserialize<'de> + core::hash::Hash + Eq + Ord, U: serde::Deserialize<'de>, { use core::marker::PhantomData; struct Visitor<T, U>(PhantomData<(T, U)>); impl<'de, T, U> serde::de::Visitor<'de> for Visitor<T, U> where T: serde::Deserialize<'de> + core::hash::Hash + Eq + Ord, U: serde::Deserialize<'de>, { type Value = BTreeMap<T, U>; fn expecting(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { write!(f, "a sequence of pairs") } fn visit_seq<A: serde::de::SeqAccess<'de>>(self, mut a: A) -> Result<Self::Value, A::Error> { let mut ret = BTreeMap::new(); while let Some((key, value)) = a.next_element()? { ret.insert(key, value); } Ok(ret) } } // Don't do anything special when not human readable. if !d.is_human_readable() { serde::Deserialize::deserialize(d) } else { d.deserialize_seq(Visitor(PhantomData)) } } } pub mod btreemap_as_seq_byte_values { //! Module for serialization of BTreeMaps as lists of sequences because //! serde_json will not serialize hashmaps with non-string keys be default. #![allow(missing_docs)] // NOTE: This module can be exactly copied to use with HashMap. use crate::prelude::*; use serde; /// A custom key-value pair type that serialized the bytes as hex. 
#[derive(Debug, Deserialize)] #[serde(crate = "actual_serde")] struct OwnedPair<T>( T, #[serde(deserialize_with = "crate::serde_utils::hex_bytes::deserialize")] Vec<u8>, ); /// A custom key-value pair type that serialized the bytes as hex. #[derive(Debug, Serialize)] #[serde(crate = "actual_serde")] struct BorrowedPair<'a, T: 'static>( &'a T, #[serde(serialize_with = "crate::serde_utils::hex_bytes::serialize")] &'a [u8], ); pub fn serialize<S, T>(v: &BTreeMap<T, Vec<u8>>, s: S) -> Result<S::Ok, S::Error> where S: serde::Serializer, T: serde::Serialize + core::hash::Hash + Eq + Ord + 'static, { use serde::ser::SerializeSeq; // Don't do anything special when not human readable. if !s.is_human_readable() { serde::Serialize::serialize(v, s) } else { let mut seq = s.serialize_seq(Some(v.len()))?; for (key, value) in v.iter() { seq.serialize_element(&BorrowedPair(key, value))?; } seq.end() } } pub fn deserialize<'de, D, T>(d: D) -> Result<BTreeMap<T, Vec<u8>>, D::Error> where D: serde::Deserializer<'de>, T: serde::Deserialize<'de> + core::hash::Hash + Eq + Ord, { use core::marker::PhantomData; struct Visitor<T>(PhantomData<T>); impl<'de, T> serde::de::Visitor<'de> for Visitor<T> where T: serde::Deserialize<'de> + core::hash::Hash + Eq + Ord, { type Value = BTreeMap<T, Vec<u8>>; fn expecting(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { write!(f, "a sequence of pairs") } fn visit_seq<A: serde::de::SeqAccess<'de>>(self, mut a: A) -> Result<Self::Value, A::Error> { let mut ret = BTreeMap::new(); while let Option::Some(OwnedPair(key, value)) = a.next_element()? { ret.insert(key, value); } Ok(ret) } } // Don't do anything special when not human readable. if !d.is_human_readable() { serde::Deserialize::deserialize(d) } else { d.deserialize_seq(Visitor(PhantomData)) } } } pub mod hex_bytes { //! Module for serialization of byte arrays as hex strings. 
#![allow(missing_docs)] use hashes::hex::{FromHex, ToHex}; use serde; pub fn serialize<T, S>(bytes: &T, s: S) -> Result<S::Ok, S::Error> where T: serde::Serialize + AsRef<[u8]>, S: serde::Serializer { // Don't do anything special when not human readable. if !s.is_human_readable() { serde::Serialize::serialize(bytes, s) } else { s.serialize_str(&bytes.as_ref().to_hex()) } } pub fn deserialize<'de, D, B>(d: D) -> Result<B, D::Error> where D: serde::Deserializer<'de>, B: serde::Deserialize<'de> + FromHex, { struct Visitor<B>(core::marker::PhantomData<B>); impl<'de, B: FromHex> serde::de::Visitor<'de> for Visitor<B> { type Value = B; fn expecting(&self, formatter: &mut core::fmt::Formatter) -> core::fmt::Result { formatter.write_str("an ASCII hex string") } fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E> where E: serde::de::Error, { if let Ok(hex) = core::str::from_utf8(v) { FromHex::from_hex(hex).map_err(E::custom) } else { return Err(E::invalid_value(serde::de::Unexpected::Bytes(v), &self)); } } fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: serde::de::Error, { FromHex::from_hex(v).map_err(E::custom) } } // Don't do anything special when not human readable. if !d.is_human_readable() { serde::Deserialize::deserialize(d) } else { d.deserialize_str(Visitor(core::marker::PhantomData)) } } }
32.10453
97
0.523551
cc9ec9e57626ecc03f49e81cdd0d04dfec7ffdd9
33,141
use std::borrow::{Borrow, Cow}; use std::convert::AsRef; use std::cmp::Ordering; use std::str::Utf8Error; use std::fmt; use ref_cast::RefCast; use stable_pattern::{Pattern, Searcher, ReverseSearcher, Split, SplitInternal}; use crate::uri::fmt::{percent_encode, DEFAULT_ENCODE_SET}; use crate::uncased::UncasedStr; /// A reference to a string inside of a raw HTTP message. /// /// A `RawStr` is an unsanitzed, unvalidated, and undecoded raw string from an /// HTTP message. It exists to separate validated string inputs, represented by /// the `String`, `&str`, and `Cow<str>` types, from unvalidated inputs, /// represented by `&RawStr`. /// /// # Validation /// /// An `&RawStr` should be converted into one of the validated string input /// types through methods on `RawStr`. These methods are summarized below: /// /// * **[`url_decode()`]** - used to decode a raw string in a form value /// context /// * **[`percent_decode()`], [`percent_decode_lossy()`]** - used to /// percent-decode a raw string, typically in a URL context /// * **[`html_escape()`]** - used to decode a string for use in HTML /// templates /// * **[`as_str()`]** - used when the `RawStr` is known to be safe in the /// context of its intended use. Use sparingly and with care! /// * **[`as_uncased_str()`]** - used when the `RawStr` is known to be safe in /// the context of its intended, uncased use /// /// **Note:** Template engines like Tera and Handlebars all functions like /// [`html_escape()`] on all rendered template outputs by default. /// /// [`as_str()`]: RawStr::as_str() /// [`as_uncased_str()`]: RawStr::as_uncased_str() /// [`url_decode()`]: RawStr::url_decode() /// [`html_escape()`]: RawStr::html_escape() /// [`percent_decode()`]: RawStr::percent_decode() /// [`percent_decode_lossy()`]: RawStr::percent_decode_lossy() /// /// # Usage /// /// A `RawStr` is a dynamically sized type (just like `str`). It is always used /// through a reference an as `&RawStr` (just like &str). 
#[repr(transparent)] #[derive(RefCast, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct RawStr(str); impl ToOwned for RawStr { type Owned = RawStrBuf; fn to_owned(&self) -> Self::Owned { RawStrBuf(self.to_string()) } } /// An owned version of [`RawStr`]. #[repr(transparent)] #[derive(RefCast, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct RawStrBuf(String); impl RawStrBuf { /// Cost-free conversion from `self` into a `String`. /// /// # Example /// /// ```rust /// # extern crate rocket; /// use rocket::http::RawStrBuf; /// /// let raw = RawStrBuf::from(format!("hello {}", "world")); /// let string = raw.into_string(); /// ``` pub fn into_string(self) -> String { self.0 } } impl RawStr { /// Constructs an `&RawStr` from a string-like type at no cost. /// /// # Example /// /// ```rust /// # extern crate rocket; /// use rocket::http::RawStr; /// /// let raw_str = RawStr::new("Hello, world!"); /// /// // `into` can also be used; note that the type must be specified /// let raw_str: &RawStr = "Hello, world!".into(); /// ``` pub fn new<S: AsRef<str> + ?Sized>(string: &S) -> &RawStr { RawStr::ref_cast(string.as_ref()) } /// Construct a `Cow<RawStr>` from a `Cow<Str>`. Does not allocate. /// /// See [`RawStr::into_cow_str()`] for the inverse operation. /// /// # Example /// /// ```rust /// # extern crate rocket; /// use std::borrow::Cow; /// use rocket::http::RawStr; /// /// let cow_str = Cow::from("hello!"); /// let cow_raw = RawStr::from_cow_str(cow_str); /// assert_eq!(cow_raw.as_str(), "hello!"); /// ``` pub fn from_cow_str<'a>(cow: Cow<'a, str>) -> Cow<'a, RawStr> { match cow { Cow::Borrowed(b) => Cow::Borrowed(b.into()), Cow::Owned(b) => Cow::Owned(b.into()), } } /// Construct a `Cow<str>` from a `Cow<RawStr>`. Does not allocate. /// /// See [`RawStr::from_cow_str()`] for the inverse operation. 
/// /// # Example /// /// ```rust /// # extern crate rocket; /// use std::borrow::Cow; /// use rocket::http::RawStr; /// /// let cow_raw = Cow::from(RawStr::new("hello!")); /// let cow_str = RawStr::into_cow_str(cow_raw); /// assert_eq!(&*cow_str, "hello!"); /// ``` pub fn into_cow_str<'a>(cow: Cow<'a, RawStr>) -> Cow<'a, str> { match cow { Cow::Borrowed(b) => Cow::Borrowed(b.as_str()), Cow::Owned(b) => Cow::Owned(b.into_string()), } } /// Percent-decodes `self`. fn _percent_decode(&self) -> percent_encoding::PercentDecode<'_> { percent_encoding::percent_decode(self.as_bytes()) } /// Returns a percent-decoded version of the string. /// /// # Errors /// /// Returns an `Err` if the percent encoded values are not valid UTF-8. /// /// # Example /// /// With a valid string: /// /// ```rust /// # extern crate rocket; /// use rocket::http::RawStr; /// /// let raw_str = RawStr::new("Hello%21"); /// let decoded = raw_str.percent_decode(); /// assert_eq!(decoded, Ok("Hello!".into())); /// ``` /// /// With an invalid string: /// /// ```rust /// # extern crate rocket; /// use rocket::http::RawStr; /// /// // Note: Rocket should never hand you a bad `&RawStr`. /// let bad_str = unsafe { std::str::from_utf8_unchecked(b"a=\xff") }; /// let bad_raw_str = RawStr::new(bad_str); /// assert!(bad_raw_str.percent_decode().is_err()); /// ``` #[inline(always)] pub fn percent_decode(&self) -> Result<Cow<'_, str>, Utf8Error> { self._percent_decode().decode_utf8() } /// Returns a percent-decoded version of the string. Any invalid UTF-8 /// percent-encoded byte sequences will be replaced � U+FFFD, the /// replacement character. 
/// /// # Example /// /// With a valid string: /// /// ```rust /// # extern crate rocket; /// use rocket::http::RawStr; /// /// let raw_str = RawStr::new("Hello%21"); /// let decoded = raw_str.percent_decode_lossy(); /// assert_eq!(decoded, "Hello!"); /// ``` /// /// With an invalid string: /// /// ```rust /// # extern crate rocket; /// use rocket::http::RawStr; /// /// // Note: Rocket should never hand you a bad `&RawStr`. /// let bad_str = unsafe { std::str::from_utf8_unchecked(b"a=\xff") }; /// let bad_raw_str = RawStr::new(bad_str); /// assert_eq!(bad_raw_str.percent_decode_lossy(), "a=�"); /// ``` #[inline(always)] pub fn percent_decode_lossy(&self) -> Cow<'_, str> { self._percent_decode().decode_utf8_lossy() } /// Replaces '+' with ' ' in `self`, allocating only when necessary. fn _replace_plus(&self) -> Cow<'_, str> { let string = self.as_str(); let mut allocated = String::new(); // this is allocation free for i in memchr::memchr_iter(b'+', string.as_bytes()) { if allocated.is_empty() { allocated = string.into(); } unsafe { allocated.as_bytes_mut()[i] = b' '; } } match allocated.is_empty() { true => Cow::Borrowed(string), false => Cow::Owned(allocated) } } /// Returns a percent-encoded version of the string. /// /// # Example /// /// With a valid string: /// /// ```rust /// # extern crate rocket; /// use rocket::http::RawStr; /// /// let raw_str = RawStr::new("Hello%21"); /// let decoded = raw_str.percent_decode(); /// assert_eq!(decoded, Ok("Hello!".into())); /// ``` /// /// With an invalid string: /// /// ```rust /// # extern crate rocket; /// use rocket::http::RawStr; /// /// // Note: Rocket should never hand you a bad `&RawStr`. 
/// let bad_str = unsafe { std::str::from_utf8_unchecked(b"a=\xff") }; /// let bad_raw_str = RawStr::new(bad_str); /// assert!(bad_raw_str.percent_decode().is_err()); /// ``` #[inline(always)] pub fn percent_encode(&self) -> Cow<'_, RawStr> { Self::from_cow_str(percent_encode::<DEFAULT_ENCODE_SET>(self)) } /// Returns a URL-decoded version of the string. This is identical to /// percent decoding except that `+` characters are converted into spaces. /// This is the encoding used by form values. /// /// # Errors /// /// Returns an `Err` if the percent encoded values are not valid UTF-8. /// /// # Example /// /// ```rust /// # extern crate rocket; /// use rocket::http::RawStr; /// /// let raw_str = RawStr::new("Hello%2C+world%21"); /// let decoded = raw_str.url_decode(); /// assert_eq!(decoded.unwrap(), "Hello, world!"); /// ``` pub fn url_decode(&self) -> Result<Cow<'_, str>, Utf8Error> { let string = self._replace_plus(); match percent_encoding::percent_decode(string.as_bytes()).decode_utf8()? { Cow::Owned(s) => Ok(Cow::Owned(s)), Cow::Borrowed(_) => Ok(string) } } /// Returns a URL-decoded version of the string. /// /// Any invalid UTF-8 percent-encoded byte sequences will be replaced � /// U+FFFD, the replacement character. This is identical to lossy percent /// decoding except that `+` characters are converted into spaces. This is /// the encoding used by form values. /// /// # Example /// /// With a valid string: /// /// ```rust /// # extern crate rocket; /// use rocket::http::RawStr; /// /// let raw_str: &RawStr = "Hello%2C+world%21".into(); /// let decoded = raw_str.url_decode_lossy(); /// assert_eq!(decoded, "Hello, world!"); /// ``` /// /// With an invalid string: /// /// ```rust /// # extern crate rocket; /// use rocket::http::RawStr; /// /// // Note: Rocket should never hand you a bad `&RawStr`. 
/// let bad_str = unsafe { std::str::from_utf8_unchecked(b"a+b=\xff") }; /// let bad_raw_str = RawStr::new(bad_str); /// assert_eq!(bad_raw_str.url_decode_lossy(), "a b=�"); /// ``` pub fn url_decode_lossy(&self) -> Cow<'_, str> { let string = self._replace_plus(); match percent_encoding::percent_decode(string.as_bytes()).decode_utf8_lossy() { Cow::Owned(s) => Cow::Owned(s), Cow::Borrowed(_) => string } } /// Returns an HTML escaped version of `self`. Allocates only when /// characters need to be escaped. /// /// The following characters are escaped: `&`, `<`, `>`, `"`, `'`, `/`, /// <code>`</code>. **This suffices as long as the escaped string is not /// used in an execution context such as inside of &lt;script> or &lt;style> /// tags!** See the [OWASP XSS Prevention Rules] for more information. /// /// [OWASP XSS Prevention Rules]: https://www.owasp.org/index.php/XSS_%28Cross_Site_Scripting%29_Prevention_Cheat_Sheet#XSS_Prevention_Rules /// /// # Example /// /// Strings with HTML sequences are escaped: /// /// ```rust /// # extern crate rocket; /// use rocket::http::RawStr; /// /// let raw_str: &RawStr = "<b>Hi!</b>".into(); /// let escaped = raw_str.html_escape(); /// assert_eq!(escaped, "&lt;b&gt;Hi!&lt;&#x2F;b&gt;"); /// /// let raw_str: &RawStr = "Hello, <i>world!</i>".into(); /// let escaped = raw_str.html_escape(); /// assert_eq!(escaped, "Hello, &lt;i&gt;world!&lt;&#x2F;i&gt;"); /// ``` /// /// Strings without HTML sequences remain untouched: /// /// ```rust /// # extern crate rocket; /// use rocket::http::RawStr; /// /// let raw_str: &RawStr = "Hello!".into(); /// let escaped = raw_str.html_escape(); /// assert_eq!(escaped, "Hello!"); /// /// let raw_str: &RawStr = "大阪".into(); /// let escaped = raw_str.html_escape(); /// assert_eq!(escaped, "大阪"); /// ``` // NOTE: This is the ~fastest (a table-based implementation is slightly // faster) implementation benchmarked for dense-ish escaping. For sparser // texts, a regex-based-find solution is much faster. 
pub fn html_escape(&self) -> Cow<'_, str> { let mut escaped = false; let mut allocated = Vec::new(); // this is allocation free for c in self.as_bytes() { match *c { b'&' | b'<' | b'>' | b'"' | b'\'' | b'/' | b'`' => { if !escaped { let i = (c as *const u8 as usize) - (self.as_ptr() as usize); allocated = Vec::with_capacity(self.len() * 2); allocated.extend_from_slice(&self.as_bytes()[..i]); } match *c { b'&' => allocated.extend_from_slice(b"&amp;"), b'<' => allocated.extend_from_slice(b"&lt;"), b'>' => allocated.extend_from_slice(b"&gt;"), b'"' => allocated.extend_from_slice(b"&quot;"), b'\'' => allocated.extend_from_slice(b"&#x27;"), b'/' => allocated.extend_from_slice(b"&#x2F;"), // Old versions of IE treat a ` as a '. b'`' => allocated.extend_from_slice(b"&#96;"), _ => unreachable!() } escaped = true; } _ if escaped => allocated.push(*c), _ => { } } } if escaped { // This use of `unsafe` is only correct if the bytes in `allocated` // form a valid UTF-8 string. We prove this by cases: // // 1. In the `!escaped` branch, capacity for the vector is first // allocated. No characters are pushed to `allocated` prior to // this branch running since the `escaped` flag isn't set. To // enter this branch, the current byte must be a valid ASCII // character. This implies that every byte preceding forms a // valid UTF-8 string since ASCII characters in UTF-8 are never // part of a multi-byte sequence. Thus, extending the `allocated` // vector with these bytes results in a valid UTF-8 string in // `allocated`. // // 2. After the `!escaped` branch, `allocated` is extended with a // byte string of valid ASCII characters. Thus, `allocated` is // still a valid UTF-8 string. // // 3. In the `_ if escaped` branch, the byte is simply pushed into // the `allocated` vector. At this point, `allocated` may contain // an invalid UTF-8 string as we are not a known boundary. // However, note that this byte is part of a known valid string // (`self`). 
If the byte is not part of a multi-byte sequence, it // is ASCII, and no further justification is needed. If the byte // _is_ part of a multi-byte sequence, it is _not_ ASCII, and // thus will not fall into the escaped character set and it will // be pushed into `allocated` subsequently, resulting in a valid // UTF-8 string in `allocated`. unsafe { Cow::Owned(String::from_utf8_unchecked(allocated)) } } else { Cow::Borrowed(self.as_str()) } } /// Returns the length of `self`. /// /// This length is in bytes, not [`char`]s or graphemes. In other words, /// it may not be what a human considers the length of the string. /// /// # Example /// /// ```rust /// # extern crate rocket; /// use rocket::http::RawStr; /// /// let raw_str = RawStr::new("Hello, world!"); /// assert_eq!(raw_str.len(), 13); /// ``` #[inline] pub const fn len(&self) -> usize { self.0.len() } /// Returns `true` if `self` has a length of zero bytes. /// /// # Example /// /// ```rust /// # extern crate rocket; /// use rocket::http::RawStr; /// /// let raw_str = RawStr::new("Hello, world!"); /// assert!(!raw_str.is_empty()); /// /// let raw_str = RawStr::new(""); /// assert!(raw_str.is_empty()); /// ``` #[inline] pub const fn is_empty(&self) -> bool { self.len() == 0 } /// Converts `self` into an `&str`. /// /// This method should be used sparingly. **Only use this method when you /// are absolutely certain that doing so is safe.** /// /// # Example /// /// ```rust /// # extern crate rocket; /// use rocket::http::RawStr; /// /// let raw_str = RawStr::new("Hello, world!"); /// assert_eq!(raw_str.as_str(), "Hello, world!"); /// ``` #[inline(always)] pub const fn as_str(&self) -> &str { &self.0 } /// Converts `self` into an `&[u8]`. 
/// /// # Example /// /// ```rust /// # extern crate rocket; /// use rocket::http::RawStr; /// /// let raw_str = RawStr::new("hi"); /// assert_eq!(raw_str.as_bytes(), &[0x68, 0x69]); /// ``` #[inline(always)] pub const fn as_bytes(&self) -> &[u8] { self.0.as_bytes() } /// Converts a string slice to a raw pointer. /// /// As string slices are a slice of bytes, the raw pointer points to a /// [`u8`]. This pointer will be pointing to the first byte of the string /// slice. /// /// The caller must ensure that the returned pointer is never written to. /// If you need to mutate the contents of the string slice, use [`as_mut_ptr`]. /// /// [`as_mut_ptr`]: str::as_mut_ptr /// /// # Examples /// /// Basic usage: /// /// ``` /// # extern crate rocket; /// use rocket::http::RawStr; /// /// let raw_str = RawStr::new("hi"); /// let ptr = raw_str.as_ptr(); /// ``` pub const fn as_ptr(&self) -> *const u8 { self.as_str().as_ptr() } /// Converts `self` into an `&UncasedStr`. /// /// This method should be used sparingly. **Only use this method when you /// are absolutely certain that doing so is safe.** /// /// # Example /// /// ```rust /// # extern crate rocket; /// use rocket::http::RawStr; /// /// let raw_str = RawStr::new("Content-Type"); /// assert!(raw_str.as_uncased_str() == "content-TYPE"); /// ``` #[inline(always)] pub fn as_uncased_str(&self) -> &UncasedStr { self.as_str().into() } /// Returns `true` if the given pattern matches a sub-slice of /// this string slice. /// /// Returns `false` if it does not. /// /// The pattern can be a `&str`, [`char`], a slice of [`char`]s, or a /// function or closure that determines if a character matches. 
/// /// [`char`]: prim@char /// /// # Examples /// /// Basic usage: /// /// ``` /// # extern crate rocket; /// use rocket::http::RawStr; /// /// let bananas = RawStr::new("bananas"); /// /// assert!(bananas.contains("nana")); /// assert!(!bananas.contains("apples")); /// ``` #[inline] pub fn contains<'a, P: Pattern<'a>>(&'a self, pat: P) -> bool { pat.is_contained_in(self.as_str()) } /// Returns `true` if the given pattern matches a prefix of this /// string slice. /// /// Returns `false` if it does not. /// /// The pattern can be a `&str`, [`char`], a slice of [`char`]s, or a /// function or closure that determines if a character matches. /// /// [`char`]: prim@char /// /// # Examples /// /// Basic usage: /// /// ``` /// # extern crate rocket; /// use rocket::http::RawStr; /// /// let bananas = RawStr::new("bananas"); /// /// assert!(bananas.starts_with("bana")); /// assert!(!bananas.starts_with("nana")); /// ``` pub fn starts_with<'a, P: Pattern<'a>>(&'a self, pat: P) -> bool { pat.is_prefix_of(self.as_str()) } /// Returns `true` if the given pattern matches a suffix of this /// string slice. /// /// Returns `false` if it does not. /// /// The pattern can be a `&str`, [`char`], a slice of [`char`]s, or a /// function or closure that determines if a character matches. /// /// [`char`]: prim@char /// /// # Examples /// /// Basic usage: /// /// ``` /// # extern crate rocket; /// use rocket::http::RawStr; /// /// let bananas = RawStr::new("bananas"); /// /// assert!(bananas.ends_with("anas")); /// assert!(!bananas.ends_with("nana")); /// ``` pub fn ends_with<'a, P>(&'a self, pat: P) -> bool where P: Pattern<'a>, <P as Pattern<'a>>::Searcher: ReverseSearcher<'a> { pat.is_suffix_of(self.as_str()) } /// Returns the byte index of the first character of this string slice that /// matches the pattern. /// /// Returns [`None`] if the pattern doesn't match. 
/// /// The pattern can be a `&str`, [`char`], a slice of [`char`]s, or a /// function or closure that determines if a character matches. /// /// [`char`]: prim@char /// /// # Example /// /// ``` /// # extern crate rocket; /// use rocket::http::RawStr; /// /// let s = RawStr::new("Löwe 老虎 Léopard Gepardi"); /// /// assert_eq!(s.find('L'), Some(0)); /// assert_eq!(s.find('é'), Some(14)); /// assert_eq!(s.find("pard"), Some(17)); /// ``` #[inline] pub fn find<'a, P: Pattern<'a>>(&'a self, pat: P) -> Option<usize> { pat.into_searcher(self.as_str()).next_match().map(|(i, _)| i) } /// An iterator over substrings of this string slice, separated by /// characters matched by a pattern. /// /// The pattern can be a `&str`, [`char`], a slice of [`char`]s, or a /// function or closure that determines if a character matches. /// /// [`char`]: prim@char /// /// # Examples /// /// Simple patterns: /// /// ``` /// # extern crate rocket; /// use rocket::http::RawStr; /// /// let v: Vec<_> = RawStr::new("Mary had a little lamb") /// .split(' ') /// .map(|r| r.as_str()) /// .collect(); /// /// assert_eq!(v, ["Mary", "had", "a", "little", "lamb"]); /// ``` #[inline] pub fn split<'a, P>(&'a self, pat: P) -> impl Iterator<Item = &'a RawStr> where P: Pattern<'a> { let split: Split<'_, P> = Split(SplitInternal { start: 0, end: self.len(), matcher: pat.into_searcher(self.as_str()), allow_trailing_empty: true, finished: false, }); split.map(|s| s.into()) } /// Splits `self` into two pieces: the piece _before_ the first byte `b` and /// the piece _after_ (not including `b`). Returns the tuple (`before`, /// `after`). If `b` is not in `self`, or `b` is not an ASCII characters, /// returns the entire string `self` as `before` and the empty string as /// `after`. 
/// /// # Example /// /// ```rust /// # extern crate rocket; /// use rocket::http::RawStr; /// /// let haystack = RawStr::new("a good boy!"); /// /// let (before, after) = haystack.split_at_byte(b'a'); /// assert_eq!(before, ""); /// assert_eq!(after, " good boy!"); /// /// let (before, after) = haystack.split_at_byte(b' '); /// assert_eq!(before, "a"); /// assert_eq!(after, "good boy!"); /// /// let (before, after) = haystack.split_at_byte(b'o'); /// assert_eq!(before, "a g"); /// assert_eq!(after, "od boy!"); /// /// let (before, after) = haystack.split_at_byte(b'!'); /// assert_eq!(before, "a good boy"); /// assert_eq!(after, ""); /// /// let (before, after) = haystack.split_at_byte(b'?'); /// assert_eq!(before, "a good boy!"); /// assert_eq!(after, ""); /// /// let haystack = RawStr::new(""); /// let (before, after) = haystack.split_at_byte(b' '); /// assert_eq!(before, ""); /// assert_eq!(after, ""); /// ``` #[inline] pub fn split_at_byte(&self, b: u8) -> (&RawStr, &RawStr) { if !b.is_ascii() { return (self, &self[0..0]); } match memchr::memchr(b, self.as_bytes()) { // SAFETY: `b` is a character boundary since it's ASCII, `i` is in // bounds in `self` (or else None), and i is at most len - 1, so i + // 1 is at most len. Some(i) => unsafe { let s = self.as_str(); let start = s.get_unchecked(0..i); let end = s.get_unchecked((i + 1)..self.len()); (start.into(), end.into()) }, None => (self, &self[0..0]) } } /// Returns a string slice with the prefix removed. /// /// If the string starts with the pattern `prefix`, returns substring after /// the prefix, wrapped in `Some`. This method removes the prefix exactly /// once. /// /// If the string does not start with `prefix`, returns `None`. /// /// The pattern can be a `&str`, `char`, a slice of `char`s, or a function /// or closure that determines if a character matches. 
/// /// # Examples /// /// ``` /// # extern crate rocket; /// use rocket::http::RawStr; /// /// assert_eq!(RawStr::new("foo:bar").strip_prefix("foo:").unwrap(), "bar"); /// assert_eq!(RawStr::new("foofoo").strip_prefix("foo").unwrap(), "foo"); /// assert!(RawStr::new("foo:bar").strip_prefix("bar").is_none()); /// ``` #[inline] pub fn strip_prefix<'a, P: Pattern<'a>>(&'a self, prefix: P) -> Option<&'a RawStr> { prefix.strip_prefix_of(self.as_str()).map(RawStr::new) } /// Returns a string slice with the suffix removed. /// /// If the string ends with the pattern `suffix`, returns the substring /// before the suffix, wrapped in `Some`. Unlike `trim_end_matches`, this /// method removes the suffix exactly once. /// /// If the string does not end with `suffix`, returns `None`. /// /// The pattern can be a `&str`, `char`, a slice of `char`s, or a function /// or closure that determines if a character matches. /// /// # Examples /// /// ``` /// # extern crate rocket; /// use rocket::http::RawStr; /// /// assert_eq!(RawStr::new("bar:foo").strip_suffix(":foo").unwrap(), "bar"); /// assert_eq!(RawStr::new("foofoo").strip_suffix("foo").unwrap(), "foo"); /// assert!(RawStr::new("bar:foo").strip_suffix("bar").is_none()); /// ``` #[inline] pub fn strip_suffix<'a, P>(&'a self, suffix: P) -> Option<&'a RawStr> where P: Pattern<'a>,<P as Pattern<'a>>::Searcher: ReverseSearcher<'a>, { suffix.strip_suffix_of(self.as_str()).map(RawStr::new) } /// Parses this string slice into another type. /// /// Because `parse` is so general, it can cause problems with type /// inference. As such, `parse` is one of the few times you'll see /// the syntax affectionately known as the 'turbofish': `::<>`. This /// helps the inference algorithm understand specifically which type /// you're trying to parse into. /// /// # Errors /// /// Will return `Err` if it's not possible to parse this string slice into /// the desired type. 
/// /// # Examples /// /// Basic usage /// /// ``` /// # extern crate rocket; /// use rocket::http::RawStr; /// /// let four: u32 = RawStr::new("4").parse().unwrap(); /// /// assert_eq!(4, four); /// ``` #[inline] pub fn parse<F: std::str::FromStr>(&self) -> Result<F, F::Err> { std::str::FromStr::from_str(self.as_str()) } } #[cfg(feature = "serde")] mod serde { use _serde::{ser, de, Serialize, Deserialize}; use super::*; impl Serialize for RawStr { fn serialize<S>(&self, ser: S) -> Result<S::Ok, S::Error> where S: ser::Serializer { self.as_str().serialize(ser) } } impl<'de: 'a, 'a> Deserialize<'de> for &'a RawStr { fn deserialize<D>(de: D) -> Result<Self, D::Error> where D: de::Deserializer<'de> { <&'a str as Deserialize<'de>>::deserialize(de).map(RawStr::new) } } } impl fmt::Debug for RawStr { #[inline(always)] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.0.fmt(f) } } impl<'a> From<&'a str> for &'a RawStr { #[inline(always)] fn from(string: &'a str) -> &'a RawStr { RawStr::new(string) } } impl<'a> From<&'a RawStr> for Cow<'a, RawStr> { fn from(raw: &'a RawStr) -> Self { Cow::Borrowed(raw) } } impl From<RawStrBuf> for Cow<'_, RawStr> { fn from(raw: RawStrBuf) -> Self { Cow::Owned(raw) } } macro_rules! 
impl_partial { ($A:ty : $B:ty) => ( impl PartialEq<$A> for $B { #[inline(always)] fn eq(&self, other: &$A) -> bool { let left: &str = self.as_ref(); let right: &str = other.as_ref(); left == right } } impl PartialOrd<$A> for $B { #[inline(always)] fn partial_cmp(&self, other: &$A) -> Option<Ordering> { let left: &str = self.as_ref(); let right: &str = other.as_ref(); left.partial_cmp(right) } } ) } impl_partial!(RawStr : &RawStr); impl_partial!(&RawStr : RawStr); impl_partial!(str : RawStr); impl_partial!(str : &RawStr); impl_partial!(&str : RawStr); impl_partial!(&&str : RawStr); impl_partial!(Cow<'_, str> : RawStr); impl_partial!(Cow<'_, str> : &RawStr); impl_partial!(RawStr : Cow<'_, str>); impl_partial!(&RawStr : Cow<'_, str>); impl_partial!(String : RawStr); impl_partial!(String : &RawStr); impl_partial!(RawStr : String); impl_partial!(&RawStr : String); impl_partial!(RawStr : str); impl_partial!(RawStr : &str); impl_partial!(RawStr : &&str); impl_partial!(&RawStr : str); impl AsRef<str> for RawStr { #[inline(always)] fn as_ref(&self) -> &str { self.as_str() } } impl AsRef<RawStr> for str { #[inline(always)] fn as_ref(&self) -> &RawStr { RawStr::new(self) } } impl AsRef<RawStr> for RawStr { #[inline(always)] fn as_ref(&self) -> &RawStr { self } } impl AsRef<[u8]> for RawStr { #[inline(always)] fn as_ref(&self) -> &[u8] { self.as_bytes() } } impl<I: core::slice::SliceIndex<str, Output=str>> core::ops::Index<I> for RawStr { type Output = RawStr; #[inline] fn index(&self, index: I) -> &Self::Output { self.as_str()[index].into() } } impl std::borrow::Borrow<str> for RawStr { #[inline(always)] fn borrow(&self) -> &str { self.as_str() } } impl std::borrow::Borrow<RawStr> for &str { #[inline(always)] fn borrow(&self) -> &RawStr { (*self).into() } } impl fmt::Display for RawStr { #[inline(always)] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.0.fmt(f) } } impl AsRef<RawStr> for RawStrBuf { #[inline(always)] fn as_ref(&self) -> &RawStr { 
RawStr::new(self.0.as_str()) } } impl Borrow<RawStr> for RawStrBuf { #[inline(always)] fn borrow(&self) -> &RawStr { self.as_ref() } } impl std::ops::Deref for RawStrBuf { type Target = RawStr; #[inline(always)] fn deref(&self) -> &Self::Target { self.as_ref() } } impl fmt::Display for RawStrBuf { #[inline(always)] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.0.fmt(f) } } impl fmt::Debug for RawStrBuf { #[inline(always)] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.0.fmt(f) } } impl From<String> for RawStrBuf { #[inline(always)] fn from(string: String) -> Self { RawStrBuf(string) } } impl From<&str> for RawStrBuf { #[inline(always)] fn from(string: &str) -> Self { string.to_string().into() } } impl From<&RawStr> for RawStrBuf { #[inline(always)] fn from(raw: &RawStr) -> Self { raw.to_string().into() } } #[cfg(test)] mod tests { use super::RawStr; #[test] fn can_compare() { let raw_str = RawStr::new("abc"); assert_eq!(raw_str, "abc"); assert_eq!("abc", raw_str.as_str()); assert_eq!(raw_str, RawStr::new("abc")); assert_eq!(raw_str, "abc".to_string()); assert_eq!("abc".to_string(), raw_str.as_str()); } }
30.686111
144
0.537703
3846c7e133a005a835794483b5f37a2c4b6077d4
503
use serde::{Deserialize, Serialize}; use crate::common::*; /// If announce request has answer = true, send this to peer with /// peer id == "to_peer_id" field /// Action field should be 'announce' #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] pub struct MiddlemanAnswerToPeer { pub action: AnnounceAction, /// Note: if equal to client peer_id, client ignores answer pub peer_id: PeerId, pub info_hash: InfoHash, pub answer: JsonValue, pub offer_id: OfferId, }
29.588235
65
0.705765
d75dfb72054b89fb5c6e0a3479abc1accc606f23
23,909
// Copyright 2018 Parity Technologies (UK) Ltd. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the "Software"), // to deal in the Software without restriction, including without limitation // the rights to use, copy, modify, merge, publish, distribute, sublicense, // and/or sell copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER // DEALINGS IN THE SOFTWARE. use crate::handler::{IdentifyHandler, IdentifyHandlerEvent, IdentifyPush}; use crate::protocol::{IdentifyInfo, ReplySubstream}; use futures::prelude::*; use libp2p_core::{ connection::{ConnectionId, ListenerId}, upgrade::UpgradeError, ConnectedPoint, Multiaddr, PeerId, PublicKey, }; use libp2p_swarm::{ AddressScore, DialError, DialPeerCondition, IntoProtocolsHandler, NegotiatedSubstream, NetworkBehaviour, NetworkBehaviourAction, NotifyHandler, PollParameters, ProtocolsHandler, ProtocolsHandlerUpgrErr, }; use std::{ collections::{HashMap, HashSet, VecDeque}, io, pin::Pin, task::Context, task::Poll, time::Duration, }; /// Network behaviour that automatically identifies nodes periodically, returns information /// about them, and answers identify queries from other nodes. 
/// /// All external addresses of the local node supposedly observed by remotes /// are reported via [`NetworkBehaviourAction::ReportObservedAddr`] with a /// [score](AddressScore) of `1`. pub struct Identify { config: IdentifyConfig, /// For each peer we're connected to, the observed address to send back to it. connected: HashMap<PeerId, HashMap<ConnectionId, Multiaddr>>, /// Pending replies to send. pending_replies: VecDeque<Reply>, /// Pending events to be emitted when polled. events: VecDeque<NetworkBehaviourAction<IdentifyEvent, IdentifyHandler>>, /// Peers to which an active push with current information about /// the local peer should be sent. pending_push: HashSet<PeerId>, } /// A pending reply to an inbound identification request. enum Reply { /// The reply is queued for sending. Queued { peer: PeerId, io: ReplySubstream<NegotiatedSubstream>, observed: Multiaddr, }, /// The reply is being sent. Sending { peer: PeerId, io: Pin<Box<dyn Future<Output = Result<(), io::Error>> + Send>>, }, } /// Configuration for the [`Identify`] [`NetworkBehaviour`]. #[non_exhaustive] #[derive(Debug)] pub struct IdentifyConfig { /// Application-specific version of the protocol family used by the peer, /// e.g. `ipfs/1.0.0` or `polkadot/1.0.0`. pub protocol_version: String, /// The public key of the local node. To report on the wire. pub local_public_key: PublicKey, /// Name and version of the local peer implementation, similar to the /// `User-Agent` header in the HTTP protocol. /// /// Defaults to `rust-libp2p/<libp2p-identify-version>`. pub agent_version: String, /// The initial delay before the first identification request /// is sent to a remote on a newly established connection. /// /// Defaults to 500ms. pub initial_delay: Duration, /// The interval at which identification requests are sent to /// the remote on established connections after the first request, /// i.e. the delay between identification requests. /// /// Defaults to 5 minutes. 
pub interval: Duration, /// Whether new or expired listen addresses of the local node should /// trigger an active push of an identify message to all connected peers. /// /// Enabling this option can result in connected peers being informed /// earlier about new or expired listen addresses of the local node, /// i.e. before the next periodic identify request with each peer. /// /// Disabled by default. pub push_listen_addr_updates: bool, } impl IdentifyConfig { /// Creates a new configuration for the `Identify` behaviour that /// advertises the given protocol version and public key. pub fn new(protocol_version: String, local_public_key: PublicKey) -> Self { IdentifyConfig { protocol_version, agent_version: format!("rust-libp2p/{}", env!("CARGO_PKG_VERSION")), local_public_key, initial_delay: Duration::from_millis(500), interval: Duration::from_secs(5 * 60), push_listen_addr_updates: false, } } /// Configures the agent version sent to peers. pub fn with_agent_version(mut self, v: String) -> Self { self.agent_version = v; self } /// Configures the initial delay before the first identification /// request is sent on a newly established connection to a peer. pub fn with_initial_delay(mut self, d: Duration) -> Self { self.initial_delay = d; self } /// Configures the interval at which identification requests are /// sent to peers after the initial request. pub fn with_interval(mut self, d: Duration) -> Self { self.interval = d; self } /// Configures whether new or expired listen addresses of the local /// node should trigger an active push of an identify message to all /// connected peers. pub fn with_push_listen_addr_updates(mut self, b: bool) -> Self { self.push_listen_addr_updates = b; self } } impl Identify { /// Creates a new `Identify` network behaviour. 
pub fn new(config: IdentifyConfig) -> Self { Identify { config, connected: HashMap::new(), pending_replies: VecDeque::new(), events: VecDeque::new(), pending_push: HashSet::new(), } } /// Initiates an active push of the local peer information to the given peers. pub fn push<I>(&mut self, peers: I) where I: IntoIterator<Item = PeerId>, { for p in peers { if self.pending_push.insert(p) { if !self.connected.contains_key(&p) { let handler = self.new_handler(); self.events.push_back(NetworkBehaviourAction::DialPeer { peer_id: p, condition: DialPeerCondition::Disconnected, handler, }); } } } } } impl NetworkBehaviour for Identify { type ProtocolsHandler = IdentifyHandler; type OutEvent = IdentifyEvent; fn new_handler(&mut self) -> Self::ProtocolsHandler { IdentifyHandler::new(self.config.initial_delay, self.config.interval) } fn inject_connection_established( &mut self, peer_id: &PeerId, conn: &ConnectionId, endpoint: &ConnectedPoint, ) { let addr = match endpoint { ConnectedPoint::Dialer { address } => address.clone(), ConnectedPoint::Listener { send_back_addr, .. 
} => send_back_addr.clone(), }; self.connected .entry(*peer_id) .or_default() .insert(*conn, addr); } fn inject_connection_closed( &mut self, peer_id: &PeerId, conn: &ConnectionId, _: &ConnectedPoint, _: <Self::ProtocolsHandler as IntoProtocolsHandler>::Handler, ) { if let Some(addrs) = self.connected.get_mut(peer_id) { addrs.remove(conn); } } fn inject_dial_failure(&mut self, peer_id: &PeerId, _: Self::ProtocolsHandler, _: DialError) { if !self.connected.contains_key(peer_id) { self.pending_push.remove(peer_id); } } fn inject_disconnected(&mut self, peer_id: &PeerId) { self.connected.remove(peer_id); self.pending_push.remove(peer_id); } fn inject_new_listen_addr(&mut self, _id: ListenerId, _addr: &Multiaddr) { if self.config.push_listen_addr_updates { self.pending_push.extend(self.connected.keys()); } } fn inject_expired_listen_addr(&mut self, _id: ListenerId, _addr: &Multiaddr) { if self.config.push_listen_addr_updates { self.pending_push.extend(self.connected.keys()); } } fn inject_event( &mut self, peer_id: PeerId, connection: ConnectionId, event: <Self::ProtocolsHandler as ProtocolsHandler>::OutEvent, ) { match event { IdentifyHandlerEvent::Identified(info) => { let observed = info.observed_addr.clone(); self.events.push_back(NetworkBehaviourAction::GenerateEvent( IdentifyEvent::Received { peer_id, info }, )); self.events .push_back(NetworkBehaviourAction::ReportObservedAddr { address: observed, score: AddressScore::Finite(1), }); } IdentifyHandlerEvent::IdentificationPushed => { self.events.push_back(NetworkBehaviourAction::GenerateEvent( IdentifyEvent::Pushed { peer_id }, )); } IdentifyHandlerEvent::Identify(sender) => { let observed = self .connected .get(&peer_id) .and_then(|addrs| addrs.get(&connection)) .expect( "`inject_event` is only called with an established connection \ and `inject_connection_established` ensures there is an entry; qed", ); self.pending_replies.push_back(Reply::Queued { peer: peer_id, io: sender, observed: observed.clone(), }); } 
IdentifyHandlerEvent::IdentificationError(error) => { self.events.push_back(NetworkBehaviourAction::GenerateEvent( IdentifyEvent::Error { peer_id, error }, )); } } } fn poll( &mut self, cx: &mut Context<'_>, params: &mut impl PollParameters, ) -> Poll<NetworkBehaviourAction<Self::OutEvent, Self::ProtocolsHandler>> { if let Some(event) = self.events.pop_front() { return Poll::Ready(event); } // Check for a pending active push to perform. let peer_push = self.pending_push.iter().find_map(|peer| { self.connected.get(peer).map(|conns| { let observed_addr = conns .values() .next() .expect("connected peer has a connection") .clone(); let listen_addrs = listen_addrs(params); let protocols = supported_protocols(params); let info = IdentifyInfo { public_key: self.config.local_public_key.clone(), protocol_version: self.config.protocol_version.clone(), agent_version: self.config.agent_version.clone(), listen_addrs, protocols, observed_addr, }; (*peer, IdentifyPush(info)) }) }); if let Some((peer_id, push)) = peer_push { self.pending_push.remove(&peer_id); return Poll::Ready(NetworkBehaviourAction::NotifyHandler { peer_id, event: push, handler: NotifyHandler::Any, }); } // Check for pending replies to send. 
if let Some(r) = self.pending_replies.pop_front() { let mut sending = 0; let to_send = self.pending_replies.len() + 1; let mut reply = Some(r); loop { match reply { Some(Reply::Queued { peer, io, observed }) => { let info = IdentifyInfo { listen_addrs: listen_addrs(params), protocols: supported_protocols(params), public_key: self.config.local_public_key.clone(), protocol_version: self.config.protocol_version.clone(), agent_version: self.config.agent_version.clone(), observed_addr: observed, }; let io = Box::pin(io.send(info)); reply = Some(Reply::Sending { peer, io }); } Some(Reply::Sending { peer, mut io }) => { sending += 1; match Future::poll(Pin::new(&mut io), cx) { Poll::Ready(Ok(())) => { let event = IdentifyEvent::Sent { peer_id: peer }; return Poll::Ready(NetworkBehaviourAction::GenerateEvent(event)); } Poll::Pending => { self.pending_replies.push_back(Reply::Sending { peer, io }); if sending == to_send { // All remaining futures are NotReady break; } else { reply = self.pending_replies.pop_front(); } } Poll::Ready(Err(err)) => { let event = IdentifyEvent::Error { peer_id: peer, error: ProtocolsHandlerUpgrErr::Upgrade(UpgradeError::Apply( err, )), }; return Poll::Ready(NetworkBehaviourAction::GenerateEvent(event)); } } } None => unreachable!(), } } } Poll::Pending } } /// Event emitted by the `Identify` behaviour. #[derive(Debug)] pub enum IdentifyEvent { /// Identification information has been received from a peer. Received { /// The peer that has been identified. peer_id: PeerId, /// The information provided by the peer. info: IdentifyInfo, }, /// Identification information of the local node has been sent to a peer in /// response to an identification request. Sent { /// The peer that the information has been sent to. peer_id: PeerId, }, /// Identification information of the local node has been actively pushed to /// a peer. Pushed { /// The peer that the information has been sent to. peer_id: PeerId, }, /// Error while attempting to identify the remote. 
Error { /// The peer with whom the error originated. peer_id: PeerId, /// The error that occurred. error: ProtocolsHandlerUpgrErr<io::Error>, }, } fn supported_protocols(params: &impl PollParameters) -> Vec<String> { // The protocol names can be bytes, but the identify protocol except UTF-8 strings. // There's not much we can do to solve this conflict except strip non-UTF-8 characters. params .supported_protocols() .map(|p| String::from_utf8_lossy(&p).to_string()) .collect() } fn listen_addrs(params: &impl PollParameters) -> Vec<Multiaddr> { let mut listen_addrs: Vec<_> = params.external_addresses().map(|r| r.addr).collect(); listen_addrs.extend(params.listened_addresses()); listen_addrs } #[cfg(test)] mod tests { use super::*; use futures::pin_mut; use libp2p_core::{identity, muxing::StreamMuxerBox, transport, upgrade, PeerId, Transport}; use libp2p_mplex::MplexConfig; use libp2p_noise as noise; use libp2p_swarm::{Swarm, SwarmEvent}; use libp2p_tcp::TcpConfig; fn transport() -> ( identity::PublicKey, transport::Boxed<(PeerId, StreamMuxerBox)>, ) { let id_keys = identity::Keypair::generate_ed25519(); let noise_keys = noise::Keypair::<noise::X25519Spec>::new() .into_authentic(&id_keys) .unwrap(); let pubkey = id_keys.public(); let transport = TcpConfig::new() .nodelay(true) .upgrade(upgrade::Version::V1) .authenticate(noise::NoiseConfig::xx(noise_keys).into_authenticated()) .multiplex(MplexConfig::new()) .boxed(); (pubkey, transport) } #[test] fn periodic_identify() { let (mut swarm1, pubkey1) = { let (pubkey, transport) = transport(); let protocol = Identify::new( IdentifyConfig::new("a".to_string(), pubkey.clone()) .with_agent_version("b".to_string()), ); let swarm = Swarm::new(transport, protocol, pubkey.to_peer_id()); (swarm, pubkey) }; let (mut swarm2, pubkey2) = { let (pubkey, transport) = transport(); let protocol = Identify::new( IdentifyConfig::new("c".to_string(), pubkey.clone()) .with_agent_version("d".to_string()), ); let swarm = Swarm::new(transport, 
protocol, pubkey.to_peer_id()); (swarm, pubkey) }; swarm1 .listen_on("/ip4/127.0.0.1/tcp/0".parse().unwrap()) .unwrap(); let listen_addr = async_std::task::block_on(async { loop { let swarm1_fut = swarm1.select_next_some(); pin_mut!(swarm1_fut); match swarm1_fut.await { SwarmEvent::NewListenAddr { address, .. } => return address, _ => {} } } }); swarm2.dial_addr(listen_addr).unwrap(); // nb. Either swarm may receive the `Identified` event first, upon which // it will permit the connection to be closed, as defined by // `IdentifyHandler::connection_keep_alive`. Hence the test succeeds if // either `Identified` event arrives correctly. async_std::task::block_on(async move { loop { let swarm1_fut = swarm1.select_next_some(); pin_mut!(swarm1_fut); let swarm2_fut = swarm2.select_next_some(); pin_mut!(swarm2_fut); match future::select(swarm1_fut, swarm2_fut) .await .factor_second() .0 { future::Either::Left(SwarmEvent::Behaviour(IdentifyEvent::Received { info, .. })) => { assert_eq!(info.public_key, pubkey2); assert_eq!(info.protocol_version, "c"); assert_eq!(info.agent_version, "d"); assert!(!info.protocols.is_empty()); assert!(info.listen_addrs.is_empty()); return; } future::Either::Right(SwarmEvent::Behaviour(IdentifyEvent::Received { info, .. })) => { assert_eq!(info.public_key, pubkey1); assert_eq!(info.protocol_version, "a"); assert_eq!(info.agent_version, "b"); assert!(!info.protocols.is_empty()); assert_eq!(info.listen_addrs.len(), 1); return; } _ => {} } } }) } #[test] fn identify_push() { let _ = env_logger::try_init(); let (mut swarm1, pubkey1) = { let (pubkey, transport) = transport(); let protocol = Identify::new( IdentifyConfig::new("a".to_string(), pubkey.clone()) // Delay identification requests so we can test the push protocol. 
.with_initial_delay(Duration::from_secs(u32::MAX as u64)), ); let swarm = Swarm::new(transport, protocol, pubkey.to_peer_id()); (swarm, pubkey) }; let (mut swarm2, pubkey2) = { let (pubkey, transport) = transport(); let protocol = Identify::new( IdentifyConfig::new("a".to_string(), pubkey.clone()) .with_agent_version("b".to_string()) // Delay identification requests so we can test the push protocol. .with_initial_delay(Duration::from_secs(u32::MAX as u64)), ); let swarm = Swarm::new(transport, protocol, pubkey.to_peer_id()); (swarm, pubkey) }; Swarm::listen_on(&mut swarm1, "/ip4/127.0.0.1/tcp/0".parse().unwrap()).unwrap(); let listen_addr = async_std::task::block_on(async { loop { let swarm1_fut = swarm1.select_next_some(); pin_mut!(swarm1_fut); match swarm1_fut.await { SwarmEvent::NewListenAddr { address, .. } => return address, _ => {} } } }); Swarm::dial_addr(&mut swarm2, listen_addr).unwrap(); async_std::task::block_on(async move { loop { let swarm1_fut = swarm1.select_next_some(); let swarm2_fut = swarm2.select_next_some(); { pin_mut!(swarm1_fut); pin_mut!(swarm2_fut); match future::select(swarm1_fut, swarm2_fut) .await .factor_second() .0 { future::Either::Left(SwarmEvent::Behaviour(IdentifyEvent::Received { info, .. })) => { assert_eq!(info.public_key, pubkey2); assert_eq!(info.protocol_version, "a"); assert_eq!(info.agent_version, "b"); assert!(!info.protocols.is_empty()); assert!(info.listen_addrs.is_empty()); return; } future::Either::Right(SwarmEvent::ConnectionEstablished { .. }) => { // Once a connection is established, we can initiate an // active push below. } _ => continue, } } swarm2 .behaviour_mut() .push(std::iter::once(pubkey1.to_peer_id())); } }) } }
37.950794
98
0.542515
5b6e7b9946df667e771b10edde99a219367aafae
21,756
extern crate base64; extern crate md5; use std::collections::HashMap; use std::io::Write; use super::bucket::Bucket; use super::command::Command; use chrono::{DateTime, Utc}; use hmac::{Mac, NewMac}; use reqwest::header::{self, HeaderMap, HeaderName, HeaderValue}; use reqwest::{Client, Response}; use sha2::{Digest, Sha256}; use url::Url; use crate::signing; use crate::EMPTY_PAYLOAD_SHA; use crate::LONG_DATE; use crate::{Result, S3Error}; // use once_cell::sync::Lazy; use tokio::io::AsyncWriteExt; use tokio_stream::StreamExt; /// Collection of HTTP headers sent to S3 service, in key/value format. pub type Headers = HashMap<String, String>; /// Collection of HTTP query parameters sent to S3 service, in key/value /// format. pub type Query = HashMap<String, String>; // static CLIENT: Lazy<Client> = Lazy::new(|| { // if cfg!(feature = "no-verify-ssl") { // Client::builder() // .danger_accept_invalid_certs(true) // .danger_accept_invalid_hostnames(true) // .build() // .expect("Could not build dangerous client!") // } else { // Client::new() // } // }); // Temporary structure for making a request pub struct Request<'a> { pub bucket: &'a Bucket, pub path: &'a str, pub command: Command<'a>, pub datetime: DateTime<Utc>, pub sync: bool, } impl<'a> Request<'a> { pub fn new<'b>(bucket: &'b Bucket, path: &'b str, command: Command<'b>) -> Request<'b> { Request { bucket, path, command, datetime: Utc::now(), sync: false, } } pub fn presigned(&self) -> Result<String> { let expiry = match self.command { Command::PresignGet { expiry_secs } => expiry_secs, Command::PresignPut { expiry_secs, .. } => expiry_secs, _ => unreachable!(), }; let custom_headers = match &self.command { Command::PresignPut { custom_headers, .. 
} => { custom_headers.clone() } _ => None, }; let authorization = self.presigned_authorization(custom_headers.clone())?; Ok(format!( "{}&X-Amz-Signature={}", self.presigned_url_no_sig(expiry, custom_headers)?, authorization )) } fn host_header(&self) -> Result<HeaderValue> { let host = self.bucket.host(); HeaderValue::from_str(&host).map_err(|_e| { S3Error::from(format!("Could not parse HOST header value {}", host).as_ref()) }) } fn url(&self, encode_path: bool) -> Url { let mut url_str = self.bucket.url(); if let Command::CreateBucket { .. } = self.command { return Url::parse(&url_str).unwrap() } let path = if self.path.starts_with('/') { &self.path[1..] } else { self.path }; url_str.push('/'); if encode_path { url_str.push_str(&signing::uri_encode(path, true)); } else { url_str.push_str(path); } // Since every part of this URL is either pre-encoded or statically // generated, there's really no way this should fail. let mut url = Url::parse(&url_str).expect("static URL parsing"); for (key, value) in &self.bucket.extra_query { url.query_pairs_mut().append_pair(key, value); } if let Command::ListBucket { prefix, delimiter, continuation_token, start_after, max_keys, } = self.command.clone() { let mut query_pairs = url.query_pairs_mut(); delimiter.map(|d| query_pairs.append_pair("delimiter", &d)); query_pairs.append_pair("prefix", &prefix); query_pairs.append_pair("list-type", "2"); if let Some(token) = continuation_token { query_pairs.append_pair("continuation-token", &token); } if let Some(start_after) = start_after { query_pairs.append_pair("start-after", &start_after); } if let Some(max_keys) = max_keys { query_pairs.append_pair("max-keys", &max_keys.to_string()); } } match self.command { Command::PutObjectTagging { .. } | Command::GetObjectTagging | Command::DeleteObjectTagging => { url.query_pairs_mut().append_pair("tagging", ""); } _ => {} } url } fn content_length(&self) -> usize { match &self.command { Command::PutObject { content, .. 
} => content.len(), Command::PutObjectTagging { tags } => tags.len(), Command::UploadPart { content, .. } => content.len(), Command::CompleteMultipartUpload { data, .. } => data.len(), Command::CreateBucket { config } => { if let Some(payload) = config.location_constraint_payload() { Vec::from(payload).len() } else { 0 } } _ => 0, } } fn content_type(&self) -> String { match self.command { Command::PutObject { content_type, .. } => content_type.into(), Command::CompleteMultipartUpload { .. } => "application/xml".into(), _ => "text/plain".into(), } } fn sha256(&self) -> String { match &self.command { Command::PutObject { content, .. } => { let mut sha = Sha256::default(); sha.update(content); hex::encode(sha.finalize().as_slice()) } Command::PutObjectTagging { tags } => { let mut sha = Sha256::default(); sha.update(tags.as_bytes()); hex::encode(sha.finalize().as_slice()) } Command::CompleteMultipartUpload { data, .. } => { let mut sha = Sha256::default(); sha.update(data.to_string().as_bytes()); hex::encode(sha.finalize().as_slice()) } Command::CreateBucket { config } => { if let Some(payload) = config.location_constraint_payload() { let mut sha = Sha256::default(); sha.update(payload.as_bytes()); hex::encode(sha.finalize().as_slice()) } else { EMPTY_PAYLOAD_SHA.into() } } _ => EMPTY_PAYLOAD_SHA.into(), } } fn long_date(&self) -> String { self.datetime.format(LONG_DATE).to_string() } fn canonical_request(&self, headers: &HeaderMap) -> String { signing::canonical_request( self.command.http_verb().as_str(), &self.url(false), headers, &self.sha256(), ) } fn presigned_url_no_sig(&self, expiry: u32, custom_headers: Option<HeaderMap>) -> Result<Url> { let token = self.bucket.security_token().or_else(|| self.bucket.session_token()); let url = Url::parse(&format!( "{}{}", self.url(true), &signing::authorization_query_params_no_sig( &self.bucket.access_key().unwrap(), &self.datetime, &self.bucket.region(), expiry, custom_headers, token )? 
))?; Ok(url) } fn presigned_canonical_request(&self, headers: &HeaderMap) -> Result<String> { let expiry = match self.command { Command::PresignGet { expiry_secs } => expiry_secs, Command::PresignPut { expiry_secs, .. } => expiry_secs, _ => unreachable!(), }; let custom_headers = match &self.command { Command::PresignPut { custom_headers, .. } => { custom_headers.clone() } _ => None, }; let canonical_request = signing::canonical_request( self.command.http_verb().as_str(), &self.presigned_url_no_sig(expiry, custom_headers)?, headers, "UNSIGNED-PAYLOAD", ); Ok(canonical_request) } fn string_to_sign(&self, request: &str) -> String { signing::string_to_sign(&self.datetime, &self.bucket.region(), request) } fn signing_key(&self) -> Result<Vec<u8>> { signing::signing_key( &self.datetime, &self .bucket .secret_key() .expect("Secret key must be provided to sign headers, found None"), &self.bucket.region(), "s3", ) } fn presigned_authorization(&self, custom_headers: Option<HeaderMap>) -> Result<String> { let mut headers = HeaderMap::new(); let host_header = self.host_header()?; headers.insert(header::HOST, host_header); if let Some(custom_headers) = custom_headers { for (k, v) in custom_headers.into_iter() { if let Some(k) = k { headers.insert(k, v); } } } let canonical_request = self.presigned_canonical_request(&headers)?; let string_to_sign = self.string_to_sign(&canonical_request); let mut hmac = signing::HmacSha256::new_varkey(&self.signing_key()?)?; hmac.update(string_to_sign.as_bytes()); let signature = hex::encode(hmac.finalize().into_bytes()); // let signed_header = signing::signed_header_string(&headers); Ok(signature) } fn authorization(&self, headers: &HeaderMap) -> Result<String> { let canonical_request = self.canonical_request(headers); let string_to_sign = self.string_to_sign(&canonical_request); let mut hmac = signing::HmacSha256::new_varkey(&self.signing_key()?)?; hmac.update(string_to_sign.as_bytes()); let signature = 
hex::encode(hmac.finalize().into_bytes()); let signed_header = signing::signed_header_string(headers); Ok(signing::authorization_header( &self.bucket.access_key().unwrap(), &self.datetime, &self.bucket.region(), &signed_header, &signature, )) } fn extra_headers(&self, headers: &mut HeaderMap) -> Result<()> { for (k, v) in self.bucket.extra_headers.iter() { headers.insert( match HeaderName::from_bytes(k.as_bytes()) { Ok(name) => name, Err(e) => { return Err(S3Error::from( format!("Could not parse {} to HeaderName.\n {}", k, e).as_ref(), )) } }, match HeaderValue::from_bytes(v.as_bytes()) { Ok(value) => value, Err(e) => { return Err(S3Error::from( format!("Could not parse {} to HeaderValue.\n {}", v, e).as_ref(), )) } }, ); } Ok(()) } fn headers(&self) -> Result<HeaderMap> { // Generate this once, but it's used in more than one place. let sha256 = self.sha256(); // Start with extra_headers, that way our headers replace anything with // the same name. let mut headers = HeaderMap::new(); self.extra_headers(&mut headers)?; headers.insert(header::HOST, self.host_header()?); match self.command { Command::ListBucket { .. } => {} Command::GetObject => {} Command::GetObjectTagging => {} Command::GetBucketLocation => {} _ => { headers.insert(header::CONTENT_TYPE, self.content_type().parse()?); headers.insert( header::CONTENT_LENGTH, self.content_length().to_string().parse()?, ); } } headers.insert("X-Amz-Content-Sha256", sha256.parse()?); headers.insert("X-Amz-Date", self.long_date().parse()?); if let Some(session_token) = self.bucket.session_token() { headers.insert("X-Amz-Security-Token", session_token.parse()?); } else if let Some(security_token) = self.bucket.security_token() { headers.insert("X-Amz-Security-Token", security_token.parse()?); } if let Command::PutObjectTagging { tags } = self.command { let digest = md5::compute(tags); let hash = base64::encode(digest.as_ref()); headers.insert("Content-MD5", hash.parse()?); } else if let Command::PutObject { content, .. 
} = self.command { let digest = md5::compute(content); let hash = base64::encode(digest.as_ref()); headers.insert("Content-MD5", hash.parse()?); } else if let Command::UploadPart { content, .. } = self.command { let digest = md5::compute(content); let hash = base64::encode(digest.as_ref()); headers.insert("Content-MD5", hash.parse()?); } else if let Command::GetObject {} = self.command { headers.insert( header::ACCEPT, HeaderValue::from_str("application/octet-stream")?, ); // headers.insert(header::ACCEPT_CHARSET, HeaderValue::from_str("UTF-8")?); } else if let Command::CreateBucket { ref config} = self.command { config.add_headers(&mut headers)?; } // This must be last, as it signs the other headers, omitted if no secret key is provided if self.bucket.secret_key().is_some() { let authorization = self.authorization(&headers)?; headers.insert(header::AUTHORIZATION, authorization.parse()?); } // The format of RFC2822 is somewhat malleable, so including it in // signed headers can cause signature mismatches. We do include the // X-Amz-Date header, so requests are still properly limited to a date // range and can't be used again e.g. reply attacks. Adding this header // after the generation of the Authorization header leaves it out of // the signed headers. headers.insert(header::DATE, self.datetime.to_rfc2822().parse()?); Ok(headers) } // pub fn response_data(&self) -> Result<(Vec<u8>, u16)> { // Ok(futures::executor::block_on(self.response_data_future())?) // } // pub fn response_data_to_writer<T: Write>(&self, writer: &mut T) -> Result<u16> { // Ok(futures::executor::block_on(self.response_data_to_writer_future(writer))?) // } pub async fn response_future(&self) -> Result<Response> { // Build headers let headers = match self.headers() { Ok(headers) => headers, Err(e) => return Err(e), }; // Get owned content to pass to reqwest let content = if let Command::PutObject { content, .. 
} = self.command { Vec::from(content) } else if let Command::PutObjectTagging { tags } = self.command { Vec::from(tags) } else if let Command::UploadPart { content, .. } = self.command { Vec::from(content) } else if let Command::CompleteMultipartUpload { data, .. } = &self.command { let body = data.to_string(); // assert_eq!(body, "body".to_string()); body.as_bytes().to_vec() } else if let Command::CreateBucket { config } = &self.command { if let Some(payload) = config.location_constraint_payload() { Vec::from(payload) } else { Vec::new() } } else { Vec::new() }; let client = if cfg!(feature = "no-verify-ssl") { let client = Client::builder().danger_accept_invalid_certs(true); cfg_if::cfg_if! { if #[cfg(feature = "native-tls")] { let client = client.danger_accept_invalid_hostnames(true); } } client.build().expect("Could not build dangerous client!") } else { Client::new() }; let request = client .request(self.command.http_verb(), self.url(false).as_str()) .headers(headers.to_owned()) .body(content.to_owned()); let response = request.send().await?; if cfg!(feature = "fail-on-err") && response.status().as_u16() >= 400 { return Err(S3Error::from( format!( "Request failed with code {}\n{}", response.status().as_u16(), response.text().await? 
) .as_str(), )); } Ok(response) } pub async fn response_data_future(&self, etag: bool) -> Result<(Vec<u8>, u16)> { let response = self.response_future().await?; let status_code = response.status().as_u16(); let headers = response.headers().clone(); let etag_header = headers.get("ETag"); let body = response.bytes().await?; let mut body_vec = Vec::new(); body_vec.extend_from_slice(&body[..]); if etag { if let Some(etag) = etag_header { body_vec = etag.to_str()?.as_bytes().to_vec(); } } Ok((body_vec, status_code)) } pub async fn response_data_to_writer_future<'b, T: Write>( &self, writer: &'b mut T, ) -> Result<u16> { let response = self.response_future().await?; let status_code = response.status(); let mut stream = response.bytes_stream(); while let Some(item) = stream.next().await { writer.write_all(&item?)?; } Ok(status_code.as_u16()) } pub async fn tokio_response_data_to_writer_future<'b, T: AsyncWriteExt + Unpin>( &self, writer: &'b mut T, ) -> Result<u16> { let response = self.response_future().await?; let status_code = response.status(); let mut stream = response.bytes_stream(); while let Some(item) = stream.next().await { writer.write_all(&item?).await?; } Ok(status_code.as_u16()) } pub async fn response_header_future(&self) -> Result<(HeaderMap, u16)> { let response = self.response_future().await?; let status_code = response.status().as_u16(); let headers = response.headers().clone(); Ok((headers, status_code)) } } #[cfg(test)] mod tests { use crate::bucket::Bucket; use crate::command::Command; use crate::request::Request; use crate::Result; use awscreds::Credentials; // Fake keys - otherwise using Credentials::default will use actual user // credentials if they exist. 
fn fake_credentials() -> Credentials { let access_key = "AKIAIOSFODNN7EXAMPLE"; let secert_key = "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY"; Credentials::new(Some(access_key), Some(secert_key), None, None, None).unwrap() } #[test] fn url_uses_https_by_default() -> Result<()> { let region = "custom-region".parse()?; let bucket = Bucket::new("my-first-bucket", region, fake_credentials())?; let path = "/my-first/path"; let request = Request::new(&bucket, path, Command::GetObject); assert_eq!(request.url(false).scheme(), "https"); let headers = request.headers().unwrap(); let host = headers.get("Host").unwrap(); assert_eq!(*host, "my-first-bucket.custom-region".to_string()); Ok(()) } #[test] fn url_uses_https_by_default_path_style() -> Result<()> { let region = "custom-region".parse()?; let bucket = Bucket::new_with_path_style("my-first-bucket", region, fake_credentials())?; let path = "/my-first/path"; let request = Request::new(&bucket, path, Command::GetObject); assert_eq!(request.url(false).scheme(), "https"); let headers = request.headers().unwrap(); let host = headers.get("Host").unwrap(); assert_eq!(*host, "custom-region".to_string()); Ok(()) } #[test] fn url_uses_scheme_from_custom_region_if_defined() -> Result<()> { let region = "http://custom-region".parse()?; let bucket = Bucket::new("my-second-bucket", region, fake_credentials())?; let path = "/my-second/path"; let request = Request::new(&bucket, path, Command::GetObject); assert_eq!(request.url(false).scheme(), "http"); let headers = request.headers().unwrap(); let host = headers.get("Host").unwrap(); assert_eq!(*host, "my-second-bucket.custom-region".to_string()); Ok(()) } #[test] fn url_uses_scheme_from_custom_region_if_defined_with_path_style() -> Result<()> { let region = "http://custom-region".parse()?; let bucket = Bucket::new_with_path_style("my-second-bucket", region, fake_credentials())?; let path = "/my-second/path"; let request = Request::new(&bucket, path, Command::GetObject); 
assert_eq!(request.url(false).scheme(), "http"); let headers = request.headers().unwrap(); let host = headers.get("Host").unwrap(); assert_eq!(*host, "custom-region".to_string()); Ok(()) } }
34.921348
99
0.544448
0157b4c5369ac861ade8d7d322db57543ad4a84a
8,582
use bevy::{ core::FixedTimestep, diagnostic::{Diagnostics, FrameTimeDiagnosticsPlugin, LogDiagnosticsPlugin}, prelude::*, window::PresentMode, }; use rand::{thread_rng, Rng}; const BIRDS_PER_SECOND: u32 = 10000; const GRAVITY: f32 = -9.8 * 100.0; const MAX_VELOCITY: f32 = 750.; const BIRD_SCALE: f32 = 0.15; const HALF_BIRD_SIZE: f32 = 256. * BIRD_SCALE * 0.5; struct BevyCounter { pub count: usize, pub color: Color, } #[derive(Component)] struct Bird { velocity: Vec3, } /// This example provides a 2D benchmark. /// /// Usage: spawn more entities by clicking on the screen. fn main() { App::new() .insert_resource(WindowDescriptor { title: "BevyMark".to_string(), width: 800., height: 600., present_mode: PresentMode::Immediate, resizable: true, ..Default::default() }) .add_plugins(DefaultPlugins) .add_plugin(FrameTimeDiagnosticsPlugin::default()) .add_plugin(LogDiagnosticsPlugin::default()) .insert_resource(BevyCounter { count: 0, color: Color::WHITE, }) .add_startup_system(setup) .add_system(mouse_handler) .add_system(movement_system) .add_system(collision_system) .add_system(counter_system) .add_system_set( SystemSet::new() .with_run_criteria(FixedTimestep::step(0.2)) .with_system(scheduled_spawner), ) .run(); } struct BirdScheduled { wave: usize, per_wave: usize, } fn scheduled_spawner( mut commands: Commands, windows: Res<Windows>, mut scheduled: ResMut<BirdScheduled>, mut counter: ResMut<BevyCounter>, bird_texture: Res<BirdTexture>, ) { if scheduled.wave > 0 { spawn_birds( &mut commands, &windows, &mut counter, scheduled.per_wave, bird_texture.0.clone_weak(), ); let mut rng = thread_rng(); counter.color = Color::rgb_linear(rng.gen(), rng.gen(), rng.gen()); scheduled.wave -= 1; } } struct BirdTexture(Handle<Image>); #[derive(Component)] struct StatsText; fn setup(mut commands: Commands, asset_server: Res<AssetServer>) { let texture = asset_server.load("branding/icon.png"); commands.spawn_bundle(OrthographicCameraBundle::new_2d()); 
commands.spawn_bundle(UiCameraBundle::default()); commands .spawn_bundle(TextBundle { text: Text { sections: vec![ TextSection { value: "Bird Count: ".to_string(), style: TextStyle { font: asset_server.load("fonts/FiraSans-Bold.ttf"), font_size: 40.0, color: Color::rgb(0.0, 1.0, 0.0), }, }, TextSection { value: "".to_string(), style: TextStyle { font: asset_server.load("fonts/FiraSans-Bold.ttf"), font_size: 40.0, color: Color::rgb(0.0, 1.0, 1.0), }, }, TextSection { value: "\nAverage FPS: ".to_string(), style: TextStyle { font: asset_server.load("fonts/FiraSans-Bold.ttf"), font_size: 40.0, color: Color::rgb(0.0, 1.0, 0.0), }, }, TextSection { value: "".to_string(), style: TextStyle { font: asset_server.load("fonts/FiraSans-Bold.ttf"), font_size: 40.0, color: Color::rgb(0.0, 1.0, 1.0), }, }, ], ..Default::default() }, style: Style { position_type: PositionType::Absolute, position: Rect { top: Val::Px(5.0), left: Val::Px(5.0), ..Default::default() }, ..Default::default() }, ..Default::default() }) .insert(StatsText); commands.insert_resource(BirdTexture(texture)); commands.insert_resource(BirdScheduled { per_wave: std::env::args() .nth(1) .and_then(|arg| arg.parse::<usize>().ok()) .unwrap_or_default(), wave: std::env::args() .nth(2) .and_then(|arg| arg.parse::<usize>().ok()) .unwrap_or(1), }); } fn mouse_handler( mut commands: Commands, time: Res<Time>, mouse_button_input: Res<Input<MouseButton>>, windows: Res<Windows>, bird_texture: Res<BirdTexture>, mut counter: ResMut<BevyCounter>, ) { if mouse_button_input.just_released(MouseButton::Left) { let mut rng = thread_rng(); counter.color = Color::rgb_linear(rng.gen(), rng.gen(), rng.gen()); } if mouse_button_input.pressed(MouseButton::Left) { let spawn_count = (BIRDS_PER_SECOND as f64 * time.delta_seconds_f64()) as usize; spawn_birds( &mut commands, &windows, &mut counter, spawn_count, bird_texture.0.clone_weak(), ); } } fn spawn_birds( commands: &mut Commands, windows: &Windows, counter: &mut BevyCounter, spawn_count: 
usize, texture: Handle<Image>, ) { let window = windows.get_primary().unwrap(); let bird_x = (window.width() as f32 / -2.) + HALF_BIRD_SIZE; let bird_y = (window.height() as f32 / 2.) - HALF_BIRD_SIZE; let mut rng = thread_rng(); for count in 0..spawn_count { let bird_z = (counter.count + count) as f32 * 0.00001; commands .spawn_bundle(SpriteBundle { texture: texture.clone(), transform: Transform { translation: Vec3::new(bird_x, bird_y, bird_z), scale: Vec3::splat(BIRD_SCALE), ..Default::default() }, sprite: Sprite { color: counter.color, ..Default::default() }, ..Default::default() }) .insert(Bird { velocity: Vec3::new( rng.gen::<f32>() * MAX_VELOCITY - (MAX_VELOCITY * 0.5), 0., 0., ), }); } counter.count += spawn_count; } fn movement_system(time: Res<Time>, mut bird_query: Query<(&mut Bird, &mut Transform)>) { for (mut bird, mut transform) in bird_query.iter_mut() { transform.translation.x += bird.velocity.x * time.delta_seconds(); transform.translation.y += bird.velocity.y * time.delta_seconds(); bird.velocity.y += GRAVITY * time.delta_seconds(); } } fn collision_system(windows: Res<Windows>, mut bird_query: Query<(&mut Bird, &Transform)>) { let window = windows.get_primary().unwrap(); let half_width = window.width() as f32 * 0.5; let half_height = window.height() as f32 * 0.5; for (mut bird, transform) in bird_query.iter_mut() { let x_vel = bird.velocity.x; let y_vel = bird.velocity.y; let x_pos = transform.translation.x; let y_pos = transform.translation.y; if (x_vel > 0. && x_pos + HALF_BIRD_SIZE > half_width) || (x_vel <= 0. && x_pos - HALF_BIRD_SIZE < -(half_width)) { bird.velocity.x = -x_vel; } if y_vel < 0. 
&& y_pos - HALF_BIRD_SIZE < -half_height { bird.velocity.y = -y_vel; } if y_pos + HALF_BIRD_SIZE > half_height && y_vel > 0.0 { bird.velocity.y = 0.0; } } } fn counter_system( diagnostics: Res<Diagnostics>, counter: Res<BevyCounter>, mut query: Query<&mut Text, With<StatsText>>, ) { let mut text = query.single_mut(); if counter.is_changed() { text.sections[1].value = format!("{}", counter.count); } if let Some(fps) = diagnostics.get(FrameTimeDiagnosticsPlugin::FPS) { if let Some(average) = fps.average() { text.sections[3].value = format!("{:.2}", average); } }; }
31.321168
92
0.516546
cc80b6308165fca5446bec3a863b609f8ae015e9
285
use config::Config; use git::Repository; use super::{Module, State}; pub(crate) trait ModuleProvider { fn new(config: &Config, repository: Repository) -> Self; fn get_mut_module(&mut self, _state: State) -> &mut dyn Module; fn get_module(&self, _state: State) -> &dyn Module; }
21.923077
64
0.701754
db0aeed1ea48f871bb85ea0533f42fa271c9213d
442
mod todotxt; fn main () { let mut tasks = ~[]; let t = ::todotxt::Task::create(~"some important task", 1); println(t.to_str()); tasks.push(t); let x = ::todotxt::Task::create(~"x some important task", 1); println(x.to_str()); tasks.push(x); println!("tasks length: {}", tasks.len()); for task in tasks.iter() { println(task.to_str()); } }
20.090909
69
0.486425
71822a520a2f76af508bed9a653d71ba057522a4
1,177
use sqlx::{Pool, Postgres}; use crate::entities::Note; pub async fn insert_note(note: &Note, pool: &Pool<Postgres>) -> anyhow::Result<()> { sqlx::query!( r#" INSERT into notes (chat_id, note_id, note_content) VALUES ($1, $2, $3) ON CONFLICT (chat_id, note_id) DO UPDATE SET note_content = excluded.note_content WHERE (notes.note_content) IS DISTINCT FROM (excluded.note_content) "#, note.chat_id, note.note_id, note.note_content, ) .execute(pool) .await?; Ok(()) } pub async fn fetch_note( chat_id: Option<i64>, note_id: Option<&str>, pool: &Pool<Postgres>, ) -> anyhow::Result<Note> { let note = sqlx::query_as!( Note, "SELECT * FROM notes WHERE chat_id = $1 AND note_id = $2", chat_id, note_id ) .fetch_one(pool) .await?; Ok(note) } pub async fn delete_note( chat_id: Option<i64>, note_id: Option<&str>, pool: &Pool<Postgres>, ) -> anyhow::Result<()> { sqlx::query!( "DELETE FROM notes WHERE chat_id = $1 AND note_id = $2", chat_id, note_id ) .execute(pool) .await?; Ok(()) }
21.796296
84
0.57774
4a3ac838958c5405538c3819edadf7e2daaa2fec
8,546
//! This module provides functions to parse PICA+ records. use crate::{Field, Occurrence, OccurrenceMatcher, Path, Subfield}; use nom::branch::alt; use nom::bytes::complete::{is_not, tag}; use nom::character::complete::{char, multispace0, one_of, satisfy}; use nom::combinator::{all_consuming, cut, map, opt, recognize, success}; use nom::multi::{count, many0, many1, many_m_n}; use nom::sequence::{delimited, pair, preceded, terminated, tuple}; use nom::Err; use bstr::BString; use std::fmt; const NL: char = '\x0A'; const US: char = '\x1F'; const RS: char = '\x1E'; const SP: char = '\x20'; /// Parser result. pub type ParseResult<'a, O> = Result<(&'a [u8], O), Err<()>>; /// An error that can occur when parsing PICA+ records. #[derive(Debug, PartialEq)] pub struct ParsePicaError { pub message: String, pub data: Vec<u8>, } #[derive(Debug)] pub struct ParsePathError(pub(crate) String); impl std::error::Error for ParsePicaError {} impl std::error::Error for ParsePathError {} impl fmt::Display for ParsePicaError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(&self.message) } } impl fmt::Display for ParsePathError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(&self.0) } } /// Parses a subfield code. pub(crate) fn parse_subfield_code(i: &[u8]) -> ParseResult<char> { map(satisfy(|c| c.is_ascii_alphanumeric()), char::from)(i) } /// Parses multiple subfield codes. pub(crate) fn parse_subfield_codes(i: &[u8]) -> ParseResult<Vec<char>> { alt(( map(parse_subfield_code, |x| vec![x]), delimited(char('['), many1(parse_subfield_code), char(']')), ))(i) } /// Parses a subfield value. pub(crate) fn parse_subfield_value(i: &[u8]) -> ParseResult<BString> { recognize(many0(is_not("\x1E\x1F")))(i).map(|(i, o)| (i, BString::from(o))) } /// Parses a subfield. 
pub(crate) fn parse_subfield(i: &[u8]) -> ParseResult<Subfield> { map( preceded( char(US), cut(pair(parse_subfield_code, parse_subfield_value)), ), |(code, value)| Subfield::from_unchecked(code, value), )(i) } /// Parses a field occurrence. pub fn parse_field_occurrence(i: &[u8]) -> ParseResult<Occurrence> { map( preceded( tag(b"/"), cut(recognize(many_m_n(2, 3, one_of("0123456789")))), ), Occurrence::from_unchecked, )(i) } /// Parses a field tag. pub fn parse_field_tag(i: &[u8]) -> ParseResult<BString> { map( recognize(tuple(( one_of("012"), count(one_of("0123456789"), 2), one_of("ABCDEFGHIJKLMNOPQRSTUVWXYZ@"), ))), BString::from, )(i) } /// Parses a field. pub fn parse_field(i: &[u8]) -> ParseResult<Field> { map( terminated( tuple(( parse_field_tag, alt((map(parse_field_occurrence, Some), success(None))), preceded(char(SP), many0(parse_subfield)), )), char(RS), ), |(tag, occurrence, subfields)| Field { tag, occurrence, subfields, }, )(i) } /// Parses a record. pub fn parse_fields(i: &[u8]) -> ParseResult<Vec<Field>> { all_consuming(terminated(many1(parse_field), opt(char(NL))))(i) } /// Parses a occurrence matcher. 
pub(crate) fn parse_occurrence_matcher( i: &[u8], ) -> ParseResult<OccurrenceMatcher> { alt(( map(tag(b"/*"), |_| OccurrenceMatcher::Any), map(parse_field_occurrence, OccurrenceMatcher::Occurrence), success(OccurrenceMatcher::None), ))(i) } pub(crate) fn parse_path(i: &[u8]) -> ParseResult<Path> { map( all_consuming(delimited( multispace0, tuple(( parse_field_tag, parse_occurrence_matcher, preceded(char('.'), parse_subfield_codes), )), multispace0, )), |(tag, occurrence, codes)| Path { tag, occurrence, codes, }, )(i) } #[cfg(test)] mod tests { use super::*; #[test] fn test_parse_subfield_code() { assert_eq!(parse_subfield_code(b"0").unwrap().1, '0'); assert_eq!(parse_subfield_code(b"a").unwrap().1, 'a'); assert_eq!(parse_subfield_code(b"Z").unwrap().1, 'Z'); assert_eq!(parse_subfield_code(b"!").is_err(), true); } #[test] fn test_parse_parse_subfield_value() { assert_eq!(parse_subfield_value(b"abc").unwrap().1, "abc"); assert_eq!(parse_subfield_value(b"a\x1ebc").unwrap().1, "a"); assert_eq!(parse_subfield_value(b"a\x1fbc").unwrap().1, "a"); assert_eq!(parse_subfield_value(b"").unwrap().1, ""); } #[test] fn test_parse_subfield() { assert_eq!( parse_subfield(b"\x1fa123").unwrap().1, Subfield::from_unchecked('a', "123") ); assert_eq!( parse_subfield(b"\x1fa").unwrap().1, Subfield::from_unchecked('a', "") ); assert_eq!(parse_subfield(b"a123").is_err(), true); assert_eq!(parse_subfield(b"").is_err(), true); } #[test] fn test_parse_field_occurrence() { assert_eq!( parse_field_occurrence(b"/00").unwrap().1, Occurrence::from_unchecked("00") ); assert_eq!( parse_field_occurrence(b"/01").unwrap().1, Occurrence::from_unchecked("01") ); assert_eq!( parse_field_occurrence(b"/001").unwrap().1, Occurrence::from_unchecked("001") ); assert_eq!(parse_field_occurrence(b"/XYZ").is_err(), true); } #[test] fn test_parse_field_tag() { assert_eq!(parse_field_tag(b"003@").unwrap().1, BString::from("003@")); assert_eq!(parse_field_tag(b"012A").unwrap().1, BString::from("012A")); 
assert_eq!(parse_field_tag(b"003").is_err(), true); assert_eq!(parse_field_tag(b"03").is_err(), true); assert_eq!(parse_field_tag(b"0").is_err(), true); assert_eq!(parse_field_tag(b"").is_err(), true); assert_eq!(parse_field_tag(b"003!").is_err(), true); assert_eq!(parse_field_tag(b"303@").is_err(), true); } #[test] fn test_parse_field() { assert_eq!( parse_field(b"003@ \x1f0123456789X\x1e").unwrap().1, Field::new( "003@", None, vec![Subfield::new('0', "123456789X").unwrap()] ) .unwrap() ); } #[test] fn test_parse_fields() { assert_eq!( parse_fields(b"003@ \x1f0123456789X\x1e012A/00 \x1fa123\x1e") .unwrap() .1, vec![ Field::new( "003@", None, vec![Subfield::new('0', "123456789X").unwrap()] ) .unwrap(), Field::new( "012A", Some(Occurrence::new("00").unwrap()), vec![Subfield::new('a', "123").unwrap()] ) .unwrap() ] ); } #[test] fn test_parse_occurrence_matcher() { assert_eq!( parse_occurrence_matcher(b"/00").unwrap().1, OccurrenceMatcher::new("00").unwrap() ); assert_eq!( parse_occurrence_matcher(b"/001").unwrap().1, OccurrenceMatcher::new("001").unwrap() ); assert_eq!( parse_occurrence_matcher(b"/*").unwrap().1, OccurrenceMatcher::Any, ); assert_eq!( parse_occurrence_matcher(b"").unwrap().1, OccurrenceMatcher::None, ); } #[test] fn test_parse_path() { assert_eq!( parse_path(b"003@.0").unwrap().1, Path::new("003@", OccurrenceMatcher::None, vec!['0']).unwrap() ); assert_eq!( parse_path(b"012A/01.0").unwrap().1, Path::new("012A", OccurrenceMatcher::new("01").unwrap(), vec!['0']) .unwrap() ); assert_eq!( parse_path(b"012A/*.[ab]").unwrap().1, Path::new("012A", OccurrenceMatcher::Any, vec!['a', 'b']).unwrap() ); assert_eq!( parse_path(b"012A/*.0").unwrap().1, Path::new("012A", OccurrenceMatcher::Any, vec!['0']).unwrap() ); } }
28.774411
79
0.541423
9cf68cbd23e6da2f7200bf2c6c477e8f6bc252df
72,275
//! Source positions and related helper functions. //! //! Important concepts in this module include: //! //! - the *span*, represented by [`SpanData`] and related types; //! - source code as represented by a [`SourceMap`]; and //! - interned strings, represented by [`Symbol`]s, with some common symbols available statically in the [`sym`] module. //! //! Unlike most compilers, the span contains not only the position in the source code, but also various other metadata, //! such as the edition and macro hygiene. This metadata is stored in [`SyntaxContext`] and [`ExpnData`]. //! //! ## Note //! //! This API is completely unstable and subject to change. #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![feature(array_windows)] #![feature(crate_visibility_modifier)] #![feature(negative_impls)] #![feature(nll)] #![feature(min_specialization)] #![feature(thread_local_const_init)] #[macro_use] extern crate rustc_macros; use rustc_data_structures::AtomicRef; use rustc_macros::HashStable_Generic; use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; mod caching_source_map_view; pub mod source_map; pub use self::caching_source_map_view::CachingSourceMapView; use source_map::SourceMap; pub mod edition; use edition::Edition; pub mod hygiene; pub use hygiene::SyntaxContext; use hygiene::Transparency; pub use hygiene::{DesugaringKind, ExpnData, ExpnId, ExpnKind, ForLoopLoc, MacroKind}; pub mod def_id; use def_id::{CrateNum, DefId, LOCAL_CRATE}; pub mod lev_distance; mod span_encoding; pub use span_encoding::{Span, DUMMY_SP}; pub mod crate_disambiguator; pub mod symbol; pub use symbol::{sym, Symbol}; mod analyze_source_file; pub mod fatal_error; use rustc_data_structures::fingerprint::Fingerprint; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_data_structures::sync::{Lock, Lrc}; use std::borrow::Cow; use std::cell::RefCell; use std::cmp::{self, Ordering}; use std::fmt; use std::hash::Hash; use std::ops::{Add, 
Range, Sub}; use std::path::{Path, PathBuf}; use std::str::FromStr; use std::thread::LocalKey; use md5::Md5; use sha1::Digest; use sha1::Sha1; use sha2::Sha256; use tracing::debug; #[cfg(test)] mod tests; // Per-session global variables: this struct is stored in thread-local storage // in such a way that it is accessible without any kind of handle to all // threads within the compilation session, but is not accessible outside the // session. pub struct SessionGlobals { symbol_interner: Lock<symbol::Interner>, span_interner: Lock<span_encoding::SpanInterner>, hygiene_data: Lock<hygiene::HygieneData>, source_map: Lock<Option<Lrc<SourceMap>>>, } impl SessionGlobals { pub fn new(edition: Edition) -> SessionGlobals { SessionGlobals { symbol_interner: Lock::new(symbol::Interner::fresh()), span_interner: Lock::new(span_encoding::SpanInterner::default()), hygiene_data: Lock::new(hygiene::HygieneData::new(edition)), source_map: Lock::new(None), } } } pub fn with_session_globals<R>(edition: Edition, f: impl FnOnce() -> R) -> R { let session_globals = SessionGlobals::new(edition); SESSION_GLOBALS.set(&session_globals, f) } pub fn with_default_session_globals<R>(f: impl FnOnce() -> R) -> R { with_session_globals(edition::DEFAULT_EDITION, f) } // If this ever becomes non thread-local, `decode_syntax_context` // and `decode_expn_id` will need to be updated to handle concurrent // deserialization. scoped_tls::scoped_thread_local!(pub static SESSION_GLOBALS: SessionGlobals); // FIXME: We should use this enum or something like it to get rid of the // use of magic `/rust/1.x/...` paths across the board. 
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd)] #[derive(HashStable_Generic, Decodable)] pub enum RealFileName { LocalPath(PathBuf), /// For remapped paths (namely paths into libstd that have been mapped /// to the appropriate spot on the local host's file system, and local file /// system paths that have been remapped with `FilePathMapping`), Remapped { /// `local_path` is the (host-dependent) local path to the file. This is /// None if the file was imported from another crate local_path: Option<PathBuf>, /// `virtual_name` is the stable path rustc will store internally within /// build artifacts. virtual_name: PathBuf, }, } impl Hash for RealFileName { fn hash<H: std::hash::Hasher>(&self, state: &mut H) { // To prevent #70924 from happening again we should only hash the // remapped (virtualized) path if that exists. This is because // virtualized paths to sysroot crates (/rust/$hash or /rust/$version) // remain stable even if the corresponding local_path changes self.remapped_path_if_available().hash(state) } } // This is functionally identical to #[derive(Encodable)], with the exception of // an added assert statement impl<S: Encoder> Encodable<S> for RealFileName { fn encode(&self, encoder: &mut S) -> Result<(), S::Error> { encoder.emit_enum(|encoder| match *self { RealFileName::LocalPath(ref local_path) => { encoder.emit_enum_variant("LocalPath", 0, 1, |encoder| { Ok({ encoder .emit_enum_variant_arg(true, |encoder| local_path.encode(encoder))?; }) }) } RealFileName::Remapped { ref local_path, ref virtual_name } => encoder .emit_enum_variant("Remapped", 1, 2, |encoder| { // For privacy and build reproducibility, we must not embed host-dependant path in artifacts // if they have been remapped by --remap-path-prefix assert!(local_path.is_none()); Ok({ encoder .emit_enum_variant_arg(true, |encoder| local_path.encode(encoder))?; encoder .emit_enum_variant_arg(false, |encoder| virtual_name.encode(encoder))?; }) }), }) } } impl RealFileName { /// Returns the 
path suitable for reading from the file system on the local host, /// if this information exists. /// Avoid embedding this in build artifacts; see `remapped_path_if_available()` for that. pub fn local_path(&self) -> Option<&Path> { match self { RealFileName::LocalPath(p) => Some(p), RealFileName::Remapped { local_path: p, virtual_name: _ } => { p.as_ref().map(PathBuf::as_path) } } } /// Returns the path suitable for reading from the file system on the local host, /// if this information exists. /// Avoid embedding this in build artifacts; see `remapped_path_if_available()` for that. pub fn into_local_path(self) -> Option<PathBuf> { match self { RealFileName::LocalPath(p) => Some(p), RealFileName::Remapped { local_path: p, virtual_name: _ } => p, } } /// Returns the path suitable for embedding into build artifacts. This would still /// be a local path if it has not been remapped. A remapped path will not correspond /// to a valid file system path: see `local_path_if_available()` for something that /// is more likely to return paths into the local host file system. pub fn remapped_path_if_available(&self) -> &Path { match self { RealFileName::LocalPath(p) | RealFileName::Remapped { local_path: _, virtual_name: p } => &p, } } /// Returns the path suitable for reading from the file system on the local host, /// if this information exists. Otherwise returns the remapped name. /// Avoid embedding this in build artifacts; see `remapped_path_if_available()` for that. pub fn local_path_if_available(&self) -> &Path { match self { RealFileName::LocalPath(path) | RealFileName::Remapped { local_path: None, virtual_name: path } | RealFileName::Remapped { local_path: Some(path), virtual_name: _ } => path, } } pub fn to_string_lossy(&self, prefer_local: bool) -> Cow<'_, str> { if prefer_local { self.local_path_if_available().to_string_lossy() } else { self.remapped_path_if_available().to_string_lossy() } } } /// Differentiates between real files and common virtual files. 
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Hash)] #[derive(HashStable_Generic, Decodable, Encodable)] pub enum FileName { Real(RealFileName), /// Call to `quote!`. QuoteExpansion(u64), /// Command line. Anon(u64), /// Hack in `src/librustc_ast/parse.rs`. // FIXME(jseyfried) MacroExpansion(u64), ProcMacroSourceCode(u64), /// Strings provided as `--cfg [cfgspec]` stored in a `crate_cfg`. CfgSpec(u64), /// Strings provided as crate attributes in the CLI. CliCrateAttr(u64), /// Custom sources for explicit parser calls from plugins and drivers. Custom(String), DocTest(PathBuf, isize), /// Post-substitution inline assembly from LLVM. InlineAsm(u64), } impl From<PathBuf> for FileName { fn from(p: PathBuf) -> Self { assert!(!p.to_string_lossy().ends_with('>')); FileName::Real(RealFileName::LocalPath(p)) } } pub struct FileNameDisplay<'a> { inner: &'a FileName, prefer_local: bool, } impl fmt::Display for FileNameDisplay<'_> { fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { use FileName::*; match *self.inner { Real(ref name) => { write!(fmt, "{}", name.to_string_lossy(self.prefer_local)) } QuoteExpansion(_) => write!(fmt, "<quote expansion>"), MacroExpansion(_) => write!(fmt, "<macro expansion>"), Anon(_) => write!(fmt, "<anon>"), ProcMacroSourceCode(_) => write!(fmt, "<proc-macro source code>"), CfgSpec(_) => write!(fmt, "<cfgspec>"), CliCrateAttr(_) => write!(fmt, "<crate attribute>"), Custom(ref s) => write!(fmt, "<{}>", s), DocTest(ref path, _) => write!(fmt, "{}", path.display()), InlineAsm(_) => write!(fmt, "<inline asm>"), } } } impl FileNameDisplay<'_> { pub fn to_string_lossy(&self) -> Cow<'_, str> { match self.inner { FileName::Real(ref inner) => inner.to_string_lossy(self.prefer_local), _ => Cow::from(format!("{}", self)), } } } impl FileName { pub fn is_real(&self) -> bool { use FileName::*; match *self { Real(_) => true, Anon(_) | MacroExpansion(_) | ProcMacroSourceCode(_) | CfgSpec(_) | CliCrateAttr(_) | Custom(_) | 
QuoteExpansion(_) | DocTest(_, _) | InlineAsm(_) => false, } } pub fn prefer_remapped(&self) -> FileNameDisplay<'_> { FileNameDisplay { inner: self, prefer_local: false } } // This may include transient local filesystem information. // Must not be embedded in build outputs. pub fn prefer_local(&self) -> FileNameDisplay<'_> { FileNameDisplay { inner: self, prefer_local: true } } pub fn macro_expansion_source_code(src: &str) -> FileName { let mut hasher = StableHasher::new(); src.hash(&mut hasher); FileName::MacroExpansion(hasher.finish()) } pub fn anon_source_code(src: &str) -> FileName { let mut hasher = StableHasher::new(); src.hash(&mut hasher); FileName::Anon(hasher.finish()) } pub fn proc_macro_source_code(src: &str) -> FileName { let mut hasher = StableHasher::new(); src.hash(&mut hasher); FileName::ProcMacroSourceCode(hasher.finish()) } pub fn cfg_spec_source_code(src: &str) -> FileName { let mut hasher = StableHasher::new(); src.hash(&mut hasher); FileName::QuoteExpansion(hasher.finish()) } pub fn cli_crate_attr_source_code(src: &str) -> FileName { let mut hasher = StableHasher::new(); src.hash(&mut hasher); FileName::CliCrateAttr(hasher.finish()) } pub fn doc_test_source_code(path: PathBuf, line: isize) -> FileName { FileName::DocTest(path, line) } pub fn inline_asm_source_code(src: &str) -> FileName { let mut hasher = StableHasher::new(); src.hash(&mut hasher); FileName::InlineAsm(hasher.finish()) } } /// Represents a span. /// /// Spans represent a region of code, used for error reporting. Positions in spans /// are *absolute* positions from the beginning of the [`SourceMap`], not positions /// relative to [`SourceFile`]s. Methods on the `SourceMap` can be used to relate spans back /// to the original source. 
/// /// You must be careful if the span crosses more than one file, since you will not be /// able to use many of the functions on spans in source_map and you cannot assume /// that the length of the span is equal to `span.hi - span.lo`; there may be space in the /// [`BytePos`] range between files. /// /// `SpanData` is public because `Span` uses a thread-local interner and can't be /// sent to other threads, but some pieces of performance infra run in a separate thread. /// Using `Span` is generally preferred. #[derive(Clone, Copy, Hash, PartialEq, Eq, Ord, PartialOrd)] pub struct SpanData { pub lo: BytePos, pub hi: BytePos, /// Information about where the macro came from, if this piece of /// code was created by a macro expansion. pub ctxt: SyntaxContext, } impl SpanData { #[inline] pub fn span(&self) -> Span { Span::new(self.lo, self.hi, self.ctxt) } #[inline] pub fn with_lo(&self, lo: BytePos) -> Span { Span::new(lo, self.hi, self.ctxt) } #[inline] pub fn with_hi(&self, hi: BytePos) -> Span { Span::new(self.lo, hi, self.ctxt) } #[inline] pub fn with_ctxt(&self, ctxt: SyntaxContext) -> Span { Span::new(self.lo, self.hi, ctxt) } } // The interner is pointed to by a thread local value which is only set on the main thread // with parallelization is disabled. So we don't allow `Span` to transfer between threads // to avoid panics and other errors, even though it would be memory safe to do so. #[cfg(not(parallel_compiler))] impl !Send for Span {} #[cfg(not(parallel_compiler))] impl !Sync for Span {} impl PartialOrd for Span { fn partial_cmp(&self, rhs: &Self) -> Option<Ordering> { PartialOrd::partial_cmp(&self.data(), &rhs.data()) } } impl Ord for Span { fn cmp(&self, rhs: &Self) -> Ordering { Ord::cmp(&self.data(), &rhs.data()) } } /// A collection of `Span`s. /// /// Spans have two orthogonal attributes: /// /// - They can be *primary spans*. In this case they are the locus of /// the error, and would be rendered with `^^^`. /// - They can have a *label*. 
In this case, the label is written next /// to the mark in the snippet when we render. #[derive(Clone, Debug, Hash, PartialEq, Eq, Encodable, Decodable)] pub struct MultiSpan { primary_spans: Vec<Span>, span_labels: Vec<(Span, String)>, } impl Span { #[inline] pub fn lo(self) -> BytePos { self.data().lo } #[inline] pub fn with_lo(self, lo: BytePos) -> Span { self.data().with_lo(lo) } #[inline] pub fn hi(self) -> BytePos { self.data().hi } #[inline] pub fn with_hi(self, hi: BytePos) -> Span { self.data().with_hi(hi) } #[inline] pub fn ctxt(self) -> SyntaxContext { self.data().ctxt } #[inline] pub fn with_ctxt(self, ctxt: SyntaxContext) -> Span { self.data().with_ctxt(ctxt) } /// Returns `true` if this is a dummy span with any hygienic context. #[inline] pub fn is_dummy(self) -> bool { let span = self.data(); span.lo.0 == 0 && span.hi.0 == 0 } /// Returns `true` if this span comes from a macro or desugaring. #[inline] pub fn from_expansion(self) -> bool { self.ctxt() != SyntaxContext::root() } /// Returns `true` if `span` originates in a derive-macro's expansion. pub fn in_derive_expansion(self) -> bool { matches!( self.ctxt().outer_expn_data().kind, ExpnKind::Macro { kind: MacroKind::Derive, name: _, proc_macro: _ } ) } #[inline] pub fn with_root_ctxt(lo: BytePos, hi: BytePos) -> Span { Span::new(lo, hi, SyntaxContext::root()) } /// Returns a new span representing an empty span at the beginning of this span. #[inline] pub fn shrink_to_lo(self) -> Span { let span = self.data(); span.with_hi(span.lo) } /// Returns a new span representing an empty span at the end of this span. #[inline] pub fn shrink_to_hi(self) -> Span { let span = self.data(); span.with_lo(span.hi) } #[inline] /// Returns `true` if `hi == lo`. pub fn is_empty(&self) -> bool { let span = self.data(); span.hi == span.lo } /// Returns `self` if `self` is not the dummy span, and `other` otherwise. 
pub fn substitute_dummy(self, other: Span) -> Span { if self.is_dummy() { other } else { self } } /// Returns `true` if `self` fully encloses `other`. pub fn contains(self, other: Span) -> bool { let span = self.data(); let other = other.data(); span.lo <= other.lo && other.hi <= span.hi } /// Returns `true` if `self` touches `other`. pub fn overlaps(self, other: Span) -> bool { let span = self.data(); let other = other.data(); span.lo < other.hi && other.lo < span.hi } /// Returns `true` if the spans are equal with regards to the source text. /// /// Use this instead of `==` when either span could be generated code, /// and you only care that they point to the same bytes of source text. pub fn source_equal(&self, other: &Span) -> bool { let span = self.data(); let other = other.data(); span.lo == other.lo && span.hi == other.hi } /// Returns `Some(span)`, where the start is trimmed by the end of `other`. pub fn trim_start(self, other: Span) -> Option<Span> { let span = self.data(); let other = other.data(); if span.hi > other.hi { Some(span.with_lo(cmp::max(span.lo, other.hi))) } else { None } } /// Returns the source span -- this is either the supplied span, or the span for /// the macro callsite that expanded to it. pub fn source_callsite(self) -> Span { let expn_data = self.ctxt().outer_expn_data(); if !expn_data.is_root() { expn_data.call_site.source_callsite() } else { self } } /// The `Span` for the tokens in the previous macro expansion from which `self` was generated, /// if any. pub fn parent(self) -> Option<Span> { let expn_data = self.ctxt().outer_expn_data(); if !expn_data.is_root() { Some(expn_data.call_site) } else { None } } /// Edition of the crate from which this span came. 
pub fn edition(self) -> edition::Edition { self.ctxt().edition() } #[inline] pub fn rust_2015(&self) -> bool { self.edition() == edition::Edition::Edition2015 } #[inline] pub fn rust_2018(&self) -> bool { self.edition() >= edition::Edition::Edition2018 } #[inline] pub fn rust_2021(&self) -> bool { self.edition() >= edition::Edition::Edition2021 } /// Returns the source callee. /// /// Returns `None` if the supplied span has no expansion trace, /// else returns the `ExpnData` for the macro definition /// corresponding to the source callsite. pub fn source_callee(self) -> Option<ExpnData> { fn source_callee(expn_data: ExpnData) -> ExpnData { let next_expn_data = expn_data.call_site.ctxt().outer_expn_data(); if !next_expn_data.is_root() { source_callee(next_expn_data) } else { expn_data } } let expn_data = self.ctxt().outer_expn_data(); if !expn_data.is_root() { Some(source_callee(expn_data)) } else { None } } /// Checks if a span is "internal" to a macro in which `#[unstable]` /// items can be used (that is, a macro marked with /// `#[allow_internal_unstable]`). pub fn allows_unstable(&self, feature: Symbol) -> bool { self.ctxt() .outer_expn_data() .allow_internal_unstable .map_or(false, |features| features.iter().any(|&f| f == feature)) } /// Checks if this span arises from a compiler desugaring of kind `kind`. pub fn is_desugaring(&self, kind: DesugaringKind) -> bool { match self.ctxt().outer_expn_data().kind { ExpnKind::Desugaring(k) => k == kind, _ => false, } } /// Returns the compiler desugaring that created this span, or `None` /// if this span is not from a desugaring. pub fn desugaring_kind(&self) -> Option<DesugaringKind> { match self.ctxt().outer_expn_data().kind { ExpnKind::Desugaring(k) => Some(k), _ => None, } } /// Checks if a span is "internal" to a macro in which `unsafe` /// can be used without triggering the `unsafe_code` lint. // (that is, a macro marked with `#[allow_internal_unsafe]`). 
pub fn allows_unsafe(&self) -> bool { self.ctxt().outer_expn_data().allow_internal_unsafe } pub fn macro_backtrace(mut self) -> impl Iterator<Item = ExpnData> { let mut prev_span = DUMMY_SP; std::iter::from_fn(move || { loop { let expn_data = self.ctxt().outer_expn_data(); if expn_data.is_root() { return None; } let is_recursive = expn_data.call_site.source_equal(&prev_span); prev_span = self; self = expn_data.call_site; // Don't print recursive invocations. if !is_recursive { return Some(expn_data); } } }) } /// Returns a `Span` that would enclose both `self` and `end`. /// /// ```text /// ____ ___ /// self lorem ipsum end /// ^^^^^^^^^^^^^^^^^^^^ /// ``` pub fn to(self, end: Span) -> Span { let span_data = self.data(); let end_data = end.data(); // FIXME(jseyfried): `self.ctxt` should always equal `end.ctxt` here (cf. issue #23480). // Return the macro span on its own to avoid weird diagnostic output. It is preferable to // have an incomplete span than a completely nonsensical one. if span_data.ctxt != end_data.ctxt { if span_data.ctxt == SyntaxContext::root() { return end; } else if end_data.ctxt == SyntaxContext::root() { return self; } // Both spans fall within a macro. // FIXME(estebank): check if it is the *same* macro. } Span::new( cmp::min(span_data.lo, end_data.lo), cmp::max(span_data.hi, end_data.hi), if span_data.ctxt == SyntaxContext::root() { end_data.ctxt } else { span_data.ctxt }, ) } /// Returns a `Span` between the end of `self` to the beginning of `end`. /// /// ```text /// ____ ___ /// self lorem ipsum end /// ^^^^^^^^^^^^^ /// ``` pub fn between(self, end: Span) -> Span { let span = self.data(); let end = end.data(); Span::new( span.hi, end.lo, if end.ctxt == SyntaxContext::root() { end.ctxt } else { span.ctxt }, ) } /// Returns a `Span` from the beginning of `self` until the beginning of `end`. 
/// /// ```text /// ____ ___ /// self lorem ipsum end /// ^^^^^^^^^^^^^^^^^ /// ``` pub fn until(self, end: Span) -> Span { let span = self.data(); let end = end.data(); Span::new( span.lo, end.lo, if end.ctxt == SyntaxContext::root() { end.ctxt } else { span.ctxt }, ) } pub fn from_inner(self, inner: InnerSpan) -> Span { let span = self.data(); Span::new( span.lo + BytePos::from_usize(inner.start), span.lo + BytePos::from_usize(inner.end), span.ctxt, ) } /// Equivalent of `Span::def_site` from the proc macro API, /// except that the location is taken from the `self` span. pub fn with_def_site_ctxt(self, expn_id: ExpnId) -> Span { self.with_ctxt_from_mark(expn_id, Transparency::Opaque) } /// Equivalent of `Span::call_site` from the proc macro API, /// except that the location is taken from the `self` span. pub fn with_call_site_ctxt(&self, expn_id: ExpnId) -> Span { self.with_ctxt_from_mark(expn_id, Transparency::Transparent) } /// Equivalent of `Span::mixed_site` from the proc macro API, /// except that the location is taken from the `self` span. pub fn with_mixed_site_ctxt(&self, expn_id: ExpnId) -> Span { self.with_ctxt_from_mark(expn_id, Transparency::SemiTransparent) } /// Produces a span with the same location as `self` and context produced by a macro with the /// given ID and transparency, assuming that macro was defined directly and not produced by /// some other macro (which is the case for built-in and procedural macros). 
pub fn with_ctxt_from_mark(self, expn_id: ExpnId, transparency: Transparency) -> Span { self.with_ctxt(SyntaxContext::root().apply_mark(expn_id, transparency)) } #[inline] pub fn apply_mark(self, expn_id: ExpnId, transparency: Transparency) -> Span { let span = self.data(); span.with_ctxt(span.ctxt.apply_mark(expn_id, transparency)) } #[inline] pub fn remove_mark(&mut self) -> ExpnId { let mut span = self.data(); let mark = span.ctxt.remove_mark(); *self = Span::new(span.lo, span.hi, span.ctxt); mark } #[inline] pub fn adjust(&mut self, expn_id: ExpnId) -> Option<ExpnId> { let mut span = self.data(); let mark = span.ctxt.adjust(expn_id); *self = Span::new(span.lo, span.hi, span.ctxt); mark } #[inline] pub fn normalize_to_macros_2_0_and_adjust(&mut self, expn_id: ExpnId) -> Option<ExpnId> { let mut span = self.data(); let mark = span.ctxt.normalize_to_macros_2_0_and_adjust(expn_id); *self = Span::new(span.lo, span.hi, span.ctxt); mark } #[inline] pub fn glob_adjust(&mut self, expn_id: ExpnId, glob_span: Span) -> Option<Option<ExpnId>> { let mut span = self.data(); let mark = span.ctxt.glob_adjust(expn_id, glob_span); *self = Span::new(span.lo, span.hi, span.ctxt); mark } #[inline] pub fn reverse_glob_adjust( &mut self, expn_id: ExpnId, glob_span: Span, ) -> Option<Option<ExpnId>> { let mut span = self.data(); let mark = span.ctxt.reverse_glob_adjust(expn_id, glob_span); *self = Span::new(span.lo, span.hi, span.ctxt); mark } #[inline] pub fn normalize_to_macros_2_0(self) -> Span { let span = self.data(); span.with_ctxt(span.ctxt.normalize_to_macros_2_0()) } #[inline] pub fn normalize_to_macro_rules(self) -> Span { let span = self.data(); span.with_ctxt(span.ctxt.normalize_to_macro_rules()) } } /// A span together with some additional data. #[derive(Clone, Debug)] pub struct SpanLabel { /// The span we are going to include in the final snippet. pub span: Span, /// Is this a primary span? 
This is the "locus" of the message, /// and is indicated with a `^^^^` underline, versus `----`. pub is_primary: bool, /// What label should we attach to this span (if any)? pub label: Option<String>, } impl Default for Span { fn default() -> Self { DUMMY_SP } } impl<E: Encoder> Encodable<E> for Span { default fn encode(&self, s: &mut E) -> Result<(), E::Error> { let span = self.data(); s.emit_struct(false, |s| { s.emit_struct_field("lo", true, |s| span.lo.encode(s))?; s.emit_struct_field("hi", false, |s| span.hi.encode(s)) }) } } impl<D: Decoder> Decodable<D> for Span { default fn decode(s: &mut D) -> Result<Span, D::Error> { s.read_struct(|d| { let lo = d.read_struct_field("lo", Decodable::decode)?; let hi = d.read_struct_field("hi", Decodable::decode)?; Ok(Span::new(lo, hi, SyntaxContext::root())) }) } } /// Calls the provided closure, using the provided `SourceMap` to format /// any spans that are debug-printed during the closure's execution. /// /// Normally, the global `TyCtxt` is used to retrieve the `SourceMap` /// (see `rustc_interface::callbacks::span_debug1`). However, some parts /// of the compiler (e.g. `rustc_parse`) may debug-print `Span`s before /// a `TyCtxt` is available. In this case, we fall back to /// the `SourceMap` provided to this function. If that is not available, /// we fall back to printing the raw `Span` field values. 
pub fn with_source_map<T, F: FnOnce() -> T>(source_map: Lrc<SourceMap>, f: F) -> T { SESSION_GLOBALS.with(|session_globals| { *session_globals.source_map.borrow_mut() = Some(source_map); }); struct ClearSourceMap; impl Drop for ClearSourceMap { fn drop(&mut self) { SESSION_GLOBALS.with(|session_globals| { session_globals.source_map.borrow_mut().take(); }); } } let _guard = ClearSourceMap; f() } pub fn debug_with_source_map( span: Span, f: &mut fmt::Formatter<'_>, source_map: &SourceMap, ) -> fmt::Result { write!(f, "{} ({:?})", source_map.span_to_diagnostic_string(span), span.ctxt()) } pub fn default_span_debug(span: Span, f: &mut fmt::Formatter<'_>) -> fmt::Result { SESSION_GLOBALS.with(|session_globals| { if let Some(source_map) = &*session_globals.source_map.borrow() { debug_with_source_map(span, f, source_map) } else { f.debug_struct("Span") .field("lo", &span.lo()) .field("hi", &span.hi()) .field("ctxt", &span.ctxt()) .finish() } }) } impl fmt::Debug for Span { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { (*SPAN_DEBUG)(*self, f) } } impl fmt::Debug for SpanData { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { (*SPAN_DEBUG)(Span::new(self.lo, self.hi, self.ctxt), f) } } impl MultiSpan { #[inline] pub fn new() -> MultiSpan { MultiSpan { primary_spans: vec![], span_labels: vec![] } } pub fn from_span(primary_span: Span) -> MultiSpan { MultiSpan { primary_spans: vec![primary_span], span_labels: vec![] } } pub fn from_spans(mut vec: Vec<Span>) -> MultiSpan { vec.sort(); MultiSpan { primary_spans: vec, span_labels: vec![] } } pub fn push_span_label(&mut self, span: Span, label: String) { self.span_labels.push((span, label)); } /// Selects the first primary span (if any). pub fn primary_span(&self) -> Option<Span> { self.primary_spans.first().cloned() } /// Returns all primary spans. pub fn primary_spans(&self) -> &[Span] { &self.primary_spans } /// Returns `true` if any of the primary spans are displayable. 
pub fn has_primary_spans(&self) -> bool { self.primary_spans.iter().any(|sp| !sp.is_dummy()) } /// Returns `true` if this contains only a dummy primary span with any hygienic context. pub fn is_dummy(&self) -> bool { let mut is_dummy = true; for span in &self.primary_spans { if !span.is_dummy() { is_dummy = false; } } is_dummy } /// Replaces all occurrences of one Span with another. Used to move `Span`s in areas that don't /// display well (like std macros). Returns whether replacements occurred. pub fn replace(&mut self, before: Span, after: Span) -> bool { let mut replacements_occurred = false; for primary_span in &mut self.primary_spans { if *primary_span == before { *primary_span = after; replacements_occurred = true; } } for span_label in &mut self.span_labels { if span_label.0 == before { span_label.0 = after; replacements_occurred = true; } } replacements_occurred } /// Returns the strings to highlight. We always ensure that there /// is an entry for each of the primary spans -- for each primary /// span `P`, if there is at least one label with span `P`, we return /// those labels (marked as primary). But otherwise we return /// `SpanLabel` instances with empty labels. pub fn span_labels(&self) -> Vec<SpanLabel> { let is_primary = |span| self.primary_spans.contains(&span); let mut span_labels = self .span_labels .iter() .map(|&(span, ref label)| SpanLabel { span, is_primary: is_primary(span), label: Some(label.clone()), }) .collect::<Vec<_>>(); for &span in &self.primary_spans { if !span_labels.iter().any(|sl| sl.span == span) { span_labels.push(SpanLabel { span, is_primary: true, label: None }); } } span_labels } /// Returns `true` if any of the span labels is displayable. 
pub fn has_span_labels(&self) -> bool { self.span_labels.iter().any(|(sp, _)| !sp.is_dummy()) } } impl From<Span> for MultiSpan { fn from(span: Span) -> MultiSpan { MultiSpan::from_span(span) } } impl From<Vec<Span>> for MultiSpan { fn from(spans: Vec<Span>) -> MultiSpan { MultiSpan::from_spans(spans) } } /// Identifies an offset of a multi-byte character in a `SourceFile`. #[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug)] pub struct MultiByteChar { /// The absolute offset of the character in the `SourceMap`. pub pos: BytePos, /// The number of bytes, `>= 2`. pub bytes: u8, } /// Identifies an offset of a non-narrow character in a `SourceFile`. #[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug)] pub enum NonNarrowChar { /// Represents a zero-width character. ZeroWidth(BytePos), /// Represents a wide (full-width) character. Wide(BytePos), /// Represents a tab character, represented visually with a width of 4 characters. Tab(BytePos), } impl NonNarrowChar { fn new(pos: BytePos, width: usize) -> Self { match width { 0 => NonNarrowChar::ZeroWidth(pos), 2 => NonNarrowChar::Wide(pos), 4 => NonNarrowChar::Tab(pos), _ => panic!("width {} given for non-narrow character", width), } } /// Returns the absolute offset of the character in the `SourceMap`. pub fn pos(&self) -> BytePos { match *self { NonNarrowChar::ZeroWidth(p) | NonNarrowChar::Wide(p) | NonNarrowChar::Tab(p) => p, } } /// Returns the width of the character, 0 (zero-width) or 2 (wide). 
pub fn width(&self) -> usize { match *self { NonNarrowChar::ZeroWidth(_) => 0, NonNarrowChar::Wide(_) => 2, NonNarrowChar::Tab(_) => 4, } } } impl Add<BytePos> for NonNarrowChar { type Output = Self; fn add(self, rhs: BytePos) -> Self { match self { NonNarrowChar::ZeroWidth(pos) => NonNarrowChar::ZeroWidth(pos + rhs), NonNarrowChar::Wide(pos) => NonNarrowChar::Wide(pos + rhs), NonNarrowChar::Tab(pos) => NonNarrowChar::Tab(pos + rhs), } } } impl Sub<BytePos> for NonNarrowChar { type Output = Self; fn sub(self, rhs: BytePos) -> Self { match self { NonNarrowChar::ZeroWidth(pos) => NonNarrowChar::ZeroWidth(pos - rhs), NonNarrowChar::Wide(pos) => NonNarrowChar::Wide(pos - rhs), NonNarrowChar::Tab(pos) => NonNarrowChar::Tab(pos - rhs), } } } /// Identifies an offset of a character that was normalized away from `SourceFile`. #[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug)] pub struct NormalizedPos { /// The absolute offset of the character in the `SourceMap`. pub pos: BytePos, /// The difference between original and normalized string at position. pub diff: u32, } #[derive(PartialEq, Eq, Clone, Debug)] pub enum ExternalSource { /// No external source has to be loaded, since the `SourceFile` represents a local crate. Unneeded, Foreign { kind: ExternalSourceKind, /// This SourceFile's byte-offset within the source_map of its original crate. original_start_pos: BytePos, /// The end of this SourceFile within the source_map of its original crate. original_end_pos: BytePos, }, } /// The state of the lazy external source loading mechanism of a `SourceFile`. #[derive(PartialEq, Eq, Clone, Debug)] pub enum ExternalSourceKind { /// The external source has been loaded already. Present(Lrc<String>), /// No attempt has been made to load the external source. AbsentOk, /// A failed attempt has been made to load the external source. 
AbsentErr, Unneeded, } impl ExternalSource { pub fn get_source(&self) -> Option<&Lrc<String>> { match self { ExternalSource::Foreign { kind: ExternalSourceKind::Present(ref src), .. } => Some(src), _ => None, } } } #[derive(Debug)] pub struct OffsetOverflowError; #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Encodable, Decodable)] pub enum SourceFileHashAlgorithm { Md5, Sha1, Sha256, } impl FromStr for SourceFileHashAlgorithm { type Err = (); fn from_str(s: &str) -> Result<SourceFileHashAlgorithm, ()> { match s { "md5" => Ok(SourceFileHashAlgorithm::Md5), "sha1" => Ok(SourceFileHashAlgorithm::Sha1), "sha256" => Ok(SourceFileHashAlgorithm::Sha256), _ => Err(()), } } } rustc_data_structures::impl_stable_hash_via_hash!(SourceFileHashAlgorithm); /// The hash of the on-disk source file used for debug info. #[derive(Copy, Clone, PartialEq, Eq, Debug)] #[derive(HashStable_Generic, Encodable, Decodable)] pub struct SourceFileHash { pub kind: SourceFileHashAlgorithm, value: [u8; 32], } impl SourceFileHash { pub fn new(kind: SourceFileHashAlgorithm, src: &str) -> SourceFileHash { let mut hash = SourceFileHash { kind, value: Default::default() }; let len = hash.hash_len(); let value = &mut hash.value[..len]; let data = src.as_bytes(); match kind { SourceFileHashAlgorithm::Md5 => { value.copy_from_slice(&Md5::digest(data)); } SourceFileHashAlgorithm::Sha1 => { value.copy_from_slice(&Sha1::digest(data)); } SourceFileHashAlgorithm::Sha256 => { value.copy_from_slice(&Sha256::digest(data)); } } hash } /// Check if the stored hash matches the hash of the string. pub fn matches(&self, src: &str) -> bool { Self::new(self.kind, src) == *self } /// The bytes of the hash. pub fn hash_bytes(&self) -> &[u8] { let len = self.hash_len(); &self.value[..len] } fn hash_len(&self) -> usize { match self.kind { SourceFileHashAlgorithm::Md5 => 16, SourceFileHashAlgorithm::Sha1 => 20, SourceFileHashAlgorithm::Sha256 => 32, } } } /// A single source in the [`SourceMap`]. 
#[derive(Clone)] pub struct SourceFile { /// The name of the file that the source came from. Source that doesn't /// originate from files has names between angle brackets by convention /// (e.g., `<anon>`). pub name: FileName, /// The complete source code. pub src: Option<Lrc<String>>, /// The source code's hash. pub src_hash: SourceFileHash, /// The external source code (used for external crates, which will have a `None` /// value as `self.src`. pub external_src: Lock<ExternalSource>, /// The start position of this source in the `SourceMap`. pub start_pos: BytePos, /// The end position of this source in the `SourceMap`. pub end_pos: BytePos, /// Locations of lines beginnings in the source code. pub lines: Vec<BytePos>, /// Locations of multi-byte characters in the source code. pub multibyte_chars: Vec<MultiByteChar>, /// Width of characters that are not narrow in the source code. pub non_narrow_chars: Vec<NonNarrowChar>, /// Locations of characters removed during normalization. pub normalized_pos: Vec<NormalizedPos>, /// A hash of the filename, used for speeding up hashing in incremental compilation. pub name_hash: u128, /// Indicates which crate this `SourceFile` was imported from. pub cnum: CrateNum, } impl<S: Encoder> Encodable<S> for SourceFile { fn encode(&self, s: &mut S) -> Result<(), S::Error> { s.emit_struct(false, |s| { s.emit_struct_field("name", true, |s| self.name.encode(s))?; s.emit_struct_field("src_hash", false, |s| self.src_hash.encode(s))?; s.emit_struct_field("start_pos", false, |s| self.start_pos.encode(s))?; s.emit_struct_field("end_pos", false, |s| self.end_pos.encode(s))?; s.emit_struct_field("lines", false, |s| { let lines = &self.lines[..]; // Store the length. s.emit_u32(lines.len() as u32)?; if !lines.is_empty() { // In order to preserve some space, we exploit the fact that // the lines list is sorted and individual lines are // probably not that long. 
Because of that we can store lines // as a difference list, using as little space as possible // for the differences. let max_line_length = if lines.len() == 1 { 0 } else { lines .array_windows() .map(|&[fst, snd]| snd - fst) .map(|bp| bp.to_usize()) .max() .unwrap() }; let bytes_per_diff: u8 = match max_line_length { 0..=0xFF => 1, 0x100..=0xFFFF => 2, _ => 4, }; // Encode the number of bytes used per diff. bytes_per_diff.encode(s)?; // Encode the first element. lines[0].encode(s)?; let diff_iter = lines[..].array_windows().map(|&[fst, snd]| snd - fst); match bytes_per_diff { 1 => { for diff in diff_iter { (diff.0 as u8).encode(s)? } } 2 => { for diff in diff_iter { (diff.0 as u16).encode(s)? } } 4 => { for diff in diff_iter { diff.0.encode(s)? } } _ => unreachable!(), } } Ok(()) })?; s.emit_struct_field("multibyte_chars", false, |s| self.multibyte_chars.encode(s))?; s.emit_struct_field("non_narrow_chars", false, |s| self.non_narrow_chars.encode(s))?; s.emit_struct_field("name_hash", false, |s| self.name_hash.encode(s))?; s.emit_struct_field("normalized_pos", false, |s| self.normalized_pos.encode(s))?; s.emit_struct_field("cnum", false, |s| self.cnum.encode(s)) }) } } impl<D: Decoder> Decodable<D> for SourceFile { fn decode(d: &mut D) -> Result<SourceFile, D::Error> { d.read_struct(|d| { let name: FileName = d.read_struct_field("name", |d| Decodable::decode(d))?; let src_hash: SourceFileHash = d.read_struct_field("src_hash", |d| Decodable::decode(d))?; let start_pos: BytePos = d.read_struct_field("start_pos", |d| Decodable::decode(d))?; let end_pos: BytePos = d.read_struct_field("end_pos", |d| Decodable::decode(d))?; let lines: Vec<BytePos> = d.read_struct_field("lines", |d| { let num_lines: u32 = Decodable::decode(d)?; let mut lines = Vec::with_capacity(num_lines as usize); if num_lines > 0 { // Read the number of bytes used per diff. let bytes_per_diff: u8 = Decodable::decode(d)?; // Read the first element. 
let mut line_start: BytePos = Decodable::decode(d)?; lines.push(line_start); for _ in 1..num_lines { let diff = match bytes_per_diff { 1 => d.read_u8()? as u32, 2 => d.read_u16()? as u32, 4 => d.read_u32()?, _ => unreachable!(), }; line_start = line_start + BytePos(diff); lines.push(line_start); } } Ok(lines) })?; let multibyte_chars: Vec<MultiByteChar> = d.read_struct_field("multibyte_chars", |d| Decodable::decode(d))?; let non_narrow_chars: Vec<NonNarrowChar> = d.read_struct_field("non_narrow_chars", |d| Decodable::decode(d))?; let name_hash: u128 = d.read_struct_field("name_hash", |d| Decodable::decode(d))?; let normalized_pos: Vec<NormalizedPos> = d.read_struct_field("normalized_pos", |d| Decodable::decode(d))?; let cnum: CrateNum = d.read_struct_field("cnum", |d| Decodable::decode(d))?; Ok(SourceFile { name, start_pos, end_pos, src: None, src_hash, // Unused - the metadata decoder will construct // a new SourceFile, filling in `external_src` properly external_src: Lock::new(ExternalSource::Unneeded), lines, multibyte_chars, non_narrow_chars, normalized_pos, name_hash, cnum, }) }) } } impl fmt::Debug for SourceFile { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { write!(fmt, "SourceFile({:?})", self.name) } } impl SourceFile { pub fn new( name: FileName, mut src: String, start_pos: BytePos, hash_kind: SourceFileHashAlgorithm, ) -> Self { // Compute the file hash before any normalization. 
let src_hash = SourceFileHash::new(hash_kind, &src); let normalized_pos = normalize_src(&mut src, start_pos); let name_hash = { let mut hasher: StableHasher = StableHasher::new(); name.hash(&mut hasher); hasher.finish::<u128>() }; let end_pos = start_pos.to_usize() + src.len(); assert!(end_pos <= u32::MAX as usize); let (lines, multibyte_chars, non_narrow_chars) = analyze_source_file::analyze_source_file(&src[..], start_pos); SourceFile { name, src: Some(Lrc::new(src)), src_hash, external_src: Lock::new(ExternalSource::Unneeded), start_pos, end_pos: Pos::from_usize(end_pos), lines, multibyte_chars, non_narrow_chars, normalized_pos, name_hash, cnum: LOCAL_CRATE, } } /// Returns the `BytePos` of the beginning of the current line. pub fn line_begin_pos(&self, pos: BytePos) -> BytePos { let line_index = self.lookup_line(pos).unwrap(); self.lines[line_index] } /// Add externally loaded source. /// If the hash of the input doesn't match or no input is supplied via None, /// it is interpreted as an error and the corresponding enum variant is set. /// The return value signifies whether some kind of source is present. pub fn add_external_src<F>(&self, get_src: F) -> bool where F: FnOnce() -> Option<String>, { if matches!( *self.external_src.borrow(), ExternalSource::Foreign { kind: ExternalSourceKind::AbsentOk, .. } ) { let src = get_src(); let mut external_src = self.external_src.borrow_mut(); // Check that no-one else have provided the source while we were getting it if let ExternalSource::Foreign { kind: src_kind @ ExternalSourceKind::AbsentOk, .. } = &mut *external_src { if let Some(mut src) = src { // The src_hash needs to be computed on the pre-normalized src. 
if self.src_hash.matches(&src) { normalize_src(&mut src, BytePos::from_usize(0)); *src_kind = ExternalSourceKind::Present(Lrc::new(src)); return true; } } else { *src_kind = ExternalSourceKind::AbsentErr; } false } else { self.src.is_some() || external_src.get_source().is_some() } } else { self.src.is_some() || self.external_src.borrow().get_source().is_some() } } /// Gets a line from the list of pre-computed line-beginnings. /// The line number here is 0-based. pub fn get_line(&self, line_number: usize) -> Option<Cow<'_, str>> { fn get_until_newline(src: &str, begin: usize) -> &str { // We can't use `lines.get(line_number+1)` because we might // be parsing when we call this function and thus the current // line is the last one we have line info for. let slice = &src[begin..]; match slice.find('\n') { Some(e) => &slice[..e], None => slice, } } let begin = { let line = self.lines.get(line_number)?; let begin: BytePos = *line - self.start_pos; begin.to_usize() }; if let Some(ref src) = self.src { Some(Cow::from(get_until_newline(src, begin))) } else if let Some(src) = self.external_src.borrow().get_source() { Some(Cow::Owned(String::from(get_until_newline(src, begin)))) } else { None } } pub fn is_real_file(&self) -> bool { self.name.is_real() } pub fn is_imported(&self) -> bool { self.src.is_none() } pub fn count_lines(&self) -> usize { self.lines.len() } /// Finds the line containing the given position. The return value is the /// index into the `lines` array of this `SourceFile`, not the 1-based line /// number. If the source_file is empty or the position is located before the /// first line, `None` is returned. 
pub fn lookup_line(&self, pos: BytePos) -> Option<usize> { if self.lines.is_empty() { return None; } let line_index = lookup_line(&self.lines[..], pos); assert!(line_index < self.lines.len() as isize); if line_index >= 0 { Some(line_index as usize) } else { None } } pub fn line_bounds(&self, line_index: usize) -> Range<BytePos> { if self.is_empty() { return self.start_pos..self.end_pos; } assert!(line_index < self.lines.len()); if line_index == (self.lines.len() - 1) { self.lines[line_index]..self.end_pos } else { self.lines[line_index]..self.lines[line_index + 1] } } /// Returns whether or not the file contains the given `SourceMap` byte /// position. The position one past the end of the file is considered to be /// contained by the file. This implies that files for which `is_empty` /// returns true still contain one byte position according to this function. #[inline] pub fn contains(&self, byte_pos: BytePos) -> bool { byte_pos >= self.start_pos && byte_pos <= self.end_pos } #[inline] pub fn is_empty(&self) -> bool { self.start_pos == self.end_pos } /// Calculates the original byte position relative to the start of the file /// based on the given byte position. pub fn original_relative_byte_pos(&self, pos: BytePos) -> BytePos { // Diff before any records is 0. Otherwise use the previously recorded // diff as that applies to the following characters until a new diff // is recorded. let diff = match self.normalized_pos.binary_search_by(|np| np.pos.cmp(&pos)) { Ok(i) => self.normalized_pos[i].diff, Err(i) if i == 0 => 0, Err(i) => self.normalized_pos[i - 1].diff, }; BytePos::from_u32(pos.0 - self.start_pos.0 + diff) } /// Converts an absolute `BytePos` to a `CharPos` relative to the `SourceFile`. pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos { // The number of extra bytes due to multibyte chars in the `SourceFile`. 
let mut total_extra_bytes = 0; for mbc in self.multibyte_chars.iter() { debug!("{}-byte char at {:?}", mbc.bytes, mbc.pos); if mbc.pos < bpos { // Every character is at least one byte, so we only // count the actual extra bytes. total_extra_bytes += mbc.bytes as u32 - 1; // We should never see a byte position in the middle of a // character. assert!(bpos.to_u32() >= mbc.pos.to_u32() + mbc.bytes as u32); } else { break; } } assert!(self.start_pos.to_u32() + total_extra_bytes <= bpos.to_u32()); CharPos(bpos.to_usize() - self.start_pos.to_usize() - total_extra_bytes as usize) } /// Looks up the file's (1-based) line number and (0-based `CharPos`) column offset, for a /// given `BytePos`. pub fn lookup_file_pos(&self, pos: BytePos) -> (usize, CharPos) { let chpos = self.bytepos_to_file_charpos(pos); match self.lookup_line(pos) { Some(a) => { let line = a + 1; // Line numbers start at 1 let linebpos = self.lines[a]; let linechpos = self.bytepos_to_file_charpos(linebpos); let col = chpos - linechpos; debug!("byte pos {:?} is on the line at byte pos {:?}", pos, linebpos); debug!("char pos {:?} is on the line at char pos {:?}", chpos, linechpos); debug!("byte is on line: {}", line); assert!(chpos >= linechpos); (line, col) } None => (0, chpos), } } /// Looks up the file's (1-based) line number, (0-based `CharPos`) column offset, and (0-based) /// column offset when displayed, for a given `BytePos`. 
pub fn lookup_file_pos_with_col_display(&self, pos: BytePos) -> (usize, CharPos, usize) { let (line, col_or_chpos) = self.lookup_file_pos(pos); if line > 0 { let col = col_or_chpos; let linebpos = self.lines[line - 1]; let col_display = { let start_width_idx = self .non_narrow_chars .binary_search_by_key(&linebpos, |x| x.pos()) .unwrap_or_else(|x| x); let end_width_idx = self .non_narrow_chars .binary_search_by_key(&pos, |x| x.pos()) .unwrap_or_else(|x| x); let special_chars = end_width_idx - start_width_idx; let non_narrow: usize = self.non_narrow_chars[start_width_idx..end_width_idx] .iter() .map(|x| x.width()) .sum(); col.0 - special_chars + non_narrow }; (line, col, col_display) } else { let chpos = col_or_chpos; let col_display = { let end_width_idx = self .non_narrow_chars .binary_search_by_key(&pos, |x| x.pos()) .unwrap_or_else(|x| x); let non_narrow: usize = self.non_narrow_chars[0..end_width_idx].iter().map(|x| x.width()).sum(); chpos.0 - end_width_idx + non_narrow }; (0, chpos, col_display) } } } /// Normalizes the source code and records the normalizations. fn normalize_src(src: &mut String, start_pos: BytePos) -> Vec<NormalizedPos> { let mut normalized_pos = vec![]; remove_bom(src, &mut normalized_pos); normalize_newlines(src, &mut normalized_pos); // Offset all the positions by start_pos to match the final file positions. for np in &mut normalized_pos { np.pos.0 += start_pos.0; } normalized_pos } /// Removes UTF-8 BOM, if any. fn remove_bom(src: &mut String, normalized_pos: &mut Vec<NormalizedPos>) { if src.starts_with('\u{feff}') { src.drain(..3); normalized_pos.push(NormalizedPos { pos: BytePos(0), diff: 3 }); } } /// Replaces `\r\n` with `\n` in-place in `src`. /// /// Returns error if there's a lone `\r` in the string. fn normalize_newlines(src: &mut String, normalized_pos: &mut Vec<NormalizedPos>) { if !src.as_bytes().contains(&b'\r') { return; } // We replace `\r\n` with `\n` in-place, which doesn't break utf-8 encoding. 
// While we *can* call `as_mut_vec` and do surgery on the live string // directly, let's rather steal the contents of `src`. This makes the code // safe even if a panic occurs. let mut buf = std::mem::replace(src, String::new()).into_bytes(); let mut gap_len = 0; let mut tail = buf.as_mut_slice(); let mut cursor = 0; let original_gap = normalized_pos.last().map_or(0, |l| l.diff); loop { let idx = match find_crlf(&tail[gap_len..]) { None => tail.len(), Some(idx) => idx + gap_len, }; tail.copy_within(gap_len..idx, 0); tail = &mut tail[idx - gap_len..]; if tail.len() == gap_len { break; } cursor += idx - gap_len; gap_len += 1; normalized_pos.push(NormalizedPos { pos: BytePos::from_usize(cursor + 1), diff: original_gap + gap_len as u32, }); } // Account for removed `\r`. // After `set_len`, `buf` is guaranteed to contain utf-8 again. let new_len = buf.len() - gap_len; unsafe { buf.set_len(new_len); *src = String::from_utf8_unchecked(buf); } fn find_crlf(src: &[u8]) -> Option<usize> { let mut search_idx = 0; while let Some(idx) = find_cr(&src[search_idx..]) { if src[search_idx..].get(idx + 1) != Some(&b'\n') { search_idx += idx + 1; continue; } return Some(search_idx + idx); } None } fn find_cr(src: &[u8]) -> Option<usize> { src.iter().position(|&b| b == b'\r') } } // _____________________________________________________________________________ // Pos, BytePos, CharPos // pub trait Pos { fn from_usize(n: usize) -> Self; fn to_usize(&self) -> usize; fn from_u32(n: u32) -> Self; fn to_u32(&self) -> u32; } macro_rules! 
impl_pos { ( $( $(#[$attr:meta])* $vis:vis struct $ident:ident($inner_vis:vis $inner_ty:ty); )* ) => { $( $(#[$attr])* $vis struct $ident($inner_vis $inner_ty); impl Pos for $ident { #[inline(always)] fn from_usize(n: usize) -> $ident { $ident(n as $inner_ty) } #[inline(always)] fn to_usize(&self) -> usize { self.0 as usize } #[inline(always)] fn from_u32(n: u32) -> $ident { $ident(n as $inner_ty) } #[inline(always)] fn to_u32(&self) -> u32 { self.0 as u32 } } impl Add for $ident { type Output = $ident; #[inline(always)] fn add(self, rhs: $ident) -> $ident { $ident(self.0 + rhs.0) } } impl Sub for $ident { type Output = $ident; #[inline(always)] fn sub(self, rhs: $ident) -> $ident { $ident(self.0 - rhs.0) } } )* }; } impl_pos! { /// A byte offset. /// /// Keep this small (currently 32-bits), as AST contains a lot of them. #[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)] pub struct BytePos(pub u32); /// A character offset. /// /// Because of multibyte UTF-8 characters, a byte offset /// is not equivalent to a character offset. The [`SourceMap`] will convert [`BytePos`] /// values to `CharPos` values as necessary. #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)] pub struct CharPos(pub usize); } impl<S: rustc_serialize::Encoder> Encodable<S> for BytePos { fn encode(&self, s: &mut S) -> Result<(), S::Error> { s.emit_u32(self.0) } } impl<D: rustc_serialize::Decoder> Decodable<D> for BytePos { fn decode(d: &mut D) -> Result<BytePos, D::Error> { Ok(BytePos(d.read_u32()?)) } } // _____________________________________________________________________________ // Loc, SourceFileAndLine, SourceFileAndBytePos // /// A source code location used for error reporting. #[derive(Debug, Clone)] pub struct Loc { /// Information about the original source. pub file: Lrc<SourceFile>, /// The (1-based) line number. pub line: usize, /// The (0-based) column offset. pub col: CharPos, /// The (0-based) column offset when displayed. 
pub col_display: usize, } // Used to be structural records. #[derive(Debug)] pub struct SourceFileAndLine { pub sf: Lrc<SourceFile>, pub line: usize, } #[derive(Debug)] pub struct SourceFileAndBytePos { pub sf: Lrc<SourceFile>, pub pos: BytePos, } #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct LineInfo { /// Index of line, starting from 0. pub line_index: usize, /// Column in line where span begins, starting from 0. pub start_col: CharPos, /// Column in line where span ends, starting from 0, exclusive. pub end_col: CharPos, } pub struct FileLines { pub file: Lrc<SourceFile>, pub lines: Vec<LineInfo>, } pub static SPAN_DEBUG: AtomicRef<fn(Span, &mut fmt::Formatter<'_>) -> fmt::Result> = AtomicRef::new(&(default_span_debug as fn(_, &mut fmt::Formatter<'_>) -> _)); // _____________________________________________________________________________ // SpanLinesError, SpanSnippetError, DistinctSources, MalformedSourceMapPositions // pub type FileLinesResult = Result<FileLines, SpanLinesError>; #[derive(Clone, PartialEq, Eq, Debug)] pub enum SpanLinesError { DistinctSources(DistinctSources), } #[derive(Clone, PartialEq, Eq, Debug)] pub enum SpanSnippetError { IllFormedSpan(Span), DistinctSources(DistinctSources), MalformedForSourcemap(MalformedSourceMapPositions), SourceNotAvailable { filename: FileName }, } #[derive(Clone, PartialEq, Eq, Debug)] pub struct DistinctSources { pub begin: (FileName, BytePos), pub end: (FileName, BytePos), } #[derive(Clone, PartialEq, Eq, Debug)] pub struct MalformedSourceMapPositions { pub name: FileName, pub source_len: usize, pub begin_pos: BytePos, pub end_pos: BytePos, } /// Range inside of a `Span` used for diagnostics when we only have access to relative positions. 
#[derive(Copy, Clone, PartialEq, Eq, Debug)] pub struct InnerSpan { pub start: usize, pub end: usize, } impl InnerSpan { pub fn new(start: usize, end: usize) -> InnerSpan { InnerSpan { start, end } } } // Given a slice of line start positions and a position, returns the index of // the line the position is on. Returns -1 if the position is located before // the first line. fn lookup_line(lines: &[BytePos], pos: BytePos) -> isize { match lines.binary_search(&pos) { Ok(line) => line as isize, Err(line) => line as isize - 1, } } /// Requirements for a `StableHashingContext` to be used in this crate. /// /// This is a hack to allow using the [`HashStable_Generic`] derive macro /// instead of implementing everything in rustc_middle. pub trait HashStableContext { fn hash_def_id(&mut self, _: DefId, hasher: &mut StableHasher); /// Obtains a cache for storing the `Fingerprint` of an `ExpnId`. /// This method allows us to have multiple `HashStableContext` implementations /// that hash things in a different way, without the results of one polluting /// the cache of the other. fn expn_id_cache() -> &'static LocalKey<ExpnIdCache>; fn hash_crate_num(&mut self, _: CrateNum, hasher: &mut StableHasher); fn hash_spans(&self) -> bool; fn span_data_to_lines_and_cols( &mut self, span: &SpanData, ) -> Option<(Lrc<SourceFile>, usize, BytePos, usize, BytePos)>; } impl<CTX> HashStable<CTX> for Span where CTX: HashStableContext, { /// Hashes a span in a stable way. We can't directly hash the span's `BytePos` /// fields (that would be similar to hashing pointers, since those are just /// offsets into the `SourceMap`). Instead, we hash the (file name, line, column) /// triple, which stays the same even if the containing `SourceFile` has moved /// within the `SourceMap`. /// /// Also note that we are hashing byte offsets for the column, not unicode /// codepoint offsets. For the purpose of the hash that's sufficient. 
/// Also, hashing filenames is expensive so we avoid doing it twice when the /// span starts and ends in the same file, which is almost always the case. fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { const TAG_VALID_SPAN: u8 = 0; const TAG_INVALID_SPAN: u8 = 1; if !ctx.hash_spans() { return; } self.ctxt().hash_stable(ctx, hasher); if self.is_dummy() { Hash::hash(&TAG_INVALID_SPAN, hasher); return; } // If this is not an empty or invalid span, we want to hash the last // position that belongs to it, as opposed to hashing the first // position past it. let span = self.data(); let (file, line_lo, col_lo, line_hi, col_hi) = match ctx.span_data_to_lines_and_cols(&span) { Some(pos) => pos, None => { Hash::hash(&TAG_INVALID_SPAN, hasher); return; } }; Hash::hash(&TAG_VALID_SPAN, hasher); // We truncate the stable ID hash and line and column numbers. The chances // of causing a collision this way should be minimal. Hash::hash(&(file.name_hash as u64), hasher); // Hash both the length and the end location (line/column) of a span. If we // hash only the length, for example, then two otherwise equal spans with // different end locations will have the same hash. This can cause a problem // during incremental compilation wherein a previous result for a query that // depends on the end location of a span will be incorrectly reused when the // end location of the span it depends on has changed (see issue #74890). A // similar analysis applies if some query depends specifically on the length // of the span, but we only hash the end location. So hash both. 
let col_lo_trunc = (col_lo.0 as u64) & 0xFF; let line_lo_trunc = ((line_lo as u64) & 0xFF_FF_FF) << 8; let col_hi_trunc = (col_hi.0 as u64) & 0xFF << 32; let line_hi_trunc = ((line_hi as u64) & 0xFF_FF_FF) << 40; let col_line = col_lo_trunc | line_lo_trunc | col_hi_trunc | line_hi_trunc; let len = (span.hi - span.lo).0; Hash::hash(&col_line, hasher); Hash::hash(&len, hasher); } } impl<CTX: HashStableContext> HashStable<CTX> for SyntaxContext { fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { const TAG_EXPANSION: u8 = 0; const TAG_NO_EXPANSION: u8 = 1; if *self == SyntaxContext::root() { TAG_NO_EXPANSION.hash_stable(ctx, hasher); } else { TAG_EXPANSION.hash_stable(ctx, hasher); let (expn_id, transparency) = self.outer_mark(); expn_id.hash_stable(ctx, hasher); transparency.hash_stable(ctx, hasher); } } } pub type ExpnIdCache = RefCell<Vec<Option<Fingerprint>>>; impl<CTX: HashStableContext> HashStable<CTX> for ExpnId { fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { const TAG_ROOT: u8 = 0; const TAG_NOT_ROOT: u8 = 1; if *self == ExpnId::root() { TAG_ROOT.hash_stable(ctx, hasher); return; } // Since the same expansion context is usually referenced many // times, we cache a stable hash of it and hash that instead of // recursing every time. let index = self.as_u32() as usize; let res = CTX::expn_id_cache().with(|cache| cache.borrow().get(index).copied().flatten()); if let Some(res) = res { res.hash_stable(ctx, hasher); } else { let new_len = index + 1; let mut sub_hasher = StableHasher::new(); TAG_NOT_ROOT.hash_stable(ctx, &mut sub_hasher); self.expn_data().hash_stable(ctx, &mut sub_hasher); let sub_hash: Fingerprint = sub_hasher.finish(); CTX::expn_id_cache().with(|cache| { let mut cache = cache.borrow_mut(); if cache.len() < new_len { cache.resize(new_len, None); } let prev = cache[index].replace(sub_hash); assert_eq!(prev, None, "Cache slot was filled"); }); sub_hash.hash_stable(ctx, hasher); } } }
34.949226
120
0.583106
7229e6ad07d7329a09967848275b9f91bf9565f5
2,078
#[derive(Debug)] pub struct PPURegisters { pub ppu_ctrl: u8, pub ppu_mask: u8, pub ppu_status: u8, pub oam_addr: u8, pub oam_data: u8, pub ppu_scroll: u8, pub ppu_addr: u8, pub ppu_data: u8, /// Contains the index of the last access, and if it was a write. pub(super) last_access_from: std::cell::Cell<Option<(u16, bool)>>, } impl Default for PPURegisters { fn default() -> Self { PPURegisters { ppu_ctrl: 0, ppu_mask: 0, ppu_status: 0, oam_addr: 0, oam_data: 0, ppu_scroll: 0, ppu_addr: 0, ppu_data: 0, last_access_from: std::cell::Cell::new(None), } } } impl PPURegisters { /// `0 -> PPUCTRL` /// `1 -> PPUMASK` /// `2 -> PPUSTATUS` /// `3 -> OAMADDR` /// `4 -> OAMDATA` /// `5 -> PPUSCROLL` /// `6 -> PPUADDR` /// `7 -> PPUDATA` /// `_ -> 255` pub fn read_by_index(&self, index: u16) -> u8 { self.last_access_from.set(Some((index, false))); match index { 0 => self.ppu_ctrl, 1 => self.ppu_mask, 2 => self.ppu_status, 3 => self.oam_addr, 4 => self.oam_data, 5 => self.ppu_scroll, 6 => self.ppu_addr, 7 => self.ppu_data, _ => { debug_assert!(false, "Illegal PPU register access!"); 255 } } } /// Same indexing as `read_by_index` pub fn write_by_index(&mut self, index: u16, v: u8) { self.last_access_from.set(Some((index, true))); match index { 0 => self.ppu_ctrl = v, 1 => self.ppu_mask = v, 2 => self.ppu_status = v, 3 => self.oam_addr = v, 4 => self.oam_data = v, 5 => self.ppu_scroll = v, 6 => self.ppu_addr = v, 7 => self.ppu_data = v, _ => { debug_assert!(false, "Illegal PPU register access!") } } } }
26.641026
70
0.473532
08088ed86ab49e9e2e8b795d77e35391dc9b204b
8,089
use std::task::{Context, Poll}; use std::{convert::TryFrom, error::Error, future::Future, net, pin::Pin, time}; use bytes::Bytes; use derive_more::From; use futures_core::Stream; use serde::Serialize; use crate::http::body::{Body, BodyStream}; use crate::http::error::HttpError; use crate::http::header::{self, HeaderMap, HeaderName, HeaderValue}; use crate::http::RequestHeadType; use crate::rt::time::{sleep, Sleep}; #[cfg(feature = "compress")] use crate::http::encoding::Decoder; #[cfg(feature = "compress")] use crate::http::Payload; use super::error::{FreezeRequestError, InvalidUrl, SendRequestError}; use super::response::ClientResponse; use super::ClientConfig; #[derive(Debug, From)] pub(crate) enum PrepForSendingError { Url(InvalidUrl), Http(HttpError), } impl From<PrepForSendingError> for FreezeRequestError { fn from(err: PrepForSendingError) -> FreezeRequestError { match err { PrepForSendingError::Url(e) => FreezeRequestError::Url(e), PrepForSendingError::Http(e) => FreezeRequestError::Http(e), } } } impl From<PrepForSendingError> for SendRequestError { fn from(err: PrepForSendingError) -> SendRequestError { match err { PrepForSendingError::Url(e) => SendRequestError::Url(e), PrepForSendingError::Http(e) => SendRequestError::Http(e), } } } /// Future that sends request's payload and resolves to a server response. 
#[must_use = "futures do nothing unless polled"] pub enum SendClientRequest { Fut( Pin<Box<dyn Future<Output = Result<ClientResponse, SendRequestError>>>>, Option<Pin<Box<Sleep>>>, bool, ), Err(Option<SendRequestError>), } impl SendClientRequest { pub(crate) fn new( send: Pin<Box<dyn Future<Output = Result<ClientResponse, SendRequestError>>>>, response_decompress: bool, timeout: Option<time::Duration>, ) -> SendClientRequest { let delay = timeout.map(|d| Box::pin(sleep(d))); SendClientRequest::Fut(send, delay, response_decompress) } } impl Future for SendClientRequest { type Output = Result<ClientResponse, SendRequestError>; fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> { let this = self.get_mut(); match this { SendClientRequest::Fut(send, delay, _response_decompress) => { if delay.is_some() { match Pin::new(delay.as_mut().unwrap()).poll(cx) { Poll::Pending => (), _ => return Poll::Ready(Err(SendRequestError::Timeout)), } } let res = match Pin::new(send).poll(cx) { Poll::Ready(res) => res, Poll::Pending => return Poll::Pending, }; #[cfg(feature = "compress")] let res = res.map(|mut res| { if *_response_decompress { let payload = res.take_payload(); res.set_payload(Payload::from_stream(Decoder::from_headers( payload, &res.head.headers, ))) } res }); Poll::Ready(res) } SendClientRequest::Err(ref mut e) => match e.take() { Some(e) => Poll::Ready(Err(e)), None => panic!("Attempting to call completed future"), }, } } } impl From<SendRequestError> for SendClientRequest { fn from(e: SendRequestError) -> Self { SendClientRequest::Err(Some(e)) } } impl From<HttpError> for SendClientRequest { fn from(e: HttpError) -> Self { SendClientRequest::Err(Some(e.into())) } } impl From<PrepForSendingError> for SendClientRequest { fn from(e: PrepForSendingError) -> Self { SendClientRequest::Err(Some(e.into())) } } impl RequestHeadType { pub(super) fn send_body<B>( self, addr: Option<net::SocketAddr>, response_decompress: bool, timeout: 
Option<time::Duration>, config: &ClientConfig, body: B, ) -> SendClientRequest where B: Into<Body>, { SendClientRequest::new( config.connector.send_request(self, body.into(), addr), response_decompress, timeout.or(config.timeout), ) } pub(super) fn send_json<T: Serialize>( mut self, addr: Option<net::SocketAddr>, response_decompress: bool, timeout: Option<time::Duration>, config: &ClientConfig, value: &T, ) -> SendClientRequest { let body = match serde_json::to_string(value) { Ok(body) => body, Err(e) => return SendRequestError::Error(Box::new(e)).into(), }; if let Err(e) = self.set_header_if_none(header::CONTENT_TYPE, "application/json") { return e.into(); } self.send_body( addr, response_decompress, timeout, config, Body::Bytes(Bytes::from(body)), ) } pub(super) fn send_form<T: Serialize>( mut self, addr: Option<net::SocketAddr>, response_decompress: bool, timeout: Option<time::Duration>, config: &ClientConfig, value: &T, ) -> SendClientRequest { let body = match serde_urlencoded::to_string(value) { Ok(body) => body, Err(e) => return SendRequestError::Error(Box::new(e)).into(), }; // set content-type if let Err(e) = self.set_header_if_none( header::CONTENT_TYPE, "application/x-www-form-urlencoded", ) { return e.into(); } self.send_body( addr, response_decompress, timeout, config, Body::Bytes(Bytes::from(body)), ) } pub(super) fn send_stream<S, E>( self, addr: Option<net::SocketAddr>, response_decompress: bool, timeout: Option<time::Duration>, config: &ClientConfig, stream: S, ) -> SendClientRequest where S: Stream<Item = Result<Bytes, E>> + Unpin + 'static, E: Error + 'static, { self.send_body( addr, response_decompress, timeout, config, Body::from_message(BodyStream::new(stream)), ) } pub(super) fn send( self, addr: Option<net::SocketAddr>, response_decompress: bool, timeout: Option<time::Duration>, config: &ClientConfig, ) -> SendClientRequest { self.send_body(addr, response_decompress, timeout, config, Body::Empty) } fn set_header_if_none<V>( &mut self, key: 
HeaderName, value: V, ) -> Result<(), HttpError> where HeaderValue: TryFrom<V>, <HeaderValue as TryFrom<V>>::Error: Into<HttpError>, { match self { RequestHeadType::Owned(head) => { if !head.headers.contains_key(&key) { match HeaderValue::try_from(value) { Ok(value) => head.headers.insert(key, value), Err(e) => return Err(e.into()), } } } RequestHeadType::Rc(head, extra_headers) => { if !head.headers.contains_key(&key) && !extra_headers.iter().any(|h| h.contains_key(&key)) { match HeaderValue::try_from(value) { Ok(v) => { let h = extra_headers.get_or_insert(HeaderMap::new()); h.insert(key, v) } Err(e) => return Err(e.into()), }; } } } Ok(()) } }
29.630037
89
0.534924
29dc93bedf4e4218ffcc733d9b2e8be0f2007d50
2,254
use std::fmt::{Display, Formatter, Result, Debug}; use tui::{ layout::Corner, style::{Color, Modifier, Style}, text::Span, widgets::{Block, Borders, List, ListItem, ListState} }; #[derive(Clone, Debug)] pub enum Action { Copy, Edit, Delete } impl Display for Action { fn fmt(&self, f: &mut Formatter) -> Result { Debug::fmt(self, f) } } pub const ACTIONS: [Action; 3] = [Action::Copy, Action::Edit, Action::Delete]; pub struct ActionList { pub state: ListState, items: Vec<String> } impl ActionList { pub fn new(actions: Vec<Action>) -> ActionList { ActionList { state: ListState::default(), items: actions.iter().map( |i| i.to_string() ).collect() } } pub fn get_widget(&self) -> List { // Define selected action style let selected_action_style = Style::default().fg(Color::Black).bg(Color::White).add_modifier(Modifier::BOLD); // Set list let action_options: Vec<ListItem> = self.items.iter().map( // Note here style is applied on ListItem instead of Span |i| ListItem::new(Span::raw(i)).style(Style::default().fg(Color::White)) ).collect(); List::new(action_options) .block(Block::default().borders(Borders::ALL).title("Actions")) .highlight_style(selected_action_style) .start_corner(Corner::TopLeft) } pub fn reset(&mut self) { self.state.select(None); } pub fn up(&mut self) { match self.state.selected() { Some(ind) => { if ind > 0 { self.state.select(Some(ind - 1)); } }, None => {} } } pub fn down(&mut self) { match self.state.selected() { Some(ind) => { if ind < self.items.len() - 1 { self.state.select(Some(ind + 1)); } }, None => { if self.items.len() > 0 { self.state.select(Some(0)); } } } } pub fn get_state(&self) -> &ListState { &self.state } }
24.769231
116
0.503106
0efe5a56436b5727aa16be01da4fcac05e65946c
6,885
//! Error Reporting for `impl` items that do not match the obligations from their `trait`. use crate::infer::error_reporting::nice_region_error::NiceRegionError; use crate::infer::lexical_region_resolve::RegionResolutionError; use crate::infer::{SubregionOrigin, Subtype, ValuePairs}; use crate::traits::ObligationCauseCode::CompareImplMethodObligation; use rustc_errors::ErrorReported; use rustc_hir as hir; use rustc_hir::def::Res; use rustc_hir::def_id::DefId; use rustc_hir::intravisit::Visitor; use rustc_middle::ty::error::ExpectedFound; use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_span::{MultiSpan, Span, Symbol}; impl<'a, 'tcx> NiceRegionError<'a, 'tcx> { /// Print the error message for lifetime errors when the `impl` doesn't conform to the `trait`. pub(super) fn try_report_impl_not_conforming_to_trait(&self) -> Option<ErrorReported> { let error = self.error.as_ref()?; debug!("try_report_impl_not_conforming_to_trait {:?}", error); if let RegionResolutionError::SubSupConflict( _, var_origin, sub_origin, _sub, sup_origin, _sup, ) = error.clone() { if let (&Subtype(ref sup_trace), &Subtype(ref sub_trace)) = (&sup_origin, &sub_origin) { if let ( ValuePairs::Types(sub_expected_found), ValuePairs::Types(sup_expected_found), CompareImplMethodObligation { trait_item_def_id, .. 
}, ) = (&sub_trace.values, &sup_trace.values, &sub_trace.cause.code) { if sup_expected_found == sub_expected_found { self.emit_err( var_origin.span(), sub_expected_found.expected, sub_expected_found.found, *trait_item_def_id, ); return Some(ErrorReported); } } } } if let RegionResolutionError::ConcreteFailure(origin, _, _) = error.clone() { if let SubregionOrigin::CompareImplTypeObligation { span, item_name, impl_item_def_id, trait_item_def_id, } = origin { self.emit_associated_type_err(span, item_name, impl_item_def_id, trait_item_def_id); return Some(ErrorReported); } } None } fn emit_err(&self, sp: Span, expected: Ty<'tcx>, found: Ty<'tcx>, trait_def_id: DefId) { let trait_sp = self.tcx().def_span(trait_def_id); let mut err = self .tcx() .sess .struct_span_err(sp, "`impl` item signature doesn't match `trait` item signature"); err.span_label(sp, &format!("found `{}`", found)); err.span_label(trait_sp, &format!("expected `{}`", expected)); // Get the span of all the used type parameters in the method. let assoc_item = self.tcx().associated_item(trait_def_id); let mut visitor = TypeParamSpanVisitor { tcx: self.tcx(), types: vec![] }; match assoc_item.kind { ty::AssocKind::Fn => { let hir = self.tcx().hir(); if let Some(hir_id) = assoc_item.def_id.as_local().map(|id| hir.local_def_id_to_hir_id(id)) { if let Some(decl) = hir.fn_decl_by_hir_id(hir_id) { visitor.visit_fn_decl(decl); } } } _ => {} } let mut type_param_span: MultiSpan = visitor.types.to_vec().into(); for &span in &visitor.types { type_param_span.push_span_label( span, "consider borrowing this type parameter in the trait".to_string(), ); } if let Some((expected, found)) = self.infcx.expected_found_str_ty(ExpectedFound { expected, found }) { // Highlighted the differences when showing the "expected/found" note. err.note_expected_found(&"", expected, &"", found); } else { // This fallback shouldn't be necessary, but let's keep it in just in case. 
err.note(&format!("expected `{}`\n found `{}`", expected, found)); } err.span_help( type_param_span, "the lifetime requirements from the `impl` do not correspond to the requirements in \ the `trait`", ); if visitor.types.is_empty() { err.help( "verify the lifetime relationships in the `trait` and `impl` between the `self` \ argument, the other inputs and its output", ); } err.emit(); } fn emit_associated_type_err( &self, span: Span, item_name: Symbol, impl_item_def_id: DefId, trait_item_def_id: DefId, ) { let impl_sp = self.tcx().def_span(impl_item_def_id); let trait_sp = self.tcx().def_span(trait_item_def_id); let mut err = self .tcx() .sess .struct_span_err(span, &format!("`impl` associated type signature for `{}` doesn't match `trait` associated type signature", item_name)); err.span_label(impl_sp, "found"); err.span_label(trait_sp, "expected"); err.emit(); } } struct TypeParamSpanVisitor<'tcx> { tcx: TyCtxt<'tcx>, types: Vec<Span>, } impl Visitor<'tcx> for TypeParamSpanVisitor<'tcx> { type Map = rustc_middle::hir::map::Map<'tcx>; fn nested_visit_map(&mut self) -> hir::intravisit::NestedVisitorMap<Self::Map> { hir::intravisit::NestedVisitorMap::OnlyBodies(self.tcx.hir()) } fn visit_ty(&mut self, arg: &'tcx hir::Ty<'tcx>) { match arg.kind { hir::TyKind::Rptr(_, ref mut_ty) => { // We don't want to suggest looking into borrowing `&T` or `&Self`. hir::intravisit::walk_ty(self, mut_ty.ty); return; } hir::TyKind::Path(hir::QPath::Resolved(None, path)) => match &path.segments { [segment] if segment .res .map(|res| { matches!( res, Res::SelfTy(_, _) | Res::Def(hir::def::DefKind::TyParam, _) ) }) .unwrap_or(false) => { self.types.push(path.span); } _ => {} }, _ => {} } hir::intravisit::walk_ty(self, arg); } }
38.25
149
0.530428
033e9f5203f7938655cf3270af0258ed754ad79b
3,200
use std::io; use std::sync::{ atomic::{AtomicBool, Ordering}, Arc, }; #[cfg(target_os = "windows")] mod windows; #[cfg(target_os = "windows")] pub use windows::{Event, Registrator, Selector, TcpStream}; #[cfg(target_os = "macos")] mod macos; #[cfg(target_os = "macos")] pub use macos::{Event, Registrator, Selector, TcpStream}; #[cfg(target_os = "linux")] mod linux; #[cfg(target_os = "linux")] pub use linux::{Event, Registrator, Selector, TcpStream}; pub type Events = Vec<Event>; pub type Token = usize; /// `Poll` represents the event queue. The `poll` method will block the current thread /// waiting for events. If no timeout is provided it will potentially block indefinately. /// /// `Poll` can be used in one of two ways. The first way is by registering interest in events and then wait for /// them in the same thread. In this case you'll use the built-in methods on `Poll` for registering events. /// /// Alternatively, it can be used by waiting in one thread and registering interest in events from /// another. In this case you'll ned to call the `Poll::registrator()` method which returns a `Registrator` /// tied to this event queue which can be sent to another thread and used to register events. #[derive(Debug)] pub struct Poll { registry: Registry, is_poll_dead: Arc<AtomicBool>, } impl Poll { pub fn new() -> io::Result<Poll> { Selector::new().map(|selector| Poll { registry: Registry { selector }, is_poll_dead: Arc::new(AtomicBool::new(false)), }) } pub fn registrator(&self) -> Registrator { self.registry .selector .registrator(self.is_poll_dead.clone()) } /// Polls the event loop. The thread yields to the OS while witing for either /// an event to retur or a timeout to occur. A negative timeout will be treated /// as a timeout of 0. 
pub fn poll(&mut self, events: &mut Events, timeout_ms: Option<i32>) -> io::Result<usize> { // A negative timout is converted to a 0 timeout let timeout = timeout_ms.map(|n| if n < 0 { 0 } else { n }); loop { let res = self.registry.selector.select(events, timeout); match res { Ok(()) => break, Err(ref e) if e.kind() == io::ErrorKind::Interrupted => (), Err(e) => return Err(e), }; } if self.is_poll_dead.load(Ordering::SeqCst) { return Err(io::Error::new(io::ErrorKind::Interrupted, "Poll closed.")); } Ok(events.len()) } } #[derive(Debug)] pub struct Registry { selector: Selector, } const WRITABLE: u8 = 0b0000_0001; const READABLE: u8 = 0b0000_0010; /// Represents interest in either Read or Write events. This struct is created /// by using one of the two constants: /// /// - Interests::READABLE /// - Interests::WRITABLE pub struct Interests(u8); impl Interests { pub const READABLE: Interests = Interests(READABLE); pub const WRITABLE: Interests = Interests(WRITABLE); pub fn is_readable(&self) -> bool { self.0 & READABLE != 0 } pub fn is_writable(&self) -> bool { self.0 & WRITABLE != 0 } }
31.067961
111
0.634063
edd1e8bbe369abeafc592900b04eea370ee6a868
24,821
#![allow(unused_parens)] //! # Saliency API //! //! Many computer vision applications may benefit from understanding where humans focus given a scene. //! Other than cognitively understanding the way human perceive images and scenes, finding salient //! regions and objects in the images helps various tasks such as speeding up object detection, object //! recognition, object tracking and content-aware image editing. //! //! About the saliency, there is a rich literature but the development is very fragmented. The principal //! purpose of this API is to give a unique interface, a unique framework for use and plug sever //! saliency algorithms, also with very different nature and methodology, but they share the same //! purpose, organizing algorithms into three main categories: //! //! **Static Saliency**: algorithms belonging to this category, exploit different image features that //! allow to detect salient objects in a non dynamic scenarios. //! //! **Motion Saliency**: algorithms belonging to this category, are particularly focused to detect //! salient objects over time (hence also over frame), then there is a temporal component sealing //! cosider that allows to detect "moving" objects as salient, meaning therefore also the more general //! sense of detection the changes in the scene. //! //! **Objectness**: Objectness is usually represented as a value which reflects how likely an image //! window covers an object of any category. Algorithms belonging to this category, avoid making //! decisions early on, by proposing a small number of category-independent proposals, that are expected //! to cover all objects in an image. Being able to perceive objects before identifying them is closely //! related to bottom up visual attention (saliency). //! //! ![Saliency diagram](https://docs.opencv.org/4.3.0/saliency.png) //! //! To see how API works, try tracker demo: //! 
<https://github.com/fpuja/opencv_contrib/blob/saliencyModuleDevelop/modules/saliency/samples/computeSaliency.cpp> //! //! //! Note: This API has been designed with PlantUML. If you modify this API please change UML. use crate::{mod_prelude::*, core, sys, types}; pub mod prelude { pub use { super::Saliency, super::StaticSaliency, super::MotionSaliency, super::Objectness, super::StaticSaliencySpectralResidualTrait, super::StaticSaliencyFineGrainedTrait, super::MotionSaliencyBinWangApr2014Trait, super::ObjectnessBINGTrait }; } /// ********************************* Motion Saliency Base Class *********************************** pub trait MotionSaliency: crate::saliency::Saliency { fn as_raw_MotionSaliency(&self) -> *const c_void; fn as_raw_mut_MotionSaliency(&mut self) -> *mut c_void; } /// ! /// * A Fast Self-tuning Background Subtraction Algorithm. /// * /// * This background subtraction algorithm is inspired to the work of B. Wang and P. Dudek [2] /// * [2] B. Wang and P. Dudek "A Fast Self-tuning Background Subtraction Algorithm", in proc of IEEE Workshop on Change Detection, 2014 /// * /// /// the Fast Self-tuning Background Subtraction Algorithm from [BinWangApr2014](https://docs.opencv.org/4.3.0/d0/de3/citelist.html#CITEREF_BinWangApr2014) pub trait MotionSaliencyBinWangApr2014Trait: crate::saliency::MotionSaliency { fn as_raw_MotionSaliencyBinWangApr2014(&self) -> *const c_void; fn as_raw_mut_MotionSaliencyBinWangApr2014(&mut self) -> *mut c_void; fn compute_saliency(&mut self, image: &dyn core::ToInputArray, saliency_map: &mut dyn core::ToOutputArray) -> Result<bool> { input_array_arg!(image); output_array_arg!(saliency_map); unsafe { sys::cv_saliency_MotionSaliencyBinWangApr2014_computeSaliency_const__InputArrayR_const__OutputArrayR(self.as_raw_mut_MotionSaliencyBinWangApr2014(), image.as_raw__InputArray(), saliency_map.as_raw__OutputArray()) }.into_result() } /// This is a utility function that allows to set the correct size (taken from the input image) in 
the /// corresponding variables that will be used to size the data structures of the algorithm. /// ## Parameters /// * W: width of input image /// * H: height of input image fn set_imagesize(&mut self, w: i32, h: i32) -> Result<()> { unsafe { sys::cv_saliency_MotionSaliencyBinWangApr2014_setImagesize_int_int(self.as_raw_mut_MotionSaliencyBinWangApr2014(), w, h) }.into_result() } /// This function allows the correct initialization of all data structures that will be used by the /// algorithm. fn init(&mut self) -> Result<bool> { unsafe { sys::cv_saliency_MotionSaliencyBinWangApr2014_init(self.as_raw_mut_MotionSaliencyBinWangApr2014()) }.into_result() } fn get_image_width(&self) -> Result<i32> { unsafe { sys::cv_saliency_MotionSaliencyBinWangApr2014_getImageWidth_const(self.as_raw_MotionSaliencyBinWangApr2014()) }.into_result() } fn set_image_width(&mut self, val: i32) -> Result<()> { unsafe { sys::cv_saliency_MotionSaliencyBinWangApr2014_setImageWidth_int(self.as_raw_mut_MotionSaliencyBinWangApr2014(), val) }.into_result() } fn get_image_height(&self) -> Result<i32> { unsafe { sys::cv_saliency_MotionSaliencyBinWangApr2014_getImageHeight_const(self.as_raw_MotionSaliencyBinWangApr2014()) }.into_result() } fn set_image_height(&mut self, val: i32) -> Result<()> { unsafe { sys::cv_saliency_MotionSaliencyBinWangApr2014_setImageHeight_int(self.as_raw_mut_MotionSaliencyBinWangApr2014(), val) }.into_result() } } /// ! /// * A Fast Self-tuning Background Subtraction Algorithm. /// * /// * This background subtraction algorithm is inspired to the work of B. Wang and P. Dudek [2] /// * [2] B. Wang and P. Dudek "A Fast Self-tuning Background Subtraction Algorithm", in proc of IEEE Workshop on Change Detection, 2014 /// * /// /// the Fast Self-tuning Background Subtraction Algorithm from [BinWangApr2014](https://docs.opencv.org/4.3.0/d0/de3/citelist.html#CITEREF_BinWangApr2014) pub struct MotionSaliencyBinWangApr2014 { ptr: *mut c_void } opencv_type_boxed! 
{ MotionSaliencyBinWangApr2014 } impl Drop for MotionSaliencyBinWangApr2014 { fn drop(&mut self) { extern "C" { fn cv_MotionSaliencyBinWangApr2014_delete(instance: *mut c_void); } unsafe { cv_MotionSaliencyBinWangApr2014_delete(self.as_raw_mut_MotionSaliencyBinWangApr2014()) }; } } impl MotionSaliencyBinWangApr2014 { #[inline] pub fn as_raw_MotionSaliencyBinWangApr2014(&self) -> *const c_void { self.as_raw() } #[inline] pub fn as_raw_mut_MotionSaliencyBinWangApr2014(&mut self) -> *mut c_void { self.as_raw_mut() } } unsafe impl Send for MotionSaliencyBinWangApr2014 {} impl core::AlgorithmTrait for MotionSaliencyBinWangApr2014 { #[inline] fn as_raw_Algorithm(&self) -> *const c_void { self.as_raw() } #[inline] fn as_raw_mut_Algorithm(&mut self) -> *mut c_void { self.as_raw_mut() } } impl crate::saliency::MotionSaliency for MotionSaliencyBinWangApr2014 { #[inline] fn as_raw_MotionSaliency(&self) -> *const c_void { self.as_raw() } #[inline] fn as_raw_mut_MotionSaliency(&mut self) -> *mut c_void { self.as_raw_mut() } } impl crate::saliency::MotionSaliencyBinWangApr2014Trait for MotionSaliencyBinWangApr2014 { #[inline] fn as_raw_MotionSaliencyBinWangApr2014(&self) -> *const c_void { self.as_raw() } #[inline] fn as_raw_mut_MotionSaliencyBinWangApr2014(&mut self) -> *mut c_void { self.as_raw_mut() } } impl crate::saliency::Saliency for MotionSaliencyBinWangApr2014 { #[inline] fn as_raw_Saliency(&self) -> *const c_void { self.as_raw() } #[inline] fn as_raw_mut_Saliency(&mut self) -> *mut c_void { self.as_raw_mut() } } impl MotionSaliencyBinWangApr2014 { pub fn default() -> Result<crate::saliency::MotionSaliencyBinWangApr2014> { unsafe { sys::cv_saliency_MotionSaliencyBinWangApr2014_MotionSaliencyBinWangApr2014() }.into_result().map(|r| unsafe { crate::saliency::MotionSaliencyBinWangApr2014::opencv_from_extern(r) } ) } pub fn create() -> Result<core::Ptr::<crate::saliency::MotionSaliencyBinWangApr2014>> { unsafe { sys::cv_saliency_MotionSaliencyBinWangApr2014_create() 
}.into_result().map(|r| unsafe { core::Ptr::<crate::saliency::MotionSaliencyBinWangApr2014>::opencv_from_extern(r) } ) } } /// ********************************* Objectness Base Class *********************************** pub trait Objectness: crate::saliency::Saliency { fn as_raw_Objectness(&self) -> *const c_void; fn as_raw_mut_Objectness(&mut self) -> *mut c_void; } /// the Binarized normed gradients algorithm from [BING](https://docs.opencv.org/4.3.0/d0/de3/citelist.html#CITEREF_BING) pub trait ObjectnessBINGTrait: crate::saliency::Objectness { fn as_raw_ObjectnessBING(&self) -> *const c_void; fn as_raw_mut_ObjectnessBING(&mut self) -> *mut c_void; fn compute_saliency(&mut self, image: &dyn core::ToInputArray, saliency_map: &mut dyn core::ToOutputArray) -> Result<bool> { input_array_arg!(image); output_array_arg!(saliency_map); unsafe { sys::cv_saliency_ObjectnessBING_computeSaliency_const__InputArrayR_const__OutputArrayR(self.as_raw_mut_ObjectnessBING(), image.as_raw__InputArray(), saliency_map.as_raw__OutputArray()) }.into_result() } fn read(&mut self) -> Result<()> { unsafe { sys::cv_saliency_ObjectnessBING_read(self.as_raw_mut_ObjectnessBING()) }.into_result() } fn write(&self) -> Result<()> { unsafe { sys::cv_saliency_ObjectnessBING_write_const(self.as_raw_ObjectnessBING()) }.into_result() } /// Return the list of the rectangles' objectness value, /// /// in the same order as the *vector\<Vec4i\> objectnessBoundingBox* returned by the algorithm (in /// computeSaliencyImpl function). The bigger value these scores are, it is more likely to be an /// object window. fn getobjectness_values(&mut self) -> Result<core::Vector::<f32>> { unsafe { sys::cv_saliency_ObjectnessBING_getobjectnessValues(self.as_raw_mut_ObjectnessBING()) }.into_result().map(|r| unsafe { core::Vector::<f32>::opencv_from_extern(r) } ) } /// This is a utility function that allows to set the correct path from which the algorithm will load /// the trained model. 
/// ## Parameters /// * trainingPath: trained model path fn set_training_path(&mut self, training_path: &str) -> Result<()> { extern_container_arg!(training_path); unsafe { sys::cv_saliency_ObjectnessBING_setTrainingPath_const_StringR(self.as_raw_mut_ObjectnessBING(), training_path.opencv_as_extern()) }.into_result() } /// This is a utility function that allows to set an arbitrary path in which the algorithm will save the /// optional results /// /// (ie writing on file the total number and the list of rectangles returned by objectess, one for /// each row). /// ## Parameters /// * resultsDir: results' folder path fn set_bb_res_dir(&mut self, results_dir: &str) -> Result<()> { extern_container_arg!(results_dir); unsafe { sys::cv_saliency_ObjectnessBING_setBBResDir_const_StringR(self.as_raw_mut_ObjectnessBING(), results_dir.opencv_as_extern()) }.into_result() } fn get_base(&self) -> Result<f64> { unsafe { sys::cv_saliency_ObjectnessBING_getBase_const(self.as_raw_ObjectnessBING()) }.into_result() } fn set_base(&mut self, val: f64) -> Result<()> { unsafe { sys::cv_saliency_ObjectnessBING_setBase_double(self.as_raw_mut_ObjectnessBING(), val) }.into_result() } fn get_nss(&self) -> Result<i32> { unsafe { sys::cv_saliency_ObjectnessBING_getNSS_const(self.as_raw_ObjectnessBING()) }.into_result() } fn set_nss(&mut self, val: i32) -> Result<()> { unsafe { sys::cv_saliency_ObjectnessBING_setNSS_int(self.as_raw_mut_ObjectnessBING(), val) }.into_result() } fn get_w(&self) -> Result<i32> { unsafe { sys::cv_saliency_ObjectnessBING_getW_const(self.as_raw_ObjectnessBING()) }.into_result() } fn set_w(&mut self, val: i32) -> Result<()> { unsafe { sys::cv_saliency_ObjectnessBING_setW_int(self.as_raw_mut_ObjectnessBING(), val) }.into_result() } } /// the Binarized normed gradients algorithm from [BING](https://docs.opencv.org/4.3.0/d0/de3/citelist.html#CITEREF_BING) pub struct ObjectnessBING { ptr: *mut c_void } opencv_type_boxed! 
{ ObjectnessBING } impl Drop for ObjectnessBING { fn drop(&mut self) { extern "C" { fn cv_ObjectnessBING_delete(instance: *mut c_void); } unsafe { cv_ObjectnessBING_delete(self.as_raw_mut_ObjectnessBING()) }; } } impl ObjectnessBING { #[inline] pub fn as_raw_ObjectnessBING(&self) -> *const c_void { self.as_raw() } #[inline] pub fn as_raw_mut_ObjectnessBING(&mut self) -> *mut c_void { self.as_raw_mut() } } unsafe impl Send for ObjectnessBING {} impl core::AlgorithmTrait for ObjectnessBING { #[inline] fn as_raw_Algorithm(&self) -> *const c_void { self.as_raw() } #[inline] fn as_raw_mut_Algorithm(&mut self) -> *mut c_void { self.as_raw_mut() } } impl crate::saliency::Objectness for ObjectnessBING { #[inline] fn as_raw_Objectness(&self) -> *const c_void { self.as_raw() } #[inline] fn as_raw_mut_Objectness(&mut self) -> *mut c_void { self.as_raw_mut() } } impl crate::saliency::ObjectnessBINGTrait for ObjectnessBING { #[inline] fn as_raw_ObjectnessBING(&self) -> *const c_void { self.as_raw() } #[inline] fn as_raw_mut_ObjectnessBING(&mut self) -> *mut c_void { self.as_raw_mut() } } impl crate::saliency::Saliency for ObjectnessBING { #[inline] fn as_raw_Saliency(&self) -> *const c_void { self.as_raw() } #[inline] fn as_raw_mut_Saliency(&mut self) -> *mut c_void { self.as_raw_mut() } } impl ObjectnessBING { pub fn default() -> Result<crate::saliency::ObjectnessBING> { unsafe { sys::cv_saliency_ObjectnessBING_ObjectnessBING() }.into_result().map(|r| unsafe { crate::saliency::ObjectnessBING::opencv_from_extern(r) } ) } pub fn create() -> Result<core::Ptr::<crate::saliency::ObjectnessBING>> { unsafe { sys::cv_saliency_ObjectnessBING_create() }.into_result().map(|r| unsafe { core::Ptr::<crate::saliency::ObjectnessBING>::opencv_from_extern(r) } ) } } /// ********************************* Saliency Base Class *********************************** pub trait Saliency: core::AlgorithmTrait { fn as_raw_Saliency(&self) -> *const c_void; fn as_raw_mut_Saliency(&mut self) -> *mut c_void; 
/// \brief Compute the saliency /// \param image The image. /// \param saliencyMap The computed saliency map. /// \return true if the saliency map is computed, false otherwise fn compute_saliency(&mut self, image: &dyn core::ToInputArray, saliency_map: &mut dyn core::ToOutputArray) -> Result<bool> { input_array_arg!(image); output_array_arg!(saliency_map); unsafe { sys::cv_saliency_Saliency_computeSaliency_const__InputArrayR_const__OutputArrayR(self.as_raw_mut_Saliency(), image.as_raw__InputArray(), saliency_map.as_raw__OutputArray()) }.into_result() } } /// ********************************* Static Saliency Base Class *********************************** pub trait StaticSaliency: crate::saliency::Saliency { fn as_raw_StaticSaliency(&self) -> *const c_void; fn as_raw_mut_StaticSaliency(&mut self) -> *mut c_void; /// This function perform a binary map of given saliency map. This is obtained in this /// way: /// /// In a first step, to improve the definition of interest areas and facilitate identification of /// targets, a segmentation by clustering is performed, using *K-means algorithm*. Then, to gain a /// binary representation of clustered saliency map, since values of the map can vary according to /// the characteristics of frame under analysis, it is not convenient to use a fixed threshold. So, /// *Otsu's algorithm* is used, which assumes that the image to be thresholded contains two classes /// of pixels or bi-modal histograms (e.g. foreground and back-ground pixels); later on, the /// algorithm calculates the optimal threshold separating those two classes, so that their /// intra-class variance is minimal. 
/// /// ## Parameters /// * _saliencyMap: the saliency map obtained through one of the specialized algorithms /// * _binaryMap: the binary map fn compute_binary_map(&mut self, _saliency_map: &dyn core::ToInputArray, _binary_map: &mut dyn core::ToOutputArray) -> Result<bool> { input_array_arg!(_saliency_map); output_array_arg!(_binary_map); unsafe { sys::cv_saliency_StaticSaliency_computeBinaryMap_const__InputArrayR_const__OutputArrayR(self.as_raw_mut_StaticSaliency(), _saliency_map.as_raw__InputArray(), _binary_map.as_raw__OutputArray()) }.into_result() } } /// the Fine Grained Saliency approach from [FGS](https://docs.opencv.org/4.3.0/d0/de3/citelist.html#CITEREF_FGS) /// /// This method calculates saliency based on center-surround differences. /// High resolution saliency maps are generated in real time by using integral images. pub trait StaticSaliencyFineGrainedTrait: crate::saliency::StaticSaliency { fn as_raw_StaticSaliencyFineGrained(&self) -> *const c_void; fn as_raw_mut_StaticSaliencyFineGrained(&mut self) -> *mut c_void; fn compute_saliency(&mut self, image: &dyn core::ToInputArray, saliency_map: &mut dyn core::ToOutputArray) -> Result<bool> { input_array_arg!(image); output_array_arg!(saliency_map); unsafe { sys::cv_saliency_StaticSaliencyFineGrained_computeSaliency_const__InputArrayR_const__OutputArrayR(self.as_raw_mut_StaticSaliencyFineGrained(), image.as_raw__InputArray(), saliency_map.as_raw__OutputArray()) }.into_result() } } /// the Fine Grained Saliency approach from [FGS](https://docs.opencv.org/4.3.0/d0/de3/citelist.html#CITEREF_FGS) /// /// This method calculates saliency based on center-surround differences. /// High resolution saliency maps are generated in real time by using integral images. pub struct StaticSaliencyFineGrained { ptr: *mut c_void } opencv_type_boxed! 
{ StaticSaliencyFineGrained } impl Drop for StaticSaliencyFineGrained { fn drop(&mut self) { extern "C" { fn cv_StaticSaliencyFineGrained_delete(instance: *mut c_void); } unsafe { cv_StaticSaliencyFineGrained_delete(self.as_raw_mut_StaticSaliencyFineGrained()) }; } } impl StaticSaliencyFineGrained { #[inline] pub fn as_raw_StaticSaliencyFineGrained(&self) -> *const c_void { self.as_raw() } #[inline] pub fn as_raw_mut_StaticSaliencyFineGrained(&mut self) -> *mut c_void { self.as_raw_mut() } } unsafe impl Send for StaticSaliencyFineGrained {} impl core::AlgorithmTrait for StaticSaliencyFineGrained { #[inline] fn as_raw_Algorithm(&self) -> *const c_void { self.as_raw() } #[inline] fn as_raw_mut_Algorithm(&mut self) -> *mut c_void { self.as_raw_mut() } } impl crate::saliency::Saliency for StaticSaliencyFineGrained { #[inline] fn as_raw_Saliency(&self) -> *const c_void { self.as_raw() } #[inline] fn as_raw_mut_Saliency(&mut self) -> *mut c_void { self.as_raw_mut() } } impl crate::saliency::StaticSaliency for StaticSaliencyFineGrained { #[inline] fn as_raw_StaticSaliency(&self) -> *const c_void { self.as_raw() } #[inline] fn as_raw_mut_StaticSaliency(&mut self) -> *mut c_void { self.as_raw_mut() } } impl crate::saliency::StaticSaliencyFineGrainedTrait for StaticSaliencyFineGrained { #[inline] fn as_raw_StaticSaliencyFineGrained(&self) -> *const c_void { self.as_raw() } #[inline] fn as_raw_mut_StaticSaliencyFineGrained(&mut self) -> *mut c_void { self.as_raw_mut() } } impl StaticSaliencyFineGrained { pub fn default() -> Result<crate::saliency::StaticSaliencyFineGrained> { unsafe { sys::cv_saliency_StaticSaliencyFineGrained_StaticSaliencyFineGrained() }.into_result().map(|r| unsafe { crate::saliency::StaticSaliencyFineGrained::opencv_from_extern(r) } ) } pub fn create() -> Result<core::Ptr::<crate::saliency::StaticSaliencyFineGrained>> { unsafe { sys::cv_saliency_StaticSaliencyFineGrained_create() }.into_result().map(|r| unsafe { 
core::Ptr::<crate::saliency::StaticSaliencyFineGrained>::opencv_from_extern(r) } ) } } /// the Spectral Residual approach from [SR](https://docs.opencv.org/4.3.0/d0/de3/citelist.html#CITEREF_SR) /// /// Starting from the principle of natural image statistics, this method simulate the behavior of /// pre-attentive visual search. The algorithm analyze the log spectrum of each image and obtain the /// spectral residual. Then transform the spectral residual to spatial domain to obtain the saliency /// map, which suggests the positions of proto-objects. pub trait StaticSaliencySpectralResidualTrait: crate::saliency::StaticSaliency { fn as_raw_StaticSaliencySpectralResidual(&self) -> *const c_void; fn as_raw_mut_StaticSaliencySpectralResidual(&mut self) -> *mut c_void; fn compute_saliency(&mut self, image: &dyn core::ToInputArray, saliency_map: &mut dyn core::ToOutputArray) -> Result<bool> { input_array_arg!(image); output_array_arg!(saliency_map); unsafe { sys::cv_saliency_StaticSaliencySpectralResidual_computeSaliency_const__InputArrayR_const__OutputArrayR(self.as_raw_mut_StaticSaliencySpectralResidual(), image.as_raw__InputArray(), saliency_map.as_raw__OutputArray()) }.into_result() } fn read(&mut self, fn_: &core::FileNode) -> Result<()> { unsafe { sys::cv_saliency_StaticSaliencySpectralResidual_read_const_FileNodeR(self.as_raw_mut_StaticSaliencySpectralResidual(), fn_.as_raw_FileNode()) }.into_result() } fn write(&self, fs: &mut core::FileStorage) -> Result<()> { unsafe { sys::cv_saliency_StaticSaliencySpectralResidual_write_const_FileStorageR(self.as_raw_StaticSaliencySpectralResidual(), fs.as_raw_mut_FileStorage()) }.into_result() } fn get_image_width(&self) -> Result<i32> { unsafe { sys::cv_saliency_StaticSaliencySpectralResidual_getImageWidth_const(self.as_raw_StaticSaliencySpectralResidual()) }.into_result() } fn set_image_width(&mut self, val: i32) -> Result<()> { unsafe { 
sys::cv_saliency_StaticSaliencySpectralResidual_setImageWidth_int(self.as_raw_mut_StaticSaliencySpectralResidual(), val) }.into_result() } fn get_image_height(&self) -> Result<i32> { unsafe { sys::cv_saliency_StaticSaliencySpectralResidual_getImageHeight_const(self.as_raw_StaticSaliencySpectralResidual()) }.into_result() } fn set_image_height(&mut self, val: i32) -> Result<()> { unsafe { sys::cv_saliency_StaticSaliencySpectralResidual_setImageHeight_int(self.as_raw_mut_StaticSaliencySpectralResidual(), val) }.into_result() } } /// the Spectral Residual approach from [SR](https://docs.opencv.org/4.3.0/d0/de3/citelist.html#CITEREF_SR) /// /// Starting from the principle of natural image statistics, this method simulate the behavior of /// pre-attentive visual search. The algorithm analyze the log spectrum of each image and obtain the /// spectral residual. Then transform the spectral residual to spatial domain to obtain the saliency /// map, which suggests the positions of proto-objects. pub struct StaticSaliencySpectralResidual { ptr: *mut c_void } opencv_type_boxed! 
{ StaticSaliencySpectralResidual } impl Drop for StaticSaliencySpectralResidual { fn drop(&mut self) { extern "C" { fn cv_StaticSaliencySpectralResidual_delete(instance: *mut c_void); } unsafe { cv_StaticSaliencySpectralResidual_delete(self.as_raw_mut_StaticSaliencySpectralResidual()) }; } } impl StaticSaliencySpectralResidual { #[inline] pub fn as_raw_StaticSaliencySpectralResidual(&self) -> *const c_void { self.as_raw() } #[inline] pub fn as_raw_mut_StaticSaliencySpectralResidual(&mut self) -> *mut c_void { self.as_raw_mut() } } unsafe impl Send for StaticSaliencySpectralResidual {} impl core::AlgorithmTrait for StaticSaliencySpectralResidual { #[inline] fn as_raw_Algorithm(&self) -> *const c_void { self.as_raw() } #[inline] fn as_raw_mut_Algorithm(&mut self) -> *mut c_void { self.as_raw_mut() } } impl crate::saliency::Saliency for StaticSaliencySpectralResidual { #[inline] fn as_raw_Saliency(&self) -> *const c_void { self.as_raw() } #[inline] fn as_raw_mut_Saliency(&mut self) -> *mut c_void { self.as_raw_mut() } } impl crate::saliency::StaticSaliency for StaticSaliencySpectralResidual { #[inline] fn as_raw_StaticSaliency(&self) -> *const c_void { self.as_raw() } #[inline] fn as_raw_mut_StaticSaliency(&mut self) -> *mut c_void { self.as_raw_mut() } } impl crate::saliency::StaticSaliencySpectralResidualTrait for StaticSaliencySpectralResidual { #[inline] fn as_raw_StaticSaliencySpectralResidual(&self) -> *const c_void { self.as_raw() } #[inline] fn as_raw_mut_StaticSaliencySpectralResidual(&mut self) -> *mut c_void { self.as_raw_mut() } } impl StaticSaliencySpectralResidual { pub fn default() -> Result<crate::saliency::StaticSaliencySpectralResidual> { unsafe { sys::cv_saliency_StaticSaliencySpectralResidual_StaticSaliencySpectralResidual() }.into_result().map(|r| unsafe { crate::saliency::StaticSaliencySpectralResidual::opencv_from_extern(r) } ) } pub fn create() -> Result<core::Ptr::<crate::saliency::StaticSaliencySpectralResidual>> { unsafe { 
sys::cv_saliency_StaticSaliencySpectralResidual_create() }.into_result().map(|r| unsafe { core::Ptr::<crate::saliency::StaticSaliencySpectralResidual>::opencv_from_extern(r) } ) } }
49.150495
247
0.753153
d59211cad2a5e37d0c19e9b713a7340486f80f12
287
use serde::{Deserialize, Serialize}; // tb_debate join tb_user #[derive(Deserialize, Serialize, Debug, Clone, Queryable)] pub struct Debate { pub id: i64, pub writer_id: i64, pub writer_name: String, pub subject: String, pub content: String, pub reg_utc: i64, }
22.076923
58
0.682927
2274fdf57aef53ae763c2a94335c344cd51d473d
806
use std::{cell::RefCell, sync::atomic}; #[cfg(feature = "hazard_ptr")] use nolock::hazard_ptr; #[cfg(feature = "hazard_ptr")] #[test] fn protect_boxed() { use std::sync::atomic::AtomicPtr; let initial_ptr = Box::into_raw(Box::new(RefCell::new(false))); let atom_ptr = AtomicPtr::new(initial_ptr); let global = hazard_ptr::get_global_domain(); let guard = global.protect(&atom_ptr, atomic::Ordering::SeqCst); unsafe { global.retire(initial_ptr, |ptr| { let ref_cell = unsafe { &*ptr }; *ref_cell.borrow_mut() = true; }); } global.reclaim(); assert_eq!(false, *guard.borrow()); drop(guard); global.reclaim(); let initial_refcell = unsafe { &*initial_ptr }; assert_eq!(true, *initial_refcell.borrow()); }
21.783784
68
0.617866
de0e7d05332cc0edfc3cf452b12974dbe0a64d2d
33,624
// Copyright © 2019 Intel Corporation // // SPDX-License-Identifier: Apache-2.0 OR BSD-3-Clause // // Copyright © 2020, Microsoft Corporation // // Copyright 2018-2019 CrowdStrike, Inc. // // use kvm_ioctls::{NoDatamatch, VcpuFd, VmFd}; use std::os::unix::io::{AsRawFd, RawFd}; use std::result; use std::sync::Arc; #[cfg(target_arch = "x86_64")] use vm_memory::Address; use vmm_sys_util::eventfd::EventFd; #[cfg(target_arch = "aarch64")] pub use crate::aarch64::{check_required_kvm_extensions, VcpuInit, VcpuKvmState as CpuState}; use crate::cpu; use crate::device; use crate::hypervisor; use crate::vm; // x86_64 dependencies #[cfg(target_arch = "x86_64")] pub mod x86_64; #[cfg(target_arch = "x86_64")] use x86_64::{ check_required_kvm_extensions, FpuState, SpecialRegisters, StandardRegisters, KVM_TSS_ADDRESS, }; #[cfg(target_arch = "x86_64")] pub use x86_64::{ CpuId, CpuIdEntry, ExtendedControlRegisters, LapicState, MsrEntries, VcpuKvmState as CpuState, Xsave, CPUID_FLAG_VALID_INDEX, }; #[cfg(target_arch = "x86_64")] use kvm_bindings::{kvm_enable_cap, MsrList, KVM_CAP_SPLIT_IRQCHIP}; #[cfg(target_arch = "x86_64")] use crate::arch::x86::NUM_IOAPIC_PINS; // aarch64 dependencies #[cfg(target_arch = "aarch64")] pub mod aarch64; pub use kvm_bindings; pub use kvm_bindings::{ kvm_create_device, kvm_device_type_KVM_DEV_TYPE_VFIO, kvm_irq_routing, kvm_irq_routing_entry, kvm_userspace_memory_region, KVM_IRQ_ROUTING_MSI, KVM_MEM_READONLY, KVM_MSI_VALID_DEVID, }; pub use kvm_ioctls; pub use kvm_ioctls::{Cap, Kvm}; /// /// Export generically-named wrappers of kvm-bindings for Unix-based platforms /// pub use { kvm_bindings::kvm_clock_data as ClockData, kvm_bindings::kvm_create_device as CreateDevice, kvm_bindings::kvm_device_attr as DeviceAttr, kvm_bindings::kvm_irq_routing_entry as IrqRoutingEntry, kvm_bindings::kvm_mp_state as MpState, kvm_bindings::kvm_userspace_memory_region as MemoryRegion, kvm_bindings::kvm_vcpu_events as VcpuEvents, kvm_ioctls::DeviceFd, 
kvm_ioctls::IoEventAddress, kvm_ioctls::VcpuExit, }; /// Wrapper over KVM VM ioctls. pub struct KvmVm { fd: Arc<VmFd>, #[cfg(target_arch = "x86_64")] msrs: MsrEntries, } // Returns a `Vec<T>` with a size in bytes at least as large as `size_in_bytes`. fn vec_with_size_in_bytes<T: Default>(size_in_bytes: usize) -> Vec<T> { let rounded_size = (size_in_bytes + size_of::<T>() - 1) / size_of::<T>(); let mut v = Vec::with_capacity(rounded_size); v.resize_with(rounded_size, T::default); v } // The kvm API has many structs that resemble the following `Foo` structure: // // ``` // #[repr(C)] // struct Foo { // some_data: u32 // entries: __IncompleteArrayField<__u32>, // } // ``` // // In order to allocate such a structure, `size_of::<Foo>()` would be too small because it would not // include any space for `entries`. To make the allocation large enough while still being aligned // for `Foo`, a `Vec<Foo>` is created. Only the first element of `Vec<Foo>` would actually be used // as a `Foo`. The remaining memory in the `Vec<Foo>` is for `entries`, which must be contiguous // with `Foo`. This function is used to make the `Vec<Foo>` with enough space for `count` entries. use std::mem::size_of; fn vec_with_array_field<T: Default, F>(count: usize) -> Vec<T> { let element_space = count * size_of::<F>(); let vec_size_bytes = size_of::<T>() + element_space; vec_with_size_in_bytes(vec_size_bytes) } /// /// Implementation of Vm trait for KVM /// Example: /// #[cfg(feature = "kvm")] /// extern crate hypervisor /// let kvm = hypervisor::kvm::KvmHypervisor::new().unwrap(); /// let hypervisor: Arc<dyn hypervisor::Hypervisor> = Arc::new(kvm); /// let vm = hypervisor.create_vm().expect("new VM fd creation failed"); /// vm.set/get().unwrap() /// impl vm::Vm for KvmVm { #[cfg(target_arch = "x86_64")] /// /// Sets the address of the three-page region in the VM's address space. 
/// fn set_tss_address(&self, offset: usize) -> vm::Result<()> { self.fd .set_tss_address(offset) .map_err(|e| vm::HypervisorVmError::SetTssAddress(e.into())) } /// /// Creates an in-kernel interrupt controller. /// fn create_irq_chip(&self) -> vm::Result<()> { self.fd .create_irq_chip() .map_err(|e| vm::HypervisorVmError::CreateIrq(e.into())) } /// /// Registers an event that will, when signaled, trigger the `gsi` IRQ. /// fn register_irqfd(&self, fd: &EventFd, gsi: u32) -> vm::Result<()> { self.fd .register_irqfd(fd, gsi) .map_err(|e| vm::HypervisorVmError::RegisterIrqFd(e.into())) } /// /// Unregisters an event that will, when signaled, trigger the `gsi` IRQ. /// fn unregister_irqfd(&self, fd: &EventFd, gsi: u32) -> vm::Result<()> { self.fd .unregister_irqfd(fd, gsi) .map_err(|e| vm::HypervisorVmError::UnregisterIrqFd(e.into())) } /// /// Creates a VcpuFd object from a vcpu RawFd. /// fn create_vcpu(&self, id: u8) -> vm::Result<Arc<dyn cpu::Vcpu>> { let vc = self .fd .create_vcpu(id) .map_err(|e| vm::HypervisorVmError::CreateVcpu(e.into()))?; let vcpu = KvmVcpu { fd: vc, #[cfg(target_arch = "x86_64")] msrs: self.msrs.clone(), }; Ok(Arc::new(vcpu)) } /// /// Registers an event to be signaled whenever a certain address is written to. /// fn register_ioevent( &self, fd: &EventFd, addr: &IoEventAddress, datamatch: Option<vm::DataMatch>, ) -> vm::Result<()> { if let Some(dm) = datamatch { match dm { vm::DataMatch::DataMatch32(kvm_dm32) => self .fd .register_ioevent(fd, addr, kvm_dm32) .map_err(|e| vm::HypervisorVmError::RegisterIoEvent(e.into())), vm::DataMatch::DataMatch64(kvm_dm64) => self .fd .register_ioevent(fd, addr, kvm_dm64) .map_err(|e| vm::HypervisorVmError::RegisterIoEvent(e.into())), } } else { self.fd .register_ioevent(fd, addr, NoDatamatch) .map_err(|e| vm::HypervisorVmError::RegisterIoEvent(e.into())) } } /// /// Unregisters an event from a certain address it has been previously registered to. 
/// fn unregister_ioevent(&self, fd: &EventFd, addr: &IoEventAddress) -> vm::Result<()> { self.fd .unregister_ioevent(fd, addr, NoDatamatch) .map_err(|e| vm::HypervisorVmError::UnregisterIoEvent(e.into())) } /// /// Sets the GSI routing table entries, overwriting any previously set /// entries, as per the `KVM_SET_GSI_ROUTING` ioctl. /// fn set_gsi_routing(&self, entries: &[IrqRoutingEntry]) -> vm::Result<()> { let mut irq_routing = vec_with_array_field::<kvm_irq_routing, kvm_irq_routing_entry>(entries.len()); irq_routing[0].nr = entries.len() as u32; irq_routing[0].flags = 0; unsafe { let entries_slice: &mut [kvm_irq_routing_entry] = irq_routing[0].entries.as_mut_slice(entries.len()); entries_slice.copy_from_slice(&entries); } self.fd .set_gsi_routing(&irq_routing[0]) .map_err(|e| vm::HypervisorVmError::SetGsiRouting(e.into())) } /// /// Creates a memory region structure that can be used with set_user_memory_region /// fn make_user_memory_region( &self, slot: u32, guest_phys_addr: u64, memory_size: u64, userspace_addr: u64, readonly: bool, ) -> MemoryRegion { MemoryRegion { slot, guest_phys_addr, memory_size, userspace_addr, flags: if readonly { KVM_MEM_READONLY } else { 0 }, } } /// /// Creates/modifies a guest physical memory slot. /// fn set_user_memory_region(&self, user_memory_region: MemoryRegion) -> vm::Result<()> { // Safe because guest regions are guaranteed not to overlap. unsafe { self.fd .set_user_memory_region(user_memory_region) .map_err(|e| vm::HypervisorVmError::SetUserMemory(e.into())) } } /// /// Creates an emulated device in the kernel. /// /// See the documentation for `KVM_CREATE_DEVICE`. fn create_device(&self, device: &mut CreateDevice) -> vm::Result<Arc<dyn device::Device>> { let fd = self .fd .create_device(device) .map_err(|e| vm::HypervisorVmError::CreateDevice(e.into()))?; let device = KvmDevice { fd }; Ok(Arc::new(device)) } /// /// Returns the preferred CPU target type which can be emulated by KVM on underlying host. 
/// #[cfg(any(target_arch = "arm", target_arch = "aarch64"))] fn get_preferred_target(&self, kvi: &mut VcpuInit) -> vm::Result<()> { self.fd .get_preferred_target(kvi) .map_err(|e| vm::HypervisorVmError::GetPreferredTarget(e.into())) } #[cfg(target_arch = "x86_64")] fn enable_split_irq(&self) -> vm::Result<()> { // Set TSS self.fd .set_tss_address(KVM_TSS_ADDRESS.raw_value() as usize) .map_err(|e| vm::HypervisorVmError::EnableSplitIrq(e.into()))?; // Create split irqchip // Only the local APIC is emulated in kernel, both PICs and IOAPIC // are not. let mut cap: kvm_enable_cap = Default::default(); cap.cap = KVM_CAP_SPLIT_IRQCHIP; cap.args[0] = NUM_IOAPIC_PINS as u64; self.fd .enable_cap(&cap) .map_err(|e| vm::HypervisorVmError::EnableSplitIrq(e.into()))?; Ok(()) } /// Retrieve guest clock. #[cfg(target_arch = "x86_64")] fn get_clock(&self) -> vm::Result<ClockData> { self.fd .get_clock() .map_err(|e| vm::HypervisorVmError::GetClock(e.into())) } /// Set guest clock. #[cfg(target_arch = "x86_64")] fn set_clock(&self, data: &ClockData) -> vm::Result<()> { self.fd .set_clock(data) .map_err(|e| vm::HypervisorVmError::SetClock(e.into())) } /// Checks if a particular `Cap` is available. fn check_extension(&self, c: Cap) -> bool { self.fd.check_extension(c) } /// Create a device that is used for passthrough fn create_passthrough_device(&self) -> vm::Result<Arc<dyn device::Device>> { let mut vfio_dev = kvm_create_device { type_: kvm_device_type_KVM_DEV_TYPE_VFIO, fd: 0, flags: 0, }; self.create_device(&mut vfio_dev) .map_err(|e| vm::HypervisorVmError::CreatePassthroughDevice(e.into())) } } /// Wrapper over KVM system ioctls. 
pub struct KvmHypervisor { kvm: Kvm, } /// Enum for KVM related error #[derive(Debug)] pub enum KvmError { CapabilityMissing(Cap), } pub type KvmResult<T> = result::Result<T, KvmError>; impl KvmHypervisor { /// Create a hypervisor based on Kvm pub fn new() -> hypervisor::Result<KvmHypervisor> { let kvm_obj = Kvm::new().map_err(|e| hypervisor::HypervisorError::VmCreate(e.into()))?; Ok(KvmHypervisor { kvm: kvm_obj }) } } /// Implementation of Hypervisor trait for KVM /// Example: /// #[cfg(feature = "kvm")] /// extern crate hypervisor /// let kvm = hypervisor::kvm::KvmHypervisor::new().unwrap(); /// let hypervisor: Arc<dyn hypervisor::Hypervisor> = Arc::new(kvm); /// let vm = hypervisor.create_vm().expect("new VM fd creation failed"); /// impl hypervisor::Hypervisor for KvmHypervisor { /// Create a KVM vm object and return the object as Vm trait object /// Example /// # extern crate hypervisor; /// # use hypervisor::KvmHypervisor; /// use hypervisor::KvmVm; /// let hypervisor = KvmHypervisor::new().unwrap(); /// let vm = hypervisor.create_vm().unwrap() /// fn create_vm(&self) -> hypervisor::Result<Arc<dyn vm::Vm>> { let fd: VmFd; loop { match self.kvm.create_vm() { Ok(res) => fd = res, Err(e) => { if e.errno() == libc::EINTR { // If the error returned is EINTR, which means the // ioctl has been interrupted, we have to retry as // this can't be considered as a regular error. 
continue; } else { return Err(hypervisor::HypervisorError::VmCreate(e.into())); } } } break; } let vm_fd = Arc::new(fd); #[cfg(target_arch = "x86_64")] { let msr_list = self.get_msr_list()?; let num_msrs = msr_list.as_fam_struct_ref().nmsrs as usize; let mut msrs = MsrEntries::new(num_msrs); let indices = msr_list.as_slice(); let msr_entries = msrs.as_mut_slice(); for (pos, index) in indices.iter().enumerate() { msr_entries[pos].index = *index; } Ok(Arc::new(KvmVm { fd: vm_fd, msrs })) } #[cfg(any(target_arch = "arm", target_arch = "aarch64"))] { Ok(Arc::new(KvmVm { fd: vm_fd })) } } fn check_required_extensions(&self) -> hypervisor::Result<()> { check_required_kvm_extensions(&self.kvm).expect("Missing KVM capabilities"); Ok(()) } /// /// Returns the KVM API version. /// fn get_api_version(&self) -> i32 { self.kvm.get_api_version() } /// /// Returns the size of the memory mapping required to use the vcpu's `kvm_run` structure. /// fn get_vcpu_mmap_size(&self) -> hypervisor::Result<usize> { self.kvm .get_vcpu_mmap_size() .map_err(|e| hypervisor::HypervisorError::GetVcpuMmap(e.into())) } /// /// Gets the recommended maximum number of VCPUs per VM. /// fn get_max_vcpus(&self) -> hypervisor::Result<usize> { Ok(self.kvm.get_max_vcpus()) } /// /// Gets the recommended number of VCPUs per VM. /// fn get_nr_vcpus(&self) -> hypervisor::Result<usize> { Ok(self.kvm.get_nr_vcpus()) } #[cfg(target_arch = "x86_64")] /// /// Checks if a particular `Cap` is available. /// fn check_capability(&self, c: Cap) -> bool { self.kvm.check_extension(c) } #[cfg(target_arch = "x86_64")] /// /// X86 specific call to get the system supported CPUID values. /// fn get_cpuid(&self) -> hypervisor::Result<CpuId> { self.kvm .get_supported_cpuid(kvm_bindings::KVM_MAX_CPUID_ENTRIES) .map_err(|e| hypervisor::HypervisorError::GetCpuId(e.into())) } #[cfg(target_arch = "x86_64")] /// /// Retrieve the list of MSRs supported by KVM. 
/// fn get_msr_list(&self) -> hypervisor::Result<MsrList> { self.kvm .get_msr_index_list() .map_err(|e| hypervisor::HypervisorError::GetMsrList(e.into())) } } /// Vcpu struct for KVM pub struct KvmVcpu { fd: VcpuFd, #[cfg(target_arch = "x86_64")] msrs: MsrEntries, } /// Implementation of Vcpu trait for KVM /// Example: /// #[cfg(feature = "kvm")] /// extern crate hypervisor /// let kvm = hypervisor::kvm::KvmHypervisor::new().unwrap(); /// let hypervisor: Arc<dyn hypervisor::Hypervisor> = Arc::new(kvm); /// let vm = hypervisor.create_vm().expect("new VM fd creation failed"); /// let vcpu = vm.create_vcpu(0).unwrap(); /// vcpu.get/set().unwrap() /// impl cpu::Vcpu for KvmVcpu { #[cfg(target_arch = "x86_64")] /// /// Returns the vCPU general purpose registers. /// fn get_regs(&self) -> cpu::Result<StandardRegisters> { self.fd .get_regs() .map_err(|e| cpu::HypervisorCpuError::GetStandardRegs(e.into())) } #[cfg(target_arch = "x86_64")] /// /// Sets the vCPU general purpose registers using the `KVM_SET_REGS` ioctl. /// fn set_regs(&self, regs: &StandardRegisters) -> cpu::Result<()> { self.fd .set_regs(regs) .map_err(|e| cpu::HypervisorCpuError::SetStandardRegs(e.into())) } #[cfg(target_arch = "x86_64")] /// /// Returns the vCPU special registers. /// fn get_sregs(&self) -> cpu::Result<SpecialRegisters> { self.fd .get_sregs() .map_err(|e| cpu::HypervisorCpuError::GetSpecialRegs(e.into())) } #[cfg(target_arch = "x86_64")] /// /// Sets the vCPU special registers using the `KVM_SET_SREGS` ioctl. /// fn set_sregs(&self, sregs: &SpecialRegisters) -> cpu::Result<()> { self.fd .set_sregs(sregs) .map_err(|e| cpu::HypervisorCpuError::SetSpecialRegs(e.into())) } #[cfg(target_arch = "x86_64")] /// /// Returns the floating point state (FPU) from the vCPU. 
/// fn get_fpu(&self) -> cpu::Result<FpuState> { self.fd .get_fpu() .map_err(|e| cpu::HypervisorCpuError::GetFloatingPointRegs(e.into())) } #[cfg(target_arch = "x86_64")] /// /// Set the floating point state (FPU) of a vCPU using the `KVM_SET_FPU` ioct. /// fn set_fpu(&self, fpu: &FpuState) -> cpu::Result<()> { self.fd .set_fpu(fpu) .map_err(|e| cpu::HypervisorCpuError::SetFloatingPointRegs(e.into())) } #[cfg(target_arch = "x86_64")] /// /// X86 specific call to setup the CPUID registers. /// fn set_cpuid2(&self, cpuid: &CpuId) -> cpu::Result<()> { self.fd .set_cpuid2(cpuid) .map_err(|e| cpu::HypervisorCpuError::SetCpuid(e.into())) } /// /// X86 specific call to retrieve the CPUID registers. /// #[cfg(target_arch = "x86_64")] fn get_cpuid2(&self, num_entries: usize) -> cpu::Result<CpuId> { self.fd .get_cpuid2(num_entries) .map_err(|e| cpu::HypervisorCpuError::GetCpuid(e.into())) } #[cfg(target_arch = "x86_64")] /// /// Returns the state of the LAPIC (Local Advanced Programmable Interrupt Controller). /// fn get_lapic(&self) -> cpu::Result<LapicState> { self.fd .get_lapic() .map_err(|e| cpu::HypervisorCpuError::GetlapicState(e.into())) } #[cfg(target_arch = "x86_64")] /// /// Sets the state of the LAPIC (Local Advanced Programmable Interrupt Controller). /// fn set_lapic(&self, klapic: &LapicState) -> cpu::Result<()> { self.fd .set_lapic(klapic) .map_err(|e| cpu::HypervisorCpuError::SetLapicState(e.into())) } #[cfg(target_arch = "x86_64")] /// /// Returns the model-specific registers (MSR) for this vCPU. /// fn get_msrs(&self, msrs: &mut MsrEntries) -> cpu::Result<usize> { self.fd .get_msrs(msrs) .map_err(|e| cpu::HypervisorCpuError::GetMsrEntries(e.into())) } #[cfg(target_arch = "x86_64")] /// /// Setup the model-specific registers (MSR) for this vCPU. /// Returns the number of MSR entries actually written. 
/// fn set_msrs(&self, msrs: &MsrEntries) -> cpu::Result<usize> { self.fd .set_msrs(msrs) .map_err(|e| cpu::HypervisorCpuError::SetMsrEntries(e.into())) } /// /// Returns the vcpu's current "multiprocessing state". /// fn get_mp_state(&self) -> cpu::Result<MpState> { self.fd .get_mp_state() .map_err(|e| cpu::HypervisorCpuError::GetMpState(e.into())) } /// /// Sets the vcpu's current "multiprocessing state". /// fn set_mp_state(&self, mp_state: MpState) -> cpu::Result<()> { self.fd .set_mp_state(mp_state) .map_err(|e| cpu::HypervisorCpuError::SetMpState(e.into())) } #[cfg(target_arch = "x86_64")] /// /// X86 specific call that returns the vcpu's current "xsave struct". /// fn get_xsave(&self) -> cpu::Result<Xsave> { self.fd .get_xsave() .map_err(|e| cpu::HypervisorCpuError::GetXsaveState(e.into())) } #[cfg(target_arch = "x86_64")] /// /// X86 specific call that sets the vcpu's current "xsave struct". /// fn set_xsave(&self, xsave: &Xsave) -> cpu::Result<()> { self.fd .set_xsave(xsave) .map_err(|e| cpu::HypervisorCpuError::SetXsaveState(e.into())) } #[cfg(target_arch = "x86_64")] /// /// X86 specific call that returns the vcpu's current "xcrs". /// fn get_xcrs(&self) -> cpu::Result<ExtendedControlRegisters> { self.fd .get_xcrs() .map_err(|e| cpu::HypervisorCpuError::GetXcsr(e.into())) } #[cfg(target_arch = "x86_64")] /// /// X86 specific call that sets the vcpu's current "xcrs". /// fn set_xcrs(&self, xcrs: &ExtendedControlRegisters) -> cpu::Result<()> { self.fd .set_xcrs(&xcrs) .map_err(|e| cpu::HypervisorCpuError::SetXcsr(e.into())) } /// /// Triggers the running of the current virtual CPU returning an exit reason. 
/// fn run(&self) -> std::result::Result<cpu::VmExit, cpu::HypervisorCpuError> { match self.fd.run() { Ok(run) => match run { #[cfg(target_arch = "x86_64")] VcpuExit::IoIn(addr, data) => Ok(cpu::VmExit::IoIn(addr, data)), #[cfg(target_arch = "x86_64")] VcpuExit::IoOut(addr, data) => Ok(cpu::VmExit::IoOut(addr, data)), #[cfg(target_arch = "x86_64")] VcpuExit::IoapicEoi(vector) => Ok(cpu::VmExit::IoapicEoi(vector)), #[cfg(target_arch = "x86_64")] VcpuExit::Shutdown | VcpuExit::Hlt => Ok(cpu::VmExit::Reset), #[cfg(target_arch = "aarch64")] VcpuExit::SystemEvent(event_type, flags) => { use kvm_bindings::KVM_SYSTEM_EVENT_SHUTDOWN; // On Aarch64, when the VM is shutdown, run() returns // VcpuExit::SystemEvent with reason KVM_SYSTEM_EVENT_SHUTDOWN if event_type == KVM_SYSTEM_EVENT_SHUTDOWN { Ok(cpu::VmExit::Reset) } else { Err(cpu::HypervisorCpuError::RunVcpu(anyhow!( "Unexpected system event with type 0x{:x}, flags 0x{:x}", event_type, flags ))) } } VcpuExit::MmioRead(addr, data) => Ok(cpu::VmExit::MmioRead(addr, data)), VcpuExit::MmioWrite(addr, data) => Ok(cpu::VmExit::MmioWrite(addr, data)), r => Err(cpu::HypervisorCpuError::RunVcpu(anyhow!( "Unexpected exit reason on vcpu run: {:?}", r ))), }, Err(ref e) => match e.errno() { libc::EAGAIN | libc::EINTR => Ok(cpu::VmExit::Ignore), _ => Err(cpu::HypervisorCpuError::RunVcpu(anyhow!( "VCPU error {:?}", e ))), }, } } #[cfg(target_arch = "x86_64")] /// /// Returns currently pending exceptions, interrupts, and NMIs as well as related /// states of the vcpu. /// fn get_vcpu_events(&self) -> cpu::Result<VcpuEvents> { self.fd .get_vcpu_events() .map_err(|e| cpu::HypervisorCpuError::GetVcpuEvents(e.into())) } #[cfg(target_arch = "x86_64")] /// /// Sets pending exceptions, interrupts, and NMIs as well as related states /// of the vcpu. 
/// fn set_vcpu_events(&self, events: &VcpuEvents) -> cpu::Result<()> { self.fd .set_vcpu_events(events) .map_err(|e| cpu::HypervisorCpuError::SetVcpuEvents(e.into())) } #[cfg(target_arch = "x86_64")] /// /// Let the guest know that it has been paused, which prevents from /// potential soft lockups when being resumed. /// fn notify_guest_clock_paused(&self) -> cpu::Result<()> { self.fd .kvmclock_ctrl() .map_err(|e| cpu::HypervisorCpuError::NotifyGuestClockPaused(e.into())) } #[cfg(any(target_arch = "arm", target_arch = "aarch64"))] fn vcpu_init(&self, kvi: &VcpuInit) -> cpu::Result<()> { self.fd .vcpu_init(kvi) .map_err(|e| cpu::HypervisorCpuError::VcpuInit(e.into())) } /// /// Sets the value of one register for this vCPU. /// #[cfg(any(target_arch = "arm", target_arch = "aarch64"))] fn set_one_reg(&self, reg_id: u64, data: u64) -> cpu::Result<()> { self.fd .set_one_reg(reg_id, data) .map_err(|e| cpu::HypervisorCpuError::SetOneReg(e.into())) } /// /// Gets the value of one register for this vCPU. /// #[cfg(any(target_arch = "arm", target_arch = "aarch64"))] fn get_one_reg(&self, reg_id: u64) -> cpu::Result<u64> { self.fd .get_one_reg(reg_id) .map_err(|e| cpu::HypervisorCpuError::GetOneReg(e.into())) } #[cfg(target_arch = "x86_64")] /// /// Get the current CPU state /// /// Ordering requirements: /// /// KVM_GET_MP_STATE calls kvm_apic_accept_events(), which might modify /// vCPU/LAPIC state. As such, it must be done before most everything /// else, otherwise we cannot restore everything and expect it to work. /// /// KVM_GET_VCPU_EVENTS/KVM_SET_VCPU_EVENTS is unsafe if other vCPUs are /// still running. /// /// KVM_GET_LAPIC may change state of LAPIC before returning it. /// /// GET_VCPU_EVENTS should probably be last to save. The code looks as /// it might as well be affected by internal state modifications of the /// GET ioctls. /// /// SREGS saves/restores a pending interrupt, similar to what /// VCPU_EVENTS also does. 
/// /// GET_MSRS requires a pre-populated data structure to do something /// meaningful. For SET_MSRS it will then contain good data. /// /// # Example /// /// ```rust /// # extern crate hypervisor; /// # use hypervisor::KvmHypervisor; /// # use std::sync::Arc; /// let kvm = hypervisor::kvm::KvmHypervisor::new().unwrap(); /// let hv: Arc<dyn hypervisor::Hypervisor> = Arc::new(kvm); /// let vm = hv.create_vm().expect("new VM fd creation failed"); /// vm.enable_split_irq().unwrap(); /// let vcpu = vm.create_vcpu(0).unwrap(); /// let state = vcpu.state().unwrap(); /// ``` fn state(&self) -> cpu::Result<CpuState> { let mp_state = self.get_mp_state()?; let regs = self.get_regs()?; let sregs = self.get_sregs()?; let xsave = self.get_xsave()?; let xcrs = self.get_xcrs()?; let lapic_state = self.get_lapic()?; let fpu = self.get_fpu()?; // Try to get all MSRs based on the list previously retrieved from KVM. // If the number of MSRs obtained from GET_MSRS is different from the // expected amount, we fallback onto a slower method by getting MSRs // by chunks. This is the only way to make sure we try to get as many // MSRs as possible, even if some MSRs are not supported. 
let mut msr_entries = self.msrs.clone(); let expected_num_msrs = msr_entries.as_fam_struct_ref().nmsrs as usize; let num_msrs = self.get_msrs(&mut msr_entries)?; let msrs = if num_msrs != expected_num_msrs { let mut faulty_msr_index = num_msrs; let mut msr_entries_tmp = MsrEntries::from_entries(&msr_entries.as_slice()[..faulty_msr_index]); loop { warn!( "Detected faulty MSR 0x{:x} while getting MSRs", msr_entries.as_slice()[faulty_msr_index].index ); let start_pos = faulty_msr_index + 1; let mut sub_msr_entries = MsrEntries::from_entries(&msr_entries.as_slice()[start_pos..]); let expected_num_msrs = sub_msr_entries.as_fam_struct_ref().nmsrs as usize; let num_msrs = self.get_msrs(&mut sub_msr_entries)?; for i in 0..num_msrs { msr_entries_tmp .push(sub_msr_entries.as_slice()[i]) .map_err(|e| { cpu::HypervisorCpuError::GetMsrEntries(anyhow!( "Failed adding MSR entries: {:?}", e )) })?; } if num_msrs == expected_num_msrs { break; } faulty_msr_index = start_pos + num_msrs; } msr_entries_tmp } else { msr_entries }; let vcpu_events = self.get_vcpu_events()?; Ok(CpuState { msrs, vcpu_events, regs, sregs, fpu, lapic_state, xsave, xcrs, mp_state, }) } #[cfg(target_arch = "aarch64")] fn state(&self) -> cpu::Result<CpuState> { unimplemented!(); } #[cfg(target_arch = "x86_64")] /// /// Restore the previously saved CPU state /// /// Ordering requirements: /// /// KVM_GET_VCPU_EVENTS/KVM_SET_VCPU_EVENTS is unsafe if other vCPUs are /// still running. /// /// Some SET ioctls (like set_mp_state) depend on kvm_vcpu_is_bsp(), so /// if we ever change the BSP, we have to do that before restoring anything. /// The same seems to be true for CPUID stuff. /// /// SREGS saves/restores a pending interrupt, similar to what /// VCPU_EVENTS also does. /// /// SET_REGS clears pending exceptions unconditionally, thus, it must be /// done before SET_VCPU_EVENTS, which restores it. /// /// SET_LAPIC must come after SET_SREGS, because the latter restores /// the apic base msr. 
/// /// SET_LAPIC must come before SET_MSRS, because the TSC deadline MSR /// only restores successfully, when the LAPIC is correctly configured. /// /// Arguments: CpuState /// # Example /// /// ```rust /// # extern crate hypervisor; /// # use hypervisor::KvmHypervisor; /// # use std::sync::Arc; /// let kvm = hypervisor::kvm::KvmHypervisor::new().unwrap(); /// let hv: Arc<dyn hypervisor::Hypervisor> = Arc::new(kvm); /// let vm = hv.create_vm().expect("new VM fd creation failed"); /// vm.enable_split_irq().unwrap(); /// let vcpu = vm.create_vcpu(0).unwrap(); /// let state = vcpu.state().unwrap(); /// vcpu.set_state(&state).unwrap(); /// ``` fn set_state(&self, state: &CpuState) -> cpu::Result<()> { self.set_mp_state(state.mp_state)?; self.set_regs(&state.regs)?; self.set_sregs(&state.sregs)?; self.set_xsave(&state.xsave)?; self.set_xcrs(&state.xcrs)?; self.set_lapic(&state.lapic_state)?; self.set_fpu(&state.fpu)?; // Try to set all MSRs previously stored. // If the number of MSRs set from SET_MSRS is different from the // expected amount, we fallback onto a slower method by setting MSRs // by chunks. This is the only way to make sure we try to set as many // MSRs as possible, even if some MSRs are not supported. 
let expected_num_msrs = state.msrs.as_fam_struct_ref().nmsrs as usize; let num_msrs = self.set_msrs(&state.msrs)?; if num_msrs != expected_num_msrs { let mut faulty_msr_index = num_msrs; loop { warn!( "Detected faulty MSR 0x{:x} while setting MSRs", state.msrs.as_slice()[faulty_msr_index].index ); let start_pos = faulty_msr_index + 1; let sub_msr_entries = MsrEntries::from_entries(&state.msrs.as_slice()[start_pos..]); let expected_num_msrs = sub_msr_entries.as_fam_struct_ref().nmsrs as usize; let num_msrs = self.set_msrs(&sub_msr_entries)?; if num_msrs == expected_num_msrs { break; } faulty_msr_index = start_pos + num_msrs; } } self.set_vcpu_events(&state.vcpu_events)?; Ok(()) } #[allow(unused_variables)] #[cfg(target_arch = "aarch64")] fn set_state(&self, state: &CpuState) -> cpu::Result<()> { Ok(()) } } /// Device struct for KVM pub struct KvmDevice { fd: DeviceFd, } impl device::Device for KvmDevice { /// /// Set device attribute /// fn set_device_attr(&self, attr: &DeviceAttr) -> device::Result<()> { self.fd .set_device_attr(attr) .map_err(|e| device::HypervisorDeviceError::SetDeviceAttribute(e.into())) } } impl AsRawFd for KvmDevice { fn as_raw_fd(&self) -> RawFd { self.fd.as_raw_fd() } }
35.245283
100
0.576374
ac644b87eb1bbdea821e46af5a510652e48c4438
2,877
//! Mocks for the rewards module. #![cfg(test)] use super::*; use frame_support::{construct_runtime, parameter_types, weights::constants::RocksDbWeight}; use sp_core::H256; use sp_runtime::{testing::Header, traits::IdentityLookup}; use sp_std::cell::RefCell; use std::collections::HashMap; use crate as rewards; pub type AccountId = u128; pub type Balance = u64; pub type Share = u64; pub type PoolId = u32; pub type BlockNumber = u64; pub type CurrencyId = u32; pub const ALICE: AccountId = 1; pub const BOB: AccountId = 2; pub const CAROL: AccountId = 3; pub const DOT_POOL: PoolId = 1; pub const NATIVE_COIN: CurrencyId = 0; pub const STABLE_COIN: CurrencyId = 1; parameter_types! { pub const BlockHashCount: u64 = 250; } impl frame_system::Config for Runtime { type Origin = Origin; type Index = u64; type BlockNumber = BlockNumber; type Call = Call; type Hash = H256; type Hashing = ::sp_runtime::traits::BlakeTwo256; type AccountId = AccountId; type Lookup = IdentityLookup<Self::AccountId>; type Header = Header; type Event = Event; type BlockHashCount = BlockHashCount; type BlockWeights = (); type BlockLength = (); type Version = (); type PalletInfo = PalletInfo; type AccountData = (); type OnNewAccount = (); type OnKilledAccount = (); type DbWeight = RocksDbWeight; type BaseCallFilter = (); type SystemWeightInfo = (); type SS58Prefix = (); type OnSetCode = (); } thread_local! 
{ pub static RECEIVED_PAYOUT: RefCell<HashMap<(PoolId, AccountId, CurrencyId), Balance>> = RefCell::new(HashMap::new()); } pub struct Handler; impl RewardHandler<AccountId, CurrencyId> for Handler { type Balance = Balance; type PoolId = PoolId; fn payout(who: &AccountId, pool: &Self::PoolId, currency_id: CurrencyId, amount: Self::Balance) { RECEIVED_PAYOUT.with(|v| { let mut old_map = v.borrow().clone(); if let Some(before) = old_map.get_mut(&(*pool, *who, currency_id)) { *before += amount; } else { old_map.insert((*pool, *who, currency_id), amount); }; *v.borrow_mut() = old_map; }); } } impl Config for Runtime { type Share = Share; type Balance = Balance; type PoolId = PoolId; type CurrencyId = CurrencyId; type Handler = Handler; } type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Runtime>; type Block = frame_system::mocking::MockBlock<Runtime>; construct_runtime!( pub enum Runtime where Block = Block, NodeBlock = Block, UncheckedExtrinsic = UncheckedExtrinsic, { System: frame_system::{Pallet, Call, Storage, Config, Event<T>}, RewardsModule: rewards::{Pallet, Storage, Call}, } ); pub struct ExtBuilder; impl Default for ExtBuilder { fn default() -> Self { ExtBuilder } } impl ExtBuilder { pub fn build(self) -> sp_io::TestExternalities { let t = frame_system::GenesisConfig::default() .build_storage::<Runtime>() .unwrap(); t.into() } }
23.975
119
0.706986
ed67151755be983ad162b55d08390ade56602ce8
1,007
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ use common::PerfLogger; use lsp_types::{ notification::{Exit, Notification}, request::{Request, Shutdown}, }; use schema_documentation::SchemaDocumentation; use crate::{lsp_runtime_error::LSPRuntimeResult, server::LSPState}; pub(crate) fn on_shutdown< TPerfLogger: PerfLogger + 'static, TSchemaDocumentation: SchemaDocumentation, >( _state: &mut LSPState<TPerfLogger, TSchemaDocumentation>, _params: <Shutdown as Request>::Params, ) -> LSPRuntimeResult<<Shutdown as Request>::Result> { std::process::exit(0); } pub(crate) fn on_exit< TPerfLogger: PerfLogger + 'static, TSchemaDocumentation: SchemaDocumentation, >( _state: &mut LSPState<TPerfLogger, TSchemaDocumentation>, _params: <Exit as Notification>::Params, ) -> LSPRuntimeResult<()> { std::process::exit(0); }
27.972222
67
0.718967
23a900f2c649e5a8acb35ab317f1c327ca876678
14,416
//! Schemes for instantiating a cluster of shards. use std::{ convert::TryFrom, error::Error, fmt::{Display, Formatter, Result as FmtResult}, iter::StepBy, ops::{Bound, RangeBounds, RangeInclusive}, }; /// Starting a cluster failed. #[derive(Debug)] pub struct ShardSchemeRangeError { kind: ShardSchemeRangeErrorType, } impl ShardSchemeRangeError { /// Immutable reference to the type of error that occurred. #[must_use = "retrieving the type has no effect if left unused"] pub const fn kind(&self) -> &ShardSchemeRangeErrorType { &self.kind } /// Consume the error, returning the source error if there is any. #[allow(clippy::unused_self)] #[must_use = "consuming the error and retrieving the source has no effect if left unused"] pub fn into_source(self) -> Option<Box<dyn Error + Send + Sync>> { None } /// Consume the error, returning the owned error type and the source error. #[must_use = "consuming the error into its parts has no effect if left unused"] pub fn into_parts( self, ) -> ( ShardSchemeRangeErrorType, Option<Box<dyn Error + Send + Sync>>, ) { (self.kind, None) } } impl Display for ShardSchemeRangeError { fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult { match &self.kind { ShardSchemeRangeErrorType::BucketTooLarge { bucket_id, concurrency, .. } => { f.write_str("bucket ID ")?; Display::fmt(bucket_id, f)?; f.write_str(" is larger than maximum concurrency (")?; Display::fmt(concurrency, f)?; f.write_str(")") } ShardSchemeRangeErrorType::IdTooLarge { end, start, total } => { f.write_str("The shard ID range ")?; Display::fmt(start, f)?; f.write_str("-")?; Display::fmt(end, f)?; f.write_str("/")?; Display::fmt(total, f)?; f.write_str(" is larger than the total") } } } } /// Starting a cluster failed. #[derive(Debug)] #[non_exhaustive] pub enum ShardSchemeRangeErrorType { /// Bucket ID is larger than the maximum concurrency. BucketTooLarge { /// ID of the bucket. bucket_id: u64, /// Number of shards in a bucket. concurrency: u64, /// Total number of buckets. 
total: u64, }, /// Start of the shard range was greater than the end or total. IdTooLarge { /// Last shard in the range to manage. end: u64, /// First shard in the range to manage. start: u64, /// Total number of shards used by the bot. total: u64, }, } impl Error for ShardSchemeRangeError {} /// Iterator of shard IDs based on a shard scheme. /// /// # Examples /// /// Iterate over a shard scheme range from 0 to 4 with a total of 19 shards: /// /// ``` /// # fn main() { try_main().unwrap() } /// # /// # fn try_main() -> Option<()> { /// use twilight_gateway::cluster::ShardScheme; /// /// let scheme = ShardScheme::Range { /// from: 0, /// to: 4, /// total: 19, /// }; /// let mut iter = scheme.iter()?; /// assert_eq!(0, iter.next()?); /// assert_eq!(1, iter.next()?); /// assert_eq!(2, iter.next()?); /// assert_eq!(3, iter.next()?); /// assert_eq!(4, iter.next()?); /// assert!(iter.next().is_none()); /// # Some(()) } /// ``` #[derive(Clone, Debug)] pub struct ShardSchemeIter { inner: StepBy<RangeInclusive<u64>>, } impl ShardSchemeIter { /// Create an iterator of shard IDs out of a scheme. fn new(scheme: &ShardScheme) -> Option<Self> { let (from, to, step) = match scheme { ShardScheme::Auto => return None, ShardScheme::Bucket { bucket_id, concurrency, total, } => { // It's reasonable to assume that no one will ever have a // concurrency size greater than even 16 bits. let concurrency = usize::try_from(*concurrency) .expect("concurrency is larger than target pointer width"); (*bucket_id, *total - 1, concurrency) } ShardScheme::Range { from, to, .. } => (*from, *to, 1), }; Some(Self { inner: (from..=to).step_by(step), }) } } impl Iterator for ShardSchemeIter { type Item = u64; fn next(&mut self) -> Option<Self::Item> { self.inner.next() } } /// The method of sharding to use. /// /// By default this is [`Auto`]. 
/// /// [`Auto`]: Self::Auto #[derive(Clone, Debug, Eq, Hash, PartialEq)] #[non_exhaustive] pub enum ShardScheme { /// Specifies to retrieve the amount of shards recommended by Discord and /// then start all of them. /// /// For example, if Discord recommends 10 shards, then all 10 shards will be /// started. Auto, /// Manage a single bucket's worth of shards within the cluster. /// /// This is primarily useful for bots in the [Sharding for Very Large Bots] /// program. /// /// [Sharding for Very Large Bots]: https://discord.com/developers/docs/topics/gateway#sharding-for-very-large-bots Bucket { /// ID of the first shard to start. /// /// This must be less than the maximum concurrency. /// /// For example, if you have a maximum concurrency of 16 and the bucket /// ID is 0, then shards 0, 16, 32, etc. will be managed by the cluster. bucket_id: u64, /// Number of shards allowed to be started simultaneously within a /// bucket, also known as the maximum concurrency. /// /// This is provided via [`SessionStartLimit::max_concurrency`]. /// /// [`SessionStartLimit::max_concurrency`]: ::twilight_model::gateway::SessionStartLimit::max_concurrency concurrency: u64, /// Total number of shards used across all clusters. total: u64, }, /// Specifies to start a range of shards. /// /// # Examples /// /// For example, if your bot uses 50 shards, then you might specify to start /// shards 0 through 24: /// /// ``` /// use twilight_gateway::cluster::ShardScheme; /// use std::convert::TryFrom; /// /// # fn main() -> Result<(), Box<dyn std::error::Error>> { /// let range = ShardScheme::try_from((0..24, 50))?; /// # Ok(()) } /// ``` Range { /// First shard ID to spawn. from: u64, /// Last shard ID to spawn. /// /// This doesn't necessarily have to be up to the `total`. to: u64, /// Total number of shards used by the bot. total: u64, }, } impl ShardScheme { /// Consume the shard scheme, returning an iterator of the shards that it /// denotes. 
/// /// Returns `None` if the scheme is dynamic, i.e. the scheme is the [`Auto`] /// variant. /// /// [`Auto`]: Self::Auto pub fn iter(&self) -> Option<ShardSchemeIter> { ShardSchemeIter::new(self) } /// First shard ID that will be started, if known. /// /// In the case of the [`Auto`] variant the total is unknown. /// /// [`Auto`]: Self::Auto pub const fn from(&self) -> Option<u64> { match self { Self::Auto => None, Self::Bucket { bucket_id, .. } => Some(*bucket_id), Self::Range { from, .. } => Some(*from), } } /// Total number of shards used by the bot across all clusters, if known. /// /// In the case of the [`Auto`] variant the total is unknown. /// /// [`Auto`]: Self::Auto pub const fn total(&self) -> Option<u64> { match self { Self::Auto => None, Self::Bucket { total, .. } | Self::Range { total, .. } => Some(*total), } } /// Maximum shard ID across all clusters, if known. /// /// In the case of the [`Auto`] variant the total is unknown. /// /// [`Auto`]: Self::Auto pub fn to(&self) -> Option<u64> { match self { Self::Auto => None, Self::Bucket { bucket_id, concurrency, total, } => { let buckets = total / concurrency; // Total is 1-indexed but shards are 0-indexed, so we need to // subtract 1 here. Some(total - (buckets - bucket_id) - 1) } Self::Range { to, .. 
} => Some(*to), } } } impl Default for ShardScheme { fn default() -> Self { Self::Auto } } impl<T: RangeBounds<u64>> TryFrom<(T, u64)> for ShardScheme { type Error = ShardSchemeRangeError; fn try_from((range, total): (T, u64)) -> Result<Self, Self::Error> { let start = match range.start_bound() { Bound::Excluded(num) => *num - 1, Bound::Included(num) => *num, Bound::Unbounded => 0, }; let end = match range.end_bound() { Bound::Excluded(num) => *num - 1, Bound::Included(num) => *num, Bound::Unbounded => total - 1, }; if start > end { return Err(ShardSchemeRangeError { kind: ShardSchemeRangeErrorType::IdTooLarge { end, start, total }, }); } Ok(Self::Range { from: start, to: end, total, }) } } /// Create a [`ShardScheme::Bucket`] shard scheme. /// /// # Examples /// /// Create a scheme for bucket 7 and with a maximum concurrency of 16 and a /// total of 320 shards: /// /// ``` /// # fn main() -> Result<(), Box<dyn std::error::Error>> { /// use std::convert::TryFrom; /// use twilight_gateway::cluster::ShardScheme; /// /// let scheme = ShardScheme::try_from((7u64, 16, 320))?; /// assert_eq!(Some(7), scheme.from()); /// assert_eq!(Some(306), scheme.to()); /// assert_eq!(Some(320), scheme.total()); /// # Ok(()) } /// ``` /// /// # Errors /// /// Returns [`ShardSchemeRangeErrorType::BucketTooLarge`] if the provided bucket ID /// is larger than the total number of buckets (`total / concurrency`). 
impl TryFrom<(u64, u64, u64)> for ShardScheme { type Error = ShardSchemeRangeError; fn try_from((bucket_id, concurrency, total): (u64, u64, u64)) -> Result<Self, Self::Error> { let buckets = total / concurrency; if bucket_id >= buckets { return Err(ShardSchemeRangeError { kind: ShardSchemeRangeErrorType::BucketTooLarge { bucket_id, concurrency, total, }, }); } Ok(ShardScheme::Bucket { bucket_id, concurrency, total, }) } } #[cfg(test)] mod tests { use super::{ShardScheme, ShardSchemeIter, ShardSchemeRangeError, ShardSchemeRangeErrorType}; use static_assertions::{assert_fields, assert_impl_all}; use std::{convert::TryFrom, error::Error, fmt::Debug, hash::Hash}; assert_impl_all!(ShardSchemeIter: Clone, Debug, Send, Sync); assert_fields!(ShardSchemeRangeErrorType::IdTooLarge: end, start, total); assert_impl_all!(ShardSchemeRangeError: Error, Send, Sync); assert_fields!(ShardScheme::Range: from, to, total); assert_impl_all!( ShardScheme: Clone, Debug, Default, Eq, Hash, PartialEq, Send, Sync, TryFrom<(u64, u64, u64)>, ); #[test] fn test_scheme() -> Result<(), Box<dyn Error>> { assert_eq!( ShardScheme::Range { from: 0, to: 9, total: 10, }, ShardScheme::try_from((0..=9, 10))? 
); Ok(()) } #[test] fn test_scheme_from() { assert!(ShardScheme::Auto.from().is_none()); assert_eq!( 18, ShardScheme::Bucket { bucket_id: 18, concurrency: 16, total: 320, } .from() .unwrap() ); assert_eq!( 50, ShardScheme::Range { from: 50, to: 99, total: 200, } .from() .unwrap() ); } #[test] fn test_scheme_total() { assert!(ShardScheme::Auto.total().is_none()); assert_eq!( 160, ShardScheme::Bucket { bucket_id: 3, concurrency: 16, total: 160, } .total() .unwrap() ); assert_eq!( 17, ShardScheme::Range { from: 0, to: 9, total: 17, } .total() .unwrap() ); } #[test] fn test_scheme_to() { assert!(ShardScheme::Auto.to().is_none()); assert_eq!( 317, ShardScheme::Bucket { bucket_id: 18, concurrency: 16, total: 320, } .to() .unwrap() ); assert_eq!( 299, ShardScheme::Bucket { bucket_id: 0, concurrency: 16, total: 320, } .to() .unwrap() ); assert_eq!( 99, ShardScheme::Range { from: 50, to: 99, total: 200, } .to() .unwrap() ); } /// Test that a [`BucketTooLarge`] error will return if the ID of the bucket /// is greater than the specified concurrency. /// /// [`BucketTooLarge`]: super::ShardSchemeRangeError::BucketTooLarge #[test] fn test_scheme_bucket_larger_than_concurrency() { assert!(matches!( ShardScheme::try_from((25, 16, 320)).unwrap_err(), ShardSchemeRangeError { kind: ShardSchemeRangeErrorType::BucketTooLarge { bucket_id, concurrency, total }} if bucket_id == 25 && concurrency == 16 && total == 320 )); } }
28.490119
119
0.520117
ef145a82027f5d39d05614658c8f8c031e62c0b2
1,466
use glib::translate::*; #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct ButtonEvent(crate::Event); event_wrapper!(ButtonEvent, ClutterButtonEvent); event_subtype!( ButtonEvent, ffi::CLUTTER_BUTTON_PRESS // | ffi::GDK_DOUBLE_BUTTON_PRESS // | ffi::GDK_TRIPLE_BUTTON_PRESS | ffi::CLUTTER_BUTTON_RELEASE ); impl ButtonEvent { // pub fn get_position(&self) -> (f64, f64) { // let x = self.as_ref().x; // let y = self.as_ref().y; // (x, y) // } // pub fn get_state(&self) -> ::ModifierType { // from_glib(self.as_ref().state) // } // pub fn get_time(&self) -> u32 { // self.as_ref().time // } // pub fn get_button(&self) -> u32 { // self.as_ref().button // } // pub fn get_device(&self) -> Option<::Device> { // unsafe { from_glib_none(self.as_ref().device) } // } // pub fn get_axes(&self) -> Option<(f64, f64)> { // let axes = self.as_ref().axes; // if axes.is_null() { // None // } else { // unsafe { Some((*axes, *axes.offset(1))) } // } // } // pub fn get_root(&self) -> (f64, f64) { // let x_root = self.as_ref().x_root; // let y_root = self.as_ref().y_root; // (x_root, y_root) // } }
27.148148
62
0.480218
feecdd2c04e09fb8892b15b56013c775488f48ea
2,297
//! Elemental properties. use crate::internal::Float; /// Every element on the periodic table. #[derive(Clone, Copy, Debug, PartialEq, EnumString, Hash, Eq)] pub enum Element { /// Hydrogen H, /// Helium He, /// Boron B, /// Nitrogen N, /// Oxygen O, /// Fluorine F, /// Sodium Na, /// Magnesium Mg, /// Chlorine Cl, /// Argon Ar, /// Xenon Xe, } impl Element { /// Returns the atomic mass of the element in amu. pub const fn mass(&self) -> Float { match self { Element::H => 1.008, Element::He => 4.0026, Element::B => 10.811, Element::N => 14.0067, Element::O => 15.999, Element::F => 18.998, Element::Na => 22.989, Element::Mg => 24.305, Element::Cl => 35.453, Element::Ar => 39.948, Element::Xe => 131.293, } } /// Returns the electronic charge of the element as a multiple of electron charge. pub const fn charge(&self) -> Float { match self { Element::H => 1.0, Element::He => 0.0, Element::B => 3.0, Element::N => -3.0, Element::O => -2.0, Element::F => -1.0, Element::Na => 1.0, Element::Mg => 2.0, Element::Cl => -1.0, Element::Ar => 0.0, Element::Xe => 0.0, } } /// Returns the atomic number of the element. pub const fn number(&self) -> u8 { match self { Element::H => 1, Element::He => 2, Element::B => 5, Element::N => 7, Element::O => 8, Element::F => 9, Element::Na => 11, Element::Mg => 12, Element::Cl => 17, Element::Ar => 18, Element::Xe => 54, } } } #[cfg(test)] mod tests { use super::Element; use std::str::FromStr; #[test] fn from_str_valid() { let hydrogen = Element::from_str("H").unwrap(); assert_eq!(Element::H, hydrogen) } #[test] #[should_panic] fn from_str_invalid() { let _ = Element::from_str("not a valid symbol").unwrap(); } }
22.519608
86
0.451894
67e891f8c0b22c698e569d2add6c7ae37b461d7b
10,777
use std::cell::RefCell; use std::rc::Rc; use rand::Rng; use chunk::{print_error, Chunk, Value}; use vm::*; impl VM { /// Remove the top element from the stack. pub fn opcode_drop(&mut self, chunk: &Chunk, i: usize) -> i32 { if self.stack.len() == 0 { print_error(chunk, i, "drop requires one argument"); return 0; } self.stack.pop().unwrap(); return 1; } /// Remove all elements from the stack. #[allow(unused_variables)] pub fn opcode_clear(&mut self, chunk: &Chunk, i: usize) -> i32 { self.stack.clear(); return 1; } /// Take the top element from the stack, duplicate it, and add it /// onto the stack. pub fn opcode_dup(&mut self, chunk: &Chunk, i: usize) -> i32 { if self.stack.len() == 0 { print_error(chunk, i, "dup requires one argument"); return 0; } self.stack.push(self.stack.last().unwrap().clone()); return 1; } /// Take the second element from the top from the stack, duplicate /// it, and add it onto the stack. pub fn opcode_over(&mut self, chunk: &Chunk, i: usize) -> i32 { if self.stack.len() < 2 { print_error(chunk, i, "over requires two arguments"); return 0; } self.stack.push(self.stack[self.stack.len() - 2].clone()); return 1; } /// Swap the top two elements from the stack. pub fn opcode_swap(&mut self, chunk: &Chunk, i: usize) -> i32 { if self.stack.len() < 2 { print_error(chunk, i, "swap requires two arguments"); return 0; } let first_rr = self.stack.pop().unwrap(); let second_rr = self.stack.pop().unwrap(); self.stack.push(first_rr); self.stack.push(second_rr); return 1; } /// Rotate the top three elements from the stack: the top element /// becomes the second from top element, the second from top /// element becomes the third from top element, and the third from /// top element becomes the top element. 
pub fn opcode_rot(&mut self, chunk: &Chunk, i: usize) -> i32 { if self.stack.len() < 3 { print_error(chunk, i, "rot requires three arguments"); return 0; } let first_rr = self.stack.pop().unwrap(); let second_rr = self.stack.pop().unwrap(); let third_rr = self.stack.pop().unwrap(); self.stack.push(second_rr); self.stack.push(first_rr); self.stack.push(third_rr); return 1; } /// Push the current depth of the stack onto the stack. #[allow(unused_variables)] pub fn opcode_depth(&mut self, chunk: &Chunk, i: usize) -> i32 { self.stack .push(Rc::new(RefCell::new(Value::Int(self.stack.len() as i32)))); return 1; } /// If the topmost element is a list, adds the length of that list /// onto the stack. If the topmost element is a string, adds the /// length of that sting onto the stack. pub fn core_len(&mut self, chunk: &Chunk, i: usize) -> i32 { if self.stack.len() < 1 { print_error(chunk, i, "len requires one argument"); return 0; } let lst_rr = self.stack.pop().unwrap(); let lst_rrb = lst_rr.borrow(); match &*lst_rrb { Value::List(lst) => { let len = lst.len(); self.stack .push(Rc::new(RefCell::new(Value::Int(len as i32)))); } Value::String(s, _) => { let len = s.len(); self.stack .push(Rc::new(RefCell::new(Value::Int(len as i32)))); } _ => { print_error(chunk, i, "len argument must be a list or a string"); return 0; } } return 1; } /// Adds a boolean onto the stack indicating whether the topmost /// element is a null value. pub fn opcode_isnull(&mut self, chunk: &Chunk, i: usize) -> i32 { if self.stack.len() < 1 { print_error(chunk, i, "is-null requires one argument"); return 0; } let i1_rr = self.stack.pop().unwrap(); let i1_rrb = i1_rr.borrow(); let is_null = match *i1_rrb { Value::Null => 1, _ => 0, }; self.stack.push(Rc::new(RefCell::new(Value::Int(is_null)))); return 1; } /// Adds a boolean onto the stack indicating whether the topmost /// element is a list. 
pub fn opcode_islist(&mut self, chunk: &Chunk, i: usize) -> i32 { if self.stack.len() < 1 { print_error(chunk, i, "is-list requires one argument"); return 0; } let i1_rr = self.stack.pop().unwrap(); let i1_rrb = i1_rr.borrow(); let is_list = match *i1_rrb { Value::List(_) => 1, _ => 0, }; self.stack.push(Rc::new(RefCell::new(Value::Int(is_list)))); return 1; } /// Adds a boolean onto the stack indicating whether the topmost /// element can be called. (In the case of a string, this doesn't /// currently check that the string name maps to a function or /// core form, though.) pub fn opcode_iscallable(&mut self, chunk: &Chunk, i: usize) -> i32 { if self.stack.len() < 1 { print_error(chunk, i, "is-callable requires one argument"); return 0; } let i1_rr = self.stack.pop().unwrap(); let i1_rrb = i1_rr.borrow(); let is_callable = match *i1_rrb { Value::Function(_, _, _) => 1, /* This could be better. */ Value::String(_, _) => 1, _ => 0, }; self.stack.push(Rc::new(RefCell::new(Value::Int(is_callable)))); return 1; } /// Convert a value into a string value. pub fn opcode_str(&mut self, chunk: &Chunk, i: usize) -> i32 { if self.stack.len() < 1 { print_error(chunk, i, "str requires one argument"); return 0; } let value_rr = self.stack.pop().unwrap(); let is_string; { let value_rrb = value_rr.borrow(); match *value_rrb { Value::String(_, _) => { is_string = true; } _ => { let value_pre = value_rrb.to_string(); let value_opt = to_string_2(&value_pre); match value_opt { Some(s) => { self.stack.push(Rc::new(RefCell::new(Value::String(s.to_string(), None)))); return 1; } _ => { print_error(chunk, i, "unable to convert argument to string"); return 0; } } } } } if is_string { self.stack.push(value_rr); } return 1; } /// Convert a value into an integer/bigint value. 
pub fn opcode_int(&mut self, chunk: &Chunk, i: usize) -> i32 { if self.stack.len() < 1 { print_error(chunk, i, "int requires one argument"); return 0; } let value_rr = self.stack.pop().unwrap(); let is_int; { let value_rrb = value_rr.borrow(); match *value_rrb { Value::Int(_) => { is_int = true; } Value::BigInt(_) => { is_int = true; } _ => { let value_opt = value_rrb.to_int(); match value_opt { Some(n) => { self.stack.push(Rc::new(RefCell::new(Value::Int(n)))); return 1; } _ => { let value_opt = value_rrb.to_bigint(); match value_opt { Some(n) => { self.stack.push(Rc::new(RefCell::new(Value::BigInt(n)))); return 1; } _ => { print_error(chunk, i, "unable to convert argument to int"); return 0; } } } } } } } if is_int { self.stack.push(value_rr); } return 1; } /// Convert a value into a floating-point value. pub fn opcode_flt(&mut self, chunk: &Chunk, i: usize) -> i32 { if self.stack.len() < 1 { print_error(chunk, i, "flt requires one argument"); return 0; } let value_rr = self.stack.pop().unwrap(); let is_float; { let value_rrb = value_rr.borrow(); match *value_rrb { Value::Float(_) => { is_float = true; } _ => { let value_opt = value_rrb.to_float(); match value_opt { Some(n) => { self.stack.push(Rc::new(RefCell::new(Value::Float(n)))); return 1; } _ => { print_error(chunk, i, "unable to convert argument to float"); return 0; } } } } } if is_float { self.stack.push(value_rr); } return 1; } /// Get a random floating-point value. pub fn opcode_rand(&mut self, chunk: &Chunk, i: usize) -> i32 { if self.stack.len() < 1 { print_error(chunk, i, "rand requires one argument"); return 0; } let value_rr = self.stack.pop().unwrap(); let value_rrb = value_rr.borrow(); let value_opt = value_rrb.to_float(); match value_opt { Some(n) => { let mut rng = rand::thread_rng(); let rand_value = rng.gen_range(0.0..n); self.stack.push(Rc::new(RefCell::new(Value::Float(rand_value)))); } _ => { print_error(chunk, i, "unable to convert argument to float"); return 0; } } return 1; } }
33.573209
103
0.46525
dee6907832ea1c974a8209571dfd186b8fa2e557
28,335
// This file is Copyright its original authors, visible in version control // history. // // This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE // or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option. // You may not use this file except in accordance with one or both of these // licenses. use chain; use chain::WatchedOutput; use chain::chaininterface; use chain::chaininterface::ConfirmationTarget; use chain::chainmonitor; use chain::chainmonitor::MonitorUpdateId; use chain::channelmonitor; use chain::channelmonitor::MonitorEvent; use chain::transaction::OutPoint; use chain::keysinterface; use ln::features::{ChannelFeatures, InitFeatures}; use ln::msgs; use ln::msgs::OptionalField; use ln::script::ShutdownScript; use routing::scoring::ScorerUsingTime; use routing::scoring::time::Eternity; use util::enforcing_trait_impls::{EnforcingSigner, EnforcementState}; use util::events; use util::logger::{Logger, Level, Record}; use util::ser::{Readable, ReadableArgs, Writer, Writeable}; use bitcoin::blockdata::constants::genesis_block; use bitcoin::blockdata::transaction::{Transaction, TxOut}; use bitcoin::blockdata::script::{Builder, Script}; use bitcoin::blockdata::opcodes; use bitcoin::blockdata::block::BlockHeader; use bitcoin::network::constants::Network; use bitcoin::hash_types::{BlockHash, Txid}; use bitcoin::secp256k1::{SecretKey, PublicKey, Secp256k1, Signature}; use bitcoin::secp256k1::recovery::RecoverableSignature; use regex; use io; use prelude::*; use core::time::Duration; use sync::{Mutex, Arc}; use core::sync::atomic::{AtomicBool, AtomicUsize, Ordering}; use core::{cmp, mem}; use bitcoin::bech32::u5; use chain::keysinterface::{InMemorySigner, KeyMaterial}; pub struct TestVecWriter(pub Vec<u8>); impl Writer for TestVecWriter { fn write_all(&mut self, buf: &[u8]) -> Result<(), io::Error> { self.0.extend_from_slice(buf); Ok(()) } } pub struct 
TestFeeEstimator { pub sat_per_kw: Mutex<u32>, } impl chaininterface::FeeEstimator for TestFeeEstimator { fn get_est_sat_per_1000_weight(&self, _confirmation_target: ConfirmationTarget) -> u32 { *self.sat_per_kw.lock().unwrap() } } pub struct OnlyReadsKeysInterface {} impl keysinterface::KeysInterface for OnlyReadsKeysInterface { type Signer = EnforcingSigner; fn get_node_secret(&self) -> SecretKey { unreachable!(); } fn get_inbound_payment_key_material(&self) -> KeyMaterial { unreachable!(); } fn get_destination_script(&self) -> Script { unreachable!(); } fn get_shutdown_scriptpubkey(&self) -> ShutdownScript { unreachable!(); } fn get_channel_signer(&self, _inbound: bool, _channel_value_satoshis: u64) -> EnforcingSigner { unreachable!(); } fn get_secure_random_bytes(&self) -> [u8; 32] { [0; 32] } fn read_chan_signer(&self, mut reader: &[u8]) -> Result<Self::Signer, msgs::DecodeError> { let dummy_sk = SecretKey::from_slice(&[42; 32]).unwrap(); let inner: InMemorySigner = ReadableArgs::read(&mut reader, dummy_sk)?; let state = Arc::new(Mutex::new(EnforcementState::new())); Ok(EnforcingSigner::new_with_revoked( inner, state, false )) } fn sign_invoice(&self, _hrp_bytes: &[u8], _invoice_data: &[u5]) -> Result<RecoverableSignature, ()> { unreachable!(); } } pub struct TestChainMonitor<'a> { pub added_monitors: Mutex<Vec<(OutPoint, channelmonitor::ChannelMonitor<EnforcingSigner>)>>, pub monitor_updates: Mutex<HashMap<[u8; 32], Vec<channelmonitor::ChannelMonitorUpdate>>>, pub latest_monitor_update_id: Mutex<HashMap<[u8; 32], (OutPoint, u64, MonitorUpdateId)>>, pub chain_monitor: chainmonitor::ChainMonitor<EnforcingSigner, &'a TestChainSource, &'a chaininterface::BroadcasterInterface, &'a TestFeeEstimator, &'a TestLogger, &'a chainmonitor::Persist<EnforcingSigner>>, pub keys_manager: &'a TestKeysInterface, /// If this is set to Some(), the next update_channel call (not watch_channel) must be a /// ChannelForceClosed event for the given channel_id with should_broadcast set 
to the given /// boolean. pub expect_channel_force_closed: Mutex<Option<([u8; 32], bool)>>, } impl<'a> TestChainMonitor<'a> { pub fn new(chain_source: Option<&'a TestChainSource>, broadcaster: &'a chaininterface::BroadcasterInterface, logger: &'a TestLogger, fee_estimator: &'a TestFeeEstimator, persister: &'a chainmonitor::Persist<EnforcingSigner>, keys_manager: &'a TestKeysInterface) -> Self { Self { added_monitors: Mutex::new(Vec::new()), monitor_updates: Mutex::new(HashMap::new()), latest_monitor_update_id: Mutex::new(HashMap::new()), chain_monitor: chainmonitor::ChainMonitor::new(chain_source, broadcaster, logger, fee_estimator, persister), keys_manager, expect_channel_force_closed: Mutex::new(None), } } } impl<'a> chain::Watch<EnforcingSigner> for TestChainMonitor<'a> { fn watch_channel(&self, funding_txo: OutPoint, monitor: channelmonitor::ChannelMonitor<EnforcingSigner>) -> Result<(), chain::ChannelMonitorUpdateErr> { // At every point where we get a monitor update, we should be able to send a useful monitor // to a watchtower and disk... 
let mut w = TestVecWriter(Vec::new()); monitor.write(&mut w).unwrap(); let new_monitor = <(BlockHash, channelmonitor::ChannelMonitor<EnforcingSigner>)>::read( &mut io::Cursor::new(&w.0), self.keys_manager).unwrap().1; assert!(new_monitor == monitor); self.latest_monitor_update_id.lock().unwrap().insert(funding_txo.to_channel_id(), (funding_txo, monitor.get_latest_update_id(), MonitorUpdateId::from_new_monitor(&monitor))); self.added_monitors.lock().unwrap().push((funding_txo, monitor)); self.chain_monitor.watch_channel(funding_txo, new_monitor) } fn update_channel(&self, funding_txo: OutPoint, update: channelmonitor::ChannelMonitorUpdate) -> Result<(), chain::ChannelMonitorUpdateErr> { // Every monitor update should survive roundtrip let mut w = TestVecWriter(Vec::new()); update.write(&mut w).unwrap(); assert!(channelmonitor::ChannelMonitorUpdate::read( &mut io::Cursor::new(&w.0)).unwrap() == update); self.monitor_updates.lock().unwrap().entry(funding_txo.to_channel_id()).or_insert(Vec::new()).push(update.clone()); if let Some(exp) = self.expect_channel_force_closed.lock().unwrap().take() { assert_eq!(funding_txo.to_channel_id(), exp.0); assert_eq!(update.updates.len(), 1); if let channelmonitor::ChannelMonitorUpdateStep::ChannelForceClosed { should_broadcast } = update.updates[0] { assert_eq!(should_broadcast, exp.1); } else { panic!(); } } self.latest_monitor_update_id.lock().unwrap().insert(funding_txo.to_channel_id(), (funding_txo, update.update_id, MonitorUpdateId::from_monitor_update(&update))); let update_res = self.chain_monitor.update_channel(funding_txo, update); // At every point where we get a monitor update, we should be able to send a useful monitor // to a watchtower and disk... 
let monitor = self.chain_monitor.get_monitor(funding_txo).unwrap(); w.0.clear(); monitor.write(&mut w).unwrap(); let new_monitor = <(BlockHash, channelmonitor::ChannelMonitor<EnforcingSigner>)>::read( &mut io::Cursor::new(&w.0), self.keys_manager).unwrap().1; assert!(new_monitor == *monitor); self.added_monitors.lock().unwrap().push((funding_txo, new_monitor)); update_res } fn release_pending_monitor_events(&self) -> Vec<MonitorEvent> { return self.chain_monitor.release_pending_monitor_events(); } } pub struct TestPersister { pub update_ret: Mutex<Result<(), chain::ChannelMonitorUpdateErr>>, /// If this is set to Some(), after the next return, we'll always return this until update_ret /// is changed: pub next_update_ret: Mutex<Option<Result<(), chain::ChannelMonitorUpdateErr>>>, /// When we get an update_persisted_channel call with no ChannelMonitorUpdate, we insert the /// MonitorUpdateId here. pub chain_sync_monitor_persistences: Mutex<HashMap<OutPoint, HashSet<MonitorUpdateId>>>, /// When we get an update_persisted_channel call *with* a ChannelMonitorUpdate, we insert the /// MonitorUpdateId here. 
pub offchain_monitor_updates: Mutex<HashMap<OutPoint, HashSet<MonitorUpdateId>>>, } impl TestPersister { pub fn new() -> Self { Self { update_ret: Mutex::new(Ok(())), next_update_ret: Mutex::new(None), chain_sync_monitor_persistences: Mutex::new(HashMap::new()), offchain_monitor_updates: Mutex::new(HashMap::new()), } } pub fn set_update_ret(&self, ret: Result<(), chain::ChannelMonitorUpdateErr>) { *self.update_ret.lock().unwrap() = ret; } pub fn set_next_update_ret(&self, next_ret: Option<Result<(), chain::ChannelMonitorUpdateErr>>) { *self.next_update_ret.lock().unwrap() = next_ret; } } impl<Signer: keysinterface::Sign> chainmonitor::Persist<Signer> for TestPersister { fn persist_new_channel(&self, _funding_txo: OutPoint, _data: &channelmonitor::ChannelMonitor<Signer>, _id: MonitorUpdateId) -> Result<(), chain::ChannelMonitorUpdateErr> { let ret = self.update_ret.lock().unwrap().clone(); if let Some(next_ret) = self.next_update_ret.lock().unwrap().take() { *self.update_ret.lock().unwrap() = next_ret; } ret } fn update_persisted_channel(&self, funding_txo: OutPoint, update: &Option<channelmonitor::ChannelMonitorUpdate>, _data: &channelmonitor::ChannelMonitor<Signer>, update_id: MonitorUpdateId) -> Result<(), chain::ChannelMonitorUpdateErr> { let ret = self.update_ret.lock().unwrap().clone(); if let Some(next_ret) = self.next_update_ret.lock().unwrap().take() { *self.update_ret.lock().unwrap() = next_ret; } if update.is_none() { self.chain_sync_monitor_persistences.lock().unwrap().entry(funding_txo).or_insert(HashSet::new()).insert(update_id); } else { self.offchain_monitor_updates.lock().unwrap().entry(funding_txo).or_insert(HashSet::new()).insert(update_id); } ret } } pub struct TestBroadcaster { pub txn_broadcasted: Mutex<Vec<Transaction>>, pub blocks: Arc<Mutex<Vec<(BlockHeader, u32)>>>, } impl TestBroadcaster { pub fn new(blocks: Arc<Mutex<Vec<(BlockHeader, u32)>>>) -> TestBroadcaster { TestBroadcaster { txn_broadcasted: Mutex::new(Vec::new()), blocks } } } 
impl chaininterface::BroadcasterInterface for TestBroadcaster { fn broadcast_transaction(&self, tx: &Transaction) { assert!(tx.lock_time < 1_500_000_000); if tx.lock_time > self.blocks.lock().unwrap().len() as u32 + 1 && tx.lock_time < 500_000_000 { for inp in tx.input.iter() { if inp.sequence != 0xffffffff { panic!("We should never broadcast a transaction before its locktime ({})!", tx.lock_time); } } } self.txn_broadcasted.lock().unwrap().push(tx.clone()); } } pub struct TestChannelMessageHandler { pub pending_events: Mutex<Vec<events::MessageSendEvent>>, } impl TestChannelMessageHandler { pub fn new() -> Self { TestChannelMessageHandler { pending_events: Mutex::new(Vec::new()), } } } impl msgs::ChannelMessageHandler for TestChannelMessageHandler { fn handle_open_channel(&self, _their_node_id: &PublicKey, _their_features: InitFeatures, _msg: &msgs::OpenChannel) {} fn handle_accept_channel(&self, _their_node_id: &PublicKey, _their_features: InitFeatures, _msg: &msgs::AcceptChannel) {} fn handle_funding_created(&self, _their_node_id: &PublicKey, _msg: &msgs::FundingCreated) {} fn handle_funding_signed(&self, _their_node_id: &PublicKey, _msg: &msgs::FundingSigned) {} fn handle_funding_locked(&self, _their_node_id: &PublicKey, _msg: &msgs::FundingLocked) {} fn handle_shutdown(&self, _their_node_id: &PublicKey, _their_features: &InitFeatures, _msg: &msgs::Shutdown) {} fn handle_closing_signed(&self, _their_node_id: &PublicKey, _msg: &msgs::ClosingSigned) {} fn handle_update_add_htlc(&self, _their_node_id: &PublicKey, _msg: &msgs::UpdateAddHTLC) {} fn handle_update_fulfill_htlc(&self, _their_node_id: &PublicKey, _msg: &msgs::UpdateFulfillHTLC) {} fn handle_update_fail_htlc(&self, _their_node_id: &PublicKey, _msg: &msgs::UpdateFailHTLC) {} fn handle_update_fail_malformed_htlc(&self, _their_node_id: &PublicKey, _msg: &msgs::UpdateFailMalformedHTLC) {} fn handle_commitment_signed(&self, _their_node_id: &PublicKey, _msg: &msgs::CommitmentSigned) {} fn 
handle_revoke_and_ack(&self, _their_node_id: &PublicKey, _msg: &msgs::RevokeAndACK) {} fn handle_update_fee(&self, _their_node_id: &PublicKey, _msg: &msgs::UpdateFee) {} fn handle_channel_update(&self, _their_node_id: &PublicKey, _msg: &msgs::ChannelUpdate) {} fn handle_announcement_signatures(&self, _their_node_id: &PublicKey, _msg: &msgs::AnnouncementSignatures) {} fn handle_channel_reestablish(&self, _their_node_id: &PublicKey, _msg: &msgs::ChannelReestablish) {} fn peer_disconnected(&self, _their_node_id: &PublicKey, _no_connection_possible: bool) {} fn peer_connected(&self, _their_node_id: &PublicKey, _msg: &msgs::Init) {} fn handle_error(&self, _their_node_id: &PublicKey, _msg: &msgs::ErrorMessage) {} } impl events::MessageSendEventsProvider for TestChannelMessageHandler { fn get_and_clear_pending_msg_events(&self) -> Vec<events::MessageSendEvent> { let mut pending_events = self.pending_events.lock().unwrap(); let mut ret = Vec::new(); mem::swap(&mut ret, &mut *pending_events); ret } } fn get_dummy_channel_announcement(short_chan_id: u64) -> msgs::ChannelAnnouncement { use bitcoin::secp256k1::ffi::Signature as FFISignature; let secp_ctx = Secp256k1::new(); let network = Network::Testnet; let node_1_privkey = SecretKey::from_slice(&[42; 32]).unwrap(); let node_2_privkey = SecretKey::from_slice(&[41; 32]).unwrap(); let node_1_btckey = SecretKey::from_slice(&[40; 32]).unwrap(); let node_2_btckey = SecretKey::from_slice(&[39; 32]).unwrap(); let unsigned_ann = msgs::UnsignedChannelAnnouncement { features: ChannelFeatures::known(), chain_hash: genesis_block(network).header.block_hash(), short_channel_id: short_chan_id, node_id_1: PublicKey::from_secret_key(&secp_ctx, &node_1_privkey), node_id_2: PublicKey::from_secret_key(&secp_ctx, &node_2_privkey), bitcoin_key_1: PublicKey::from_secret_key(&secp_ctx, &node_1_btckey), bitcoin_key_2: PublicKey::from_secret_key(&secp_ctx, &node_2_btckey), excess_data: Vec::new(), }; unsafe { msgs::ChannelAnnouncement { 
node_signature_1: Signature::from(FFISignature::new()), node_signature_2: Signature::from(FFISignature::new()), bitcoin_signature_1: Signature::from(FFISignature::new()), bitcoin_signature_2: Signature::from(FFISignature::new()), contents: unsigned_ann, } } } fn get_dummy_channel_update(short_chan_id: u64) -> msgs::ChannelUpdate { use bitcoin::secp256k1::ffi::Signature as FFISignature; let network = Network::Testnet; msgs::ChannelUpdate { signature: Signature::from(unsafe { FFISignature::new() }), contents: msgs::UnsignedChannelUpdate { chain_hash: genesis_block(network).header.block_hash(), short_channel_id: short_chan_id, timestamp: 0, flags: 0, cltv_expiry_delta: 0, htlc_minimum_msat: 0, htlc_maximum_msat: OptionalField::Absent, fee_base_msat: 0, fee_proportional_millionths: 0, excess_data: vec![], } } } pub struct TestRoutingMessageHandler { pub chan_upds_recvd: AtomicUsize, pub chan_anns_recvd: AtomicUsize, pub request_full_sync: AtomicBool, } impl TestRoutingMessageHandler { pub fn new() -> Self { TestRoutingMessageHandler { chan_upds_recvd: AtomicUsize::new(0), chan_anns_recvd: AtomicUsize::new(0), request_full_sync: AtomicBool::new(false), } } } impl msgs::RoutingMessageHandler for TestRoutingMessageHandler { fn handle_node_announcement(&self, _msg: &msgs::NodeAnnouncement) -> Result<bool, msgs::LightningError> { Err(msgs::LightningError { err: "".to_owned(), action: msgs::ErrorAction::IgnoreError }) } fn handle_channel_announcement(&self, _msg: &msgs::ChannelAnnouncement) -> Result<bool, msgs::LightningError> { self.chan_anns_recvd.fetch_add(1, Ordering::AcqRel); Err(msgs::LightningError { err: "".to_owned(), action: msgs::ErrorAction::IgnoreError }) } fn handle_channel_update(&self, _msg: &msgs::ChannelUpdate) -> Result<bool, msgs::LightningError> { self.chan_upds_recvd.fetch_add(1, Ordering::AcqRel); Err(msgs::LightningError { err: "".to_owned(), action: msgs::ErrorAction::IgnoreError }) } fn get_next_channel_announcements(&self, starting_point: u64, 
batch_amount: u8) -> Vec<(msgs::ChannelAnnouncement, Option<msgs::ChannelUpdate>, Option<msgs::ChannelUpdate>)> { let mut chan_anns = Vec::new(); const TOTAL_UPDS: u64 = 50; let end: u64 = cmp::min(starting_point + batch_amount as u64, TOTAL_UPDS); for i in starting_point..end { let chan_upd_1 = get_dummy_channel_update(i); let chan_upd_2 = get_dummy_channel_update(i); let chan_ann = get_dummy_channel_announcement(i); chan_anns.push((chan_ann, Some(chan_upd_1), Some(chan_upd_2))); } chan_anns } fn get_next_node_announcements(&self, _starting_point: Option<&PublicKey>, _batch_amount: u8) -> Vec<msgs::NodeAnnouncement> { Vec::new() } fn sync_routing_table(&self, _their_node_id: &PublicKey, _init_msg: &msgs::Init) {} fn handle_reply_channel_range(&self, _their_node_id: &PublicKey, _msg: msgs::ReplyChannelRange) -> Result<(), msgs::LightningError> { Ok(()) } fn handle_reply_short_channel_ids_end(&self, _their_node_id: &PublicKey, _msg: msgs::ReplyShortChannelIdsEnd) -> Result<(), msgs::LightningError> { Ok(()) } fn handle_query_channel_range(&self, _their_node_id: &PublicKey, _msg: msgs::QueryChannelRange) -> Result<(), msgs::LightningError> { Ok(()) } fn handle_query_short_channel_ids(&self, _their_node_id: &PublicKey, _msg: msgs::QueryShortChannelIds) -> Result<(), msgs::LightningError> { Ok(()) } } impl events::MessageSendEventsProvider for TestRoutingMessageHandler { fn get_and_clear_pending_msg_events(&self) -> Vec<events::MessageSendEvent> { vec![] } } pub struct TestLogger { level: Level, id: String, pub lines: Mutex<HashMap<(String, String), usize>>, } impl TestLogger { pub fn new() -> TestLogger { Self::with_id("".to_owned()) } pub fn with_id(id: String) -> TestLogger { TestLogger { level: Level::Trace, id, lines: Mutex::new(HashMap::new()) } } pub fn enable(&mut self, level: Level) { self.level = level; } pub fn assert_log(&self, module: String, line: String, count: usize) { let log_entries = self.lines.lock().unwrap(); assert_eq!(log_entries.get(&(module, 
line)), Some(&count)); } /// Search for the number of occurrence of the logged lines which /// 1. belongs to the specified module and /// 2. contains `line` in it. /// And asserts if the number of occurrences is the same with the given `count` pub fn assert_log_contains(&self, module: String, line: String, count: usize) { let log_entries = self.lines.lock().unwrap(); let l: usize = log_entries.iter().filter(|&(&(ref m, ref l), _c)| { m == &module && l.contains(line.as_str()) }).map(|(_, c) | { c }).sum(); assert_eq!(l, count) } /// Search for the number of occurrences of logged lines which /// 1. belong to the specified module and /// 2. match the given regex pattern. /// Assert that the number of occurrences equals the given `count` pub fn assert_log_regex(&self, module: String, pattern: regex::Regex, count: usize) { let log_entries = self.lines.lock().unwrap(); let l: usize = log_entries.iter().filter(|&(&(ref m, ref l), _c)| { m == &module && pattern.is_match(&l) }).map(|(_, c) | { c }).sum(); assert_eq!(l, count) } } impl Logger for TestLogger { fn log(&self, record: &Record) { *self.lines.lock().unwrap().entry((record.module_path.to_string(), format!("{}", record.args))).or_insert(0) += 1; if record.level >= self.level { #[cfg(feature = "std")] println!("{:<5} {} [{} : {}, {}] {}", record.level.to_string(), self.id, record.module_path, record.file, record.line, record.args); } } } pub struct TestKeysInterface { pub backing: keysinterface::KeysManager, pub override_session_priv: Mutex<Option<[u8; 32]>>, pub override_channel_id_priv: Mutex<Option<[u8; 32]>>, pub disable_revocation_policy_check: bool, enforcement_states: Mutex<HashMap<[u8;32], Arc<Mutex<EnforcementState>>>>, expectations: Mutex<Option<VecDeque<OnGetShutdownScriptpubkey>>>, } impl keysinterface::KeysInterface for TestKeysInterface { type Signer = EnforcingSigner; fn get_node_secret(&self) -> SecretKey { self.backing.get_node_secret() } fn get_inbound_payment_key_material(&self) -> 
keysinterface::KeyMaterial { self.backing.get_inbound_payment_key_material() } fn get_destination_script(&self) -> Script { self.backing.get_destination_script() } fn get_shutdown_scriptpubkey(&self) -> ShutdownScript { match &mut *self.expectations.lock().unwrap() { None => self.backing.get_shutdown_scriptpubkey(), Some(expectations) => match expectations.pop_front() { None => panic!("Unexpected get_shutdown_scriptpubkey"), Some(expectation) => expectation.returns, }, } } fn get_channel_signer(&self, inbound: bool, channel_value_satoshis: u64) -> EnforcingSigner { let keys = self.backing.get_channel_signer(inbound, channel_value_satoshis); let state = self.make_enforcement_state_cell(keys.commitment_seed); EnforcingSigner::new_with_revoked(keys, state, self.disable_revocation_policy_check) } fn get_secure_random_bytes(&self) -> [u8; 32] { let override_channel_id = self.override_channel_id_priv.lock().unwrap(); let override_session_key = self.override_session_priv.lock().unwrap(); if override_channel_id.is_some() && override_session_key.is_some() { panic!("We don't know which override key to use!"); } if let Some(key) = &*override_channel_id { return *key; } if let Some(key) = &*override_session_key { return *key; } self.backing.get_secure_random_bytes() } fn read_chan_signer(&self, buffer: &[u8]) -> Result<Self::Signer, msgs::DecodeError> { let mut reader = io::Cursor::new(buffer); let inner: InMemorySigner = ReadableArgs::read(&mut reader, self.get_node_secret())?; let state = self.make_enforcement_state_cell(inner.commitment_seed); Ok(EnforcingSigner::new_with_revoked( inner, state, self.disable_revocation_policy_check )) } fn sign_invoice(&self, hrp_bytes: &[u8], invoice_data: &[u5]) -> Result<RecoverableSignature, ()> { self.backing.sign_invoice(hrp_bytes, invoice_data) } } impl TestKeysInterface { pub fn new(seed: &[u8; 32], network: Network) -> Self { let now = Duration::from_secs(genesis_block(network).header.time as u64); Self { backing: 
keysinterface::KeysManager::new(seed, now.as_secs(), now.subsec_nanos()), override_session_priv: Mutex::new(None), override_channel_id_priv: Mutex::new(None), disable_revocation_policy_check: false, enforcement_states: Mutex::new(HashMap::new()), expectations: Mutex::new(None), } } /// Sets an expectation that [`keysinterface::KeysInterface::get_shutdown_scriptpubkey`] is /// called. pub fn expect(&self, expectation: OnGetShutdownScriptpubkey) -> &Self { self.expectations.lock().unwrap() .get_or_insert_with(|| VecDeque::new()) .push_back(expectation); self } pub fn derive_channel_keys(&self, channel_value_satoshis: u64, id: &[u8; 32]) -> EnforcingSigner { let keys = self.backing.derive_channel_keys(channel_value_satoshis, id); let state = self.make_enforcement_state_cell(keys.commitment_seed); EnforcingSigner::new_with_revoked(keys, state, self.disable_revocation_policy_check) } fn make_enforcement_state_cell(&self, commitment_seed: [u8; 32]) -> Arc<Mutex<EnforcementState>> { let mut states = self.enforcement_states.lock().unwrap(); if !states.contains_key(&commitment_seed) { let state = EnforcementState::new(); states.insert(commitment_seed, Arc::new(Mutex::new(state))); } let cell = states.get(&commitment_seed).unwrap(); Arc::clone(cell) } } pub(crate) fn panicking() -> bool { #[cfg(feature = "std")] let panicking = ::std::thread::panicking(); #[cfg(not(feature = "std"))] let panicking = false; return panicking; } impl Drop for TestKeysInterface { fn drop(&mut self) { if panicking() { return; } if let Some(expectations) = &*self.expectations.lock().unwrap() { if !expectations.is_empty() { panic!("Unsatisfied expectations: {:?}", expectations); } } } } /// An expectation that [`keysinterface::KeysInterface::get_shutdown_scriptpubkey`] was called and /// returns a [`ShutdownScript`]. pub struct OnGetShutdownScriptpubkey { /// A shutdown script used to close a channel. 
pub returns: ShutdownScript, } impl core::fmt::Debug for OnGetShutdownScriptpubkey { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { f.debug_struct("OnGetShutdownScriptpubkey").finish() } } pub struct TestChainSource { pub genesis_hash: BlockHash, pub utxo_ret: Mutex<Result<TxOut, chain::AccessError>>, pub watched_txn: Mutex<HashSet<(Txid, Script)>>, pub watched_outputs: Mutex<HashSet<(OutPoint, Script)>>, expectations: Mutex<Option<VecDeque<OnRegisterOutput>>>, } impl TestChainSource { pub fn new(network: Network) -> Self { let script_pubkey = Builder::new().push_opcode(opcodes::OP_TRUE).into_script(); Self { genesis_hash: genesis_block(network).block_hash(), utxo_ret: Mutex::new(Ok(TxOut { value: u64::max_value(), script_pubkey })), watched_txn: Mutex::new(HashSet::new()), watched_outputs: Mutex::new(HashSet::new()), expectations: Mutex::new(None), } } /// Sets an expectation that [`chain::Filter::register_output`] is called. pub fn expect(&self, expectation: OnRegisterOutput) -> &Self { self.expectations.lock().unwrap() .get_or_insert_with(|| VecDeque::new()) .push_back(expectation); self } } impl chain::Access for TestChainSource { fn get_utxo(&self, genesis_hash: &BlockHash, _short_channel_id: u64) -> Result<TxOut, chain::AccessError> { if self.genesis_hash != *genesis_hash { return Err(chain::AccessError::UnknownChain); } self.utxo_ret.lock().unwrap().clone() } } impl chain::Filter for TestChainSource { fn register_tx(&self, txid: &Txid, script_pubkey: &Script) { self.watched_txn.lock().unwrap().insert((*txid, script_pubkey.clone())); } fn register_output(&self, output: WatchedOutput) -> Option<(usize, Transaction)> { let dependent_tx = match &mut *self.expectations.lock().unwrap() { None => None, Some(expectations) => match expectations.pop_front() { None => { panic!("Unexpected register_output: {:?}", (output.outpoint, output.script_pubkey)); }, Some(expectation) => { assert_eq!(output.outpoint, expectation.outpoint()); 
assert_eq!(&output.script_pubkey, expectation.script_pubkey()); expectation.returns }, }, }; self.watched_outputs.lock().unwrap().insert((output.outpoint, output.script_pubkey)); dependent_tx } } impl Drop for TestChainSource { fn drop(&mut self) { if panicking() { return; } if let Some(expectations) = &*self.expectations.lock().unwrap() { if !expectations.is_empty() { panic!("Unsatisfied expectations: {:?}", expectations); } } } } /// An expectation that [`chain::Filter::register_output`] was called with a transaction output and /// returns an optional dependent transaction that spends the output in the same block. pub struct OnRegisterOutput { /// The transaction output to register. pub with: TxOutReference, /// A dependent transaction spending the output along with its position in the block. pub returns: Option<(usize, Transaction)>, } /// A transaction output as identified by an index into a transaction's output list. pub struct TxOutReference(pub Transaction, pub usize); impl OnRegisterOutput { fn outpoint(&self) -> OutPoint { let txid = self.with.0.txid(); let index = self.with.1 as u16; OutPoint { txid, index } } fn script_pubkey(&self) -> &Script { let index = self.with.1; &self.with.0.output[index].script_pubkey } } impl core::fmt::Debug for OnRegisterOutput { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { f.debug_struct("OnRegisterOutput") .field("outpoint", &self.outpoint()) .field("script_pubkey", self.script_pubkey()) .finish() } } /// A scorer useful in testing, when the passage of time isn't a concern. pub type TestScorer = ScorerUsingTime<Eternity>;
38.921703
272
0.729875
e29ecbb51c11ca01bc2d768cfc257402ec695ab8
6,075
#[doc = r" Value read from the register"] pub struct R { bits: u32, } #[doc = r" Value to write to the register"] pub struct W { bits: u32, } impl super::DCOC_OFFSET_5 { #[doc = r" Modifies the contents of the register"] #[inline] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits: bits }; let mut w = W { bits: bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r" Writes to the register"] #[inline] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } #[doc = r" Writes the reset value to the register"] #[inline] pub fn reset(&self) { self.write(|w| w) } } #[doc = r" Value of the field"] pub struct DCOC_BBA_OFFSET_IR { bits: u8, } impl DCOC_BBA_OFFSET_IR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Value of the field"] pub struct DCOC_BBA_OFFSET_QR { bits: u8, } impl DCOC_BBA_OFFSET_QR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Value of the field"] pub struct DCOC_TZA_OFFSET_IR { bits: u8, } impl DCOC_TZA_OFFSET_IR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Value of the field"] pub struct DCOC_TZA_OFFSET_QR { bits: u8, } impl DCOC_TZA_OFFSET_QR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Proxy"] pub struct _DCOC_BBA_OFFSET_IW<'a> { w: &'a mut W, } impl<'a> _DCOC_BBA_OFFSET_IW<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 63; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } 
#[doc = r" Proxy"] pub struct _DCOC_BBA_OFFSET_QW<'a> { w: &'a mut W, } impl<'a> _DCOC_BBA_OFFSET_QW<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 63; const OFFSET: u8 = 8; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _DCOC_TZA_OFFSET_IW<'a> { w: &'a mut W, } impl<'a> _DCOC_TZA_OFFSET_IW<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 255; const OFFSET: u8 = 16; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _DCOC_TZA_OFFSET_QW<'a> { w: &'a mut W, } impl<'a> _DCOC_TZA_OFFSET_QW<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 255; const OFFSET: u8 = 24; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bits 0:5 - DCOC BBA I-channel offset"] #[inline] pub fn dcoc_bba_offset_i(&self) -> DCOC_BBA_OFFSET_IR { let bits = { const MASK: u8 = 63; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u32) as u8 }; DCOC_BBA_OFFSET_IR { bits } } #[doc = "Bits 8:13 - DCOC BBA Q-channel offset"] #[inline] pub fn dcoc_bba_offset_q(&self) -> DCOC_BBA_OFFSET_QR { let bits = { const MASK: u8 = 63; const OFFSET: u8 = 8; ((self.bits >> OFFSET) & MASK as u32) as u8 }; DCOC_BBA_OFFSET_QR { bits } } #[doc = "Bits 16:23 - DCOC TZA I-channel offset"] #[inline] pub fn dcoc_tza_offset_i(&self) -> DCOC_TZA_OFFSET_IR { let bits = { const MASK: u8 = 255; const OFFSET: u8 = 16; ((self.bits >> OFFSET) & MASK as u32) as u8 }; DCOC_TZA_OFFSET_IR { bits } } #[doc = "Bits 24:31 - DCOC TZA Q-channel offset"] #[inline] pub fn 
dcoc_tza_offset_q(&self) -> DCOC_TZA_OFFSET_QR { let bits = { const MASK: u8 = 255; const OFFSET: u8 = 24; ((self.bits >> OFFSET) & MASK as u32) as u8 }; DCOC_TZA_OFFSET_QR { bits } } } impl W { #[doc = r" Reset value of the register"] #[inline] pub fn reset_value() -> W { W { bits: 0 } } #[doc = r" Writes raw bits to the register"] #[inline] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } #[doc = "Bits 0:5 - DCOC BBA I-channel offset"] #[inline] pub fn dcoc_bba_offset_i(&mut self) -> _DCOC_BBA_OFFSET_IW { _DCOC_BBA_OFFSET_IW { w: self } } #[doc = "Bits 8:13 - DCOC BBA Q-channel offset"] #[inline] pub fn dcoc_bba_offset_q(&mut self) -> _DCOC_BBA_OFFSET_QW { _DCOC_BBA_OFFSET_QW { w: self } } #[doc = "Bits 16:23 - DCOC TZA I-channel offset"] #[inline] pub fn dcoc_tza_offset_i(&mut self) -> _DCOC_TZA_OFFSET_IW { _DCOC_TZA_OFFSET_IW { w: self } } #[doc = "Bits 24:31 - DCOC TZA Q-channel offset"] #[inline] pub fn dcoc_tza_offset_q(&mut self) -> _DCOC_TZA_OFFSET_QW { _DCOC_TZA_OFFSET_QW { w: self } } }
26.528384
64
0.533333
1dc4520344b7d085662630d8ecb724b18dd8af51
3,243
#![allow(clippy::module_inception)] #![allow(clippy::upper_case_acronyms)] #![allow(clippy::large_enum_variant)] #![allow(clippy::wrong_self_convention)] #![allow(clippy::should_implement_trait)] #![allow(clippy::blacklisted_name)] #![allow(clippy::vec_init_then_push)] #![allow(clippy::type_complexity)] #![allow(rustdoc::bare_urls)] #![warn(missing_docs)] //! <p>The Amazon Web Services Snow Family provides a petabyte-scale data transport solution that uses //! secure devices to transfer large amounts of data between your on-premises data centers and //! Amazon Simple Storage Service (Amazon S3). The Snow Family commands described here provide access to the same //! functionality that is available in the Amazon Web Services Snow Family Management Console, which enables you to create //! and manage jobs for a Snow Family device. To transfer data locally with a Snow Family device, //! you'll need to use the Snowball Edge client or the Amazon S3 API Interface for Snowball or OpsHub for Snow Family. For more information, see the <a href="https://docs.aws.amazon.com/AWSImportExport/latest/ug/api-reference.html">User Guide</a>.</p> //! //! # Crate Organization //! //! The entry point for most customers will be [`Client`]. [`Client`] exposes one method for each API offered //! by the service. //! //! Some APIs require complex or nested arguments. These exist in [`model`](crate::model). //! //! Lastly, errors that can be returned by the service are contained within [`error`]. [`Error`] defines a meta //! error encompassing all possible errors that can be returned by the service. //! //! The other modules within this crate are not required for normal usage. //! //! # Examples //! Examples can be found [here](https://github.com/awslabs/aws-sdk-rust/tree/main/examples/snowball). // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT. 
pub use error_meta::Error; #[doc(inline)] pub use config::Config; mod aws_endpoint; /// Client and fluent builders for calling the service. pub mod client; /// Configuration for the service. pub mod config; /// Errors that can occur when calling the service. pub mod error; mod error_meta; /// Input structures for operations. pub mod input; mod json_deser; mod json_errors; mod json_ser; /// Generated accessors for nested fields pub mod lens; pub mod middleware; /// Data structures used by operation inputs/outputs. pub mod model; mod no_credentials; /// All operations that this crate can perform. pub mod operation; mod operation_deser; mod operation_ser; /// Output structures for operations. pub mod output; /// Paginators for the service pub mod paginator; /// Crate version number. pub static PKG_VERSION: &str = env!("CARGO_PKG_VERSION"); /// Re-exported types from supporting crates. pub mod types { pub use aws_smithy_http::result::SdkError; pub use aws_smithy_types::DateTime; } static API_METADATA: aws_http::user_agent::ApiMetadata = aws_http::user_agent::ApiMetadata::new("snowball", PKG_VERSION); pub use aws_smithy_http::endpoint::Endpoint; pub use aws_smithy_types::retry::RetryConfig; pub use aws_types::app_name::AppName; pub use aws_types::region::Region; pub use aws_types::Credentials; #[doc(inline)] pub use client::Client;
39.54878
251
0.756707
62e46b364a9f29a15ac2321580112f3228a20338
2,252
#[derive(Clone, Copy, PartialEq, Eq)] pub enum ScrollBehavior { Auto, Smooth, } impl Default for ScrollBehavior { fn default() -> Self { ScrollBehavior::Auto } } impl From<ScrollBehavior> for web_sys::ScrollBehavior { fn from(scroll_behaviour: ScrollBehavior) -> web_sys::ScrollBehavior { match scroll_behaviour { ScrollBehavior::Auto => web_sys::ScrollBehavior::Auto, ScrollBehavior::Smooth => web_sys::ScrollBehavior::Smooth, } } } macro_rules! impl_scrollable_for_element { ($element:ident) => { impl $crate::scroll::scrollable_seal::Seal for $element {} impl $crate::scroll::Scrollable for $element { // Note: web_sys currently declares scroll_left and scroll_top as integers, but they // are specced as doubles. Convert them for now for BC; resolve this in web_sys at some // point. fn scroll_left(&self) -> f64 { use crate::dom::element_seal::Seal; self.as_web_sys_element().scroll_left() as f64 } fn scroll_top(&self) -> f64 { use crate::dom::element_seal::Seal; self.as_web_sys_element().scroll_top() as f64 } fn scroll_to(&self, options: $crate::scroll::ScrollToOptions) { use crate::dom::element_seal::Seal; let mut opts = web_sys::ScrollToOptions::new(); opts.left(options.left.into()); opts.top(options.top.into()); opts.behavior(options.behavior.into()); self.as_web_sys_element() .scroll_to_with_scroll_to_options(&opts); } fn scroll_by(&self, options: $crate::scroll::ScrollByOptions) { use crate::dom::element_seal::Seal; let mut opts = web_sys::ScrollToOptions::new(); opts.left(options.x.into()); opts.top(options.y.into()); opts.behavior(options.behavior.into()); self.as_web_sys_element() .scroll_by_with_scroll_to_options(&opts); } } }; } pub(crate) use impl_scrollable_for_element;
31.277778
99
0.571492
effd4598b43a251c4e5670c83ed257d5062f2fab
1,025
#![crate_name = "uu_base64"] // This file is part of the uutils coreutils package. // // (c) Jordy Dickinson <jordy.dickinson@gmail.com> // (c) Jian Zeng <anonymousknight96@gmail.com> // // For the full copyright and license information, please view the LICENSE file // that was distributed with this source code. // #[macro_use] extern crate uucore; use uucore::encoding::Format; mod base_common; static SYNTAX: &str = "[OPTION]... [FILE]"; static SUMMARY: &str = "Base64 encode or decode FILE, or standard input, to standard output."; static LONG_HELP: &str = " With no FILE, or when FILE is -, read standard input. The data are encoded as described for the base64 alphabet in RFC 3548. When decoding, the input may contain newlines in addition to the bytes of the formal base64 alphabet. Use --ignore-garbage to attempt to recover from any other non-alphabet bytes in the encoded stream. "; pub fn uumain(args: Vec<String>) -> i32 { base_common::execute(args, SYNTAX, SUMMARY, LONG_HELP, Format::Base64) }
31.060606
94
0.731707
899c6304f8955b8f4a5de034c2a20c23c6bee4b4
3,862
use crate::*; use core::convert::TryFrom; #[derive(Debug)] pub struct EmbeddedPdv<'a> { pub identification: PdvIdentification<'a>, pub data_value_descriptor: Option<ObjectDescriptor<'a>>, pub data_value: &'a [u8], } #[derive(Debug, PartialEq)] pub enum PdvIdentification<'a> { Syntaxes { s_abstract: Oid<'a>, s_transfer: Oid<'a>, }, Syntax(Oid<'a>), PresentationContextId(Integer<'a>), ContextNegotiation { presentation_context_id: Integer<'a>, presentation_syntax: Oid<'a>, }, TransferSyntax(Oid<'a>), Fixed, } impl<'a> TryFrom<Any<'a>> for EmbeddedPdv<'a> { type Error = Error; fn try_from(any: Any<'a>) -> Result<Self> { let data = any.data; // AUTOMATIC TAGS means all values will be tagged (IMPLICIT) // [0] -> identification let (rem, seq0) = TaggedParser::<Explicit, Any>::parse_ber(Class::ContextSpecific, Tag(0), data)?; let inner = seq0.inner; let identification = match inner.tag() { Tag(0) => { // syntaxes SEQUENCE { // abstract OBJECT IDENTIFIER, // transfer OBJECT IDENTIFIER // }, let (rem, s_abstract) = Oid::from_ber(inner.data)?; let (_, s_transfer) = Oid::from_ber(rem)?; PdvIdentification::Syntaxes { s_abstract, s_transfer, } } Tag(1) => { // syntax OBJECT IDENTIFIER let oid = Oid::new(inner.data.into()); PdvIdentification::Syntax(oid) } Tag(2) => { // presentation-context-id INTEGER let i = Integer::new(inner.data); PdvIdentification::PresentationContextId(i) } Tag(3) => { // context-negotiation SEQUENCE { // presentation-context-id INTEGER, // transfer-syntax OBJECT IDENTIFIER // }, let (rem, presentation_context_id) = Integer::from_ber(inner.data)?; let (_, presentation_syntax) = Oid::from_ber(rem)?; PdvIdentification::ContextNegotiation { presentation_context_id, presentation_syntax, } } Tag(4) => { // transfer-syntax OBJECT IDENTIFIER let oid = Oid::new(inner.data.into()); PdvIdentification::TransferSyntax(oid) } Tag(5) => { // fixed NULL PdvIdentification::Fixed } _ => { return Err(inner .tag() .invalid_value("Invalid identification tag in EMBEDDED PDV")) 
} }; // [1] -> data-value-descriptor ObjectDescriptor OPTIONAL // *BUT* WITH COMPONENTS data-value-descriptor ABSENT // XXX this should be parse_ber? // let (rem, data_value_descriptor) = // TaggedOptional::from(1).parse_der(rem, |_, inner| ObjectDescriptor::from_ber(inner))?; let (rem, data_value_descriptor) = (rem, None); // [2] -> data-value OCTET STRING let (_, data_value) = TaggedParser::<Implicit, &[u8]>::parse_ber(Class::ContextSpecific, Tag(2), rem)?; let data_value = data_value.inner; let obj = EmbeddedPdv { identification, data_value_descriptor, data_value, }; Ok(obj) } } impl CheckDerConstraints for EmbeddedPdv<'_> { fn check_constraints(any: &Any) -> Result<()> { any.header.length().assert_definite()?; any.header.assert_constructed()?; Ok(()) } }
34.176991
101
0.515536
0357c0d8eca6c510ed712409acf38958137fe3a3
6,893
#[doc = "Reader of register IF"] pub type R = crate::R<u32, super::IF>; #[doc = "Reader of field `VMONAVDDFALL`"] pub type VMONAVDDFALL_R = crate::R<bool, bool>; #[doc = "Reader of field `VMONAVDDRISE`"] pub type VMONAVDDRISE_R = crate::R<bool, bool>; #[doc = "Reader of field `VMONALTAVDDFALL`"] pub type VMONALTAVDDFALL_R = crate::R<bool, bool>; #[doc = "Reader of field `VMONALTAVDDRISE`"] pub type VMONALTAVDDRISE_R = crate::R<bool, bool>; #[doc = "Reader of field `VMONDVDDFALL`"] pub type VMONDVDDFALL_R = crate::R<bool, bool>; #[doc = "Reader of field `VMONDVDDRISE`"] pub type VMONDVDDRISE_R = crate::R<bool, bool>; #[doc = "Reader of field `VMONIO0FALL`"] pub type VMONIO0FALL_R = crate::R<bool, bool>; #[doc = "Reader of field `VMONIO0RISE`"] pub type VMONIO0RISE_R = crate::R<bool, bool>; #[doc = "Reader of field `VMONIO1FALL`"] pub type VMONIO1FALL_R = crate::R<bool, bool>; #[doc = "Reader of field `VMONIO1RISE`"] pub type VMONIO1RISE_R = crate::R<bool, bool>; #[doc = "Reader of field `R5VREADY`"] pub type R5VREADY_R = crate::R<bool, bool>; #[doc = "Reader of field `VMONBUVDDFALL`"] pub type VMONBUVDDFALL_R = crate::R<bool, bool>; #[doc = "Reader of field `VMONBUVDDRISE`"] pub type VMONBUVDDRISE_R = crate::R<bool, bool>; #[doc = "Reader of field `PFETOVERCURRENTLIMIT`"] pub type PFETOVERCURRENTLIMIT_R = crate::R<bool, bool>; #[doc = "Reader of field `NFETOVERCURRENTLIMIT`"] pub type NFETOVERCURRENTLIMIT_R = crate::R<bool, bool>; #[doc = "Reader of field `DCDCLPRUNNING`"] pub type DCDCLPRUNNING_R = crate::R<bool, bool>; #[doc = "Reader of field `DCDCLNRUNNING`"] pub type DCDCLNRUNNING_R = crate::R<bool, bool>; #[doc = "Reader of field `DCDCINBYPASS`"] pub type DCDCINBYPASS_R = crate::R<bool, bool>; #[doc = "Reader of field `BURDY`"] pub type BURDY_R = crate::R<bool, bool>; #[doc = "Reader of field `R5VVSINT`"] pub type R5VVSINT_R = crate::R<bool, bool>; #[doc = "Reader of field `EM23WAKEUP`"] pub type EM23WAKEUP_R = crate::R<bool, bool>; #[doc = "Reader of field 
`VSCALEDONE`"] pub type VSCALEDONE_R = crate::R<bool, bool>; #[doc = "Reader of field `TEMP`"] pub type TEMP_R = crate::R<bool, bool>; #[doc = "Reader of field `TEMPLOW`"] pub type TEMPLOW_R = crate::R<bool, bool>; #[doc = "Reader of field `TEMPHIGH`"] pub type TEMPHIGH_R = crate::R<bool, bool>; impl R { #[doc = "Bit 0 - VMON AVDD Channel Fall"] #[inline(always)] pub fn vmonavddfall(&self) -> VMONAVDDFALL_R { VMONAVDDFALL_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - VMON AVDD Channel Rise"] #[inline(always)] pub fn vmonavddrise(&self) -> VMONAVDDRISE_R { VMONAVDDRISE_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 2 - Alternate VMON AVDD Channel Fall"] #[inline(always)] pub fn vmonaltavddfall(&self) -> VMONALTAVDDFALL_R { VMONALTAVDDFALL_R::new(((self.bits >> 2) & 0x01) != 0) } #[doc = "Bit 3 - Alternate VMON AVDD Channel Rise"] #[inline(always)] pub fn vmonaltavddrise(&self) -> VMONALTAVDDRISE_R { VMONALTAVDDRISE_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bit 4 - VMON DVDD Channel Fall"] #[inline(always)] pub fn vmondvddfall(&self) -> VMONDVDDFALL_R { VMONDVDDFALL_R::new(((self.bits >> 4) & 0x01) != 0) } #[doc = "Bit 5 - VMON DVDD Channel Rise"] #[inline(always)] pub fn vmondvddrise(&self) -> VMONDVDDRISE_R { VMONDVDDRISE_R::new(((self.bits >> 5) & 0x01) != 0) } #[doc = "Bit 6 - VMON IOVDD0 Channel Fall"] #[inline(always)] pub fn vmonio0fall(&self) -> VMONIO0FALL_R { VMONIO0FALL_R::new(((self.bits >> 6) & 0x01) != 0) } #[doc = "Bit 7 - VMON IOVDD0 Channel Rise"] #[inline(always)] pub fn vmonio0rise(&self) -> VMONIO0RISE_R { VMONIO0RISE_R::new(((self.bits >> 7) & 0x01) != 0) } #[doc = "Bit 8 - VMON IOVDD1 Channel Fall"] #[inline(always)] pub fn vmonio1fall(&self) -> VMONIO1FALL_R { VMONIO1FALL_R::new(((self.bits >> 8) & 0x01) != 0) } #[doc = "Bit 9 - VMON IOVDD1 Channel Rise"] #[inline(always)] pub fn vmonio1rise(&self) -> VMONIO1RISE_R { VMONIO1RISE_R::new(((self.bits >> 9) & 0x01) != 0) } #[doc = "Bit 10 - 5V Regulator is Ready to Use"] 
#[inline(always)] pub fn r5vready(&self) -> R5VREADY_R { R5VREADY_R::new(((self.bits >> 10) & 0x01) != 0) } #[doc = "Bit 12 - VMON BACKUP Channel Fall"] #[inline(always)] pub fn vmonbuvddfall(&self) -> VMONBUVDDFALL_R { VMONBUVDDFALL_R::new(((self.bits >> 12) & 0x01) != 0) } #[doc = "Bit 13 - VMON BUVDD Channel Rise"] #[inline(always)] pub fn vmonbuvddrise(&self) -> VMONBUVDDRISE_R { VMONBUVDDRISE_R::new(((self.bits >> 13) & 0x01) != 0) } #[doc = "Bit 16 - PFET Current Limit Hit"] #[inline(always)] pub fn pfetovercurrentlimit(&self) -> PFETOVERCURRENTLIMIT_R { PFETOVERCURRENTLIMIT_R::new(((self.bits >> 16) & 0x01) != 0) } #[doc = "Bit 17 - NFET Current Limit Hit"] #[inline(always)] pub fn nfetovercurrentlimit(&self) -> NFETOVERCURRENTLIMIT_R { NFETOVERCURRENTLIMIT_R::new(((self.bits >> 17) & 0x01) != 0) } #[doc = "Bit 18 - LP Mode is Running"] #[inline(always)] pub fn dcdclprunning(&self) -> DCDCLPRUNNING_R { DCDCLPRUNNING_R::new(((self.bits >> 18) & 0x01) != 0) } #[doc = "Bit 19 - LN Mode is Running"] #[inline(always)] pub fn dcdclnrunning(&self) -> DCDCLNRUNNING_R { DCDCLNRUNNING_R::new(((self.bits >> 19) & 0x01) != 0) } #[doc = "Bit 20 - DCDC is in Bypass"] #[inline(always)] pub fn dcdcinbypass(&self) -> DCDCINBYPASS_R { DCDCINBYPASS_R::new(((self.bits >> 20) & 0x01) != 0) } #[doc = "Bit 22 - Backup Functionality Ready Interrupt Flag"] #[inline(always)] pub fn burdy(&self) -> BURDY_R { BURDY_R::new(((self.bits >> 22) & 0x01) != 0) } #[doc = "Bit 23 - 5V Regulator Voltage Update Done"] #[inline(always)] pub fn r5vvsint(&self) -> R5VVSINT_R { R5VVSINT_R::new(((self.bits >> 23) & 0x01) != 0) } #[doc = "Bit 24 - Wakeup IRQ From EM2 and EM3"] #[inline(always)] pub fn em23wakeup(&self) -> EM23WAKEUP_R { EM23WAKEUP_R::new(((self.bits >> 24) & 0x01) != 0) } #[doc = "Bit 25 - Voltage Scale Steps Done IRQ"] #[inline(always)] pub fn vscaledone(&self) -> VSCALEDONE_R { VSCALEDONE_R::new(((self.bits >> 25) & 0x01) != 0) } #[doc = "Bit 29 - New Temperature Measurement Valid"] 
#[inline(always)] pub fn temp(&self) -> TEMP_R { TEMP_R::new(((self.bits >> 29) & 0x01) != 0) } #[doc = "Bit 30 - Temperature Low Limit Reached"] #[inline(always)] pub fn templow(&self) -> TEMPLOW_R { TEMPLOW_R::new(((self.bits >> 30) & 0x01) != 0) } #[doc = "Bit 31 - Temperature High Limit Reached"] #[inline(always)] pub fn temphigh(&self) -> TEMPHIGH_R { TEMPHIGH_R::new(((self.bits >> 31) & 0x01) != 0) } }
42.549383
99
0.618744
8a796dc8c9e9f4959d1e5ae5ee54d058957ec088
10,698
use crate::image::Image; pub use crate::prelude::*; use fltk_sys::menu::*; use std::{ ffi::{CStr, CString}, mem, os::raw, }; /// Creates a menu bar #[derive(WidgetExt, MenuExt, Debug)] pub struct MenuBar { _inner: *mut Fl_Menu_Bar, _tracker: *mut fltk_sys::fl::Fl_Widget_Tracker, } /// Creates a menu button #[derive(WidgetExt, MenuExt, Debug)] pub struct MenuButton { _inner: *mut Fl_Menu_Button, _tracker: *mut fltk_sys::fl::Fl_Widget_Tracker, } /// Creates a menu choice #[derive(WidgetExt, MenuExt, Debug)] pub struct Choice { _inner: *mut Fl_Choice, _tracker: *mut fltk_sys::fl::Fl_Widget_Tracker, } /// Creates a menu item #[derive(Debug, Clone)] pub struct MenuItem { _inner: *mut Fl_Menu_Item, _parent: *const MenuBar, _alloc: bool, } /// Defines the menu flag for any added menu items using the add() method #[repr(i32)] #[derive(Debug, Copy, Clone, PartialEq)] pub enum MenuFlag { Normal = 0, Inactive = 1, Toggle = 2, Value = 4, Radio = 8, Invisible = 0x10, SubmenuPointer = 0x20, Submenu = 0x40, MenuDivider = 0x80, MenuHorizontal = 0x100, } impl MenuItem { /// Initializes a new window, useful for popup menus pub fn new(choices: Vec<&str>) -> MenuItem { unsafe { let sz = choices.len(); let mut temp: Vec<*mut raw::c_char> = vec![]; for choice in choices { temp.push(CString::new(choice).unwrap().into_raw()); } let item_ptr = Fl_Menu_Item_new(temp.as_ptr() as *mut *mut raw::c_char, sz as i32); assert!(!item_ptr.is_null()); MenuItem { _inner: item_ptr, _parent: 0 as *const MenuBar, _alloc: true, } } } /// Creates a popup menu at the specified coordinates and returns its choice pub fn popup(&mut self, x: i32, y: i32) -> Option<MenuItem> { assert!(!self.was_deleted() && !self._inner.is_null()); unsafe { let item = Fl_Menu_Item_popup(self._inner, x, y); if item.is_null() { None } else { let item = MenuItem { _inner: item as *mut Fl_Menu_Item, _parent: 0 as *const MenuBar, _alloc: false, }; Some(item) } } } /// Returns the label of the menu item pub fn label(&self) -> 
Option<String> { assert!(!self.was_deleted() && !self._inner.is_null()); unsafe { let label_ptr = Fl_Menu_Item_label(self._inner); if label_ptr.is_null() { return None; } Some( CStr::from_ptr(label_ptr as *mut raw::c_char) .to_string_lossy() .to_string(), ) } } /// Sets the label of the menu item pub fn set_label(&mut self, txt: &str) { assert!(!self.was_deleted() && !self._inner.is_null()); unsafe { let txt = CString::new(txt).unwrap(); Fl_Menu_Item_set_label(self._inner, txt.into_raw()); } } /// Returns the label type of the menu item pub fn label_type<T: WidgetType>(&self) -> T { assert!(!self.was_deleted() && !self._inner.is_null()); unsafe { T::from_i32(Fl_Menu_Item_label_type(self._inner)) } } /// Sets the label type of the menu item pub fn set_label_type<T: WidgetType>(&mut self, typ: T) { assert!(!self.was_deleted() && !self._inner.is_null()); unsafe { Fl_Menu_Item_set_label_type(self._inner, typ.to_int()); } } /// Returns the label color of the menu item pub fn label_color(&self) -> Color { assert!(!self.was_deleted() && !self._inner.is_null()); unsafe { mem::transmute(Fl_Menu_Item_label_color(self._inner)) } } /// Sets the label color of the menu item pub fn set_label_color(&mut self, color: Color) { assert!(!self.was_deleted() && !self._inner.is_null()); unsafe { Fl_Menu_Item_set_label_color(self._inner, color as u32) } } /// Returns the label font of the menu item pub fn label_font(&self) -> Font { assert!(!self.was_deleted() && !self._inner.is_null()); unsafe { mem::transmute(Fl_Menu_Item_label_font(self._inner)) } } /// Sets the label font of the menu item pub fn set_label_font(&mut self, font: Font) { assert!(!self.was_deleted() && !self._inner.is_null()); unsafe { Fl_Menu_Item_set_label_font(self._inner, font as i32) } } /// Returns the label size of the menu item pub fn label_size(&self) -> u32 { assert!(!self.was_deleted() && !self._inner.is_null()); unsafe { Fl_Menu_Item_label_size(self._inner) as u32 } } /// Sets the label size of the menu item 
pub fn set_label_size(&mut self, sz: u32) { assert!(!self.was_deleted() && !self._inner.is_null()); unsafe { Fl_Menu_Item_set_label_size(self._inner, sz as i32) } } /// Returns the value of the menu item pub fn value(&self) -> bool { assert!(!self.was_deleted() && !self._inner.is_null()); unsafe { match Fl_Menu_Item_value(self._inner) { 0 => false, _ => true, } } } /// Sets the menu item pub fn set(&mut self) { assert!(!self.was_deleted() && !self._inner.is_null()); unsafe { Fl_Menu_Item_set(self._inner) } } /// Clears the menu item pub fn clear(&mut self) { assert!(!self.was_deleted() && !self._inner.is_null()); unsafe { Fl_Menu_Item_clear(self._inner) } } /// Returns whether the menu item is visible or not pub fn visible(&self) -> bool { assert!(!self.was_deleted() && !self._inner.is_null()); unsafe { match Fl_Menu_Item_visible(self._inner) { 0 => false, _ => true, } } } /// Returns whether the menu item is active pub fn active(&mut self) -> bool { assert!(!self.was_deleted() && !self._inner.is_null()); unsafe { match Fl_Menu_Item_active(self._inner) { 0 => false, _ => true, } } } /// Activates the menu item pub fn activate(&mut self) { assert!(!self.was_deleted() && !self._inner.is_null()); unsafe { Fl_Menu_Item_activate(self._inner) } } /// Deactivates the menu item pub fn deactivate(&mut self) { assert!(!self.was_deleted() && !self._inner.is_null()); unsafe { Fl_Menu_Item_deactivate(self._inner) } } /// Returns whether a menu item is a submenu pub fn is_submenu(&self) -> bool { assert!(!self.was_deleted() && !self._inner.is_null()); unsafe { Fl_Menu_Item_submenu(self._inner) != 0 } } /// Shows the menu item pub fn show(&mut self) { assert!(!self.was_deleted() && !self._inner.is_null()); unsafe { Fl_Menu_Item_show(self._inner) } } /// Hides the menu item pub fn hide(&mut self) { assert!(!self.was_deleted() && !self._inner.is_null()); unsafe { Fl_Menu_Item_hide(self._inner) } } /// Get the next menu item pub fn next(&mut self, idx: u32) -> Option<MenuItem> { 
assert!(!self.was_deleted() && !self._inner.is_null()); unsafe { let ptr = Fl_Menu_Item_next(self._inner, idx as i32); if ptr.is_null() { None } else { Some(MenuItem { _inner: ptr, _parent: self._parent, _alloc: self._alloc, }) } } } /// Get the user data pub unsafe fn user_data(&self) -> Option<Box<dyn FnMut()>> { let ptr = Fl_Menu_Item_user_data(self._inner); if ptr.is_null() { None } else { let x = ptr as *mut Box<dyn FnMut()>; let x = Box::from_raw(x); Some(*x) } } /// Manually set the user data pub unsafe fn set_user_data(&mut self, data: *mut raw::c_void) { Fl_Menu_Item_set_user_data(self._inner, data) } /// Set a callback for the menu item pub fn set_callback(&mut self, cb: Box<dyn FnMut()>) { assert!(!self.was_deleted() && !self._inner.is_null()); unsafe { unsafe extern "C" fn shim( _wid: *mut fltk_sys::menu::Fl_Widget, data: *mut raw::c_void, ) { let a: *mut Box<dyn FnMut()> = mem::transmute(data); let f: &mut (dyn FnMut()) = &mut **a; f(); } let a: *mut Box<dyn FnMut()> = Box::into_raw(Box::new(cb)); let data: *mut raw::c_void = mem::transmute(a); let callback: fltk_sys::menu::Fl_Callback = Some(shim); Fl_Menu_Item_callback(self._inner, callback, data); } } /// Manually unset a callback pub unsafe fn unset_callback(&mut self) { let old_data = self.user_data(); if old_data.is_some() { let _ = old_data.unwrap(); self.set_user_data(0 as *mut raw::c_void); } } /// Delete the old callback and replace it with an empty one pub fn safe_unset_callback(&mut self) { assert!(!self.was_deleted() && !self._inner.is_null()); unsafe { self.unset_callback(); } self.set_callback(Box::new(move || { /* do nothing */ })); } /// Check if a menu item was deleted pub fn was_deleted(&self) -> bool { if !self._parent.is_null() { let parent = unsafe { Fl_Menu_Bar_menu((*self._parent)._inner) }; if parent.is_null() { true } else { false } } else { false } } } unsafe impl Send for MenuItem {} unsafe impl Sync for MenuItem {} impl Drop for MenuItem { fn drop(&mut self) { if 
self._alloc { unsafe { Fl_Menu_Item_delete(self._inner) } self._inner = 0 as *mut Fl_Menu_Item; } } } #[cfg(test)] mod menu { use super::*; #[test] fn label() { let mut menu = MenuBar::new(0, 0, 0, 0, "hello"); menu.set_label("cloned"); } #[test] fn tooltip() { let mut menu = MenuBar::new(0, 0, 0, 0, "hello"); menu.set_tooltip("tooltip"); assert!(menu.tooltip().unwrap() == "tooltip"); } }
29.966387
95
0.543933
758250df1a4801e1aba90d730af531828137a356
486
#[macro_use] pub extern crate integra8; // regardless of the order the mod are defined in file // integra8 will run these components defined in other files // in lexicographical order mod b_test_mod; mod a_test_mod; // # Test main // Test main is required to setup the application entrypoint and bootstrap the test framework main_test! { console_output: integra8_serde_formatter::SerdeFormatter, } #[integration_test] fn test_c() { println!("Test C was called first"); }
22.090909
93
0.753086
7a1e85bfd89797d7dfc48f0041317d226ebb86cf
2,219
use std::fmt::{self, Debug, Display, Formatter}; // https://dev.mysql.com/doc/dev/mysql-server/8.0.12/binary__log__types_8h.html // https://mariadb.com/kb/en/library/resultset/#field-types #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub struct TypeId(pub u8); macro_rules! type_id_consts { ($( pub const $name:ident: TypeId = TypeId($id:literal); )*) => ( impl TypeId { $(pub const $name: TypeId = TypeId($id);)* #[doc(hidden)] pub fn type_name(&self) -> &'static str { match self.0 { $($id => stringify!($name),)* _ => "<unknown>" } } } ) } impl Display for TypeId { fn fmt(&self, f: &mut Formatter) -> fmt::Result { write!(f, "{} ({:#x})", self.type_name(), self.0) } } // https://github.com/google/mysql/blob/c01fc2134d439282a21a2ddf687566e198ddee28/include/mysql_com.h#L429 type_id_consts! { pub const NULL: TypeId = TypeId(6); // String: CHAR, VARCHAR, TEXT // Bytes: BINARY, VARBINARY, BLOB pub const CHAR: TypeId = TypeId(254); // or BINARY pub const VAR_CHAR: TypeId = TypeId(253); // or VAR_BINARY pub const TEXT: TypeId = TypeId(252); // or BLOB // Enum pub const ENUM: TypeId = TypeId(247); // More Bytes pub const TINY_BLOB: TypeId = TypeId(249); pub const MEDIUM_BLOB: TypeId = TypeId(250); pub const LONG_BLOB: TypeId = TypeId(251); // Numeric: TINYINT, SMALLINT, INT, BIGINT pub const TINY_INT: TypeId = TypeId(1); pub const SMALL_INT: TypeId = TypeId(2); pub const INT: TypeId = TypeId(3); pub const BIG_INT: TypeId = TypeId(8); // pub const MEDIUM_INT: TypeId = TypeId(9); // Numeric: FLOAT, DOUBLE pub const FLOAT: TypeId = TypeId(4); pub const DOUBLE: TypeId = TypeId(5); // pub const NEWDECIMAL: TypeId = TypeId(246); // Date/Time: DATE, TIME, DATETIME, TIMESTAMP pub const DATE: TypeId = TypeId(10); pub const TIME: TypeId = TypeId(11); pub const DATETIME: TypeId = TypeId(12); pub const TIMESTAMP: TypeId = TypeId(7); } impl Default for TypeId { fn default() -> TypeId { TypeId::NULL } }
29.986486
105
0.597566
2f6b2bb7abb1417885f548779f5f48b316958b6e
12,328
// Note these imports are only for testing, not required/linked for the library extern crate std; use std::collections::HashMap; use std::fs::File; use std::io::{BufRead, BufReader}; use std::vec::Vec; use std::{env, format, println}; use crate::*; pub const TV_LEN: usize = 1024; pub const ROUND_START: i64 = -97; pub const ROUND_END: i64 = 1023; // TEST VECTOR FILE LOAD/PARSE ######################################################################################### // (state, buffer) tuple #[derive(Debug, PartialEq, Eq, Hash)] struct RoundData { state: [u8; E128_STATE_LEN], buf: [u8; E128_BUF_LEN], } // (state, buffer) tuples by round, all same key and IV struct Test { key: [u8; E128_KEY_LEN], iv: [u8; E128_IV_LEN], tv: [u8; TV_LEN], rounds: HashMap<i64, RoundData>, } impl Test { pub fn new() -> Test { Test { key: [0; E128_KEY_LEN], iv: [0; E128_IV_LEN], tv: [0; TV_LEN], rounds: HashMap::new(), } } } // Possible states for file parsing state machine #[derive(Debug, PartialEq, Eq, Hash)] enum TestCollectorState { Idle, InputRound, InputState, InputBuffer, InputKey, InputIV, InputTV, } // State machine parses file line-by-line to collect Tests struct TestCollector { collect_state: TestCollectorState, tests: Vec<Test>, curr_round: i64, curr_state: Vec<u8>, curr_buf: Vec<u8>, curr_key: Vec<u8>, curr_iv: Vec<u8>, curr_tv: Vec<u8>, round_set: bool, } impl TestCollector { const DELIM_ROUND: &'static str = "round = "; const DELIM_STATE: &'static str = "state : "; const DELIM_BUFFER: &'static str = "buffer : "; const DELIM_KEY: &'static str = "Key :"; const DELIM_IV: &'static str = "IV :"; const DELIM_TV: &'static str = "Test Vector:"; // Public APIs ----------------------------------------------------------------------------------------------------- // Construct state machine pub fn new() -> TestCollector { let mut tc = TestCollector { collect_state: TestCollectorState::Idle, tests: Vec::new(), curr_round: 0, curr_state: Vec::new(), curr_buf: Vec::new(), curr_key: 
Vec::new(), curr_iv: Vec::new(), curr_tv: Vec::new(), round_set: false, }; tc.tests.push(Test::new()); tc } // Update state machine pub fn process_line(&mut self, line: &str) { if line.contains(TestCollector::DELIM_ROUND) { self.collect_state = TestCollectorState::InputRound; } else if line.contains(TestCollector::DELIM_STATE) { self.collect_state = TestCollectorState::InputState; } else if line.contains(TestCollector::DELIM_BUFFER) { self.collect_state = TestCollectorState::InputBuffer; } else if line.contains(TestCollector::DELIM_KEY) { self.collect_state = TestCollectorState::InputKey; } else if line.contains(TestCollector::DELIM_IV) { self.collect_state = TestCollectorState::InputIV; } else if line.contains(TestCollector::DELIM_TV) { self.collect_state = TestCollectorState::InputTV; } self.collect_data(line); self.commit_data(); } // Private APIs ---------------------------------------------------------------------------------------------------- // State-based data collection dispatch fn collect_data(&mut self, line: &str) { match self.collect_state { TestCollectorState::InputRound => { self.curr_round = line .trim() .trim_start_matches(TestCollector::DELIM_ROUND) .parse() .unwrap(); self.round_set = true; } TestCollectorState::InputState => { self.curr_state.extend( TestCollector::parse_str_hex_bytes(line, TestCollector::DELIM_STATE) .iter() .cloned(), ); } TestCollectorState::InputBuffer => { self.curr_buf.extend( TestCollector::parse_str_hex_bytes(line, TestCollector::DELIM_BUFFER) .iter() .cloned(), ); } TestCollectorState::InputKey => { self.curr_key.extend( TestCollector::parse_str_hex_bytes(line, TestCollector::DELIM_KEY) .iter() .cloned(), ); } TestCollectorState::InputIV => { self.curr_iv.extend( TestCollector::parse_str_hex_bytes(line, TestCollector::DELIM_IV) .iter() .cloned(), ); } TestCollectorState::InputTV => { self.curr_tv.extend( TestCollector::parse_str_hex_bytes(line, TestCollector::DELIM_TV) .iter() .cloned(), ); } _ => { return; } } } // 
Optional commit of either test or round data fn commit_data(&mut self) { if self.round_set && (self.curr_state.len() >= E128_STATE_LEN) && (self.curr_buf.len() >= E128_BUF_LEN) { self.commit_round(); self.reset_state_partial(); } if (self.curr_key.len() >= E128_KEY_LEN) && (self.curr_iv.len() >= E128_IV_LEN) && (self.curr_tv.len() >= TV_LEN) { self.commit_test(); self.reset_state_full(); } } // Convert string of space-delimited hex bytes to vector of u8 fn parse_str_hex_bytes(line: &str, prefix: &str) -> Vec<u8> { let data = line.trim().trim_start_matches(prefix); if data.is_empty() { return Vec::new(); } data.split(" ") .map(|x| u8::from_str_radix(x.trim_start_matches("0x"), 16).unwrap()) .collect() } // Reset after round triplet (round number, state, buffer) commit fn reset_state_partial(&mut self) { self.collect_state = TestCollectorState::Idle; self.round_set = false; self.curr_round = 0; self.curr_state.clear(); self.curr_buf.clear(); } // Reset after test (rounds, key, IV, TV) commit fn reset_state_full(&mut self) { self.reset_state_partial(); self.curr_key.clear(); self.curr_iv.clear(); self.curr_tv.clear(); } // Commit RoundData to Test's hashmap fn commit_round(&mut self) { let mut rd = RoundData { state: [0; E128_STATE_LEN], buf: [0; E128_BUF_LEN], }; assert_eq!(rd.state.len(), self.curr_state.len()); assert_eq!(rd.buf.len(), self.curr_buf.len()); rd.state[..].copy_from_slice(&self.curr_state[..]); rd.buf[..].copy_from_slice(&self.curr_buf[..]); self.tests .last_mut() .unwrap() .rounds .insert(self.curr_round, rd); println!( "[Test: {}][Round: {}] -> {:?}", self.tests.len(), self.curr_round, self.tests .last() .unwrap() .rounds .get(&self.curr_round) .unwrap(), ); } // Commit Test to TestCollector's Vec fn commit_test(&mut self) { let curr_test_num = self.tests.len(); let curr_test = self.tests.last_mut().unwrap(); assert_eq!(curr_test.key.len(), self.curr_key.len()); assert_eq!(curr_test.iv.len(), self.curr_iv.len()); assert_eq!(curr_test.tv.len(), 
self.curr_tv.len()); curr_test.key[..].copy_from_slice(&self.curr_key); curr_test.iv[..].copy_from_slice(&self.curr_iv); curr_test.tv[..].copy_from_slice(&self.curr_tv); println!( "\n[Test: {}]: Key: {:?}, IV: {:?}, Test Vector: {:?}\n", curr_test_num, curr_test.key, curr_test.iv, &curr_test.tv[..] ); self.tests.push(Test::new()); } // Check that all round numbers in an inclusive range are present for all collected Tests fn verify_finalize(&mut self, start_round_num: i64, end_round_num: i64) -> bool { // Strip test allocated after final commit self.tests.retain(|t| !t.rounds.is_empty()); // Wipe temporaries from any potential uncompleted test commit self.reset_state_full(); // Verify all collected tests contain complete round range for t in &self.tests { for k in start_round_num..=end_round_num { if !t.rounds.contains_key(&k) { return false; } } } true } } // Parse unstructured reference test vector text file using a state machine fn parse_test_vector_file() -> TestCollector { let base = env::current_dir().unwrap(); let target = base.join("tests").join("official_test_vectors.txt"); let file = File::open(target).unwrap(); let reader = BufReader::new(file); let mut tc = TestCollector::new(); for (idx, line) in reader.lines().enumerate() { let err_msg = format!("Unable to read line {}", idx); let line = line.expect(&err_msg); tc.process_line(&line); } assert!(tc.verify_finalize(ROUND_START, ROUND_END)); tc } // TEST HELPERS ######################################################################################################## // Validate that internal state matches expected #[inline(always)] fn internals_in_lockstep(e128: &Enocoro128, rd: &RoundData) -> bool { (e128.state == rd.state) && (e128.buf == rd.buf) } // For inspecting state during initialization rounds (occur within Enocoro128 constructor) #[inline(always)] fn bypass_constructor(key: &[u8], iv: &[u8]) -> Enocoro128 { let mut e128 = Enocoro128 { key: [0; E128_KEY_LEN], iv: [0; E128_IV_LEN], state: [0; 
E128_STATE_LEN], buf: [0; E128_BUF_LEN], top: 0, }; e128.key[..].copy_from_slice(&key); e128.iv[..].copy_from_slice(&iv); e128.buf[0..E128_KEY_LEN].copy_from_slice(&e128.key); e128.buf[E128_KEY_LEN..(E128_KEY_LEN + E128_IV_LEN)].copy_from_slice(&e128.iv); e128.buf[(E128_KEY_LEN + E128_IV_LEN)..].copy_from_slice(&E128_BUF_TAIL_INIT); e128.state[..].copy_from_slice(&E128_STATE_INIT); e128 } // TEST VECTOR VALIDATION ############################################################################################## // Verify internal states #[test] fn test_internal_states() { let test_collector = parse_test_vector_file(); for test in &test_collector.tests { let mut ctr = 0x1; let mut e128 = bypass_constructor(&test.key, &test.iv); // Starting state let round_data = test.rounds.get(&ROUND_START).unwrap(); assert!(internals_in_lockstep(&e128, round_data)); // Initialization states for round_num in (ROUND_START + 1)..0 { let round_data = test.rounds.get(&round_num).unwrap(); e128.buf[(e128.top.wrapping_add(K128_SHIFT) & 0x1f) as usize] ^= ctr; ctr = XTIME[ctr as usize]; e128.next128(); assert!(internals_in_lockstep(&e128, round_data)); } // Post-initialization states for round_num in 0..=ROUND_END { let round_data = test.rounds.get(&round_num).unwrap(); e128.next128(); assert!(internals_in_lockstep(&e128, round_data)); } } } // Verify en/decryption result #[test] fn test_output() { let test_collector = parse_test_vector_file(); for test in &test_collector.tests { let mut test_vector = [0; TV_LEN]; Enocoro128::apply_keystream_static(&test.key, &test.iv, &mut test_vector); assert!(test.tv.iter().zip(test_vector.iter()).all(|(a, b)| a == b)); } }
31.773196
120
0.537638
e97db880120f39c5187f963b542de99645f83eb0
9,384
use crate::cli; use crate::command_prelude::*; use anyhow::{bail, format_err}; use cargo::core::dependency::DepKind; use cargo::ops::tree::{self, EdgeKind}; use cargo::ops::Packages; use cargo::util::CargoResult; use std::collections::HashSet; use std::str::FromStr; pub fn cli() -> App { subcommand("tree") .about("Display a tree visualization of a dependency graph") .arg(opt("quiet", "Suppress status messages").short("q")) .arg_manifest_path() .arg_package_spec_no_all( "Package to be used as the root of the tree", "Display the tree for all packages in the workspace", "Exclude specific workspace members", ) .arg(Arg::with_name("all").long("all").short("a").hidden(true)) .arg( Arg::with_name("all-targets") .long("all-targets") .hidden(true), ) .arg_features() .arg_target_triple( "Filter dependencies matching the given target-triple (default host platform)", ) .arg( Arg::with_name("no-dev-dependencies") .long("no-dev-dependencies") .hidden(true), ) .arg( multi_opt( "edges", "KINDS", "The kinds of dependencies to display \ (features, normal, build, dev, all, no-dev, no-build, no-normal)", ) .short("e"), ) .arg( optional_multi_opt( "invert", "SPEC", "Invert the tree direction and focus on the given package", ) .short("i"), ) .arg(Arg::with_name("no-indent").long("no-indent").hidden(true)) .arg( Arg::with_name("prefix-depth") .long("prefix-depth") .hidden(true), ) .arg( opt( "prefix", "Change the prefix (indentation) of how each entry is displayed", ) .value_name("PREFIX") .possible_values(&["depth", "indent", "none"]) .default_value("indent"), ) .arg(opt( "no-dedupe", "Do not de-duplicate (repeats all shared dependencies)", )) .arg( opt( "duplicates", "Show only dependencies which come in multiple versions (implies -i)", ) .short("d") .alias("duplicate"), ) .arg( opt("charset", "Character set to use in output: utf8, ascii") .value_name("CHARSET") .possible_values(&["utf8", "ascii"]) .default_value("utf8"), ) .arg( opt("format", "Format string used for printing 
dependencies") .value_name("FORMAT") .short("f") .default_value("{p}"), ) .arg( // Backwards compatibility with old cargo-tree. Arg::with_name("version") .long("version") .short("V") .hidden(true), ) .after_help("Run `cargo help tree` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { if args.is_present("version") { let verbose = args.occurrences_of("verbose") > 0; let version = cli::get_version_string(verbose); cargo::drop_print!(config, "{}", version); return Ok(()); } let prefix = if args.is_present("no-indent") { config .shell() .warn("the --no-indent flag has been changed to --prefix=none")?; "none" } else if args.is_present("prefix-depth") { config .shell() .warn("the --prefix-depth flag has been changed to --prefix=depth")?; "depth" } else { args.value_of("prefix").unwrap() }; let prefix = tree::Prefix::from_str(prefix).map_err(|e| anyhow::anyhow!("{}", e))?; let no_dedupe = args.is_present("no-dedupe") || args.is_present("all"); if args.is_present("all") { config.shell().warn( "The `cargo tree` --all flag has been changed to --no-dedupe, \ and may be removed in a future version.\n\ If you are looking to display all workspace members, use the --workspace flag.", )?; } let targets = if args.is_present("all-targets") { config .shell() .warn("the --all-targets flag has been changed to --target=all")?; vec!["all".to_string()] } else { args._values_of("target") }; let target = tree::Target::from_cli(targets); let edge_kinds = parse_edge_kinds(config, args)?; let graph_features = edge_kinds.contains(&EdgeKind::Feature); let packages = args.packages_from_flags()?; let mut invert = args .values_of("invert") .map_or_else(|| Vec::new(), |is| is.map(|s| s.to_string()).collect()); if args.is_present_with_zero_values("invert") { match &packages { Packages::Packages(ps) => { // Backwards compatibility with old syntax of `cargo tree -i -p foo`. 
invert.extend(ps.clone()); } _ => { return Err(format_err!( "The `-i` flag requires a package name.\n\ \n\ The `-i` flag is used to inspect the reverse dependencies of a specific\n\ package. It will invert the tree and display the packages that depend on the\n\ given package.\n\ \n\ Note that in a workspace, by default it will only display the package's\n\ reverse dependencies inside the tree of the workspace member in the current\n\ directory. The --workspace flag can be used to extend it so that it will show\n\ the package's reverse dependencies across the entire workspace. The -p flag\n\ can be used to display the package's reverse dependencies only with the\n\ subtree of the package given to -p.\n\ " ) .into()); } } } let ws = args.workspace(config)?; let charset = tree::Charset::from_str(args.value_of("charset").unwrap()) .map_err(|e| anyhow::anyhow!("{}", e))?; let opts = tree::TreeOptions { features: values(args, "features"), all_features: args.is_present("all-features"), no_default_features: args.is_present("no-default-features"), packages, target, edge_kinds, invert, prefix, no_dedupe, duplicates: args.is_present("duplicates"), charset, format: args.value_of("format").unwrap().to_string(), graph_features, }; tree::build_and_print(&ws, &opts)?; Ok(()) } fn parse_edge_kinds(config: &Config, args: &ArgMatches<'_>) -> CargoResult<HashSet<EdgeKind>> { let mut kinds: Vec<&str> = args .values_of("edges") .map_or_else(|| Vec::new(), |es| es.flat_map(|e| e.split(',')).collect()); if args.is_present("no-dev-dependencies") { config .shell() .warn("the --no-dev-dependencies flag has changed to -e=no-dev")?; kinds.push("no-dev"); } if kinds.is_empty() { kinds.extend(&["normal", "build", "dev"]); } let mut result = HashSet::new(); let insert_defaults = |result: &mut HashSet<EdgeKind>| { result.insert(EdgeKind::Dep(DepKind::Normal)); result.insert(EdgeKind::Dep(DepKind::Build)); result.insert(EdgeKind::Dep(DepKind::Development)); }; let unknown = |k| { bail!( "unknown 
edge kind `{}`, valid values are \ \"normal\", \"build\", \"dev\", \ \"no-normal\", \"no-build\", \"no-dev\", \ \"features\", or \"all\"", k ) }; if kinds.iter().any(|k| k.starts_with("no-")) { insert_defaults(&mut result); for kind in &kinds { match *kind { "no-normal" => result.remove(&EdgeKind::Dep(DepKind::Normal)), "no-build" => result.remove(&EdgeKind::Dep(DepKind::Build)), "no-dev" => result.remove(&EdgeKind::Dep(DepKind::Development)), "features" => result.insert(EdgeKind::Feature), "normal" | "build" | "dev" | "all" => { bail!("`no-` dependency kinds cannot be mixed with other dependency kinds") } k => return unknown(k), }; } return Ok(result); } for kind in &kinds { match *kind { "all" => { insert_defaults(&mut result); result.insert(EdgeKind::Feature); } "features" => { result.insert(EdgeKind::Feature); } "normal" => { result.insert(EdgeKind::Dep(DepKind::Normal)); } "build" => { result.insert(EdgeKind::Dep(DepKind::Build)); } "dev" => { result.insert(EdgeKind::Dep(DepKind::Development)); } k => return unknown(k), } } if kinds.len() == 1 && kinds[0] == "features" { insert_defaults(&mut result); } Ok(result) }
34.627306
95
0.523231
031ffb82c0d82c3b503c5f4bdb2f161daed91749
3,816
use farmanager_codegen::Langpack; use farmanager::basic; #[derive(Langpack, Copy, Clone)] #[langpack(name = "showcase")] #[language(code = "en", value = "English,English")] #[language(code = "ru", value = "Russian,Russian (Русский)")] pub enum Lng { #[msg(language = "en", value = "HelloRust: API Showcase")] #[msg(language = "ru", value = "HelloRust: API Showcase")] MenuItemTitle, #[msg(language = "en", value = "Hello, Rust!")] #[msg(language = "ru", value = "Здравствуй, Rust!")] MessageTitle, #[msg(language = "en", value = "Hello, Rust! (with FMSG_ALLINONE flag)")] #[msg(language = "ru", value = "Здравствуй, Rust! (с флагом FMSG_ALLINONE)")] MessageTitleAllInOne, #[msg(language = "en", value = "Hello, Rust! (from commandline)")] #[msg(language = "ru", value = "Здравствуй, Rust! (из командной строки)")] MessageTitleCommandline, #[msg(language = "en", value = "")] #[msg(language = "ru", value = "")] MessageLine0, #[msg(language = "en", value = "HelloRust.rs: compiling...")] #[msg(language = "ru", value = "HelloRust.rs: компиляция...")] MessageLine1, #[msg(language = "en", value = " 13 error(s), 8 warning(s) :-)")] #[msg(language = "ru", value = " 13 ошибок, 8 предупреждений :-)")] MessageLine2, #[msg(language = "en", value = "")] #[msg(language = "ru", value = "")] MessageLine3, #[msg(language = "en", value = "&Ok")] #[msg(language = "ru", value = "Угу")] MessageButton, #[msg(language = "en", value = "Opened from left disk menu")] #[msg(language = "ru", value = "Запущен из левого меню дисков")] MessageFromLeftDiskMenu, #[msg(language = "en", value = "Opened from right disk menu")] #[msg(language = "ru", value = "Запущен из правого меню дисков")] MessageFromRightDiskMenu, #[msg(language = "en", value = "Opened from analyse")] #[msg(language = "ru", value = "Запущен после анализа файла")] MessageFromAnalyse, #[msg(language = "en", value = "Line 1")] #[msg(language = "ru", value = "Строка 1")] PanelMessageLine1, #[msg(language = "en", value = "Data 1")] #[msg(language = 
"ru", value = "Данные 1")] PanelMessageData1, #[msg(language = "en", value = "Line 2")] #[msg(language = "ru", value = "Строка 2")] PanelMessageLine2, #[msg(language = "en", value = "Data 2")] #[msg(language = "ru", value = "Данные 2")] PanelMessageData2, #[msg(language = "en", value = "Separator 2")] #[msg(language = "ru", value = "Разделитель 2")] PanelMessageSeparator1, #[msg(language = "en", value = "Create directory")] #[msg(language = "ru", value = "Создание папки")] MessageTitleCreateDirectory, #[msg(language = "en", value = "Directory name")] #[msg(language = "ru", value = "Имя папки")] MessageCreateDirectoryName, #[msg(language = "en", value = "Plugin configuration")] #[msg(language = "ru", value = "Параметры плагина")] MessageTitleConfiguration, #[msg(language = "en", value = "<once they'll be here>")] #[msg(language = "ru", value = "<когда-нибудь они тут будут>")] MessageConfiguration, #[msg(language = "en", value = "This API is not yet implemented!")] #[msg(language = "ru", value = "Данное API еще не поддерживается!")] MessageApiIsNotImplemented, #[msg(language = "en", value = "Error")] #[msg(language = "ru", value = "Ошибка")] ErrorTitle, #[msg(language = "en", value = "Cause")] #[msg(language = "ru", value = "Причина")] ErrorCause, #[msg(language = "en", value = "Backtrace")] #[msg(language = "ru", value = "Бэктрейс")] ErrorBacktrace, }
34.071429
82
0.578092
1ac0a48713a2966257f10e90a98be68c4e844edb
48,543
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // The Rust HIR. pub use self::BindingMode::*; pub use self::BinOp_::*; pub use self::BlockCheckMode::*; pub use self::CaptureClause::*; pub use self::Decl_::*; pub use self::Expr_::*; pub use self::FunctionRetTy::*; pub use self::ForeignItem_::*; pub use self::Item_::*; pub use self::Mutability::*; pub use self::PrimTy::*; pub use self::Stmt_::*; pub use self::TraitItem_::*; pub use self::Ty_::*; pub use self::TyParamBound::*; pub use self::UnOp::*; pub use self::UnsafeSource::*; pub use self::ViewPath_::*; pub use self::Visibility::{Public, Inherited}; pub use self::PathParameters::*; use hir::def::Def; use hir::def_id::DefId; use util::nodemap::{NodeMap, FnvHashSet}; use syntax_pos::{mk_sp, Span, ExpnId, DUMMY_SP}; use syntax::codemap::{self, respan, Spanned}; use syntax::abi::Abi; use syntax::ast::{Name, NodeId, DUMMY_NODE_ID, AsmDialect}; use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem}; use syntax::parse::token::{keywords, InternedString}; use syntax::ptr::P; use syntax::tokenstream::TokenTree; use syntax::util::ThinVec; use std::collections::BTreeMap; use std::fmt; /// HIR doesn't commit to a concrete storage type and have its own alias for a vector. /// It can be `Vec`, `P<[T]>` or potentially `Box<[T]>`, or some other container with similar /// behavior. Unlike AST, HIR is mostly a static structure, so we can use an owned slice instead /// of `Vec` to avoid keeping extra capacity. pub type HirVec<T> = P<[T]>; macro_rules! 
hir_vec { ($elem:expr; $n:expr) => ( $crate::hir::HirVec::from(vec![$elem; $n]) ); ($($x:expr),*) => ( $crate::hir::HirVec::from(vec![$($x),*]) ); ($($x:expr,)*) => (hir_vec![$($x),*]) } pub mod check_attr; pub mod def; pub mod def_id; pub mod intravisit; pub mod lowering; pub mod map; pub mod pat_util; pub mod print; pub mod svh; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] pub struct Lifetime { pub id: NodeId, pub span: Span, pub name: Name, } impl fmt::Debug for Lifetime { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "lifetime({}: {})", self.id, print::lifetime_to_string(self)) } } /// A lifetime definition, eg `'a: 'b+'c+'d` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct LifetimeDef { pub lifetime: Lifetime, pub bounds: HirVec<Lifetime>, pub pure_wrt_drop: bool, } /// A "Path" is essentially Rust's notion of a name; for instance: /// std::cmp::PartialEq . It's represented as a sequence of identifiers, /// along with a bunch of supporting information. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] pub struct Path { pub span: Span, /// A `::foo` path, is relative to the crate root rather than current /// module (like paths in an import). pub global: bool, /// The segments in the path: the things separated by `::`. pub segments: HirVec<PathSegment>, } impl fmt::Debug for Path { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "path({})", print::path_to_string(self)) } } impl fmt::Display for Path { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", print::path_to_string(self)) } } impl Path { /// Convert a span and an identifier to the corresponding /// 1-segment path. 
pub fn from_name(s: Span, name: Name) -> Path { Path { span: s, global: false, segments: hir_vec![PathSegment { name: name, parameters: PathParameters::none() }], } } } /// A segment of a path: an identifier, an optional lifetime, and a set of /// types. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct PathSegment { /// The identifier portion of this path segment. pub name: Name, /// Type/lifetime parameters attached to this path. They come in /// two flavors: `Path<A,B,C>` and `Path(A,B) -> C`. Note that /// this is more than just simple syntactic sugar; the use of /// parens affects the region binding rules, so we preserve the /// distinction. pub parameters: PathParameters, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum PathParameters { /// The `<'a, A,B,C>` in `foo::bar::baz::<'a, A,B,C>` AngleBracketedParameters(AngleBracketedParameterData), /// The `(A,B)` and `C` in `Foo(A,B) -> C` ParenthesizedParameters(ParenthesizedParameterData), } impl PathParameters { pub fn none() -> PathParameters { AngleBracketedParameters(AngleBracketedParameterData { lifetimes: HirVec::new(), types: HirVec::new(), bindings: HirVec::new(), }) } pub fn is_empty(&self) -> bool { match *self { AngleBracketedParameters(ref data) => data.is_empty(), // Even if the user supplied no types, something like // `X()` is equivalent to `X<(),()>`. ParenthesizedParameters(..) => false, } } pub fn has_lifetimes(&self) -> bool { match *self { AngleBracketedParameters(ref data) => !data.lifetimes.is_empty(), ParenthesizedParameters(_) => false, } } pub fn has_types(&self) -> bool { match *self { AngleBracketedParameters(ref data) => !data.types.is_empty(), ParenthesizedParameters(..) => true, } } /// Returns the types that the user wrote. Note that these do not necessarily map to the type /// parameters in the parenthesized case. 
pub fn types(&self) -> HirVec<&P<Ty>> { match *self { AngleBracketedParameters(ref data) => { data.types.iter().collect() } ParenthesizedParameters(ref data) => { data.inputs .iter() .chain(data.output.iter()) .collect() } } } pub fn lifetimes(&self) -> HirVec<&Lifetime> { match *self { AngleBracketedParameters(ref data) => { data.lifetimes.iter().collect() } ParenthesizedParameters(_) => { HirVec::new() } } } pub fn bindings(&self) -> HirVec<&TypeBinding> { match *self { AngleBracketedParameters(ref data) => { data.bindings.iter().collect() } ParenthesizedParameters(_) => { HirVec::new() } } } } /// A path like `Foo<'a, T>` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct AngleBracketedParameterData { /// The lifetime parameters for this path segment. pub lifetimes: HirVec<Lifetime>, /// The type parameters for this path segment, if present. pub types: HirVec<P<Ty>>, /// Bindings (equality constraints) on associated types, if present. /// E.g., `Foo<A=Bar>`. pub bindings: HirVec<TypeBinding>, } impl AngleBracketedParameterData { fn is_empty(&self) -> bool { self.lifetimes.is_empty() && self.types.is_empty() && self.bindings.is_empty() } } /// A path like `Foo(A,B) -> C` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct ParenthesizedParameterData { /// Overall span pub span: Span, /// `(A,B)` pub inputs: HirVec<P<Ty>>, /// `C` pub output: Option<P<Ty>>, } /// The AST represents all type param bounds as types. /// typeck::collect::compute_bounds matches these against /// the "special" built-in traits (see middle::lang_items) and /// detects Copy, Send and Sync. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum TyParamBound { TraitTyParamBound(PolyTraitRef, TraitBoundModifier), RegionTyParamBound(Lifetime), } /// A modifier on a bound, currently this is only used for `?Sized`, where the /// modifier is `Maybe`. Negative bounds should also be handled here. 
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum TraitBoundModifier { None, Maybe, } pub type TyParamBounds = HirVec<TyParamBound>; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct TyParam { pub name: Name, pub id: NodeId, pub bounds: TyParamBounds, pub default: Option<P<Ty>>, pub span: Span, pub pure_wrt_drop: bool, } /// Represents lifetimes and type parameters attached to a declaration /// of a function, enum, trait, etc. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Generics { pub lifetimes: HirVec<LifetimeDef>, pub ty_params: HirVec<TyParam>, pub where_clause: WhereClause, pub span: Span, } impl Generics { pub fn empty() -> Generics { Generics { lifetimes: HirVec::new(), ty_params: HirVec::new(), where_clause: WhereClause { id: DUMMY_NODE_ID, predicates: HirVec::new(), }, span: DUMMY_SP, } } pub fn is_lt_parameterized(&self) -> bool { !self.lifetimes.is_empty() } pub fn is_type_parameterized(&self) -> bool { !self.ty_params.is_empty() } pub fn is_parameterized(&self) -> bool { self.is_lt_parameterized() || self.is_type_parameterized() } } pub enum UnsafeGeneric { Region(LifetimeDef, &'static str), Type(TyParam, &'static str), } impl UnsafeGeneric { pub fn attr_name(&self) -> &'static str { match *self { UnsafeGeneric::Region(_, s) => s, UnsafeGeneric::Type(_, s) => s, } } } impl Generics { pub fn carries_unsafe_attr(&self) -> Option<UnsafeGeneric> { for r in &self.lifetimes { if r.pure_wrt_drop { return Some(UnsafeGeneric::Region(r.clone(), "may_dangle")); } } for t in &self.ty_params { if t.pure_wrt_drop { return Some(UnsafeGeneric::Type(t.clone(), "may_dangle")); } } return None; } } /// A `where` clause in a definition #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct WhereClause { pub id: NodeId, pub predicates: HirVec<WherePredicate>, } /// A single predicate in a `where` clause #[derive(Clone, 
PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum WherePredicate { /// A type binding, eg `for<'c> Foo: Send+Clone+'c` BoundPredicate(WhereBoundPredicate), /// A lifetime predicate, e.g. `'a: 'b+'c` RegionPredicate(WhereRegionPredicate), /// An equality predicate (unsupported) EqPredicate(WhereEqPredicate), } /// A type bound, eg `for<'c> Foo: Send+Clone+'c` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct WhereBoundPredicate { pub span: Span, /// Any lifetimes from a `for` binding pub bound_lifetimes: HirVec<LifetimeDef>, /// The type being bounded pub bounded_ty: P<Ty>, /// Trait and lifetime bounds (`Clone+Send+'static`) pub bounds: TyParamBounds, } /// A lifetime predicate, e.g. `'a: 'b+'c` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct WhereRegionPredicate { pub span: Span, pub lifetime: Lifetime, pub bounds: HirVec<Lifetime>, } /// An equality predicate (unsupported), e.g. `T=int` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct WhereEqPredicate { pub id: NodeId, pub span: Span, pub path: Path, pub ty: P<Ty>, } pub type CrateConfig = HirVec<P<MetaItem>>; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Debug)] pub struct Crate { pub module: Mod, pub attrs: HirVec<Attribute>, pub config: CrateConfig, pub span: Span, pub exported_macros: HirVec<MacroDef>, // NB: We use a BTreeMap here so that `visit_all_items` iterates // over the ids in increasing order. In principle it should not // matter what order we visit things in, but in *practice* it // does, because it can affect the order in which errors are // detected, which in turn can make compile-fail tests yield // slightly different results. pub items: BTreeMap<NodeId, Item>, } impl Crate { pub fn item(&self, id: NodeId) -> &Item { &self.items[&id] } /// Visits all items in the crate in some determinstic (but /// unspecified) order. 
If you just need to process every item, /// but don't care about nesting, this method is the best choice. /// /// If you do care about nesting -- usually because your algorithm /// follows lexical scoping rules -- then you want a different /// approach. You should override `visit_nested_item` in your /// visitor and then call `intravisit::walk_crate` instead. pub fn visit_all_items<'hir, V>(&'hir self, visitor: &mut V) where V: intravisit::Visitor<'hir> { for (_, item) in &self.items { visitor.visit_item(item); } } } /// A macro definition, in this crate or imported from another. /// /// Not parsed directly, but created on macro import or `macro_rules!` expansion. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct MacroDef { pub name: Name, pub attrs: HirVec<Attribute>, pub id: NodeId, pub span: Span, pub imported_from: Option<Name>, pub allow_internal_unstable: bool, pub body: HirVec<TokenTree>, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Block { /// Statements in a block pub stmts: HirVec<Stmt>, /// An expression at the end of the block /// without a semicolon, if any pub expr: Option<P<Expr>>, pub id: NodeId, /// Distinguishes between `unsafe { ... }` and `{ ... 
}` pub rules: BlockCheckMode, pub span: Span, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] pub struct Pat { pub id: NodeId, pub node: PatKind, pub span: Span, } impl fmt::Debug for Pat { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "pat({}: {})", self.id, print::pat_to_string(self)) } } impl Pat { // FIXME(#19596) this is a workaround, but there should be a better way fn walk_<G>(&self, it: &mut G) -> bool where G: FnMut(&Pat) -> bool { if !it(self) { return false; } match self.node { PatKind::Binding(.., Some(ref p)) => p.walk_(it), PatKind::Struct(_, ref fields, _) => { fields.iter().all(|field| field.node.pat.walk_(it)) } PatKind::TupleStruct(_, ref s, _) | PatKind::Tuple(ref s, _) => { s.iter().all(|p| p.walk_(it)) } PatKind::Box(ref s) | PatKind::Ref(ref s, _) => { s.walk_(it) } PatKind::Slice(ref before, ref slice, ref after) => { before.iter().all(|p| p.walk_(it)) && slice.iter().all(|p| p.walk_(it)) && after.iter().all(|p| p.walk_(it)) } PatKind::Wild | PatKind::Lit(_) | PatKind::Range(..) | PatKind::Binding(..) | PatKind::Path(..) 
=> { true } } } pub fn walk<F>(&self, mut it: F) -> bool where F: FnMut(&Pat) -> bool { self.walk_(&mut it) } } /// A single field in a struct pattern /// /// Patterns like the fields of Foo `{ x, ref y, ref mut z }` /// are treated the same as` x: x, y: ref y, z: ref mut z`, /// except is_shorthand is true #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct FieldPat { /// The identifier for the field pub name: Name, /// The pattern the field is destructured to pub pat: P<Pat>, pub is_shorthand: bool, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum BindingMode { BindByRef(Mutability), BindByValue(Mutability), } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum PatKind { /// Represents a wildcard pattern (`_`) Wild, /// A fresh binding `ref mut binding @ OPT_SUBPATTERN`. Binding(BindingMode, Spanned<Name>, Option<P<Pat>>), /// A struct or struct variant pattern, e.g. `Variant {x, y, ..}`. /// The `bool` is `true` in the presence of a `..`. Struct(Path, HirVec<Spanned<FieldPat>>, bool), /// A tuple struct/variant pattern `Variant(x, y, .., z)`. /// If the `..` pattern fragment is present, then `Option<usize>` denotes its position. /// 0 <= position <= subpats.len() TupleStruct(Path, HirVec<P<Pat>>, Option<usize>), /// A possibly qualified path pattern. /// Such pattern can be resolved to a unit struct/variant or a constant. Path(Option<QSelf>, Path), /// A tuple pattern `(a, b)`. /// If the `..` pattern fragment is present, then `Option<usize>` denotes its position. /// 0 <= position <= subpats.len() Tuple(HirVec<P<Pat>>, Option<usize>), /// A `box` pattern Box(P<Pat>), /// A reference pattern, e.g. `&mut (a, b)` Ref(P<Pat>, Mutability), /// A literal Lit(P<Expr>), /// A range pattern, e.g. 
`1...2` Range(P<Expr>, P<Expr>), /// `[a, b, ..i, y, z]` is represented as: /// `PatKind::Slice(box [a, b], Some(i), box [y, z])` Slice(HirVec<P<Pat>>, Option<P<Pat>>, HirVec<P<Pat>>), } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum Mutability { MutMutable, MutImmutable, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum BinOp_ { /// The `+` operator (addition) BiAdd, /// The `-` operator (subtraction) BiSub, /// The `*` operator (multiplication) BiMul, /// The `/` operator (division) BiDiv, /// The `%` operator (modulus) BiRem, /// The `&&` operator (logical and) BiAnd, /// The `||` operator (logical or) BiOr, /// The `^` operator (bitwise xor) BiBitXor, /// The `&` operator (bitwise and) BiBitAnd, /// The `|` operator (bitwise or) BiBitOr, /// The `<<` operator (shift left) BiShl, /// The `>>` operator (shift right) BiShr, /// The `==` operator (equality) BiEq, /// The `<` operator (less than) BiLt, /// The `<=` operator (less than or equal to) BiLe, /// The `!=` operator (not equal to) BiNe, /// The `>=` operator (greater than or equal to) BiGe, /// The `>` operator (greater than) BiGt, } impl BinOp_ { pub fn as_str(self) -> &'static str { match self { BiAdd => "+", BiSub => "-", BiMul => "*", BiDiv => "/", BiRem => "%", BiAnd => "&&", BiOr => "||", BiBitXor => "^", BiBitAnd => "&", BiBitOr => "|", BiShl => "<<", BiShr => ">>", BiEq => "==", BiLt => "<", BiLe => "<=", BiNe => "!=", BiGe => ">=", BiGt => ">", } } pub fn is_lazy(self) -> bool { match self { BiAnd | BiOr => true, _ => false, } } pub fn is_shift(self) -> bool { match self { BiShl | BiShr => true, _ => false, } } pub fn is_comparison(self) -> bool { match self { BiEq | BiLt | BiLe | BiNe | BiGt | BiGe => true, BiAnd | BiOr | BiAdd | BiSub | BiMul | BiDiv | BiRem | BiBitXor | BiBitAnd | BiBitOr | BiShl | BiShr => false, } } /// Returns `true` if the binary operator takes its arguments by value pub fn 
is_by_value(self) -> bool { !self.is_comparison() } } pub type BinOp = Spanned<BinOp_>; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum UnOp { /// The `*` operator for dereferencing UnDeref, /// The `!` operator for logical inversion UnNot, /// The `-` operator for negation UnNeg, } impl UnOp { pub fn as_str(self) -> &'static str { match self { UnDeref => "*", UnNot => "!", UnNeg => "-", } } /// Returns `true` if the unary operator takes its argument by value pub fn is_by_value(self) -> bool { match self { UnNeg | UnNot => true, _ => false, } } } /// A statement pub type Stmt = Spanned<Stmt_>; impl fmt::Debug for Stmt_ { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { // Sadness. let spanned = codemap::dummy_spanned(self.clone()); write!(f, "stmt({}: {})", spanned.node.id(), print::stmt_to_string(&spanned)) } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] pub enum Stmt_ { /// Could be an item or a local (let) binding: StmtDecl(P<Decl>, NodeId), /// Expr without trailing semi-colon (must have unit type): StmtExpr(P<Expr>, NodeId), /// Expr with trailing semi-colon (may have any type): StmtSemi(P<Expr>, NodeId), } impl Stmt_ { pub fn attrs(&self) -> &[Attribute] { match *self { StmtDecl(ref d, _) => d.node.attrs(), StmtExpr(ref e, _) | StmtSemi(ref e, _) => &e.attrs, } } pub fn id(&self) -> NodeId { match *self { StmtDecl(_, id) => id, StmtExpr(_, id) => id, StmtSemi(_, id) => id, } } } // FIXME (pending discussion of #1697, #2178...): local should really be // a refinement on pat. 
/// Local represents a `let` statement, e.g., `let <pat>:<ty> = <expr>;` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Local { pub pat: P<Pat>, pub ty: Option<P<Ty>>, /// Initializer expression to set the value, if any pub init: Option<P<Expr>>, pub id: NodeId, pub span: Span, pub attrs: ThinVec<Attribute>, } pub type Decl = Spanned<Decl_>; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum Decl_ { /// A local (let) binding: DeclLocal(P<Local>), /// An item binding: DeclItem(ItemId), } impl Decl_ { pub fn attrs(&self) -> &[Attribute] { match *self { DeclLocal(ref l) => &l.attrs, DeclItem(_) => &[] } } } /// represents one arm of a 'match' #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Arm { pub attrs: HirVec<Attribute>, pub pats: HirVec<P<Pat>>, pub guard: Option<P<Expr>>, pub body: P<Expr>, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Field { pub name: Spanned<Name>, pub expr: P<Expr>, pub span: Span, pub is_shorthand: bool, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum BlockCheckMode { DefaultBlock, UnsafeBlock(UnsafeSource), PushUnsafeBlock(UnsafeSource), PopUnsafeBlock(UnsafeSource), // Within this block (but outside a PopUnstableBlock), we suspend checking of stability. 
PushUnstableBlock, PopUnstableBlock, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum UnsafeSource { CompilerGenerated, UserProvided, } /// An expression #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] pub struct Expr { pub id: NodeId, pub node: Expr_, pub span: Span, pub attrs: ThinVec<Attribute>, } impl fmt::Debug for Expr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "expr({}: {})", self.id, print::expr_to_string(self)) } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum Expr_ { /// A `box x` expression. ExprBox(P<Expr>), /// An array (`[a, b, c, d]`) ExprArray(HirVec<P<Expr>>), /// A function call /// /// The first field resolves to the function itself (usually an `ExprPath`), /// and the second field is the list of arguments ExprCall(P<Expr>, HirVec<P<Expr>>), /// A method call (`x.foo::<Bar, Baz>(a, b, c, d)`) /// /// The `Spanned<Name>` is the identifier for the method name. /// The vector of `Ty`s are the ascripted type parameters for the method /// (within the angle brackets). /// /// The first element of the vector of `Expr`s is the expression that /// evaluates to the object on which the method is being called on (the /// receiver), and the remaining elements are the rest of the arguments. /// /// Thus, `x.foo::<Bar, Baz>(a, b, c, d)` is represented as /// `ExprMethodCall(foo, [Bar, Baz], [x, a, b, c, d])`. 
ExprMethodCall(Spanned<Name>, HirVec<P<Ty>>, HirVec<P<Expr>>), /// A tuple (`(a, b, c ,d)`) ExprTup(HirVec<P<Expr>>), /// A binary operation (For example: `a + b`, `a * b`) ExprBinary(BinOp, P<Expr>, P<Expr>), /// A unary operation (For example: `!x`, `*x`) ExprUnary(UnOp, P<Expr>), /// A literal (For example: `1`, `"foo"`) ExprLit(P<Lit>), /// A cast (`foo as f64`) ExprCast(P<Expr>, P<Ty>), ExprType(P<Expr>, P<Ty>), /// An `if` block, with an optional else block /// /// `if expr { block } else { expr }` ExprIf(P<Expr>, P<Block>, Option<P<Expr>>), /// A while loop, with an optional label /// /// `'label: while expr { block }` ExprWhile(P<Expr>, P<Block>, Option<Spanned<Name>>), /// Conditionless loop (can be exited with break, continue, or return) /// /// `'label: loop { block }` ExprLoop(P<Block>, Option<Spanned<Name>>), /// A `match` block, with a source that indicates whether or not it is /// the result of a desugaring, and if so, which kind. ExprMatch(P<Expr>, HirVec<Arm>, MatchSource), /// A closure (for example, `move |a, b, c| {a + b + c}`). /// /// The final span is the span of the argument block `|...|` ExprClosure(CaptureClause, P<FnDecl>, P<Block>, Span), /// A block (`{ ... }`) ExprBlock(P<Block>), /// An assignment (`a = foo()`) ExprAssign(P<Expr>, P<Expr>), /// An assignment with an operator /// /// For example, `a += 1`. ExprAssignOp(BinOp, P<Expr>, P<Expr>), /// Access of a named struct field (`obj.foo`) ExprField(P<Expr>, Spanned<Name>), /// Access of an unnamed field of a struct or tuple-struct /// /// For example, `foo.0`. ExprTupField(P<Expr>, Spanned<usize>), /// An indexing operation (`foo[2]`) ExprIndex(P<Expr>, P<Expr>), /// Variable reference, possibly containing `::` and/or type /// parameters, e.g. foo::bar::<baz>. /// /// Optionally "qualified", /// e.g. `<HirVec<T> as SomeTrait>::SomeType`. 
ExprPath(Option<QSelf>, Path), /// A referencing operation (`&a` or `&mut a`) ExprAddrOf(Mutability, P<Expr>), /// A `break`, with an optional label to break ExprBreak(Option<Spanned<Name>>), /// A `continue`, with an optional label ExprAgain(Option<Spanned<Name>>), /// A `return`, with an optional value to be returned ExprRet(Option<P<Expr>>), /// Inline assembly (from `asm!`), with its outputs and inputs. ExprInlineAsm(InlineAsm, Vec<P<Expr>>, Vec<P<Expr>>), /// A struct or struct-like variant literal expression. /// /// For example, `Foo {x: 1, y: 2}`, or /// `Foo {x: 1, .. base}`, where `base` is the `Option<Expr>`. ExprStruct(Path, HirVec<Field>, Option<P<Expr>>), /// An array literal constructed from one repeated element. /// /// For example, `[1; 5]`. The first expression is the element /// to be repeated; the second is the number of times to repeat it. ExprRepeat(P<Expr>, P<Expr>), } /// The explicit Self type in a "qualified path". The actual /// path, including the trait and the associated item, is stored /// separately. `position` represents the index of the associated /// item qualified with this Self type. /// /// <HirVec<T> as a::b::Trait>::AssociatedItem /// ^~~~~ ~~~~~~~~~~~~~~^ /// ty position = 3 /// /// <HirVec<T>>::AssociatedItem /// ^~~~~ ^ /// ty position = 0 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct QSelf { pub ty: P<Ty>, pub position: usize, } /// Hints at the original code for a `match _ { .. }` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum MatchSource { /// A `match _ { .. }` Normal, /// An `if let _ = _ { .. }` (optionally with `else { .. }`) IfLetDesugar { contains_else_clause: bool, }, /// A `while let _ = _ { .. }` (which was desugared to a /// `loop { match _ { .. } }`) WhileLetDesugar, /// A desugared `for _ in _ { .. 
}` loop ForLoopDesugar, /// A desugared `?` operator TryDesugar, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum CaptureClause { CaptureByValue, CaptureByRef, } // NB: If you change this, you'll probably want to change the corresponding // type structure in middle/ty.rs as well. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct MutTy { pub ty: P<Ty>, pub mutbl: Mutability, } /// Represents a method's signature in a trait declaration or implementation. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct MethodSig { pub unsafety: Unsafety, pub constness: Constness, pub abi: Abi, pub decl: P<FnDecl>, pub generics: Generics, } /// Represents an item declaration within a trait declaration, /// possibly including a default implementation. A trait item is /// either required (meaning it doesn't have an implementation, just a /// signature) or provided (meaning it has a default implementation). 
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct TraitItem { pub id: NodeId, pub name: Name, pub attrs: HirVec<Attribute>, pub node: TraitItem_, pub span: Span, } /// Represents a trait method or associated constant or type #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum TraitItem_ { /// An associated constant with an optional value (otherwise `impl`s /// must contain a value) ConstTraitItem(P<Ty>, Option<P<Expr>>), /// A method with an optional body MethodTraitItem(MethodSig, Option<P<Block>>), /// An associated type with (possibly empty) bounds and optional concrete /// type TypeTraitItem(TyParamBounds, Option<P<Ty>>), } /// Represents anything within an `impl` block #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct ImplItem { pub id: NodeId, pub name: Name, pub vis: Visibility, pub defaultness: Defaultness, pub attrs: HirVec<Attribute>, pub node: ImplItemKind, pub span: Span, } /// Represents different contents within `impl`s #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum ImplItemKind { /// An associated constant of the given type, set to the constant result /// of the expression Const(P<Ty>, P<Expr>), /// A method implementation with the given signature and body Method(MethodSig, P<Block>), /// An associated type Type(P<Ty>), } // Bind a type to an associated type: `A=Foo`. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct TypeBinding { pub id: NodeId, pub name: Name, pub ty: P<Ty>, pub span: Span, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] pub struct Ty { pub id: NodeId, pub node: Ty_, pub span: Span, } impl fmt::Debug for Ty { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "type({})", print::ty_to_string(self)) } } /// Not represented directly in the AST, referred to by name through a ty_path. 
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum PrimTy { TyInt(IntTy), TyUint(UintTy), TyFloat(FloatTy), TyStr, TyBool, TyChar, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct BareFnTy { pub unsafety: Unsafety, pub abi: Abi, pub lifetimes: HirVec<LifetimeDef>, pub decl: P<FnDecl>, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] /// The different kinds of types recognized by the compiler pub enum Ty_ { /// A variable length slice (`[T]`) TySlice(P<Ty>), /// A fixed length array (`[T; n]`) TyArray(P<Ty>, P<Expr>), /// A raw pointer (`*const T` or `*mut T`) TyPtr(MutTy), /// A reference (`&'a T` or `&'a mut T`) TyRptr(Option<Lifetime>, MutTy), /// A bare function (e.g. `fn(usize) -> bool`) TyBareFn(P<BareFnTy>), /// The never type (`!`) TyNever, /// A tuple (`(A, B, C, D,...)`) TyTup(HirVec<P<Ty>>), /// A path (`module::module::...::Type`), optionally /// "qualified", e.g. `<HirVec<T> as SomeTrait>::SomeType`. /// /// Type parameters are stored in the Path itself TyPath(Option<QSelf>, Path), /// Something like `A+B`. Note that `B` must always be a path. TyObjectSum(P<Ty>, TyParamBounds), /// A type like `for<'a> Foo<&'a Bar>` TyPolyTraitRef(TyParamBounds), /// An `impl TraitA+TraitB` type. TyImplTrait(TyParamBounds), /// Unused for now TyTypeof(P<Expr>), /// TyInfer means the type should be inferred instead of it having been /// specified. This can appear anywhere in a type. 
TyInfer, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct InlineAsmOutput { pub constraint: InternedString, pub is_rw: bool, pub is_indirect: bool, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct InlineAsm { pub asm: InternedString, pub asm_str_style: StrStyle, pub outputs: HirVec<InlineAsmOutput>, pub inputs: HirVec<InternedString>, pub clobbers: HirVec<InternedString>, pub volatile: bool, pub alignstack: bool, pub dialect: AsmDialect, pub expn_id: ExpnId, } /// represents an argument in a function header #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Arg { pub ty: P<Ty>, pub pat: P<Pat>, pub id: NodeId, } /// Alternative representation for `Arg`s describing `self` parameter of methods. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum SelfKind { /// `self`, `mut self` Value(Mutability), /// `&'lt self`, `&'lt mut self` Region(Option<Lifetime>, Mutability), /// `self: TYPE`, `mut self: TYPE` Explicit(P<Ty>, Mutability), } pub type ExplicitSelf = Spanned<SelfKind>; impl Arg { pub fn to_self(&self) -> Option<ExplicitSelf> { if let PatKind::Binding(BindByValue(mutbl), name, _) = self.pat.node { if name.node == keywords::SelfValue.name() { return match self.ty.node { TyInfer => Some(respan(self.pat.span, SelfKind::Value(mutbl))), TyRptr(lt, MutTy{ref ty, mutbl}) if ty.node == TyInfer => { Some(respan(self.pat.span, SelfKind::Region(lt, mutbl))) } _ => Some(respan(mk_sp(self.pat.span.lo, self.ty.span.hi), SelfKind::Explicit(self.ty.clone(), mutbl))) } } } None } pub fn is_self(&self) -> bool { if let PatKind::Binding(_, name, _) = self.pat.node { name.node == keywords::SelfValue.name() } else { false } } } /// Represents the header (not the body) of a function declaration #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct FnDecl { pub inputs: HirVec<Arg>, pub output: 
FunctionRetTy, pub variadic: bool, } impl FnDecl { pub fn get_self(&self) -> Option<ExplicitSelf> { self.inputs.get(0).and_then(Arg::to_self) } pub fn has_self(&self) -> bool { self.inputs.get(0).map(Arg::is_self).unwrap_or(false) } } #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum Unsafety { Unsafe, Normal, } #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum Constness { Const, NotConst, } #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum Defaultness { Default, Final, } impl Defaultness { pub fn is_final(&self) -> bool { *self == Defaultness::Final } pub fn is_default(&self) -> bool { *self == Defaultness::Default } } impl fmt::Display for Unsafety { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(match *self { Unsafety::Normal => "normal", Unsafety::Unsafe => "unsafe", }, f) } } #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] pub enum ImplPolarity { /// `impl Trait for Type` Positive, /// `impl !Trait for Type` Negative, } impl fmt::Debug for ImplPolarity { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { ImplPolarity::Positive => "positive".fmt(f), ImplPolarity::Negative => "negative".fmt(f), } } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum FunctionRetTy { /// Return type is not specified. /// /// Functions default to `()` and /// closures default to inference. Span points to where return /// type would be inserted. DefaultReturn(Span), /// Everything else Return(P<Ty>), } impl FunctionRetTy { pub fn span(&self) -> Span { match *self { DefaultReturn(span) => span, Return(ref ty) => ty.span, } } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Mod { /// A span from the first token past `{` to the last token until `}`. 
/// For `mod foo;`, the inner span ranges from the first token /// to the last token in the external file. pub inner: Span, pub item_ids: HirVec<ItemId>, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct ForeignMod { pub abi: Abi, pub items: HirVec<ForeignItem>, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct EnumDef { pub variants: HirVec<Variant>, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Variant_ { pub name: Name, pub attrs: HirVec<Attribute>, pub data: VariantData, /// Explicit discriminant, eg `Foo = 1` pub disr_expr: Option<P<Expr>>, } pub type Variant = Spanned<Variant_>; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub struct PathListItem_ { pub name: Name, /// renamed in list, eg `use foo::{bar as baz};` pub rename: Option<Name>, pub id: NodeId, } pub type PathListItem = Spanned<PathListItem_>; pub type ViewPath = Spanned<ViewPath_>; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum ViewPath_ { /// `foo::bar::baz as quux` /// /// or just /// /// `foo::bar::baz` (with `as baz` implicitly on the right) ViewPathSimple(Name, Path), /// `foo::bar::*` ViewPathGlob(Path), /// `foo::bar::{a,b,c}` ViewPathList(Path, HirVec<PathListItem>), } /// TraitRef's appear in impls. /// /// resolve maps each TraitRef's ref_id to its defining trait; that's all /// that the ref_id is for. Note that ref_id's value is not the NodeId of the /// trait being referred to but just a unique NodeId that serves as a key /// within the DefMap. 
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct TraitRef { pub path: Path, pub ref_id: NodeId, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct PolyTraitRef { /// The `'a` in `<'a> Foo<&'a T>` pub bound_lifetimes: HirVec<LifetimeDef>, /// The `Foo<&'a T>` in `<'a> Foo<&'a T>` pub trait_ref: TraitRef, pub span: Span, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum Visibility { Public, Crate, Restricted { path: P<Path>, id: NodeId }, Inherited, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct StructField { pub span: Span, pub name: Name, pub vis: Visibility, pub id: NodeId, pub ty: P<Ty>, pub attrs: HirVec<Attribute>, } impl StructField { // Still necessary in couple of places pub fn is_positional(&self) -> bool { let first = self.name.as_str().as_bytes()[0]; first >= b'0' && first <= b'9' } } /// Fields and Ids of enum variants and structs /// /// For enum variants: `NodeId` represents both an Id of the variant itself (relevant for all /// variant kinds) and an Id of the variant's constructor (not relevant for `Struct`-variants). /// One shared Id can be successfully used for these two purposes. /// Id of the whole enum lives in `Item`. /// /// For structs: `NodeId` represents an Id of the structure's constructor, so it is not actually /// used for `Struct`-structs (but still presents). Structures don't have an analogue of "Id of /// the variant itself" from enum variants. /// Id of the whole struct lives in `Item`. 
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum VariantData { Struct(HirVec<StructField>, NodeId), Tuple(HirVec<StructField>, NodeId), Unit(NodeId), } impl VariantData { pub fn fields(&self) -> &[StructField] { match *self { VariantData::Struct(ref fields, _) | VariantData::Tuple(ref fields, _) => fields, _ => &[], } } pub fn id(&self) -> NodeId { match *self { VariantData::Struct(_, id) | VariantData::Tuple(_, id) | VariantData::Unit(id) => id, } } pub fn is_struct(&self) -> bool { if let VariantData::Struct(..) = *self { true } else { false } } pub fn is_tuple(&self) -> bool { if let VariantData::Tuple(..) = *self { true } else { false } } pub fn is_unit(&self) -> bool { if let VariantData::Unit(..) = *self { true } else { false } } } // The bodies for items are stored "out of line", in a separate // hashmap in the `Crate`. Here we just record the node-id of the item // so it can fetched later. #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct ItemId { pub id: NodeId, } // FIXME (#3300): Should allow items to be anonymous. Right now // we just use dummy names for anon items. /// An item /// /// The name might be a dummy name in case of anonymous items #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Item { pub name: Name, pub attrs: HirVec<Attribute>, pub id: NodeId, pub node: Item_, pub vis: Visibility, pub span: Span, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum Item_ { /// An`extern crate` item, with optional original crate name, /// /// e.g. 
`extern crate foo` or `extern crate foo_bar as foo` ItemExternCrate(Option<Name>), /// A `use` or `pub use` item ItemUse(P<ViewPath>), /// A `static` item ItemStatic(P<Ty>, Mutability, P<Expr>), /// A `const` item ItemConst(P<Ty>, P<Expr>), /// A function declaration ItemFn(P<FnDecl>, Unsafety, Constness, Abi, Generics, P<Block>), /// A module ItemMod(Mod), /// An external module ItemForeignMod(ForeignMod), /// A type alias, e.g. `type Foo = Bar<u8>` ItemTy(P<Ty>, Generics), /// An enum definition, e.g. `enum Foo<A, B> {C<A>, D<B>}` ItemEnum(EnumDef, Generics), /// A struct definition, e.g. `struct Foo<A> {x: A}` ItemStruct(VariantData, Generics), /// A union definition, e.g. `union Foo<A, B> {x: A, y: B}` ItemUnion(VariantData, Generics), /// Represents a Trait Declaration ItemTrait(Unsafety, Generics, TyParamBounds, HirVec<TraitItem>), // Default trait implementations /// /// `impl Trait for .. {}` ItemDefaultImpl(Unsafety, TraitRef), /// An implementation, eg `impl<A> Trait for Foo { .. }` ItemImpl(Unsafety, ImplPolarity, Generics, Option<TraitRef>, // (optional) trait this impl implements P<Ty>, // self HirVec<ImplItem>), } impl Item_ { pub fn descriptive_variant(&self) -> &str { match *self { ItemExternCrate(..) => "extern crate", ItemUse(..) => "use", ItemStatic(..) => "static item", ItemConst(..) => "constant item", ItemFn(..) => "function", ItemMod(..) => "module", ItemForeignMod(..) => "foreign module", ItemTy(..) => "type alias", ItemEnum(..) => "enum", ItemStruct(..) => "struct", ItemUnion(..) => "union", ItemTrait(..) => "trait", ItemImpl(..) | ItemDefaultImpl(..) 
=> "item", } } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct ForeignItem { pub name: Name, pub attrs: HirVec<Attribute>, pub node: ForeignItem_, pub id: NodeId, pub span: Span, pub vis: Visibility, } /// An item within an `extern` block #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum ForeignItem_ { /// A foreign function ForeignItemFn(P<FnDecl>, Generics), /// A foreign static item (`static ext: u8`), with optional mutability /// (the boolean is true when mutable) ForeignItemStatic(P<Ty>, bool), } impl ForeignItem_ { pub fn descriptive_variant(&self) -> &str { match *self { ForeignItemFn(..) => "foreign function", ForeignItemStatic(..) => "foreign static item", } } } /// A free variable referred to in a function. #[derive(Copy, Clone, RustcEncodable, RustcDecodable)] pub struct Freevar { /// The variable being accessed free. pub def: Def, // First span where it is accessed (there can be multiple). pub span: Span } pub type FreevarMap = NodeMap<Vec<Freevar>>; pub type CaptureModeMap = NodeMap<CaptureClause>; #[derive(Clone, Debug)] pub struct TraitCandidate { pub def_id: DefId, pub import_id: Option<NodeId>, } // Trait method resolution pub type TraitMap = NodeMap<Vec<TraitCandidate>>; // Map from the NodeId of a glob import to a list of items which are actually // imported. pub type GlobMap = NodeMap<FnvHashSet<Name>>;
30.150932
97
0.603568
ace748d107538b28e14639779cf08443807842ee
2,819
use super::Sharder; use data_types::{DatabaseName, DeletePredicate}; use mutable_batch::MutableBatch; use parking_lot::Mutex; use std::{collections::VecDeque, fmt::Debug, sync::Arc}; #[derive(Debug, Clone)] pub enum MockSharderPayload { MutableBatch(MutableBatch), DeletePredicate(DeletePredicate), } impl MockSharderPayload { pub fn mutable_batch(&self) -> &MutableBatch { match self { Self::MutableBatch(v) => v, _ => panic!("payload is not a mutable batch"), } } } #[derive(Debug, Clone)] pub struct MockSharderCall { pub table_name: String, pub namespace: String, pub payload: MockSharderPayload, } #[derive(Debug, Default)] struct Inner<T> { calls: Vec<MockSharderCall>, shard_return: VecDeque<T>, } impl<T> Inner<T> { fn record_call(&mut self, call: MockSharderCall) { self.calls.push(call); } } #[derive(Debug)] pub struct MockSharder<T>(Mutex<Inner<T>>); impl<T> Default for MockSharder<T> { fn default() -> Self { Self(Mutex::new(Inner { calls: Default::default(), shard_return: VecDeque::new(), })) } } impl<T> MockSharder<T> { /// Return the values specified in `ret` in sequence for calls to `shard`, /// starting from the front. 
pub fn with_return(self, ret: impl Into<VecDeque<T>>) -> Self { self.0.lock().shard_return = ret.into(); self } pub fn calls(&self) -> Vec<MockSharderCall> { self.0.lock().calls.clone() } } impl<T> Sharder<MutableBatch> for Arc<MockSharder<T>> where T: Debug + Send + Sync, { type Item = T; fn shard( &self, table: &str, namespace: &DatabaseName<'_>, payload: &MutableBatch, ) -> Self::Item { let mut guard = self.0.lock(); guard.record_call(MockSharderCall { table_name: table.to_string(), namespace: namespace.to_string(), payload: MockSharderPayload::MutableBatch(payload.clone()), }); guard .shard_return .pop_front() .expect("no shard mock value to return") } } impl<T> Sharder<DeletePredicate> for Arc<MockSharder<T>> where T: Debug + Send + Sync, { type Item = Vec<T>; fn shard( &self, table: &str, namespace: &DatabaseName<'_>, payload: &DeletePredicate, ) -> Self::Item { let mut guard = self.0.lock(); guard.record_call(MockSharderCall { table_name: table.to_string(), namespace: namespace.to_string(), payload: MockSharderPayload::DeletePredicate(payload.clone()), }); vec![guard .shard_return .pop_front() .expect("no shard mock value to return")] } }
24.513043
78
0.589571
79a94527a2f54de4a34ba43f0dd73c9c07e99c3c
3,157
mod view; mod token_receiver; mod internal; mod utils; mod owner; use near_sdk::borsh::{self, BorshDeserialize, BorshSerialize}; use near_sdk::json_types::{U128, U64, WrappedTimestamp}; use near_sdk::{env, near_bindgen, AccountId, Balance, EpochHeight}; use std::collections::HashMap; #[global_allocator] static ALLOC: near_sdk::wee_alloc::WeeAlloc = near_sdk::wee_alloc::WeeAlloc::INIT; #[derive(BorshDeserialize, BorshSerialize, Debug, PartialEq)] pub struct AccountInfo { id: AccountId, vote: bool, amount: Balance, } #[near_bindgen] #[derive(BorshDeserialize, BorshSerialize)] pub struct VotingContract { owner: AccountId, polls: Vec<Poll>, poll_count: u32, pause: bool, market_id: AccountId, token_id: AccountId, min_create_poll_amount: Balance, } #[near_bindgen] #[derive(BorshDeserialize, BorshSerialize, Serialize, Deserialize, PartialEq)] #[serde(crate = "near_sdk::serde")] pub struct Poll { creator_id: AccountId, status: PollStatus, create_date: Option<WrappedTimestamp>, title: String, description: String, deposit_amount: Balance, votes: HashMap<AccountId, AccountInfo>, yes_amount: Balance, no_amount: Balance, stake_amount: Balance, total_balance_at_end_poll: Balance, last_epoch_height: EpochHeight, } impl Default for VotingContract { fn default() -> Self { env::panic(b"Voting contract should be initialized before usage") } } #[near_bindgen] impl VotingContract { #[init] pub fn new(token_id:AccountId, market_id: AccountId, min_create_poll_amount: Balance) -> Self { assert!(!env::state_exists(), "The contract is already initialized"); VotingContract { owner: env::predecessor_account_id(), polls: Vec::new(), poll_count: 0, pause: false, market_id, token_id, min_create_poll_amount, } } // claim user pub fn claim(&self, index: u32) { self.assert_contract_running(); self.assert_index(index); self.check_status(index); let account_id = env::predecessor_account_id(); let mut cur_poll: Poll = self.polls[index]; let votes = std::mem::take(&mut cur_poll.votes); if 
votes.contains_key(&env::predecessor_account_id()) { let account = votes.get(&account_id).unwrap(); let amount = (U256::from(cur_poll.stake_amount) * U256::from(account.amount) / U256::from(cur_poll.deposit_amount)).as_u128(); self.internal_send_tokens(&self.owner, &self.token_id, amount); } else { env::panic(b"ERR: non-whitelisted token can NOT deposit into lost-found."); }; } pub fn stop_vote(&mut self, index: u32) { self.assert_owner(); let mut cur_poll: Poll = self.polls[index]; cur_poll.status = PollStatus::Expired; self.polls.insert(index.into(), cur_poll); } } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq)] #[serde(crate = "near_sdk::serde")] pub enum PollStatus { InProgress, Passed, Rejected, Expired, // Depricated }
28.7
138
0.661387
febd770f8e0024ed0ad2a974825fbde55b7ff823
2,387
use anyhow::Result; use iota_crypto_preview::Kerl; use iota_signing_preview::IotaSeed; use crate::response::Input; use crate::Client; /// Builder to construct GetInputs API //#[derive(Debug)] pub struct GetInputsBuilder<'a> { seed: &'a IotaSeed<Kerl>, index: u64, security: u8, threshold: u64, } impl<'a> GetInputsBuilder<'a> { pub(crate) fn new(seed: &'a IotaSeed<Kerl>) -> Self { Self { seed: seed, index: 0, security: 2, threshold: 0, } } /// Set key index to start search at pub fn index(mut self, index: u64) -> Self { self.index = index; self } /// Set security level pub fn security(mut self, security: u8) -> Self { self.security = security; self } /// Set minimum amount of balance required pub fn threshold(mut self, threshold: u64) -> Self { self.threshold = threshold; self } /// Send GetInputs request pub async fn generate(self) -> Result<(u64, Vec<Input>)> { if self.threshold == 0 { return Ok((0, Vec::default())); } let mut index = self.index; let mut total = 0; let mut inputs = Vec::new(); let mut zero_balance_warning = 5; while zero_balance_warning != 0 { let (next_index, address) = Client::get_new_address(self.seed) .index(index) .security(self.security) .generate() .await?; let balance = Client::get_balances() .addresses(&[address.clone()]) .send() .await? .balances[0]; // If the next couple of addresses don't have any balance, we determine it fails to prevent from infinite searching. if balance == 0 { zero_balance_warning -= 1; } else { zero_balance_warning = 5; } total += balance; index = next_index; inputs.push(Input { address, balance, index, }); index += 1; if total >= self.threshold { return Ok((total, inputs)); } } Err(anyhow!("Cannot find enough inputs to satisify threshold")) } }
25.666667
128
0.509845
3ad274167bcb6ba475d775b9a5956e4a7367a4c8
22,041
//! Pretty-printer based rewriting strategy. //! //! The basic idea here is to pretty-print the new AST, which gives text that is guaranteed to //! parse to the new AST (assuming the printer is correct), and then splice that text into the //! source in place of the old AST's text. //! //! It turns out that in many cases, some subtrees of the new AST actually came from the old AST, //! and thus have source available. We'd prefer to use that old source text instead of the //! pretty-printer output, since it likely has nicer formatting, comments, etc. So there is some //! logic in this module for "recovering" from needing to use this strategy by splicing old AST //! text back into the new AST's pretty printer output. use std::rc::Rc; use rustc::session::Session; use rustc_target::spec::abi::Abi; use syntax::ThinVec; use syntax::ast::*; use syntax::attr; use syntax::source_map::{Span, Spanned, BytePos, FileName}; use syntax::ext::hygiene::SyntaxContext; use syntax::parse::token::{Token, DelimToken, Nonterminal}; use syntax::print::pprust; use syntax::ptr::P; use syntax::tokenstream::{TokenTree, Delimited, DelimSpan, TokenStream, ThinTokenStream}; use syntax::util::parser; use ast_manip::{GetNodeId, GetSpan, AstDeref}; use ast_manip::ast_map::NodeTable; use ast_manip::util::extended_span; use driver; use rewrite::{Rewrite, TextRewrite, RewriteCtxt, RewriteCtxtRef, TextAdjust, ExprPrec}; use rewrite::base::{is_rewritable, describe}; use rewrite::base::{binop_left_prec, binop_right_prec}; use util::Lone; // PrintParse /// Trait for nodes that can be printed and reparsed. /// /// Someday it may be useful to separate this into `Print` and `Parse` traits (and move them out of /// this module to a more general location), but right everything we care to print is also pretty /// easy to parse. pub trait PrintParse { /// Pretty print this node. fn to_string(&self) -> String; /// The result type of `Self::parse`. 
type Parsed: AstDeref<Target=Self>; /// Parse a string to a node of this type. Panics if parsing fails. fn parse(sess: &Session, src: &str) -> Self::Parsed; } impl PrintParse for Expr { fn to_string(&self) -> String { pprust::expr_to_string(self) } type Parsed = P<Expr>; fn parse(sess: &Session, src: &str) -> Self::Parsed { driver::parse_expr(sess, src) } } impl PrintParse for Pat { fn to_string(&self) -> String { pprust::pat_to_string(self) } type Parsed = P<Pat>; fn parse(sess: &Session, src: &str) -> Self::Parsed { driver::parse_pat(sess, src) } } impl PrintParse for Ty { fn to_string(&self) -> String { pprust::ty_to_string(self) } type Parsed = P<Ty>; fn parse(sess: &Session, src: &str) -> Self::Parsed { driver::parse_ty(sess, src) } } impl PrintParse for Stmt { fn to_string(&self) -> String { // pprust::stmt_to_string appends a semicolon to Expr kind statements, // not just to Semi kind statements. We want to differentiate these // nodes. match self.node { StmtKind::Expr(ref expr) => pprust::expr_to_string(expr), _ => pprust::stmt_to_string(self), } } type Parsed = Stmt; fn parse(sess: &Session, src: &str) -> Self::Parsed { driver::parse_stmts(sess, src).lone() } } impl PrintParse for Item { fn to_string(&self) -> String { match self.node { ItemKind::Mod(ref m) if !m.inline => { // Special case: non-inline `Mod` items print as `mod foo;`, which parses back as a // module with no children. We force all mods to be inline for printing. 
let mut tmp = self.clone(); expect!([tmp.node] ItemKind::Mod(ref mut m) => m.inline = true); warn!("printing non-inline module {:?} as inline for rewriting purposes", self.ident); pprust::item_to_string(&tmp) }, _ => pprust::item_to_string(self), } } type Parsed = P<Item>; fn parse(sess: &Session, src: &str) -> Self::Parsed { driver::parse_items(sess, src).lone() } } // TODO: ImplItem impl PrintParse for ForeignItem { fn to_string(&self) -> String { pprust::to_string(|s| s.print_foreign_item(self)) } type Parsed = ForeignItem; fn parse(sess: &Session, src: &str) -> Self::Parsed { driver::parse_foreign_items(sess, src).lone() } } impl PrintParse for Block { fn to_string(&self) -> String { pprust::block_to_string(self) } type Parsed = P<Block>; fn parse(sess: &Session, src: &str) -> Self::Parsed { driver::parse_block(sess, src) } } impl PrintParse for Arg { fn to_string(&self) -> String { pprust::arg_to_string(self) } type Parsed = Arg; fn parse(sess: &Session, src: &str) -> Self::Parsed { driver::parse_arg(sess, src) } } impl PrintParse for Attribute { fn to_string(&self) -> String { pprust::attr_to_string(self) } type Parsed = Attribute; fn parse(sess: &Session, src: &str) -> Self::Parsed { driver::run_parser(sess, src, |p| { match p.token { // `parse_attribute` doesn't handle inner or outer doc comments. Token::DocComment(s) => { assert!(src.ends_with('\n')); // Expand the `span` to include the trailing \n. Otherwise multiple spliced // doc comments will run together into a single line. let span = p.span.with_hi(p.span.hi() + BytePos(1)); let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, span); p.bump(); return Ok(attr); }, _ => p.parse_attribute(true), } }) } } // Splice /// Node types for which we can splice the node text into/out of the source. pub trait Splice { /// Get a span that covers the entire text of the node. This is used as the source or /// destination span when splicing text. 
fn splice_span(&self) -> Span; /// Get the text adjustment (such as parenthesization) to apply to the printed text before /// splicing it in. This relies on the `RewriteCtxt` accurately tracking the `ExprPrec`s of /// the parent nodes of the destination location. fn get_adjustment(&self, _rcx: &RewriteCtxt) -> TextAdjust { TextAdjust::None } } impl Splice for Expr { fn splice_span(&self) -> Span { extended_span(self.span, &self.attrs) } fn get_adjustment(&self, rcx: &RewriteCtxt) -> TextAdjust { // Check for cases where we can safely omit parentheses. let prec = self.precedence(); let need_parens = match rcx.expr_prec() { ExprPrec::Normal(min_prec) => prec.order() < min_prec, ExprPrec::Cond(min_prec) => prec.order() < min_prec || parser::contains_exterior_struct_lit(self), ExprPrec::Callee(min_prec) => match self.node { ExprKind::Field(..) => true, _ => prec.order() < min_prec, }, ExprPrec::LeftLess(min_prec) => match self.node { ExprKind::Cast(..) | ExprKind::Type(..) => true, _ => prec.order() < min_prec, } }; if need_parens { TextAdjust::Parenthesize } else { TextAdjust::None } } } impl Splice for Pat { fn splice_span(&self) -> Span { self.span } } impl Splice for Ty { fn splice_span(&self) -> Span { self.span } } impl Splice for Stmt { fn splice_span(&self) -> Span { self.span } } impl Splice for Item { fn splice_span(&self) -> Span { extended_span(self.span, &self.attrs) } } impl Splice for ForeignItem { fn splice_span(&self) -> Span { extended_span(self.span, &self.attrs) } } impl Splice for Block { fn splice_span(&self) -> Span { self.span } } impl Splice for Arg { fn splice_span(&self) -> Span { self.pat.span.to(self.ty.span) } } impl Splice for Attribute { fn splice_span(&self) -> Span { self.span } } // Recover /// Node types for which we can recover an old AST that has associated text. pub trait Recover { /// Obtain from the `RewriteCtxt` the table of old nodes of this type. 
fn node_table<'a, 's>(rcx: &'a RewriteCtxt<'s>) -> &'a NodeTable<'s, Self>; } impl Recover for Expr { fn node_table<'a, 's>(rcx: &'a RewriteCtxt<'s>) -> &'a NodeTable<'s, Self> { &rcx.old_nodes().exprs } } impl Recover for Pat { fn node_table<'a, 's>(rcx: &'a RewriteCtxt<'s>) -> &'a NodeTable<'s, Self> { &rcx.old_nodes().pats } } impl Recover for Ty { fn node_table<'a, 's>(rcx: &'a RewriteCtxt<'s>) -> &'a NodeTable<'s, Self> { &rcx.old_nodes().tys } } impl Recover for Stmt { fn node_table<'a, 's>(rcx: &'a RewriteCtxt<'s>) -> &'a NodeTable<'s, Self> { &rcx.old_nodes().stmts } } impl Recover for Item { fn node_table<'a, 's>(rcx: &'a RewriteCtxt<'s>) -> &'a NodeTable<'s, Self> { &rcx.old_nodes().items } } impl Recover for ForeignItem { fn node_table<'a, 's>(rcx: &'a RewriteCtxt<'s>) -> &'a NodeTable<'s, Self> { &rcx.old_nodes().foreign_items } } impl Recover for Block { fn node_table<'a, 's>(rcx: &'a RewriteCtxt<'s>) -> &'a NodeTable<'s, Self> { &rcx.old_nodes().blocks } } // RecoverChildren /// Codegenned trait for recursively traversing new and reparsed ASTs, looking for places we can /// invoke `recover`. pub trait RecoverChildren { /// Recursively attempt to `recover()` descendants of `reparsed`/`new`, not including /// `reparsed`/`new` itself. fn recover_children(reparsed: &Self, new: &Self, rcx: RewriteCtxtRef); /// Try to `recover` the node itself (if this node type implements `Recover`), then try to /// `recover_children`. fn recover_node_and_children(reparsed: &Self, new: &Self, rcx: RewriteCtxtRef); /// Attempt "restricted recovery" of the node itself, then try to `recover_children`. /// Restricted recovery succeeds only if the recovered AST has a different span than the old /// AST (otherwise we would get stuck in an infinite loop, replacing the old AST and old text /// with identical copies of themselves). 
fn recover_node_restricted(old_span: Span, reparsed: &Self, new: &Self, rcx: RewriteCtxtRef); } impl<T: RecoverChildren> RecoverChildren for P<T> { fn recover_children(reparsed: &Self, new: &Self, rcx: RewriteCtxtRef) { <T as RecoverChildren>::recover_children(reparsed, new, rcx) } fn recover_node_and_children(reparsed: &Self, new: &Self, rcx: RewriteCtxtRef) { <T as RecoverChildren>::recover_node_and_children(reparsed, new, rcx) } fn recover_node_restricted(old_span: Span, reparsed: &Self, new: &Self, rcx: RewriteCtxtRef) { <T as RecoverChildren>::recover_node_restricted(old_span, reparsed, new, rcx) } } impl<T: RecoverChildren> RecoverChildren for Rc<T> { fn recover_children(reparsed: &Self, new: &Self, rcx: RewriteCtxtRef) { <T as RecoverChildren>::recover_children(reparsed, new, rcx) } fn recover_node_and_children(reparsed: &Self, new: &Self, rcx: RewriteCtxtRef) { <T as RecoverChildren>::recover_node_and_children(reparsed, new, rcx) } fn recover_node_restricted(old_span: Span, reparsed: &Self, new: &Self, rcx: RewriteCtxtRef) { <T as RecoverChildren>::recover_node_restricted(old_span, reparsed, new, rcx) } } impl<T: RecoverChildren> RecoverChildren for Spanned<T> { fn recover_children(reparsed: &Self, new: &Self, rcx: RewriteCtxtRef) { <T as RecoverChildren>::recover_children(&reparsed.node, &new.node, rcx) } fn recover_node_and_children(reparsed: &Self, new: &Self, rcx: RewriteCtxtRef) { <T as RecoverChildren>::recover_node_and_children(&reparsed.node, &new.node, rcx) } fn recover_node_restricted(old_span: Span, reparsed: &Self, new: &Self, rcx: RewriteCtxtRef) { <T as RecoverChildren>::recover_node_restricted(old_span, &reparsed.node, &new.node, rcx) } } impl<T: RecoverChildren> RecoverChildren for Option<T> { fn recover_children(reparsed: &Self, new: &Self, rcx: RewriteCtxtRef) { match (reparsed, new) { (&Some(ref x1), &Some(ref x2)) => { RecoverChildren::recover_children(x1, x2, rcx); } (_, _) => {}, } } fn recover_node_and_children(reparsed: &Self, new: 
&Self, rcx: RewriteCtxtRef) { match (reparsed, new) { (&Some(ref x1), &Some(ref x2)) => { RecoverChildren::recover_node_and_children(x1, x2, rcx); } (_, _) => {}, } } fn recover_node_restricted(_old_span: Span, reparsed: &Self, new: &Self, rcx: RewriteCtxtRef) { // This type never implements `Recover`, so just call `recover_children`. RecoverChildren::recover_children(reparsed, new, rcx); } } impl<A: RecoverChildren, B: RecoverChildren> RecoverChildren for (A, B) { fn recover_children(reparsed: &Self, new: &Self, mut rcx: RewriteCtxtRef) { <A as RecoverChildren>::recover_children(&reparsed.0, &new.0, rcx.borrow()); <B as RecoverChildren>::recover_children(&reparsed.1, &new.1, rcx.borrow()); } fn recover_node_and_children(reparsed: &Self, new: &Self, mut rcx: RewriteCtxtRef) { <A as RecoverChildren>::recover_node_and_children(&reparsed.0, &new.0, rcx.borrow()); <B as RecoverChildren>::recover_node_and_children(&reparsed.1, &new.1, rcx.borrow()); } fn recover_node_restricted(_old_span: Span, reparsed: &Self, new: &Self, rcx: RewriteCtxtRef) { // This type never implements `Recover`, so just call `recover_children`. 
RecoverChildren::recover_children(reparsed, new, rcx); } } impl<A: RecoverChildren, B: RecoverChildren, C: RecoverChildren> RecoverChildren for (A, B, C) { fn recover_children(reparsed: &Self, new: &Self, mut rcx: RewriteCtxtRef) { <A as RecoverChildren>::recover_children(&reparsed.0, &new.0, rcx.borrow()); <B as RecoverChildren>::recover_children(&reparsed.1, &new.1, rcx.borrow()); <C as RecoverChildren>::recover_children(&reparsed.2, &new.2, rcx.borrow()); } fn recover_node_and_children(reparsed: &Self, new: &Self, mut rcx: RewriteCtxtRef) { <A as RecoverChildren>::recover_node_and_children(&reparsed.0, &new.0, rcx.borrow()); <B as RecoverChildren>::recover_node_and_children(&reparsed.1, &new.1, rcx.borrow()); <C as RecoverChildren>::recover_node_and_children(&reparsed.2, &new.2, rcx.borrow()); } fn recover_node_restricted(_old_span: Span, reparsed: &Self, new: &Self, rcx: RewriteCtxtRef) { // This type never implements `Recover`, so just call `recover_children`. RecoverChildren::recover_children(reparsed, new, rcx); } } impl<T: RecoverChildren> RecoverChildren for [T] { fn recover_children(reparsed: &Self, new: &Self, mut rcx: RewriteCtxtRef) { assert!(reparsed.len() == new.len(), "new and reprinted ASTs don't match"); for i in 0 .. reparsed.len() { RecoverChildren::recover_children(&reparsed[i], &new[i], rcx.borrow()); } } fn recover_node_and_children(reparsed: &Self, new: &Self, mut rcx: RewriteCtxtRef) { assert!(reparsed.len() == new.len(), "new and reprinted ASTs don't match"); for i in 0 .. reparsed.len() { RecoverChildren::recover_node_and_children(&reparsed[i], &new[i], rcx.borrow()); } } fn recover_node_restricted(_old_span: Span, reparsed: &Self, new: &Self, rcx: RewriteCtxtRef) { // This type never implements `Recover`, so just call `recover_children`. 
RecoverChildren::recover_children(reparsed, new, rcx); } } impl<T: RecoverChildren> RecoverChildren for Vec<T> { fn recover_children(reparsed: &Self, new: &Self, rcx: RewriteCtxtRef) { <[T] as RecoverChildren>::recover_children(&reparsed, &new, rcx) } fn recover_node_and_children(reparsed: &Self, new: &Self, rcx: RewriteCtxtRef) { <[T] as RecoverChildren>::recover_node_and_children(&reparsed, &new, rcx) } fn recover_node_restricted(old_span: Span, reparsed: &Self, new: &Self, rcx: RewriteCtxtRef) { <[T] as RecoverChildren>::recover_node_restricted(old_span, &reparsed, &new, rcx) } } impl<T: RecoverChildren> RecoverChildren for ThinVec<T> { fn recover_children(reparsed: &Self, new: &Self, rcx: RewriteCtxtRef) { <[T] as RecoverChildren>::recover_children(&reparsed, &new, rcx) } fn recover_node_and_children(reparsed: &Self, new: &Self, rcx: RewriteCtxtRef) { <[T] as RecoverChildren>::recover_node_and_children(&reparsed, &new, rcx) } fn recover_node_restricted(old_span: Span, reparsed: &Self, new: &Self, rcx: RewriteCtxtRef) { <[T] as RecoverChildren>::recover_node_restricted(old_span, &reparsed, &new, rcx) } } include!(concat!(env!("OUT_DIR"), "/rewrite_recover_children_gen.inc.rs")); /// Try to replace the text for `reparsed` with recovered text for `new`. This works as /// follows: /// /// 1. Find a node that has text available and whose `NodeId` matches `new.id`. This is the /// "old" node. Get the old node's text and AST. /// 2. Rewrite the old text to match the new AST. This is necessary because things other than /// the `NodeId` may differ between the old and new ASTs. /// 3. Splice the rewritten text in place of the text for `reparsed`. /// /// Returns `true` if all steps succeed. Returns `false` if it fails to find an old node or if /// it fails to rewrite the old node to match `new`. 
fn recover<'s, T>(maybe_restricted_span: Option<Span>, reparsed: &T, new: &T, mut rcx: RewriteCtxtRef<'s, '_>) -> bool where T: GetNodeId + Recover + Rewrite + Splice + 's { // Find a node with ID matching `new.id`, after accounting for renumbering of NodeIds. let old_id = rcx.new_to_old_id(new.get_node_id()); let old = match <T as Recover>::node_table(&mut rcx).get(old_id) { Some(x) => x, None => { return false; }, }; if !is_rewritable(old.splice_span()) { return false; } let sf = rcx.session().source_map().lookup_byte_offset(old.splice_span().lo()).sf; if let FileName::Macros(..) = sf.name { return false; } // If `maybe_restricted_span` is set, then we can only proceed if `old.splice_span() != // restricted_span`. What's really going on here is that `restricted_span` is the `old_span` // of the enclosing `rewrite_at`, and we need to avoid infinitely recursing through // `rewrite_at` and `recover` on the same node. if let Some(restricted_span) = maybe_restricted_span { if old.splice_span() == restricted_span { return false; } } info!("REVERT {}", describe(rcx.session(), reparsed.splice_span())); info!(" TO {}", describe(rcx.session(), old.splice_span())); let mut rw = TextRewrite::adjusted(reparsed.splice_span(), old.splice_span(), new.get_adjustment(&rcx)); let mark = rcx.mark(); let ok = Rewrite::rewrite(old, new, rcx.enter(&mut rw)); if !ok { rcx.rewind(mark); return false; } rcx.record(rw); true } pub fn rewrite<T>(old: &T, new: &T, rcx: RewriteCtxtRef) -> bool where T: PrintParse + RecoverChildren + Splice { if !is_rewritable(old.splice_span()) { // If we got here, it means rewriting failed somewhere inside macro-generated code, and // outside any chunks of AST that the macro copied out of its arguments (those chunks // would have non-dummy spans, and would be spliced in already). We give up on this // part of the rewrite when this happens, because rewriting inside the RHS of a // macro_rules! 
macro would be very difficult, and for procedural macros it's just // impossible. But we still report success (`return true`) because we don't want to force // replacement of the macro with its expansion. warn!("can't splice in fresh text for a non-rewritable node"); return true; } rewrite_at(old.splice_span(), new, rcx) } pub fn rewrite_at<T>(old_span: Span, new: &T, mut rcx: RewriteCtxtRef) -> bool where T: PrintParse + RecoverChildren + Splice { let printed = new.to_string(); let reparsed = T::parse(rcx.session(), &printed); let reparsed = reparsed.ast_deref(); if old_span.lo() != old_span.hi() { info!("REWRITE {}", describe(rcx.session(), old_span)); info!(" INTO {}", describe(rcx.session(), reparsed.splice_span())); } else { info!("INSERT AT {}", describe(rcx.session(), old_span)); info!(" TEXT {}", describe(rcx.session(), reparsed.splice_span())); } let mut rw = TextRewrite::adjusted(old_span, reparsed.splice_span(), new.get_adjustment(&rcx)); // Try recovery, starting in "restricted mode" to avoid infinite recursion. The guarantee of // `recover_node_restricted` is that if it calls into `Rewrite::rewrite(old2, new2, ...)`, then // `old2.splice_span() != old_span`, so we won't end up back here in `rewrite_at` with // identical arguments. RecoverChildren::recover_node_restricted( old_span, reparsed, new, rcx.enter(&mut rw)); rcx.record(rw); true }
35.607431
99
0.626469
e512badcad87de91a9ca7bdbdac5af0c486163de
12,368
use std::collections::{ BTreeMap, BTreeSet, HashMap, HashSet, btree_map, btree_set, hash_map, hash_set, }; use std::hash::Hash; use std::vec; use de; use bytes; /////////////////////////////////////////////////////////////////////////////// pub enum Error { SyntaxError, EndOfStreamError, UnknownFieldError(String), MissingFieldError(&'static str), } impl de::Error for Error { fn syntax_error() -> Self { Error::SyntaxError } fn end_of_stream_error() -> Self { Error::EndOfStreamError } fn unknown_field_error(field: &str) -> Self { Error::UnknownFieldError(field.to_string()) } fn missing_field_error(field: &'static str) -> Self { Error::MissingFieldError(field) } } /////////////////////////////////////////////////////////////////////////////// pub trait ValueDeserializer { type Deserializer: de::Deserializer<Error=Error>; fn into_deserializer(self) -> Self::Deserializer; } /////////////////////////////////////////////////////////////////////////////// impl ValueDeserializer for () { type Deserializer = UnitDeserializer; fn into_deserializer(self) -> UnitDeserializer { UnitDeserializer } } /// A helper deserializer that deserializes a `()`. pub struct UnitDeserializer; impl de::Deserializer for UnitDeserializer { type Error = Error; fn visit<V>(&mut self, mut visitor: V) -> Result<V::Value, Error> where V: de::Visitor, { visitor.visit_unit() } fn visit_option<V>(&mut self, mut visitor: V) -> Result<V::Value, Error> where V: de::Visitor, { visitor.visit_none() } } /////////////////////////////////////////////////////////////////////////////// macro_rules! 
primitive_deserializer { ($ty:ty, $name:ident, $method:ident) => { pub struct $name(Option<$ty>); impl ValueDeserializer for $ty { type Deserializer = $name; fn into_deserializer(self) -> $name { $name(Some(self)) } } impl de::Deserializer for $name { type Error = Error; fn visit<V>(&mut self, mut visitor: V) -> Result<V::Value, Error> where V: de::Visitor, { match self.0.take() { Some(v) => visitor.$method(v), None => Err(de::Error::end_of_stream_error()), } } } } } primitive_deserializer!(bool, BoolDeserializer, visit_bool); primitive_deserializer!(i8, I8Deserializer, visit_i8); primitive_deserializer!(i16, I16Deserializer, visit_i16); primitive_deserializer!(i32, I32Deserializer, visit_i32); primitive_deserializer!(i64, I64Deserializer, visit_i64); primitive_deserializer!(isize, IsizeDeserializer, visit_isize); primitive_deserializer!(u8, U8Deserializer, visit_u8); primitive_deserializer!(u16, U16Deserializer, visit_u16); primitive_deserializer!(u32, U32Deserializer, visit_u32); primitive_deserializer!(u64, U64Deserializer, visit_u64); primitive_deserializer!(usize, UsizeDeserializer, visit_usize); primitive_deserializer!(f32, F32Deserializer, visit_f32); primitive_deserializer!(f64, F64Deserializer, visit_f64); primitive_deserializer!(char, CharDeserializer, visit_char); /////////////////////////////////////////////////////////////////////////////// /// A helper deserializer that deserializes a `&str`. 
pub struct StrDeserializer<'a>(Option<&'a str>); impl<'a> ValueDeserializer for &'a str { type Deserializer = StrDeserializer<'a>; fn into_deserializer(self) -> StrDeserializer<'a> { StrDeserializer(Some(self)) } } impl<'a> de::Deserializer for StrDeserializer<'a> { type Error = Error; fn visit<V>(&mut self, mut visitor: V) -> Result<V::Value, Error> where V: de::Visitor, { match self.0.take() { Some(v) => visitor.visit_str(v), None => Err(de::Error::end_of_stream_error()), } } fn visit_enum<V>(&mut self, _name: &str, mut visitor: V) -> Result<V::Value, Error> where V: de::EnumVisitor, { visitor.visit(self) } } impl<'a> de::VariantVisitor for StrDeserializer<'a> { type Error = Error; fn visit_variant<T>(&mut self) -> Result<T, Error> where T: de::Deserialize, { de::Deserialize::deserialize(self) } fn visit_unit(&mut self) -> Result<(), Error> { Ok(()) } } /////////////////////////////////////////////////////////////////////////////// /// A helper deserializer that deserializes a `String`. 
pub struct StringDeserializer(Option<String>); impl ValueDeserializer for String { type Deserializer = StringDeserializer; fn into_deserializer(self) -> StringDeserializer { StringDeserializer(Some(self)) } } impl de::Deserializer for StringDeserializer { type Error = Error; fn visit<V>(&mut self, mut visitor: V) -> Result<V::Value, Error> where V: de::Visitor, { match self.0.take() { Some(string) => visitor.visit_string(string), None => Err(de::Error::end_of_stream_error()), } } fn visit_enum<V>(&mut self, _name: &str, mut visitor: V) -> Result<V::Value, Error> where V: de::EnumVisitor, { visitor.visit(self) } } impl<'a> de::VariantVisitor for StringDeserializer { type Error = Error; fn visit_variant<T>(&mut self) -> Result<T, Error> where T: de::Deserialize, { de::Deserialize::deserialize(self) } fn visit_unit(&mut self) -> Result<(), Error> { Ok(()) } } /////////////////////////////////////////////////////////////////////////////// pub struct SeqDeserializer<I> { iter: I, len: usize, } impl<I> SeqDeserializer<I> { pub fn new(iter: I, len: usize) -> Self { SeqDeserializer { iter: iter, len: len, } } } impl<I, T> de::Deserializer for SeqDeserializer<I> where I: Iterator<Item=T>, T: ValueDeserializer, { type Error = Error; fn visit<V>(&mut self, mut visitor: V) -> Result<V::Value, Error> where V: de::Visitor, { visitor.visit_seq(self) } } impl<I, T> de::SeqVisitor for SeqDeserializer<I> where I: Iterator<Item=T>, T: ValueDeserializer, { type Error = Error; fn visit<V>(&mut self) -> Result<Option<V>, Error> where V: de::Deserialize { match self.iter.next() { Some(value) => { self.len -= 1; let mut de = value.into_deserializer(); Ok(Some(try!(de::Deserialize::deserialize(&mut de)))) } None => Ok(None), } } fn end(&mut self) -> Result<(), Error> { if self.len == 0 { Ok(()) } else { Err(de::Error::end_of_stream_error()) } } fn size_hint(&self) -> (usize, Option<usize>) { (self.len, Some(self.len)) } } 
/////////////////////////////////////////////////////////////////////////////// impl<T> ValueDeserializer for Vec<T> where T: ValueDeserializer, { type Deserializer = SeqDeserializer<vec::IntoIter<T>>; fn into_deserializer(self) -> SeqDeserializer<vec::IntoIter<T>> { let len = self.len(); SeqDeserializer::new(self.into_iter(), len) } } impl<T> ValueDeserializer for BTreeSet<T> where T: ValueDeserializer + Eq + Ord, { type Deserializer = SeqDeserializer<btree_set::IntoIter<T>>; fn into_deserializer(self) -> SeqDeserializer<btree_set::IntoIter<T>> { let len = self.len(); SeqDeserializer::new(self.into_iter(), len) } } impl<T> ValueDeserializer for HashSet<T> where T: ValueDeserializer + Eq + Hash, { type Deserializer = SeqDeserializer<hash_set::IntoIter<T>>; fn into_deserializer(self) -> SeqDeserializer<hash_set::IntoIter<T>> { let len = self.len(); SeqDeserializer::new(self.into_iter(), len) } } /////////////////////////////////////////////////////////////////////////////// pub struct MapDeserializer<I, K, V> where I: Iterator<Item=(K, V)>, K: ValueDeserializer, V: ValueDeserializer, { iter: I, value: Option<V>, len: usize, } impl<I, K, V> MapDeserializer<I, K, V> where I: Iterator<Item=(K, V)>, K: ValueDeserializer, V: ValueDeserializer, { pub fn new(iter: I, len: usize) -> Self { MapDeserializer { iter: iter, value: None, len: len, } } } impl<I, K, V> de::Deserializer for MapDeserializer<I, K, V> where I: Iterator<Item=(K, V)>, K: ValueDeserializer, V: ValueDeserializer, { type Error = Error; fn visit<V_>(&mut self, mut visitor: V_) -> Result<V_::Value, Error> where V_: de::Visitor, { visitor.visit_map(self) } } impl<I, K, V> de::MapVisitor for MapDeserializer<I, K, V> where I: Iterator<Item=(K, V)>, K: ValueDeserializer, V: ValueDeserializer, { type Error = Error; fn visit_key<T>(&mut self) -> Result<Option<T>, Error> where T: de::Deserialize, { match self.iter.next() { Some((key, value)) => { self.len -= 1; self.value = Some(value); let mut de = 
key.into_deserializer(); Ok(Some(try!(de::Deserialize::deserialize(&mut de)))) } None => Ok(None), } } fn visit_value<T>(&mut self) -> Result<T, Error> where T: de::Deserialize, { match self.value.take() { Some(value) => { let mut de = value.into_deserializer(); de::Deserialize::deserialize(&mut de) } None => Err(de::Error::syntax_error()) } } fn end(&mut self) -> Result<(), Error> { if self.len == 0 { Ok(()) } else { Err(de::Error::end_of_stream_error()) } } fn size_hint(&self) -> (usize, Option<usize>) { (self.len, Some(self.len)) } } /////////////////////////////////////////////////////////////////////////////// impl<K, V> ValueDeserializer for BTreeMap<K, V> where K: ValueDeserializer + Eq + Ord, V: ValueDeserializer, { type Deserializer = MapDeserializer<btree_map::IntoIter<K, V>, K, V>; fn into_deserializer(self) -> MapDeserializer<btree_map::IntoIter<K, V>, K, V> { let len = self.len(); MapDeserializer::new(self.into_iter(), len) } } impl<K, V> ValueDeserializer for HashMap<K, V> where K: ValueDeserializer + Eq + Hash, V: ValueDeserializer, { type Deserializer = MapDeserializer<hash_map::IntoIter<K, V>, K, V>; fn into_deserializer(self) -> MapDeserializer<hash_map::IntoIter<K, V>, K, V> { let len = self.len(); MapDeserializer::new(self.into_iter(), len) } } /////////////////////////////////////////////////////////////////////////////// impl<'a> ValueDeserializer for bytes::Bytes<'a> { type Deserializer = BytesDeserializer<'a>; fn into_deserializer(self) -> BytesDeserializer<'a> { BytesDeserializer(Some(self.into())) } } pub struct BytesDeserializer<'a> (Option<&'a [u8]>); impl<'a> de::Deserializer for BytesDeserializer<'a> { type Error = Error; fn visit<V>(&mut self, mut visitor: V) -> Result<V::Value, Error> where V: de::Visitor, { match self.0.take() { Some(bytes) => visitor.visit_bytes(bytes), None => Err(de::Error::end_of_stream_error()), } } } /////////////////////////////////////////////////////////////////////////////// impl ValueDeserializer for 
bytes::ByteBuf { type Deserializer = ByteBufDeserializer; fn into_deserializer(self) -> Self::Deserializer { ByteBufDeserializer(Some(self.into())) } } pub struct ByteBufDeserializer(Option<Vec<u8>>); impl de::Deserializer for ByteBufDeserializer { type Error = Error; fn visit<V>(&mut self, mut visitor: V) -> Result<V::Value, Error> where V: de::Visitor, { match self.0.take() { Some(bytes) => visitor.visit_byte_buf(bytes), None => Err(de::Error::end_of_stream_error()), } } }
26.540773
95
0.546491
9bc32014b58c0c3d096e9730896d708cf1249ba0
3,271
// Copyright (c) The Libra Core Contributors // SPDX-License-Identifier: Apache-2.0 #![forbid(unsafe_code)] use anyhow::{ensure, format_err, Result}; use serde_json::Value; use std::collections::HashSet; use std::{ffi::OsStr, fmt, process::Stdio}; use tokio; #[derive(Clone)] pub struct Instance { peer_name: String, ip: String, ac_port: u32, } impl Instance { pub fn new(peer_name: String, ip: String, ac_port: u32) -> Instance { Instance { peer_name, ip, ac_port, } } pub async fn run_cmd_tee_err<I, S>(&self, args: I) -> Result<()> where I: IntoIterator<Item = S>, S: AsRef<OsStr>, { self.run_cmd_inner(false, args).await } pub async fn run_cmd<I, S>(&self, args: I) -> Result<()> where I: IntoIterator<Item = S>, S: AsRef<OsStr>, { self.run_cmd_inner(true, args).await } pub async fn run_cmd_inner<I, S>(&self, no_std_err: bool, args: I) -> Result<()> where I: IntoIterator<Item = S>, S: AsRef<OsStr>, { let ssh_dest = format!("ec2-user@{}", self.ip); let ssh_args = vec![ "ssh", "-i", "/libra_rsa", "-oStrictHostKeyChecking=no", "-oConnectTimeout=3", "-oConnectionAttempts=10", ssh_dest.as_str(), ]; let mut ssh_cmd = tokio::process::Command::new("timeout"); ssh_cmd.arg("60").args(ssh_args).args(args); if no_std_err { ssh_cmd.stderr(Stdio::null()); } let status = ssh_cmd.status().await?; ensure!( status.success(), "Failed with code {}", status.code().unwrap_or(-1) ); Ok(()) } pub fn counter(&self, counter: &str) -> Result<f64> { let response: Value = reqwest::get(format!("http://{}:9101/counters", self.ip).as_str())?.json()?; if let Value::Number(ref response) = response[counter] { if let Some(response) = response.as_f64() { Ok(response) } else { Err(format_err!( "Failed to parse counter({}) as f64: {:?}", counter, response )) } } else { Err(format_err!( "Counter({}) was not a Value::Number: {:?}", counter, response[counter] )) } } pub fn is_up(&self) -> bool { reqwest::get(format!("http://{}:9101/counters", self.ip).as_str()) .map(|x| x.status().is_success()) .unwrap_or(false) 
} pub fn peer_name(&self) -> &String { &self.peer_name } pub fn ip(&self) -> &String { &self.ip } pub fn ac_port(&self) -> u32 { self.ac_port } } impl fmt::Display for Instance { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}({})", self.peer_name, self.ip) } } pub fn instancelist_to_set(instances: &[Instance]) -> HashSet<String> { let mut r = HashSet::new(); for instance in instances { r.insert(instance.peer_name().clone()); } r }
25.755906
88
0.504433
18b9467cb1cacd37bf40ed3c54e82cb3cc2759af
15,978
use crate::entry::{Descriptor, Entry}; use crate::fdpool::FdPool; use crate::sys::entry_impl::OsHandle; use crate::virtfs::{VirtualDir, VirtualDirEntry}; use crate::wasi::{self, WasiError, WasiResult}; use std::borrow::Borrow; use std::collections::HashMap; use std::ffi::{self, CString, OsString}; use std::fs::File; use std::path::{Path, PathBuf}; use std::{env, io, string}; /// Possible errors when `WasiCtxBuilder` fails building /// `WasiCtx`. #[derive(Debug, thiserror::Error)] pub enum WasiCtxBuilderError { /// General I/O error was encountered. #[error("general I/O error encountered: {0}")] Io(#[from] io::Error), /// Provided sequence of bytes was not a valid UTF-8. #[error("provided sequence is not valid UTF-8: {0}")] InvalidUtf8(#[from] string::FromUtf8Error), /// Provided sequence of bytes was not a valid UTF-16. /// /// This error is expected to only occur on Windows hosts. #[error("provided sequence is not valid UTF-16: {0}")] InvalidUtf16(#[from] string::FromUtf16Error), /// Provided sequence of bytes contained an unexpected NUL byte. #[error("provided sequence contained an unexpected NUL byte")] UnexpectedNul(#[from] ffi::NulError), /// Provided `File` is not a directory. #[error("preopened directory path {} is not a directory", .0.display())] NotADirectory(PathBuf), /// `WasiCtx` has too many opened files. 
#[error("context object has too many opened files")] TooManyFilesOpen, } type WasiCtxBuilderResult<T> = std::result::Result<T, WasiCtxBuilderError>; enum PendingEntry { Thunk(fn() -> io::Result<Entry>), File(File), } impl std::fmt::Debug for PendingEntry { fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Self::Thunk(f) => write!( fmt, "PendingFdEntry::Thunk({:p})", f as *const fn() -> io::Result<Entry> ), Self::File(f) => write!(fmt, "PendingFdEntry::File({:?})", f), } } } #[derive(Debug, Eq, Hash, PartialEq)] enum PendingCString { Bytes(Vec<u8>), OsString(OsString), } impl From<Vec<u8>> for PendingCString { fn from(bytes: Vec<u8>) -> Self { Self::Bytes(bytes) } } impl From<OsString> for PendingCString { fn from(s: OsString) -> Self { Self::OsString(s) } } impl PendingCString { fn into_string(self) -> WasiCtxBuilderResult<String> { let res = match self { Self::Bytes(v) => String::from_utf8(v)?, #[cfg(unix)] Self::OsString(s) => { use std::os::unix::ffi::OsStringExt; String::from_utf8(s.into_vec())? } #[cfg(windows)] Self::OsString(s) => { use std::os::windows::ffi::OsStrExt; let bytes: Vec<u16> = s.encode_wide().collect(); String::from_utf16(&bytes)? } }; Ok(res) } /// Create a `CString` containing valid UTF-8. fn into_utf8_cstring(self) -> WasiCtxBuilderResult<CString> { let s = self.into_string()?; let s = CString::new(s)?; Ok(s) } } /// A builder allowing customizable construction of `WasiCtx` instances. pub struct WasiCtxBuilder { stdin: Option<PendingEntry>, stdout: Option<PendingEntry>, stderr: Option<PendingEntry>, preopens: Option<Vec<(PathBuf, Descriptor)>>, args: Option<Vec<PendingCString>>, env: Option<HashMap<PendingCString, PendingCString>>, } impl WasiCtxBuilder { /// Builder for a new `WasiCtx`. 
pub fn new() -> Self { let stdin = Some(PendingEntry::Thunk(Entry::null)); let stdout = Some(PendingEntry::Thunk(Entry::null)); let stderr = Some(PendingEntry::Thunk(Entry::null)); Self { stdin, stdout, stderr, preopens: Some(Vec::new()), args: Some(Vec::new()), env: Some(HashMap::new()), } } /// Add arguments to the command-line arguments list. /// /// Arguments must be valid UTF-8 with no NUL bytes, or else `WasiCtxBuilder::build()` will fail. pub fn args<S: AsRef<[u8]>>(&mut self, args: impl IntoIterator<Item = S>) -> &mut Self { self.args .as_mut() .unwrap() .extend(args.into_iter().map(|a| a.as_ref().to_vec().into())); self } /// Add an argument to the command-line arguments list. /// /// Arguments must be valid UTF-8 with no NUL bytes, or else `WasiCtxBuilder::build()` will fail. pub fn arg<S: AsRef<[u8]>>(&mut self, arg: S) -> &mut Self { self.args .as_mut() .unwrap() .push(arg.as_ref().to_vec().into()); self } /// Inherit the command-line arguments from the host process. /// /// If any arguments from the host process contain invalid UTF-8, `WasiCtxBuilder::build()` will /// fail. pub fn inherit_args(&mut self) -> &mut Self { let args = self.args.as_mut().unwrap(); args.clear(); args.extend(env::args_os().map(PendingCString::OsString)); self } /// Inherit stdin from the host process. pub fn inherit_stdin(&mut self) -> &mut Self { self.stdin = Some(PendingEntry::Thunk(Entry::duplicate_stdin)); self } /// Inherit stdout from the host process. pub fn inherit_stdout(&mut self) -> &mut Self { self.stdout = Some(PendingEntry::Thunk(Entry::duplicate_stdout)); self } /// Inherit stdout from the host process. pub fn inherit_stderr(&mut self) -> &mut Self { self.stderr = Some(PendingEntry::Thunk(Entry::duplicate_stderr)); self } /// Inherit the stdin, stdout, and stderr streams from the host process. 
pub fn inherit_stdio(&mut self) -> &mut Self { self.stdin = Some(PendingEntry::Thunk(Entry::duplicate_stdin)); self.stdout = Some(PendingEntry::Thunk(Entry::duplicate_stdout)); self.stderr = Some(PendingEntry::Thunk(Entry::duplicate_stderr)); self } /// Inherit the environment variables from the host process. /// /// If any environment variables from the host process contain invalid Unicode (UTF-16 for /// Windows, UTF-8 for other platforms), `WasiCtxBuilder::build()` will fail. pub fn inherit_env(&mut self) -> &mut Self { let env = self.env.as_mut().unwrap(); env.clear(); env.extend(std::env::vars_os().map(|(k, v)| (k.into(), v.into()))); self } /// Add an entry to the environment. /// /// Environment variable keys and values must be valid UTF-8 with no NUL bytes, or else /// `WasiCtxBuilder::build()` will fail. pub fn env<S: AsRef<[u8]>>(&mut self, k: S, v: S) -> &mut Self { self.env .as_mut() .unwrap() .insert(k.as_ref().to_vec().into(), v.as_ref().to_vec().into()); self } /// Add entries to the environment. /// /// Environment variable keys and values must be valid UTF-8 with no NUL bytes, or else /// `WasiCtxBuilder::build()` will fail. pub fn envs<S: AsRef<[u8]>, T: Borrow<(S, S)>>( &mut self, envs: impl IntoIterator<Item = T>, ) -> &mut Self { self.env.as_mut().unwrap().extend(envs.into_iter().map(|t| { let (k, v) = t.borrow(); (k.as_ref().to_vec().into(), v.as_ref().to_vec().into()) })); self } /// Provide a File to use as stdin pub fn stdin(&mut self, file: File) -> &mut Self { self.stdin = Some(PendingEntry::File(file)); self } /// Provide a File to use as stdout pub fn stdout(&mut self, file: File) -> &mut Self { self.stdout = Some(PendingEntry::File(file)); self } /// Provide a File to use as stderr pub fn stderr(&mut self, file: File) -> &mut Self { self.stderr = Some(PendingEntry::File(file)); self } /// Add a preopened directory. 
pub fn preopened_dir<P: AsRef<Path>>(&mut self, dir: File, guest_path: P) -> &mut Self { self.preopens.as_mut().unwrap().push(( guest_path.as_ref().to_owned(), Descriptor::OsHandle(OsHandle::from(dir)), )); self } /// Add a preopened virtual directory. pub fn preopened_virt<P: AsRef<Path>>( &mut self, dir: VirtualDirEntry, guest_path: P, ) -> &mut Self { fn populate_directory(virtentry: HashMap<String, VirtualDirEntry>, dir: &mut VirtualDir) { for (path, entry) in virtentry.into_iter() { match entry { VirtualDirEntry::Directory(dir_entries) => { let mut subdir = VirtualDir::new(true); populate_directory(dir_entries, &mut subdir); dir.add_dir(subdir, path); } VirtualDirEntry::File(content) => { dir.add_file(content, path); } } } } let dir = if let VirtualDirEntry::Directory(entries) = dir { let mut dir = VirtualDir::new(true); populate_directory(entries, &mut dir); Box::new(dir) } else { panic!("the root of a VirtualDirEntry tree must be a VirtualDirEntry::Directory"); }; self.preopens .as_mut() .unwrap() .push((guest_path.as_ref().to_owned(), Descriptor::VirtualFile(dir))); self } /// Build a `WasiCtx`, consuming this `WasiCtxBuilder`. /// /// If any of the arguments or environment variables in this builder cannot be converted into /// `CString`s, either due to NUL bytes or Unicode conversions, this will fail. pub fn build(&mut self) -> WasiCtxBuilderResult<WasiCtx> { // Process arguments and environment variables into `CString`s, failing quickly if they // contain any NUL bytes, or if conversion from `OsString` fails. let args = self .args .take() .unwrap() .into_iter() .map(|arg| arg.into_utf8_cstring()) .collect::<WasiCtxBuilderResult<Vec<CString>>>()?; let env = self .env .take() .unwrap() .into_iter() .map(|(k, v)| { k.into_string().and_then(|mut pair| { v.into_string().and_then(|v| { pair.push('='); pair.push_str(v.as_str()); // We have valid UTF-8, but the keys and values have not yet been checked // for NULs, so we do a final check here. 
let s = CString::new(pair)?; Ok(s) }) }) }) .collect::<WasiCtxBuilderResult<Vec<CString>>>()?; let mut fd_pool = FdPool::new(); let mut entries: HashMap<wasi::__wasi_fd_t, Entry> = HashMap::new(); // Populate the non-preopen entries. for pending in vec![ self.stdin.take().unwrap(), self.stdout.take().unwrap(), self.stderr.take().unwrap(), ] { let fd = fd_pool .allocate() .ok_or(WasiCtxBuilderError::TooManyFilesOpen)?; log::debug!("WasiCtx inserting ({:?}, {:?})", fd, pending); match pending { PendingEntry::Thunk(f) => { entries.insert(fd, f()?); } PendingEntry::File(f) => { entries.insert(fd, Entry::from(Descriptor::OsHandle(OsHandle::from(f)))?); } } } // Then add the preopen entries. for (guest_path, dir) in self.preopens.take().unwrap() { // We do the increment at the beginning of the loop body, so that we don't overflow // unnecessarily if we have exactly the maximum number of file descriptors. let preopen_fd = fd_pool .allocate() .ok_or(WasiCtxBuilderError::TooManyFilesOpen)?; match &dir { Descriptor::OsHandle(handle) => { if !handle.metadata()?.is_dir() { return Err(WasiCtxBuilderError::NotADirectory(guest_path)); } } Descriptor::VirtualFile(virt) => { if virt.get_file_type() != wasi::__WASI_FILETYPE_DIRECTORY { return Err(WasiCtxBuilderError::NotADirectory(guest_path)); } } Descriptor::Stdin | Descriptor::Stdout | Descriptor::Stderr => { panic!("implementation error, stdin/stdout/stderr shouldn't be in the list of preopens"); } } let mut fe = Entry::from(dir)?; fe.preopen_path = Some(guest_path); log::debug!("WasiCtx inserting ({:?}, {:?})", preopen_fd, fe); entries.insert(preopen_fd, fe); log::debug!("WasiCtx entries = {:?}", entries); } Ok(WasiCtx { args, env, entries, fd_pool, }) } } #[derive(Debug)] pub struct WasiCtx { fd_pool: FdPool, entries: HashMap<wasi::__wasi_fd_t, Entry>, pub(crate) args: Vec<CString>, pub(crate) env: Vec<CString>, } impl WasiCtx { /// Make a new `WasiCtx` with some default settings. 
/// /// - File descriptors 0, 1, and 2 inherit stdin, stdout, and stderr from the host process. /// /// - Environment variables are inherited from the host process. /// /// To override these behaviors, use `WasiCtxBuilder`. pub fn new<S: AsRef<[u8]>>(args: impl IntoIterator<Item = S>) -> WasiCtxBuilderResult<Self> { WasiCtxBuilder::new() .args(args) .inherit_stdio() .inherit_env() .build() } /// Check if `WasiCtx` contains the specified raw WASI `fd`. pub(crate) unsafe fn contains_entry(&self, fd: wasi::__wasi_fd_t) -> bool { self.entries.contains_key(&fd) } /// Get an immutable `Entry` corresponding to the specified raw WASI `fd`. pub(crate) unsafe fn get_entry(&self, fd: wasi::__wasi_fd_t) -> WasiResult<&Entry> { self.entries.get(&fd).ok_or(WasiError::EBADF) } /// Get a mutable `Entry` corresponding to the specified raw WASI `fd`. pub(crate) unsafe fn get_entry_mut(&mut self, fd: wasi::__wasi_fd_t) -> WasiResult<&mut Entry> { self.entries.get_mut(&fd).ok_or(WasiError::EBADF) } /// Insert the specified `Entry` into the `WasiCtx` object. /// /// The `FdEntry` will automatically get another free raw WASI `fd` assigned. Note that /// the two subsequent free raw WASI `fd`s do not have to be stored contiguously. pub(crate) fn insert_entry(&mut self, fe: Entry) -> WasiResult<wasi::__wasi_fd_t> { let fd = self.fd_pool.allocate().ok_or(WasiError::EMFILE)?; self.entries.insert(fd, fe); Ok(fd) } /// Insert the specified `Entry` with the specified raw WASI `fd` key into the `WasiCtx` /// object. pub(crate) fn insert_entry_at(&mut self, fd: wasi::__wasi_fd_t, fe: Entry) -> Option<Entry> { self.entries.insert(fd, fe) } /// Remove `Entry` corresponding to the specified raw WASI `fd` from the `WasiCtx` object. pub(crate) fn remove_entry(&mut self, fd: wasi::__wasi_fd_t) -> WasiResult<Entry> { // Remove the `fd` from valid entries. let entry = self.entries.remove(&fd).ok_or(WasiError::EBADF)?; // Next, deallocate the `fd`. self.fd_pool.deallocate(fd); Ok(entry) } }
35.506667
109
0.561334
4aa738f961def082ae3ee3555fa889cc03174ae7
16,128
// Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0. use std::io::{Error, ErrorKind, Read}; use std::path::Path; use std::{fs, io, process::Command}; use derive_new::new; use prost::Message; use prost_build::{protoc, protoc_include, Config, Method, Service, ServiceGenerator}; use prost_types::FileDescriptorSet; use crate::util::{fq_grpc, to_snake_case, MethodType}; /// Returns the names of all packages compiled. pub fn compile_protos<P>(protos: &[P], includes: &[P], out_dir: &str) -> io::Result<Vec<String>> where P: AsRef<Path>, { let mut prost_config = Config::new(); prost_config.service_generator(Box::new(Generator)); prost_config.out_dir(out_dir); // Create a file descriptor set for the protocol files. let tmp = tempfile::Builder::new().prefix("prost-build").tempdir()?; let descriptor_set = tmp.path().join("prost-descriptor-set"); let mut cmd = Command::new(protoc()); cmd.arg("--include_imports") .arg("--include_source_info") .arg("-o") .arg(&descriptor_set); for include in includes { cmd.arg("-I").arg(include.as_ref()); } // Set the protoc include after the user includes in case the user wants to // override one of the built-in .protos. cmd.arg("-I").arg(protoc_include()); for proto in protos { cmd.arg(proto.as_ref()); } let output = cmd.output()?; if !output.status.success() { return Err(Error::new( ErrorKind::Other, format!("protoc failed: {}", String::from_utf8_lossy(&output.stderr)), )); } let mut buf = Vec::new(); fs::File::open(descriptor_set)?.read_to_end(&mut buf)?; let descriptor_set = FileDescriptorSet::decode(buf.as_slice())?; // Get the package names from the descriptor set. let mut packages: Vec<_> = descriptor_set .file .iter() .filter_map(|f| f.package.clone()) .collect(); packages.sort(); packages.dedup(); // FIXME(https://github.com/danburkert/prost/pull/155) // Unfortunately we have to forget the above work and use `compile_protos` to // actually generate the Rust code. 
prost_config.compile_protos(protos, includes)?; Ok(packages) } struct Generator; impl ServiceGenerator for Generator { fn generate(&mut self, service: Service, buf: &mut String) { generate_methods(&service, buf); generate_client(&service, buf); generate_server(&service, buf); } } fn generate_methods(service: &Service, buf: &mut String) { let service_path = if service.package.is_empty() { format!("/{}", service.proto_name) } else { format!("/{}.{}", service.package, service.proto_name) }; for method in &service.methods { generate_method(&service.name, &service_path, method, buf); } } fn const_method_name(service_name: &str, method: &Method) -> String { format!( "METHOD_{}_{}", to_snake_case(service_name).to_uppercase(), method.name.to_uppercase() ) } fn generate_method(service_name: &str, service_path: &str, method: &Method, buf: &mut String) { let name = const_method_name(service_name, method); let ty = format!( "{}<{}, {}>", fq_grpc("Method"), method.input_type, method.output_type ); buf.push_str("const "); buf.push_str(&name); buf.push_str(": "); buf.push_str(&ty); buf.push_str(" = "); generate_method_body(service_path, method, buf); } fn generate_method_body(service_path: &str, method: &Method, buf: &mut String) { let ty = fq_grpc(&MethodType::from_method(method).to_string()); let pr_mar = format!( "{} {{ ser: {}, de: {} }}", fq_grpc("Marshaller"), fq_grpc("pr_ser"), fq_grpc("pr_de") ); buf.push_str(&fq_grpc("Method")); buf.push('{'); generate_field_init("ty", &ty, buf); generate_field_init( "name", &format!("\"{}/{}\"", service_path, method.proto_name), buf, ); generate_field_init("req_mar", &pr_mar, buf); generate_field_init("resp_mar", &pr_mar, buf); buf.push_str("};\n"); } // TODO share this code with protobuf codegen impl MethodType { fn from_method(method: &Method) -> MethodType { match (method.client_streaming, method.server_streaming) { (false, false) => MethodType::Unary, (true, false) => MethodType::ClientStreaming, (false, true) => 
MethodType::ServerStreaming, (true, true) => MethodType::Duplex, } } } fn generate_field_init(name: &str, value: &str, buf: &mut String) { buf.push_str(name); buf.push_str(": "); buf.push_str(value); buf.push_str(", "); } fn generate_client(service: &Service, buf: &mut String) { let client_name = format!("{}Client", service.name); buf.push_str("#[derive(Clone)]\n"); buf.push_str("pub struct "); buf.push_str(&client_name); buf.push_str(" { client: ::grpcio::Client }\n"); buf.push_str("impl "); buf.push_str(&client_name); buf.push_str(" {\n"); generate_ctor(&client_name, buf); generate_client_methods(service, buf); generate_spawn(buf); buf.push_str("}\n") } fn generate_ctor(client_name: &str, buf: &mut String) { buf.push_str("pub fn new(channel: ::grpcio::Channel) -> Self { "); buf.push_str(client_name); buf.push_str(" { client: ::grpcio::Client::new(channel) }"); buf.push_str("}\n"); } fn generate_client_methods(service: &Service, buf: &mut String) { for method in &service.methods { generate_client_method(&service.name, method, buf); } } fn generate_client_method(service_name: &str, method: &Method, buf: &mut String) { let name = &format!( "METHOD_{}_{}", to_snake_case(service_name).to_uppercase(), method.name.to_uppercase() ); match MethodType::from_method(method) { MethodType::Unary => { ClientMethod::new( &method.name, true, Some(&method.input_type), false, vec![&method.output_type], "unary_call", name, ) .generate(buf); ClientMethod::new( &method.name, false, Some(&method.input_type), false, vec![&method.output_type], "unary_call", name, ) .generate(buf); ClientMethod::new( &method.name, true, Some(&method.input_type), true, vec![&format!( "{}<{}>", fq_grpc("ClientUnaryReceiver"), method.output_type )], "unary_call", name, ) .generate(buf); ClientMethod::new( &method.name, false, Some(&method.input_type), true, vec![&format!( "{}<{}>", fq_grpc("ClientUnaryReceiver"), method.output_type )], "unary_call", name, ) .generate(buf); } MethodType::ClientStreaming => { 
ClientMethod::new( &method.name, true, None, false, vec![ &format!("{}<{}>", fq_grpc("ClientCStreamSender"), method.input_type), &format!( "{}<{}>", fq_grpc("ClientCStreamReceiver"), method.output_type ), ], "client_streaming", name, ) .generate(buf); ClientMethod::new( &method.name, false, None, false, vec![ &format!("{}<{}>", fq_grpc("ClientCStreamSender"), method.input_type), &format!( "{}<{}>", fq_grpc("ClientCStreamReceiver"), method.output_type ), ], "client_streaming", name, ) .generate(buf); } MethodType::ServerStreaming => { ClientMethod::new( &method.name, true, Some(&method.input_type), false, vec![&format!( "{}<{}>", fq_grpc("ClientSStreamReceiver"), method.output_type )], "server_streaming", name, ) .generate(buf); ClientMethod::new( &method.name, false, Some(&method.input_type), false, vec![&format!( "{}<{}>", fq_grpc("ClientSStreamReceiver"), method.output_type )], "server_streaming", name, ) .generate(buf); } MethodType::Duplex => { ClientMethod::new( &method.name, true, None, false, vec![ &format!("{}<{}>", fq_grpc("ClientDuplexSender"), method.input_type), &format!( "{}<{}>", fq_grpc("ClientDuplexReceiver"), method.output_type ), ], "duplex_streaming", name, ) .generate(buf); ClientMethod::new( &method.name, false, None, false, vec![ &format!("{}<{}>", fq_grpc("ClientDuplexSender"), method.input_type), &format!( "{}<{}>", fq_grpc("ClientDuplexReceiver"), method.output_type ), ], "duplex_streaming", name, ) .generate(buf); } } } #[derive(new)] struct ClientMethod<'a> { method_name: &'a str, opt: bool, request: Option<&'a str>, r#async: bool, result_types: Vec<&'a str>, inner_method_name: &'a str, data_name: &'a str, } impl<'a> ClientMethod<'a> { fn generate(&self, buf: &mut String) { buf.push_str("pub fn "); buf.push_str(self.method_name); if self.r#async { buf.push_str("_async"); } if self.opt { buf.push_str("_opt"); } buf.push_str("(&self"); if let Some(req) = self.request { buf.push_str(", req: &"); buf.push_str(req); } if self.opt { 
buf.push_str(", opt: "); buf.push_str(&fq_grpc("CallOption")); } buf.push_str(") -> "); buf.push_str(&fq_grpc("Result")); buf.push('<'); if self.result_types.len() != 1 { buf.push('('); } for rt in &self.result_types { buf.push_str(rt); buf.push(','); } if self.result_types.len() != 1 { buf.push(')'); } buf.push_str("> { "); if self.opt { self.generate_inner_body(buf); } else { self.generate_opt_body(buf); } buf.push_str(" }\n"); } // Method delegates to the `_opt` version of the method. fn generate_opt_body(&self, buf: &mut String) { buf.push_str("self."); buf.push_str(self.method_name); if self.r#async { buf.push_str("_async"); } buf.push_str("_opt("); if self.request.is_some() { buf.push_str("req, "); } buf.push_str(&fq_grpc("CallOption::default()")); buf.push(')'); } // Method delegates to the inner client. fn generate_inner_body(&self, buf: &mut String) { buf.push_str("self.client."); buf.push_str(self.inner_method_name); if self.r#async { buf.push_str("_async"); } buf.push_str("(&"); buf.push_str(self.data_name); if self.request.is_some() { buf.push_str(", req"); } buf.push_str(", opt)"); } } fn generate_spawn(buf: &mut String) { buf.push_str( "pub fn spawn<F>(&self, f: F) \ where F: ::futures::Future<Item = (), Error = ()> + Send + 'static {\ self.client.spawn(f)\ }\n", ); } fn generate_server(service: &Service, buf: &mut String) { buf.push_str("pub trait "); buf.push_str(&service.name); buf.push_str(" {\n"); generate_server_methods(service, buf); buf.push_str("}\n"); buf.push_str("pub fn create_"); buf.push_str(&to_snake_case(&service.name)); buf.push_str("<S: "); buf.push_str(&service.name); buf.push_str(" + Send + Clone + 'static>(s: S) -> "); buf.push_str(&fq_grpc("Service")); buf.push_str(" {\n"); buf.push_str("let mut builder = ::grpcio::ServiceBuilder::new();\n"); for method in &service.methods[0..service.methods.len() - 1] { buf.push_str("let mut instance = s.clone();\n"); generate_method_bind(&service.name, method, buf); } buf.push_str("let mut 
instance = s;\n"); generate_method_bind( &service.name, &service.methods[service.methods.len() - 1], buf, ); buf.push_str("builder.build()\n"); buf.push_str("}\n"); } fn generate_server_methods(service: &Service, buf: &mut String) { for method in &service.methods { let method_type = MethodType::from_method(method); let request_arg = match method_type { MethodType::Unary | MethodType::ServerStreaming => { format!("req: {}", method.input_type) } MethodType::ClientStreaming | MethodType::Duplex => format!( "stream: {}<{}>", fq_grpc("RequestStream"), method.input_type ), }; let response_type = match method_type { MethodType::Unary => "UnarySink", MethodType::ClientStreaming => "ClientStreamingSink", MethodType::ServerStreaming => "ServerStreamingSink", MethodType::Duplex => "DuplexSink", }; generate_server_method(method, &request_arg, response_type, buf); } } fn generate_server_method( method: &Method, request_arg: &str, response_type: &str, buf: &mut String, ) { buf.push_str("fn "); buf.push_str(&method.name); buf.push_str("(&mut self, ctx: "); buf.push_str(&fq_grpc("RpcContext")); buf.push_str(", "); buf.push_str(request_arg); buf.push_str(", sink: "); buf.push_str(&fq_grpc(response_type)); buf.push('<'); buf.push_str(&method.output_type); buf.push('>'); buf.push_str(");\n"); } fn generate_method_bind(service_name: &str, method: &Method, buf: &mut String) { let add_name = match MethodType::from_method(method) { MethodType::Unary => "add_unary_handler", MethodType::ClientStreaming => "add_client_streaming_handler", MethodType::ServerStreaming => "add_server_streaming_handler", MethodType::Duplex => "add_duplex_streaming_handler", }; buf.push_str("builder = builder."); buf.push_str(add_name); buf.push_str("(&"); buf.push_str(&const_method_name(service_name, method)); buf.push_str(", move |ctx, req, resp| instance."); buf.push_str(&method.name); buf.push_str("(ctx, req, resp));\n"); }
29.922078
96
0.514633