hexsha
stringlengths 40
40
| size
int64 2
1.05M
| content
stringlengths 2
1.05M
| avg_line_length
float64 1.33
100
| max_line_length
int64 1
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
e9ddcb9be0c6d816371719f10bfb77f23f8924ff | 14,921 | use itertools::Itertools;
use std::collections::HashSet;
use tempdir::TempDir;
use tokio::runtime::Handle;
use collection::{
operations::{
payload_ops::{PayloadOps, SetPayload},
point_ops::{Batch, PointOperations, PointStruct},
types::{RecommendRequest, ScrollRequest, SearchRequest, UpdateStatus},
CollectionUpdateOperations,
},
Collection,
};
use segment::types::{Condition, HasIdCondition, Payload, PointIdType, WithPayloadInterface};
use crate::common::{simple_collection_fixture, N_SHARDS};
use collection::collection_manager::simple_collection_searcher::SimpleCollectionSearcher;
use collection::operations::types::PointRequest;
mod common;
/// Runs the point-upsert + search scenario against single- and multi-shard collections.
#[tokio::test]
async fn test_collection_updater() {
    for &shard_number in &[1, N_SHARDS] {
        test_collection_updater_with_shards(shard_number).await;
    }
}
/// Upserts five points into a fresh collection and checks that a plain vector
/// search returns three hits, ranked with point 2 first and with no payload
/// attached (none was requested).
async fn test_collection_updater_with_shards(shard_number: u32) {
    let collection_dir = TempDir::new("collection").unwrap();
    let mut collection = simple_collection_fixture(collection_dir.path(), shard_number).await;

    // Batch-upsert points 0..=4, each with a 4-dimensional vector and no payload.
    let upsert_op = CollectionUpdateOperations::PointOperation(
        Batch {
            ids: vec![0, 1, 2, 3, 4].into_iter().map(Into::into).collect_vec(),
            vectors: vec![
                vec![1.0, 0.0, 1.0, 1.0],
                vec![1.0, 0.0, 1.0, 0.0],
                vec![1.0, 1.0, 1.0, 1.0],
                vec![1.0, 1.0, 0.0, 1.0],
                vec![1.0, 0.0, 0.0, 0.0],
            ],
            payloads: None,
        }
        .into(),
    );
    match collection.update_from_client(upsert_op, true).await {
        Ok(res) => assert_eq!(res.status, UpdateStatus::Completed),
        Err(err) => panic!("operation failed: {:?}", err),
    }

    // Search for the vector closest to [1, 1, 1, 1]; point 2 matches it exactly.
    let request = SearchRequest {
        vector: vec![1.0, 1.0, 1.0, 1.0],
        with_payload: None,
        with_vector: false,
        filter: None,
        params: None,
        top: 3,
        score_threshold: None,
    };
    let searcher = SimpleCollectionSearcher::new();
    match collection
        .search(request, &searcher, &Handle::current(), None)
        .await
    {
        Ok(hits) => {
            assert_eq!(hits.len(), 3);
            assert_eq!(hits[0].id, 2.into());
            // No payload was requested, so none should be returned.
            assert!(hits[0].payload.is_none());
        }
        Err(err) => panic!("search failed: {:?}", err),
    }
    collection.before_drop().await;
}
/// Runs the payload/vector search scenario against single- and multi-shard collections.
#[tokio::test]
async fn test_collection_search_with_payload_and_vector() {
    for &shard_number in &[1, N_SHARDS] {
        test_collection_search_with_payload_and_vector_with_shards(shard_number).await;
    }
}
/// Upserts two points carrying payloads and verifies that search can return
/// both the payload and the original vector when explicitly requested.
async fn test_collection_search_with_payload_and_vector_with_shards(shard_number: u32) {
    let collection_dir = TempDir::new("collection").unwrap();
    let mut collection = simple_collection_fixture(collection_dir.path(), shard_number).await;

    // Two points; payloads are supplied as a raw JSON array (one object per point).
    let upsert_op = CollectionUpdateOperations::PointOperation(
        Batch {
            ids: vec![0.into(), 1.into()],
            vectors: vec![vec![1.0, 0.0, 1.0, 1.0], vec![1.0, 0.0, 1.0, 0.0]],
            payloads: serde_json::from_str(
                r#"[{ "k": { "type": "keyword", "value": "v1" } }, { "k": "v2" , "v": "v3"}]"#,
            )
            .unwrap(),
        }
        .into(),
    );
    match collection.update_from_client(upsert_op, true).await {
        Ok(res) => assert_eq!(res.status, UpdateStatus::Completed),
        Err(err) => panic!("operation failed: {:?}", err),
    }

    // Ask for both payload and vector back in the search results.
    let request = SearchRequest {
        vector: vec![1.0, 0.0, 1.0, 1.0],
        with_payload: Some(WithPayloadInterface::Bool(true)),
        with_vector: true,
        filter: None,
        params: None,
        top: 3,
        score_threshold: None,
    };
    let searcher = SimpleCollectionSearcher::new();
    match collection
        .search(request, &searcher, &Handle::current(), None)
        .await
    {
        Ok(hits) => {
            assert_eq!(hits.len(), 2);
            assert_eq!(hits[0].id, 0.into());
            // Point 0 was stored with a single payload key ("k").
            assert_eq!(hits[0].payload.as_ref().unwrap().len(), 1);
            assert_eq!(&hits[0].vector, &Some(vec![1.0, 0.0, 1.0, 1.0]));
        }
        Err(err) => panic!("search failed: {:?}", err),
    }
    collection.before_drop().await;
}
// FIXME: does not work
/// Runs the persist-and-reload scenario against single- and multi-shard collections.
#[tokio::test]
async fn test_collection_loading() {
    for &shard_number in &[1, N_SHARDS] {
        test_collection_loading_with_shards(shard_number).await;
    }
}
/// Persists a collection with points and payload, drops it, reloads it from
/// the same directory, and verifies the stored payload survives the round
/// trip. (Fix: removed a leftover debug `println!` at the end of the test.)
async fn test_collection_loading_with_shards(shard_number: u32) {
    let collection_dir = TempDir::new("collection").unwrap();
    // Scope the first collection handle so it is fully shut down before the
    // collection is re-loaded from the same directory.
    {
        let mut collection = simple_collection_fixture(collection_dir.path(), shard_number).await;
        let insert_points = CollectionUpdateOperations::PointOperation(
            Batch {
                ids: vec![0, 1, 2, 3, 4]
                    .into_iter()
                    .map(|x| x.into())
                    .collect_vec(),
                vectors: vec![
                    vec![1.0, 0.0, 1.0, 1.0],
                    vec![1.0, 0.0, 1.0, 0.0],
                    vec![1.0, 1.0, 1.0, 1.0],
                    vec![1.0, 1.0, 0.0, 1.0],
                    vec![1.0, 0.0, 0.0, 0.0],
                ],
                payloads: None,
            }
            .into(),
        );
        collection
            .update_from_client(insert_points, true)
            .await
            .unwrap();
        // Attach a payload to points 2 and 3 only.
        let payload: Payload = serde_json::from_str(r#"{"color":"red"}"#).unwrap();
        let assign_payload =
            CollectionUpdateOperations::PayloadOperation(PayloadOps::SetPayload(SetPayload {
                payload,
                points: vec![2.into(), 3.into()],
            }));
        collection
            .update_from_client(assign_payload, true)
            .await
            .unwrap();
        collection.before_drop().await;
    }
    let mut loaded_collection = Collection::load("test".to_string(), collection_dir.path()).await;
    let segment_searcher = SimpleCollectionSearcher::new();
    let request = PointRequest {
        ids: vec![1.into(), 2.into()],
        with_payload: Some(WithPayloadInterface::Bool(true)),
        with_vector: true,
    };
    let retrieved = loaded_collection
        .retrieve(request, &segment_searcher, None)
        .await
        .unwrap();
    assert_eq!(retrieved.len(), 2);
    for record in retrieved {
        if record.id == 2.into() {
            // Point 2 had a payload assigned before the reload; point 1 did not.
            let non_empty_payload = record.payload.unwrap();
            assert_eq!(non_empty_payload.len(), 1)
        }
    }
    loaded_collection.before_drop().await;
}
/// Round-trips a batch upsert operation through both JSON and MessagePack to
/// make sure the serde representations stay deserializable.
#[test]
fn test_deserialization() {
    let insert_points = CollectionUpdateOperations::PointOperation(
        Batch {
            ids: vec![0.into(), 1.into()],
            vectors: vec![vec![1.0, 0.0, 1.0, 1.0], vec![1.0, 0.0, 1.0, 0.0]],
            payloads: None,
        }
        .into(),
    );
    // JSON round trip.
    let json_str = serde_json::to_string_pretty(&insert_points).unwrap();
    let _parsed_json: CollectionUpdateOperations = serde_json::from_str(&json_str).unwrap();
    // MessagePack round trip.
    let msgpack_bytes = rmp_serde::to_vec(&insert_points).unwrap();
    let _parsed_msgpack: CollectionUpdateOperations =
        rmp_serde::from_slice(&msgpack_bytes).unwrap();
}
/// Round-trips a point-list upsert (explicit `PointStruct` form) through JSON
/// and MessagePack serde representations.
#[test]
fn test_deserialization2() {
    let points = vec![
        PointStruct {
            id: 0.into(),
            vector: vec![1.0, 0.0, 1.0, 1.0],
            payload: None,
        },
        PointStruct {
            id: 1.into(),
            vector: vec![1.0, 0.0, 1.0, 0.0],
            payload: None,
        },
    ];
    let insert_points = CollectionUpdateOperations::PointOperation(points.into());
    // JSON round trip.
    let json_str = serde_json::to_string_pretty(&insert_points).unwrap();
    let _parsed_json: CollectionUpdateOperations = serde_json::from_str(&json_str).unwrap();
    // MessagePack round trip.
    let raw_bytes = rmp_serde::to_vec(&insert_points).unwrap();
    let _parsed_msgpack: CollectionUpdateOperations = rmp_serde::from_slice(&raw_bytes).unwrap();
}
// The find-points request is sent to every shard, but a shard might not contain a particular id, in which case it returns an error
/// Runs the recommend-by scenario against single- and multi-shard collections.
#[tokio::test]
async fn test_recommendation_api() {
    for &shard_number in &[1, N_SHARDS] {
        test_recommendation_api_with_shards(shard_number).await;
    }
}
/// Upserts nine points and checks that recommend-by (positive: point 0,
/// negative: point 8) ranks one of the `[0,0,1,0]` points (ids 5 or 6) first.
async fn test_recommendation_api_with_shards(shard_number: u32) {
    let collection_dir = TempDir::new("collection").unwrap();
    let mut collection = simple_collection_fixture(collection_dir.path(), shard_number).await;

    // Nine points spread over the four axes plus one mixed point (id 0).
    let batch = Batch {
        ids: vec![0, 1, 2, 3, 4, 5, 6, 7, 8]
            .into_iter()
            .map(Into::into)
            .collect_vec(),
        vectors: vec![
            vec![0.0, 0.0, 1.0, 1.0],
            vec![1.0, 0.0, 0.0, 0.0],
            vec![1.0, 0.0, 0.0, 0.0],
            vec![0.0, 1.0, 0.0, 0.0],
            vec![0.0, 1.0, 0.0, 0.0],
            vec![0.0, 0.0, 1.0, 0.0],
            vec![0.0, 0.0, 1.0, 0.0],
            vec![0.0, 0.0, 0.0, 1.0],
            vec![0.0, 0.0, 0.0, 1.0],
        ],
        payloads: None,
    };
    collection
        .update_from_client(CollectionUpdateOperations::PointOperation(batch.into()), true)
        .await
        .unwrap();

    let request = RecommendRequest {
        positive: vec![0.into()],
        negative: vec![8.into()],
        filter: None,
        params: None,
        top: 5,
        with_payload: None,
        with_vector: false,
        score_threshold: None,
    };
    let searcher = SimpleCollectionSearcher::new();
    let result = collection
        .recommend_by(request, &searcher, &Handle::current(), None)
        .await
        .unwrap();
    assert!(!result.is_empty());
    // Points 5 and 6 ([0,0,1,0]) best satisfy "like point 0, unlike point 8".
    let top1 = &result[0];
    assert!(top1.id == 5.into() || top1.id == 6.into());
    collection.before_drop().await;
}
/// Runs the scroll/paging scenario against single- and multi-shard collections.
#[tokio::test]
async fn test_read_api() {
    for &shard_number in &[1, N_SHARDS] {
        test_read_api_with_shards(shard_number).await;
    }
}
/// Upserts nine points and verifies paging via scroll: the first page of two
/// points comes back together with a next-page offset pointing at id 2.
async fn test_read_api_with_shards(shard_number: u32) {
    let collection_dir = TempDir::new("collection").unwrap();
    let mut collection = simple_collection_fixture(collection_dir.path(), shard_number).await;

    let batch = Batch {
        ids: vec![0, 1, 2, 3, 4, 5, 6, 7, 8]
            .into_iter()
            .map(Into::into)
            .collect_vec(),
        vectors: vec![
            vec![0.0, 0.0, 1.0, 1.0],
            vec![1.0, 0.0, 0.0, 0.0],
            vec![1.0, 0.0, 0.0, 0.0],
            vec![0.0, 1.0, 0.0, 0.0],
            vec![0.0, 1.0, 0.0, 0.0],
            vec![0.0, 0.0, 1.0, 0.0],
            vec![0.0, 0.0, 1.0, 0.0],
            vec![0.0, 0.0, 0.0, 1.0],
            vec![0.0, 0.0, 0.0, 1.0],
        ],
        payloads: None,
    };
    let insert_points =
        CollectionUpdateOperations::PointOperation(PointOperations::UpsertPoints(batch.into()));
    collection
        .update_from_client(insert_points, true)
        .await
        .unwrap();

    // Scroll the first page (2 points) with payloads but without vectors.
    let searcher = SimpleCollectionSearcher::new();
    let result = collection
        .scroll_by(
            ScrollRequest {
                offset: None,
                limit: Some(2),
                filter: None,
                with_payload: Some(WithPayloadInterface::Bool(true)),
                with_vector: false,
            },
            &searcher,
            None,
        )
        .await
        .unwrap();
    assert_eq!(result.next_page_offset, Some(2.into()));
    assert_eq!(result.points.len(), 2);
    collection.before_drop().await;
}
/// Runs the delete-by-filter scenario against single- and multi-shard collections.
#[tokio::test]
async fn test_collection_delete_points_by_filter() {
    for &shard_number in &[1, N_SHARDS] {
        test_collection_delete_points_by_filter_with_shards(shard_number).await;
    }
}
/// Deletes two points via a `HasId` filter and verifies, through a scroll,
/// that exactly the remaining three points survive.
/// (Fix: replaced non-idiomatic `points.get(i).unwrap()` with direct indexing
/// and corrected a comment typo.)
async fn test_collection_delete_points_by_filter_with_shards(shard_number: u32) {
    let collection_dir = TempDir::new("collection").unwrap();
    let mut collection = simple_collection_fixture(collection_dir.path(), shard_number).await;
    let insert_points = CollectionUpdateOperations::PointOperation(
        Batch {
            ids: vec![0, 1, 2, 3, 4]
                .into_iter()
                .map(|x| x.into())
                .collect_vec(),
            vectors: vec![
                vec![1.0, 0.0, 1.0, 1.0],
                vec![1.0, 0.0, 1.0, 0.0],
                vec![1.0, 1.0, 1.0, 1.0],
                vec![1.0, 1.0, 0.0, 1.0],
                vec![1.0, 0.0, 0.0, 0.0],
            ],
            payloads: None,
        }
        .into(),
    );
    let insert_result = collection.update_from_client(insert_points, true).await;
    match insert_result {
        Ok(res) => {
            assert_eq!(res.status, UpdateStatus::Completed)
        }
        Err(err) => panic!("operation failed: {:?}", err),
    }
    // Delete points with ids 0 and 3 via a HasId filter condition.
    let to_be_deleted: HashSet<PointIdType> = vec![0.into(), 3.into()].into_iter().collect();
    let delete_filter = segment::types::Filter {
        should: None,
        must: Some(vec![Condition::HasId(HasIdCondition::from(to_be_deleted))]),
        must_not: None,
    };
    let delete_points = CollectionUpdateOperations::PointOperation(
        PointOperations::DeletePointsByFilter(delete_filter),
    );
    let delete_result = collection.update_from_client(delete_points, true).await;
    match delete_result {
        Ok(res) => {
            assert_eq!(res.status, UpdateStatus::Completed)
        }
        Err(err) => panic!("operation failed: {:?}", err),
    }
    let segment_searcher = SimpleCollectionSearcher::new();
    let result = collection
        .scroll_by(
            ScrollRequest {
                offset: None,
                limit: Some(10),
                filter: None,
                with_payload: Some(WithPayloadInterface::Bool(false)),
                with_vector: false,
            },
            &segment_searcher,
            None,
        )
        .await
        .unwrap();
    // Check that only 3 of the 5 points are left and that the point ids
    // (0 and 3) were really deleted.
    assert_eq!(result.points.len(), 3);
    assert_eq!(result.points[0].id, 1.into());
    assert_eq!(result.points[1].id, 2.into());
    assert_eq!(result.points[2].id, 4.into());
    collection.before_drop().await;
}
| 31.679406 | 114 | 0.557469 |
62563934eb80074c5924501e59da24e4beafa62c | 1,727 | /*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use std::io;
use snap::read::FrameDecoder;
use snap::write::FrameEncoder;
/// A trait that provides a compression and decompression strategy for this filter.
/// Conversion takes place on a mutable Vec, to ensure the most performant compression or
/// decompression operation can occur.
/// A trait that provides a compression and decompression strategy for this filter.
/// Conversion takes place on a mutable Vec, to ensure the most performant compression or
/// decompression operation can occur.
pub(crate) trait Compressor {
    /// Compress the contents of the Vec - overwriting the original content.
    ///
    /// Returns an `io::Error` if the underlying encoder fails; in that case
    /// `contents` may be left holding partial output.
    fn encode(&self, contents: &mut Vec<u8>) -> io::Result<()>;
    /// Decompress the contents of the Vec - overwriting the original content.
    ///
    /// Returns an `io::Error` if `contents` is not valid compressed data; in
    /// that case `contents` may be left holding partial output.
    fn decode(&self, contents: &mut Vec<u8>) -> io::Result<()>;
}
/// Snappy (framed format) implementation of [`Compressor`].
pub(crate) struct Snappy {}

impl Compressor for Snappy {
    /// Snappy-compress `contents` in place using the framed format.
    fn encode(&self, contents: &mut Vec<u8>) -> io::Result<()> {
        // Move the uncompressed bytes out so `contents` can be reused as the
        // output buffer without an extra allocation.
        let input = std::mem::take(contents);
        let mut wtr = FrameEncoder::new(contents);
        io::copy(&mut input.as_slice(), &mut wtr)?;
        // Flush explicitly: dropping the encoder also flushes, but an error
        // raised during that implicit flush would be silently discarded.
        io::Write::flush(&mut wtr)?;
        Ok(())
    }

    /// Decompress framed-Snappy `contents` in place.
    fn decode(&self, contents: &mut Vec<u8>) -> io::Result<()> {
        // Move the compressed bytes out and decode back into `contents`.
        let input = std::mem::take(contents);
        let mut rdr = FrameDecoder::new(input.as_slice());
        io::copy(&mut rdr, contents)?;
        Ok(())
    }
}
| 35.244898 | 89 | 0.670527 |
1af72804f80eeb2a74bcfa68b70e1118efc89054 | 241 | //! Module holding the 'project' function
use crate::env::FileFrame;
use crate::internal::values::{I32Wrap, Value};
/// Projection placeholder: always returns a boxed `I32Wrap(0)` value.
///
/// NOTE(review): `file_frame` (and the `'file_frame` lifetime) is currently
/// unused — presumably a stub until real projection logic is implemented;
/// confirm before relying on this function.
pub fn project<'file_frame>(file_frame: &'file_frame mut FileFrame) -> Box<dyn Value<'static>> {
    Box::new(I32Wrap(0))
}
| 30.125 | 96 | 0.705394 |
2f514f3689891a90cb2d48c5bb2b12448fbbaf38 | 20,237 | // @generated SignedSource<<7477e4591b12a0bc4aebae640f66e375>>
mod parse;
use parse::transform_fixture;
use fixture_tests::test_fixture;
#[test]
fn argument_complex_object_invalid() {
let input = include_str!("parse/fixtures/argument-complex-object.invalid.graphql");
let expected = include_str!("parse/fixtures/argument-complex-object.invalid.expected");
test_fixture(transform_fixture, "argument-complex-object.invalid.graphql", "parse/fixtures/argument-complex-object.invalid.expected", input, expected);
}
#[test]
fn argument_definitions_with_typo_invalid() {
let input = include_str!("parse/fixtures/argument-definitions-with-typo.invalid.graphql");
let expected = include_str!("parse/fixtures/argument-definitions-with-typo.invalid.expected");
test_fixture(transform_fixture, "argument-definitions-with-typo.invalid.graphql", "parse/fixtures/argument-definitions-with-typo.invalid.expected", input, expected);
}
#[test]
fn complex_object_with_missing_fields_invalid() {
let input = include_str!("parse/fixtures/complex-object-with-missing-fields.invalid.graphql");
let expected = include_str!("parse/fixtures/complex-object-with-missing-fields.invalid.expected");
test_fixture(transform_fixture, "complex-object-with-missing-fields.invalid.graphql", "parse/fixtures/complex-object-with-missing-fields.invalid.expected", input, expected);
}
#[test]
fn directive_generic() {
let input = include_str!("parse/fixtures/directive-generic.graphql");
let expected = include_str!("parse/fixtures/directive-generic.expected");
test_fixture(transform_fixture, "directive-generic.graphql", "parse/fixtures/directive-generic.expected", input, expected);
}
#[test]
fn directive_include() {
let input = include_str!("parse/fixtures/directive-include.graphql");
let expected = include_str!("parse/fixtures/directive-include.expected");
test_fixture(transform_fixture, "directive-include.graphql", "parse/fixtures/directive-include.expected", input, expected);
}
#[test]
fn directive_match_on_fragment_invalid() {
let input = include_str!("parse/fixtures/directive-match-on-fragment.invalid.graphql");
let expected = include_str!("parse/fixtures/directive-match-on-fragment.invalid.expected");
test_fixture(transform_fixture, "directive-match-on-fragment.invalid.graphql", "parse/fixtures/directive-match-on-fragment.invalid.expected", input, expected);
}
#[test]
fn directive_module_match_on_query_invalid() {
let input = include_str!("parse/fixtures/directive-module-match-on-query.invalid.graphql");
let expected = include_str!("parse/fixtures/directive-module-match-on-query.invalid.expected");
test_fixture(transform_fixture, "directive-module-match-on-query.invalid.graphql", "parse/fixtures/directive-module-match-on-query.invalid.expected", input, expected);
}
#[test]
fn directive_module_on_field_invalid() {
let input = include_str!("parse/fixtures/directive-module-on-field.invalid.graphql");
let expected = include_str!("parse/fixtures/directive-module-on-field.invalid.expected");
test_fixture(transform_fixture, "directive-module-on-field.invalid.graphql", "parse/fixtures/directive-module-on-field.invalid.expected", input, expected);
}
#[test]
fn directive_unknown_argument_invalid() {
let input = include_str!("parse/fixtures/directive-unknown-argument.invalid.graphql");
let expected = include_str!("parse/fixtures/directive-unknown-argument.invalid.expected");
test_fixture(transform_fixture, "directive-unknown-argument.invalid.graphql", "parse/fixtures/directive-unknown-argument.invalid.expected", input, expected);
}
#[test]
fn enum_values() {
let input = include_str!("parse/fixtures/enum-values.graphql");
let expected = include_str!("parse/fixtures/enum-values.expected");
test_fixture(transform_fixture, "enum-values.graphql", "parse/fixtures/enum-values.expected", input, expected);
}
#[test]
fn enum_values_invalid() {
let input = include_str!("parse/fixtures/enum-values.invalid.graphql");
let expected = include_str!("parse/fixtures/enum-values.invalid.expected");
test_fixture(transform_fixture, "enum-values.invalid.graphql", "parse/fixtures/enum-values.invalid.expected", input, expected);
}
#[test]
fn field_arguments() {
let input = include_str!("parse/fixtures/field-arguments.graphql");
let expected = include_str!("parse/fixtures/field-arguments.expected");
test_fixture(transform_fixture, "field-arguments.graphql", "parse/fixtures/field-arguments.expected", input, expected);
}
#[test]
fn field_unknown_argument_invalid() {
let input = include_str!("parse/fixtures/field-unknown-argument.invalid.graphql");
let expected = include_str!("parse/fixtures/field-unknown-argument.invalid.expected");
test_fixture(transform_fixture, "field-unknown-argument.invalid.graphql", "parse/fixtures/field-unknown-argument.invalid.expected", input, expected);
}
#[test]
fn fixme_fat_interface_on_union() {
let input = include_str!("parse/fixtures/fixme_fat_interface_on_union.graphql");
let expected = include_str!("parse/fixtures/fixme_fat_interface_on_union.expected");
test_fixture(transform_fixture, "fixme_fat_interface_on_union.graphql", "parse/fixtures/fixme_fat_interface_on_union.expected", input, expected);
}
#[test]
fn fixme_fat_interface_on_union_invalid() {
let input = include_str!("parse/fixtures/fixme_fat_interface_on_union.invalid.graphql");
let expected = include_str!("parse/fixtures/fixme_fat_interface_on_union.invalid.expected");
test_fixture(transform_fixture, "fixme_fat_interface_on_union.invalid.graphql", "parse/fixtures/fixme_fat_interface_on_union.invalid.expected", input, expected);
}
#[test]
fn fragment_spread_on_wrong_type_invalid() {
let input = include_str!("parse/fixtures/fragment-spread-on-wrong-type.invalid.graphql");
let expected = include_str!("parse/fixtures/fragment-spread-on-wrong-type.invalid.expected");
test_fixture(transform_fixture, "fragment-spread-on-wrong-type.invalid.graphql", "parse/fixtures/fragment-spread-on-wrong-type.invalid.expected", input, expected);
}
#[test]
fn fragment_with_argument_type_invalid() {
let input = include_str!("parse/fixtures/fragment-with-argument-type.invalid.graphql");
let expected = include_str!("parse/fixtures/fragment-with-argument-type.invalid.expected");
test_fixture(transform_fixture, "fragment-with-argument-type.invalid.graphql", "parse/fixtures/fragment-with-argument-type.invalid.expected", input, expected);
}
#[test]
fn fragment_with_arguments() {
let input = include_str!("parse/fixtures/fragment-with-arguments.graphql");
let expected = include_str!("parse/fixtures/fragment-with-arguments.expected");
test_fixture(transform_fixture, "fragment-with-arguments.graphql", "parse/fixtures/fragment-with-arguments.expected", input, expected);
}
#[test]
fn fragment_with_arguments_invalid_type_invalid() {
let input = include_str!("parse/fixtures/fragment-with-arguments-invalid-type.invalid.graphql");
let expected = include_str!("parse/fixtures/fragment-with-arguments-invalid-type.invalid.expected");
test_fixture(transform_fixture, "fragment-with-arguments-invalid-type.invalid.graphql", "parse/fixtures/fragment-with-arguments-invalid-type.invalid.expected", input, expected);
}
#[test]
fn fragment_with_literal_arguments() {
let input = include_str!("parse/fixtures/fragment-with-literal-arguments.graphql");
let expected = include_str!("parse/fixtures/fragment-with-literal-arguments.expected");
test_fixture(transform_fixture, "fragment-with-literal-arguments.graphql", "parse/fixtures/fragment-with-literal-arguments.expected", input, expected);
}
#[test]
fn fragment_with_literal_enum_arguments() {
let input = include_str!("parse/fixtures/fragment-with-literal-enum-arguments.graphql");
let expected = include_str!("parse/fixtures/fragment-with-literal-enum-arguments.expected");
test_fixture(transform_fixture, "fragment-with-literal-enum-arguments.graphql", "parse/fixtures/fragment-with-literal-enum-arguments.expected", input, expected);
}
#[test]
fn fragment_with_literal_enum_arguments_into_enum_list() {
let input = include_str!("parse/fixtures/fragment-with-literal-enum-arguments-into-enum-list.graphql");
let expected = include_str!("parse/fixtures/fragment-with-literal-enum-arguments-into-enum-list.expected");
test_fixture(transform_fixture, "fragment-with-literal-enum-arguments-into-enum-list.graphql", "parse/fixtures/fragment-with-literal-enum-arguments-into-enum-list.expected", input, expected);
}
#[test]
fn fragment_with_literal_enum_arguments_into_enum_list_indirect_invalid() {
let input = include_str!("parse/fixtures/fragment-with-literal-enum-arguments-into-enum-list-indirect.invalid.graphql");
let expected = include_str!("parse/fixtures/fragment-with-literal-enum-arguments-into-enum-list-indirect.invalid.expected");
test_fixture(transform_fixture, "fragment-with-literal-enum-arguments-into-enum-list-indirect.invalid.graphql", "parse/fixtures/fragment-with-literal-enum-arguments-into-enum-list-indirect.invalid.expected", input, expected);
}
#[test]
fn fragment_with_literal_enum_list_arguments() {
let input = include_str!("parse/fixtures/fragment-with-literal-enum-list-arguments.graphql");
let expected = include_str!("parse/fixtures/fragment-with-literal-enum-list-arguments.expected");
test_fixture(transform_fixture, "fragment-with-literal-enum-list-arguments.graphql", "parse/fixtures/fragment-with-literal-enum-list-arguments.expected", input, expected);
}
#[test]
fn fragment_with_literal_object_arguments() {
let input = include_str!("parse/fixtures/fragment-with-literal-object-arguments.graphql");
let expected = include_str!("parse/fixtures/fragment-with-literal-object-arguments.expected");
test_fixture(transform_fixture, "fragment-with-literal-object-arguments.graphql", "parse/fixtures/fragment-with-literal-object-arguments.expected", input, expected);
}
#[test]
fn fragment_with_literal_object_list_arguments() {
let input = include_str!("parse/fixtures/fragment-with-literal-object-list-arguments.graphql");
let expected = include_str!("parse/fixtures/fragment-with-literal-object-list-arguments.expected");
test_fixture(transform_fixture, "fragment-with-literal-object-list-arguments.graphql", "parse/fixtures/fragment-with-literal-object-list-arguments.expected", input, expected);
}
#[test]
fn fragment_with_undefined_literal_arguments_invalid() {
let input = include_str!("parse/fixtures/fragment-with-undefined-literal-arguments.invalid.graphql");
let expected = include_str!("parse/fixtures/fragment-with-undefined-literal-arguments.invalid.expected");
test_fixture(transform_fixture, "fragment-with-undefined-literal-arguments.invalid.graphql", "parse/fixtures/fragment-with-undefined-literal-arguments.invalid.expected", input, expected);
}
#[test]
fn fragment_with_undefined_variable_arguments_invalid() {
let input = include_str!("parse/fixtures/fragment-with-undefined-variable-arguments.invalid.graphql");
let expected = include_str!("parse/fixtures/fragment-with-undefined-variable-arguments.invalid.expected");
test_fixture(transform_fixture, "fragment-with-undefined-variable-arguments.invalid.graphql", "parse/fixtures/fragment-with-undefined-variable-arguments.invalid.expected", input, expected);
}
#[test]
fn fragment_with_unnecessary_unchecked_arguments_invalid() {
let input = include_str!("parse/fixtures/fragment-with-unnecessary-unchecked-arguments.invalid.graphql");
let expected = include_str!("parse/fixtures/fragment-with-unnecessary-unchecked-arguments.invalid.expected");
test_fixture(transform_fixture, "fragment-with-unnecessary-unchecked-arguments.invalid.graphql", "parse/fixtures/fragment-with-unnecessary-unchecked-arguments.invalid.expected", input, expected);
}
#[test]
fn inline_fragment_on_wrong_type_invalid() {
let input = include_str!("parse/fixtures/inline-fragment-on-wrong-type.invalid.graphql");
let expected = include_str!("parse/fixtures/inline-fragment-on-wrong-type.invalid.expected");
test_fixture(transform_fixture, "inline-fragment-on-wrong-type.invalid.graphql", "parse/fixtures/inline-fragment-on-wrong-type.invalid.expected", input, expected);
}
#[test]
fn inline_fragment_with_invalid_type() {
let input = include_str!("parse/fixtures/inline-fragment-with-invalid-type.graphql");
let expected = include_str!("parse/fixtures/inline-fragment-with-invalid-type.expected");
test_fixture(transform_fixture, "inline-fragment-with-invalid-type.graphql", "parse/fixtures/inline-fragment-with-invalid-type.expected", input, expected);
}
#[test]
fn inline_untyped_fragment() {
let input = include_str!("parse/fixtures/inline-untyped-fragment.graphql");
let expected = include_str!("parse/fixtures/inline-untyped-fragment.expected");
test_fixture(transform_fixture, "inline-untyped-fragment.graphql", "parse/fixtures/inline-untyped-fragment.expected", input, expected);
}
#[test]
fn invalid_argument_type_invalid() {
let input = include_str!("parse/fixtures/invalid-argument-type.invalid.graphql");
let expected = include_str!("parse/fixtures/invalid-argument-type.invalid.expected");
test_fixture(transform_fixture, "invalid-argument-type.invalid.graphql", "parse/fixtures/invalid-argument-type.invalid.expected", input, expected);
}
#[test]
fn linked_handle_field() {
let input = include_str!("parse/fixtures/linked-handle-field.graphql");
let expected = include_str!("parse/fixtures/linked-handle-field.expected");
test_fixture(transform_fixture, "linked-handle-field.graphql", "parse/fixtures/linked-handle-field.expected", input, expected);
}
#[test]
fn linked_handle_field_with_filters() {
let input = include_str!("parse/fixtures/linked-handle-field-with-filters.graphql");
let expected = include_str!("parse/fixtures/linked-handle-field-with-filters.expected");
test_fixture(transform_fixture, "linked-handle-field-with-filters.graphql", "parse/fixtures/linked-handle-field-with-filters.expected", input, expected);
}
#[test]
fn linked_handle_field_with_key() {
let input = include_str!("parse/fixtures/linked-handle-field-with-key.graphql");
let expected = include_str!("parse/fixtures/linked-handle-field-with-key.expected");
test_fixture(transform_fixture, "linked-handle-field-with-key.graphql", "parse/fixtures/linked-handle-field-with-key.expected", input, expected);
}
#[test]
fn linked_handle_filter() {
let input = include_str!("parse/fixtures/linked-handle-filter.graphql");
let expected = include_str!("parse/fixtures/linked-handle-filter.expected");
test_fixture(transform_fixture, "linked-handle-filter.graphql", "parse/fixtures/linked-handle-filter.expected", input, expected);
}
#[test]
fn list_argument() {
let input = include_str!("parse/fixtures/list-argument.graphql");
let expected = include_str!("parse/fixtures/list-argument.expected");
test_fixture(transform_fixture, "list-argument.graphql", "parse/fixtures/list-argument.expected", input, expected);
}
#[test]
fn list_argument_complex_object() {
let input = include_str!("parse/fixtures/list-argument-complex-object.graphql");
let expected = include_str!("parse/fixtures/list-argument-complex-object.expected");
test_fixture(transform_fixture, "list-argument-complex-object.graphql", "parse/fixtures/list-argument-complex-object.expected", input, expected);
}
#[test]
fn list_of_enums() {
let input = include_str!("parse/fixtures/list-of-enums.graphql");
let expected = include_str!("parse/fixtures/list-of-enums.expected");
test_fixture(transform_fixture, "list-of-enums.graphql", "parse/fixtures/list-of-enums.expected", input, expected);
}
#[test]
fn literal_list_argument() {
let input = include_str!("parse/fixtures/literal-list-argument.graphql");
let expected = include_str!("parse/fixtures/literal-list-argument.expected");
test_fixture(transform_fixture, "literal-list-argument.graphql", "parse/fixtures/literal-list-argument.expected", input, expected);
}
#[test]
fn literal_list_argument_invalid() {
let input = include_str!("parse/fixtures/literal-list-argument.invalid.graphql");
let expected = include_str!("parse/fixtures/literal-list-argument.invalid.expected");
test_fixture(transform_fixture, "literal-list-argument.invalid.graphql", "parse/fixtures/literal-list-argument.invalid.expected", input, expected);
}
// Generated fixture tests: each test loads a GraphQL document and its expected
// transform output from `parse/fixtures/` and asserts that `transform_fixture`
// reproduces the expected file. Fixtures with `.invalid` in the name exercise
// error reporting rather than a successful parse.
#[test]
fn literal_object_argument() {
    let input = include_str!("parse/fixtures/literal-object-argument.graphql");
    let expected = include_str!("parse/fixtures/literal-object-argument.expected");
    test_fixture(transform_fixture, "literal-object-argument.graphql", "parse/fixtures/literal-object-argument.expected", input, expected);
}
#[test]
fn literal_object_argument_invalid() {
    let input = include_str!("parse/fixtures/literal-object-argument.invalid.graphql");
    let expected = include_str!("parse/fixtures/literal-object-argument.invalid.expected");
    test_fixture(transform_fixture, "literal-object-argument.invalid.graphql", "parse/fixtures/literal-object-argument.invalid.expected", input, expected);
}
#[test]
fn null_values() {
    let input = include_str!("parse/fixtures/null-values.graphql");
    let expected = include_str!("parse/fixtures/null-values.expected");
    test_fixture(transform_fixture, "null-values.graphql", "parse/fixtures/null-values.expected", input, expected);
}
#[test]
fn null_values_invalid() {
    let input = include_str!("parse/fixtures/null-values.invalid.graphql");
    let expected = include_str!("parse/fixtures/null-values.invalid.expected");
    test_fixture(transform_fixture, "null-values.invalid.graphql", "parse/fixtures/null-values.invalid.expected", input, expected);
}
#[test]
fn object_argument() {
    let input = include_str!("parse/fixtures/object-argument.graphql");
    let expected = include_str!("parse/fixtures/object-argument.expected");
    test_fixture(transform_fixture, "object-argument.graphql", "parse/fixtures/object-argument.expected", input, expected);
}
#[test]
fn scalar_handle_field() {
    let input = include_str!("parse/fixtures/scalar-handle-field.graphql");
    let expected = include_str!("parse/fixtures/scalar-handle-field.expected");
    test_fixture(transform_fixture, "scalar-handle-field.graphql", "parse/fixtures/scalar-handle-field.expected", input, expected);
}
#[test]
fn simple_fragment() {
    let input = include_str!("parse/fixtures/simple-fragment.graphql");
    let expected = include_str!("parse/fixtures/simple-fragment.expected");
    test_fixture(transform_fixture, "simple-fragment.graphql", "parse/fixtures/simple-fragment.expected", input, expected);
}
#[test]
fn simple_query() {
    let input = include_str!("parse/fixtures/simple-query.graphql");
    let expected = include_str!("parse/fixtures/simple-query.expected");
    test_fixture(transform_fixture, "simple-query.graphql", "parse/fixtures/simple-query.expected", input, expected);
}
#[test]
fn undefined_fragment_invalid() {
    let input = include_str!("parse/fixtures/undefined-fragment.invalid.graphql");
    let expected = include_str!("parse/fixtures/undefined-fragment.invalid.expected");
    test_fixture(transform_fixture, "undefined-fragment.invalid.graphql", "parse/fixtures/undefined-fragment.invalid.expected", input, expected);
}
#[test]
fn undefined_type_invalid() {
    let input = include_str!("parse/fixtures/undefined-type.invalid.graphql");
    let expected = include_str!("parse/fixtures/undefined-type.invalid.expected");
    test_fixture(transform_fixture, "undefined-type.invalid.graphql", "parse/fixtures/undefined-type.invalid.expected", input, expected);
}
#[test]
fn unknown_fragment_type_invalid() {
    let input = include_str!("parse/fixtures/unknown-fragment-type.invalid.graphql");
    let expected = include_str!("parse/fixtures/unknown-fragment-type.invalid.expected");
    test_fixture(transform_fixture, "unknown-fragment-type.invalid.graphql", "parse/fixtures/unknown-fragment-type.invalid.expected", input, expected);
}
| 53.537037 | 229 | 0.775362 |
184a816b6c71213a345990c1cb8891247575241f | 13,538 | #![recursion_limit = "2048"]
#![allow(non_upper_case_globals)]
#![allow(non_snake_case)]
#[macro_use]
extern crate bitflags;
#[macro_use]
extern crate stdweb;
use std::cell::RefCell;
use std::error::Error;
use std::rc::Rc;
use stdweb::web::event::{IEvent, IKeyboardEvent, KeyDownEvent, KeyUpEvent, KeyboardLocation};
use stdweb::web::{self, Element, IElement, IEventTarget, INode, INonElementParentNode};
use stdweb::{UnsafeTypedArray, Value};
#[macro_use]
mod common;
use common::*;
mod game;
use game::update_and_render;
mod bytecode;
// Clones each listed variable and moves the clones into the following
// expression (typically a `move` closure). This is the usual pattern for
// handing `Rc`/`Arc` handles to event callbacks without giving up the
// original binding: `enclose!( [pinky] move || { ... } )`.
macro_rules! enclose {
    ( [$( $x:ident ),*] $y:expr ) => {
        {
            $(let $x = $x.clone();)*
            $y
        }
    };
}
// This creates a really basic WebGL context for blitting a single texture.
// On some web browsers this is faster than using a 2d canvas.
//
// Returns the WebGL context as a JS `Value`, or JS `null` when no WebGL
// context could be created (the caller falls back to the 2D canvas API).
fn setup_webgl(canvas: &Element) -> Value {
    const FRAGMENT_SHADER: &'static str = r#"
        precision mediump float;
        varying vec2 v_texcoord;
        uniform sampler2D u_sampler;
        void main() {
            gl_FragColor = vec4( texture2D(
                u_sampler,
                vec2( v_texcoord.s, v_texcoord.t ) ).rgb,
                1.0
            );
        }
    "#;
    const VERTEX_SHADER: &'static str = r#"
        attribute vec2 a_position;
        attribute vec2 a_texcoord;
        uniform mat4 u_matrix;
        varying vec2 v_texcoord;
        void main() {
            gl_Position = u_matrix * vec4( a_position, 0.0, 1.0 );
            v_texcoord = a_texcoord;
        }
    "#;
    // Builds a 4x4 orthographic projection matrix (flattened, column-major
    // as expected by uniformMatrix4fv) mapping the given rect to clip space.
    fn ortho(left: f64, right: f64, bottom: f64, top: f64) -> Vec<f64> {
        let mut m = vec![
            1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0,
        ];
        m[0 * 4 + 0] = 2.0 / (right - left);
        m[1 * 4 + 1] = 2.0 / (top - bottom);
        m[3 * 4 + 0] = (right + left) / (right - left) * -1.0;
        m[3 * 4 + 1] = (top + bottom) / (top - bottom) * -1.0;
        return m;
    }
    // Everything below runs on the JS side: probe the vendor-prefixed context
    // names, compile/link the shaders, and set up a single full-screen quad
    // textured with a 256x256 RGBA framebuffer.
    js!(
        var gl;
        var webgl_names = ["webgl", "experimental-webgl", "webkit-3d", "moz-webgl"];
        for( var i = 0; i < webgl_names.length; ++i ) {
            var name = webgl_names[ i ];
            try {
                gl = @{canvas}.getContext( name );
            } catch( err ) {}
            if( gl ) {
                console.log( "WebGL support using context:", name );
                break;
            }
        }
        if( gl === null ) {
            console.error( "WebGL rendering context not found." );
            return null;
        }
        var vertex_shader = gl.createShader( gl.VERTEX_SHADER );
        var fragment_shader = gl.createShader( gl.FRAGMENT_SHADER );
        gl.shaderSource( vertex_shader, @{VERTEX_SHADER} );
        gl.shaderSource( fragment_shader, @{FRAGMENT_SHADER} );
        gl.compileShader( vertex_shader );
        gl.compileShader( fragment_shader );
        if( !gl.getShaderParameter( vertex_shader, gl.COMPILE_STATUS ) ) {
            console.error( "WebGL vertex shader compilation failed:", gl.getShaderInfoLog( vertex_shader ) );
            return null;
        }
        if( !gl.getShaderParameter( fragment_shader, gl.COMPILE_STATUS ) ) {
            console.error( "WebGL fragment shader compilation failed:", gl.getShaderInfoLog( fragment_shader ) );
            return null;
        }
        var program = gl.createProgram();
        gl.attachShader( program, vertex_shader );
        gl.attachShader( program, fragment_shader );
        gl.linkProgram( program );
        if( !gl.getProgramParameter( program, gl.LINK_STATUS ) ) {
            console.error( "WebGL program linking failed!" );
            return null;
        }
        gl.useProgram( program );
        var vertex_attr = gl.getAttribLocation( program, "a_position" );
        var texcoord_attr = gl.getAttribLocation( program, "a_texcoord" );
        gl.enableVertexAttribArray( vertex_attr );
        gl.enableVertexAttribArray( texcoord_attr );
        var sampler_uniform = gl.getUniformLocation( program, "u_sampler" );
        gl.uniform1i( sampler_uniform, 0 );
        var matrix = @{ortho( 0.0, 256.0, 256.0, 0.0 )};
        var matrix_uniform = gl.getUniformLocation( program, "u_matrix" );
        gl.uniformMatrix4fv( matrix_uniform, false, matrix );
        var texture = gl.createTexture();
        gl.bindTexture( gl.TEXTURE_2D, texture );
        gl.texImage2D(
            gl.TEXTURE_2D,
            0,
            gl.RGBA,
            256,
            256,
            0,
            gl.RGBA,
            gl.UNSIGNED_BYTE,
            new Uint8Array( 256 * 256 * 4 )
        );
        gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST );
        gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST );
        var vertex_buffer = gl.createBuffer();
        gl.bindBuffer( gl.ARRAY_BUFFER, vertex_buffer );
        var vertices = [
            0.0, 0.0,
            0.0, 256.0,
            256.0, 0.0,
            256.0, 256.0
        ];
        gl.bufferData( gl.ARRAY_BUFFER, new Float32Array( vertices ), gl.STATIC_DRAW );
        gl.vertexAttribPointer( vertex_attr, 2, gl.FLOAT, false, 0, 0 );
        var texcoord_buffer = gl.createBuffer();
        gl.bindBuffer( gl.ARRAY_BUFFER, texcoord_buffer );
        var texcoords = [
            0.0, 0.0,
            0.0, 256.0 / 256.0,
            1.0, 0.0,
            1.0, 256.0 / 256.0
        ];
        gl.bufferData( gl.ARRAY_BUFFER, new Float32Array( texcoords ), gl.STATIC_DRAW );
        gl.vertexAttribPointer( texcoord_attr, 2, gl.FLOAT, false, 0, 0 );
        var index_buffer = gl.createBuffer();
        gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, index_buffer );
        var indices = [
            0, 1, 2,
            2, 3, 1
        ];
        gl.bufferData( gl.ELEMENT_ARRAY_BUFFER, new Uint16Array( indices ), gl.STATIC_DRAW );
        gl.clearColor( 0.0, 0.0, 0.0, 1.0 );
        gl.enable( gl.DEPTH_TEST );
        gl.viewport( 0, 0, 256, 256 );
        return gl;
    )
}
/// Web front-end: the game `State` plus bookkeeping for the render loop.
struct PinkyWeb {
    state: State,
    paused: bool, // when true, `run_a_bit` and `draw` are no-ops
    busy: bool,   // a frame is queued/in progress; throttles `main_loop`
    js_ctx: Value, // JS handle: `{ gl }` for WebGL, or `{ ctx, img, buffer }` for the 2D canvas fallback
}
impl PinkyWeb {
    /// Creates the front-end bound to `canvas`, preferring WebGL and falling
    /// back to the 2D canvas API when `setup_webgl` returns `null`.
    fn new(canvas: &Element) -> Self {
        let gl = setup_webgl(&canvas);
        let js_ctx = js!(
            var h = {};
            var canvas = @{canvas};
            h.gl = @{gl};
            if( !h.gl ) {
                console.log( "No WebGL; using Canvas API" );
                // If the WebGL **is** supported but something else
                // went wrong the web browser won't let us create
                // a normal canvas context on a WebGL-ified canvas,
                // so we recreate a new canvas here to work around that.
                var new_canvas = canvas.cloneNode( true );
                canvas.parentNode.replaceChild( new_canvas, canvas );
                canvas = new_canvas;
                h.ctx = canvas.getContext( "2d" );
                h.img = h.ctx.createImageData( 256, 256 );
                h.buffer = new Uint32Array( h.img.data.buffer );
            }
            return h;
        );
        PinkyWeb {
            state: State::new(),
            paused: true,
            busy: false,
            js_ctx,
        }
    }
    /// Stops emulation; `run_a_bit` and `draw` become no-ops until `unpause`.
    fn pause(&mut self) {
        self.paused = true;
    }
    /// Resumes emulation and clears the in-flight-frame flag.
    fn unpause(&mut self) {
        self.paused = false;
        self.busy = false;
    }
    /// Advances the game by one frame. Always reports a completed frame.
    fn execute_cycle(&mut self) -> Result<bool, Box<Error>> {
        self.state.frame();
        Ok(true)
    }
    /// Runs emulation until a whole frame has been processed (or does nothing
    /// while paused). On error the emulator is paused and the error is
    /// propagated to the caller.
    fn run_a_bit(&mut self) -> Result<bool, Box<Error>> {
        if self.paused {
            return Ok(true);
        }
        loop {
            let result = self.execute_cycle();
            match result {
                Ok(processed_whole_frame) => {
                    if processed_whole_frame {
                        return Ok(true);
                    }
                }
                Err(error) => {
                    js!( console.error( "Execution error:", @{format!( "{}", error )} ); );
                    self.pause();
                    return Err(error);
                }
            }
        }
    }
    /// Blits the game's framebuffer to the canvas — via the WebGL texture
    /// when available, otherwise through the 2D canvas `ImageData` fallback.
    fn draw(&mut self) {
        if !self.paused {
            js! {
                var h = @{&self.js_ctx};
                var framebuffer = @{unsafe {
                    UnsafeTypedArray::new( &self.state.framebuffer.buffer )
                }};
                if( h.gl ) {
                    var data = new Uint8Array(
                        framebuffer.buffer,
                        framebuffer.byteOffset,
                        framebuffer.byteLength
                    );
                    h.gl.texSubImage2D( h.gl.TEXTURE_2D,
                        0, 0, 0, 256, 256, h.gl.RGBA, h.gl.UNSIGNED_BYTE, data );
                    h.gl.drawElements( h.gl.TRIANGLES, 6, h.gl.UNSIGNED_SHORT, 0 );
                } else {
                    h.buffer.set( framebuffer );
                    h.ctx.putImageData( h.img, 0, 0 );
                }
            }
        }
    }
    /// Translates a browser key event into a gamepad button update.
    ///
    /// Returns `true` when the key mapped to a button (the caller should then
    /// suppress the default browser action), `false` otherwise.
    fn on_key(&mut self, key: &str, location: KeyboardLocation, is_pressed: bool) -> bool {
        let button = match (key, location) {
            ("Enter", _) => Button::Start,
            ("Shift", KeyboardLocation::Right) => Button::Select,
            ("ArrowUp", _) => Button::Up,
            ("ArrowLeft", _) => Button::Left,
            ("ArrowRight", _) => Button::Right,
            ("ArrowDown", _) => Button::Down,
            // On Edge the arrows have different names
            // for some reason.
            ("Up", _) => Button::Up,
            ("Left", _) => Button::Left,
            ("Right", _) => Button::Right,
            ("Down", _) => Button::Down,
            ("z", _) => Button::A,
            ("x", _) => Button::B,
            // For those using the Dvorak layout.
            (";", _) => Button::A,
            ("q", _) => Button::B,
            // For those using the Dvorak layout **and** Microsoft Edge.
            //
            // On `keydown` we get ";" as we should, but on `keyup`
            // we get "Unidentified". Seriously Microsoft, how buggy can
            // your browser be?
            ("Unidentified", _) if !is_pressed => Button::A,
            _ => return false,
        };
        self.set_button_state(button, is_pressed);
        true
    }
    /// Presses or releases `button` on the emulated gamepad.
    fn set_button_state(&mut self, button: Button::Ty, is_pressed: bool) {
        if is_pressed {
            self.state.press(button);
        } else {
            self.state.release(button);
        }
    }
}
impl State {
    /// Advances the game by one frame, then latches the current gamepad so the
    /// game can compare it against `previous_gamepad` next frame.
    pub fn frame(&mut self) {
        update_and_render(&mut self.framebuffer, &mut self.game_state, self.input);
        self.input.previous_gamepad = self.input.gamepad;
    }
    /// Marks `button` as held down.
    pub fn press(&mut self, button: Button::Ty) {
        self.input.gamepad.insert(button);
    }
    /// Marks `button` as released.
    pub fn release(&mut self, button: Button::Ty) {
        self.input.gamepad.remove(button);
    }
}
// Schedules one frame of emulation on a zero-delay timeout and keeps the
// emulator marked busy until that frame completes (or an error pauses it).
fn emulate_for_a_single_frame(pinky: Rc<RefCell<PinkyWeb>>) {
    pinky.borrow_mut().busy = true;
    web::set_timeout(
        enclose!( [pinky] move || {
            let finished_frame = match pinky.borrow_mut().run_a_bit() {
                Ok( result ) => result,
                Err( error ) => {
                    handle_error( error );
                    return;
                }
            };
            if !finished_frame {
                // Not done yet; yield to the browser and continue shortly.
                web::set_timeout( move || { emulate_for_a_single_frame( pinky ); }, 0 );
            } else {
                let mut pinky = pinky.borrow_mut();
                pinky.busy = false;
            }
        }),
        0,
    );
}
/// Per-animation-frame driver: queues emulation work (unless a frame is
/// already in flight), draws the latest framebuffer, and re-schedules itself.
fn main_loop(pinky: Rc<RefCell<PinkyWeb>>) {
    // If we're running too slowly there is no point
    // in queueing up even more work.
    //
    // A shared borrow is enough to read `busy`; the mutable borrow is only
    // needed for `draw` below.
    if !pinky.borrow().busy {
        emulate_for_a_single_frame(pinky.clone());
    }
    pinky.borrow_mut().draw();
    web::window().request_animation_frame(move |_| {
        main_loop(pinky);
    });
}
/// Makes the element with the given id visible by removing the `hidden`
/// CSS class from it.
fn show(id: &str) {
    let element = web::document().get_element_by_id(id).unwrap();
    element.class_list().remove("hidden").unwrap();
}
/// Hides the element with the given id by adding the `hidden` CSS class
/// to it.
fn hide(id: &str) {
    let element = web::document().get_element_by_id(id).unwrap();
    element.class_list().add("hidden").unwrap();
}
// Wires window keyboard events to the emulated gamepad; keys that map to a
// button get their default browser action suppressed.
fn support_input(pinky: Rc<RefCell<PinkyWeb>>) {
    web::window().add_event_listener(enclose!( [pinky] move |event: KeyDownEvent| {
        let handled = pinky.borrow_mut().on_key( &event.key(), event.location(), true );
        if handled {
            event.prevent_default();
        }
    }));
    web::window().add_event_listener(enclose!( [pinky] move |event: KeyUpEvent| {
        let handled = pinky.borrow_mut().on_key( &event.key(), event.location(), false );
        if handled {
            event.prevent_default();
        }
    }));
}
/// Shows `error` in the page's error panel: fills in the description text,
/// hides the game viewport and reveals the error element.
fn handle_error<E: Into<Box<Error>>>(error: E) {
    let message = format!("{}", error.into());
    let description = web::document()
        .get_element_by_id("error-description")
        .unwrap();
    description.set_text_content(&message);
    hide("viewport");
    show("error");
}
// Entry point: initializes stdweb, binds the emulator to the `viewport`
// canvas, hooks up keyboard input, reveals the viewport and starts the
// animation-frame loop before handing control to the JS event loop.
fn main() {
    stdweb::initialize();
    let canvas = web::document().get_element_by_id("viewport").unwrap();
    let pinky = Rc::new(RefCell::new(PinkyWeb::new(&canvas)));
    support_input(pinky.clone());
    hide("loading");
    hide("error");
    pinky.borrow_mut().unpause();
    show("viewport");
    web::window().request_animation_frame(move |_| {
        main_loop(pinky);
    });
    stdweb::event_loop();
}
| 29.176724 | 113 | 0.521347 |
f576e53016f093f8e0a81cafc301f200c19d0bc4 | 2,335 | use crate::json_to_vec;
use crate::request::prelude::*;
use std::{
error::Error,
fmt::{Display, Formatter, Result as FmtResult},
};
use twilight_model::user::User;
/// Error returned when a requested current-user update is invalid.
#[derive(Clone, Debug)]
pub enum UpdateCurrentUserError {
    /// The length of the username is either fewer than 2 UTF-16 characters or
    /// more than 32 UTF-16 characters.
    UsernameInvalid,
}
impl Display for UpdateCurrentUserError {
    fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
        // Resolve the variant to its message first, then emit it once.
        let description = match self {
            Self::UsernameInvalid => "the username length is invalid",
        };
        f.write_str(description)
    }
}
impl Error for UpdateCurrentUserError {}
/// Fields of the current user's profile that the client wants to change.
#[derive(Default, Serialize)]
struct UpdateCurrentUserFields {
    avatar: Option<String>,
    username: Option<String>,
}
/// Builder-style request to update the current user's profile.
pub struct UpdateCurrentUser<'a> {
    fields: UpdateCurrentUserFields,
    fut: Option<Pending<'a, User>>, // in-flight request future, created lazily by `start`
    http: &'a Client,
}
impl<'a> UpdateCurrentUser<'a> {
    /// Create a request with no fields set yet.
    pub(crate) fn new(http: &'a Client) -> Self {
        Self {
            fields: UpdateCurrentUserFields::default(),
            fut: None,
            http,
        }
    }
    /// Set the avatar.
    pub fn avatar(mut self, avatar: impl Into<String>) -> Self {
        self.fields.avatar.replace(avatar.into());
        self
    }
    /// Set the username.
    ///
    /// The minimum length is 2 UTF-16 characters and the maximum is 32 UTF-16
    /// characters.
    ///
    /// # Errors
    ///
    /// Returns [`UpdateCurrentUserError::UsernameInvalid`] if the username
    /// length is too short or too long.
    ///
    /// [`UpdateCurrentUserError::UsernameInvalid`]: enum.UpdateCurrentUserError.html#variant.UsernameInvalid
    pub fn username(self, username: impl Into<String>) -> Result<Self, UpdateCurrentUserError> {
        self._username(username.into())
    }
    // Non-generic inner fn; the generic wrapper above converts its argument
    // and delegates here.
    fn _username(mut self, username: String) -> Result<Self, UpdateCurrentUserError> {
        if !validate::username(&username) {
            return Err(UpdateCurrentUserError::UsernameInvalid);
        }
        self.fields.username.replace(username);
        Ok(self)
    }
    // Serializes the accumulated fields and stores the in-flight request
    // future; used by the `poll_req!`-generated poll implementation below.
    fn start(&mut self) -> Result<()> {
        self.fut.replace(Box::pin(self.http.request(Request::from((
            json_to_vec(&self.fields)?,
            Route::UpdateCurrentUser,
        )))));
        Ok(())
    }
}
poll_req!(UpdateCurrentUser<'_>, User);
| 26.235955 | 109 | 0.62227 |
22e3d9baa6f47831c6aec7aa36c6948e6b159f56 | 2,773 | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.
use crate::channel::ProverChannel;
use common::{
errors::ProverError, proof::StarkProof, Air, FieldExtension, HashFunction, ProofOptions,
TraceInfo,
};
use crypto::hash::{Blake3_256, Sha3_256};
use math::field::QuadExtension;
mod domain;
use domain::StarkDomain;
mod constraints;
mod deep_fri;
mod trace;
pub use trace::{ExecutionTrace, ExecutionTraceFragment, TracePolyTable};
mod generation;
use generation::generate_proof;
// PROVER
// ================================================================================================
/// Generates a STARK proof attesting that the specified `trace` is a valid execution trace of the
/// computation described by AIR generated using the specified public inputs.
///
/// # Errors
///
/// Returns a [`ProverError`] if proof generation fails.
#[rustfmt::skip]
pub fn prove<AIR: Air>(
    trace: ExecutionTrace<AIR::BaseElement>,
    pub_inputs: AIR::PublicInputs,
    options: ProofOptions,
) -> Result<StarkProof, ProverError> {
    // create an instance of AIR for the provided parameters. this takes a generic description of
    // the computation (provided via AIR type), and creates a description of a specific execution
    // of the computation for the provided public inputs.
    let trace_info = TraceInfo {
        length: trace.len(),
        meta: Vec::new(),
    };
    let air = AIR::new(trace_info, pub_inputs, options);
    // make sure the specified trace is valid against the AIR. This checks validity of both,
    // assertions and state transitions. we do this in debug mode only because this is a very
    // expensive operation.
    #[cfg(debug_assertions)]
    trace.validate(&air);
    // figure out which version of the generic proof generation procedure to run. this is a sort
    // of static dispatch for selecting two generic parameter: extension field and hash function.
    match air.context().options().field_extension() {
        FieldExtension::None => match air.context().options().hash_fn() {
            HashFunction::Blake3_256 => {
                generate_proof::<AIR, AIR::BaseElement, Blake3_256>(air, trace)
            }
            HashFunction::Sha3_256 => {
                generate_proof::<AIR, AIR::BaseElement, Sha3_256>(air, trace)
            },
        },
        FieldExtension::Quadratic => match air.context().options().hash_fn() {
            HashFunction::Blake3_256 => {
                generate_proof::<AIR, QuadExtension<AIR::BaseElement>, Blake3_256>(air, trace)
            }
            HashFunction::Sha3_256 => {
                generate_proof::<AIR, QuadExtension<AIR::BaseElement>, Sha3_256>(air, trace)
            }
        },
    }
}
| 37.986301 | 99 | 0.650559 |
0e2c0046bb71989b5f57b4c417bbacc0b5e57ebc | 5,145 | //! Global version api.
use crate::ffi::version::Version;
use crate::global::get_interface;
use crate::ownership::Owned;
use crate::version::{ReleaseType, VersionAPI};
use crate::Error;
use std::cmp::Ordering;
/// Constructs a new version.
///
/// Constructs a new version with `major`, `minor` and `patch` and sets the rest to `0`.
///
/// Delegates to [`VersionAPI::new_short`] on the global interface.
///
/// # Return
///
/// Constructed version.
#[inline]
pub fn new_short(major: i32, minor: i32, patch: i32) -> Version {
    VersionAPI::new_short(get_interface(), major, minor, patch)
}
/// Constructs a new version.
///
/// Constructs a new version with `major`, `minor`, `patch`, `release_type` and
/// `release_number` and sets the rest to `0`.
///
/// Delegates to [`VersionAPI::new_long`] on the global interface.
///
/// # Return
///
/// Constructed version.
#[inline]
pub fn new_long(
    major: i32,
    minor: i32,
    patch: i32,
    release_type: ReleaseType,
    release_number: i8,
) -> Version {
    VersionAPI::new_long(
        get_interface(),
        major,
        minor,
        patch,
        release_type,
        release_number,
    )
}
/// Constructs a new version.
///
/// Constructs a new version with `major`, `minor`, `patch`, `release_type`,
/// `release_number` and `build`.
///
/// Delegates to [`VersionAPI::new_full`] on the global interface.
///
/// # Return
///
/// Constructed version.
#[inline]
pub fn new_full(
    major: i32,
    minor: i32,
    patch: i32,
    release_type: ReleaseType,
    release_number: i8,
    build: i64,
) -> Version {
    VersionAPI::new_full(
        get_interface(),
        major,
        minor,
        patch,
        release_type,
        release_number,
        build,
    )
}
/// Constructs a version from a string.
///
/// Delegates to [`VersionAPI::from_string`] on the global interface.
///
/// # Failure
///
/// Fails if `string_is_valid(buffer) == false`.
///
/// # Return
///
/// Constructed version.
#[inline]
pub fn from_string(buffer: impl AsRef<str>) -> Result<Version, Error<Owned>> {
    VersionAPI::from_string(get_interface(), buffer)
}
/// Computes the length of the short version string.
///
/// # Return
///
/// Length of the string.
#[inline]
pub fn string_length_short(version: &Version) -> usize {
    VersionAPI::string_length_short(get_interface(), version)
}
/// Computes the length of the long version string.
///
/// # Return
///
/// Length of the string.
#[inline]
pub fn string_length_long(version: &Version) -> usize {
    VersionAPI::string_length_long(get_interface(), version)
}
/// Computes the length of the full version string.
///
/// # Return
///
/// Length of the string.
#[inline]
pub fn string_length_full(version: &Version) -> usize {
    VersionAPI::string_length_full(get_interface(), version)
}
/// Represents the version as a short string.
///
/// # Failure
///
/// This function fails if `buffer.len() < string_length_short(version)`.
///
/// # Return
///
/// Number of written characters on success, error otherwise.
#[inline]
pub fn as_string_short(version: &Version, buffer: impl AsMut<str>) -> Result<usize, Error<Owned>> {
    VersionAPI::as_string_short(get_interface(), version, buffer)
}
/// Represents the version as a long string.
///
/// # Failure
///
/// This function fails if `buffer.len() < string_length_long(version)`.
///
/// # Return
///
/// Number of written characters on success, error otherwise.
#[inline]
pub fn as_string_long(version: &Version, buffer: impl AsMut<str>) -> Result<usize, Error<Owned>> {
    VersionAPI::as_string_long(get_interface(), version, buffer)
}
/// Represents the version as a full string.
///
/// # Failure
///
/// This function fails if `buffer.len() < string_length_full(version)`.
///
/// # Return
///
/// Number of written characters on success, error otherwise.
#[inline]
pub fn as_string_full(version: &Version, buffer: impl AsMut<str>) -> Result<usize, Error<Owned>> {
    VersionAPI::as_string_full(get_interface(), version, buffer)
}
/// Checks whether the version string is valid.
///
/// # Return
///
/// `true` if the string is valid, `false` otherwise.
#[inline]
pub fn string_is_valid(version_string: impl AsRef<str>) -> bool {
    VersionAPI::string_is_valid(get_interface(), version_string)
}
/// Compares two versions.
///
/// Compares two version, disregarding their build number.
///
/// # Return
///
/// Order of the versions.
#[inline]
pub fn compare(lhs: &Version, rhs: &Version) -> Ordering {
    VersionAPI::compare(get_interface(), lhs, rhs)
}
/// Compares two versions.
///
/// Compares two version, disregarding their build number and release type.
///
/// # Return
///
/// Order of the versions.
#[inline]
pub fn compare_weak(lhs: &Version, rhs: &Version) -> Ordering {
    VersionAPI::compare_weak(get_interface(), lhs, rhs)
}
/// Compares two versions.
///
/// All parts of the version, including the build number, take part in the
/// comparison.
///
/// # Return
///
/// Order of the versions.
#[inline]
pub fn compare_strong(lhs: &Version, rhs: &Version) -> Ordering {
    VersionAPI::compare_strong(get_interface(), lhs, rhs)
}
/// Checks for compatibility of two versions.
///
/// Two compatible versions can be used interchangeably.
///
/// # Note
///
/// This function is not commutative.
///
/// # Return
///
/// `true` if the versions are compatible, `false` otherwise.
#[inline]
pub fn is_compatible(lhs: &Version, rhs: &Version) -> bool {
    VersionAPI::is_compatible(get_interface(), lhs, rhs)
}
| 23.386364 | 99 | 0.658698 |
08bdb29dfeeb9ac6a6d3d1572ce46b3db6a7bdbe | 40,806 | // Copyright (c) Microsoft. All rights reserved.
/// An open PKCS#11 session, tied to the library context that created it.
pub struct Session {
    pub(crate) context: std::sync::Arc<crate::Context>,
    pub(crate) handle: pkcs11_sys::CK_SESSION_HANDLE,
    // User PIN for the token; presumably consumed by `login()` before
    // private-object operations (see `get_key_pair`) — confirm in `login`.
    pin: Option<String>,
}
impl Session {
    /// Wraps an already-opened PKCS#11 session handle.
    pub(crate) fn new(
        context: std::sync::Arc<crate::Context>,
        handle: pkcs11_sys::CK_SESSION_HANDLE,
        pin: Option<String>,
    ) -> Self {
        Session {
            context,
            handle,
            pin,
        }
    }
}
/// A symmetric (secret) key object.
pub type Key = crate::Object<()>;
/// A public/private key pair; both halves use the same mechanism.
pub enum KeyPair {
    Ec(
        crate::Object<openssl::ec::EcKey<openssl::pkey::Public>>,
        crate::Object<openssl::ec::EcKey<openssl::pkey::Private>>,
    ),
    Rsa(
        crate::Object<openssl::rsa::Rsa<openssl::pkey::Public>>,
        crate::Object<openssl::rsa::Rsa<openssl::pkey::Private>>,
    ),
}
/// A public key object, either EC or RSA.
pub enum PublicKey {
    Ec(crate::Object<openssl::ec::EcKey<openssl::pkey::Public>>),
    Rsa(crate::Object<openssl::rsa::Rsa<openssl::pkey::Public>>),
}
impl Session {
    /// Get a public key in the current session with the given label.
    ///
    /// Fails with [`GetKeyError::MismatchedMechanismType`] if the object's
    /// key type is neither EC nor RSA.
    pub fn get_public_key(
        self: std::sync::Arc<Self>,
        label: Option<&str>,
    ) -> Result<PublicKey, GetKeyError> {
        unsafe {
            let public_key_handle = self.get_key_inner(pkcs11_sys::CKO_PUBLIC_KEY, label)?;
            let public_key_mechanism_type = self.get_key_mechanism_type(public_key_handle)?;
            match public_key_mechanism_type {
                pkcs11_sys::CKK_EC => {
                    Ok(PublicKey::Ec(crate::Object::new(self, public_key_handle)))
                }
                pkcs11_sys::CKK_RSA => {
                    Ok(PublicKey::Rsa(crate::Object::new(self, public_key_handle)))
                }
                _ => Err(GetKeyError::MismatchedMechanismType),
            }
        }
    }
    /// Get a key pair in the current session with the given label.
    ///
    /// Both halves must report the same key type (EC or RSA), otherwise
    /// [`GetKeyError::MismatchedMechanismType`] is returned.
    pub fn get_key_pair(
        self: std::sync::Arc<Self>,
        label: Option<&str>,
    ) -> Result<KeyPair, GetKeyError> {
        unsafe {
            // Private key access needs login
            self.login().map_err(GetKeyError::LoginFailed)?;
            let public_key_handle = self.get_key_inner(pkcs11_sys::CKO_PUBLIC_KEY, label)?;
            let public_key_mechanism_type = self.get_key_mechanism_type(public_key_handle)?;
            let private_key_handle = self.get_key_inner(pkcs11_sys::CKO_PRIVATE_KEY, label)?;
            let private_key_mechanism_type = self.get_key_mechanism_type(private_key_handle)?;
            match (public_key_mechanism_type, private_key_mechanism_type) {
                (pkcs11_sys::CKK_EC, pkcs11_sys::CKK_EC) => Ok(KeyPair::Ec(
                    crate::Object::new(self.clone(), public_key_handle),
                    crate::Object::new(self, private_key_handle),
                )),
                (pkcs11_sys::CKK_RSA, pkcs11_sys::CKK_RSA) => Ok(KeyPair::Rsa(
                    crate::Object::new(self.clone(), public_key_handle),
                    crate::Object::new(self, private_key_handle),
                )),
                _ => Err(GetKeyError::MismatchedMechanismType),
            }
        }
    }
    /// Get a key in the current session with the given label.
    pub fn get_key(self: std::sync::Arc<Self>, label: Option<&str>) -> Result<Key, GetKeyError> {
        unsafe {
            // Private key access needs login
            self.login().map_err(GetKeyError::LoginFailed)?;
            let key_handle = self.get_key_inner(pkcs11_sys::CKO_SECRET_KEY, label)?;
            Ok(crate::Object::new(self, key_handle))
        }
    }
    // Finds the first object of the given class (and optional label) via the
    // C_FindObjects* enumeration. Fails with `KeyDoesNotExist` when nothing
    // matches; any additional matching objects are ignored.
    unsafe fn get_key_inner(
        &self,
        class: pkcs11_sys::CK_OBJECT_CLASS,
        label: Option<&str>,
    ) -> Result<pkcs11_sys::CK_OBJECT_HANDLE, GetKeyError> {
        let mut templates = vec![pkcs11_sys::CK_ATTRIBUTE_IN {
            r#type: pkcs11_sys::CKA_CLASS,
            pValue: &class as *const _ as _,
            ulValueLen: std::convert::TryInto::try_into(std::mem::size_of_val(&class))
                .expect("usize -> CK_ULONG"),
        }];
        if let Some(label) = label {
            templates.push(pkcs11_sys::CK_ATTRIBUTE_IN {
                r#type: pkcs11_sys::CKA_LABEL,
                pValue: label.as_ptr() as *const _ as _,
                ulValueLen: std::convert::TryInto::try_into(label.len())
                    .expect("usize -> CK_ULONG"),
            });
        }
        let key_handle = {
            let mut find_objects =
                FindObjects::new(self, &templates).map_err(GetKeyError::FindObjectsFailed)?;
            match find_objects.next() {
                Some(key_handle) => key_handle.map_err(GetKeyError::FindObjectsFailed)?,
                None => return Err(GetKeyError::KeyDoesNotExist),
            }
        };
        Ok(key_handle)
    }
    // Reads the object's CKA_KEY_TYPE attribute (e.g. CKK_EC / CKK_RSA) via
    // C_GetAttributeValue.
    unsafe fn get_key_mechanism_type(
        &self,
        key_handle: pkcs11_sys::CK_OBJECT_HANDLE,
    ) -> Result<pkcs11_sys::CK_KEY_TYPE, GetKeyError> {
        // Initial value is arbitrary; it is overwritten by the call below.
        let mut key_type = pkcs11_sys::CKK_EC;
        let key_type_size = std::convert::TryInto::try_into(std::mem::size_of_val(&key_type))
            .expect("usize -> CK_ULONG");
        let mut attribute = pkcs11_sys::CK_ATTRIBUTE {
            r#type: pkcs11_sys::CKA_KEY_TYPE,
            pValue: &mut key_type as *mut _ as _,
            ulValueLen: key_type_size,
        };
        let result = (self.context.C_GetAttributeValue)(self.handle, key_handle, &mut attribute, 1);
        if result != pkcs11_sys::CKR_OK {
            return Err(GetKeyError::GetKeyTypeFailed(result));
        }
        Ok(key_type)
    }
}
/// An error from getting a key.
#[derive(Debug)]
pub enum GetKeyError {
    /// Enumerating matching objects via `C_FindObjects` failed.
    FindObjectsFailed(FindObjectsError),
    /// `C_GetAttributeValue(CKA_KEY_TYPE)` returned the contained `CK_RV`.
    GetKeyTypeFailed(pkcs11_sys::CK_RV),
    /// No object matched the requested class/label.
    KeyDoesNotExist,
    /// Logging in to the token failed.
    LoginFailed(LoginError),
    /// The key's mechanism is not one this module handles, or the two halves
    /// of a key pair report different mechanisms.
    MismatchedMechanismType,
}
impl std::fmt::Display for GetKeyError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            GetKeyError::FindObjectsFailed(_) => f.write_str("could not find objects"),
            GetKeyError::GetKeyTypeFailed(result) => write!(
                f,
                "C_GetAttributeValue(CKA_KEY_TYPE) failed with {}",
                result
            ),
            GetKeyError::KeyDoesNotExist => f.write_str("did not find any keys in the slot"),
            GetKeyError::LoginFailed(_) => f.write_str("could not log in to the token"),
            GetKeyError::MismatchedMechanismType => {
                f.write_str("public and private keys have different mechanisms")
            }
        }
    }
}
impl std::error::Error for GetKeyError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        // Only the wrapper variants carry an underlying error.
        #[allow(clippy::match_same_arms)]
        match self {
            GetKeyError::FindObjectsFailed(inner) => Some(inner),
            GetKeyError::GetKeyTypeFailed(_) => None,
            GetKeyError::KeyDoesNotExist => None,
            GetKeyError::LoginFailed(inner) => Some(inner),
            GetKeyError::MismatchedMechanismType => None,
        }
    }
}
/// An in-progress `C_FindObjects` enumeration over a session.
///
/// Yields handles of objects matching the templates given to
/// [`FindObjects::new`]; the search is finalized with `C_FindObjectsFinal`
/// when the iterator is dropped.
struct FindObjects<'session> {
    session: &'session Session,
}
impl<'session> FindObjects<'session> {
    /// Starts the enumeration with `C_FindObjectsInit`.
    unsafe fn new(
        session: &'session Session,
        templates: &'session [pkcs11_sys::CK_ATTRIBUTE_IN],
    ) -> Result<Self, FindObjectsError> {
        let result = (session.context.C_FindObjectsInit)(
            session.handle,
            templates.as_ptr(),
            std::convert::TryInto::try_into(templates.len()).expect("usize -> CK_ULONG"),
        );
        if result != pkcs11_sys::CKR_OK {
            return Err(FindObjectsError::FindObjectsInitFailed(result));
        }
        Ok(FindObjects { session })
    }
}
impl<'session> Iterator for FindObjects<'session> {
    type Item = Result<pkcs11_sys::CK_OBJECT_HANDLE, FindObjectsError>;
    fn next(&mut self) -> Option<Self::Item> {
        unsafe {
            let mut object_handle = pkcs11_sys::CK_INVALID_OBJECT_HANDLE;
            let mut num_objects = 0;
            // Request at most one handle per call; `num_objects` reports how
            // many were actually written.
            let result = (self.session.context.C_FindObjects)(
                self.session.handle,
                &mut object_handle,
                1,
                &mut num_objects,
            );
            if result != pkcs11_sys::CKR_OK {
                return Some(Err(FindObjectsError::FindObjectsFailed(
                    format!("C_FindObjects failed with {}", result).into(),
                )));
            }
            // Anything other than exactly one valid handle (or zero, meaning
            // the enumeration is exhausted) is a misbehaving implementation.
            match num_objects {
                0 => None,
                1 if object_handle != pkcs11_sys::CK_INVALID_OBJECT_HANDLE => {
                    Some(Ok(object_handle))
                }
                1 => Some(Err(FindObjectsError::FindObjectsFailed(
                    "C_FindObjects found 1 object but object handle is still CK_INVALID_HANDLE"
                        .into(),
                ))),
                num_objects => Some(Err(FindObjectsError::FindObjectsFailed(
                    format!("C_FindObjects found {} objects", num_objects).into(),
                ))),
            }
        }
    }
}
impl<'session> Drop for FindObjects<'session> {
    fn drop(&mut self) {
        unsafe {
            // Best-effort finalization; errors cannot be reported from drop.
            let _ = (self.session.context.C_FindObjectsFinal)(self.session.handle);
        }
    }
}
/// An error from finding an object.
#[derive(Debug)]
pub enum FindObjectsError {
    /// `C_FindObjects` failed or returned inconsistent results.
    FindObjectsFailed(std::borrow::Cow<'static, str>),
    /// `C_FindObjectsInit` returned the contained `CK_RV`.
    FindObjectsInitFailed(pkcs11_sys::CK_RV),
}
impl std::fmt::Display for FindObjectsError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            FindObjectsError::FindObjectsFailed(message) => f.write_str(message),
            FindObjectsError::FindObjectsInitFailed(result) => {
                write!(f, "C_FindObjectsInit failed with {}", result)
            }
        }
    }
}
impl std::error::Error for FindObjectsError {}
/// Intended use of a generated symmetric key.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum KeyUsage {
    /// AES encrypt/decrypt key (AES-GCM; see `generate_key`).
    Aes,
    /// HMAC signing key.
    Hmac,
}
impl Session {
/// Generate a symmetric key in the current session with the given length and label.
pub fn generate_key(
self: std::sync::Arc<Self>,
label: Option<&str>,
usage: KeyUsage,
) -> Result<Key, GenerateKeyError> {
unsafe {
// Deleting existing keys and generating new ones needs login
self.login().map_err(GenerateKeyError::LoginFailed)?;
// If label is set, delete any existing objects with that label first
if let Some(label) = label {
match self.get_key_inner(pkcs11_sys::CKO_SECRET_KEY, Some(label)) {
Ok(key_handle) => {
let result = (self.context.C_DestroyObject)(self.handle, key_handle);
if result != pkcs11_sys::CKR_OK {
return Err(GenerateKeyError::DeleteExistingKeyFailed(result));
}
}
Err(GetKeyError::KeyDoesNotExist) => (),
Err(err) => return Err(GenerateKeyError::GetExistingKeyFailed(err)),
}
}
let r#true = pkcs11_sys::CK_TRUE;
let true_size = std::convert::TryInto::try_into(std::mem::size_of_val(&r#true))
.expect("usize -> CK_ULONG");
let r#true = &r#true as *const _ as _;
// Common to all keys
let mut key_template = vec![
pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_PRIVATE,
pValue: r#true,
ulValueLen: true_size,
},
pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_SENSITIVE,
pValue: r#true,
ulValueLen: true_size,
},
pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_TOKEN,
pValue: r#true,
ulValueLen: true_size,
},
pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_VERIFY,
pValue: r#true,
ulValueLen: true_size,
},
];
if let Some(label) = label {
key_template.push(pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_LABEL,
pValue: label.as_ptr() as _,
ulValueLen: std::convert::TryInto::try_into(label.len())
.expect("usize -> CK_ULONG"),
});
}
match usage {
KeyUsage::Aes => {
// We want to use AES-256-GCM, but fall back to AES-128-GCM if the PKCS#11 implementation
// doesn't support AES-256-GCM (eg Cryptoauthlib on ATECC608A).
//
// Unfortunately PKCS#11 doesn't give us a way to know up-front if the token supports AES-256-GCM or not.
// So first try creating a 256-bit key. If that fails, try again with a 128-bit key.
// If that also fails, return an error.
let mechanism = pkcs11_sys::CK_MECHANISM_IN {
mechanism: pkcs11_sys::CKM_AES_KEY_GEN,
pParameter: std::ptr::null(),
ulParameterLen: 0,
};
key_template.push(pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_DECRYPT,
pValue: r#true,
ulValueLen: true_size,
});
key_template.push(pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_ENCRYPT,
pValue: r#true,
ulValueLen: true_size,
});
let key_type = pkcs11_sys::CKK_AES;
key_template.push(pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_KEY_TYPE,
pValue: &key_type as *const _ as _,
ulValueLen: std::convert::TryInto::try_into(std::mem::size_of_val(
&key_type,
))
.expect("usize -> CK_ULONG"),
});
let key_template_except_value_len = key_template.clone();
let mut len: pkcs11_sys::CK_ULONG =
std::convert::TryInto::try_into(32).expect("usize -> CK_ULONG");
let len_size: pkcs11_sys::CK_ULONG =
std::convert::TryInto::try_into(std::mem::size_of_val(&len))
.expect("usize -> CK_ULONG");
key_template.push(pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_VALUE_LEN,
pValue: &len as *const _ as _,
ulValueLen: len_size,
});
let mut key_handle = pkcs11_sys::CK_INVALID_OBJECT_HANDLE;
let result = (self.context.C_GenerateKey)(
self.handle,
&mechanism,
key_template.as_ptr() as _,
std::convert::TryInto::try_into(key_template.len())
.expect("usize -> CK_ULONG"),
&mut key_handle,
);
if result == pkcs11_sys::CKR_OK
&& key_handle != pkcs11_sys::CK_INVALID_OBJECT_HANDLE
{
return Ok(crate::Object::new(self, key_handle));
}
// C_GenerateKey failed. Try with a 128-bit key.
let mut key_template = key_template_except_value_len;
len = 16;
key_template.push(pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_VALUE_LEN,
pValue: &len as *const _ as _,
ulValueLen: len_size,
});
let mut key_handle = pkcs11_sys::CK_INVALID_OBJECT_HANDLE;
let result = (self.context.C_GenerateKey)(
self.handle,
&mechanism,
key_template.as_ptr() as _,
std::convert::TryInto::try_into(key_template.len())
.expect("usize -> CK_ULONG"),
&mut key_handle,
);
if result != pkcs11_sys::CKR_OK {
return Err(GenerateKeyError::GenerateKeyFailed(result));
}
if key_handle == pkcs11_sys::CK_INVALID_OBJECT_HANDLE {
return Err(GenerateKeyError::GenerateKeyDidNotReturnHandle);
}
Ok(crate::Object::new(self, key_handle))
}
KeyUsage::Hmac => {
// HMAC-SHA256 uses 256-bit keys
let mechanism = pkcs11_sys::CK_MECHANISM_IN {
mechanism: pkcs11_sys::CKM_GENERIC_SECRET_KEY_GEN,
pParameter: std::ptr::null(),
ulParameterLen: 0,
};
let len: pkcs11_sys::CK_ULONG =
std::convert::TryInto::try_into(32).expect("usize -> CK_ULONG");
let len_size: pkcs11_sys::CK_ULONG =
std::convert::TryInto::try_into(std::mem::size_of_val(&len))
.expect("usize -> CK_ULONG");
key_template.push(pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_SIGN,
pValue: r#true,
ulValueLen: true_size,
});
key_template.push(pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_VALUE_LEN,
pValue: &len as *const _ as _,
ulValueLen: len_size,
});
let key_type = pkcs11_sys::CKK_GENERIC_SECRET;
key_template.push(pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_KEY_TYPE,
pValue: &key_type as *const _ as _,
ulValueLen: std::convert::TryInto::try_into(std::mem::size_of_val(
&key_type,
))
.expect("usize -> CK_ULONG"),
});
let mut key_handle = pkcs11_sys::CK_INVALID_OBJECT_HANDLE;
let result = (self.context.C_GenerateKey)(
self.handle,
&mechanism,
key_template.as_ptr() as _,
std::convert::TryInto::try_into(key_template.len())
.expect("usize -> CK_ULONG"),
&mut key_handle,
);
if result != pkcs11_sys::CKR_OK {
return Err(GenerateKeyError::GenerateKeyFailed(result));
}
if key_handle == pkcs11_sys::CK_INVALID_OBJECT_HANDLE {
return Err(GenerateKeyError::GenerateKeyDidNotReturnHandle);
}
Ok(crate::Object::new(self, key_handle))
}
}
}
}
}
/// An error from generating a key pair.
#[derive(Debug)]
pub enum GenerateKeyError {
DeleteExistingKeyFailed(pkcs11_sys::CK_RV),
GenerateKeyDidNotReturnHandle,
GenerateKeyFailed(pkcs11_sys::CK_RV),
GetExistingKeyFailed(GetKeyError),
LoginFailed(crate::LoginError),
}
impl std::fmt::Display for GenerateKeyError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
GenerateKeyError::DeleteExistingKeyFailed(result) => write!(f, "C_DestroyObject failed with {}", result),
GenerateKeyError::GenerateKeyDidNotReturnHandle =>
f.write_str("could not generate key pair: C_GenerateKey succeeded but key handle is still CK_INVALID_HANDLE"),
GenerateKeyError::GenerateKeyFailed(result) => write!(f, "could not generate key: C_GenerateKey failed with {}", result),
GenerateKeyError::GetExistingKeyFailed(_) => write!(f, "could not get existing key object"),
GenerateKeyError::LoginFailed(_) => f.write_str("could not log in to the token"),
}
}
}
impl std::error::Error for GenerateKeyError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
#[allow(clippy::match_same_arms)]
match self {
GenerateKeyError::DeleteExistingKeyFailed(_) => None,
GenerateKeyError::GenerateKeyDidNotReturnHandle => None,
GenerateKeyError::GenerateKeyFailed(_) => None,
GenerateKeyError::GetExistingKeyFailed(inner) => Some(inner),
GenerateKeyError::LoginFailed(inner) => Some(inner),
}
}
}
impl Session {
/// Import a symmetric key in the current session with the given bytes and label.
pub fn import_key(
self: std::sync::Arc<Self>,
bytes: &[u8],
label: Option<&str>,
usage: KeyUsage,
) -> Result<Key, ImportKeyError> {
unsafe {
// Deleting existing keys and importing new ones needs login
self.login().map_err(ImportKeyError::LoginFailed)?;
// If label is set, delete any existing objects with that label first
if let Some(label) = label {
match self.get_key_inner(pkcs11_sys::CKO_SECRET_KEY, Some(label)) {
Ok(key_handle) => {
let result = (self.context.C_DestroyObject)(self.handle, key_handle);
if result != pkcs11_sys::CKR_OK {
return Err(ImportKeyError::DeleteExistingKeyFailed(result));
}
}
Err(GetKeyError::KeyDoesNotExist) => (),
Err(err) => return Err(ImportKeyError::GetExistingKeyFailed(err)),
}
}
let class = pkcs11_sys::CKO_SECRET_KEY;
let r#true = pkcs11_sys::CK_TRUE;
let true_size = std::convert::TryInto::try_into(std::mem::size_of_val(&r#true))
.expect("usize -> CK_ULONG");
let r#true = &r#true as *const _ as _;
// Common to all keys
let mut key_template = vec![
pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_CLASS,
pValue: &class as *const _ as _,
ulValueLen: std::convert::TryInto::try_into(std::mem::size_of_val(&class))
.expect("usize -> CK_ULONG"),
},
pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_PRIVATE,
pValue: r#true,
ulValueLen: true_size,
},
pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_SENSITIVE,
pValue: r#true,
ulValueLen: true_size,
},
pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_TOKEN,
pValue: r#true,
ulValueLen: true_size,
},
pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_VALUE,
pValue: bytes.as_ptr() as _,
ulValueLen: std::convert::TryInto::try_into(bytes.len())
.expect("usize -> CK_ULONG"),
},
];
if let Some(label) = label {
key_template.push(pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_LABEL,
pValue: label.as_ptr() as _,
ulValueLen: std::convert::TryInto::try_into(label.len())
.expect("usize -> CK_ULONG"),
});
}
let key_type = match usage {
KeyUsage::Aes => {
key_template.push(pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_DECRYPT,
pValue: r#true,
ulValueLen: true_size,
});
key_template.push(pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_ENCRYPT,
pValue: r#true,
ulValueLen: true_size,
});
pkcs11_sys::CKK_AES
}
KeyUsage::Hmac => {
key_template.push(pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_SIGN,
pValue: r#true,
ulValueLen: true_size,
});
key_template.push(pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_VERIFY,
pValue: r#true,
ulValueLen: true_size,
});
pkcs11_sys::CKK_GENERIC_SECRET
}
};
key_template.push(pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_KEY_TYPE,
pValue: &key_type as *const _ as _,
ulValueLen: std::convert::TryInto::try_into(std::mem::size_of_val(&key_type))
.expect("usize -> CK_ULONG"),
});
let mut key_handle = pkcs11_sys::CK_INVALID_OBJECT_HANDLE;
let result = (self.context.C_CreateObject)(
self.handle,
key_template.as_ptr() as _,
std::convert::TryInto::try_into(key_template.len()).expect("usize -> CK_ULONG"),
&mut key_handle,
);
if result != pkcs11_sys::CKR_OK {
return Err(ImportKeyError::CreateObjectFailed(result));
}
if key_handle == pkcs11_sys::CK_INVALID_OBJECT_HANDLE {
return Err(ImportKeyError::CreateObjectDidNotReturnHandle);
}
Ok(crate::Object::new(self, key_handle))
}
}
}
/// An error from generating a key pair.
#[derive(Debug)]
pub enum ImportKeyError {
CreateObjectDidNotReturnHandle,
CreateObjectFailed(pkcs11_sys::CK_RV),
DeleteExistingKeyFailed(pkcs11_sys::CK_RV),
GetExistingKeyFailed(GetKeyError),
LoginFailed(crate::LoginError),
}
impl std::fmt::Display for ImportKeyError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
ImportKeyError::CreateObjectDidNotReturnHandle =>
f.write_str("could not generate key pair: C_CreateObject succeeded but key handle is still CK_INVALID_HANDLE"),
ImportKeyError::CreateObjectFailed(result) => write!(f, "could not generate key pair: C_CreateObject failed with {}", result),
ImportKeyError::DeleteExistingKeyFailed(result) => write!(f, "C_DestroyObject failed with {}", result),
ImportKeyError::GetExistingKeyFailed(_) => write!(f, "could not get existing key object"),
ImportKeyError::LoginFailed(_) => f.write_str("could not log in to the token"),
}
}
}
impl std::error::Error for ImportKeyError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
#[allow(clippy::match_same_arms)]
match self {
ImportKeyError::CreateObjectDidNotReturnHandle => None,
ImportKeyError::CreateObjectFailed(_) => None,
ImportKeyError::DeleteExistingKeyFailed(_) => None,
ImportKeyError::GetExistingKeyFailed(inner) => Some(inner),
ImportKeyError::LoginFailed(inner) => Some(inner),
}
}
}
impl Session {
/// Generate an EC key pair in the current session with the given curve and label.
pub fn generate_ec_key_pair(
self: std::sync::Arc<Self>,
curve: openssl2::EcCurve,
label: Option<&str>,
) -> Result<
(
crate::Object<openssl::ec::EcKey<openssl::pkey::Public>>,
crate::Object<openssl::ec::EcKey<openssl::pkey::Private>>,
),
GenerateKeyPairError,
> {
unsafe {
let oid = curve.as_oid_der();
let public_key_template = vec![pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_EC_PARAMS,
pValue: oid.as_ptr() as _,
ulValueLen: std::convert::TryInto::try_into(oid.len()).expect("usize -> CK_ULONG"),
}];
let private_key_template = vec![];
self.generate_key_pair_inner(
pkcs11_sys::CKM_EC_KEY_PAIR_GEN,
public_key_template,
private_key_template,
label,
)
}
}
/// Generate an RSA key pair in the current session with the given modulus size, exponent and label.
pub fn generate_rsa_key_pair(
self: std::sync::Arc<Self>,
modulus_bits: pkcs11_sys::CK_ULONG,
exponent: &openssl::bn::BigNumRef,
label: Option<&str>,
) -> Result<
(
crate::Object<openssl::rsa::Rsa<openssl::pkey::Public>>,
crate::Object<openssl::rsa::Rsa<openssl::pkey::Private>>,
),
GenerateKeyPairError,
> {
unsafe {
let exponent = exponent.to_vec();
let public_key_template = vec![
pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_MODULUS_BITS,
pValue: &modulus_bits as *const _ as _,
ulValueLen: std::convert::TryInto::try_into(std::mem::size_of_val(
&modulus_bits,
))
.expect("usize -> CK_ULONG"),
},
pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_PUBLIC_EXPONENT,
pValue: exponent.as_ptr() as _,
ulValueLen: std::convert::TryInto::try_into(exponent.len())
.expect("usize -> CK_ULONG"),
},
];
let private_key_template = vec![];
self.generate_key_pair_inner(
pkcs11_sys::CKM_RSA_PKCS_KEY_PAIR_GEN,
public_key_template,
private_key_template,
label,
)
}
}
unsafe fn generate_key_pair_inner<TPublic, TPrivate>(
self: std::sync::Arc<Self>,
mechanism: pkcs11_sys::CK_MECHANISM_TYPE,
mut public_key_template: Vec<pkcs11_sys::CK_ATTRIBUTE_IN>,
mut private_key_template: Vec<pkcs11_sys::CK_ATTRIBUTE_IN>,
label: Option<&str>,
) -> Result<(crate::Object<TPublic>, crate::Object<TPrivate>), GenerateKeyPairError> {
// Deleting existing keys and generating new ones needs login
self.login().map_err(GenerateKeyPairError::LoginFailed)?;
// If label is set, delete any existing objects with that label first
if let Some(label) = label {
for &class in &[pkcs11_sys::CKO_PUBLIC_KEY, pkcs11_sys::CKO_PRIVATE_KEY] {
match self.get_key_inner(class, Some(label)) {
Ok(key_handle) => {
let result = (self.context.C_DestroyObject)(self.handle, key_handle);
if result != pkcs11_sys::CKR_OK {
return Err(GenerateKeyPairError::DeleteExistingKeyFailed(result));
}
}
Err(GetKeyError::KeyDoesNotExist) => (),
Err(err) => return Err(GenerateKeyPairError::GetExistingKeyFailed(err)),
}
}
}
let mechanism = pkcs11_sys::CK_MECHANISM_IN {
mechanism,
pParameter: std::ptr::null(),
ulParameterLen: 0,
};
let r#true = pkcs11_sys::CK_TRUE;
let true_size = std::convert::TryInto::try_into(std::mem::size_of_val(&r#true))
.expect("usize -> CK_ULONG");
let r#true = &r#true as *const _ as _;
let r#false = pkcs11_sys::CK_FALSE;
let false_size = std::convert::TryInto::try_into(std::mem::size_of_val(&r#false))
.expect("usize -> CK_ULONG");
let r#false = &r#false as *const _ as _;
// The spec's example also passes in CKA_WRAP for the public key and CKA_UNWRAP for the private key,
// but tpm2-pkcs11's impl of `C_GenerateKeyPair` does not recognize those and fails.
//
// We don't need them anyway, so we don't pass them.
public_key_template.push(pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_ENCRYPT,
pValue: r#true,
ulValueLen: true_size,
});
public_key_template.push(pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_PRIVATE,
pValue: r#false,
ulValueLen: false_size,
});
public_key_template.push(pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_TOKEN,
pValue: r#true,
ulValueLen: true_size,
});
public_key_template.push(pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_VERIFY,
pValue: r#true,
ulValueLen: true_size,
});
if let Some(label) = label {
public_key_template.push(pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_LABEL,
pValue: label.as_ptr() as _,
ulValueLen: std::convert::TryInto::try_into(label.len())
.expect("usize -> CK_ULONG"),
});
}
private_key_template.push(pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_DECRYPT,
pValue: r#true,
ulValueLen: true_size,
});
private_key_template.push(pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_PRIVATE,
pValue: r#true,
ulValueLen: true_size,
});
private_key_template.push(pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_SENSITIVE,
pValue: r#true,
ulValueLen: true_size,
});
private_key_template.push(pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_SIGN,
pValue: r#true,
ulValueLen: true_size,
});
private_key_template.push(pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_TOKEN,
pValue: r#true,
ulValueLen: true_size,
});
if let Some(label) = label {
private_key_template.push(pkcs11_sys::CK_ATTRIBUTE_IN {
r#type: pkcs11_sys::CKA_LABEL,
pValue: label.as_ptr() as _,
ulValueLen: std::convert::TryInto::try_into(label.len())
.expect("usize -> CK_ULONG"),
});
}
let mut public_key_handle = pkcs11_sys::CK_INVALID_OBJECT_HANDLE;
let mut private_key_handle = pkcs11_sys::CK_INVALID_OBJECT_HANDLE;
let result = (self.context.C_GenerateKeyPair)(
self.handle,
&mechanism,
public_key_template.as_ptr() as _,
std::convert::TryInto::try_into(public_key_template.len()).expect("usize -> CK_ULONG"),
private_key_template.as_ptr() as _,
std::convert::TryInto::try_into(private_key_template.len()).expect("usize -> CK_ULONG"),
&mut public_key_handle,
&mut private_key_handle,
);
if result != pkcs11_sys::CKR_OK {
return Err(GenerateKeyPairError::GenerateKeyPairFailed(result));
}
if public_key_handle == pkcs11_sys::CK_INVALID_OBJECT_HANDLE {
return Err(GenerateKeyPairError::GenerateKeyPairDidNotReturnHandle(
"public",
));
}
if private_key_handle == pkcs11_sys::CK_INVALID_OBJECT_HANDLE {
return Err(GenerateKeyPairError::GenerateKeyPairDidNotReturnHandle(
"private",
));
}
Ok((
crate::Object::new(self.clone(), public_key_handle),
crate::Object::new(self, private_key_handle),
))
}
}
/// An error from generating a key pair.
#[derive(Debug)]
pub enum GenerateKeyPairError {
DeleteExistingKeyFailed(pkcs11_sys::CK_RV),
GenerateKeyPairDidNotReturnHandle(&'static str),
GenerateKeyPairFailed(pkcs11_sys::CK_RV),
GetExistingKeyFailed(GetKeyError),
LoginFailed(crate::LoginError),
}
impl std::fmt::Display for GenerateKeyPairError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
GenerateKeyPairError::DeleteExistingKeyFailed(result) => write!(f, "C_DestroyObject failed with {}", result),
GenerateKeyPairError::GenerateKeyPairDidNotReturnHandle(kind) =>
write!(f, "could not generate key pair: C_GenerateKeyPair succeeded but {} key handle is still CK_INVALID_HANDLE", kind),
GenerateKeyPairError::GenerateKeyPairFailed(result) => write!(f, "could not generate key pair: C_GenerateKeyPair failed with {}", result),
GenerateKeyPairError::GetExistingKeyFailed(_) => write!(f, "could not get existing key object"),
GenerateKeyPairError::LoginFailed(_) => f.write_str("could not log in to the token"),
}
}
}
impl std::error::Error for GenerateKeyPairError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
#[allow(clippy::match_same_arms)]
match self {
GenerateKeyPairError::DeleteExistingKeyFailed(_) => None,
GenerateKeyPairError::GenerateKeyPairDidNotReturnHandle(_) => None,
GenerateKeyPairError::GenerateKeyPairFailed(_) => None,
GenerateKeyPairError::GetExistingKeyFailed(inner) => Some(inner),
GenerateKeyPairError::LoginFailed(inner) => Some(inner),
}
}
}
impl Session {
pub(crate) unsafe fn login(&self) -> Result<(), LoginError> {
let mut session_info = std::mem::MaybeUninit::uninit();
let result = (self.context.C_GetSessionInfo)(self.handle, session_info.as_mut_ptr());
if result != pkcs11_sys::CKR_OK {
return Err(LoginError::GetSessionInfoFailed(result));
}
let session_info = session_info.assume_init();
match session_info.state {
pkcs11_sys::CKS_RO_USER_FUNCTIONS
| pkcs11_sys::CKS_RW_USER_FUNCTIONS
| pkcs11_sys::CKS_RW_SO_FUNCTIONS => return Ok(()),
_ => (),
}
if let Some(pin) = &self.pin {
let result = (self.context.C_Login)(
self.handle,
pkcs11_sys::CKU_USER,
pin.as_ptr() as _,
std::convert::TryInto::try_into(pin.len()).expect("usize -> CK_ULONG"),
);
if result != pkcs11_sys::CKR_OK && result != pkcs11_sys::CKR_USER_ALREADY_LOGGED_IN {
return Err(LoginError::LoginFailed(result));
}
} else {
// Don't fail if PIN was never provided to us. We decide to log in proactively, so it's *possible* the operation we're trying to log in for
// doesn't actually need a login.
//
// So we pretend to succeed. If the operation did require a login after all, it'll fail with the approprate error.
}
Ok(())
}
}
/// An error from logging in to the token.
#[derive(Debug)]
pub enum LoginError {
GetSessionInfoFailed(pkcs11_sys::CK_RV),
LoginFailed(pkcs11_sys::CK_RV),
}
impl std::fmt::Display for LoginError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
LoginError::GetSessionInfoFailed(result) => {
write!(f, "C_GetSessionInfo failed with {}", result)
}
LoginError::LoginFailed(result) => write!(f, "C_Login failed with {}", result),
}
}
}
impl std::error::Error for LoginError {}
impl Drop for Session {
fn drop(&mut self) {
unsafe {
let _ = (self.context.C_CloseSession)(self.handle);
}
}
}
| 39.617476 | 151 | 0.542616 |
dd30876a600276a0b1ccc91fd75c93d245a18d2c | 1,928 | // Copyright 2020 The Druid Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! X11 menus implementation.
use crate::hotkey::HotKey;
pub struct Menu;
impl Menu {
pub fn new() -> Menu {
// TODO(x11/menus): implement Menu::new (currently a no-op)
tracing::warn!("Menu::new is currently unimplemented for X11 platforms.");
Menu {}
}
pub fn new_for_popup() -> Menu {
// TODO(x11/menus): implement Menu::new_for_popup (currently a no-op)
tracing::warn!("Menu::new_for_popup is currently unimplemented for X11 platforms.");
Menu {}
}
pub fn add_dropdown(&mut self, mut _menu: Menu, _text: &str, _enabled: bool) {
// TODO(x11/menus): implement Menu::add_dropdown (currently a no-op)
tracing::warn!("Menu::add_dropdown is currently unimplemented for X11 platforms.");
}
pub fn add_item(
&mut self,
_id: u32,
_text: &str,
_key: Option<&HotKey>,
_enabled: bool,
_selected: bool,
) {
// TODO(x11/menus): implement Menu::add_item (currently a no-op)
tracing::warn!("Menu::add_item is currently unimplemented for X11 platforms.");
}
pub fn add_separator(&mut self) {
// TODO(x11/menus): implement Menu::add_separator (currently a no-op)
tracing::warn!("Menu::add_separator is currently unimplemented for X11 platforms.");
}
}
| 34.428571 | 92 | 0.657676 |
db0a966ecc1639b7edf4613b5d721b96dff97583 | 4,390 | // Copyright 2021 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use anyhow::{format_err, Result};
use regex::Regex;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::path::PathBuf;
/// Parses VDL Output to get Emulator PID
pub fn get_emu_pid(vdl_output: &PathBuf) -> Result<u32> {
let reader = BufReader::new(File::open(vdl_output)?);
let emu_process = Regex::new(r#"\s+name:\s+"Emulator"$"#).unwrap();
let pid = Regex::new(r"\s+pid:\s+(?P<id>\d+)$").unwrap();
let mut found_emu = false;
let mut emu_pid = 0;
for line in reader.lines() {
if let Ok(l) = line {
if !found_emu && emu_process.is_match(&l) {
found_emu = true;
continue;
}
if found_emu {
pid.captures(&l).and_then(|cap| {
cap.name("id").map(|id| emu_pid = id.as_str().parse::<u32>().unwrap())
});
if emu_pid == 0 {
break;
}
return Ok(emu_pid);
}
}
}
return Err(format_err!(
"Cannot parse --vdl-output {} to obtain emulator PID",
vdl_output.display()
));
}
#[cfg(test)]
mod tests {
use super::*;
use std::io::Write;
use tempfile::Builder;
#[test]
fn test_parse_valid() -> Result<()> {
let data = format!(
r#"device_info: {{
base_dir: "/tmp/launcher274142475"
ports: {{
name: "ssh"
value: 57306
}}
ports: {{
name: "emulatorController"
value: 43143
}}
processes: {{
name: "Emulator"
pid: 1454638
}}
processes: {{
name: "PackageServer"
pid: 1454949
}}
device_type: "workstation_qemu-x64"
}}
network_address: "localhost"
"#,
);
let tmp_dir = Builder::new().prefix("vdl_proto_test_").tempdir()?;
let vdl_out = tmp_dir.path().join("vdl_output");
File::create(&vdl_out)?.write_all(data.as_bytes())?;
let pid = get_emu_pid(&vdl_out)?;
assert_eq!(pid, 1454638);
Ok(())
}
#[test]
fn test_parse_error() -> Result<()> {
let data = format!(
r#"device_info: {{
base_dir: "/tmp/launcher274142475"
ports: {{
name: "ssh"
value:
}}
ports: {{
name: "emulatorController"
value: 43143
}}
processes: {{
name: "Emulator"
pid:
}}
processes: {{
name: "PackageServer"
pid: 1454949
}}
device_type: "workstation_qemu-x64"
}}
network_address: "localhost"
"#,
);
let tmp_dir = Builder::new().prefix("vdl_proto_test_").tempdir()?;
let vdl_out = tmp_dir.path().join("vdl_output");
File::create(&vdl_out)?.write_all(data.as_bytes())?;
let pid = get_emu_pid(&vdl_out);
assert_eq!(pid.is_err(), true);
Ok(())
}
#[test]
fn test_parse_no_emu() -> Result<()> {
let data = format!(
r#"device_info: {{
base_dir: "/tmp/launcher274142475"
ports: {{
name: "emulatorController"
value: 43143
}}
processes: {{
name: "PackageServer"
pid: 1454949
}}
device_type: "workstation_qemu-x64"
}}
network_address: "localhost"
"#,
);
let tmp_dir = Builder::new().prefix("vdl_proto_test_").tempdir()?;
let vdl_out = tmp_dir.path().join("vdl_output");
File::create(&vdl_out)?.write_all(data.as_bytes())?;
let pid = get_emu_pid(&vdl_out);
assert_eq!(pid.is_err(), true);
Ok(())
}
}
| 30.486111 | 90 | 0.450342 |
c15e799d2f4149ef866dfde653f24a72332807d0 | 358 | use bevy::{
prelude::{
Commands,
DespawnRecursiveExt,
Entity,
Query,
Without,
},
render::camera::Camera,
};
// remove all entities that are not a camera
pub fn clean_up_scene(mut commands: Commands, entities: Query<Entity, Without<Camera>>) {
entities.for_each(|entity| {
commands.entity(entity).despawn_recursive();
});
}
| 19.888889 | 89 | 0.670391 |
e6c414d3fe88ac6435d537fe5b0843cb7f7a6ab1 | 103,533 | // DO NOT EDIT !
// This file was generated automatically from 'src/mako/cli/main.rs.mako'
// DO NOT EDIT !
#![allow(unused_variables, unused_imports, dead_code, unused_mut)]
extern crate tokio;
#[macro_use]
extern crate clap;
extern crate yup_oauth2 as oauth2;
use std::env;
use std::io::{self, Write};
use clap::{App, SubCommand, Arg};
use google_datastore1::{api, Error};
mod client;
use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg,
input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol,
calltype_from_str, remove_json_null_values, ComplexType, JsonType, JsonTypeInfo};
use std::default::Default;
use std::str::FromStr;
use serde_json as json;
use clap::ArgMatches;
enum DoitError {
IoError(String, io::Error),
ApiError(Error),
}
struct Engine<'n> {
opt: ArgMatches<'n>,
hub: api::Datastore,
gp: Vec<&'static str>,
gpm: Vec<(&'static str, &'static str)>,
}
impl<'n> Engine<'n> {
async fn _projects_allocate_ids(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec![]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::AllocateIdsRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.projects().allocate_ids(request, opt.value_of("project-id").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _projects_begin_transaction(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"transaction-options.read-write.previous-transaction" => Some(("transactionOptions.readWrite.previousTransaction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["previous-transaction", "read-write", "transaction-options"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::BeginTransactionRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.projects().begin_transaction(request, opt.value_of("project-id").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
    /// CLI handler for `projects commit`: assembles an `api::CommitRequest`
    /// from repeated `kv` arguments and issues `projects().commit(...)`.
    ///
    /// Argument problems are accumulated in `err` instead of aborting; when
    /// `dry_run` is set the call is validated but never executed.
    async fn _projects_commit(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
                                                    -> Result<(), DoitError> {
        let mut field_cursor = FieldCursor::default();
        // The request body is built as free-form JSON first, then deserialized
        // into the typed request struct once all fields are set.
        let mut object = json::value::Value::Object(Default::default());
        for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            let last_errc = err.issues.len();
            let (key, value) = parse_kv_arg(&*kvarg, err, false);
            let mut temp_cursor = field_cursor.clone();
            if let Err(field_err) = temp_cursor.set(&*key) {
                err.issues.push(field_err);
            }
            if value.is_none() {
                // A bare key only repositions the cursor; it is not an
                // assignment, so discard any error `set` recorded above.
                field_cursor = temp_cursor.clone();
                if err.issues.len() > last_errc {
                    err.issues.remove(last_errc);
                }
                continue;
            }
            // Map the CLI field path to its JSON wire name and expected type.
            let type_info: Option<(&'static str, JsonTypeInfo)> =
                match &temp_cursor.to_string()[..] {
                    "mode" => Some(("mode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                    "transaction" => Some(("transaction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                    _ => {
                        // Unknown field: record a diagnostic with a suggestion.
                        let suggestion = FieldCursor::did_you_mean(key, &vec!["mode", "transaction"]);
                        err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
                        None
                    }
                };
            if let Some((field_cursor_str, type_info)) = type_info {
                FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
            }
        }
        // Fields were validated above, so this deserialization is expected to succeed.
        let mut request: api::CommitRequest = json::value::from_value(object).unwrap();
        let mut call = self.hub.projects().commit(request, opt.value_of("project-id").unwrap_or(""));
        // Apply any `-v key=value` pairs as standard (global) query parameters.
        for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            let (key, value) = parse_kv_arg(&*parg, err, false);
            match key {
                _ => {
                    let mut found = false;
                    for param in &self.gp {
                        if key == *param {
                            found = true;
                            // Translate the CLI spelling to the wire name when a mapping exists.
                            call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
                            break;
                        }
                    }
                    if !found {
                        err.issues.push(CLIError::UnknownParameter(key.to_string(),
                                                                  {let mut v = Vec::new();
                                                                           v.extend(self.gp.iter().map(|v|*v));
                                                                           v } ));
                    }
                }
            }
        }
        let protocol = CallType::Standard;
        if dry_run {
            Ok(())
        } else {
            assert!(err.issues.len() == 0);
            // Extra OAuth scopes may be supplied via the `url` argument.
            for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
                call = call.add_scope(scope);
            }
            let mut ostream = match writer_from_opts(opt.value_of("out")) {
                Ok(mut f) => f,
                Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
            };
            match match protocol {
                CallType::Standard => call.doit().await,
                _ => unreachable!()
            } {
                Err(api_err) => Err(DoitError::ApiError(api_err)),
                Ok((mut response, output_schema)) => {
                    // Pretty-print the response schema as JSON with nulls stripped.
                    let mut value = json::value::to_value(&output_schema).expect("serde to work");
                    remove_json_null_values(&mut value);
                    json::to_writer_pretty(&mut ostream, &value).unwrap();
                    ostream.flush().unwrap();
                    Ok(())
                }
            }
        }
    }
    /// CLI handler for `projects export`: assembles an
    /// `api::GoogleDatastoreAdminV1ExportEntitiesRequest` from `kv` arguments
    /// and issues `projects().export(...)`.
    ///
    /// Argument problems are accumulated in `err`; when `dry_run` is set the
    /// call is validated but never executed.
    async fn _projects_export(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
                                                    -> Result<(), DoitError> {
        let mut field_cursor = FieldCursor::default();
        // The request body is built as free-form JSON first, then deserialized
        // into the typed request struct once all fields are set.
        let mut object = json::value::Value::Object(Default::default());
        for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            let last_errc = err.issues.len();
            let (key, value) = parse_kv_arg(&*kvarg, err, false);
            let mut temp_cursor = field_cursor.clone();
            if let Err(field_err) = temp_cursor.set(&*key) {
                err.issues.push(field_err);
            }
            if value.is_none() {
                // A bare key only repositions the cursor; it is not an
                // assignment, so discard any error `set` recorded above.
                field_cursor = temp_cursor.clone();
                if err.issues.len() > last_errc {
                    err.issues.remove(last_errc);
                }
                continue;
            }
            // Map the CLI field path to its JSON wire name and expected type.
            let type_info: Option<(&'static str, JsonTypeInfo)> =
                match &temp_cursor.to_string()[..] {
                    "entity-filter.kinds" => Some(("entityFilter.kinds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })),
                    "entity-filter.namespace-ids" => Some(("entityFilter.namespaceIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })),
                    "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })),
                    "output-url-prefix" => Some(("outputUrlPrefix", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                    _ => {
                        // Unknown field: record a diagnostic with a suggestion.
                        let suggestion = FieldCursor::did_you_mean(key, &vec!["entity-filter", "kinds", "labels", "namespace-ids", "output-url-prefix"]);
                        err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
                        None
                    }
                };
            if let Some((field_cursor_str, type_info)) = type_info {
                FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
            }
        }
        // Fields were validated above, so this deserialization is expected to succeed.
        let mut request: api::GoogleDatastoreAdminV1ExportEntitiesRequest = json::value::from_value(object).unwrap();
        let mut call = self.hub.projects().export(request, opt.value_of("project-id").unwrap_or(""));
        // Apply any `-v key=value` pairs as standard (global) query parameters.
        for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            let (key, value) = parse_kv_arg(&*parg, err, false);
            match key {
                _ => {
                    let mut found = false;
                    for param in &self.gp {
                        if key == *param {
                            found = true;
                            // Translate the CLI spelling to the wire name when a mapping exists.
                            call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
                            break;
                        }
                    }
                    if !found {
                        err.issues.push(CLIError::UnknownParameter(key.to_string(),
                                                                  {let mut v = Vec::new();
                                                                           v.extend(self.gp.iter().map(|v|*v));
                                                                           v } ));
                    }
                }
            }
        }
        let protocol = CallType::Standard;
        if dry_run {
            Ok(())
        } else {
            assert!(err.issues.len() == 0);
            // Extra OAuth scopes may be supplied via the `url` argument.
            for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
                call = call.add_scope(scope);
            }
            let mut ostream = match writer_from_opts(opt.value_of("out")) {
                Ok(mut f) => f,
                Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
            };
            match match protocol {
                CallType::Standard => call.doit().await,
                _ => unreachable!()
            } {
                Err(api_err) => Err(DoitError::ApiError(api_err)),
                Ok((mut response, output_schema)) => {
                    // Pretty-print the response schema as JSON with nulls stripped.
                    let mut value = json::value::to_value(&output_schema).expect("serde to work");
                    remove_json_null_values(&mut value);
                    json::to_writer_pretty(&mut ostream, &value).unwrap();
                    ostream.flush().unwrap();
                    Ok(())
                }
            }
        }
    }
    /// CLI handler for `projects import`: assembles an
    /// `api::GoogleDatastoreAdminV1ImportEntitiesRequest` from `kv` arguments
    /// and issues `projects().import(...)`.
    ///
    /// Argument problems are accumulated in `err`; when `dry_run` is set the
    /// call is validated but never executed.
    async fn _projects_import(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
                                                    -> Result<(), DoitError> {
        let mut field_cursor = FieldCursor::default();
        // The request body is built as free-form JSON first, then deserialized
        // into the typed request struct once all fields are set.
        let mut object = json::value::Value::Object(Default::default());
        for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            let last_errc = err.issues.len();
            let (key, value) = parse_kv_arg(&*kvarg, err, false);
            let mut temp_cursor = field_cursor.clone();
            if let Err(field_err) = temp_cursor.set(&*key) {
                err.issues.push(field_err);
            }
            if value.is_none() {
                // A bare key only repositions the cursor; it is not an
                // assignment, so discard any error `set` recorded above.
                field_cursor = temp_cursor.clone();
                if err.issues.len() > last_errc {
                    err.issues.remove(last_errc);
                }
                continue;
            }
            // Map the CLI field path to its JSON wire name and expected type.
            let type_info: Option<(&'static str, JsonTypeInfo)> =
                match &temp_cursor.to_string()[..] {
                    "entity-filter.kinds" => Some(("entityFilter.kinds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })),
                    "entity-filter.namespace-ids" => Some(("entityFilter.namespaceIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })),
                    "input-url" => Some(("inputUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                    "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })),
                    _ => {
                        // Unknown field: record a diagnostic with a suggestion.
                        let suggestion = FieldCursor::did_you_mean(key, &vec!["entity-filter", "input-url", "kinds", "labels", "namespace-ids"]);
                        err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
                        None
                    }
                };
            if let Some((field_cursor_str, type_info)) = type_info {
                FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
            }
        }
        // Fields were validated above, so this deserialization is expected to succeed.
        let mut request: api::GoogleDatastoreAdminV1ImportEntitiesRequest = json::value::from_value(object).unwrap();
        let mut call = self.hub.projects().import(request, opt.value_of("project-id").unwrap_or(""));
        // Apply any `-v key=value` pairs as standard (global) query parameters.
        for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            let (key, value) = parse_kv_arg(&*parg, err, false);
            match key {
                _ => {
                    let mut found = false;
                    for param in &self.gp {
                        if key == *param {
                            found = true;
                            // Translate the CLI spelling to the wire name when a mapping exists.
                            call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
                            break;
                        }
                    }
                    if !found {
                        err.issues.push(CLIError::UnknownParameter(key.to_string(),
                                                                  {let mut v = Vec::new();
                                                                           v.extend(self.gp.iter().map(|v|*v));
                                                                           v } ));
                    }
                }
            }
        }
        let protocol = CallType::Standard;
        if dry_run {
            Ok(())
        } else {
            assert!(err.issues.len() == 0);
            // Extra OAuth scopes may be supplied via the `url` argument.
            for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
                call = call.add_scope(scope);
            }
            let mut ostream = match writer_from_opts(opt.value_of("out")) {
                Ok(mut f) => f,
                Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
            };
            match match protocol {
                CallType::Standard => call.doit().await,
                _ => unreachable!()
            } {
                Err(api_err) => Err(DoitError::ApiError(api_err)),
                Ok((mut response, output_schema)) => {
                    // Pretty-print the response schema as JSON with nulls stripped.
                    let mut value = json::value::to_value(&output_schema).expect("serde to work");
                    remove_json_null_values(&mut value);
                    json::to_writer_pretty(&mut ostream, &value).unwrap();
                    ostream.flush().unwrap();
                    Ok(())
                }
            }
        }
    }
    /// CLI handler for `projects indexes create`: assembles an
    /// `api::GoogleDatastoreAdminV1Index` from `kv` arguments and issues
    /// `projects().indexes_create(...)`.
    ///
    /// Argument problems are accumulated in `err`; when `dry_run` is set the
    /// call is validated but never executed.
    async fn _projects_indexes_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
                                                    -> Result<(), DoitError> {
        let mut field_cursor = FieldCursor::default();
        // The request body is built as free-form JSON first, then deserialized
        // into the typed request struct once all fields are set.
        let mut object = json::value::Value::Object(Default::default());
        for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            let last_errc = err.issues.len();
            let (key, value) = parse_kv_arg(&*kvarg, err, false);
            let mut temp_cursor = field_cursor.clone();
            if let Err(field_err) = temp_cursor.set(&*key) {
                err.issues.push(field_err);
            }
            if value.is_none() {
                // A bare key only repositions the cursor; it is not an
                // assignment, so discard any error `set` recorded above.
                field_cursor = temp_cursor.clone();
                if err.issues.len() > last_errc {
                    err.issues.remove(last_errc);
                }
                continue;
            }
            // Map the CLI field path to its JSON wire name and expected type.
            let type_info: Option<(&'static str, JsonTypeInfo)> =
                match &temp_cursor.to_string()[..] {
                    "ancestor" => Some(("ancestor", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                    "index-id" => Some(("indexId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                    "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                    "project-id" => Some(("projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                    "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                    _ => {
                        // Unknown field: record a diagnostic with a suggestion.
                        let suggestion = FieldCursor::did_you_mean(key, &vec!["ancestor", "index-id", "kind", "project-id", "state"]);
                        err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
                        None
                    }
                };
            if let Some((field_cursor_str, type_info)) = type_info {
                FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
            }
        }
        // Fields were validated above, so this deserialization is expected to succeed.
        let mut request: api::GoogleDatastoreAdminV1Index = json::value::from_value(object).unwrap();
        let mut call = self.hub.projects().indexes_create(request, opt.value_of("project-id").unwrap_or(""));
        // Apply any `-v key=value` pairs as standard (global) query parameters.
        for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            let (key, value) = parse_kv_arg(&*parg, err, false);
            match key {
                _ => {
                    let mut found = false;
                    for param in &self.gp {
                        if key == *param {
                            found = true;
                            // Translate the CLI spelling to the wire name when a mapping exists.
                            call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
                            break;
                        }
                    }
                    if !found {
                        err.issues.push(CLIError::UnknownParameter(key.to_string(),
                                                                  {let mut v = Vec::new();
                                                                           v.extend(self.gp.iter().map(|v|*v));
                                                                           v } ));
                    }
                }
            }
        }
        let protocol = CallType::Standard;
        if dry_run {
            Ok(())
        } else {
            assert!(err.issues.len() == 0);
            // Extra OAuth scopes may be supplied via the `url` argument.
            for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
                call = call.add_scope(scope);
            }
            let mut ostream = match writer_from_opts(opt.value_of("out")) {
                Ok(mut f) => f,
                Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
            };
            match match protocol {
                CallType::Standard => call.doit().await,
                _ => unreachable!()
            } {
                Err(api_err) => Err(DoitError::ApiError(api_err)),
                Ok((mut response, output_schema)) => {
                    // Pretty-print the response schema as JSON with nulls stripped.
                    let mut value = json::value::to_value(&output_schema).expect("serde to work");
                    remove_json_null_values(&mut value);
                    json::to_writer_pretty(&mut ostream, &value).unwrap();
                    ostream.flush().unwrap();
                    Ok(())
                }
            }
        }
    }
async fn _projects_indexes_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.projects().indexes_delete(opt.value_of("project-id").unwrap_or(""), opt.value_of("index-id").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _projects_indexes_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.projects().indexes_get(opt.value_of("project-id").unwrap_or(""), opt.value_of("index-id").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
    /// CLI handler for `projects indexes list`: lists indexes for the given
    /// `project-id`, supporting `filter`, `page-size` and `page-token`
    /// method parameters, and pretty-prints the response as JSON.
    ///
    /// Argument problems are accumulated in `err`; when `dry_run` is set the
    /// call is validated but never executed.
    async fn _projects_indexes_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
                                                    -> Result<(), DoitError> {
        let mut call = self.hub.projects().indexes_list(opt.value_of("project-id").unwrap_or(""));
        // `-v key=value` pairs are either method-specific parameters (handled
        // by the named arms below) or standard (global) query parameters.
        for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            let (key, value) = parse_kv_arg(&*parg, err, false);
            match key {
                "page-token" => {
                    call = call.page_token(value.unwrap_or(""));
                },
                "page-size" => {
                    // "-0" is the generated sentinel default; parse errors go into `err`.
                    call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer"));
                },
                "filter" => {
                    call = call.filter(value.unwrap_or(""));
                },
                _ => {
                    let mut found = false;
                    for param in &self.gp {
                        if key == *param {
                            found = true;
                            // Translate the CLI spelling to the wire name when a mapping exists.
                            call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
                            break;
                        }
                    }
                    if !found {
                        err.issues.push(CLIError::UnknownParameter(key.to_string(),
                                                                  {let mut v = Vec::new();
                                                                           v.extend(self.gp.iter().map(|v|*v));
                                                                           v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v));
                                                                           v } ));
                    }
                }
            }
        }
        let protocol = CallType::Standard;
        if dry_run {
            Ok(())
        } else {
            assert!(err.issues.len() == 0);
            // Extra OAuth scopes may be supplied via the `url` argument.
            for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
                call = call.add_scope(scope);
            }
            let mut ostream = match writer_from_opts(opt.value_of("out")) {
                Ok(mut f) => f,
                Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
            };
            match match protocol {
                CallType::Standard => call.doit().await,
                _ => unreachable!()
            } {
                Err(api_err) => Err(DoitError::ApiError(api_err)),
                Ok((mut response, output_schema)) => {
                    // Pretty-print the response schema as JSON with nulls stripped.
                    let mut value = json::value::to_value(&output_schema).expect("serde to work");
                    remove_json_null_values(&mut value);
                    json::to_writer_pretty(&mut ostream, &value).unwrap();
                    ostream.flush().unwrap();
                    Ok(())
                }
            }
        }
    }
    /// CLI handler for `projects lookup`: assembles an `api::LookupRequest`
    /// from `kv` arguments and issues `projects().lookup(...)`.
    ///
    /// Argument problems are accumulated in `err`; when `dry_run` is set the
    /// call is validated but never executed.
    async fn _projects_lookup(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
                                                    -> Result<(), DoitError> {
        let mut field_cursor = FieldCursor::default();
        // The request body is built as free-form JSON first, then deserialized
        // into the typed request struct once all fields are set.
        let mut object = json::value::Value::Object(Default::default())
        ;
        for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            let last_errc = err.issues.len();
            let (key, value) = parse_kv_arg(&*kvarg, err, false);
            let mut temp_cursor = field_cursor.clone();
            if let Err(field_err) = temp_cursor.set(&*key) {
                err.issues.push(field_err);
            }
            if value.is_none() {
                // A bare key only repositions the cursor; it is not an
                // assignment, so discard any error `set` recorded above.
                field_cursor = temp_cursor.clone();
                if err.issues.len() > last_errc {
                    err.issues.remove(last_errc);
                }
                continue;
            }
            // Map the CLI field path to its JSON wire name and expected type.
            let type_info: Option<(&'static str, JsonTypeInfo)> =
                match &temp_cursor.to_string()[..] {
                    "read-options.read-consistency" => Some(("readOptions.readConsistency", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                    "read-options.transaction" => Some(("readOptions.transaction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                    _ => {
                        // Unknown field: record a diagnostic with a suggestion.
                        let suggestion = FieldCursor::did_you_mean(key, &vec!["read-consistency", "read-options", "transaction"]);
                        err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
                        None
                    }
                };
            if let Some((field_cursor_str, type_info)) = type_info {
                FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
            }
        }
        // Fields were validated above, so this deserialization is expected to succeed.
        let mut request: api::LookupRequest = json::value::from_value(object).unwrap();
        let mut call = self.hub.projects().lookup(request, opt.value_of("project-id").unwrap_or(""));
        // Apply any `-v key=value` pairs as standard (global) query parameters.
        for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            let (key, value) = parse_kv_arg(&*parg, err, false);
            match key {
                _ => {
                    let mut found = false;
                    for param in &self.gp {
                        if key == *param {
                            found = true;
                            // Translate the CLI spelling to the wire name when a mapping exists.
                            call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
                            break;
                        }
                    }
                    if !found {
                        err.issues.push(CLIError::UnknownParameter(key.to_string(),
                                                                  {let mut v = Vec::new();
                                                                           v.extend(self.gp.iter().map(|v|*v));
                                                                           v } ));
                    }
                }
            }
        }
        let protocol = CallType::Standard;
        if dry_run {
            Ok(())
        } else {
            assert!(err.issues.len() == 0);
            // Extra OAuth scopes may be supplied via the `url` argument.
            for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
                call = call.add_scope(scope);
            }
            let mut ostream = match writer_from_opts(opt.value_of("out")) {
                Ok(mut f) => f,
                Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
            };
            match match protocol {
                CallType::Standard => call.doit().await,
                _ => unreachable!()
            } {
                Err(api_err) => Err(DoitError::ApiError(api_err)),
                Ok((mut response, output_schema)) => {
                    // Pretty-print the response schema as JSON with nulls stripped.
                    let mut value = json::value::to_value(&output_schema).expect("serde to work");
                    remove_json_null_values(&mut value);
                    json::to_writer_pretty(&mut ostream, &value).unwrap();
                    ostream.flush().unwrap();
                    Ok(())
                }
            }
        }
    }
async fn _projects_operations_cancel(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.projects().operations_cancel(opt.value_of("name").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _projects_operations_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.projects().operations_delete(opt.value_of("name").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _projects_operations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.projects().operations_get(opt.value_of("name").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
    /// CLI handler for `projects operations list`: lists operations matching
    /// the `name` argument, supporting `filter`, `page-size` and `page-token`
    /// method parameters, and pretty-prints the response as JSON.
    ///
    /// Argument problems are accumulated in `err`; when `dry_run` is set the
    /// call is validated but never executed.
    async fn _projects_operations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
                                                    -> Result<(), DoitError> {
        let mut call = self.hub.projects().operations_list(opt.value_of("name").unwrap_or(""))
        ;
        // `-v key=value` pairs are either method-specific parameters (handled
        // by the named arms below) or standard (global) query parameters.
        for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            let (key, value) = parse_kv_arg(&*parg, err, false);
            match key {
                "page-token" => {
                    call = call.page_token(value.unwrap_or(""));
                },
                "page-size" => {
                    // "-0" is the generated sentinel default; parse errors go into `err`.
                    call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer"));
                },
                "filter" => {
                    call = call.filter(value.unwrap_or(""));
                },
                _ => {
                    let mut found = false;
                    for param in &self.gp {
                        if key == *param {
                            found = true;
                            // Translate the CLI spelling to the wire name when a mapping exists.
                            call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
                            break;
                        }
                    }
                    if !found {
                        err.issues.push(CLIError::UnknownParameter(key.to_string(),
                                                                  {let mut v = Vec::new();
                                                                           v.extend(self.gp.iter().map(|v|*v));
                                                                           v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v));
                                                                           v } ));
                    }
                }
            }
        }
        let protocol = CallType::Standard;
        if dry_run {
            Ok(())
        } else {
            assert!(err.issues.len() == 0);
            // Extra OAuth scopes may be supplied via the `url` argument.
            for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
                call = call.add_scope(scope);
            }
            let mut ostream = match writer_from_opts(opt.value_of("out")) {
                Ok(mut f) => f,
                Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
            };
            match match protocol {
                CallType::Standard => call.doit().await,
                _ => unreachable!()
            } {
                Err(api_err) => Err(DoitError::ApiError(api_err)),
                Ok((mut response, output_schema)) => {
                    // Pretty-print the response schema as JSON with nulls stripped.
                    let mut value = json::value::to_value(&output_schema).expect("serde to work");
                    remove_json_null_values(&mut value);
                    json::to_writer_pretty(&mut ostream, &value).unwrap();
                    ostream.flush().unwrap();
                    Ok(())
                }
            }
        }
    }
    /// CLI handler for `projects reserve-ids`: assembles an
    /// `api::ReserveIdsRequest` from `kv` arguments and issues
    /// `projects().reserve_ids(...)`.
    ///
    /// Argument problems are accumulated in `err`; when `dry_run` is set the
    /// call is validated but never executed.
    async fn _projects_reserve_ids(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
                                                    -> Result<(), DoitError> {
        let mut field_cursor = FieldCursor::default();
        // The request body is built as free-form JSON first, then deserialized
        // into the typed request struct once all fields are set.
        let mut object = json::value::Value::Object(Default::default());
        for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            let last_errc = err.issues.len();
            let (key, value) = parse_kv_arg(&*kvarg, err, false);
            let mut temp_cursor = field_cursor.clone();
            if let Err(field_err) = temp_cursor.set(&*key) {
                err.issues.push(field_err);
            }
            if value.is_none() {
                // A bare key only repositions the cursor; it is not an
                // assignment, so discard any error `set` recorded above.
                field_cursor = temp_cursor.clone();
                if err.issues.len() > last_errc {
                    err.issues.remove(last_errc);
                }
                continue;
            }
            // Map the CLI field path to its JSON wire name and expected type.
            let type_info: Option<(&'static str, JsonTypeInfo)> =
                match &temp_cursor.to_string()[..] {
                    "database-id" => Some(("databaseId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                    _ => {
                        // Unknown field: record a diagnostic with a suggestion.
                        let suggestion = FieldCursor::did_you_mean(key, &vec!["database-id"]);
                        err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
                        None
                    }
                };
            if let Some((field_cursor_str, type_info)) = type_info {
                FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
            }
        }
        // Fields were validated above, so this deserialization is expected to succeed.
        let mut request: api::ReserveIdsRequest = json::value::from_value(object).unwrap();
        let mut call = self.hub.projects().reserve_ids(request, opt.value_of("project-id").unwrap_or(""));
        // Apply any `-v key=value` pairs as standard (global) query parameters.
        for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            let (key, value) = parse_kv_arg(&*parg, err, false);
            match key {
                _ => {
                    let mut found = false;
                    for param in &self.gp {
                        if key == *param {
                            found = true;
                            // Translate the CLI spelling to the wire name when a mapping exists.
                            call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
                            break;
                        }
                    }
                    if !found {
                        err.issues.push(CLIError::UnknownParameter(key.to_string(),
                                                                  {let mut v = Vec::new();
                                                                           v.extend(self.gp.iter().map(|v|*v));
                                                                           v } ));
                    }
                }
            }
        }
        let protocol = CallType::Standard;
        if dry_run {
            Ok(())
        } else {
            assert!(err.issues.len() == 0);
            // Extra OAuth scopes may be supplied via the `url` argument.
            for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
                call = call.add_scope(scope);
            }
            let mut ostream = match writer_from_opts(opt.value_of("out")) {
                Ok(mut f) => f,
                Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
            };
            match match protocol {
                CallType::Standard => call.doit().await,
                _ => unreachable!()
            } {
                Err(api_err) => Err(DoitError::ApiError(api_err)),
                Ok((mut response, output_schema)) => {
                    // Pretty-print the response schema as JSON with nulls stripped.
                    let mut value = json::value::to_value(&output_schema).expect("serde to work");
                    remove_json_null_values(&mut value);
                    json::to_writer_pretty(&mut ostream, &value).unwrap();
                    ostream.flush().unwrap();
                    Ok(())
                }
            }
        }
    }
    /// CLI handler for `projects rollback`: assembles an
    /// `api::RollbackRequest` from `kv` arguments and issues
    /// `projects().rollback(...)`.
    ///
    /// Argument problems are accumulated in `err`; when `dry_run` is set the
    /// call is validated but never executed.
    async fn _projects_rollback(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
                                                    -> Result<(), DoitError> {
        let mut field_cursor = FieldCursor::default();
        // The request body is built as free-form JSON first, then deserialized
        // into the typed request struct once all fields are set.
        let mut object = json::value::Value::Object(Default::default());
        for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            let last_errc = err.issues.len();
            let (key, value) = parse_kv_arg(&*kvarg, err, false);
            let mut temp_cursor = field_cursor.clone();
            if let Err(field_err) = temp_cursor.set(&*key) {
                err.issues.push(field_err);
            }
            if value.is_none() {
                // A bare key only repositions the cursor; it is not an
                // assignment, so discard any error `set` recorded above.
                field_cursor = temp_cursor.clone();
                if err.issues.len() > last_errc {
                    err.issues.remove(last_errc);
                }
                continue;
            }
            // Map the CLI field path to its JSON wire name and expected type.
            let type_info: Option<(&'static str, JsonTypeInfo)> =
                match &temp_cursor.to_string()[..] {
                    "transaction" => Some(("transaction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
                    _ => {
                        // Unknown field: record a diagnostic with a suggestion.
                        let suggestion = FieldCursor::did_you_mean(key, &vec!["transaction"]);
                        err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
                        None
                    }
                };
            if let Some((field_cursor_str, type_info)) = type_info {
                FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
            }
        }
        // Fields were validated above, so this deserialization is expected to succeed.
        let mut request: api::RollbackRequest = json::value::from_value(object).unwrap();
        let mut call = self.hub.projects().rollback(request, opt.value_of("project-id").unwrap_or(""));
        // Apply any `-v key=value` pairs as standard (global) query parameters.
        for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
            let (key, value) = parse_kv_arg(&*parg, err, false);
            match key {
                _ => {
                    let mut found = false;
                    for param in &self.gp {
                        if key == *param {
                            found = true;
                            // Translate the CLI spelling to the wire name when a mapping exists.
                            call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
                            break;
                        }
                    }
                    if !found {
                        err.issues.push(CLIError::UnknownParameter(key.to_string(),
                                                                  {let mut v = Vec::new();
                                                                           v.extend(self.gp.iter().map(|v|*v));
                                                                           v } ));
                    }
                }
            }
        }
        let protocol = CallType::Standard;
        if dry_run {
            Ok(())
        } else {
            assert!(err.issues.len() == 0);
            // Extra OAuth scopes may be supplied via the `url` argument.
            for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
                call = call.add_scope(scope);
            }
            let mut ostream = match writer_from_opts(opt.value_of("out")) {
                Ok(mut f) => f,
                Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
            };
            match match protocol {
                CallType::Standard => call.doit().await,
                _ => unreachable!()
            } {
                Err(api_err) => Err(DoitError::ApiError(api_err)),
                Ok((mut response, output_schema)) => {
                    // Pretty-print the response schema as JSON with nulls stripped.
                    let mut value = json::value::to_value(&output_schema).expect("serde to work");
                    remove_json_null_values(&mut value);
                    json::to_writer_pretty(&mut ostream, &value).unwrap();
                    ostream.flush().unwrap();
                    Ok(())
                }
            }
        }
    }
async fn _projects_run_query(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"gql-query.allow-literals" => Some(("gqlQuery.allowLiterals", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"gql-query.query-string" => Some(("gqlQuery.queryString", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"partition-id.namespace-id" => Some(("partitionId.namespaceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"partition-id.project-id" => Some(("partitionId.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"query.end-cursor" => Some(("query.endCursor", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"query.filter.composite-filter.op" => Some(("query.filter.compositeFilter.op", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"query.filter.property-filter.op" => Some(("query.filter.propertyFilter.op", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"query.filter.property-filter.property.name" => Some(("query.filter.propertyFilter.property.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"query.filter.property-filter.value.blob-value" => Some(("query.filter.propertyFilter.value.blobValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"query.filter.property-filter.value.boolean-value" => Some(("query.filter.propertyFilter.value.booleanValue", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"query.filter.property-filter.value.double-value" => Some(("query.filter.propertyFilter.value.doubleValue", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"query.filter.property-filter.value.entity-value.key.partition-id.namespace-id" => Some(("query.filter.propertyFilter.value.entityValue.key.partitionId.namespaceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"query.filter.property-filter.value.entity-value.key.partition-id.project-id" => Some(("query.filter.propertyFilter.value.entityValue.key.partitionId.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"query.filter.property-filter.value.exclude-from-indexes" => Some(("query.filter.propertyFilter.value.excludeFromIndexes", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"query.filter.property-filter.value.geo-point-value.latitude" => Some(("query.filter.propertyFilter.value.geoPointValue.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"query.filter.property-filter.value.geo-point-value.longitude" => Some(("query.filter.propertyFilter.value.geoPointValue.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })),
"query.filter.property-filter.value.integer-value" => Some(("query.filter.propertyFilter.value.integerValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"query.filter.property-filter.value.key-value.partition-id.namespace-id" => Some(("query.filter.propertyFilter.value.keyValue.partitionId.namespaceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"query.filter.property-filter.value.key-value.partition-id.project-id" => Some(("query.filter.propertyFilter.value.keyValue.partitionId.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"query.filter.property-filter.value.meaning" => Some(("query.filter.propertyFilter.value.meaning", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })),
"query.filter.property-filter.value.null-value" => Some(("query.filter.propertyFilter.value.nullValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"query.filter.property-filter.value.string-value" => Some(("query.filter.propertyFilter.value.stringValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"query.filter.property-filter.value.timestamp-value" => Some(("query.filter.propertyFilter.value.timestampValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"query.limit" => Some(("query.limit", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })),
"query.offset" => Some(("query.offset", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })),
"query.start-cursor" => Some(("query.startCursor", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"read-options.read-consistency" => Some(("readOptions.readConsistency", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"read-options.transaction" => Some(("readOptions.transaction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["allow-literals", "blob-value", "boolean-value", "composite-filter", "double-value", "end-cursor", "entity-value", "exclude-from-indexes", "filter", "geo-point-value", "gql-query", "integer-value", "key", "key-value", "latitude", "limit", "longitude", "meaning", "name", "namespace-id", "null-value", "offset", "op", "partition-id", "project-id", "property", "property-filter", "query", "query-string", "read-consistency", "read-options", "start-cursor", "string-value", "timestamp-value", "transaction", "value"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::RunQueryRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.projects().run_query(request, opt.value_of("project-id").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit().await,
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
async fn _doit(&self, dry_run: bool) -> Result<Result<(), DoitError>, Option<InvalidOptionsError>> {
let mut err = InvalidOptionsError::new();
let mut call_result: Result<(), DoitError> = Ok(());
let mut err_opt: Option<InvalidOptionsError> = None;
match self.opt.subcommand() {
("projects", Some(opt)) => {
match opt.subcommand() {
("allocate-ids", Some(opt)) => {
call_result = self._projects_allocate_ids(opt, dry_run, &mut err).await;
},
("begin-transaction", Some(opt)) => {
call_result = self._projects_begin_transaction(opt, dry_run, &mut err).await;
},
("commit", Some(opt)) => {
call_result = self._projects_commit(opt, dry_run, &mut err).await;
},
("export", Some(opt)) => {
call_result = self._projects_export(opt, dry_run, &mut err).await;
},
("import", Some(opt)) => {
call_result = self._projects_import(opt, dry_run, &mut err).await;
},
("indexes-create", Some(opt)) => {
call_result = self._projects_indexes_create(opt, dry_run, &mut err).await;
},
("indexes-delete", Some(opt)) => {
call_result = self._projects_indexes_delete(opt, dry_run, &mut err).await;
},
("indexes-get", Some(opt)) => {
call_result = self._projects_indexes_get(opt, dry_run, &mut err).await;
},
("indexes-list", Some(opt)) => {
call_result = self._projects_indexes_list(opt, dry_run, &mut err).await;
},
("lookup", Some(opt)) => {
call_result = self._projects_lookup(opt, dry_run, &mut err).await;
},
("operations-cancel", Some(opt)) => {
call_result = self._projects_operations_cancel(opt, dry_run, &mut err).await;
},
("operations-delete", Some(opt)) => {
call_result = self._projects_operations_delete(opt, dry_run, &mut err).await;
},
("operations-get", Some(opt)) => {
call_result = self._projects_operations_get(opt, dry_run, &mut err).await;
},
("operations-list", Some(opt)) => {
call_result = self._projects_operations_list(opt, dry_run, &mut err).await;
},
("reserve-ids", Some(opt)) => {
call_result = self._projects_reserve_ids(opt, dry_run, &mut err).await;
},
("rollback", Some(opt)) => {
call_result = self._projects_rollback(opt, dry_run, &mut err).await;
},
("run-query", Some(opt)) => {
call_result = self._projects_run_query(opt, dry_run, &mut err).await;
},
_ => {
err.issues.push(CLIError::MissingMethodError("projects".to_string()));
writeln!(io::stderr(), "{}\n", opt.usage()).ok();
}
}
},
_ => {
err.issues.push(CLIError::MissingCommandError);
writeln!(io::stderr(), "{}\n", self.opt.usage()).ok();
}
}
if dry_run {
if err.issues.len() > 0 {
err_opt = Some(err);
}
Err(err_opt)
} else {
Ok(call_result)
}
}
// Please note that this call will fail if any part of the opt can't be handled
async fn new(opt: ArgMatches<'n>) -> Result<Engine<'n>, InvalidOptionsError> {
let (config_dir, secret) = {
let config_dir = match client::assure_config_dir_exists(opt.value_of("folder").unwrap_or("~/.google-service-cli")) {
Err(e) => return Err(InvalidOptionsError::single(e, 3)),
Ok(p) => p,
};
match client::application_secret_from_directory(&config_dir, "datastore1-secret.json",
"{\"installed\":{\"auth_uri\":\"https://accounts.google.com/o/oauth2/auth\",\"client_secret\":\"hCsslbCUyfehWMmbkG8vTYxG\",\"token_uri\":\"https://accounts.google.com/o/oauth2/token\",\"client_email\":\"\",\"redirect_uris\":[\"urn:ietf:wg:oauth:2.0:oob\",\"oob\"],\"client_x509_cert_url\":\"\",\"client_id\":\"620010449518-9ngf7o4dhs0dka470npqvor6dc5lqb9b.apps.googleusercontent.com\",\"auth_provider_x509_cert_url\":\"https://www.googleapis.com/oauth2/v1/certs\"}}") {
Ok(secret) => (config_dir, secret),
Err(e) => return Err(InvalidOptionsError::single(e, 4))
}
};
let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
secret,
yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
).persist_tokens_to_disk(format!("{}/datastore1", config_dir)).build().await.unwrap();
let client = hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots());
let engine = Engine {
opt: opt,
hub: api::Datastore::new(client, auth),
gp: vec!["$-xgafv", "access-token", "alt", "callback", "fields", "key", "oauth-token", "pretty-print", "quota-user", "upload-type", "upload-protocol"],
gpm: vec![
("$-xgafv", "$.xgafv"),
("access-token", "access_token"),
("oauth-token", "oauth_token"),
("pretty-print", "prettyPrint"),
("quota-user", "quotaUser"),
("upload-type", "uploadType"),
("upload-protocol", "upload_protocol"),
]
};
match engine._doit(true).await {
Err(Some(err)) => Err(err),
Err(None) => Ok(engine),
Ok(_) => unreachable!(),
}
}
async fn doit(&self) -> Result<(), DoitError> {
match self._doit(false).await {
Ok(res) => res,
Err(_) => unreachable!(),
}
}
}
#[tokio::main]
async fn main() {
let mut exit_status = 0i32;
let arg_data = [
("projects", "methods: 'allocate-ids', 'begin-transaction', 'commit', 'export', 'import', 'indexes-create', 'indexes-delete', 'indexes-get', 'indexes-list', 'lookup', 'operations-cancel', 'operations-delete', 'operations-get', 'operations-list', 'reserve-ids', 'rollback' and 'run-query'", vec![
("allocate-ids",
Some(r##"Allocates IDs for the given keys, which is useful for referencing an entity before it is inserted."##),
"Details at http://byron.github.io/google-apis-rs/google_datastore1_cli/projects_allocate-ids",
vec![
(Some(r##"project-id"##),
None,
Some(r##"Required. The ID of the project against which to make the request."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("begin-transaction",
Some(r##"Begins a new transaction."##),
"Details at http://byron.github.io/google-apis-rs/google_datastore1_cli/projects_begin-transaction",
vec![
(Some(r##"project-id"##),
None,
Some(r##"Required. The ID of the project against which to make the request."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("commit",
Some(r##"Commits a transaction, optionally creating, deleting or modifying some entities."##),
"Details at http://byron.github.io/google-apis-rs/google_datastore1_cli/projects_commit",
vec![
(Some(r##"project-id"##),
None,
Some(r##"Required. The ID of the project against which to make the request."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("export",
Some(r##"Exports a copy of all or a subset of entities from Google Cloud Datastore to another storage system, such as Google Cloud Storage. Recent updates to entities may not be reflected in the export. The export occurs in the background and its progress can be monitored and managed via the Operation resource that is created. The output of an export may only be used once the associated operation is done. If an export operation is cancelled before completion it may leave partial data behind in Google Cloud Storage."##),
"Details at http://byron.github.io/google-apis-rs/google_datastore1_cli/projects_export",
vec![
(Some(r##"project-id"##),
None,
Some(r##"Required. Project ID against which to make the request."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("import",
Some(r##"Imports entities into Google Cloud Datastore. Existing entities with the same key are overwritten. The import occurs in the background and its progress can be monitored and managed via the Operation resource that is created. If an ImportEntities operation is cancelled, it is possible that a subset of the data has already been imported to Cloud Datastore."##),
"Details at http://byron.github.io/google-apis-rs/google_datastore1_cli/projects_import",
vec![
(Some(r##"project-id"##),
None,
Some(r##"Required. Project ID against which to make the request."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("indexes-create",
Some(r##"Creates the specified index. A newly created index's initial state is `CREATING`. On completion of the returned google.longrunning.Operation, the state will be `READY`. If the index already exists, the call will return an `ALREADY_EXISTS` status. During index creation, the process could result in an error, in which case the index will move to the `ERROR` state. The process can be recovered by fixing the data that caused the error, removing the index with delete, then re-creating the index with create. Indexes with a single property cannot be created."##),
"Details at http://byron.github.io/google-apis-rs/google_datastore1_cli/projects_indexes-create",
vec![
(Some(r##"project-id"##),
None,
Some(r##"Project ID against which to make the request."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("indexes-delete",
Some(r##"Deletes an existing index. An index can only be deleted if it is in a `READY` or `ERROR` state. On successful execution of the request, the index will be in a `DELETING` state. And on completion of the returned google.longrunning.Operation, the index will be removed. During index deletion, the process could result in an error, in which case the index will move to the `ERROR` state. The process can be recovered by fixing the data that caused the error, followed by calling delete again."##),
"Details at http://byron.github.io/google-apis-rs/google_datastore1_cli/projects_indexes-delete",
vec![
(Some(r##"project-id"##),
None,
Some(r##"Project ID against which to make the request."##),
Some(true),
Some(false)),
(Some(r##"index-id"##),
None,
Some(r##"The resource ID of the index to delete."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("indexes-get",
Some(r##"Gets an index."##),
"Details at http://byron.github.io/google-apis-rs/google_datastore1_cli/projects_indexes-get",
vec![
(Some(r##"project-id"##),
None,
Some(r##"Project ID against which to make the request."##),
Some(true),
Some(false)),
(Some(r##"index-id"##),
None,
Some(r##"The resource ID of the index to get."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("indexes-list",
Some(r##"Lists the indexes that match the specified filters. Datastore uses an eventually consistent query to fetch the list of indexes and may occasionally return stale results."##),
"Details at http://byron.github.io/google-apis-rs/google_datastore1_cli/projects_indexes-list",
vec![
(Some(r##"project-id"##),
None,
Some(r##"Project ID against which to make the request."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("lookup",
Some(r##"Looks up entities by key."##),
"Details at http://byron.github.io/google-apis-rs/google_datastore1_cli/projects_lookup",
vec![
(Some(r##"project-id"##),
None,
Some(r##"Required. The ID of the project against which to make the request."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("operations-cancel",
Some(r##"Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`."##),
"Details at http://byron.github.io/google-apis-rs/google_datastore1_cli/projects_operations-cancel",
vec![
(Some(r##"name"##),
None,
Some(r##"The name of the operation resource to be cancelled."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("operations-delete",
Some(r##"Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`."##),
"Details at http://byron.github.io/google-apis-rs/google_datastore1_cli/projects_operations-delete",
vec![
(Some(r##"name"##),
None,
Some(r##"The name of the operation resource to be deleted."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("operations-get",
Some(r##"Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service."##),
"Details at http://byron.github.io/google-apis-rs/google_datastore1_cli/projects_operations-get",
vec![
(Some(r##"name"##),
None,
Some(r##"The name of the operation resource."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("operations-list",
Some(r##"Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id."##),
"Details at http://byron.github.io/google-apis-rs/google_datastore1_cli/projects_operations-list",
vec![
(Some(r##"name"##),
None,
Some(r##"The name of the operation's parent resource."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("reserve-ids",
Some(r##"Prevents the supplied keys' IDs from being auto-allocated by Cloud Datastore."##),
"Details at http://byron.github.io/google-apis-rs/google_datastore1_cli/projects_reserve-ids",
vec![
(Some(r##"project-id"##),
None,
Some(r##"Required. The ID of the project against which to make the request."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("rollback",
Some(r##"Rolls back a transaction."##),
"Details at http://byron.github.io/google-apis-rs/google_datastore1_cli/projects_rollback",
vec![
(Some(r##"project-id"##),
None,
Some(r##"Required. The ID of the project against which to make the request."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("run-query",
Some(r##"Queries for entities."##),
"Details at http://byron.github.io/google-apis-rs/google_datastore1_cli/projects_run-query",
vec![
(Some(r##"project-id"##),
None,
Some(r##"Required. The ID of the project against which to make the request."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
]),
];
let mut app = App::new("datastore1")
.author("Sebastian Thiel <byronimo@gmail.com>")
.version("2.0.5+20210317")
.about("Accesses the schemaless NoSQL database to provide fully managed, robust, scalable storage for your application. ")
.after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_datastore1_cli")
.arg(Arg::with_name("url")
.long("scope")
.help("Specify the authentication a method should be executed in. Each scope requires the user to grant this application permission to use it.If unset, it defaults to the shortest scope url for a particular method.")
.multiple(true)
.takes_value(true))
.arg(Arg::with_name("folder")
.long("config-dir")
.help("A directory into which we will store our persistent data. Defaults to a user-writable directory that we will create during the first invocation.[default: ~/.google-service-cli")
.multiple(false)
.takes_value(true))
.arg(Arg::with_name("debug")
.long("debug")
.help("Debug print all errors")
.multiple(false)
.takes_value(false));
for &(main_command_name, about, ref subcommands) in arg_data.iter() {
let mut mcmd = SubCommand::with_name(main_command_name).about(about);
for &(sub_command_name, ref desc, url_info, ref args) in subcommands {
let mut scmd = SubCommand::with_name(sub_command_name);
if let &Some(desc) = desc {
scmd = scmd.about(desc);
}
scmd = scmd.after_help(url_info);
for &(ref arg_name, ref flag, ref desc, ref required, ref multi) in args {
let arg_name_str =
match (arg_name, flag) {
(&Some(an), _ ) => an,
(_ , &Some(f)) => f,
_ => unreachable!(),
};
let mut arg = Arg::with_name(arg_name_str)
.empty_values(false);
if let &Some(short_flag) = flag {
arg = arg.short(short_flag);
}
if let &Some(desc) = desc {
arg = arg.help(desc);
}
if arg_name.is_some() && flag.is_some() {
arg = arg.takes_value(true);
}
if let &Some(required) = required {
arg = arg.required(required);
}
if let &Some(multi) = multi {
arg = arg.multiple(multi);
}
scmd = scmd.arg(arg);
}
mcmd = mcmd.subcommand(scmd);
}
app = app.subcommand(mcmd);
}
let matches = app.get_matches();
let debug = matches.is_present("debug");
match Engine::new(matches).await {
Err(err) => {
exit_status = err.exit_code;
writeln!(io::stderr(), "{}", err).ok();
},
Ok(engine) => {
if let Err(doit_err) = engine.doit().await {
exit_status = 1;
match doit_err {
DoitError::IoError(path, err) => {
writeln!(io::stderr(), "Failed to open output file '{}': {}", path, err).ok();
},
DoitError::ApiError(err) => {
if debug {
writeln!(io::stderr(), "{:#?}", err).ok();
} else {
writeln!(io::stderr(), "{}", err).ok();
}
}
}
}
}
}
std::process::exit(exit_status);
}
| 51.948319 | 639 | 0.460047 |
cceb4c11ef7e565d2a302a86be9a2b005e27f5d5 | 2,926 | // Copyright 2020 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use grin_core as core;
use grin_store as store;
use grin_util as util;
use crate::core::global;
use crate::core::ser::{self, Readable, Reader, Writeable, Writer};
use std::fs;
const WRITE_CHUNK_SIZE: usize = 20;
const TEST_ALLOC_SIZE: usize = store::lmdb::ALLOC_CHUNK_SIZE_DEFAULT / 8 / WRITE_CHUNK_SIZE;
#[derive(Clone)]
struct PhatChunkStruct {
phatness: u64,
}
impl PhatChunkStruct {
/// create
pub fn new() -> PhatChunkStruct {
PhatChunkStruct { phatness: 0 }
}
}
impl Readable for PhatChunkStruct {
fn read<R: Reader>(reader: &mut R) -> Result<PhatChunkStruct, ser::Error> {
let mut retval = PhatChunkStruct::new();
for _ in 0..TEST_ALLOC_SIZE {
retval.phatness = reader.read_u64()?;
}
Ok(retval)
}
}
impl Writeable for PhatChunkStruct {
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ser::Error> {
// write many times
for _ in 0..TEST_ALLOC_SIZE {
writer.write_u64(self.phatness)?;
}
Ok(())
}
}
fn clean_output_dir(test_dir: &str) {
let _ = fs::remove_dir_all(test_dir);
}
fn setup(test_dir: &str) {
global::set_local_chain_type(global::ChainTypes::Mainnet);
util::init_test_logger();
clean_output_dir(test_dir);
}
#[test]
fn lmdb_allocate() -> Result<(), store::Error> {
	let test_dir = "test_output/lmdb_allocate";
	setup(test_dir);
	// Allocate more than the initial chunk, ensuring
	// the DB resizes underneath
	{
		let store = store::Store::new(test_dir, Some("test1"), None, None)?;
		// Each put is committed in its own batch so the map grows
		// incrementally rather than in one large write.
		for i in 0..WRITE_CHUNK_SIZE * 2 {
			println!("Allocating chunk: {}", i);
			let chunk = PhatChunkStruct::new();
			let key_val = format!("phat_chunk_set_1_{}", i);
			let batch = store.batch()?;
			let key = store::to_key(b'P', &key_val);
			batch.put_ser(&key, &chunk)?;
			batch.commit()?;
		}
	}
	println!("***********************************");
	println!("***************NEXT*****************");
	println!("***********************************");
	// Open env again and keep adding
	// (verifies a resized environment reopens cleanly and can keep growing).
	{
		let store = store::Store::new(test_dir, Some("test1"), None, None)?;
		for i in 0..WRITE_CHUNK_SIZE * 2 {
			println!("Allocating chunk: {}", i);
			let chunk = PhatChunkStruct::new();
			let key_val = format!("phat_chunk_set_2_{}", i);
			let batch = store.batch()?;
			let key = store::to_key(b'P', &key_val);
			batch.put_ser(&key, &chunk)?;
			batch.commit()?;
		}
	}
	Ok(())
}
| 27.603774 | 92 | 0.656869 |
870fbbf53717ad81dd8c191cdd0a6c5e23c5ecf6 | 844 | mod list;
mod display;
pub use display::*;
pub use cli::*;
mod cli {
    use super::*;
    use list::ListSpusOpt;
    use list::process_list_spus;
    use structopt::StructOpt;
    use crate::COMMAND_TEMPLATE;
    use crate::error::CliError;
    use crate::Terminal;
    // Top-level `spu` subcommand variants.
    #[derive(Debug, StructOpt)]
    pub enum SpuOpt {
        /// List all SPUs known by this cluster (managed AND custom)
        #[structopt(
            name = "list",
            template = COMMAND_TEMPLATE,
        )]
        List(ListSpusOpt),
    }
    // Dispatch a parsed `spu` subcommand to its handler, writing output
    // through the provided terminal. Always returns an empty string on
    // success; handlers print their own output.
    pub(crate) async fn process_spu<O>(
        out: std::sync::Arc<O>,
        spu_opt: SpuOpt,
    ) -> Result<String, CliError>
    where
        O: Terminal,
    {
        match spu_opt {
            SpuOpt::List(spu_opt) => process_list_spus(out, spu_opt).await?,
        }
        Ok("".to_string())
    }
}
| 20.095238 | 76 | 0.558057 |
efbd22bc7d5a0de1cc64d22aba1fe38905f83053 | 1,145 | use serde::Deserialize;
use wechaty_puppet::error::PuppetError;
// Shape of the JSON payload returned by the Chatie endpoint resolution
// service.
#[derive(Debug, Deserialize)]
struct Endpoint {
    ip: String,
    // A port of 0 is how the service signals an invalid token (see
    // `discover` below).
    port: usize,
}
const WECHATY_ENDPOINT_RESOLUTION_SERVICE_URI: &'static str = "https://api.chatie.io/v0/hosties/";
const ENDPOINT_SERVICE_ERROR: &'static str = "Endpoint service error";
/// Resolve a puppet service token into a gRPC endpoint URI.
///
/// Any network or decode failure maps to `PuppetError::Network`; a
/// successfully decoded endpoint with port 0 means the token is invalid.
pub async fn discover(token: String) -> Result<String, PuppetError> {
    let service_error = || PuppetError::Network(ENDPOINT_SERVICE_ERROR.to_owned());
    let url = format!("{}{}", WECHATY_ENDPOINT_RESOLUTION_SERVICE_URI, token);
    let response = reqwest::get(&url).await.map_err(|_| service_error())?;
    let endpoint: Endpoint = response.json().await.map_err(|_| service_error())?;
    if endpoint.port == 0 {
        Err(PuppetError::InvalidToken)
    } else {
        Ok(format!("grpc://{}:{}", endpoint.ip, endpoint.port))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Smoke test: hits the live resolution service with a dummy token and
    // prints the outcome. No assertion — it is network-dependent.
    #[actix_rt::test]
    async fn can_discover() {
        println!("{:?}", discover("123".to_owned()).await);
    }
}
| 30.131579 | 98 | 0.58952 |
d57fcdc75b5874d1fe82e96a6027339dcd5ed975 | 4,389 | // Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use crate::fidl_hanging_get_responder;
use crate::fidl_process;
use crate::base::{SettingInfo, SettingType};
use crate::fidl_processor::settings::RequestContext;
use crate::handler::base::Request;
use crate::setup::types::{
ConfigurationInterfaceFlags, SetConfigurationInterfacesParams, SetupInfo,
};
use fidl_fuchsia_settings::{Error, SetupMarker, SetupRequest, SetupSettings, SetupWatchResponder};
fidl_hanging_get_responder!(SetupMarker, SetupSettings, SetupWatchResponder);
// Extract the Setup payload from the generic SettingInfo envelope.
impl From<SettingInfo> for SetupSettings {
    fn from(response: SettingInfo) -> Self {
        if let SettingInfo::Setup(info) = response {
            return SetupSettings::from(info);
        }
        // Receiving any non-Setup variant here is a programming error.
        panic!("incorrect value sent");
    }
}
// FIDL bitflags -> internal flags, mapping each known bit individually.
impl From<fidl_fuchsia_settings::ConfigurationInterfaces> for ConfigurationInterfaceFlags {
    fn from(interfaces: fidl_fuchsia_settings::ConfigurationInterfaces) -> Self {
        let mut flags = ConfigurationInterfaceFlags::empty();
        if interfaces.intersects(fidl_fuchsia_settings::ConfigurationInterfaces::Ethernet) {
            flags |= ConfigurationInterfaceFlags::ETHERNET;
        }
        if interfaces.intersects(fidl_fuchsia_settings::ConfigurationInterfaces::Wifi) {
            flags |= ConfigurationInterfaceFlags::WIFI;
        }
        flags
    }
}
// Internal flags -> FIDL bitflags (inverse of the conversion above).
impl From<ConfigurationInterfaceFlags> for fidl_fuchsia_settings::ConfigurationInterfaces {
    fn from(flags: ConfigurationInterfaceFlags) -> Self {
        let mut interfaces = fidl_fuchsia_settings::ConfigurationInterfaces::empty();
        if flags.intersects(ConfigurationInterfaceFlags::ETHERNET) {
            interfaces |= fidl_fuchsia_settings::ConfigurationInterfaces::Ethernet;
        }
        if flags.intersects(ConfigurationInterfaceFlags::WIFI) {
            interfaces |= fidl_fuchsia_settings::ConfigurationInterfaces::Wifi;
        }
        interfaces
    }
}
// Convert the internal SetupInfo into its FIDL representation. The
// interface field is omitted entirely when no flags are set.
impl From<SetupInfo> for SetupSettings {
    fn from(info: SetupInfo) -> Self {
        let mut settings = SetupSettings::EMPTY;
        let interfaces =
            fidl_fuchsia_settings::ConfigurationInterfaces::from(info.configuration_interfaces);
        if !interfaces.is_empty() {
            settings.enabled_configuration_interfaces = Some(interfaces);
        }
        settings
    }
}
// Translate FIDL settings into a handler Request. Only a change to the
// configuration interfaces is eligible; anything else is rejected.
fn to_request(value: SetupSettings, should_reboot: bool) -> Result<Request, &'static str> {
    value
        .enabled_configuration_interfaces
        .map(|interfaces| {
            Request::SetConfigurationInterfaces(SetConfigurationInterfacesParams {
                config_interfaces_flags: ConfigurationInterfaceFlags::from(interfaces),
                should_reboot,
            })
        })
        .ok_or("Ineligible change")
}
// Translate the FIDL settings into a handler request and forward it,
// collapsing every failure into the FIDL `Error::Failed` code.
async fn set(
    context: RequestContext<SetupSettings, SetupWatchResponder>,
    settings: SetupSettings,
    should_reboot: bool,
) -> Result<(), Error> {
    let request =
        to_request(settings, should_reboot).map_err(|_| fidl_fuchsia_settings::Error::Failed)?;
    context
        .request(SettingType::Setup, request)
        .await
        .map_err(|_| fidl_fuchsia_settings::Error::Failed)?;
    Ok(())
}
// Handle a single FIDL request; returns Ok(Some(req)) for requests this
// processor does not handle so they can be passed along.
async fn process_request(
    context: RequestContext<SetupSettings, SetupWatchResponder>,
    req: SetupRequest,
) -> Result<Option<SetupRequest>, anyhow::Error> {
    // Support future expansion of FIDL
    #[allow(unreachable_patterns)]
    match req {
        // TODO(fxb/79644): Clean up Set interface.
        SetupRequest::Set { settings, responder } => {
            // The legacy Set call always reboots after applying the change.
            match set(context, settings, true).await {
                Ok(_) => responder.send(&mut Ok(())).ok(),
                Err(e) => responder.send(&mut Err(e)).ok(),
            };
        }
        SetupRequest::Set2 { settings, reboot_device, responder } => {
            match set(context, settings, reboot_device).await {
                Ok(_) => responder.send(&mut Ok(())).ok(),
                Err(e) => responder.send(&mut Err(e)).ok(),
            };
        }
        SetupRequest::Watch { responder } => {
            context.watch(responder, true).await;
        }
        _ => return Ok(Some(req)),
    }
    Ok(None)
}
fidl_process!(Setup, SettingType::Setup, process_request);
| 34.023256 | 98 | 0.672363 |
8ad712030b16fe4d26c7fd1fb23a8243eefae749 | 29,358 | //! Fully serializable (ACID) multi-`Tree` transactions
//!
//! # Examples
//! ```
//! # use sled::{transaction::TransactionResult, Config};
//! # fn main() -> TransactionResult<()> {
//!
//! let config = Config::new().temporary(true);
//! let db1 = config.open().unwrap();
//! let db = db1.open_tree(b"a").unwrap();
//!
//! // Use write-only transactions as a writebatch:
//! db.transaction(|db| {
//! db.insert(b"k1", b"cats")?;
//! db.insert(b"k2", b"dogs")?;
//! Ok(())
//! })?;
//!
//! // Atomically swap two items:
//! db.transaction(|db| {
//! let v1_option = db.remove(b"k1")?;
//! let v1 = v1_option.unwrap();
//! let v2_option = db.remove(b"k2")?;
//! let v2 = v2_option.unwrap();
//!
//! db.insert(b"k1", v2)?;
//! db.insert(b"k2", v1)?;
//!
//! Ok(())
//! })?;
//!
//! assert_eq!(&db.get(b"k1")?.unwrap(), b"dogs");
//! assert_eq!(&db.get(b"k2")?.unwrap(), b"cats");
//! # Ok(())
//! # }
//! ```
//!
//! Transactions also work on tuples of `Tree`s,
//! preserving serializable ACID semantics!
//! In this example, we treat two trees like a
//! work queue, atomically apply updates to
//! data and move them from the unprocessed `Tree`
//! to the processed `Tree`.
//!
//! ```
//! # use sled::{transaction::{TransactionResult, Transactional}, Config};
//! # fn main() -> TransactionResult<()> {
//!
//! let config = Config::new().temporary(true);
//! let db = config.open().unwrap();
//!
//! let unprocessed = db.open_tree(b"unprocessed items").unwrap();
//! let processed = db.open_tree(b"processed items").unwrap();
//!
//! // An update somehow gets into the tree, which we
//! // later trigger the atomic processing of.
//! unprocessed.insert(b"k3", b"ligers").unwrap();
//!
//! // Atomically process the new item and move it
//! // between `Tree`s.
//! (&unprocessed, &processed)
//! .transaction(|(unprocessed, processed)| {
//! let unprocessed_item = unprocessed.remove(b"k3")?.unwrap();
//! let mut processed_item = b"yappin' ".to_vec();
//! processed_item.extend_from_slice(&unprocessed_item);
//! processed.insert(b"k3", processed_item)?;
//! Ok(())
//! })?;
//!
//! assert_eq!(unprocessed.get(b"k3").unwrap(), None);
//! assert_eq!(&processed.get(b"k3").unwrap().unwrap(), b"yappin' ligers");
//! # Ok(())
//! # }
//! ```
#![allow(clippy::module_name_repetitions)]
use std::{cell::RefCell, fmt, rc::Rc};
#[cfg(not(feature = "testing"))]
use std::collections::HashMap as Map;
// we avoid HashMap while testing because
// it makes tests non-deterministic
#[cfg(feature = "testing")]
use std::collections::BTreeMap as Map;
use crate::{
concurrency_control, pin, Batch, Error, Guard, IVec, Protector, Result,
Tree,
};
/// A transaction that will
/// be applied atomically to the
/// Tree.
#[derive(Clone)]
pub struct TransactionalTree {
    pub(super) tree: Tree,
    // Staged writes: key -> Some(value) for inserts, None for removals.
    pub(super) writes: Rc<RefCell<Map<IVec, Option<IVec>>>>,
    // Values read from the backing tree during this transaction attempt.
    pub(super) read_cache: Rc<RefCell<Map<IVec, Option<IVec>>>>,
}
/// An error type that is returned from the closure
/// passed to the `transaction` method.
#[derive(Debug, Clone, PartialEq)]
pub enum UnabortableTransactionError {
    /// An internal conflict has occurred and the `transaction` method will
    /// retry the passed-in closure until it succeeds. This should never be
    /// returned directly from the user's closure, as it will create an
    /// infinite loop that never returns. This is why it is hidden.
    Conflict,
    /// A serious underlying storage issue has occurred that requires
    /// attention from an operator or a remediating system, such as
    /// corruption.
    Storage(Error),
}
impl fmt::Display for UnabortableTransactionError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        use UnabortableTransactionError::*;
        match self {
            Conflict => write!(f, "Conflict during transaction"),
            Storage(e) => e.fmt(f),
        }
    }
}
impl std::error::Error for UnabortableTransactionError {
    // Only the `Storage` variant wraps an underlying error source.
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match self {
            UnabortableTransactionError::Storage(ref e) => Some(e),
            _ => None,
        }
    }
}
// Internal result alias for operations that can only fail with conflict
// or storage errors (never a user abort).
pub(crate) type UnabortableTransactionResult<T> =
    std::result::Result<T, UnabortableTransactionError>;
impl From<Error> for UnabortableTransactionError {
    fn from(error: Error) -> Self {
        UnabortableTransactionError::Storage(error)
    }
}
// Widening conversion: an unabortable error maps onto the matching
// variant of the richer, abortable error type.
impl<E> From<UnabortableTransactionError> for ConflictableTransactionError<E> {
    fn from(error: UnabortableTransactionError) -> Self {
        match error {
            UnabortableTransactionError::Conflict => {
                ConflictableTransactionError::Conflict
            }
            UnabortableTransactionError::Storage(error) => {
                ConflictableTransactionError::Storage(error)
            }
        }
    }
}
/// An error type that is returned from the closure
/// passed to the `transaction` method.
#[derive(Debug, Clone, PartialEq)]
pub enum ConflictableTransactionError<T = Error> {
    /// A user-provided error type that indicates the transaction should abort.
    /// This is passed into the return value of `transaction` as a direct Err
    /// instance, rather than forcing users to interact with this enum
    /// directly.
    Abort(T),
    #[doc(hidden)]
    /// An internal conflict has occurred and the `transaction` method will
    /// retry the passed-in closure until it succeeds. This should never be
    /// returned directly from the user's closure, as it will create an
    /// infinite loop that never returns. This is why it is hidden.
    Conflict,
    /// A serious underlying storage issue has occurred that requires
    /// attention from an operator or a remediating system, such as
    /// corruption.
    Storage(Error),
}
impl<E: fmt::Display> fmt::Display for ConflictableTransactionError<E> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        use ConflictableTransactionError::*;
        match self {
            Abort(e) => e.fmt(f),
            Conflict => write!(f, "Conflict during transaction"),
            Storage(e) => e.fmt(f),
        }
    }
}
impl<E: std::error::Error> std::error::Error
    for ConflictableTransactionError<E>
{
    // Only `Storage` carries a chained source; `Abort` holds a user value
    // that is surfaced directly rather than as a source.
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match self {
            ConflictableTransactionError::Storage(ref e) => Some(e),
            _ => None,
        }
    }
}
/// An error type that is returned from the closure
/// passed to the `transaction` method.
#[derive(Debug, Clone, PartialEq)]
pub enum TransactionError<T = Error> {
    /// A user-provided error type that indicates the transaction should abort.
    /// This is passed into the return value of `transaction` as a direct Err
    /// instance, rather than forcing users to interact with this enum
    /// directly.
    Abort(T),
    /// A serious underlying storage issue has occurred that requires
    /// attention from an operator or a remediating system, such as
    /// corruption.
    Storage(Error),
}
impl<E: fmt::Display> fmt::Display for TransactionError<E> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        use TransactionError::*;
        match self {
            Abort(e) => e.fmt(f),
            Storage(e) => e.fmt(f),
        }
    }
}
impl<E: std::error::Error> std::error::Error for TransactionError<E> {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match self {
            TransactionError::Storage(ref e) => Some(e),
            _ => None,
        }
    }
}
/// A transaction-related `Result` which is used for transparently handling
/// concurrency-related conflicts when running transaction closures.
pub type ConflictableTransactionResult<T, E = ()> =
    std::result::Result<T, ConflictableTransactionError<E>>;
impl<T> From<Error> for ConflictableTransactionError<T> {
    fn from(error: Error) -> Self {
        ConflictableTransactionError::Storage(error)
    }
}
/// A transaction-related `Result` which is used for returning the
/// final result of a transaction after potentially running the provided
/// closure several times due to underlying conflicts.
pub type TransactionResult<T, E = ()> =
    std::result::Result<T, TransactionError<E>>;
impl<T> From<Error> for TransactionError<T> {
    fn from(error: Error) -> Self {
        TransactionError::Storage(error)
    }
}
impl TransactionalTree {
    /// Set a key to a new value
    pub fn insert<K, V>(
        &self,
        key: K,
        value: V,
    ) -> UnabortableTransactionResult<Option<IVec>>
    where
        IVec: From<K> + From<V>,
        K: AsRef<[u8]>,
    {
        // Read first so the previous value (staged or persisted) can be
        // returned, then stage the write locally.
        let old = self.get(key.as_ref())?;
        let mut writes = self.writes.borrow_mut();
        let _last_write =
            writes.insert(IVec::from(key), Some(IVec::from(value)));
        Ok(old)
    }
    /// Remove a key
    pub fn remove<K>(
        &self,
        key: K,
    ) -> UnabortableTransactionResult<Option<IVec>>
    where
        IVec: From<K>,
        K: AsRef<[u8]>,
    {
        let old = self.get(key.as_ref());
        // A `None` value stages a deletion for commit time.
        let mut writes = self.writes.borrow_mut();
        let _last_write = writes.insert(IVec::from(key), None);
        old
    }
    /// Get the value associated with a key
    pub fn get<K: AsRef<[u8]>>(
        &self,
        key: K,
    ) -> UnabortableTransactionResult<Option<IVec>> {
        // Staged writes shadow everything else.
        let writes = self.writes.borrow();
        if let Some(first_try) = writes.get(key.as_ref()) {
            return Ok(first_try.clone());
        }
        // Then consult values already read during this transaction.
        let mut reads = self.read_cache.borrow_mut();
        if let Some(second_try) = reads.get(key.as_ref()) {
            return Ok(second_try.clone());
        }
        // not found in a cache, need to hit the backing db
        let mut guard = pin();
        let get = loop {
            // Retry until the lock-free read completes without an
            // internal Err from `get_inner`.
            if let Ok(get) = self.tree.get_inner(key.as_ref(), &mut guard)? {
                break get;
            }
        };
        let last = reads.insert(key.as_ref().into(), get.clone());
        assert!(last.is_none());
        Ok(get)
    }
    /// Atomically apply multiple inserts and removals.
    pub fn apply_batch(
        &self,
        batch: &Batch,
    ) -> UnabortableTransactionResult<()> {
        for (k, v_opt) in &batch.writes {
            if let Some(v) = v_opt {
                let _old = self.insert(k, v)?;
            } else {
                let _old = self.remove(k)?;
            }
        }
        Ok(())
    }
    // NOTE(review): currently unreachable in practice — `stage` always
    // returns Ok and `validate` always returns true, so the retry paths
    // that would call this are dead; calling it would panic.
    fn unstage(&self) {
        unimplemented!()
    }
    // Placeholder validation: always succeeds. Serializability currently
    // relies on the global write lock taken in `TransactionalTrees::stage`.
    const fn validate(&self) -> bool {
        true
    }
    // Flush all staged writes into the backing tree, retrying each
    // lock-free insert until it lands.
    fn commit(&self) -> Result<()> {
        let writes = self.writes.borrow();
        let mut guard = pin();
        for (k, v_opt) in &*writes {
            while self.tree.insert_inner(k, v_opt.clone(), &mut guard)?.is_err()
            {
            }
        }
        Ok(())
    }
    // Fresh transactional view over a tree with empty write/read sets.
    fn from_tree(tree: &Tree) -> Self {
        Self {
            tree: tree.clone(),
            writes: Default::default(),
            read_cache: Default::default(),
        }
    }
}
/// A type which allows for pluggable transactional capabilities
pub struct TransactionalTrees {
    inner: Vec<TransactionalTree>,
}
impl TransactionalTrees {
    // Acquire the global serialization lock; the returned guard must be
    // held for the whole transaction attempt.
    fn stage(&self) -> UnabortableTransactionResult<Protector<'_>> {
        Ok(concurrency_control::write())
    }
    fn unstage(&self) {
        for tree in &self.inner {
            tree.unstage();
        }
    }
    fn validate(&self) -> bool {
        for tree in &self.inner {
            if !tree.validate() {
                return false;
            }
        }
        true
    }
    // Commit every tree's staged writes under a single log peg so the
    // multi-tree batch is recovered atomically.
    fn commit(&self, guard: &Guard) -> Result<()> {
        let peg = self.inner[0].tree.context.pin_log(guard)?;
        for tree in &self.inner {
            tree.commit()?;
        }
        // when the peg drops, it ensures all updates
        // written to the log since its creation are
        // recovered atomically
        peg.seal_batch()
    }
}
/// Convenience constructor for aborting a transaction with a
/// user-supplied error, wrapping it in
/// `ConflictableTransactionError::Abort`.
pub fn abort<A, T>(t: T) -> ConflictableTransactionResult<A, T> {
    let wrapped = ConflictableTransactionError::Abort(t);
    Err(wrapped)
}
/// A type that may be transacted on in sled transactions.
pub trait Transactional<E = ()> {
    /// An internal reference to an internal proxy type that
    /// mediates transactional reads and writes.
    type View;
    /// An internal function for creating a top-level
    /// transactional structure.
    fn make_overlay(&self) -> TransactionalTrees;
    /// An internal function for viewing the transactional
    /// subcomponents based on the top-level transactional
    /// structure.
    fn view_overlay(overlay: &TransactionalTrees) -> Self::View;
    /// Runs a transaction, possibly retrying the passed-in closure if
    /// a concurrent conflict is detected that would cause a violation
    /// of serializability. This is the only trait method that
    /// you're most likely to use directly.
    fn transaction<F, A>(&self, f: F) -> TransactionResult<A, E>
    where
        F: Fn(&Self::View) -> ConflictableTransactionResult<A, E>,
    {
        // Retry loop: each iteration builds a fresh overlay, runs the user
        // closure against it, and commits only if validation passes.
        loop {
            let tt = self.make_overlay();
            let view = Self::view_overlay(&tt);
            // NB locks must exist until this function returns.
            let _locks = if let Ok(l) = tt.stage() {
                l
            } else {
                tt.unstage();
                continue;
            };
            let ret = f(&view);
            if !tt.validate() {
                tt.unstage();
                continue;
            }
            match ret {
                Ok(r) => {
                    let guard = pin();
                    tt.commit(&guard)?;
                    return Ok(r);
                }
                Err(ConflictableTransactionError::Abort(e)) => {
                    return Err(TransactionError::Abort(e));
                }
                // Conflicts are invisible to the caller: just retry.
                Err(ConflictableTransactionError::Conflict) => continue,
                Err(ConflictableTransactionError::Storage(other)) => {
                    return Err(TransactionError::Storage(other));
                }
            }
        }
    }
}
// `Transactional` is implemented for trees, references to trees, and
// slices of trees so mixed call sites all work uniformly.
impl<E> Transactional<E> for &Tree {
    type View = TransactionalTree;
    fn make_overlay(&self) -> TransactionalTrees {
        TransactionalTrees { inner: vec![TransactionalTree::from_tree(self)] }
    }
    fn view_overlay(overlay: &TransactionalTrees) -> Self::View {
        overlay.inner[0].clone()
    }
}
impl<E> Transactional<E> for &&Tree {
    type View = TransactionalTree;
    fn make_overlay(&self) -> TransactionalTrees {
        TransactionalTrees { inner: vec![TransactionalTree::from_tree(*self)] }
    }
    fn view_overlay(overlay: &TransactionalTrees) -> Self::View {
        overlay.inner[0].clone()
    }
}
impl<E> Transactional<E> for Tree {
    type View = TransactionalTree;
    fn make_overlay(&self) -> TransactionalTrees {
        TransactionalTrees { inner: vec![TransactionalTree::from_tree(self)] }
    }
    fn view_overlay(overlay: &TransactionalTrees) -> Self::View {
        overlay.inner[0].clone()
    }
}
impl<E> Transactional<E> for [Tree] {
    type View = Vec<TransactionalTree>;
    fn make_overlay(&self) -> TransactionalTrees {
        TransactionalTrees {
            inner: self
                .iter()
                .map(|t| TransactionalTree::from_tree(t))
                .collect(),
        }
    }
    fn view_overlay(overlay: &TransactionalTrees) -> Self::View {
        overlay.inner.clone()
    }
}
impl<E> Transactional<E> for [&Tree] {
    type View = Vec<TransactionalTree>;
    fn make_overlay(&self) -> TransactionalTrees {
        TransactionalTrees {
            inner: self
                .iter()
                .map(|&t| TransactionalTree::from_tree(t))
                .collect(),
        }
    }
    fn view_overlay(overlay: &TransactionalTrees) -> Self::View {
        overlay.inner.clone()
    }
}
// Expands to a tuple type repeating `$t` once per literal in the list,
// e.g. `repeat_type!(u8, (0, 1, 2))` => `(u8, u8, u8)`. The `IMPL` rules
// are internal accumulator steps.
macro_rules! repeat_type {
    ($t:ty, ($literal:literal)) => {
        ($t,)
    };
    ($t:ty, ($($literals:literal),+)) => {
        repeat_type!(IMPL $t, (), ($($literals),*))
    };
    (IMPL $t:ty, (), ($first:literal, $($rest:literal),*)) => {
        repeat_type!(IMPL $t, ($t), ($($rest),*))
    };
    (IMPL $t:ty, ($($partial:tt),*), ($first:literal, $($rest:literal),*)) => {
        repeat_type!(IMPL $t, ($t, $($partial),*), ($($rest),*))
    };
    (IMPL $t:ty, ($($partial:tt),*), ($last:literal)) => {
        ($($partial),*, $t)
    };
}
// Implements `Transactional` for a tuple of `&Tree`s, one tuple position
// per index literal supplied; the literals double as tuple accessors.
macro_rules! impl_transactional_tuple_trees {
    ($($indices:tt),+) => {
        impl<E> Transactional<E> for repeat_type!(&Tree, ($($indices),+)) {
            type View = repeat_type!(TransactionalTree, ($($indices),+));
            fn make_overlay(&self) -> TransactionalTrees {
                TransactionalTrees {
                    inner: vec![
                        $(
                            TransactionalTree::from_tree(self.$indices)
                        ),+
                    ],
                }
            }
            fn view_overlay(overlay: &TransactionalTrees) -> Self::View {
                (
                    $(
                        overlay.inner[$indices].clone()
                    ),+,
                )
            }
        }
    };
}
// Tuple impls for arities 1 through 14; the invocations that follow
// extend the same pattern to larger tuples.
impl_transactional_tuple_trees!(0);
impl_transactional_tuple_trees!(0, 1);
impl_transactional_tuple_trees!(0, 1, 2);
impl_transactional_tuple_trees!(0, 1, 2, 3);
impl_transactional_tuple_trees!(0, 1, 2, 3, 4);
impl_transactional_tuple_trees!(0, 1, 2, 3, 4, 5);
impl_transactional_tuple_trees!(0, 1, 2, 3, 4, 5, 6);
impl_transactional_tuple_trees!(0, 1, 2, 3, 4, 5, 6, 7);
impl_transactional_tuple_trees!(0, 1, 2, 3, 4, 5, 6, 7, 8);
impl_transactional_tuple_trees!(0, 1, 2, 3, 4, 5, 6, 7, 8, 9);
impl_transactional_tuple_trees!(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
impl_transactional_tuple_trees!(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11);
impl_transactional_tuple_trees!(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12);
impl_transactional_tuple_trees!(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
59
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
59, 60
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
59, 60, 61
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
59, 60, 61, 62
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
59, 60, 61, 62, 63
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
59, 60, 61, 62, 63, 64
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
59, 60, 61, 62, 63, 64, 65
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
59, 60, 61, 62, 63, 64, 65, 66
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
59, 60, 61, 62, 63, 64, 65, 66, 67
);
impl_transactional_tuple_trees!(
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
59, 60, 61, 62, 63, 64, 65, 66, 67, 68
);
| 34.991657 | 80 | 0.553682 |
1e590b6e89760c6067b1d8e76ac778904ecaf73a | 30,849 | // Std
#[cfg(all(feature = "debug", any(target_os = "windows", target_arch = "wasm32")))]
use osstringext::OsStrExt3;
use std::cell::Cell;
use std::ffi::{OsStr, OsString};
use std::io::{self, BufWriter, Write};
use std::iter::Peekable;
use std::mem;
#[cfg(all(
feature = "debug",
not(any(target_os = "windows", target_arch = "wasm32"))
))]
use std::os::unix::ffi::OsStrExt;
// Internal
use crate::build::app::Propagation;
use crate::build::AppSettings as AS;
use crate::build::{App, Arg, ArgSettings};
use crate::output::{usage, Help};
use crate::parse::errors::Error as ClapError;
use crate::parse::errors::ErrorKind;
use crate::parse::errors::Result as ClapResult;
use crate::parse::features::suggestions;
use crate::parse::Validator;
use crate::parse::{KeyType, HyphenStyle, RawValue, RawArg, RawLong, RawOpt, SeenArg, ArgMatcher, SubCommand, ArgPrediction, ValueState};
use crate::util::{hash, OsStrExt2};
use crate::INTERNAL_ERROR_MSG;
use crate::INVALID_UTF8;
// Pre-computed hash of the literal "help", so the auto-generated `help`
// subcommand can be recognized by id without re-hashing on every comparison.
const HELP_HASH: u64 = hash("help");
/// The parser's state-machine context while walking the raw command-line
/// arguments.
///
/// `parse()` loops on this value: each variant names the kind of token the
/// parser expects next (or the terminal condition it has just detected).
///
/// Fixes relative to the previous revision:
/// - removed the `'help` lifetime parameter, which was used by no variant
///   (E0392) — every use site in this file already writes bare `ParseCtx`;
/// - added the variants `PosAcceptsVals`, `ExternalSubCmd`, `UnknownLong`,
///   `UnknownArgError` and `InvalidUtf8`, which `parse()` matches on but
///   which were missing from the declaration.
///   (NOTE(review): some call sites still spell `PosAcceptsVal` /
///   `UknownArgError`; those spellings need to be unified with the variants
///   declared here.)
#[derive(Debug, PartialEq, Copy, Clone)]
#[doc(hidden)]
pub enum ParseCtx {
    /// Starting state; nothing decided about the current argument yet.
    Initial,
    /// The argument with this id is still accepting values.
    ArgAcceptsVals(u64),
    /// The positional argument at this index is still accepting values.
    PosByIndexAcceptsVals(usize),
    /// The positional pointed to by the positional counter accepts values.
    PosAcceptsVals,
    /// A subcommand with this id was recognized.
    SubCmd(u64),
    /// Finished with the current raw argument; advance to the next one.
    NextArg,
    /// Token starts with `-` but may be a negative number (e.g. `-3`).
    MaybeNegNum,
    /// Token starts with `-` but may actually be a value.
    MaybeHyphenValue,
    /// Unrecognized short flag.
    UnknownShort,
    /// Unrecognized long flag.
    UnknownLong,
    /// Unrecognized positional argument.
    UnknownPositional,
    /// Token may belong to an external (non-declared) subcommand.
    ExternalSubCmd,
    /// `--` was seen; everything that follows is treated as a value.
    TrailingValues,
    /// Either low-index-multiple positionals or a missing positional applies.
    LowIndexMultsOrMissingPos,
    /// Terminal: report an unknown-argument error.
    UnknownArgError,
    /// Terminal: input was not valid UTF-8 under strict UTF-8 handling.
    InvalidUtf8,
}
// The parser always starts in the `Initial` state.
// NOTE(review): the enum is declared above as `ParseCtx<'help>`; this impl
// names it without generics and will not compile until the unused lifetime
// parameter is removed from the declaration (or added here).
impl Default for ParseCtx {
    fn default() -> Self {
        ParseCtx::Initial
    }
}
// Mutable state carried through one `parse()` run.
// NOTE(review): the `'help` lifetime parameter is used by no field (E0392);
// it is presumably reserved for a future `app: &'help App` field — confirm
// before removing. Several methods below also reference a `self.app` field
// that does not exist on this struct.
#[derive(Default)]
#[doc(hidden)]
pub struct Parser<'help> {
    seen: Vec<SeenArg>,   // args seen so far in this run (for overrides/validation)
    cur_idx: Cell<usize>, // running argument index; each value gets its own index
    num_pos: usize,       // number of positional args defined on the app
    cur_pos: usize,       // index of the positional currently being filled
    cache: u64,           // id of the most recently matched arg (presumably — confirm)
    trailing_vals: bool,  // true once `--` has been seen
}
// Parsing Methods
impl<'help> Parser<'help> {
// The actual parsing function
fn parse<I, T>(
&mut self,
it: &mut Peekable<I>,
app: &mut App,
) -> ClapResult<()>
where
I: Iterator<Item = T>,
T: AsRef<OsStr>,
{
debugln!("Parser::get_matches_with;");
let mut matcher = ArgMatcher::new();
let mut ctx = ParseCtx::Initial;
'outer: for arg in it {
debugln!("Parser::get_matches_with: Begin parsing '{:?}'", arg_os);
let arg_os = arg.as_ref();
let raw = arg_os.into();
// First make sure this isn't coming after a `--` only
ctx = if self.trailing_vals {
self.handle_low_index_multiples(&mut ctx, is_second_to_last)
} else {
self.try_parse_arg(&mut matcher, ctx, &raw)?
};
'inner: loop {
match ctx {
ParseCtx::LowIndexMultsOrMissingPos => {
if it.peek().is_some() && (self.cur_pos < self.num_pos) {
ctx = ParseCtx::PosAcceptsVal;
} else {
continue 'outer;
}
},
ParseCtx::PosAcceptsVals => {
if let Some(p) = app.args.get_by_index(self.cur_pos) {
ctx = self.parse_positional(app, p, &mut matcher, raw.into())?;
// Only increment the positional counter if it doesn't allow multiples
if ctx == ParseCtx::NextArg {
self.cur_pos += 1;
}
} else {
// Unknown Positional Argument
ctx = ParseCtx::ExternalSubCmd;
}
}
ParseCtx::ExternalSubCmd => {
if !self.is_set(AS::AllowExternalSubcommands) {
return Err(self.find_unknown_arg_error(app, raw.0));
}
// Get external subcommand name
// @TODO @perf @p3 probably don't need to convert to a String anymore
// unless checking strict UTF-8
let sc_name = match raw.0.to_str() {
Some(s) => s.to_string(),
None => {
if !self.is_set(AS::StrictUtf8) {
ctx = ParseCtx::InvalidUtf8;
continue;
}
arg_os.to_string_lossy().into_owned()
}
};
// Collect the external subcommand args
let mut sc_m = ArgMatcher::new();
while let Some(v) = it.next() {
let a = v.into();
if a.to_str().is_none() && !self.is_set(AS::StrictUtf8) {
ctx = ParseCtx::InvalidUtf8;
}
sc_m.add_val_to(0, &a);
}
matcher.subcommand(SubCommand {
id: hash(&*sc_name),
matches: sc_m.into(),
});
break 'outer;
},
ParseCtx::UnknownLong => {
if self.is_set(AS::AllowLeadingHyphen) {
ctx = ParseCtx::MaybeHyphenValue;
} else {
return self.did_you_mean_error(raw.0.to_str().expect(INVALID_UTF8), &mut matcher);
}
}
ParseCtx::UnknownShort => {
if self.is_set(AS::AllowLeadingHyphen) {
ctx = ParseCtx::MaybeHyphenValue;
continue;
} else if self.is_set(AS::AllowNegativeNumbers) {
ctx = ParseCtx::MaybeNegNum;
continue;
} else {
ctx = ParseCtx::UnknownArgError;
}
},
ParseCtx::MaybeNegNum => {
if raw.0.to_string_lossy().parse::<i64>().is_ok()
|| raw.0.to_string_lossy().parse::<f64>().is_ok() {
ctx = ParseCtx::MaybeHyphenValue;
}
ctx = ParseCtx::UnknownArgError;
},
ParseCtx::MaybeHyphenValue => {
if app.find(self.cache).map(|x| x.accepts_value()).unwrap_or(false) {
ctx = ParseCtx::ArgAcceptsVals(self.cache);
continue;
}
ctx = ParseCtx::UnknownArgError;
},
ParseCtx::ArgAcceptsVals(id) => {
let opt = app.args.get_by_id(id).unwrap();
ctx = self.add_val_to_arg(opt, raw.0.into(), &mut matcher)?;
if let ParseCtx::ArgAcceptsVals(id) = ctx {
continue 'outer;
}
},
ParseCtx::SubCmd(id) => {
if id == HELP_HASH && !self.is_set(AS::NoAutoHelp) {
self.parse_help_subcommand(it)?;
}
break;
},
ParseCtx::UknownArgError => {
return Err(ClapError::unknown_argument(
&*raw.0.to_string_lossy(),
"",
&*usage::with_title(app, &[]),
app.color(),
));
},
ParseCtx::InvalidUtf8 => {
return Err(ClapError::invalid_utf8(
&*Usage::new(app).create_usage_with_title(&[]),
app.color(),
));
}
}
}
self.maybe_misspelled_subcmd(app, raw.0)?;
}
self.check_subcommand(it, app, &mut matcher, pos_sc)?;
let overridden = matcher.remove_overrides(self, &*self.seen);
Validator::new(self, overridden).validate(&subcmd_name, &mut matcher)
}
    // After the argument loop ends: descend into the seen subcommand, or
    // enforce `SubcommandRequired` / `SubcommandRequiredElseHelp`.
    //
    // NOTE(review): `Clapresult` is a typo for `ClapResult`, and the generic
    // parameters `I`/`T` used by `Peekable<I>` are not declared on this
    // function — it will not compile as-is. `&app.name.into()` also borrows a
    // temporary.
    fn check_subcommand(&mut self, it: &mut Peekable<I>, app: &mut App, mut matcher: &mut ArgMatcher, subcmd_name: Option<u64>) -> Clapresult<()> {
        if let Some(pos_sc_name) = subcmd_name {
            let sc= app.subcommands
                .iter_mut()
                .find(|x| x.id == *pos_sc_name)
                .expect(INTERNAL_ERROR_MSG);
            self.parse_subcommand(sc, matcher, it)?;
        } else if self.is_set(AS::SubcommandRequired) {
            let bn = app.bin_name.as_ref().unwrap_or(&app.name.into());
            return Err(ClapError::missing_subcommand(
                bn,
                &Usage::new(self).create_usage_with_title(&[]),
                app.color(),
            ));
        } else if self.is_set(AS::SubcommandRequiredElseHelp) {
            debugln!("Parser::get_matches_with: SubcommandRequiredElseHelp=true");
            // Render the full help text and return it as the error message.
            let mut out = vec![];
            self.write_help_err(&mut out)?;
            return Err(ClapError {
                message: String::from_utf8_lossy(&*out).into_owned(),
                kind: ErrorKind::MissingArgumentOrSubcommand,
                info: None,
            });
        }
        Ok(())
    }
    // Decide how to treat a positional after `--`: low-index multiples,
    // allow-missing-positional, or skip straight to a `.last(true)` positional.
    //
    // NOTE(review): the `is_second_to_last` parameter is immediately shadowed
    // by a recomputation reading `self.pos_counter`, a field that does not
    // exist on `Parser` (presumably `self.cur_pos`). The first `if` branch
    // evaluates to `ParseCtx` while the `else if` branch evaluates to `()`,
    // and `PosAcceptsVal` lacks the trailing `s` used by the declared variant;
    // this will not compile as-is.
    fn handle_low_index_multiples(&mut self, ctx: &mut ParseCtx, is_second_to_last: bool) -> ParseCtx {
        let is_second_to_last = self.pos_counter == (self.num_pos - 1);
        let low_index_mults = self.is_set(AS::LowIndexMultiplePositional)
            && is_second_to_last;
        let missing_pos = self.is_set(AS::AllowMissingPositional)
            && is_second_to_last
            && !self.is_set(AS::TrailingValues);
        if low_index_mults || missing_pos {
            ParseCtx::LowIndexMultsOrMissingPos
        } else if self.skip_to_last_positional() {
            // Came to -- and one postional has .last(true) set, so we go immediately
            // to the last (highest index) positional
            debugln!("Parser::parse: .last(true) and --, setting last pos");
            self.cur_pos = self.num_pos;
        }
        ParseCtx::PosAcceptsVal
    }
    // Classify one raw argument (short key, long key, possible value, or the
    // `--` trailing-value signal) and dispatch to the matching parse routine.
    //
    // NOTE(review): `possible_subcommand` returns a bare `ParseCtx`, not
    // `ClapResult<ParseCtx>` — that arm needs an `Ok(..)` wrapper (or the
    // callee's signature changed) before this compiles.
    fn try_parse_arg(&mut self, mut matcher: &mut ArgMatcher, mut ctx: ParseCtx, raw: &RawArg) -> ClapResult<ParseCtx> {
        match raw.make_prediction(ctx) {
            ArgPrediction::ShortKey(raw) => {
                self.parse_short(&mut matcher, raw.into())
            },
            ArgPrediction::LongKey(raw) => {
                self.parse_long(&mut matcher, raw.into())
            },
            ArgPrediction::PossibleValue(raw) => {
                // Could be a subcommand name or a plain positional value.
                self.possible_subcommand(&raw)
            },
            ArgPrediction::TrailingValueSignal => {
                // A bare `--`: everything after this point is a value.
                self.trailing_vals = true;
                Ok(ParseCtx::NextArg)
            },
            ArgPrediction::Value => {unimplemented!()},
        }
    }
    // Build the most helpful error for an argument that matched nothing:
    // a plain unknown-argument error, or (when subcommand inference applies)
    // an invalid/unrecognized-subcommand error with a did-you-mean candidate.
    fn find_unknown_arg_error(&self, app: &App, raw: &OsStr) -> ClapError {
        if !((app.is_set(AS::AllowLeadingHyphen) || app.is_set(AS::AllowNegativeNumbers)) && raw.starts_with(b"-")) && !self.is_set(AS::InferSubcommands) {
            return ClapError::unknown_argument(
                &*raw.to_string_lossy(),
                "",
                &*usage::with_title(app, &[]),
                app.color(),
            );
        } else if !app.has_args() || self.is_set(AS::InferSubcommands) && app.has_subcommands() {
            // Prefer a subcommand-flavoured error when a close name exists.
            if let Some(cdate) = suggestions::did_you_mean(&*raw.to_string_lossy(), sc_names!(app)) {
                return ClapError::invalid_subcommand(
                    raw.to_string_lossy().into_owned(),
                    cdate,
                    app.bin_name.as_ref().unwrap_or(&app.name.into()),
                    &*usage::with_title(app, &[]),
                    app.color(),
                );
            } else {
                return ClapError::unrecognized_subcommand(
                    raw.to_string_lossy().into_owned(),
                    app.bin_name.as_ref().unwrap_or(&app.name.into()),
                    app.color(),
                );
            }
        } else {
            return ClapError::unknown_argument(
                &*raw.to_string_lossy(),
                "",
                &*usage::with_title(app, &[]),
                app.color(),
            );
        }
    }
    // If `arg_os` looks like a misspelled subcommand name (and the settings
    // don't make that interpretation invalid), return an invalid-subcommand
    // error with a did-you-mean suggestion; otherwise do nothing.
    fn maybe_misspelled_subcmd(&self, app: &App, arg_os: &OsStr) -> ClapResult<()> {
        // @TODO @p2 Use Flags directly for BitOr instead of this...
        if !(app.is_set(AS::ArgsNegateSubcommands) && app.is_set(AS::ValidArgFound)
            || app.is_set(AS::AllowExternalSubcommands)
            || app.is_set(AS::InferSubcommands))
        {
            if let Some(cdate) =
                suggestions::did_you_mean(arg_os.to_string_lossy(), sc_names!(app))
            {
                return Err(ClapError::invalid_subcommand(
                    arg_os.to_string_lossy().into_owned(),
                    cdate,
                    app.bin_name.as_ref().unwrap_or(&app.name.into()),
                    &*usage::with_title(app, &[]),
                    app.color(),
                ));
            }
        }
        Ok(())
    }
    // Assign `raw` as a value of the positional arg `p`, recording the
    // occurrence on the arg and on every group containing it.
    //
    // NOTE(review): `self.app` is used below although `Parser` has no `app`
    // field (the `app` parameter was presumably intended), and
    // `self.cur_pos == self.cur_pos` is always true — this looks like it
    // should be `self.cur_pos == self.num_pos`. Will not compile as-is.
    fn parse_positional(&mut self, app: &mut App, p: &Arg, matcher: &mut ArgMatcher, raw: RawValue) -> ClapResult<ParseCtx> {
        let no_trailing_vals = !self.is_set(AS::TrailingValues);
        // A `.last(true)` positional may only be reached after `--`.
        if p.is_set(ArgSettings::Last) && no_trailing_vals {
            return Err(ClapError::unknown_argument(
                &*raw.raw.to_string_lossy(),
                "",
                &*usage::with_title(app, &[]),
                self.app.color(),
            ));
        }
        if no_trailing_vals && (self.is_set(AS::TrailingVarArg) && self.cur_pos == self.cur_pos) {
            self.app.settings.set(AS::TrailingValues);
        }
        self.seen.push(SeenArg { id: p.id, key: KeyType::Index});
        let ret = self.add_val_to_arg(p, &raw.raw.into(), matcher)?;
        matcher.inc_occurrence_of(p.name);
        for grp in groups_for_arg!(self.app, &p.name) {
            matcher.inc_occurrence_of(&*grp);
        }
        self.app.settings.set(AS::ValidArgFound);
        Ok(ret)
    }
    // Checks if the arg matches a subcommand name, or any of it's aliases (if defined)
    //
    // NOTE(review): `return None;` in a function returning `ParseCtx`,
    // `ParseCtx::SubCmd(&sc.id)` passing a reference where `u64` is expected,
    // and the `ParseResult::` paths at the bottom are leftovers from a
    // rename — this will not compile as-is.
    fn possible_subcommand(&self, raw: &RawArg) -> ParseCtx {
        debugln!("Parser::possible_subcommand: arg={:?}", raw);
        // Byte-wise prefix check between a candidate name and the raw arg.
        fn starts(h: &str, n: &OsStr) -> bool {
            #[cfg(target_os = "windows")]
            use osstringext::OsStrExt3;
            #[cfg(not(target_os = "windows"))]
            use std::os::unix::ffi::OsStrExt;
            let n_bytes = n.as_bytes();
            let h_bytes = OsStr::new(h).as_bytes();
            h_bytes.starts_with(n_bytes)
        }
        if self.is_set(AS::ArgsNegateSubcommands) && self.is_set(AS::ValidArgFound) {
            return None;
        }
        if !self.is_set(AS::InferSubcommands) {
            // Exact (hashed) name match only.
            let pos_id = hash(raw.0);
            if let Some(sc) = self.app.subcommands.iter().find(|x| x.id == pos_id) {
                return ParseCtx::SubCmd(&sc.id);
            }
        } else {
            // With inference enabled, a unique prefix match counts as a match.
            let v = sc_names!(self.app)
                .filter(|s| starts(s, &*raw.0))
                .collect::<Vec<_>>();
            if v.len() == 1 {
                return ParseResult::SubCmd(hash(v[0]));
            }
        }
        ParseResult::PosByIndex(self.cur_pos)
    }
    // Hand the remaining arguments to the subcommand's own `Parser` and
    // record the resulting matches under the subcommand's id.
    //
    // NOTE(review): the parameter is named `s` but the body consistently uses
    // `sc` — one of them needs renaming; this will not compile as-is.
    fn parse_subcommand<I, T>(
        &mut self,
        s: &mut App,
        matcher: &mut ArgMatcher,
        it: &mut Peekable<I>,
    ) -> ClapResult<()>
    where
        I: Iterator<Item = T>,
        T: Into<OsString> + Clone,
    {
        use std::fmt::Write;
        debugln!("Parser::parse_subcommand;");
        // Ensure all args are built and ready to parse
        sc._build(Propagation::NextLevel);
        debugln!("Parser::parse_subcommand: About to parse sc={}", sc.name);
        let mut p = Parser::new(sc);
        let mut sc_matcher = ArgMatcher::new();
        p.get_matches_with(&mut sc_matcher, it)?;
        matcher.subcommand(SubCommand {
            id: sc.id,
            matches: sc_matcher.into(),
        });
        Ok(())
    }
    // Retrieves the names of all args the user has supplied thus far, except required ones
    // because those will be listed in self.required
    // NOTE(review): the two lines above look stale (they describe a different
    // routine); this function only maps `--help`/`--version` to their errors.
    fn check_for_help_and_version_long(&self, arg: &OsStr) -> ClapResult<()> {
        debugln!("Parser::check_for_help_and_version_long;");
        // Needs to use app.settings.is_set instead of just is_set() because is_set() checks
        // both global and local settings, we only want to check local
        if arg == "help" && !self.app.settings.is_set(AS::NoAutoHelp) {
            return Err(self.help_err(true));
        }
        if arg == "version" && !self.app.settings.is_set(AS::NoAutoVersion) {
            return Err(self.version_err(true));
        }
        Ok(())
    }
    // Short-flag counterpart of `check_for_help_and_version_long`: if `arg`
    // matches the short of the auto `help`/`version` args, return the
    // corresponding help/version "error" (which prints and exits upstream).
    fn check_for_help_and_version_char(&self, arg: char) -> ClapResult<()> {
        debugln!("Parser::check_for_help_and_version_char;");
        // Needs to use app.settings.is_set instead of just is_set() because is_set() checks
        // both global and local settings, we only want to check local
        if let Some(help) = self.app.find(hash("help")) {
            if let Some(h) = help.short {
                if arg == h && !self.app.settings.is_set(AS::NoAutoHelp) {
                    return Err(self.help_err(false));
                }
            }
        }
        if let Some(version) = self.app.find(hash("version")) {
            if let Some(v) = version.short {
                if arg == v && !self.app.settings.is_set(AS::NoAutoVersion) {
                    return Err(self.version_err(false));
                }
            }
        }
        Ok(())
    }
    // Parse a `--long[=value]` argument: resolve it by its long name, then
    // either consume a value (options) or record a flag occurrence.
    //
    // NOTE(review): `app` is not in scope here (`self.parse_opt(app, ...)`),
    // `self.app` is not a field of `Parser`, and the `ParseResult::` returns
    // are leftovers from the `ParseCtx` rename; will not compile as-is.
    fn parse_long(
        &mut self,
        matcher: &mut ArgMatcher,
        raw_long: RawLong,
    ) -> ClapResult<ParseCtx> {
        // maybe here lifetime should be 'a
        debugln!("Parser::parse_long;");
        // Update the curent index
        self.cur_idx.set(self.cur_idx.get() + 1);
        if let Some(arg) = self.app.args.get_by_long_with_hyphen(raw_long.key_as_bytes()) {
            self.app.settings.set(AS::ValidArgFound);
            self.seen.push(SeenArg::new(arg.id, KeyType::Long));
            if arg.is_set(ArgSettings::TakesValue) {
                return self.parse_opt(app, raw_long.into(), arg, matcher);
            }
            // Check for help/version *after* opt so we avoid needlessly checking every time
            self.check_for_help_and_version_long(raw_long.key())?;
            self.parse_flag(arg.id, matcher)?;
            return Ok(ParseResult::NextArg);
        }
        Ok(ParseResult::MaybeHyphenValue)
    }
    // Parse a short-flag cluster such as `-abcd` or `-oval`, one char at a
    // time: flags are recorded immediately; the first value-taking option
    // consumes the rest of the cluster as its value.
    //
    // NOTE(review): `ParseResult::UnkownShort` is a typo (and `ParseResult`
    // itself a rename leftover); `self.seen.push(arg.name)` pushes a name
    // where a `SeenArg` is expected; inside the `if let`, `arg` shadows the
    // cluster string, so `arg.splitn(2, c)` is called on the matched `Arg`
    // rather than on the cluster; and `app` is unbound in the `parse_opt`
    // call. Will not compile as-is.
    fn parse_short(
        &mut self,
        matcher: &mut ArgMatcher,
        full_arg: RawArg,
    ) -> ClapResult<ParseCtx> {
        debugln!("Parser::parse_short_arg: full_arg={:?}", full_arg);
        let arg_os = full_arg.trim_left_matches(b'-');
        let arg = arg_os.to_string_lossy();
        let mut ret = ParseResult::UnkownShort;
        for c in arg.chars() {
            debugln!("Parser::parse_short_arg:iter:{}", c);
            // update each index because `-abcd` is four indices to clap
            self.cur_idx.set(self.cur_idx.get() + 1);
            // Check for matching short options, and return the name if there is no trailing
            // concatenated value: -oval
            // Option: -o
            // Value: val
            if let Some(arg) = self.app.args.get_by_short(c) {
                debugln!( "Parser::parse_short_arg:iter:{}: Found valid opt or flag", c );
                self.app.settings.set(AS::ValidArgFound);
                self.seen.push(arg.name);
                if !arg.is_set(ArgSettings::TakesValue) {
                    self.check_for_help_and_version_char(c)?;
                    ret = self.parse_flag(arg.id, matcher)?;
                    continue;
                }
                // Check for trailing concatenated value such as -oval where 'o' is the short and
                // 'val' is the value
                let p: Vec<_> = arg.splitn(2, c).collect();
                let ro = RawOpt {
                    raw_key: p[0],
                    key: KeyType::Short,
                    value: RawValue::from_maybe_empty_osstr(p[1]),
                };
                return self.parse_opt(app, ro, arg, matcher);
            } else {
                return Ok(ParseResult::UnknownShort);
            }
        }
        Ok(ret)
    }
    // Record an occurrence of the value-taking option `opt` and, if the raw
    // token carried a value (`--opt=val` / `-oval`), attach that value.
    //
    // NOTE(review): `had_eq` is computed but never used (presumably meant to
    // gate the `RequireEquals` branch), and `ret` is assigned but `NextArg`
    // is returned unconditionally — the states from `add_val_to_arg` are
    // discarded. `self.app` is also not a field of `Parser`.
    fn parse_opt(
        &self,
        app: &mut App,
        raw: RawOpt,
        opt: &Arg<'help>,
        matcher: &mut ArgMatcher,
    ) -> ClapResult<ParseCtx> {
        debugln!("Parser::parse_opt; opt={}, val={:?}", opt.id, raw.value);
        let had_eq = raw.had_eq();
        let mut ret = ParseResult::Initial; // @TODO: valid args found state?
        if raw.has_value() {
            ret = self.add_val_to_arg(opt, raw.value_unchecked(), matcher)?;
        } else if opt.is_set(ArgSettings::RequireEquals) {
            return Err(ClapError::empty_value(
                opt,
                &*usage::with_title(app, &[]),
                app.color(),
            ));
        }
        matcher.inc_occurrence_of(opt.id);
        // Increment or create the group
        for grp in groups_for_arg!(self.app, &opt.id) {
            matcher.inc_occurrence_of(&*grp);
        }
        Ok(ParseResult::NextArg)
    }
    // Split `raw` on the arg's value delimiter (unless trailing values with
    // delimiting disabled) and feed each piece to `add_single_val_to_arg`,
    // then compute the follow-up parse state.
    //
    // NOTE(review): the debug line references `val`, which is not in scope
    // (presumably `raw`); all arms of the `value_state_after_val` match are
    // `unimplemented!()`; and `ParseResult::` is a rename leftover.
    fn add_val_to_arg(
        &self,
        arg: &Arg<'help>,
        mut raw: RawValue,
        matcher: &mut ArgMatcher,
    ) -> ClapResult<ParseCtx> {
        debugln!("Parser::add_val_to_arg; arg={}, val={:?}", arg.id, val);
        let honor_delims = !(self.is_set(AS::TrailingValues)
            && self.is_set(AS::DontDelimitTrailingValues));
        if honor_delims {
            raw.sep = arg.val_delim;
        }
        let mut ret = ParseResult::Initial; // @TODO: valid args found state?
        for v in raw.values() {
            ret = self.add_single_val_to_arg(arg, v, matcher)?;
        }
        // If there was a delimiter used, we're not looking for more values because
        // --foo=bar,baz qux isn't three values. Same with --foo bar,baz qux
        if honor_delims && raw.used_sep() { //|| arg.is_set(ArgSettings::RequireDelimiter)) {
            ret = ParseResult::NextArg;
        } else {
            ret = match matcher.value_state_after_val(arg) {
                ValueState::Done => {unimplemented!()}
                ValueState::RequiresValue(id) => {unimplemented!()}
                ValueState::AcceptsValue(id) => {unimplemented!()}
            };
        }
        Ok(ret)
    }
    // Attach one value `v` to `arg` (and to every group containing the arg),
    // assigning it the next argument index; stop early if `v` is the arg's
    // value terminator.
    fn add_single_val_to_arg(
        &self,
        arg: &Arg<'help>,
        v: &OsStr,
        matcher: &mut ArgMatcher,
    ) -> ClapResult<ParseCtx> {
        debugln!("Parser::add_single_val_to_arg: adding val...{:?}", v);
        // update the current index because each value is a distinct index to clap
        self.cur_idx.set(self.cur_idx.get() + 1);
        // @TODO @docs @p4 docs should probably note that terminator doesn't get an index
        if let Some(t) = arg.terminator {
            if t == v {
                return Ok(ParseResult::NextArg); // @TODO maybe add, ValueDone state?
            }
        }
        matcher.add_val_to(arg.id, v);
        matcher.add_index_to(arg.id, self.cur_idx.get());
        // Increment or create the group "args"
        for grp in groups_for_arg!(self.app, &arg.id) {
            matcher.add_val_to(&*grp, v);
        }
        // Keep collecting values for this arg until its multiplicity is satisfied.
        if matcher.needs_more_vals(arg) {
            return Ok(ParseResult::ArgAcceptsVals(arg.id));
        }
        Ok(ParseResult::NextArg)
    }
    // Record one occurrence of the flag `flag_id` (and of every group that
    // contains it) at the current argument index.
    fn parse_flag(
        &self,
        flag_id: u64,
        matcher: &mut ArgMatcher,
    ) -> ClapResult<ParseCtx> {
        debugln!("Parser::parse_flag;");
        matcher.inc_occurrence_of(flag_id);
        matcher.add_index_to(flag_id, self.cur_idx.get());
        // Increment or create the group "args"
        for grp in groups_for_arg!(self.app, &flag_id) {
            matcher.inc_occurrence_of(grp);
        }
        Ok(ParseResult::NextArg)
    }
    // Apply `default_value` / `default_value_if(s)` to every opt and
    // positional the user did not supply. Implemented as a macro (rather
    // than a helper fn) so the `continue` inside can target the surrounding
    // `for` loops over opts/positionals.
    pub(crate) fn add_defaults(&mut self, matcher: &mut ArgMatcher) -> ClapResult<()> {
        debugln!("Parser::add_defaults;");
        macro_rules! add_val {
            // Unconditional default: used when no conditional default fired.
            (@default $_self:ident, $a:ident, $m:ident) => {
                if let Some(ref val) = $a.default_val {
                    debugln!("Parser::add_defaults:iter:{}: has default vals", $a.name);
                    if $m
                        .get($a.name)
                        .map(|ma| ma.vals.len())
                        .map(|len| len == 0)
                        .unwrap_or(false)
                    {
                        debugln!(
                            "Parser::add_defaults:iter:{}: has no user defined vals",
                            $a.name
                        );
                        $_self.add_val_to_arg($a, OsStr::new(val), $m)?;
                    } else if $m.get($a.name).is_some() {
                        debugln!(
                            "Parser::add_defaults:iter:{}: has user defined vals",
                            $a.name
                        );
                    } else {
                        debugln!("Parser::add_defaults:iter:{}: wasn't used", $a.name);
                        $_self.add_val_to_arg($a, OsStr::new(val), $m)?;
                    }
                } else {
                    debugln!(
                        "Parser::add_defaults:iter:{}: doesn't have default vals",
                        $a.name
                    );
                }
            };
            // Conditional defaults first; falls through to @default if none fired.
            ($_self:ident, $a:ident, $m:ident) => {
                if let Some(ref vm) = $a.default_vals_ifs {
                    sdebugln!(" has conditional defaults");
                    let mut done = false;
                    if $m.get($a.name).is_none() {
                        for &(arg, val, default) in vm.values() {
                            // Fire when `arg` was supplied and (if given) with value `val`.
                            let add = if let Some(a) = $m.get(arg) {
                                if let Some(v) = val {
                                    a.vals.iter().any(|value| v == value)
                                } else {
                                    true
                                }
                            } else {
                                false
                            };
                            if add {
                                $_self.add_val_to_arg($a, OsStr::new(default), $m)?;
                                done = true;
                                break;
                            }
                        }
                    }
                    if done {
                        continue; // outer loop (outside macro)
                    }
                } else {
                    sdebugln!(" doesn't have conditional defaults");
                }
                add_val!(@default $_self, $a, $m)
            };
        }
        for o in self.app.args.opts() {
            debug!("Parser::add_defaults:iter:{}:", o.name);
            add_val!(self, o, matcher);
        }
        for p in self.app.args.positionals() {
            debug!("Parser::add_defaults:iter:{}:", p.name);
            add_val!(self, p, matcher);
        }
        Ok(())
    }
    // For every arg backed by an environment variable, use the variable's
    // value (when it is set) as if the user had supplied it.
    pub(crate) fn add_env(&mut self, matcher: &mut ArgMatcher) -> ClapResult<()> {
        for a in self.app.args.args.iter() {
            if let Some(ref val) = a.env {
                // `val.1` is the captured env value, present only if set.
                if let Some(ref val) = val.1 {
                    self.add_val_to_arg(a, OsStr::new(val).into(), matcher)?;
                }
            }
        }
        Ok(())
    }
}
// Error, Help, and Version Methods
impl Parser {
    // Build an unknown-argument error for a long flag, with a did-you-mean
    // suffix computed from all known longs and subcommands. The suggested
    // arg (if any) is inserted into the matcher first so the generated usage
    // string reflects it.
    //
    // NOTE(review): `app` in the final `ClapError::unknown_argument` call is
    // unbound, and `self.required` is not a field of `Parser`; will not
    // compile as-is.
    fn did_you_mean_error(&mut self, arg: &str, matcher: &mut ArgMatcher) -> ClapResult<()> {
        debugln!("Parser::did_you_mean_error: arg={}", arg);
        // Get all longs
        let longs = self
            .app
            .args
            .args
            .iter()
            .filter_map(|x| x.long)
            .collect::<Vec<_>>();
        debugln!("Parser::did_you_mean_error: longs={:?}", longs);
        let suffix = suggestions::did_you_mean_flag_suffix(
            arg,
            longs.iter().map(|ref x| &x[..]),
            self.app.subcommands.as_mut_slice(),
        );
        // Add the arg to the matches to build a proper usage string
        if let Some(ref name) = suffix.1 {
            if let Some(opt) = self.app.args.get_by_long(&*name) {
                for g in groups_for_arg!(self.app, &opt.id) {
                    matcher.inc_occurrence_of(g);
                }
                matcher.insert(opt.id);
            }
        }
        // Collect the visible, non-required args already used, for the usage line.
        let used: Vec<u64> = matcher
            .arg_names()
            .filter(|n| {
                if let Some(a) = self.app.find(**n) {
                    !(self.required.contains(a.name) || a.is_set(ArgSettings::Hidden))
                } else {
                    true
                }
            })
            .cloned()
            .collect();
        Err(ClapError::unknown_argument(
            &*format!("--{}", arg),
            &*suffix.0,
            &*usage::with_title(app, &*used),
            app.color(),
        ))
    }
}
// Query Methods
// Query Methods
//
// Thin read-only accessors, almost all delegating to the wrapped `App`.
// NOTE(review): `Parser` is declared with a `'help` lifetime parameter, so
// `impl Parser` here is missing generics, and `self.app` is not a field of
// `Parser`; this block will not compile as-is.
impl Parser {
    #[inline]
    fn skip_to_last_positional(&self) -> bool {
        // True when `--` was seen and either missing positionals are allowed
        // or some positional is `.last(true)`.
        self.is_set(AS::TrailingValues) && (self.is_set(AS::AllowMissingPositional) || self.is_set(AS::ContainsLast))
    }
    fn contains_short(&self, s: char) -> bool { self.app.contains_short(s) }
    #[cfg_attr(feature = "lints", allow(needless_borrow))]
    pub(crate) fn has_args(&self) -> bool { self.app.has_args() }
    pub(crate) fn has_opts(&self) -> bool { self.app.has_opts() }
    pub(crate) fn has_flags(&self) -> bool { self.app.has_flags() }
    pub(crate) fn has_positionals(&self) -> bool {
        self.app.args.args.iter().any(|x| x.index.is_some())
    }
    pub(crate) fn has_subcommands(&self) -> bool { self.app.has_subcommands() }
    pub(crate) fn has_visible_subcommands(&self) -> bool { self.app.has_visible_subcommands() }
    pub(crate) fn is_set(&self, s: AS) -> bool { self.app.is_set(s) }
    pub(crate) fn set(&mut self, s: AS) { self.app.set(s) }
    pub(crate) fn unset(&mut self, s: AS) { self.app.unset(s) }
}
| 37.122744 | 155 | 0.487666 |
d91f93b280e226afe92be0ba5cf1eb42083e0340 | 4,695 | pub mod undo {
use bstr::{BStr, BString};
use quick_error::quick_error;
    // Errors that can occur while undoing ANSI-C style quoting; both variants
    // carry the offending input for context.
    quick_error! {
        #[derive(Debug)]
        pub enum Error {
            InvalidInput { message: String, input: BString } {
                display("{}: {:?}", message, input)
            }
            UnsupportedEscapeByte { byte: u8, input: BString } {
                display("Invalid escaped value {} in input {:?}", byte, input)
            }
        }
    }
    impl Error {
        // Convenience constructor for an `InvalidInput` error from any
        // displayable message plus the offending input.
        pub(crate) fn new(message: impl ToString, input: &BStr) -> Error {
            Error::InvalidInput {
                message: message.to_string(),
                input: input.into(),
            }
        }
    }
}
use std::{borrow::Cow, io::Read};
use bstr::{BStr, BString, ByteSlice};
/// Unquote the given ansi-c quoted `input` string, returning it and all of the consumed bytes.
///
/// The `input` is returned unaltered if it doesn't start with a `"` character to indicate
/// quotation, otherwise a new unquoted string will always be allocated.
/// The amount of consumed bytes allow to pass strings that start with a quote, and skip all quoted text for additional processing
///
/// See [the tests][tests] for quotation examples.
///
/// [tests]: https://github.com/Byron/gitoxide/blob/e355b4ad133075152312816816af5ce72cf79cff/git-odb/src/alternate/unquote.rs#L110-L118
pub fn undo(input: &BStr) -> Result<(Cow<'_, BStr>, usize), undo::Error> {
if !input.starts_with(b"\"") {
return Ok((input.into(), input.len()));
}
if input.len() < 2 {
return Err(undo::Error::new("Input must be surrounded by double quotes", input));
}
let original = input.as_bstr();
let mut input = &input[1..];
let mut consumed = 1;
let mut out = BString::default();
fn consume_one_past(input: &mut &BStr, position: usize) -> Result<u8, undo::Error> {
*input = input
.get(position + 1..)
.ok_or_else(|| undo::Error::new("Unexpected end of input", input))?
.as_bstr();
let next = input[0];
*input = input.get(1..).unwrap_or_default().as_bstr();
Ok(next)
}
loop {
match input.find_byteset(b"\"\\") {
Some(position) => {
out.extend_from_slice(&input[..position]);
consumed += position + 1;
match input[position] {
b'"' => break,
b'\\' => {
let next = consume_one_past(&mut input, position)?;
consumed += 1;
match next {
b'n' => out.push(b'\n'),
b'r' => out.push(b'\r'),
b't' => out.push(b'\t'),
b'a' => out.push(7),
b'b' => out.push(8),
b'v' => out.push(0xb),
b'f' => out.push(0xc),
b'"' => out.push(b'"'),
b'\\' => out.push(b'\\'),
b'0' | b'1' | b'2' | b'3' => {
let mut buf = [next; 3];
input
.get(..2)
.ok_or_else(|| {
undo::Error::new(
"Unexpected end of input when fetching two more octal bytes",
input,
)
})?
.read_exact(&mut buf[1..])
.expect("impossible to fail as numbers match");
let byte = btoi::btou_radix(&buf, 8).map_err(|e| undo::Error::new(e, original))?;
out.push(byte);
input = &input[2..];
consumed += 2;
}
_ => {
return Err(undo::Error::UnsupportedEscapeByte {
byte: next,
input: original.into(),
})
}
}
}
_ => unreachable!("cannot find character that we didn't search for"),
}
}
None => {
out.extend_from_slice(input);
consumed += input.len();
break;
}
}
}
Ok((out.into(), consumed))
}
| 40.128205 | 135 | 0.416187 |
6aa8242193d5fce87d9a22a4ae2c1f9e6023b30e | 19,236 | //! ### Inferring borrow kinds for upvars
//!
//! Whenever there is a closure expression, we need to determine how each
//! upvar is used. We do this by initially assigning each upvar an
//! immutable "borrow kind" (see `ty::BorrowKind` for details) and then
//! "escalating" the kind as needed. The borrow kind proceeds according to
//! the following lattice:
//!
//! ty::ImmBorrow -> ty::UniqueImmBorrow -> ty::MutBorrow
//!
//! So, for example, if we see an assignment `x = 5` to an upvar `x`, we
//! will promote its borrow kind to mutable borrow. If we see an `&mut x`
//! we'll do the same. Naturally, this applies not just to the upvar, but
//! to everything owned by `x`, so the result is the same for something
//! like `x.f = 5` and so on (presuming `x` is not a borrowed pointer to a
//! struct). These adjustments are performed in
//! `adjust_upvar_borrow_kind()` (you can trace backwards through the code
//! from there).
//!
//! The fact that we are inferring borrow kinds as we go results in a
//! semi-hacky interaction with mem-categorization. In particular,
//! mem-categorization will query the current borrow kind as it
//! categorizes, and we'll return the *current* value, but this may get
//! adjusted later. Therefore, in this module, we generally ignore the
//! borrow kind (and derived mutabilities) that are returned from
//! mem-categorization, since they may be inaccurate. (Another option
//! would be to use a unification scheme, where instead of returning a
//! concrete borrow kind like `ty::ImmBorrow`, we return a
//! `ty::InferBorrow(upvar_id)` or something like that, but this would
//! then mean that all later passes would have to check for these figments
//! and report an error, and it just seems like more mess in the end.)
use super::FnCtxt;
use crate::expr_use_visitor as euv;
use crate::mem_categorization as mc;
use crate::mem_categorization::PlaceBase;
use rustc_data_structures::fx::FxIndexMap;
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_hir::def_id::LocalDefId;
use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
use rustc_infer::infer::UpvarRegion;
use rustc_middle::ty::{self, Ty, TyCtxt, UpvarSubsts};
use rustc_span::{Span, Symbol};
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
    /// Entry point for upvar inference: walk `body` and run closure analysis
    /// on every closure expression found within it.
    pub fn closure_analyze(&self, body: &'tcx hir::Body<'tcx>) {
        let mut visitor = InferBorrowKindVisitor { fcx: self };
        visitor.visit_body(body);

        // it's our job to process these — by now none may remain pending.
        assert!(self.deferred_call_resolutions.borrow().is_empty());
    }
}
/// HIR visitor that walks a body and runs closure upvar analysis
/// (`FnCtxt::analyze_closure`) on every closure expression it encounters.
struct InferBorrowKindVisitor<'a, 'tcx> {
    fcx: &'a FnCtxt<'a, 'tcx>,
}
impl<'a, 'tcx> Visitor<'tcx> for InferBorrowKindVisitor<'a, 'tcx> {
    type Map = intravisit::ErasedMap<'tcx>;
    // Nested bodies are visited explicitly in `visit_expr`, so no map is needed.
    fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
        NestedVisitorMap::None
    }
    fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) {
        // Analyze innermost closures first: recurse into the closure's own
        // body before analyzing the closure itself, then continue the walk.
        if let hir::ExprKind::Closure(cc, _, body_id, _, _) = expr.kind {
            let body = self.fcx.tcx.hir().body(body_id);
            self.visit_body(body);
            self.fcx.analyze_closure(expr.hir_id, expr.span, body, cc);
        }
        intravisit::walk_expr(self, expr);
    }
}
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
    /// Analysis starting point.
    ///
    /// Seeds every upvar of the closure with the weakest capture consistent
    /// with `capture_clause` (by-value, or an `ImmBorrow` by-ref capture),
    /// walks the closure body with an `InferBorrowKind` delegate to escalate
    /// borrow kinds and infer the closure kind, and finally unifies the
    /// inferred upvar types and closure kind with the closure's type
    /// variables.
    fn analyze_closure(
        &self,
        closure_hir_id: hir::HirId,
        span: Span,
        body: &hir::Body<'_>,
        capture_clause: hir::CaptureBy,
    ) {
        debug!("analyze_closure(id={:?}, body.id={:?})", closure_hir_id, body.id());
        // Extract the type of the closure.
        let ty = self.node_ty(closure_hir_id);
        let (closure_def_id, substs) = match ty.kind {
            ty::Closure(def_id, substs) => (def_id, UpvarSubsts::Closure(substs)),
            ty::Generator(def_id, substs, _) => (def_id, UpvarSubsts::Generator(substs)),
            ty::Error => {
                // #51714: skip analysis when we have already encountered type errors
                return;
            }
            _ => {
                span_bug!(
                    span,
                    "type of closure expr {:?} is not a closure {:?}",
                    closure_hir_id,
                    ty
                );
            }
        };
        // `Some(substs)` iff the closure kind is still an unbound inference
        // variable (generators never infer a kind this way).
        let infer_kind = if let UpvarSubsts::Closure(closure_substs) = substs {
            self.closure_kind(closure_substs).is_none().then_some(closure_substs)
        } else {
            None
        };
        if let Some(upvars) = self.tcx.upvars(closure_def_id) {
            let mut upvar_list: FxIndexMap<hir::HirId, ty::UpvarId> =
                FxIndexMap::with_capacity_and_hasher(upvars.len(), Default::default());
            for (&var_hir_id, _) in upvars.iter() {
                let upvar_id = ty::UpvarId {
                    var_path: ty::UpvarPath { hir_id: var_hir_id },
                    closure_expr_id: closure_def_id.expect_local(),
                };
                debug!("seed upvar_id {:?}", upvar_id);
                // Adding the upvar Id to the list of Upvars, which will be added
                // to the map for the closure at the end of the for loop.
                upvar_list.insert(var_hir_id, upvar_id);
                // Seed with the weakest capture: by-value, or an immutable
                // borrow with a fresh region variable for by-ref closures.
                let capture_kind = match capture_clause {
                    hir::CaptureBy::Value => ty::UpvarCapture::ByValue,
                    hir::CaptureBy::Ref => {
                        let origin = UpvarRegion(upvar_id, span);
                        let upvar_region = self.next_region_var(origin);
                        let upvar_borrow =
                            ty::UpvarBorrow { kind: ty::ImmBorrow, region: upvar_region };
                        ty::UpvarCapture::ByRef(upvar_borrow)
                    }
                };
                self.tables.borrow_mut().upvar_capture_map.insert(upvar_id, capture_kind);
            }
            // Add the vector of upvars to the map keyed with the closure id.
            // This gives us an easier access to them without having to call
            // tcx.upvars again..
            if !upvar_list.is_empty() {
                self.tables.borrow_mut().upvar_list.insert(closure_def_id, upvar_list);
            }
        }
        let body_owner_def_id = self.tcx.hir().body_owner_def_id(body.id());
        assert_eq!(body_owner_def_id.to_def_id(), closure_def_id);
        let mut delegate = InferBorrowKind {
            fcx: self,
            closure_def_id,
            current_closure_kind: ty::ClosureKind::LATTICE_BOTTOM,
            current_origin: None,
            adjust_upvar_captures: ty::UpvarCaptureMap::default(),
        };
        euv::ExprUseVisitor::new(
            &mut delegate,
            &self.infcx,
            body_owner_def_id,
            self.param_env,
            &self.tables.borrow(),
        )
        .consume_body(body);
        if let Some(closure_substs) = infer_kind {
            // Unify the (as yet unbound) type variable in the closure
            // substs with the kind we inferred.
            let inferred_kind = delegate.current_closure_kind;
            let closure_kind_ty = closure_substs.as_closure().kind_ty();
            self.demand_eqtype(span, inferred_kind.to_ty(self.tcx), closure_kind_ty);
            // If we have an origin, store it.
            if let Some(origin) = delegate.current_origin {
                self.tables.borrow_mut().closure_kind_origins_mut().insert(closure_hir_id, origin);
            }
        }
        self.tables.borrow_mut().upvar_capture_map.extend(delegate.adjust_upvar_captures);
        // Now that we've analyzed the closure, we know how each
        // variable is borrowed, and we know what traits the closure
        // implements (Fn vs FnMut etc). We now have some updates to do
        // with that information.
        //
        // Note that no closure type C may have an upvar of type C
        // (though it may reference itself via a trait object). This
        // results from the desugaring of closures to a struct like
        // `Foo<..., UV0...UVn>`. If one of those upvars referenced
        // C, then the type would have infinite size (and the
        // inference algorithm will reject it).
        // Equate the type variables for the upvars with the actual types.
        let final_upvar_tys = self.final_upvar_tys(closure_hir_id);
        debug!(
            "analyze_closure: id={:?} substs={:?} final_upvar_tys={:?}",
            closure_hir_id, substs, final_upvar_tys
        );
        for (upvar_ty, final_upvar_ty) in substs.upvar_tys().zip(final_upvar_tys) {
            self.demand_suptype(span, upvar_ty, final_upvar_ty);
        }
        // If we are also inferred the closure kind here,
        // process any deferred resolutions.
        let deferred_call_resolutions = self.remove_deferred_call_resolutions(closure_def_id);
        for deferred_call_resolution in deferred_call_resolutions {
            deferred_call_resolution.resolve(self);
        }
    }
    // Returns a list of `Ty`s for each upvar.
    //
    // For a by-value capture this is the upvar's own type; for a by-ref
    // capture it is a reference with the inferred region and mutability.
    fn final_upvar_tys(&self, closure_id: hir::HirId) -> Vec<Ty<'tcx>> {
        // Presently an unboxed closure type cannot "escape" out of a
        // function, so we will only encounter ones that originated in the
        // local crate or were inlined into it along with some function.
        // This may change if abstract return types of some sort are
        // implemented.
        let tcx = self.tcx;
        let closure_def_id = tcx.hir().local_def_id(closure_id);
        tcx.upvars(closure_def_id)
            .iter()
            .flat_map(|upvars| {
                upvars.iter().map(|(&var_hir_id, _)| {
                    let upvar_ty = self.node_ty(var_hir_id);
                    let upvar_id = ty::UpvarId {
                        var_path: ty::UpvarPath { hir_id: var_hir_id },
                        closure_expr_id: closure_def_id,
                    };
                    let capture = self.tables.borrow().upvar_capture(upvar_id);
                    debug!("var_id={:?} upvar_ty={:?} capture={:?}", var_hir_id, upvar_ty, capture);
                    match capture {
                        ty::UpvarCapture::ByValue => upvar_ty,
                        ty::UpvarCapture::ByRef(borrow) => tcx.mk_ref(
                            borrow.region,
                            ty::TypeAndMut { ty: upvar_ty, mutbl: borrow.kind.to_mutbl_lossy() },
                        ),
                    }
                })
            })
            .collect()
    }
}
/// Expression-use-visitor delegate that infers, for each upvar, the minimal
/// capture kind needed, and for the closure as a whole the minimal closure
/// kind (`Fn`/`FnMut`/`FnOnce`). See the `euv::Delegate` impl below.
struct InferBorrowKind<'a, 'tcx> {
    // Enclosing function typeck context.
    fcx: &'a FnCtxt<'a, 'tcx>,
    // The def-id of the closure whose kind and upvar accesses are being inferred.
    closure_def_id: DefId,
    // The kind that we have inferred that the current closure
    // requires. Note that we *always* infer a minimal kind, even if
    // we don't always *use* that in the final result (i.e., sometimes
    // we've taken the closure kind from the expectations instead, and
    // for generators we don't even implement the closure traits
    // really).
    current_closure_kind: ty::ClosureKind,
    // If we modified `current_closure_kind`, this field contains a `Some()` with the
    // variable access that caused us to do so.
    current_origin: Option<(Span, Symbol)>,
    // For each upvar that we access, we track the minimal kind of
    // access we need (ref, ref mut, move, etc).
    adjust_upvar_captures: ty::UpvarCaptureMap<'tcx>,
}
impl<'a, 'tcx> InferBorrowKind<'a, 'tcx> {
    /// Records the effect of consuming `place`: copies require nothing, but a
    /// move out of an upvar forces a by-value capture and a `FnOnce` closure.
    fn adjust_upvar_borrow_kind_for_consume(
        &mut self,
        place: &mc::Place<'tcx>,
        mode: euv::ConsumeMode,
    ) {
        debug!("adjust_upvar_borrow_kind_for_consume(place={:?}, mode={:?})", place, mode);
        // we only care about moves
        match mode {
            euv::Copy => {
                return;
            }
            euv::Move => {}
        }
        let tcx = self.fcx.tcx;
        // Only moves whose base is an upvar (not a closure-local) matter here.
        let upvar_id = if let PlaceBase::Upvar(upvar_id) = place.base {
            upvar_id
        } else {
            return;
        };
        debug!("adjust_upvar_borrow_kind_for_consume: upvar={:?}", upvar_id);
        // To move out of an upvar, this must be a FnOnce closure
        self.adjust_closure_kind(
            upvar_id.closure_expr_id,
            ty::ClosureKind::FnOnce,
            place.span,
            var_name(tcx, upvar_id.var_path.hir_id),
        );
        self.adjust_upvar_captures.insert(upvar_id, ty::UpvarCapture::ByValue);
    }
    /// Indicates that `place` is being directly mutated (e.g., assigned
    /// to). If the place is based on a by-ref upvar, this implies that
    /// the upvar must be borrowed using an `&mut` borrow.
    fn adjust_upvar_borrow_kind_for_mut(&mut self, place: &mc::Place<'tcx>) {
        debug!("adjust_upvar_borrow_kind_for_mut(place={:?})", place);
        if let PlaceBase::Upvar(upvar_id) = place.base {
            let mut borrow_kind = ty::MutBorrow;
            // Walk the pointer types dereferenced on the way to the mutated
            // location; they can weaken (or cancel) the required borrow.
            for pointer_ty in place.deref_tys() {
                match pointer_ty.kind {
                    // Raw pointers don't inherit mutability.
                    ty::RawPtr(_) => return,
                    // assignment to deref of an `&mut`
                    // borrowed pointer implies that the
                    // pointer itself must be unique, but not
                    // necessarily *mutable*
                    ty::Ref(.., hir::Mutability::Mut) => borrow_kind = ty::UniqueImmBorrow,
                    _ => (),
                }
            }
            self.adjust_upvar_deref(upvar_id, place.span, borrow_kind);
        }
    }
    /// Indicates that `place` is borrowed unique-immutably; if it is based on
    /// an upvar, the upvar's capture must be at least `UniqueImmBorrow`.
    fn adjust_upvar_borrow_kind_for_unique(&mut self, place: &mc::Place<'tcx>) {
        debug!("adjust_upvar_borrow_kind_for_unique(place={:?})", place);
        if let PlaceBase::Upvar(upvar_id) = place.base {
            if place.deref_tys().any(ty::TyS::is_unsafe_ptr) {
                // Raw pointers don't inherit mutability.
                return;
            }
            // for a borrowed pointer to be unique, its base must be unique
            self.adjust_upvar_deref(upvar_id, place.span, ty::UniqueImmBorrow);
        }
    }
    /// Upgrades the capture of `upvar_id` to at least `borrow_kind` and the
    /// enclosing closure kind to at least `FnMut`. Only reached for mutable
    /// or unique-immutable borrows.
    fn adjust_upvar_deref(
        &mut self,
        upvar_id: ty::UpvarId,
        place_span: Span,
        borrow_kind: ty::BorrowKind,
    ) {
        assert!(match borrow_kind {
            ty::MutBorrow => true,
            ty::UniqueImmBorrow => true,
            // imm borrows never require adjusting any kinds, so we don't wind up here
            ty::ImmBorrow => false,
        });
        let tcx = self.fcx.tcx;
        // if this is an implicit deref of an
        // upvar, then we need to modify the
        // borrow_kind of the upvar to make sure it
        // is inferred to mutable if necessary
        self.adjust_upvar_borrow_kind(upvar_id, borrow_kind);
        // also need to be in an FnMut closure since this is not an ImmBorrow
        self.adjust_closure_kind(
            upvar_id.closure_expr_id,
            ty::ClosureKind::FnMut,
            place_span,
            var_name(tcx, upvar_id.var_path.hir_id),
        );
    }
    /// We infer the borrow_kind with which to borrow upvars in a stack closure.
    /// The borrow_kind basically follows a lattice of `imm < unique-imm < mut`,
    /// moving from left to right as needed (but never right to left).
    /// Here the argument `mutbl` is the borrow_kind that is required by
    /// some particular use.
    fn adjust_upvar_borrow_kind(&mut self, upvar_id: ty::UpvarId, kind: ty::BorrowKind) {
        // Prefer an adjustment already recorded during this pass; otherwise
        // start from the capture mode currently stored in the typeck tables.
        let upvar_capture = self
            .adjust_upvar_captures
            .get(&upvar_id)
            .copied()
            .unwrap_or_else(|| self.fcx.tables.borrow().upvar_capture(upvar_id));
        debug!(
            "adjust_upvar_borrow_kind(upvar_id={:?}, upvar_capture={:?}, kind={:?})",
            upvar_id, upvar_capture, kind
        );
        match upvar_capture {
            ty::UpvarCapture::ByValue => {
                // Upvar is already by-value, the strongest criteria.
            }
            ty::UpvarCapture::ByRef(mut upvar_borrow) => {
                // Join the existing and required kinds on the borrow lattice.
                match (upvar_borrow.kind, kind) {
                    // Take RHS:
                    (ty::ImmBorrow, ty::UniqueImmBorrow | ty::MutBorrow)
                    | (ty::UniqueImmBorrow, ty::MutBorrow) => {
                        upvar_borrow.kind = kind;
                        self.adjust_upvar_captures
                            .insert(upvar_id, ty::UpvarCapture::ByRef(upvar_borrow));
                    }
                    // Take LHS:
                    (ty::ImmBorrow, ty::ImmBorrow)
                    | (ty::UniqueImmBorrow, ty::ImmBorrow | ty::UniqueImmBorrow)
                    | (ty::MutBorrow, _) => {}
                }
            }
        }
    }
    /// Upgrades the inferred kind of the closure currently being analyzed to
    /// at least `new_kind`, recording which variable access caused the
    /// upgrade. Calls naming any *other* closure are ignored.
    fn adjust_closure_kind(
        &mut self,
        closure_id: LocalDefId,
        new_kind: ty::ClosureKind,
        upvar_span: Span,
        var_name: Symbol,
    ) {
        debug!(
            "adjust_closure_kind(closure_id={:?}, new_kind={:?}, upvar_span={:?}, var_name={})",
            closure_id, new_kind, upvar_span, var_name
        );
        // Is this the closure whose kind is currently being inferred?
        if closure_id.to_def_id() != self.closure_def_id {
            debug!("adjust_closure_kind: not current closure");
            return;
        }
        // closures start out as `Fn`.
        let existing_kind = self.current_closure_kind;
        debug!(
            "adjust_closure_kind: closure_id={:?}, existing_kind={:?}, new_kind={:?}",
            closure_id, existing_kind, new_kind
        );
        match (existing_kind, new_kind) {
            (ty::ClosureKind::Fn, ty::ClosureKind::Fn)
            | (ty::ClosureKind::FnMut, ty::ClosureKind::Fn | ty::ClosureKind::FnMut)
            | (ty::ClosureKind::FnOnce, _) => {
                // no change needed
            }
            (ty::ClosureKind::Fn, ty::ClosureKind::FnMut | ty::ClosureKind::FnOnce)
            | (ty::ClosureKind::FnMut, ty::ClosureKind::FnOnce) => {
                // new kind is stronger than the old kind
                self.current_closure_kind = new_kind;
                self.current_origin = Some((upvar_span, var_name));
            }
        }
    }
}
impl<'a, 'tcx> euv::Delegate<'tcx> for InferBorrowKind<'a, 'tcx> {
    // A place was consumed (moved or copied).
    fn consume(&mut self, place: &mc::Place<'tcx>, mode: euv::ConsumeMode) {
        debug!("consume(place={:?},mode={:?})", place, mode);
        self.adjust_upvar_borrow_kind_for_consume(place, mode);
    }
    // A place was borrowed with borrow kind `bk`.
    fn borrow(&mut self, place: &mc::Place<'tcx>, bk: ty::BorrowKind) {
        debug!("borrow(place={:?}, bk={:?})", place, bk);
        match bk {
            // Immutable borrows never force a stronger capture or kind.
            ty::ImmBorrow => {}
            ty::UniqueImmBorrow => {
                self.adjust_upvar_borrow_kind_for_unique(place);
            }
            ty::MutBorrow => {
                self.adjust_upvar_borrow_kind_for_mut(place);
            }
        }
    }
    // A place was assigned to; treated like a mutable use of the base upvar.
    fn mutate(&mut self, assignee_place: &mc::Place<'tcx>) {
        debug!("mutate(assignee_place={:?})", assignee_place);
        self.adjust_upvar_borrow_kind_for_mut(assignee_place);
    }
}
/// Looks up the source-level name of the variable bound at `var_hir_id`.
fn var_name(tcx: TyCtxt<'_>, var_hir_id: hir::HirId) -> Symbol {
    tcx.hir().name(var_hir_id)
}
| 39.661856 | 100 | 0.580006 |
1e757ca4d73eb3214f5c4053d4c50e1da30cfd8f | 6,870 | //! Parse encoding parameter to `Regexp#initialize` and `Regexp::compile`.
use bstr::ByteSlice;
use core::convert::TryFrom;
use core::fmt;
use core::hash::{Hash, Hasher};
use core::mem;
use std::error;
use crate::Flags;
/// Error returned when flag bits or modifier characters cannot be interpreted
/// as a valid `Regexp` encoding.
#[derive(Default, Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct InvalidEncodingError {
    // Private unit field prevents construction outside this module except via
    // `InvalidEncodingError::new` or `Default`.
    _private: (),
}
impl InvalidEncodingError {
    /// Constructs a new, default `InvalidEncodingError`.
    #[must_use]
    pub const fn new() -> Self {
        Self { _private: () }
    }
}
impl fmt::Display for InvalidEncodingError {
    // Fixed message; the error carries no additional data.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("Invalid Regexp encoding")
    }
}
impl error::Error for InvalidEncodingError {}
/// The encoding of a Regexp literal.
///
/// Regexps are assumed to use the source encoding but literals may override
/// the encoding with a Regexp modifier.
///
/// See [`Regexp` encoding][regexp-encoding].
///
/// [regexp-encoding]: https://ruby-doc.org/core-2.6.3/Regexp.html#class-Regexp-label-Encoding
// NOTE(review): `PartialOrd`/`Ord` are derived and order `No` before `None`,
// while the manual `PartialEq` below treats those two variants as equal —
// confirm whether the ordering should canonicalize them as well.
#[derive(Debug, Clone, Copy, PartialOrd, Ord)]
pub enum Encoding {
    /// Fixed encoding, from the `u`, `s`, or `e` modifiers.
    Fixed,
    /// Explicitly no encoding, from the `n` modifier.
    No,
    /// No encoding modifier was given; use the default.
    None,
}
impl Default for Encoding {
    /// The default encoding is [`Encoding::None`] (no modifier present).
    fn default() -> Self {
        Self::None
    }
}
impl Hash for Encoding {
    /// Hash the encoding's equivalence class.
    ///
    /// The manual `PartialEq` below treats `No` and `None` as equal, so they
    /// must also hash identically to uphold the `Hash`/`Eq` contract
    /// (`a == b` implies `hash(a) == hash(b)`). Hashing the raw
    /// `mem::discriminant(self)` would violate that, because the two variants
    /// have distinct discriminants; canonicalize `No` to `None` first.
    fn hash<H: Hasher>(&self, state: &mut H) {
        let canonical = match self {
            // `No` and `None` are the same equivalence class under `eq`.
            Self::No => Self::None,
            other => *other,
        };
        let discriminant = mem::discriminant(&canonical);
        discriminant.hash(state);
    }
}
impl PartialEq for Encoding {
    /// `No` and `None` are treated as equivalent (neither pins an encoding);
    /// `Fixed` is only equal to itself.
    fn eq(&self, other: &Self) -> bool {
        use Encoding::{Fixed, No, None};
        matches!(
            (self, other),
            (No, No) | (No, None) | (None, No) | (None, None) | (Fixed, Fixed)
        )
    }
}
impl Eq for Encoding {}
impl TryFrom<Flags> for Encoding {
    type Error = InvalidEncodingError;

    /// Extract the encoding from a full set of `Regexp` flags.
    ///
    /// The non-encoding regexp option bits are masked off first; the
    /// remaining bits must be one of the known encoding flags, or empty.
    ///
    /// # Errors
    ///
    /// Returns [`InvalidEncodingError`] if unrecognized bits remain after
    /// masking.
    fn try_from(mut flags: Flags) -> Result<Self, Self::Error> {
        // Ignore the regexp option bits; only encoding bits matter here.
        flags.set(Flags::ALL_REGEXP_OPTS, false);
        if flags.intersects(Flags::FIXEDENCODING) {
            Ok(Self::Fixed)
        } else if flags.intersects(Flags::NOENCODING) {
            Ok(Self::No)
        } else if flags.is_empty() {
            // No encoding bits set: the default encoding.
            Ok(Self::None)
        } else {
            Err(InvalidEncodingError::new())
        }
    }
}
/// Build an [`Encoding`] from raw flag bits, rejecting unknown bits.
impl TryFrom<u8> for Encoding {
    type Error = InvalidEncodingError;
    fn try_from(flags: u8) -> Result<Self, Self::Error> {
        // Bits outside the known flag set mean the byte is not a valid
        // flag representation at all.
        match Flags::from_bits(flags) {
            Some(parsed) => Self::try_from(parsed),
            None => Err(InvalidEncodingError::new()),
        }
    }
}
/// Build an [`Encoding`] from a widened flag representation; only the least
/// significant byte carries flag bits.
impl TryFrom<i64> for Encoding {
    type Error = InvalidEncodingError;
    fn try_from(flags: i64) -> Result<Self, Self::Error> {
        // Little-endian byte 0 is the low byte regardless of host endianness.
        let [low_byte, ..] = flags.to_le_bytes();
        Self::try_from(low_byte)
    }
}
impl TryFrom<&str> for Encoding {
type Error = InvalidEncodingError;
fn try_from(encoding: &str) -> Result<Self, Self::Error> {
if encoding.contains('u') && encoding.contains('n') {
return Err(InvalidEncodingError::new());
}
let mut enc = None;
for flag in encoding.bytes() {
match flag {
b'u' | b's' | b'e' if enc.is_none() => enc = Some(Encoding::Fixed),
b'n' if enc.is_none() => enc = Some(Encoding::No),
b'i' | b'm' | b'x' | b'o' => continue,
_ => return Err(InvalidEncodingError::new()),
}
}
Ok(enc.unwrap_or_default())
}
}
impl TryFrom<&[u8]> for Encoding {
    type Error = InvalidEncodingError;
    /// Parse an encoding from `Regexp` modifier bytes.
    fn try_from(encoding: &[u8]) -> Result<Self, Self::Error> {
        // `u` (fixed encoding) and `n` (no encoding) are mutually exclusive.
        if encoding.find_byte(b'u').is_some() && encoding.find_byte(b'n').is_some() {
            return Err(InvalidEncodingError::new());
        }
        let mut enc = None;
        for &flag in encoding {
            match flag {
                // First encoding modifier wins. The `enc.is_none()` guards
                // make a *second* encoding modifier fall through to the `_`
                // arm below, which rejects it.
                b'u' | b's' | b'e' if enc.is_none() => enc = Some(Encoding::Fixed),
                b'n' if enc.is_none() => enc = Some(Encoding::No),
                // Regexp option modifiers are permitted and ignored here.
                b'i' | b'm' | b'x' | b'o' => continue,
                _ => return Err(InvalidEncodingError::new()),
            }
        }
        // No encoding modifier present: fall back to `Encoding::None`.
        Ok(enc.unwrap_or_default())
    }
}
impl TryFrom<String> for Encoding {
    type Error = InvalidEncodingError;
    // Delegates to the `&str` implementation.
    fn try_from(encoding: String) -> Result<Self, Self::Error> {
        Self::try_from(encoding.as_str())
    }
}
impl TryFrom<Vec<u8>> for Encoding {
    type Error = InvalidEncodingError;
    // Delegates to the `&[u8]` implementation.
    fn try_from(encoding: Vec<u8>) -> Result<Self, Self::Error> {
        Self::try_from(encoding.as_slice())
    }
}
impl From<Encoding> for Flags {
    /// Convert an `Encoding` to its bit flag representation.
    fn from(encoding: Encoding) -> Self {
        encoding.flags()
    }
}
impl From<&Encoding> for Flags {
    /// Convert an `Encoding` to its bit flag representation.
    fn from(encoding: &Encoding) -> Self {
        encoding.flags()
    }
}
impl From<Encoding> for u8 {
    /// Convert an `Encoding` to its bit representation.
    fn from(encoding: Encoding) -> Self {
        encoding.into_bits()
    }
}
impl From<&Encoding> for u8 {
    /// Convert an `Encoding` to its bit representation.
    fn from(encoding: &Encoding) -> Self {
        encoding.into_bits()
    }
}
impl From<Encoding> for i64 {
    /// Convert an `Encoding` to its widened bit representation.
    fn from(encoding: Encoding) -> Self {
        encoding.into_bits().into()
    }
}
impl From<&Encoding> for i64 {
    /// Convert an `Encoding` to its widened bit representation.
    fn from(encoding: &Encoding) -> Self {
        encoding.into_bits().into()
    }
}
impl fmt::Display for Encoding {
    /// Format as the `Regexp` modifier string: `"n"` for `No`, empty for
    /// `Fixed` and `None` (see `modifier_string`).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.modifier_string())
    }
}
impl Encoding {
    /// Construct a new [`None`] encoding.
    ///
    /// [`None`]: Self::None
    #[must_use]
    pub const fn new() -> Self {
        Self::None
    }
    /// Convert an `Encoding` to its bit flag representation.
    ///
    /// Alias for the corresponding `Into<Flags>` implementation.
    #[must_use]
    pub const fn flags(self) -> Flags {
        match self {
            Encoding::Fixed => Flags::FIXEDENCODING,
            Encoding::No => Flags::NOENCODING,
            // `None` contributes no flag bits.
            Encoding::None => Flags::empty(),
        }
    }
    /// Convert an `Encoding` to its bit representation.
    ///
    /// Alias for the corresponding `Into<u8>` implementation.
    #[must_use]
    pub const fn into_bits(self) -> u8 {
        self.flags().bits()
    }
    /// Serialize the encoding flags to a string suitable for a `Regexp` display
    /// or debug implementation.
    ///
    /// Only `No` renders a modifier character (`"n"`); `Fixed` and `None`
    /// render as the empty string.
    ///
    /// See also [`Regexp#inspect`][regexp-inspect].
    ///
    /// [regexp-inspect]: https://ruby-doc.org/core-2.6.3/Regexp.html#method-i-inspect
    #[must_use]
    pub const fn modifier_string(self) -> &'static str {
        match self {
            Self::Fixed | Self::None => "",
            Self::No => "n",
        }
    }
}
| 26.941176 | 94 | 0.582242 |
9157dc87dc2fc5e415de7679974bfe0de3a91f0c | 83,066 | // This file is Copyright its original authors, visible in version control
// history.
//
// This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
// You may not use this file except in accordance with one or both of these
// licenses.
//! Top level peer message handling and socket handling logic lives here.
//!
//! Instead of actually servicing sockets ourselves we require that you implement the
//! SocketDescriptor interface and use that to receive actions which you should perform on the
//! socket, and call into PeerManager with bytes read from the socket. The PeerManager will then
//! call into the provided message handlers (probably a ChannelManager and NetGraphmsgHandler) with messages
//! they should handle, and encoding/sending response messages.
use bitcoin::secp256k1::key::{SecretKey,PublicKey};
use ln::features::InitFeatures;
use ln::msgs;
use ln::msgs::{ChannelMessageHandler, LightningError, RoutingMessageHandler};
use ln::channelmanager::{SimpleArcChannelManager, SimpleRefChannelManager};
use util::ser::{VecWriter, Writeable, Writer};
use ln::peer_channel_encryptor::{PeerChannelEncryptor,NextNoiseStep};
use ln::wire;
use ln::wire::Encode;
use util::atomic_counter::AtomicCounter;
use util::events::{MessageSendEvent, MessageSendEventsProvider};
use util::logger::Logger;
use routing::network_graph::{NetworkGraph, NetGraphMsgHandler};
use prelude::*;
use io;
use alloc::collections::LinkedList;
use sync::{Arc, Mutex};
use core::{cmp, hash, fmt, mem};
use core::ops::Deref;
use core::convert::Infallible;
#[cfg(feature = "std")] use std::error;
use bitcoin::hashes::sha256::Hash as Sha256;
use bitcoin::hashes::sha256::HashEngine as Sha256Engine;
use bitcoin::hashes::{HashEngine, Hash};
/// Handler for BOLT1-compliant messages.
pub trait CustomMessageHandler: wire::CustomMessageReader {
	/// Called with the message type that was received and the buffer to be read.
	/// Can return a `MessageHandlingError` if the message could not be handled.
	fn handle_custom_message(&self, msg: Self::CustomMessage, sender_node_id: &PublicKey) -> Result<(), LightningError>;
	/// Gets the list of pending messages which were generated by the custom message
	/// handler, clearing the list in the process. The first tuple element must
	/// correspond to the intended recipient's node id. If no connection to one of the
	/// specified nodes exists, the message is simply not sent to it.
	fn get_and_clear_pending_msg(&self) -> Vec<(PublicKey, Self::CustomMessage)>;
}
/// A dummy struct which implements `RoutingMessageHandler` without storing any routing information
/// or doing any processing. You can provide one of these as the route_handler in a MessageHandler.
pub struct IgnoringMessageHandler{}
impl MessageSendEventsProvider for IgnoringMessageHandler {
	// Never generates any events.
	fn get_and_clear_pending_msg_events(&self) -> Vec<MessageSendEvent> { Vec::new() }
}
impl RoutingMessageHandler for IgnoringMessageHandler {
	// Every gossip message is accepted and dropped: announcement handlers
	// report "nothing worth forwarding" (`Ok(false)`), query handlers report
	// success with no side effects, and the getters return empty batches.
	fn handle_node_announcement(&self, _msg: &msgs::NodeAnnouncement) -> Result<bool, LightningError> { Ok(false) }
	fn handle_channel_announcement(&self, _msg: &msgs::ChannelAnnouncement) -> Result<bool, LightningError> { Ok(false) }
	fn handle_channel_update(&self, _msg: &msgs::ChannelUpdate) -> Result<bool, LightningError> { Ok(false) }
	fn get_next_channel_announcements(&self, _starting_point: u64, _batch_amount: u8) ->
		Vec<(msgs::ChannelAnnouncement, Option<msgs::ChannelUpdate>, Option<msgs::ChannelUpdate>)> { Vec::new() }
	fn get_next_node_announcements(&self, _starting_point: Option<&PublicKey>, _batch_amount: u8) -> Vec<msgs::NodeAnnouncement> { Vec::new() }
	fn sync_routing_table(&self, _their_node_id: &PublicKey, _init: &msgs::Init) {}
	fn handle_reply_channel_range(&self, _their_node_id: &PublicKey, _msg: msgs::ReplyChannelRange) -> Result<(), LightningError> { Ok(()) }
	fn handle_reply_short_channel_ids_end(&self, _their_node_id: &PublicKey, _msg: msgs::ReplyShortChannelIdsEnd) -> Result<(), LightningError> { Ok(()) }
	fn handle_query_channel_range(&self, _their_node_id: &PublicKey, _msg: msgs::QueryChannelRange) -> Result<(), LightningError> { Ok(()) }
	fn handle_query_short_channel_ids(&self, _their_node_id: &PublicKey, _msg: msgs::QueryShortChannelIds) -> Result<(), LightningError> { Ok(()) }
}
impl Deref for IgnoringMessageHandler {
	// Deref to self so a bare `IgnoringMessageHandler` can be used where a
	// `Deref`-to-handler type parameter is expected.
	type Target = IgnoringMessageHandler;
	fn deref(&self) -> &Self { self }
}
// Implement Type for Infallible, note that it cannot be constructed, and thus you can never call a
// method that takes self for it.
impl wire::Type for Infallible {
	fn type_id(&self) -> u16 {
		unreachable!();
	}
}
impl Writeable for Infallible {
	// `Infallible` has no values, so this can never actually execute.
	fn write<W: Writer>(&self, _: &mut W) -> Result<(), io::Error> {
		unreachable!();
	}
}
impl wire::CustomMessageReader for IgnoringMessageHandler {
	type CustomMessage = Infallible;
	// Decodes no custom messages: every message type is reported as unknown.
	fn read<R: io::Read>(&self, _message_type: u16, _buffer: &mut R) -> Result<Option<Self::CustomMessage>, msgs::DecodeError> {
		Ok(None)
	}
}
impl CustomMessageHandler for IgnoringMessageHandler {
	fn handle_custom_message(&self, _msg: Infallible, _sender_node_id: &PublicKey) -> Result<(), LightningError> {
		// Since we always return `None` in the read the handle method should never be called.
		unreachable!();
	}
	fn get_and_clear_pending_msg(&self) -> Vec<(PublicKey, Self::CustomMessage)> { Vec::new() }
}
/// A dummy struct which implements `ChannelMessageHandler` without having any channels.
/// You can provide one of these as the route_handler in a MessageHandler.
pub struct ErroringMessageHandler {
	// Error events queued by `push_error`; drained by the
	// `MessageSendEventsProvider` implementation.
	message_queue: Mutex<Vec<MessageSendEvent>>
}
impl ErroringMessageHandler {
	/// Constructs a new ErroringMessageHandler
	pub fn new() -> Self {
		Self { message_queue: Mutex::new(Vec::new()) }
	}
	// Queues an error message telling `node_id` that we do not support
	// channel messages for the given channel.
	fn push_error(&self, node_id: &PublicKey, channel_id: [u8; 32]) {
		self.message_queue.lock().unwrap().push(MessageSendEvent::HandleError {
			action: msgs::ErrorAction::SendErrorMessage {
				msg: msgs::ErrorMessage { channel_id, data: "We do not support channel messages, sorry.".to_owned() },
			},
			node_id: node_id.clone(),
		});
	}
}
impl MessageSendEventsProvider for ErroringMessageHandler {
	/// Drains the queued error events, leaving the queue empty.
	fn get_and_clear_pending_msg_events(&self) -> Vec<MessageSendEvent> {
		let mut queue = self.message_queue.lock().unwrap();
		mem::replace(&mut *queue, Vec::new())
	}
}
impl ChannelMessageHandler for ErroringMessageHandler {
	// Any messages which are related to a specific channel generate an error message to let the
	// peer know we don't care about channels.
	fn handle_open_channel(&self, their_node_id: &PublicKey, _their_features: InitFeatures, msg: &msgs::OpenChannel) {
		ErroringMessageHandler::push_error(self, their_node_id, msg.temporary_channel_id);
	}
	fn handle_accept_channel(&self, their_node_id: &PublicKey, _their_features: InitFeatures, msg: &msgs::AcceptChannel) {
		ErroringMessageHandler::push_error(self, their_node_id, msg.temporary_channel_id);
	}
	fn handle_funding_created(&self, their_node_id: &PublicKey, msg: &msgs::FundingCreated) {
		ErroringMessageHandler::push_error(self, their_node_id, msg.temporary_channel_id);
	}
	fn handle_funding_signed(&self, their_node_id: &PublicKey, msg: &msgs::FundingSigned) {
		ErroringMessageHandler::push_error(self, their_node_id, msg.channel_id);
	}
	fn handle_funding_locked(&self, their_node_id: &PublicKey, msg: &msgs::FundingLocked) {
		ErroringMessageHandler::push_error(self, their_node_id, msg.channel_id);
	}
	fn handle_shutdown(&self, their_node_id: &PublicKey, _their_features: &InitFeatures, msg: &msgs::Shutdown) {
		ErroringMessageHandler::push_error(self, their_node_id, msg.channel_id);
	}
	fn handle_closing_signed(&self, their_node_id: &PublicKey, msg: &msgs::ClosingSigned) {
		ErroringMessageHandler::push_error(self, their_node_id, msg.channel_id);
	}
	fn handle_update_add_htlc(&self, their_node_id: &PublicKey, msg: &msgs::UpdateAddHTLC) {
		ErroringMessageHandler::push_error(self, their_node_id, msg.channel_id);
	}
	fn handle_update_fulfill_htlc(&self, their_node_id: &PublicKey, msg: &msgs::UpdateFulfillHTLC) {
		ErroringMessageHandler::push_error(self, their_node_id, msg.channel_id);
	}
	fn handle_update_fail_htlc(&self, their_node_id: &PublicKey, msg: &msgs::UpdateFailHTLC) {
		ErroringMessageHandler::push_error(self, their_node_id, msg.channel_id);
	}
	fn handle_update_fail_malformed_htlc(&self, their_node_id: &PublicKey, msg: &msgs::UpdateFailMalformedHTLC) {
		ErroringMessageHandler::push_error(self, their_node_id, msg.channel_id);
	}
	fn handle_commitment_signed(&self, their_node_id: &PublicKey, msg: &msgs::CommitmentSigned) {
		ErroringMessageHandler::push_error(self, their_node_id, msg.channel_id);
	}
	fn handle_revoke_and_ack(&self, their_node_id: &PublicKey, msg: &msgs::RevokeAndACK) {
		ErroringMessageHandler::push_error(self, their_node_id, msg.channel_id);
	}
	fn handle_update_fee(&self, their_node_id: &PublicKey, msg: &msgs::UpdateFee) {
		ErroringMessageHandler::push_error(self, their_node_id, msg.channel_id);
	}
	fn handle_announcement_signatures(&self, their_node_id: &PublicKey, msg: &msgs::AnnouncementSignatures) {
		ErroringMessageHandler::push_error(self, their_node_id, msg.channel_id);
	}
	fn handle_channel_reestablish(&self, their_node_id: &PublicKey, msg: &msgs::ChannelReestablish) {
		ErroringMessageHandler::push_error(self, their_node_id, msg.channel_id);
	}
	// msgs::ChannelUpdate does not contain the channel_id field, so we just drop them.
	fn handle_channel_update(&self, _their_node_id: &PublicKey, _msg: &msgs::ChannelUpdate) {}
	// Connection lifecycle and error notifications carry no channel id and are
	// simply ignored.
	fn peer_disconnected(&self, _their_node_id: &PublicKey, _no_connection_possible: bool) {}
	fn peer_connected(&self, _their_node_id: &PublicKey, _msg: &msgs::Init) {}
	fn handle_error(&self, _their_node_id: &PublicKey, _msg: &msgs::ErrorMessage) {}
}
impl Deref for ErroringMessageHandler {
	// Deref to self so a bare `ErroringMessageHandler` can be used where a
	// `Deref`-to-handler type parameter is expected.
	type Target = ErroringMessageHandler;
	fn deref(&self) -> &Self { self }
}
/// Provides references to trait impls which handle different types of messages.
// Handlers are held behind `Deref` so both owned (e.g. `Arc`) and borrowed
// handler types can be plugged in.
pub struct MessageHandler<CM: Deref, RM: Deref> where
	CM::Target: ChannelMessageHandler,
	RM::Target: RoutingMessageHandler {
	/// A message handler which handles messages specific to channels. Usually this is just a
	/// [`ChannelManager`] object or an [`ErroringMessageHandler`].
	///
	/// [`ChannelManager`]: crate::ln::channelmanager::ChannelManager
	pub chan_handler: CM,
	/// A message handler which handles messages updating our knowledge of the network channel
	/// graph. Usually this is just a [`NetGraphMsgHandler`] object or an
	/// [`IgnoringMessageHandler`].
	///
	/// [`NetGraphMsgHandler`]: crate::routing::network_graph::NetGraphMsgHandler
	pub route_handler: RM,
}
/// Provides an object which can be used to send data to and which uniquely identifies a connection
/// to a remote host. You will need to be able to generate multiple of these which meet Eq and
/// implement Hash to meet the PeerManager API.
///
/// For efficiency, Clone should be relatively cheap for this type.
///
/// Two descriptors may compare equal (by [`cmp::Eq`] and [`hash::Hash`]) as long as the original
/// has been disconnected, the [`PeerManager`] has been informed of the disconnection (either by it
/// having triggered the disconnection or a call to [`PeerManager::socket_disconnected`]), and no
/// further calls to the [`PeerManager`] related to the original socket occur. This allows you to
/// use a file descriptor for your SocketDescriptor directly, however for simplicity you may wish
/// to simply use another value which is guaranteed to be globally unique instead.
// Implementors typically wrap a raw file descriptor or an id into a
// connection table — anything cheap to clone, hash, and compare.
pub trait SocketDescriptor : cmp::Eq + hash::Hash + Clone {
	/// Attempts to send some data from the given slice to the peer.
	///
	/// Returns the amount of data which was sent, possibly 0 if the socket has since disconnected.
	/// Note that in the disconnected case, [`PeerManager::socket_disconnected`] must still be
	/// called and further write attempts may occur until that time.
	///
	/// If the returned size is smaller than `data.len()`, a
	/// [`PeerManager::write_buffer_space_avail`] call must be made the next time more data can be
	/// written. Additionally, until a `send_data` event completes fully, no further
	/// [`PeerManager::read_event`] calls should be made for the same peer! Because this is to
	/// prevent denial-of-service issues, you should not read or buffer any data from the socket
	/// until then.
	///
	/// If a [`PeerManager::read_event`] call on this descriptor had previously returned true
	/// (indicating that read events should be paused to prevent DoS in the send buffer),
	/// `resume_read` may be set indicating that read events on this descriptor should resume. A
	/// `resume_read` of false carries no meaning, and should not cause any action.
	fn send_data(&mut self, data: &[u8], resume_read: bool) -> usize;
	/// Disconnect the socket pointed to by this SocketDescriptor.
	///
	/// You do *not* need to call [`PeerManager::socket_disconnected`] with this socket after this
	/// call (doing so is a noop).
	fn disconnect_socket(&mut self);
}
/// Error for PeerManager errors. If you get one of these, you must disconnect the socket and
/// generate no further read_event/write_buffer_space_avail/socket_disconnected calls for the
/// descriptor.
#[derive(Clone)]
pub struct PeerHandleError {
	/// Used to indicate that we probably can't make any future connections to this peer, implying
	/// we should go ahead and force-close any channels we have with it.
	pub no_connection_possible: bool,
}
impl fmt::Debug for PeerHandleError {
	// Debug and Display intentionally render the same fixed message.
	fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
		formatter.write_str("Peer Sent Invalid Data")
	}
}
impl fmt::Display for PeerHandleError {
	fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
		formatter.write_str("Peer Sent Invalid Data")
	}
}
#[cfg(feature = "std")]
impl error::Error for PeerHandleError {
	fn description(&self) -> &str {
		"Peer Sent Invalid Data"
	}
}
// Tracks progress of the initial routing-table sync we stream to a newly
// connected peer: channels first, then node announcements.
enum InitSyncTracker{
	NoSyncRequested,
	// Currently streaming channel announcements; the value is the sync
	// cursor compared against short channel ids (see
	// `Peer::should_forward_channel_announcement`).
	ChannelsSyncing(u64),
	// Channels are done; streaming node announcements, cursor is a node
	// public key (see `Peer::should_forward_node_announcement`).
	NodesSyncing(PublicKey),
}
/// The ratio between buffer sizes at which we stop sending initial sync messages vs when we stop
/// forwarding gossip messages to peers altogether.
const FORWARD_INIT_SYNC_BUFFER_LIMIT_RATIO: usize = 2;
/// When the outbound buffer has this many messages, we'll stop reading bytes from the peer until
/// we have fewer than this many messages in the outbound buffer again.
/// We also use this as the target number of outbound gossip messages to keep in the write buffer,
/// refilled as we send bytes.
const OUTBOUND_BUFFER_LIMIT_READ_PAUSE: usize = 10;
/// When the outbound buffer has this many messages, we'll simply skip relaying gossip messages to
/// the peer.
const OUTBOUND_BUFFER_LIMIT_DROP_GOSSIP: usize = OUTBOUND_BUFFER_LIMIT_READ_PAUSE * FORWARD_INIT_SYNC_BUFFER_LIMIT_RATIO;
/// If we've sent a ping, and are still awaiting a response, we may need to churn our way through
/// the socket receive buffer before receiving the ping.
///
/// On a fairly old Arm64 board, with Linux defaults, this can take as long as 20 seconds, not
/// including any network delays, outbound traffic, or the same for messages from other peers.
///
/// Thus, to avoid needlessly disconnecting a peer, we allow a peer to take this many timer ticks
/// per connected peer to respond to a ping, as long as they send us at least one message during
/// each tick, ensuring we aren't actually just disconnected.
/// With a timer tick interval of five seconds, this translates to about 30 seconds per connected
/// peer.
///
/// When we improve parallelism somewhat we should reduce this to e.g. this many timer ticks per
/// two connected peers, assuming most LDK-running systems have at least two cores.
const MAX_BUFFER_DRAIN_TICK_INTERVALS_PER_PEER: i8 = 6;
/// This is the minimum number of messages we expect a peer to be able to handle within one timer
/// tick. Once we have sent this many messages since the last ping, we send a ping right away to
/// ensures we don't just fill up our send buffer and leave the peer with too many messages to
/// process before the next ping.
const BUFFER_DRAIN_MSGS_PER_TICK: usize = 32;
// Per-connection state for a single peer.
struct Peer {
	channel_encryptor: PeerChannelEncryptor,
	// `None` until the peer's identity is known — presumably once the noise
	// handshake completes; TODO confirm against the connection setup code.
	their_node_id: Option<PublicKey>,
	// `None` until the peer's `Init` message has been processed — confirm.
	their_features: Option<InitFeatures>,
	// Encrypted outbound messages awaiting socket writes, oldest first.
	pending_outbound_buffer: LinkedList<Vec<u8>>,
	// Number of bytes of the front buffered message already written.
	pending_outbound_buffer_first_msg_offset: usize,
	awaiting_write_event: bool,
	// Accumulates inbound bytes for the item currently being read; whether
	// that item is a message header or body is tracked below.
	pending_read_buffer: Vec<u8>,
	pending_read_buffer_pos: usize,
	pending_read_is_header: bool,
	// Progress of the initial routing-table sync for this peer.
	sync_status: InitSyncTracker,
	// Messages sent since the last ping cycle; compared against
	// `BUFFER_DRAIN_MSGS_PER_TICK` to pace pings.
	msgs_sent_since_pong: usize,
	// Ticks spent awaiting a pong; bounded by
	// `MAX_BUFFER_DRAIN_TICK_INTERVALS_PER_PEER` (per its docs).
	awaiting_pong_timer_tick_intervals: i8,
	received_message_since_timer_tick: bool,
}
impl Peer {
	/// Returns true if the channel announcements/updates for the given channel should be
	/// forwarded to this peer.
	/// If we are sending our routing table to this peer and we have not yet sent channel
	/// announcements/updates for the given channel_id then we will send it when we get to that
	/// point and we shouldn't send it yet to avoid sending duplicate updates. If we've already
	/// sent the old versions, we should send the update, and so return true here.
	fn should_forward_channel_announcement(&self, channel_id: u64) -> bool {
		match self.sync_status {
			// Mid channel-sync: forward only channels the sync cursor has
			// already passed, to avoid duplicating the upcoming sync batch.
			InitSyncTracker::ChannelsSyncing(cursor) => cursor < channel_id,
			// No sync pending, or channels are finished (now syncing nodes):
			// every channel announcement should be forwarded.
			InitSyncTracker::NoSyncRequested | InitSyncTracker::NodesSyncing(_) => true,
		}
	}
	/// Similar to the above, but for node announcements indexed by node_id.
	fn should_forward_node_announcement(&self, node_id: PublicKey) -> bool {
		match self.sync_status {
			InitSyncTracker::NoSyncRequested => true,
			// Still streaming channels: all node announcements are yet to be
			// sent as part of the sync, so don't forward them separately.
			InitSyncTracker::ChannelsSyncing(_) => false,
			InitSyncTracker::NodesSyncing(cursor) => cursor < node_id,
		}
	}
}
// All connected peers keyed by socket descriptor, plus a reverse index from
// node id to descriptor.
struct PeerHolder<Descriptor: SocketDescriptor> {
	peers: HashMap<Descriptor, Peer>,
	/// Only add to this set when noise completes:
	node_id_to_descriptor: HashMap<PublicKey, Descriptor>,
}
/// SimpleArcPeerManager is useful when you need a PeerManager with a static lifetime, e.g.
/// when you're using lightning-net-tokio (since tokio::spawn requires parameters with static
/// lifetimes). Other times you can afford a reference, which is more efficient, in which case
/// SimpleRefPeerManager is the more appropriate type. Defining these type aliases prevents
/// issues such as overly long function definitions.
pub type SimpleArcPeerManager<SD, M, T, F, C, L> = PeerManager<SD, Arc<SimpleArcChannelManager<M, T, F, L>>, Arc<NetGraphMsgHandler<Arc<NetworkGraph>, Arc<C>, Arc<L>>>, Arc<L>, Arc<IgnoringMessageHandler>>;
/// SimpleRefPeerManager is a type alias for a PeerManager reference, and is the reference
/// counterpart to the SimpleArcPeerManager type alias. Use this type by default when you don't
/// need a PeerManager with a static lifetime. You'll need a static lifetime in cases such as
/// usage of lightning-net-tokio (since tokio::spawn requires parameters with static lifetimes).
/// But if this is not necessary, using a reference is more efficient. Defining these type aliases
/// helps with issues such as long function definitions.
pub type SimpleRefPeerManager<'a, 'b, 'c, 'd, 'e, 'f, 'g, 'h, SD, M, T, F, C, L> = PeerManager<SD, SimpleRefChannelManager<'a, 'b, 'c, 'd, 'e, M, T, F, L>, &'e NetGraphMsgHandler<&'g NetworkGraph, &'h C, &'f L>, &'f L, IgnoringMessageHandler>;
/// A PeerManager manages a set of peers, described by their [`SocketDescriptor`] and marshalls
/// socket events into messages which it passes on to its [`MessageHandler`].
///
/// Locks are taken internally, so you must never assume that reentrancy from a
/// [`SocketDescriptor`] call back into [`PeerManager`] methods will not deadlock.
///
/// Calls to [`read_event`] will decode relevant messages and pass them to the
/// [`ChannelMessageHandler`], likely doing message processing in-line. Thus, the primary form of
/// parallelism in Rust-Lightning is in calls to [`read_event`]. Note, however, that calls to any
/// [`PeerManager`] functions related to the same connection must occur only in serial, making new
/// calls only after previous ones have returned.
///
/// Rather than using a plain PeerManager, it is preferable to use either a SimpleArcPeerManager
/// or a SimpleRefPeerManager, for conciseness. See their documentation for more details, but
/// essentially you should default to using a SimpleRefPeerManager, and use a
/// SimpleArcPeerManager when you require a PeerManager with a static lifetime, such as when
/// you're using lightning-net-tokio.
///
/// [`read_event`]: PeerManager::read_event
pub struct PeerManager<Descriptor: SocketDescriptor, CM: Deref, RM: Deref, L: Deref, CMH: Deref> where
		CM::Target: ChannelMessageHandler,
		RM::Target: RoutingMessageHandler,
		L::Target: Logger,
		CMH::Target: CustomMessageHandler {
	/// The channel and routing message handlers incoming messages are dispatched to.
	message_handler: MessageHandler<CM, RM>,
	/// All connected peers, guarded by a single lock.
	peers: Mutex<PeerHolder<Descriptor>>,
	/// Our node's secret key, used in the noise handshakes with peers.
	our_node_secret: SecretKey,
	/// SHA-256 midstate pre-seeded with caller-provided entropy; per-connection
	/// ephemeral keys are derived from it (see `get_ephemeral_key`).
	ephemeral_key_midstate: Sha256Engine,
	custom_message_handler: CMH,
	/// Monotonic counter mixed into each derived ephemeral key so keys are unique.
	peer_counter: AtomicCounter,
	logger: L,
}
/// Errors which can arise while handling a single incoming message: either a peer-level
/// handling failure, or a protocol-level `LightningError` returned by one of the handlers.
enum MessageHandlingError {
	PeerHandleError(PeerHandleError),
	LightningError(LightningError),
}
impl From<PeerHandleError> for MessageHandlingError {
fn from(error: PeerHandleError) -> Self {
MessageHandlingError::PeerHandleError(error)
}
}
impl From<LightningError> for MessageHandlingError {
fn from(error: LightningError) -> Self {
MessageHandlingError::LightningError(error)
}
}
// Serializes a wire message into a fresh byte Vec via `wire::write`.
// Writing into an in-memory Vec is expected to be infallible, hence the unwrap.
macro_rules! encode_msg {
	($msg: expr) => {{
		let mut buffer = VecWriter(Vec::new());
		wire::write($msg, &mut buffer).unwrap();
		buffer.0
	}}
}
impl<Descriptor: SocketDescriptor, CM: Deref, L: Deref> PeerManager<Descriptor, CM, IgnoringMessageHandler, L, IgnoringMessageHandler> where
		CM::Target: ChannelMessageHandler,
		L::Target: Logger {
	/// Constructs a new PeerManager with the given ChannelMessageHandler. No routing message
	/// handler is used and network graph messages are ignored.
	///
	/// ephemeral_random_data is used to derive per-connection ephemeral keys and must be
	/// cryptographically secure random bytes.
	///
	/// (C-not exported) as we can't export a PeerManager with a dummy route handler
	pub fn new_channel_only(channel_message_handler: CM, our_node_secret: SecretKey, ephemeral_random_data: &[u8; 32], logger: L) -> Self {
		// Routing messages are simply dropped by the ignoring handler.
		let handlers = MessageHandler {
			chan_handler: channel_message_handler,
			route_handler: IgnoringMessageHandler{},
		};
		Self::new(handlers, our_node_secret, ephemeral_random_data, logger, IgnoringMessageHandler{})
	}
}
impl<Descriptor: SocketDescriptor, RM: Deref, L: Deref> PeerManager<Descriptor, ErroringMessageHandler, RM, L, IgnoringMessageHandler> where
		RM::Target: RoutingMessageHandler,
		L::Target: Logger {
	/// Constructs a new PeerManager with the given RoutingMessageHandler. No channel message
	/// handler is used and messages related to channels will be ignored (or generate error
	/// messages). Note that some other lightning implementations time-out connections after some
	/// time if no channel is built with the peer.
	///
	/// ephemeral_random_data is used to derive per-connection ephemeral keys and must be
	/// cryptographically secure random bytes.
	///
	/// (C-not exported) as we can't export a PeerManager with a dummy channel handler
	pub fn new_routing_only(routing_message_handler: RM, our_node_secret: SecretKey, ephemeral_random_data: &[u8; 32], logger: L) -> Self {
		// Channel messages produce error responses via the erroring handler.
		let handlers = MessageHandler {
			chan_handler: ErroringMessageHandler::new(),
			route_handler: routing_message_handler,
		};
		Self::new(handlers, our_node_secret, ephemeral_random_data, logger, IgnoringMessageHandler{})
	}
}
/// A simple wrapper that optionally prints " from <pubkey>" for an optional pubkey.
/// This works around `format!()` taking a reference to each argument, preventing
/// `if let Some(node_id) = peer.their_node_id { format!(.., node_id) } else { .. }` from compiling
/// due to lifetime errors.
///
/// Rendered via its `Display` impl inside error-handling log lines.
struct OptionalFromDebugger<'a>(&'a Option<PublicKey>);
impl core::fmt::Display for OptionalFromDebugger<'_> {
	/// Writes " from <pubkey>" when a pubkey is present; writes nothing at all otherwise.
	fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
		match self.0 {
			Some(node_id) => write!(f, " from {}", log_pubkey!(node_id)),
			None => Ok(()),
		}
	}
}
impl<Descriptor: SocketDescriptor, CM: Deref, RM: Deref, L: Deref, CMH: Deref> PeerManager<Descriptor, CM, RM, L, CMH> where
CM::Target: ChannelMessageHandler,
RM::Target: RoutingMessageHandler,
L::Target: Logger,
CMH::Target: CustomMessageHandler {
/// Constructs a new PeerManager with the given message handlers and node_id secret key
/// ephemeral_random_data is used to derive per-connection ephemeral keys and must be
/// cryptographically secure random bytes.
pub fn new(message_handler: MessageHandler<CM, RM>, our_node_secret: SecretKey, ephemeral_random_data: &[u8; 32], logger: L, custom_message_handler: CMH) -> Self {
let mut ephemeral_key_midstate = Sha256::engine();
ephemeral_key_midstate.input(ephemeral_random_data);
PeerManager {
message_handler,
peers: Mutex::new(PeerHolder {
peers: HashMap::new(),
node_id_to_descriptor: HashMap::new()
}),
our_node_secret,
ephemeral_key_midstate,
peer_counter: AtomicCounter::new(),
logger,
custom_message_handler,
}
}
/// Get the list of node ids for peers which have completed the initial handshake.
///
/// For outbound connections, this will be the same as the their_node_id parameter passed in to
/// new_outbound_connection, however entries will only appear once the initial handshake has
/// completed and we are sure the remote peer has the private key for the given node_id.
pub fn get_peer_node_ids(&self) -> Vec<PublicKey> {
let peers = self.peers.lock().unwrap();
peers.peers.values().filter_map(|p| {
if !p.channel_encryptor.is_ready_for_encryption() || p.their_features.is_none() {
return None;
}
p.their_node_id
}).collect()
}
	/// Derives a fresh per-connection ephemeral key by hashing the pre-seeded midstate
	/// together with a unique counter value, so every connection gets a distinct key.
	fn get_ephemeral_key(&self) -> SecretKey {
		let mut ephemeral_hash = self.ephemeral_key_midstate.clone();
		let counter = self.peer_counter.get_increment();
		ephemeral_hash.input(&counter.to_le_bytes());
		// A SHA-256 output is essentially always a valid secp256k1 scalar, so a failure
		// here indicates something is deeply broken.
		SecretKey::from_slice(&Sha256::from_engine(ephemeral_hash).into_inner()).expect("You broke SHA-256!")
	}
	/// Indicates a new outbound connection has been established to a node with the given node_id.
	/// Note that if an Err is returned here you MUST NOT call socket_disconnected for the new
	/// descriptor but must disconnect the connection immediately.
	///
	/// Returns a small number of bytes to send to the remote node (currently always 50).
	///
	/// Panics if descriptor is duplicative with some other descriptor which has not yet been
	/// [`socket_disconnected()`].
	///
	/// [`socket_disconnected()`]: PeerManager::socket_disconnected
	pub fn new_outbound_connection(&self, their_node_id: PublicKey, descriptor: Descriptor) -> Result<Vec<u8>, PeerHandleError> {
		// As the outbound initiator we speak first: generate noise act one immediately.
		let mut peer_encryptor = PeerChannelEncryptor::new_outbound(their_node_id.clone(), self.get_ephemeral_key());
		let res = peer_encryptor.get_act_one().to_vec();
		let pending_read_buffer = [0; 50].to_vec(); // Noise act two is 50 bytes
		let mut peers = self.peers.lock().unwrap();
		if peers.peers.insert(descriptor, Peer {
			channel_encryptor: peer_encryptor,
			their_node_id: None, // Set only once the noise handshake completes
			their_features: None,
			pending_outbound_buffer: LinkedList::new(),
			pending_outbound_buffer_first_msg_offset: 0,
			awaiting_write_event: false,
			pending_read_buffer,
			pending_read_buffer_pos: 0,
			pending_read_is_header: false,
			sync_status: InitSyncTracker::NoSyncRequested,
			msgs_sent_since_pong: 0,
			awaiting_pong_timer_tick_intervals: 0,
			received_message_since_timer_tick: false,
		}).is_some() {
			panic!("PeerManager driver duplicated descriptors!");
		};
		Ok(res)
	}
	/// Indicates a new inbound connection has been established.
	///
	/// May refuse the connection by returning an Err, but will never write bytes to the remote end
	/// (outbound connector always speaks first). Note that if an Err is returned here you MUST NOT
	/// call socket_disconnected for the new descriptor but must disconnect the connection
	/// immediately.
	///
	/// Panics if descriptor is duplicative with some other descriptor which has not yet been
	/// [`socket_disconnected()`].
	///
	/// [`socket_disconnected()`]: PeerManager::socket_disconnected
	pub fn new_inbound_connection(&self, descriptor: Descriptor) -> Result<(), PeerHandleError> {
		// As the inbound side we speak second: set up state and wait for noise act one.
		let peer_encryptor = PeerChannelEncryptor::new_inbound(&self.our_node_secret);
		let pending_read_buffer = [0; 50].to_vec(); // Noise act one is 50 bytes
		let mut peers = self.peers.lock().unwrap();
		if peers.peers.insert(descriptor, Peer {
			channel_encryptor: peer_encryptor,
			their_node_id: None, // Learned from noise act three
			their_features: None,
			pending_outbound_buffer: LinkedList::new(),
			pending_outbound_buffer_first_msg_offset: 0,
			awaiting_write_event: false,
			pending_read_buffer,
			pending_read_buffer_pos: 0,
			pending_read_is_header: false,
			sync_status: InitSyncTracker::NoSyncRequested,
			msgs_sent_since_pong: 0,
			awaiting_pong_timer_tick_intervals: 0,
			received_message_since_timer_tick: false,
		}).is_some() {
			panic!("PeerManager driver duplicated descriptors!");
		};
		Ok(())
	}
fn do_attempt_write_data(&self, descriptor: &mut Descriptor, peer: &mut Peer) {
while !peer.awaiting_write_event {
if peer.pending_outbound_buffer.len() < OUTBOUND_BUFFER_LIMIT_READ_PAUSE && peer.msgs_sent_since_pong < BUFFER_DRAIN_MSGS_PER_TICK {
match peer.sync_status {
InitSyncTracker::NoSyncRequested => {},
InitSyncTracker::ChannelsSyncing(c) if c < 0xffff_ffff_ffff_ffff => {
let steps = ((OUTBOUND_BUFFER_LIMIT_READ_PAUSE - peer.pending_outbound_buffer.len() + 2) / 3) as u8;
let all_messages = self.message_handler.route_handler.get_next_channel_announcements(c, steps);
for &(ref announce, ref update_a_option, ref update_b_option) in all_messages.iter() {
self.enqueue_message(peer, announce);
if let &Some(ref update_a) = update_a_option {
self.enqueue_message(peer, update_a);
}
if let &Some(ref update_b) = update_b_option {
self.enqueue_message(peer, update_b);
}
peer.sync_status = InitSyncTracker::ChannelsSyncing(announce.contents.short_channel_id + 1);
}
if all_messages.is_empty() || all_messages.len() != steps as usize {
peer.sync_status = InitSyncTracker::ChannelsSyncing(0xffff_ffff_ffff_ffff);
}
},
InitSyncTracker::ChannelsSyncing(c) if c == 0xffff_ffff_ffff_ffff => {
let steps = (OUTBOUND_BUFFER_LIMIT_READ_PAUSE - peer.pending_outbound_buffer.len()) as u8;
let all_messages = self.message_handler.route_handler.get_next_node_announcements(None, steps);
for msg in all_messages.iter() {
self.enqueue_message(peer, msg);
peer.sync_status = InitSyncTracker::NodesSyncing(msg.contents.node_id);
}
if all_messages.is_empty() || all_messages.len() != steps as usize {
peer.sync_status = InitSyncTracker::NoSyncRequested;
}
},
InitSyncTracker::ChannelsSyncing(_) => unreachable!(),
InitSyncTracker::NodesSyncing(key) => {
let steps = (OUTBOUND_BUFFER_LIMIT_READ_PAUSE - peer.pending_outbound_buffer.len()) as u8;
let all_messages = self.message_handler.route_handler.get_next_node_announcements(Some(&key), steps);
for msg in all_messages.iter() {
self.enqueue_message(peer, msg);
peer.sync_status = InitSyncTracker::NodesSyncing(msg.contents.node_id);
}
if all_messages.is_empty() || all_messages.len() != steps as usize {
peer.sync_status = InitSyncTracker::NoSyncRequested;
}
},
}
}
if peer.msgs_sent_since_pong >= BUFFER_DRAIN_MSGS_PER_TICK {
self.maybe_send_extra_ping(peer);
}
if {
let next_buff = match peer.pending_outbound_buffer.front() {
None => return,
Some(buff) => buff,
};
let should_be_reading = peer.pending_outbound_buffer.len() < OUTBOUND_BUFFER_LIMIT_READ_PAUSE;
let pending = &next_buff[peer.pending_outbound_buffer_first_msg_offset..];
let data_sent = descriptor.send_data(pending, should_be_reading);
peer.pending_outbound_buffer_first_msg_offset += data_sent;
if peer.pending_outbound_buffer_first_msg_offset == next_buff.len() { true } else { false }
} {
peer.pending_outbound_buffer_first_msg_offset = 0;
peer.pending_outbound_buffer.pop_front();
} else {
peer.awaiting_write_event = true;
}
}
}
/// Indicates that there is room to write data to the given socket descriptor.
///
/// May return an Err to indicate that the connection should be closed.
///
/// May call [`send_data`] on the descriptor passed in (or an equal descriptor) before
/// returning. Thus, be very careful with reentrancy issues! The invariants around calling
/// [`write_buffer_space_avail`] in case a write did not fully complete must still hold - be
/// ready to call `[write_buffer_space_avail`] again if a write call generated here isn't
/// sufficient!
///
/// [`send_data`]: SocketDescriptor::send_data
/// [`write_buffer_space_avail`]: PeerManager::write_buffer_space_avail
pub fn write_buffer_space_avail(&self, descriptor: &mut Descriptor) -> Result<(), PeerHandleError> {
let mut peers = self.peers.lock().unwrap();
match peers.peers.get_mut(descriptor) {
None => {
// This is most likely a simple race condition where the user found that the socket
// was writeable, then we told the user to `disconnect_socket()`, then they called
// this method. Return an error to make sure we get disconnected.
return Err(PeerHandleError { no_connection_possible: false });
},
Some(peer) => {
peer.awaiting_write_event = false;
self.do_attempt_write_data(descriptor, peer);
}
};
Ok(())
}
/// Indicates that data was read from the given socket descriptor.
///
/// May return an Err to indicate that the connection should be closed.
///
/// Will *not* call back into [`send_data`] on any descriptors to avoid reentrancy complexity.
/// Thus, however, you should call [`process_events`] after any `read_event` to generate
/// [`send_data`] calls to handle responses.
///
/// If `Ok(true)` is returned, further read_events should not be triggered until a
/// [`send_data`] call on this descriptor has `resume_read` set (preventing DoS issues in the
/// send buffer).
///
/// [`send_data`]: SocketDescriptor::send_data
/// [`process_events`]: PeerManager::process_events
pub fn read_event(&self, peer_descriptor: &mut Descriptor, data: &[u8]) -> Result<bool, PeerHandleError> {
match self.do_read_event(peer_descriptor, data) {
Ok(res) => Ok(res),
Err(e) => {
log_trace!(self.logger, "Peer sent invalid data or we decided to disconnect due to a protocol error");
self.disconnect_event_internal(peer_descriptor, e.no_connection_possible);
Err(e)
}
}
}
/// Append a message to a peer's pending outbound/write buffer
fn enqueue_encoded_message(&self, peer: &mut Peer, encoded_message: &Vec<u8>) {
peer.msgs_sent_since_pong += 1;
peer.pending_outbound_buffer.push_back(peer.channel_encryptor.encrypt_message(&encoded_message[..]));
}
	/// Append a message to a peer's pending outbound/write buffer
	fn enqueue_message<M: wire::Type>(&self, peer: &mut Peer, message: &M) {
		let mut buffer = VecWriter(Vec::with_capacity(2048));
		wire::write(message, &mut buffer).unwrap(); // crash if the write failed
		// Gossip goes through the dedicated gossip log macro; everything else at trace.
		if is_gossip_msg(message.type_id()) {
			log_gossip!(self.logger, "Enqueueing message {:?} to {}", message, log_pubkey!(peer.their_node_id.unwrap()));
		} else {
			log_trace!(self.logger, "Enqueueing message {:?} to {}", message, log_pubkey!(peer.their_node_id.unwrap()))
		}
		self.enqueue_encoded_message(peer, &buffer.0);
	}
	/// Core read path: buffers `data` into the peer's pending read buffer and, each time a
	/// complete noise act or message frame has been assembled, advances the handshake or
	/// decrypts and dispatches the message. Returns Ok(true) if further reads should be
	/// paused because this peer's outbound buffer has grown too large.
	fn do_read_event(&self, peer_descriptor: &mut Descriptor, data: &[u8]) -> Result<bool, PeerHandleError> {
		let pause_read = {
			let mut peers_lock = self.peers.lock().unwrap();
			let peers = &mut *peers_lock;
			// Broadcast-worthy gossip is collected here and forwarded only after we're
			// done mutating this peer, since forwarding walks the whole peer map.
			let mut msgs_to_forward = Vec::new();
			let mut peer_node_id = None;
			let pause_read = match peers.peers.get_mut(peer_descriptor) {
				None => {
					// This is most likely a simple race condition where the user read some bytes
					// from the socket, then we told the user to `disconnect_socket()`, then they
					// called this method. Return an error to make sure we get disconnected.
					return Err(PeerHandleError { no_connection_possible: false });
				},
				Some(peer) => {
					assert!(peer.pending_read_buffer.len() > 0);
					assert!(peer.pending_read_buffer.len() > peer.pending_read_buffer_pos);
					let mut read_pos = 0;
					while read_pos < data.len() {
						{
							// Copy as much of `data` as fits into the current read target.
							let data_to_copy = cmp::min(peer.pending_read_buffer.len() - peer.pending_read_buffer_pos, data.len() - read_pos);
							peer.pending_read_buffer[peer.pending_read_buffer_pos..peer.pending_read_buffer_pos + data_to_copy].copy_from_slice(&data[read_pos..read_pos + data_to_copy]);
							read_pos += data_to_copy;
							peer.pending_read_buffer_pos += data_to_copy;
						}
						if peer.pending_read_buffer_pos == peer.pending_read_buffer.len() {
							peer.pending_read_buffer_pos = 0;
							// Evaluates a Result, unwrapping Ok; on Err it logs, disconnects,
							// replies, or skips the message per the error's requested action.
							macro_rules! try_potential_handleerror {
								($thing: expr) => {
									match $thing {
										Ok(x) => x,
										Err(e) => {
											match e.action {
												msgs::ErrorAction::DisconnectPeer { msg: _ } => {
													//TODO: Try to push msg
													log_debug!(self.logger, "Error handling message{}; disconnecting peer with: {}", OptionalFromDebugger(&peer.their_node_id), e.err);
													return Err(PeerHandleError{ no_connection_possible: false });
												},
												msgs::ErrorAction::IgnoreAndLog(level) => {
													log_given_level!(self.logger, level, "Error handling message{}; ignoring: {}", OptionalFromDebugger(&peer.their_node_id), e.err);
													continue
												},
												msgs::ErrorAction::IgnoreDuplicateGossip => continue, // Don't even bother logging these
												msgs::ErrorAction::IgnoreError => {
													log_debug!(self.logger, "Error handling message{}; ignoring: {}", OptionalFromDebugger(&peer.their_node_id), e.err);
													continue;
												},
												msgs::ErrorAction::SendErrorMessage { msg } => {
													log_debug!(self.logger, "Error handling message{}; sending error message with: {}", OptionalFromDebugger(&peer.their_node_id), e.err);
													self.enqueue_message(peer, &msg);
													continue;
												},
											}
										}
									}
								}
							}
							// Registers the peer in the node_id -> descriptor map once the
							// handshake has established their node_id; a second connection
							// claiming the same node_id is rejected.
							macro_rules! insert_node_id {
								() => {
									match peers.node_id_to_descriptor.entry(peer.their_node_id.unwrap()) {
										hash_map::Entry::Occupied(_) => {
											log_trace!(self.logger, "Got second connection with {}, closing", log_pubkey!(peer.their_node_id.unwrap()));
											peer.their_node_id = None; // Unset so that we don't generate a peer_disconnected event
											return Err(PeerHandleError{ no_connection_possible: false })
										},
										hash_map::Entry::Vacant(entry) => {
											log_debug!(self.logger, "Finished noise handshake for connection with {}", log_pubkey!(peer.their_node_id.unwrap()));
											entry.insert(peer_descriptor.clone())
										},
									};
								}
							}
							let next_step = peer.channel_encryptor.get_noise_step();
							match next_step {
								NextNoiseStep::ActOne => {
									let act_two = try_potential_handleerror!(peer.channel_encryptor.process_act_one_with_keys(&peer.pending_read_buffer[..], &self.our_node_secret, self.get_ephemeral_key())).to_vec();
									peer.pending_outbound_buffer.push_back(act_two);
									peer.pending_read_buffer = [0; 66].to_vec(); // act three is 66 bytes long
								},
								NextNoiseStep::ActTwo => {
									let (act_three, their_node_id) = try_potential_handleerror!(peer.channel_encryptor.process_act_two(&peer.pending_read_buffer[..], &self.our_node_secret));
									peer.pending_outbound_buffer.push_back(act_three.to_vec());
									peer.pending_read_buffer = [0; 18].to_vec(); // Message length header is 18 bytes
									peer.pending_read_is_header = true;
									peer.their_node_id = Some(their_node_id);
									insert_node_id!();
									let features = InitFeatures::known();
									let resp = msgs::Init { features };
									self.enqueue_message(peer, &resp);
									peer.awaiting_pong_timer_tick_intervals = 0;
								},
								NextNoiseStep::ActThree => {
									let their_node_id = try_potential_handleerror!(peer.channel_encryptor.process_act_three(&peer.pending_read_buffer[..]));
									peer.pending_read_buffer = [0; 18].to_vec(); // Message length header is 18 bytes
									peer.pending_read_is_header = true;
									peer.their_node_id = Some(their_node_id);
									insert_node_id!();
									let features = InitFeatures::known();
									let resp = msgs::Init { features };
									self.enqueue_message(peer, &resp);
									peer.awaiting_pong_timer_tick_intervals = 0;
								},
								NextNoiseStep::NoiseComplete => {
									// Post-handshake we alternate between reading the encrypted
									// length header and reading the message body it describes.
									if peer.pending_read_is_header {
										let msg_len = try_potential_handleerror!(peer.channel_encryptor.decrypt_length_header(&peer.pending_read_buffer[..]));
										peer.pending_read_buffer = Vec::with_capacity(msg_len as usize + 16);
										peer.pending_read_buffer.resize(msg_len as usize + 16, 0);
										if msg_len < 2 { // Need at least the message type tag
											return Err(PeerHandleError{ no_connection_possible: false });
										}
										peer.pending_read_is_header = false;
									} else {
										let msg_data = try_potential_handleerror!(peer.channel_encryptor.decrypt_message(&peer.pending_read_buffer[..]));
										assert!(msg_data.len() >= 2);
										// Reset read buffer
										peer.pending_read_buffer = [0; 18].to_vec();
										peer.pending_read_is_header = true;
										let mut reader = io::Cursor::new(&msg_data[..]);
										let message_result = wire::read(&mut reader, &*self.custom_message_handler);
										let message = match message_result {
											Ok(x) => x,
											Err(e) => {
												match e {
													msgs::DecodeError::UnknownVersion => return Err(PeerHandleError { no_connection_possible: false }),
													msgs::DecodeError::UnknownRequiredFeature => {
														log_gossip!(self.logger, "Got a channel/node announcement with an unknown required feature flag, you may want to update!");
														continue;
													}
													msgs::DecodeError::InvalidValue => {
														log_debug!(self.logger, "Got an invalid value while deserializing message");
														return Err(PeerHandleError { no_connection_possible: false });
													}
													msgs::DecodeError::ShortRead => {
														log_debug!(self.logger, "Deserialization failed due to shortness of message");
														return Err(PeerHandleError { no_connection_possible: false });
													}
													msgs::DecodeError::BadLengthDescriptor => return Err(PeerHandleError { no_connection_possible: false }),
													msgs::DecodeError::Io(_) => return Err(PeerHandleError { no_connection_possible: false }),
													msgs::DecodeError::UnsupportedCompression => {
														log_gossip!(self.logger, "We don't support zlib-compressed message fields, ignoring message");
														continue;
													}
												}
											}
										};
										match self.handle_message(peer, message) {
											Err(handling_error) => match handling_error {
												MessageHandlingError::PeerHandleError(e) => { return Err(e) },
												MessageHandlingError::LightningError(e) => {
													try_potential_handleerror!(Err(e));
												},
											},
											Ok(Some(msg)) => {
												peer_node_id = Some(peer.their_node_id.expect("After noise is complete, their_node_id is always set"));
												msgs_to_forward.push(msg);
											},
											Ok(None) => {},
										}
									}
								}
							}
						}
					}
					peer.pending_outbound_buffer.len() > OUTBOUND_BUFFER_LIMIT_READ_PAUSE // pause_read
				}
			};
			for msg in msgs_to_forward.drain(..) {
				self.forward_broadcast_msg(peers, &msg, peer_node_id.as_ref());
			}
			pause_read
		};
		Ok(pause_read)
	}
	/// Process an incoming message and return a decision (ok, lightning error, peer handling error) regarding the next action with the peer
	/// Returns the message back if it needs to be broadcasted to all other peers.
	fn handle_message(
		&self,
		peer: &mut Peer,
		message: wire::Message<<<CMH as core::ops::Deref>::Target as wire::CustomMessageReader>::CustomMessage>
	) -> Result<Option<wire::Message<<<CMH as core::ops::Deref>::Target as wire::CustomMessageReader>::CustomMessage>>, MessageHandlingError> {
		if is_gossip_msg(message.type_id()) {
			log_gossip!(self.logger, "Received message {:?} from {}", message, log_pubkey!(peer.their_node_id.unwrap()));
		} else {
			log_trace!(self.logger, "Received message {:?} from {}", message, log_pubkey!(peer.their_node_id.unwrap()));
		}
		peer.received_message_since_timer_tick = true;
		// Need an Init as first message
		if let wire::Message::Init(_) = message {
		} else if peer.their_features.is_none() {
			log_debug!(self.logger, "Peer {} sent non-Init first message", log_pubkey!(peer.their_node_id.unwrap()));
			return Err(PeerHandleError{ no_connection_possible: false }.into());
		}
		// Gossip that validated successfully is handed back for rebroadcast to other peers.
		let mut should_forward = None;
		match message {
			// Setup and Control messages:
			wire::Message::Init(msg) => {
				if msg.features.requires_unknown_bits() {
					log_debug!(self.logger, "Peer features required unknown version bits");
					return Err(PeerHandleError{ no_connection_possible: true }.into());
				}
				// A duplicate Init after features were already negotiated is a protocol error.
				if peer.their_features.is_some() {
					return Err(PeerHandleError{ no_connection_possible: false }.into());
				}
				log_info!(self.logger, "Received peer Init message from {}: {}", log_pubkey!(peer.their_node_id.unwrap()), msg.features);
				if msg.features.initial_routing_sync() {
					peer.sync_status = InitSyncTracker::ChannelsSyncing(0);
				}
				if !msg.features.supports_static_remote_key() {
					log_debug!(self.logger, "Peer {} does not support static remote key, disconnecting with no_connection_possible", log_pubkey!(peer.their_node_id.unwrap()));
					return Err(PeerHandleError{ no_connection_possible: true }.into());
				}
				self.message_handler.route_handler.sync_routing_table(&peer.their_node_id.unwrap(), &msg);
				self.message_handler.chan_handler.peer_connected(&peer.their_node_id.unwrap(), &msg);
				peer.their_features = Some(msg.features);
			},
			wire::Message::Error(msg) => {
				// Only echo the error text into our logs if it is plain printable ASCII.
				let mut data_is_printable = true;
				for b in msg.data.bytes() {
					if b < 32 || b > 126 {
						data_is_printable = false;
						break;
					}
				}
				if data_is_printable {
					log_debug!(self.logger, "Got Err message from {}: {}", log_pubkey!(peer.their_node_id.unwrap()), msg.data);
				} else {
					log_debug!(self.logger, "Got Err message from {} with non-ASCII error message", log_pubkey!(peer.their_node_id.unwrap()));
				}
				self.message_handler.chan_handler.handle_error(&peer.their_node_id.unwrap(), &msg);
				// An all-zeros channel_id is a connection-level error applying to all
				// channels, so give up on the connection entirely.
				if msg.channel_id == [0; 32] {
					return Err(PeerHandleError{ no_connection_possible: true }.into());
				}
			},
			wire::Message::Ping(msg) => {
				// Per BOLT #1, only respond when the requested pong length is within bounds.
				if msg.ponglen < 65532 {
					let resp = msgs::Pong { byteslen: msg.ponglen };
					self.enqueue_message(peer, &resp);
				}
			},
			wire::Message::Pong(_msg) => {
				peer.awaiting_pong_timer_tick_intervals = 0;
				peer.msgs_sent_since_pong = 0;
			},
			// Channel messages:
			wire::Message::OpenChannel(msg) => {
				self.message_handler.chan_handler.handle_open_channel(&peer.their_node_id.unwrap(), peer.their_features.clone().unwrap(), &msg);
			},
			wire::Message::AcceptChannel(msg) => {
				self.message_handler.chan_handler.handle_accept_channel(&peer.their_node_id.unwrap(), peer.their_features.clone().unwrap(), &msg);
			},
			wire::Message::FundingCreated(msg) => {
				self.message_handler.chan_handler.handle_funding_created(&peer.their_node_id.unwrap(), &msg);
			},
			wire::Message::FundingSigned(msg) => {
				self.message_handler.chan_handler.handle_funding_signed(&peer.their_node_id.unwrap(), &msg);
			},
			wire::Message::FundingLocked(msg) => {
				self.message_handler.chan_handler.handle_funding_locked(&peer.their_node_id.unwrap(), &msg);
			},
			wire::Message::Shutdown(msg) => {
				self.message_handler.chan_handler.handle_shutdown(&peer.their_node_id.unwrap(), peer.their_features.as_ref().unwrap(), &msg);
			},
			wire::Message::ClosingSigned(msg) => {
				self.message_handler.chan_handler.handle_closing_signed(&peer.their_node_id.unwrap(), &msg);
			},
			// Commitment messages:
			wire::Message::UpdateAddHTLC(msg) => {
				self.message_handler.chan_handler.handle_update_add_htlc(&peer.their_node_id.unwrap(), &msg);
			},
			wire::Message::UpdateFulfillHTLC(msg) => {
				self.message_handler.chan_handler.handle_update_fulfill_htlc(&peer.their_node_id.unwrap(), &msg);
			},
			wire::Message::UpdateFailHTLC(msg) => {
				self.message_handler.chan_handler.handle_update_fail_htlc(&peer.their_node_id.unwrap(), &msg);
			},
			wire::Message::UpdateFailMalformedHTLC(msg) => {
				self.message_handler.chan_handler.handle_update_fail_malformed_htlc(&peer.their_node_id.unwrap(), &msg);
			},
			wire::Message::CommitmentSigned(msg) => {
				self.message_handler.chan_handler.handle_commitment_signed(&peer.their_node_id.unwrap(), &msg);
			},
			wire::Message::RevokeAndACK(msg) => {
				self.message_handler.chan_handler.handle_revoke_and_ack(&peer.their_node_id.unwrap(), &msg);
			},
			wire::Message::UpdateFee(msg) => {
				self.message_handler.chan_handler.handle_update_fee(&peer.their_node_id.unwrap(), &msg);
			},
			wire::Message::ChannelReestablish(msg) => {
				self.message_handler.chan_handler.handle_channel_reestablish(&peer.their_node_id.unwrap(), &msg);
			},
			// Routing messages:
			wire::Message::AnnouncementSignatures(msg) => {
				self.message_handler.chan_handler.handle_announcement_signatures(&peer.their_node_id.unwrap(), &msg);
			},
			wire::Message::ChannelAnnouncement(msg) => {
				// The route handler returns true when the gossip was new and valid and
				// should therefore be rebroadcast.
				if self.message_handler.route_handler.handle_channel_announcement(&msg)
						.map_err(|e| -> MessageHandlingError { e.into() })? {
					should_forward = Some(wire::Message::ChannelAnnouncement(msg));
				}
			},
			wire::Message::NodeAnnouncement(msg) => {
				if self.message_handler.route_handler.handle_node_announcement(&msg)
						.map_err(|e| -> MessageHandlingError { e.into() })? {
					should_forward = Some(wire::Message::NodeAnnouncement(msg));
				}
			},
			wire::Message::ChannelUpdate(msg) => {
				self.message_handler.chan_handler.handle_channel_update(&peer.their_node_id.unwrap(), &msg);
				if self.message_handler.route_handler.handle_channel_update(&msg)
						.map_err(|e| -> MessageHandlingError { e.into() })? {
					should_forward = Some(wire::Message::ChannelUpdate(msg));
				}
			},
			wire::Message::QueryShortChannelIds(msg) => {
				self.message_handler.route_handler.handle_query_short_channel_ids(&peer.their_node_id.unwrap(), msg)?;
			},
			wire::Message::ReplyShortChannelIdsEnd(msg) => {
				self.message_handler.route_handler.handle_reply_short_channel_ids_end(&peer.their_node_id.unwrap(), msg)?;
			},
			wire::Message::QueryChannelRange(msg) => {
				self.message_handler.route_handler.handle_query_channel_range(&peer.their_node_id.unwrap(), msg)?;
			},
			wire::Message::ReplyChannelRange(msg) => {
				self.message_handler.route_handler.handle_reply_channel_range(&peer.their_node_id.unwrap(), msg)?;
			},
			wire::Message::GossipTimestampFilter(_msg) => {
				// TODO: handle message
			},
			// Unknown messages:
			wire::Message::Unknown(type_id) if message.is_even() => {
				log_debug!(self.logger, "Received unknown even message of type {}, disconnecting peer!", type_id);
				// Fail the channel if message is an even, unknown type as per BOLT #1.
				return Err(PeerHandleError{ no_connection_possible: true }.into());
			},
			wire::Message::Unknown(type_id) => {
				log_trace!(self.logger, "Received unknown odd message of type {}, ignoring", type_id);
			},
			wire::Message::Custom(custom) => {
				self.custom_message_handler.handle_custom_message(custom, &peer.their_node_id.unwrap())?;
			},
		};
		Ok(should_forward)
	}
	/// Broadcasts a validated gossip message to connected, handshake-complete peers, skipping
	/// `except_node` (normally the peer the message came from), peers whose initial sync has
	/// not yet reached the relevant data, peers with a full outbound buffer, and (for
	/// announcements) the nodes the announcement itself concerns.
	fn forward_broadcast_msg(&self, peers: &mut PeerHolder<Descriptor>, msg: &wire::Message<<<CMH as core::ops::Deref>::Target as wire::CustomMessageReader>::CustomMessage>, except_node: Option<&PublicKey>) {
		match msg {
			wire::Message::ChannelAnnouncement(ref msg) => {
				log_gossip!(self.logger, "Sending message to all peers except {:?} or the announced channel's counterparties: {:?}", except_node, msg);
				// Serialize once; each peer then only pays for encryption.
				let encoded_msg = encode_msg!(msg);
				for (_, peer) in peers.peers.iter_mut() {
					if !peer.channel_encryptor.is_ready_for_encryption() || peer.their_features.is_none() ||
							!peer.should_forward_channel_announcement(msg.contents.short_channel_id) {
						continue
					}
					if peer.pending_outbound_buffer.len() > OUTBOUND_BUFFER_LIMIT_DROP_GOSSIP
						|| peer.msgs_sent_since_pong > BUFFER_DRAIN_MSGS_PER_TICK * FORWARD_INIT_SYNC_BUFFER_LIMIT_RATIO
					{
						log_gossip!(self.logger, "Skipping broadcast message to {:?} as its outbound buffer is full", peer.their_node_id);
						continue;
					}
					// Don't echo the announcement back to either of the channel's endpoints.
					if peer.their_node_id.as_ref() == Some(&msg.contents.node_id_1) ||
					   peer.their_node_id.as_ref() == Some(&msg.contents.node_id_2) {
						continue;
					}
					if except_node.is_some() && peer.their_node_id.as_ref() == except_node {
						continue;
					}
					self.enqueue_encoded_message(peer, &encoded_msg);
				}
			},
			wire::Message::NodeAnnouncement(ref msg) => {
				log_gossip!(self.logger, "Sending message to all peers except {:?} or the announced node: {:?}", except_node, msg);
				let encoded_msg = encode_msg!(msg);
				for (_, peer) in peers.peers.iter_mut() {
					if !peer.channel_encryptor.is_ready_for_encryption() || peer.their_features.is_none() ||
							!peer.should_forward_node_announcement(msg.contents.node_id) {
						continue
					}
					if peer.pending_outbound_buffer.len() > OUTBOUND_BUFFER_LIMIT_DROP_GOSSIP
						|| peer.msgs_sent_since_pong > BUFFER_DRAIN_MSGS_PER_TICK * FORWARD_INIT_SYNC_BUFFER_LIMIT_RATIO
					{
						log_gossip!(self.logger, "Skipping broadcast message to {:?} as its outbound buffer is full", peer.their_node_id);
						continue;
					}
					// Don't echo the announcement back to the announced node itself.
					if peer.their_node_id.as_ref() == Some(&msg.contents.node_id) {
						continue;
					}
					if except_node.is_some() && peer.their_node_id.as_ref() == except_node {
						continue;
					}
					self.enqueue_encoded_message(peer, &encoded_msg);
				}
			},
			wire::Message::ChannelUpdate(ref msg) => {
				log_gossip!(self.logger, "Sending message to all peers except {:?}: {:?}", except_node, msg);
				let encoded_msg = encode_msg!(msg);
				for (_, peer) in peers.peers.iter_mut() {
					if !peer.channel_encryptor.is_ready_for_encryption() || peer.their_features.is_none() ||
							!peer.should_forward_channel_announcement(msg.contents.short_channel_id) {
						continue
					}
					if peer.pending_outbound_buffer.len() > OUTBOUND_BUFFER_LIMIT_DROP_GOSSIP
						|| peer.msgs_sent_since_pong > BUFFER_DRAIN_MSGS_PER_TICK * FORWARD_INIT_SYNC_BUFFER_LIMIT_RATIO
					{
						log_gossip!(self.logger, "Skipping broadcast message to {:?} as its outbound buffer is full", peer.their_node_id);
						continue;
					}
					if except_node.is_some() && peer.their_node_id.as_ref() == except_node {
						continue;
					}
					self.enqueue_encoded_message(peer, &encoded_msg);
				}
			},
			_ => debug_assert!(false, "We shouldn't attempt to forward anything but gossip messages"),
		}
	}
/// Checks for any events generated by our handlers and processes them. Includes sending most
/// response messages as well as messages generated by calls to handler functions directly (eg
/// functions like [`ChannelManager::process_pending_htlc_forwards`] or [`send_payment`]).
///
/// May call [`send_data`] on [`SocketDescriptor`]s. Thus, be very careful with reentrancy
/// issues!
///
/// You don't have to call this function explicitly if you are using [`lightning-net-tokio`]
/// or one of the other clients provided in our language bindings.
///
/// [`send_payment`]: crate::ln::channelmanager::ChannelManager::send_payment
/// [`ChannelManager::process_pending_htlc_forwards`]: crate::ln::channelmanager::ChannelManager::process_pending_htlc_forwards
/// [`send_data`]: SocketDescriptor::send_data
pub fn process_events(&self) {
    {
        // TODO: There are some DoS attacks here where you can flood someone's outbound send
        // buffer by doing things like announcing channels on another node. We should be willing to
        // drop optional-ish messages when send buffers get full!

        // The peers lock is held for the whole event drain, so handlers invoked below must
        // not call back into methods that take it (see the reentrancy warning above).
        let mut peers_lock = self.peers.lock().unwrap();
        // Drain pending events from both the channel handler and the routing handler into
        // one list before processing.
        let mut events_generated = self.message_handler.chan_handler.get_and_clear_pending_msg_events();
        events_generated.append(&mut self.message_handler.route_handler.get_and_clear_pending_msg_events());
        let peers = &mut *peers_lock;
        // Resolves a node_id to its connected, init-complete `Peer`. Expands to a
        // `continue` targeting the enclosing `for` loop when the peer is unknown or not
        // yet ready, silently dropping the event for that peer.
        macro_rules! get_peer_for_forwarding {
            ($node_id: expr) => {
                {
                    match peers.node_id_to_descriptor.get($node_id) {
                        Some(descriptor) => match peers.peers.get_mut(&descriptor) {
                            Some(peer) => {
                                if peer.their_features.is_none() {
                                    // Init exchange not finished; can't send yet.
                                    continue;
                                }
                                peer
                            },
                            None => panic!("Inconsistent peers set state!"),
                        },
                        None => {
                            continue;
                        },
                    }
                }
            }
        }
        for event in events_generated.drain(..) {
            match event {
                MessageSendEvent::SendAcceptChannel { ref node_id, ref msg } => {
                    log_debug!(self.logger, "Handling SendAcceptChannel event in peer_handler for node {} for channel {}",
                            log_pubkey!(node_id),
                            log_bytes!(msg.temporary_channel_id));
                    self.enqueue_message(get_peer_for_forwarding!(node_id), msg);
                },
                MessageSendEvent::SendOpenChannel { ref node_id, ref msg } => {
                    log_debug!(self.logger, "Handling SendOpenChannel event in peer_handler for node {} for channel {}",
                            log_pubkey!(node_id),
                            log_bytes!(msg.temporary_channel_id));
                    self.enqueue_message(get_peer_for_forwarding!(node_id), msg);
                },
                MessageSendEvent::SendFundingCreated { ref node_id, ref msg } => {
                    log_debug!(self.logger, "Handling SendFundingCreated event in peer_handler for node {} for channel {} (which becomes {})",
                            log_pubkey!(node_id),
                            log_bytes!(msg.temporary_channel_id),
                            log_funding_channel_id!(msg.funding_txid, msg.funding_output_index));
                    // TODO: If the peer is gone we should generate a DiscardFunding event
                    // indicating to the wallet that they should just throw away this funding transaction
                    self.enqueue_message(get_peer_for_forwarding!(node_id), msg);
                },
                MessageSendEvent::SendFundingSigned { ref node_id, ref msg } => {
                    log_debug!(self.logger, "Handling SendFundingSigned event in peer_handler for node {} for channel {}",
                            log_pubkey!(node_id),
                            log_bytes!(msg.channel_id));
                    self.enqueue_message(get_peer_for_forwarding!(node_id), msg);
                },
                MessageSendEvent::SendFundingLocked { ref node_id, ref msg } => {
                    log_debug!(self.logger, "Handling SendFundingLocked event in peer_handler for node {} for channel {}",
                            log_pubkey!(node_id),
                            log_bytes!(msg.channel_id));
                    self.enqueue_message(get_peer_for_forwarding!(node_id), msg);
                },
                MessageSendEvent::SendAnnouncementSignatures { ref node_id, ref msg } => {
                    log_debug!(self.logger, "Handling SendAnnouncementSignatures event in peer_handler for node {} for channel {})",
                            log_pubkey!(node_id),
                            log_bytes!(msg.channel_id));
                    self.enqueue_message(get_peer_for_forwarding!(node_id), msg);
                },
                MessageSendEvent::UpdateHTLCs { ref node_id, updates: msgs::CommitmentUpdate { ref update_add_htlcs, ref update_fulfill_htlcs, ref update_fail_htlcs, ref update_fail_malformed_htlcs, ref update_fee, ref commitment_signed } } => {
                    log_debug!(self.logger, "Handling UpdateHTLCs event in peer_handler for node {} with {} adds, {} fulfills, {} fails for channel {}",
                            log_pubkey!(node_id),
                            update_add_htlcs.len(),
                            update_fulfill_htlcs.len(),
                            update_fail_htlcs.len(),
                            log_bytes!(commitment_signed.channel_id));
                    // All component messages of a commitment update are enqueued in order,
                    // ending with the commitment_signed itself.
                    let peer = get_peer_for_forwarding!(node_id);
                    for msg in update_add_htlcs {
                        self.enqueue_message(peer, msg);
                    }
                    for msg in update_fulfill_htlcs {
                        self.enqueue_message(peer, msg);
                    }
                    for msg in update_fail_htlcs {
                        self.enqueue_message(peer, msg);
                    }
                    for msg in update_fail_malformed_htlcs {
                        self.enqueue_message(peer, msg);
                    }
                    if let &Some(ref msg) = update_fee {
                        self.enqueue_message(peer, msg);
                    }
                    self.enqueue_message(peer, commitment_signed);
                },
                MessageSendEvent::SendRevokeAndACK { ref node_id, ref msg } => {
                    log_debug!(self.logger, "Handling SendRevokeAndACK event in peer_handler for node {} for channel {}",
                            log_pubkey!(node_id),
                            log_bytes!(msg.channel_id));
                    self.enqueue_message(get_peer_for_forwarding!(node_id), msg);
                },
                MessageSendEvent::SendClosingSigned { ref node_id, ref msg } => {
                    log_debug!(self.logger, "Handling SendClosingSigned event in peer_handler for node {} for channel {}",
                            log_pubkey!(node_id),
                            log_bytes!(msg.channel_id));
                    self.enqueue_message(get_peer_for_forwarding!(node_id), msg);
                },
                MessageSendEvent::SendShutdown { ref node_id, ref msg } => {
                    log_debug!(self.logger, "Handling Shutdown event in peer_handler for node {} for channel {}",
                            log_pubkey!(node_id),
                            log_bytes!(msg.channel_id));
                    self.enqueue_message(get_peer_for_forwarding!(node_id), msg);
                },
                MessageSendEvent::SendChannelReestablish { ref node_id, ref msg } => {
                    log_debug!(self.logger, "Handling SendChannelReestablish event in peer_handler for node {} for channel {}",
                            log_pubkey!(node_id),
                            log_bytes!(msg.channel_id));
                    self.enqueue_message(get_peer_for_forwarding!(node_id), msg);
                },
                MessageSendEvent::BroadcastChannelAnnouncement { msg, update_msg } => {
                    log_debug!(self.logger, "Handling BroadcastChannelAnnouncement event in peer_handler for short channel id {}", msg.contents.short_channel_id);
                    // Feed the message through our own routing handler first; only
                    // rebroadcast if it was accepted (or is a known-good duplicate).
                    match self.message_handler.route_handler.handle_channel_announcement(&msg) {
                        Ok(_) | Err(LightningError { action: msgs::ErrorAction::IgnoreDuplicateGossip, .. }) =>
                            self.forward_broadcast_msg(peers, &wire::Message::ChannelAnnouncement(msg), None),
                        _ => {},
                    }
                    match self.message_handler.route_handler.handle_channel_update(&update_msg) {
                        Ok(_) | Err(LightningError { action: msgs::ErrorAction::IgnoreDuplicateGossip, .. }) =>
                            self.forward_broadcast_msg(peers, &wire::Message::ChannelUpdate(update_msg), None),
                        _ => {},
                    }
                },
                MessageSendEvent::BroadcastNodeAnnouncement { msg } => {
                    log_debug!(self.logger, "Handling BroadcastNodeAnnouncement event in peer_handler");
                    match self.message_handler.route_handler.handle_node_announcement(&msg) {
                        Ok(_) | Err(LightningError { action: msgs::ErrorAction::IgnoreDuplicateGossip, .. }) =>
                            self.forward_broadcast_msg(peers, &wire::Message::NodeAnnouncement(msg), None),
                        _ => {},
                    }
                },
                MessageSendEvent::BroadcastChannelUpdate { msg } => {
                    log_debug!(self.logger, "Handling BroadcastChannelUpdate event in peer_handler for short channel id {}", msg.contents.short_channel_id);
                    match self.message_handler.route_handler.handle_channel_update(&msg) {
                        Ok(_) | Err(LightningError { action: msgs::ErrorAction::IgnoreDuplicateGossip, .. }) =>
                            self.forward_broadcast_msg(peers, &wire::Message::ChannelUpdate(msg), None),
                        _ => {},
                    }
                },
                MessageSendEvent::SendChannelUpdate { ref node_id, ref msg } => {
                    log_trace!(self.logger, "Handling SendChannelUpdate event in peer_handler for node {} for channel {}",
                            log_pubkey!(node_id), msg.contents.short_channel_id);
                    // Encrypted and pushed directly onto the outbound buffer rather than
                    // going through enqueue_message.
                    let peer = get_peer_for_forwarding!(node_id);
                    peer.pending_outbound_buffer.push_back(peer.channel_encryptor.encrypt_message(&encode_msg!(msg)));
                },
                MessageSendEvent::HandleError { ref node_id, ref action } => {
                    match *action {
                        msgs::ErrorAction::DisconnectPeer { ref msg } => {
                            // Remove the peer from both maps before touching the socket so
                            // no further events can be routed to it.
                            if let Some(mut descriptor) = peers.node_id_to_descriptor.remove(node_id) {
                                if let Some(mut peer) = peers.peers.remove(&descriptor) {
                                    if let Some(ref msg) = *msg {
                                        log_trace!(self.logger, "Handling DisconnectPeer HandleError event in peer_handler for node {} with message {}",
                                                log_pubkey!(node_id),
                                                msg.data);
                                        self.enqueue_message(&mut peer, msg);
                                        // This isn't guaranteed to work, but if there is enough free
                                        // room in the send buffer, put the error message there...
                                        self.do_attempt_write_data(&mut descriptor, &mut peer);
                                    } else {
                                        log_gossip!(self.logger, "Handling DisconnectPeer HandleError event in peer_handler for node {} with no message", log_pubkey!(node_id));
                                    }
                                }
                                descriptor.disconnect_socket();
                                self.message_handler.chan_handler.peer_disconnected(&node_id, false);
                            }
                        },
                        msgs::ErrorAction::IgnoreAndLog(level) => {
                            log_given_level!(self.logger, level, "Received a HandleError event to be ignored for node {}", log_pubkey!(node_id));
                        },
                        msgs::ErrorAction::IgnoreDuplicateGossip => {},
                        msgs::ErrorAction::IgnoreError => {
                            log_debug!(self.logger, "Received a HandleError event to be ignored for node {}", log_pubkey!(node_id));
                        },
                        msgs::ErrorAction::SendErrorMessage { ref msg } => {
                            log_trace!(self.logger, "Handling SendErrorMessage HandleError event in peer_handler for node {} with message {}",
                                    log_pubkey!(node_id),
                                    msg.data);
                            self.enqueue_message(get_peer_for_forwarding!(node_id), msg);
                        },
                    }
                },
                MessageSendEvent::SendChannelRangeQuery { ref node_id, ref msg } => {
                    self.enqueue_message(get_peer_for_forwarding!(node_id), msg);
                },
                MessageSendEvent::SendShortIdsQuery { ref node_id, ref msg } => {
                    self.enqueue_message(get_peer_for_forwarding!(node_id), msg);
                }
                MessageSendEvent::SendReplyChannelRange { ref node_id, ref msg } => {
                    log_gossip!(self.logger, "Handling SendReplyChannelRange event in peer_handler for node {} with num_scids={} first_blocknum={} number_of_blocks={}, sync_complete={}",
                            log_pubkey!(node_id),
                            msg.short_channel_ids.len(),
                            msg.first_blocknum,
                            msg.number_of_blocks,
                            msg.sync_complete);
                    self.enqueue_message(get_peer_for_forwarding!(node_id), msg);
                }
            }
        }

        // Pending custom (application-defined) messages are sent after the built-in ones.
        for (node_id, msg) in self.custom_message_handler.get_and_clear_pending_msg() {
            self.enqueue_message(get_peer_for_forwarding!(&node_id), &msg);
        }

        // Finally, try to flush every peer's outbound buffer to its socket.
        for (descriptor, peer) in peers.peers.iter_mut() {
            self.do_attempt_write_data(&mut (*descriptor).clone(), peer);
        }
    }
}
/// Indicates that the given socket descriptor's connection is now closed.
pub fn socket_disconnected(&self, descriptor: &Descriptor) {
    // The transport is already gone; `false` means reconnection remains possible.
    self.disconnect_event_internal(descriptor, false);
}
/// Removes the peer identified by `descriptor` from our peer maps and notifies the
/// channel handler of the disconnection. Safe to call for an unknown descriptor.
fn disconnect_event_internal(&self, descriptor: &Descriptor, no_connection_possible: bool) {
    let mut peers = self.peers.lock().unwrap();
    // A missing entry is most likely a benign race: the caller noticed the socket was
    // disconnected, we told them to `disconnect_socket()`, and then they invoked this
    // anyway. Either way we're disconnected, so there is nothing left to do.
    if let Some(peer) = peers.peers.remove(descriptor) {
        if let Some(node_id) = peer.their_node_id {
            log_trace!(self.logger,
                "Handling disconnection of peer {}, with {}future connection to the peer possible.",
                log_pubkey!(node_id), if no_connection_possible { "no " } else { "" });
            // Keep the id->descriptor map consistent with the peers map, then let the
            // channel handler react (it may force-close if no reconnection is possible).
            peers.node_id_to_descriptor.remove(&node_id);
            self.message_handler.chan_handler.peer_disconnected(&node_id, no_connection_possible);
        }
    }
}
/// Disconnect a peer given its node id.
///
/// Set `no_connection_possible` to true to prevent any further connection with this peer,
/// force-closing any channels we have with it.
///
/// If a peer is connected, this will call [`disconnect_socket`] on the descriptor for the
/// peer. Thus, be very careful about reentrancy issues.
///
/// [`disconnect_socket`]: SocketDescriptor::disconnect_socket
pub fn disconnect_by_node_id(&self, node_id: PublicKey, no_connection_possible: bool) {
    let mut peers_lock = self.peers.lock().unwrap();
    if let Some(mut descriptor) = peers_lock.node_id_to_descriptor.remove(&node_id) {
        log_trace!(self.logger, "Disconnecting peer with id {} due to client request", node_id);
        // Order matters: drop our bookkeeping for the peer before calling
        // disconnect_socket(), which may re-enter this PeerManager.
        peers_lock.peers.remove(&descriptor);
        self.message_handler.chan_handler.peer_disconnected(&node_id, no_connection_possible);
        descriptor.disconnect_socket();
    }
}
/// Disconnects all currently-connected peers. This is useful on platforms where there may be
/// an indication that TCP sockets have stalled even if we weren't around to time them out
/// using regular ping/pongs.
pub fn disconnect_all_peers(&self) {
    let mut peers_lock = self.peers.lock().unwrap();
    let peers = &mut *peers_lock;
    // Draining empties the peers map; sockets are only torn down after our
    // bookkeeping for each peer has been cleared.
    for (mut descriptor, peer) in peers.peers.drain() {
        if let Some(node_id) = peer.their_node_id {
            log_trace!(self.logger, "Disconnecting peer with id {} due to client request to disconnect all peers", node_id);
            peers.node_id_to_descriptor.remove(&node_id);
            self.message_handler.chan_handler.peer_disconnected(&node_id, false);
        }
        descriptor.disconnect_socket();
    }
    // Every entry in node_id_to_descriptor corresponds to a peer we just drained.
    debug_assert!(peers.node_id_to_descriptor.is_empty());
}
/// Called when we are blocked from sending additional gossip until the peer answers a
/// ping. If no ping is currently outstanding, sends one immediately and records that via
/// the special `-1` flag value in `awaiting_pong_timer_tick_intervals` (which
/// `timer_tick_occurred` recognizes and resets).
fn maybe_send_extra_ping(&self, peer: &mut Peer) {
    if peer.awaiting_pong_timer_tick_intervals != 0 {
        // A ping is already in flight (or the counter is otherwise in use) — nothing to do.
        return;
    }
    peer.awaiting_pong_timer_tick_intervals = -1;
    let ping = msgs::Ping {
        ponglen: 0,
        byteslen: 64,
    };
    self.enqueue_message(peer, &ping);
}
/// Send pings to each peer and disconnect those which did not respond to the last round of
/// pings.
///
/// This may be called on any timescale you want, however, roughly once every five to ten
/// seconds is preferred. The call rate determines both how often we send a ping to our peers
/// and how much time they have to respond before we disconnect them.
///
/// May call [`send_data`] on all [`SocketDescriptor`]s. Thus, be very careful with reentrancy
/// issues!
///
/// [`send_data`]: SocketDescriptor::send_data
pub fn timer_tick_occurred(&self) {
    let mut peers_lock = self.peers.lock().unwrap();
    {
        let peers = &mut *peers_lock;
        // Split the borrow so the retain closure can mutate node_id_to_descriptor
        // while iterating the peers map.
        let node_id_to_descriptor = &mut peers.node_id_to_descriptor;
        let peers = &mut peers.peers;
        // Sockets are closed after the retain pass, outside the map iteration, since
        // disconnect_socket() may re-enter this PeerManager.
        let mut descriptors_needing_disconnect = Vec::new();
        let peer_count = peers.len();
        peers.retain(|descriptor, peer| {
            let mut do_disconnect_peer = false;
            if !peer.channel_encryptor.is_ready_for_encryption() || peer.their_node_id.is_none() {
                // The peer needs to complete its handshake before we can exchange messages. We
                // give peers one timer tick to complete handshake, reusing
                // `awaiting_pong_timer_tick_intervals` to track number of timer ticks taken
                // for handshake completion.
                if peer.awaiting_pong_timer_tick_intervals != 0 {
                    do_disconnect_peer = true;
                } else {
                    peer.awaiting_pong_timer_tick_intervals = 1;
                    return true;
                }
            }
            if peer.awaiting_pong_timer_tick_intervals == -1 {
                // Magic value set in `maybe_send_extra_ping`.
                peer.awaiting_pong_timer_tick_intervals = 1;
                peer.received_message_since_timer_tick = false;
                return true;
            }

            // Disconnect if: the handshake timed out, the peer went silent since our last
            // ping, or it has been unresponsive for too many ticks (scaled by peer count,
            // since draining many buffers takes longer).
            if do_disconnect_peer
                || (peer.awaiting_pong_timer_tick_intervals > 0 && !peer.received_message_since_timer_tick)
                || peer.awaiting_pong_timer_tick_intervals as u64 >
                    MAX_BUFFER_DRAIN_TICK_INTERVALS_PER_PEER as u64 * peer_count as u64
            {
                descriptors_needing_disconnect.push(descriptor.clone());
                match peer.their_node_id {
                    Some(node_id) => {
                        log_trace!(self.logger, "Disconnecting peer with id {} due to ping timeout", node_id);
                        node_id_to_descriptor.remove(&node_id);
                        self.message_handler.chan_handler.peer_disconnected(&node_id, false);
                    }
                    None => {},
                }
                // Returning false removes this peer from the map.
                return false;
            }
            peer.received_message_since_timer_tick = false;

            if peer.awaiting_pong_timer_tick_intervals > 0 {
                // A ping is already outstanding; just count another tick of waiting.
                peer.awaiting_pong_timer_tick_intervals += 1;
                return true;
            }

            // Start a new ping round for this peer and flush it out immediately.
            peer.awaiting_pong_timer_tick_intervals = 1;
            let ping = msgs::Ping {
                ponglen: 0,
                byteslen: 64,
            };
            self.enqueue_message(peer, &ping);
            self.do_attempt_write_data(&mut (descriptor.clone()), &mut *peer);
            true
        });

        for mut descriptor in descriptors_needing_disconnect.drain(..) {
            descriptor.disconnect_socket();
        }
    }
}
/// Returns whether `type_id` is one of the gossip broadcast message types
/// (`channel_announcement`, `channel_update`, `node_announcement`).
fn is_gossip_msg(type_id: u16) -> bool {
    // `matches!` expresses the set-membership test directly, replacing the
    // hand-rolled `match ... => true, _ => false` form flagged by
    // clippy::match_like_matches_macro. Behavior is unchanged.
    matches!(type_id,
        msgs::ChannelAnnouncement::TYPE |
        msgs::ChannelUpdate::TYPE |
        msgs::NodeAnnouncement::TYPE)
}
#[cfg(test)]
mod tests {
    // In-process tests for PeerManager: peers are wired together through in-memory
    // `FileDescriptor`s, so "network" traffic is just byte buffers shuttled between
    // two PeerManager instances via read_event/process_events.
    use ln::peer_handler::{PeerManager, MessageHandler, SocketDescriptor, IgnoringMessageHandler};
    use ln::msgs;
    use util::events;
    use util::test_utils;

    use bitcoin::secp256k1::Secp256k1;
    use bitcoin::secp256k1::key::{SecretKey, PublicKey};

    use prelude::*;
    use sync::{Arc, Mutex};
    use core::sync::atomic::Ordering;

    // Test stand-in for a socket: send_data just appends to a shared Vec so tests
    // can inspect exactly what a PeerManager tried to write.
    #[derive(Clone)]
    struct FileDescriptor {
        fd: u16,
        outbound_data: Arc<Mutex<Vec<u8>>>,
    }
    impl PartialEq for FileDescriptor {
        fn eq(&self, other: &Self) -> bool {
            self.fd == other.fd
        }
    }
    impl Eq for FileDescriptor { }
    impl core::hash::Hash for FileDescriptor {
        fn hash<H: core::hash::Hasher>(&self, hasher: &mut H) {
            self.fd.hash(hasher)
        }
    }

    impl SocketDescriptor for FileDescriptor {
        fn send_data(&mut self, data: &[u8], _resume_read: bool) -> usize {
            // Always accept the full write; nothing ever back-pressures in tests.
            self.outbound_data.lock().unwrap().extend_from_slice(data);
            data.len()
        }

        fn disconnect_socket(&mut self) {}
    }

    // Per-node handler/logger bundle; lives in a Vec so PeerManagers can borrow from it.
    struct PeerManagerCfg {
        chan_handler: test_utils::TestChannelMessageHandler,
        routing_handler: test_utils::TestRoutingMessageHandler,
        logger: test_utils::TestLogger,
    }

    fn create_peermgr_cfgs(peer_count: usize) -> Vec<PeerManagerCfg> {
        let mut cfgs = Vec::new();
        for _ in 0..peer_count {
            cfgs.push(
                PeerManagerCfg{
                    chan_handler: test_utils::TestChannelMessageHandler::new(),
                    logger: test_utils::TestLogger::new(),
                    routing_handler: test_utils::TestRoutingMessageHandler::new(),
                }
            );
        }

        cfgs
    }

    fn create_network<'a>(peer_count: usize, cfgs: &'a Vec<PeerManagerCfg>) -> Vec<PeerManager<FileDescriptor, &'a test_utils::TestChannelMessageHandler, &'a test_utils::TestRoutingMessageHandler, &'a test_utils::TestLogger, IgnoringMessageHandler>> {
        let mut peers = Vec::new();
        for i in 0..peer_count {
            // Deterministic per-node secret key and ephemeral randomness.
            let node_secret = SecretKey::from_slice(&[42 + i as u8; 32]).unwrap();
            let ephemeral_bytes = [i as u8; 32];
            let msg_handler = MessageHandler { chan_handler: &cfgs[i].chan_handler, route_handler: &cfgs[i].routing_handler };
            let peer = PeerManager::new(msg_handler, node_secret, &ephemeral_bytes, &cfgs[i].logger, IgnoringMessageHandler {});
            peers.push(peer);
        }

        peers
    }

    // Drives the three-act noise handshake between the two managers by replaying each
    // side's written bytes into the other until both consider the connection set up.
    fn establish_connection<'a>(peer_a: &PeerManager<FileDescriptor, &'a test_utils::TestChannelMessageHandler, &'a test_utils::TestRoutingMessageHandler, &'a test_utils::TestLogger, IgnoringMessageHandler>, peer_b: &PeerManager<FileDescriptor, &'a test_utils::TestChannelMessageHandler, &'a test_utils::TestRoutingMessageHandler, &'a test_utils::TestLogger, IgnoringMessageHandler>) -> (FileDescriptor, FileDescriptor) {
        let secp_ctx = Secp256k1::new();
        let a_id = PublicKey::from_secret_key(&secp_ctx, &peer_a.our_node_secret);
        let mut fd_a = FileDescriptor { fd: 1, outbound_data: Arc::new(Mutex::new(Vec::new())) };
        let mut fd_b = FileDescriptor { fd: 1, outbound_data: Arc::new(Mutex::new(Vec::new())) };
        let initial_data = peer_b.new_outbound_connection(a_id, fd_b.clone()).unwrap();
        peer_a.new_inbound_connection(fd_a.clone()).unwrap();
        assert_eq!(peer_a.read_event(&mut fd_a, &initial_data).unwrap(), false);
        peer_a.process_events();
        assert_eq!(peer_b.read_event(&mut fd_b, &fd_a.outbound_data.lock().unwrap().split_off(0)).unwrap(), false);
        peer_b.process_events();
        assert_eq!(peer_a.read_event(&mut fd_a, &fd_b.outbound_data.lock().unwrap().split_off(0)).unwrap(), false);
        (fd_a.clone(), fd_b.clone())
    }

    #[test]
    fn test_disconnect_peer() {
        // Simple test which builds a network of PeerManager, connects and brings them to NoiseState::Finished and
        // push a DisconnectPeer event to remove the node flagged by id
        let cfgs = create_peermgr_cfgs(2);
        let chan_handler = test_utils::TestChannelMessageHandler::new();
        let mut peers = create_network(2, &cfgs);
        establish_connection(&peers[0], &peers[1]);
        assert_eq!(peers[0].peers.lock().unwrap().peers.len(), 1);

        let secp_ctx = Secp256k1::new();
        let their_id = PublicKey::from_secret_key(&secp_ctx, &peers[1].our_node_secret);

        chan_handler.pending_events.lock().unwrap().push(events::MessageSendEvent::HandleError {
            node_id: their_id,
            action: msgs::ErrorAction::DisconnectPeer { msg: None },
        });
        assert_eq!(chan_handler.pending_events.lock().unwrap().len(), 1);
        peers[0].message_handler.chan_handler = &chan_handler;

        // Processing the HandleError/DisconnectPeer event must drop the peer entry.
        peers[0].process_events();
        assert_eq!(peers[0].peers.lock().unwrap().peers.len(), 0);
    }

    #[test]
    fn test_timer_tick_occurred() {
        // Create peers, a vector of two peer managers, perform initial set up and check that peers[0] has one Peer.
        let cfgs = create_peermgr_cfgs(2);
        let peers = create_network(2, &cfgs);
        establish_connection(&peers[0], &peers[1]);
        assert_eq!(peers[0].peers.lock().unwrap().peers.len(), 1);

        // peers[0] awaiting_pong is set to true, but the Peer is still connected
        peers[0].timer_tick_occurred();
        peers[0].process_events();
        assert_eq!(peers[0].peers.lock().unwrap().peers.len(), 1);

        // Since timer_tick_occurred() is called again when awaiting_pong is true, all Peers are disconnected
        peers[0].timer_tick_occurred();
        peers[0].process_events();
        assert_eq!(peers[0].peers.lock().unwrap().peers.len(), 0);
    }

    #[test]
    fn test_do_attempt_write_data() {
        // Create 2 peers with custom TestRoutingMessageHandlers and connect them.
        let cfgs = create_peermgr_cfgs(2);
        cfgs[0].routing_handler.request_full_sync.store(true, Ordering::Release);
        cfgs[1].routing_handler.request_full_sync.store(true, Ordering::Release);
        let peers = create_network(2, &cfgs);

        // By calling establish_connect, we trigger do_attempt_write_data between
        // the peers. Previously this function would mistakenly enter an infinite loop
        // when there were more channel messages available than could fit into a peer's
        // buffer. This issue would now be detected by this test (because we use custom
        // RoutingMessageHandlers that intentionally return more channel messages
        // than can fit into a peer's buffer).
        let (mut fd_a, mut fd_b) = establish_connection(&peers[0], &peers[1]);

        // Make each peer to read the messages that the other peer just wrote to them. Note that
        // due to the max-messages-before-ping limits this may take a few iterations to complete.
        for _ in 0..150/super::BUFFER_DRAIN_MSGS_PER_TICK + 1 {
            peers[0].process_events();
            let b_read_data = fd_a.outbound_data.lock().unwrap().split_off(0);
            assert!(!b_read_data.is_empty());

            peers[1].read_event(&mut fd_b, &b_read_data).unwrap();
            peers[1].process_events();

            let a_read_data = fd_b.outbound_data.lock().unwrap().split_off(0);
            assert!(!a_read_data.is_empty());
            peers[0].read_event(&mut fd_a, &a_read_data).unwrap();

            peers[1].process_events();
            assert_eq!(fd_b.outbound_data.lock().unwrap().len(), 0, "Until B receives data, it shouldn't send more messages");
        }

        // Check that each peer has received the expected number of channel updates and channel
        // announcements.
        assert_eq!(cfgs[0].routing_handler.chan_upds_recvd.load(Ordering::Acquire), 100);
        assert_eq!(cfgs[0].routing_handler.chan_anns_recvd.load(Ordering::Acquire), 50);
        assert_eq!(cfgs[1].routing_handler.chan_upds_recvd.load(Ordering::Acquire), 100);
        assert_eq!(cfgs[1].routing_handler.chan_anns_recvd.load(Ordering::Acquire), 50);
    }

    #[test]
    fn test_handshake_timeout() {
        // Tests that we time out a peer still waiting on handshake completion after a full timer
        // tick.
        let cfgs = create_peermgr_cfgs(2);
        cfgs[0].routing_handler.request_full_sync.store(true, Ordering::Release);
        cfgs[1].routing_handler.request_full_sync.store(true, Ordering::Release);
        let peers = create_network(2, &cfgs);

        let secp_ctx = Secp256k1::new();
        let a_id = PublicKey::from_secret_key(&secp_ctx, &peers[0].our_node_secret);
        let mut fd_a = FileDescriptor { fd: 1, outbound_data: Arc::new(Mutex::new(Vec::new())) };
        let mut fd_b = FileDescriptor { fd: 1, outbound_data: Arc::new(Mutex::new(Vec::new())) };
        let initial_data = peers[1].new_outbound_connection(a_id, fd_b.clone()).unwrap();
        peers[0].new_inbound_connection(fd_a.clone()).unwrap();

        // If we get a single timer tick before completion, that's fine
        assert_eq!(peers[0].peers.lock().unwrap().peers.len(), 1);
        peers[0].timer_tick_occurred();
        assert_eq!(peers[0].peers.lock().unwrap().peers.len(), 1);

        assert_eq!(peers[0].read_event(&mut fd_a, &initial_data).unwrap(), false);
        peers[0].process_events();
        assert_eq!(peers[1].read_event(&mut fd_b, &fd_a.outbound_data.lock().unwrap().split_off(0)).unwrap(), false);
        peers[1].process_events();

        // ...but if we get a second timer tick, we should disconnect the peer
        peers[0].timer_tick_occurred();
        assert_eq!(peers[0].peers.lock().unwrap().peers.len(), 0);

        assert!(peers[0].read_event(&mut fd_a, &fd_b.outbound_data.lock().unwrap().split_off(0)).is_err());
    }
}
| 45.391257 | 418 | 0.711832 |
bb347de50cc1c8080650b8755f2df7cb79142728 | 5,710 | use std::{
collections::{BTreeSet, HashMap},
env::consts::EXE_SUFFIX,
io::{self, BufWriter, Cursor},
path::PathBuf,
process,
};
use semver::{Version, VersionReq};
use serde::{Deserialize, Serialize};
use zip::ZipArchive;
use crate::{
artifact_choosing::platform_keywords,
ci_string::CiString,
fs::{self, File},
github, paths,
};
/// Location of the persisted tool cache: `<base_dir>/tool-cache.json`.
fn index_file() -> PathBuf {
    paths::base_dir().join("tool-cache.json")
}
/// Contains the current state of all of the tools that Foreman manages.
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct ToolCache {
    // Map from tool source identifier (passed to `github::get_releases`, so
    // presumably an `owner/repo` slug — confirm) to the versions installed locally.
    // Keys are `CiString`, which by its name appears to compare case-insensitively.
    pub tools: HashMap<CiString, ToolEntry>,
}
impl ToolCache {
    /// Runs the cached executable for `source`@`version`, forwarding `args`, and
    /// returns its exit code (1 if the process was terminated without a code).
    ///
    /// Panics if the tool binary cannot be spawned.
    #[must_use]
    pub fn run(source: &str, version: &Version, args: Vec<String>) -> i32 {
        log::debug!("Running tool {}@{}", source, version);

        let mut tool_path = paths::tools_dir();
        let exe_name = tool_identifier_to_exe_name(source, version);
        tool_path.push(exe_name);

        let status = process::Command::new(tool_path)
            .args(args)
            .status()
            .unwrap();

        status.code().unwrap_or(1)
    }

    /// Returns an installed version of `source` satisfying `version_req`, downloading
    /// one only when no cached version matches. Returns `None` if no compatible
    /// release could be found.
    pub fn download_if_necessary(source: &str, version_req: &VersionReq) -> Option<Version> {
        let cache = Self::load().unwrap();

        if let Some(tool) = cache.tools.get(&CiString(source.to_owned())) {
            log::debug!("Tool has some versions installed");

            // `versions` is a BTreeSet, so reverse iteration visits newest first;
            // the first match is therefore the newest installed satisfying version.
            let matching_version = tool
                .versions
                .iter()
                .rev()
                .find(|version| version_req.matches(version));

            if let Some(version) = matching_version {
                return Some(version.clone());
            }
        }

        Self::download(source, version_req)
    }

    /// Downloads the newest GitHub release of `source` matching `version_req`,
    /// extracts the first file of the release's zip artifact into the tools
    /// directory, records it in the cache, and returns the chosen version.
    ///
    /// Panics on network, archive, or filesystem failure; returns `None` when no
    /// release matches the requirement (after logging an error).
    pub fn download(source: &str, version_req: &VersionReq) -> Option<Version> {
        log::info!("Downloading {}@{}", source, version_req);

        let releases = github::get_releases(source).unwrap();

        // Filter down our set of releases to those that are valid versions and
        // have release assets for our current platform.
        let mut semver_releases: Vec<_> = releases
            .into_iter()
            .filter_map(|release| {
                log::trace!("Evaluating tag {}", release.tag_name);

                // Accept both bare semver tags ("1.2.3") and "v"-prefixed ones ("v1.2.3").
                let version = Version::parse(&release.tag_name).ok().or_else(|| {
                    if !release.tag_name.starts_with('v') {
                        return None;
                    }

                    Version::parse(&release.tag_name[1..]).ok()
                })?;

                // Pick the first asset whose name mentions a keyword for this platform.
                let asset_index = release.assets.iter().position(|asset| {
                    platform_keywords()
                        .iter()
                        .any(|keyword| asset.name.contains(keyword))
                })?;

                Some((version, asset_index, release))
            })
            .collect();

        // Releases should come back chronological, but we want strictly
        // descending version numbers.
        semver_releases.sort_by(|a, b| b.0.cmp(&a.0));

        let matching_release = semver_releases
            .into_iter()
            .find(|(version, _asset_index, _release)| version_req.matches(version));

        if let Some((version, asset_index, release)) = matching_release {
            log::trace!("Picked version {}", version);

            let url = &release.assets[asset_index].url;
            let mut buffer = Vec::new();
            github::download_asset(url, &mut buffer).unwrap();

            log::trace!("Extracting downloaded artifact");

            // Assumes the tool binary is the first entry in the zip archive —
            // TODO confirm this holds for all supported tools.
            let mut archive = ZipArchive::new(Cursor::new(&buffer)).unwrap();
            let mut file = archive.by_index(0).unwrap();

            let mut tool_path = paths::tools_dir();
            let exe_name = tool_identifier_to_exe_name(source, &version);
            tool_path.push(exe_name);
            let mut output = BufWriter::new(File::create(&tool_path).unwrap());
            io::copy(&mut file, &mut output).unwrap();

            // On Unix systems, mark the tool as executable.
            #[cfg(unix)]
            {
                use std::os::unix::fs::PermissionsExt;
                fs::set_permissions(&tool_path, fs::Permissions::from_mode(0o777)).unwrap();
            }

            log::trace!("Updating tool cache");
            let mut cache = Self::load().unwrap();
            let tool = cache.tools.entry(CiString(source.to_owned())).or_default();
            tool.versions.insert(version.clone());
            cache.save().unwrap();

            Some(version)
        } else {
            log::error!(
                "No compatible version of {} was found for version requirement {}",
                source,
                version_req
            );
            None
        }
    }

    /// Loads the cache from disk. A missing cache file yields an empty cache;
    /// other I/O errors are returned, and malformed JSON panics.
    pub fn load() -> io::Result<Self> {
        match fs::read(index_file()) {
            Ok(contents) => Ok(serde_json::from_slice(&contents).unwrap()),
            Err(err) => {
                if err.kind() == io::ErrorKind::NotFound {
                    Ok(Default::default())
                } else {
                    Err(err)
                }
            }
        }
    }

    /// Serializes the cache as pretty-printed JSON and writes it to the index file.
    fn save(&self) -> io::Result<()> {
        let serialized = serde_json::to_string_pretty(self).unwrap();
        fs::write(index_file(), serialized)
    }
}
/// Per-tool cache record: the set of versions currently installed on disk.
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct ToolEntry {
    // Ordered set: reverse iteration yields newest-first, which
    // `ToolCache::download_if_necessary` relies on.
    pub versions: BTreeSet<Version>,
}
/// Builds the on-disk file name for a tool binary, `"{source}-{version}{EXE_SUFFIX}"`,
/// with path separators (`/` and `\`) replaced by `__` so the identifier is safe to
/// use as a single file name.
fn tool_identifier_to_exe_name(source: &str, version: &Version) -> String {
    format!("{}-{}{}", source, version, EXE_SUFFIX)
        .replace('/', "__")
        .replace('\\', "__")
}
| 31.722222 | 93 | 0.54641 |
bf3d53eee8cd14ac61f5c342e42f292b1d21b42a | 1,563 | use assert_cmd::Command;
use predicates::prelude::*;
#[test]
/// Regression test: an invalid flag combined with `--help` should print the long-form
/// help text instead of the old terse "Found argument '-c' which wasn't expected"
/// usage error. "Ludicrous speed... go!" appears near the bottom of the long help, so
/// its presence proves the full help was emitted.
fn parser_incorrect_param_with_tack_tack_help() {
    let mut cmd = Command::cargo_bin("feroxbuster").unwrap();
    cmd.arg("-fc").arg("--help");
    cmd.assert()
        .success()
        .stdout(predicate::str::contains("Ludicrous speed... go!"));
}
#[test]
/// Regression test: an invalid flag combined with `-h` should print the short-form
/// help text instead of the old terse usage error. "[CAUTION] 4 -v's is probably too
/// much" appears near the bottom of the short help, so its presence proves the short
/// help was emitted.
fn parser_incorrect_param_with_tack_h() {
    let mut cmd = Command::cargo_bin("feroxbuster").unwrap();
    cmd.arg("-fc").arg("-h");
    cmd.assert()
        .success()
        .stdout(predicate::str::contains(
            "[CAUTION] 4 -v's is probably too much",
        ));
}
| 31.897959 | 102 | 0.626999 |
d543e297d8baa448cf07e375ab0a2f1b1b97c004 | 456 | //xfail
#![feature(custom_attribute, specialization)]
#![allow(dead_code, unused_attributes)]
/// Probe trait for specialization: reports whether `Self` is the unit type.
trait IsUnit {
    fn is_unit() -> bool;
}

/// Blanket default impl: every type answers `false` unless specialized.
impl<T> IsUnit for T {
    default fn is_unit() -> bool {
        false
    }
}

/// Specialized impl overriding the blanket default for `()`.
impl IsUnit for () {
    fn is_unit() -> bool {
        true
    }
}
#[miri_run]
/// Exercises specialization: `i32` hits the blanket default impl, `()` hits
/// the specialized impl.
fn specialization() -> (bool, bool) {
    (i32::is_unit(), <()>::is_unit())
}

#[miri_run]
fn main() {
    // Expect (false, true): default impl for i32, specialized impl for ().
    assert_eq!(specialization(), (false, true));
}
| 15.2 | 48 | 0.574561 |
1cf10ee2c9f375980045373aac8701bfd9250ca6 | 782 | use backpacktf_api::{
BackpackAPI,
request,
error::Error,
tf2_price::{Currencies, scrap},
};
use dotenv::dotenv;
use std::env;
#[tokio::main]
async fn main() -> Result<(), Error> {
dotenv().ok();
let backpacktf = BackpackAPI::builder()
.key(&env::var("KEY").unwrap())
.token(&env::var("TOKEN").unwrap())
.build();
let listing = backpacktf.update_listings(&vec![
request::UpdateListing {
id: "440_76561198080179568_86755d7981f2b4ffb983b9d054ec0c27".into(),
currencies: Currencies {
keys: 0,
metal: scrap!(3),
},
details: Some("yup".into()),
}
]).await?;
println!("Listings updated: {:?}", listing);
Ok(())
} | 24.4375 | 80 | 0.531969 |
f82feba507e245f30cd62d9585894b08bebc751b | 10,539 | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use std::io::{Read, Seek, SeekFrom};
use std::sync::Arc;
use crate::array::*;
use crate::datatypes::Schema;
use crate::error::{ArrowError, Result};
use crate::record_batch::{RecordBatch, RecordBatchReader};
use super::super::{convert, gen};
use super::super::{ARROW_MAGIC, CONTINUATION_MARKER};
use super::common::*;
use flatbuffers::VerifierOptions;
// Convenience alias for a reference-counted, dynamically typed array.
type ArrayRef = Arc<dyn Array>;

/// Metadata read from the footer of an Arrow IPC file — everything needed to
/// locate and decode its record batches without re-parsing the footer.
#[derive(Debug, Clone)]
pub struct FileMetadata {
    /// The schema that is read from the file header
    schema: Arc<Schema>,
    /// The blocks in the file
    ///
    /// A block indicates the regions in the file to read to get data
    blocks: Vec<gen::File::Block>,
    /// The total number of blocks, which may contain record batches and other types
    total_blocks: usize,
    /// Optional dictionaries for each schema field.
    ///
    /// Dictionaries may be appended to in the streaming format.
    dictionaries_by_field: Vec<Option<ArrayRef>>,
    /// FileMetadata version
    version: gen::Schema::MetadataVersion,
    /// Endianness recorded in the file's schema flatbuffer.
    is_little_endian: bool,
}
impl FileMetadata {
    /// Returns the schema.
    ///
    /// The `Arc` is borrowed; clone it if ownership is required.
    pub fn schema(&self) -> &Arc<Schema> {
        &self.schema
    }
}
/// Arrow File reader
pub struct FileReader<R: Read + Seek> {
    // Underlying byte source; seeking is required to jump between blocks.
    reader: R,
    // Footer metadata: schema, block locations and dictionaries.
    metadata: FileMetadata,
    // Index of the next block the iterator will read.
    current_block: usize,
    // Selected column indices together with the corresponding projected schema.
    projection: Option<(Vec<usize>, Arc<Schema>)>,
}
/// Read the IPC file's metadata: validates the magic bytes, parses the footer
/// flatbuffer, and eagerly reads all dictionary batches.
///
/// Leaves `reader` at an unspecified position.
pub fn read_file_metadata<R: Read + Seek>(reader: &mut R) -> Result<FileMetadata> {
    // check if header and footer contain correct magic bytes
    let mut magic_buffer: [u8; 6] = [0; 6];
    reader.read_exact(&mut magic_buffer)?;
    if magic_buffer != ARROW_MAGIC {
        return Err(ArrowError::Ipc(
            "Arrow file does not contain correct header".to_string(),
        ));
    }
    reader.seek(SeekFrom::End(-6))?;
    reader.read_exact(&mut magic_buffer)?;
    if magic_buffer != ARROW_MAGIC {
        return Err(ArrowError::Ipc(
            "Arrow file does not contain correct footer".to_string(),
        ));
    }
    // read footer length (the 4 bytes just before the trailing magic)
    let mut footer_size: [u8; 4] = [0; 4];
    reader.seek(SeekFrom::End(-10))?;
    reader.read_exact(&mut footer_size)?;
    let footer_len = i32::from_le_bytes(footer_size);
    // read footer
    let mut footer_data = vec![0; footer_len as usize];
    reader.seek(SeekFrom::End(-10 - footer_len as i64))?;
    reader.read_exact(&mut footer_data)?;
    // set flatbuffer verification options to the same settings as the C++ arrow implementation.
    // Heuristic: tables in a Arrow flatbuffers buffer must take at least 1 bit
    // each in average (ARROW-11559).
    // Especially, the only recursive table (the `Field` table in Schema.fbs)
    // must have a non-empty `type` member.
    let verifier_options = VerifierOptions {
        max_depth: 128,
        max_tables: footer_len as usize * 8,
        ..Default::default()
    };
    let footer = gen::File::root_as_footer_with_opts(&verifier_options, &footer_data[..])
        .map_err(|err| ArrowError::Ipc(format!("Unable to get root as footer: {:?}", err)))?;
    let blocks = footer.recordBatches().ok_or_else(|| {
        ArrowError::Ipc("Unable to get record batches from IPC Footer".to_string())
    })?;
    let total_blocks = blocks.len();
    let ipc_schema = footer.schema().unwrap();
    let (schema, is_little_endian) = convert::fb_to_schema(ipc_schema);
    let schema = Arc::new(schema);
    // Create an array of optional dictionary value arrays, one per field.
    let mut dictionaries_by_field = vec![None; schema.fields().len()];
    for block in footer.dictionaries().unwrap() {
        // read length from end of offset
        let mut message_size: [u8; 4] = [0; 4];
        reader.seek(SeekFrom::Start(block.offset() as u64))?;
        reader.read_exact(&mut message_size)?;
        // An optional continuation marker may precede the actual length.
        if message_size == CONTINUATION_MARKER {
            reader.read_exact(&mut message_size)?;
        };
        let footer_len = i32::from_le_bytes(message_size);
        let mut block_data = vec![0; footer_len as usize];
        reader.read_exact(&mut block_data)?;
        let message = gen::Message::root_as_message(&block_data[..])
            .map_err(|err| ArrowError::Ipc(format!("Unable to get root as message: {:?}", err)))?;
        match message.header_type() {
            gen::Message::MessageHeader::DictionaryBatch => {
                // The batch body starts right after the flatbuffer metadata.
                let block_offset = block.offset() as u64 + block.metaDataLength() as u64;
                let batch = message.header_as_dictionary_batch().unwrap();
                read_dictionary(
                    batch,
                    &schema,
                    is_little_endian,
                    &mut dictionaries_by_field,
                    reader,
                    block_offset,
                )?;
            }
            t => {
                return Err(ArrowError::Ipc(format!(
                    "Expecting DictionaryBatch in dictionary blocks, found {:?}.",
                    t
                )));
            }
        };
    }
    Ok(FileMetadata {
        schema,
        is_little_endian,
        blocks: blocks.to_vec(),
        total_blocks,
        dictionaries_by_field,
        version: footer.version(),
    })
}
/// Reads the record batch at index `block` (per the footer's block list),
/// optionally applying a column `projection`.
///
/// Returns `Ok(None)` when the message header is `NONE`; errors on a schema
/// message, unsupported message types, or an IPC metadata version mismatch.
/// (The original doc comment was a copy-paste of `read_file_metadata`'s.)
pub fn read_batch<R: Read + Seek>(
    reader: &mut R,
    metadata: &FileMetadata,
    projection: Option<(&[usize], Arc<Schema>)>,
    block: usize,
) -> Result<Option<RecordBatch>> {
    let block = metadata.blocks[block];

    // read length
    reader.seek(SeekFrom::Start(block.offset() as u64))?;
    let mut meta_buf = [0; 4];
    reader.read_exact(&mut meta_buf)?;
    if meta_buf == CONTINUATION_MARKER {
        // continuation marker encountered, read message next
        reader.read_exact(&mut meta_buf)?;
    }
    let meta_len = i32::from_le_bytes(meta_buf);

    let mut block_data = vec![0; meta_len as usize];
    reader.read_exact(&mut block_data)?;

    let message = gen::Message::root_as_message(&block_data[..])
        .map_err(|err| ArrowError::Ipc(format!("Unable to get root as footer: {:?}", err)))?;

    // some old test data's footer metadata is not set, so we account for that
    if metadata.version != gen::Schema::MetadataVersion::V1 && message.version() != metadata.version
    {
        return Err(ArrowError::Ipc(
            "Could not read IPC message as metadata versions mismatch".to_string(),
        ));
    }

    match message.header_type() {
        gen::Message::MessageHeader::Schema => Err(ArrowError::Ipc(
            "Not expecting a schema when messages are read".to_string(),
        )),
        gen::Message::MessageHeader::RecordBatch => {
            let batch = message.header_as_record_batch().ok_or_else(|| {
                ArrowError::Ipc("Unable to read IPC message as record batch".to_string())
            })?;
            read_record_batch(
                batch,
                metadata.schema.clone(),
                projection,
                metadata.is_little_endian,
                &metadata.dictionaries_by_field,
                metadata.version,
                reader,
                // The batch body starts right after the flatbuffer metadata.
                block.offset() as u64 + block.metaDataLength() as u64,
            )
            .map(Some)
        }
        gen::Message::MessageHeader::NONE => Ok(None),
        t => Err(ArrowError::Ipc(format!(
            "Reading types other than record batches not yet supported, unable to read {:?}",
            t
        ))),
    }
}
impl<R: Read + Seek> FileReader<R> {
    /// Creates a new [`FileReader`]. Use `projection` to only take certain columns.
    /// # Panic
    /// Panics iff the projection is not in increasing order (e.g. `[1, 0]` nor `[0, 1, 1]` are valid)
    pub fn new(reader: R, metadata: FileMetadata, projection: Option<Vec<usize>>) -> Self {
        if let Some(projection) = projection.as_ref() {
            // A strictly increasing sequence is both ordered and free of duplicates.
            // BUGFIX: the previous fold-based check started its accumulator at 0 and
            // asserted `v > acc` for every element, so any valid projection beginning
            // with column 0 (e.g. `[0, 1]`) incorrectly panicked.
            assert!(
                projection.windows(2).all(|w| w[0] < w[1]),
                "The projection on IPC must be ordered and non-overlapping"
            );
        }
        // Precompute the projected schema: the selected fields, in order,
        // with the original schema-level metadata preserved.
        let projection = projection.map(|projection| {
            let fields = metadata.schema().fields();
            let fields = projection.iter().map(|x| fields[*x].clone()).collect();
            let schema = Arc::new(Schema {
                fields,
                metadata: metadata.schema().metadata().clone(),
            });
            (projection, schema)
        });
        Self {
            reader,
            metadata,
            projection,
            current_block: 0,
        }
    }

    /// Return the schema of the file (the projected schema if a projection is set).
    pub fn schema(&self) -> &Arc<Schema> {
        self.projection
            .as_ref()
            .map(|x| &x.1)
            .unwrap_or(&self.metadata.schema)
    }

    /// Consumes this FileReader, returning the underlying reader
    pub fn into_inner(self) -> R {
        self.reader
    }
}
impl<R: Read + Seek> Iterator for FileReader<R> {
    type Item = Result<RecordBatch>;

    /// Yields the next record batch, advancing `current_block`; `None` once
    /// all blocks have been consumed.
    fn next(&mut self) -> Option<Self::Item> {
        // get current block
        if self.current_block < self.metadata.total_blocks {
            let block = self.current_block;
            self.current_block += 1;
            read_batch(
                &mut self.reader,
                &self.metadata,
                self.projection
                    .as_ref()
                    .map(|x| (x.0.as_ref(), x.1.clone())),
                block,
            )
            // Convert Result<Option<_>> into Option<Result<_>> for the iterator;
            // NONE-header messages are skipped silently.
            .transpose()
        } else {
            None
        }
    }
}
impl<R: Read + Seek> RecordBatchReader for FileReader<R> {
    /// The (possibly projected) schema of the batches this reader yields.
    fn schema(&self) -> &Schema {
        self.schema().as_ref()
    }
}
| 34.217532 | 102 | 0.593035 |
75b85185a78446c9fc655c3382c28406339949b5 | 13,757 | use std::convert::{TryFrom, TryInto};
use std::str::FromStr;
use std::time::Duration;
use serde::Serialize;
use tendermint::trust_threshold::{
TrustThresholdFraction as TrustThreshold, TrustThresholdFraction,
};
use tendermint_proto::Protobuf;
use ibc_proto::ibc::lightclients::tendermint::v1::{ClientState as RawClientState, Fraction};
use crate::ics02_client::client_state::AnyClientState;
use crate::ics02_client::client_type::ClientType;
use crate::ics07_tendermint::error::{Error, Kind};
use crate::ics07_tendermint::header::Header;
use crate::ics23_commitment::specs::ProofSpecs;
use crate::ics24_host::identifier::ChainId;
use crate::Height;
/// On-chain state of a Tendermint light client tracking a counterparty chain.
#[derive(Clone, Debug, PartialEq, Eq, Serialize)]
pub struct ClientState {
    // Identifier of the chain this client tracks.
    pub chain_id: ChainId,
    // Fraction of validator power required to trust a header (e.g. 1/3).
    pub trust_level: TrustThreshold,
    // How long a consensus state is trusted; must be < `unbonding_period` (see `new`).
    pub trusting_period: Duration,
    // The counterparty chain's unbonding period.
    pub unbonding_period: Duration,
    // Tolerated clock drift.
    pub max_clock_drift: Duration,
    // Height at which the client was frozen; zero means "not frozen" (see `is_frozen`).
    pub frozen_height: Height,
    // Latest height the client has been updated to.
    pub latest_height: Height,
    // pub proof_specs: ::std::vec::Vec<super::super::super::super::ics23::ProofSpec>,
    // Key path used during client upgrades.
    pub upgrade_path: Vec<String>,
    // Policy for updating the client after expiry or misbehaviour.
    pub allow_update: AllowUpdate,
}
/// Flags governing whether the client may still be updated after it has
/// expired or after misbehaviour has been detected.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize)]
pub struct AllowUpdate {
    pub after_expiry: bool,
    pub after_misbehaviour: bool,
}

// Protobuf (de)serialization through the raw generated type.
impl Protobuf<RawClientState> for ClientState {}
impl ClientState {
    /// Builds a validated `ClientState`.
    ///
    /// # Errors
    /// Fails when either period is zero, the trusting period is not strictly
    /// smaller than the unbonding period, `frozen_height` is non-zero (clients
    /// cannot be created frozen), or `latest_height` is zero.
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        chain_id: ChainId,
        trust_level: TrustThreshold,
        trusting_period: Duration,
        unbonding_period: Duration,
        max_clock_drift: Duration,
        latest_height: Height,
        frozen_height: Height,
        upgrade_path: Vec<String>,
        allow_update: AllowUpdate,
    ) -> Result<ClientState, Error> {
        // Basic validation of trusting period and unbonding period: each should be non-zero.
        if trusting_period <= Duration::new(0, 0) {
            return Err(Kind::InvalidTrustingPeriod
                .context("ClientState trusting period must be greater than zero")
                .into());
        }
        if unbonding_period <= Duration::new(0, 0) {
            return Err(Kind::InvalidUnboundingPeriod
                .context("ClientState unbonding period must be greater than zero")
                .into());
        }
        if trusting_period >= unbonding_period {
            return Err(Kind::InvalidUnboundingPeriod
                .context("ClientState trusting period must be smaller than unbonding period")
                .into());
        }
        // Basic validation for the frozen_height parameter.
        if !frozen_height.is_zero() {
            return Err(Kind::ValidationError
                .context("ClientState cannot be frozen at creation time")
                .into());
        }
        // Basic validation for the latest_height parameter.
        if latest_height <= Height::zero() {
            return Err(Kind::ValidationError
                .context("ClientState latest height cannot be smaller or equal than zero")
                .into());
        }
        Ok(Self {
            chain_id,
            trust_level,
            trusting_period,
            unbonding_period,
            max_clock_drift,
            frozen_height,
            latest_height,
            upgrade_path,
            allow_update,
        })
    }

    /// The most recent height this client has been updated to.
    pub fn latest_height(&self) -> Height {
        self.latest_height
    }

    /// Returns a copy of this state with `latest_height` advanced to the
    /// height of header `h` (revision number unchanged).
    pub fn with_header(self, h: Header) -> Self {
        // TODO: Clarify which fields should update.
        ClientState {
            latest_height: self
                .latest_height
                .with_revision_height(u64::from(h.signed_header.header.height)),
            ..self
        }
    }

    /// Helper function to verify the upgrade client procedure.
    /// Resets all fields except the blockchain-specific ones.
    pub fn zero_custom_fields(mut client_state: Self) -> Self {
        client_state.trusting_period = Duration::from_secs(0);
        client_state.trust_level = TrustThresholdFraction {
            numerator: 0,
            denominator: 0,
        };
        client_state.allow_update.after_expiry = false;
        client_state.allow_update.after_misbehaviour = false;
        client_state.frozen_height = Height::zero();
        client_state.max_clock_drift = Duration::from_secs(0);
        client_state
    }
}
// Client-agnostic interface used by the ICS-02 handlers.
impl crate::ics02_client::client_state::ClientState for ClientState {
    fn chain_id(&self) -> ChainId {
        self.chain_id.clone()
    }

    fn client_type(&self) -> ClientType {
        ClientType::Tendermint
    }

    fn latest_height(&self) -> Height {
        self.latest_height
    }

    fn is_frozen(&self) -> bool {
        // If 'frozen_height' is set to a non-zero value, then the client state is frozen.
        !self.frozen_height.is_zero()
    }

    /// Wraps this state into the client-agnostic enum.
    fn wrap_any(self) -> AnyClientState {
        AnyClientState::Tendermint(self)
    }
}
impl TryFrom<RawClientState> for ClientState {
    type Error = Error;

    /// Decodes the raw protobuf message, failing if any required field is
    /// missing or malformed (negative durations, invalid chain id/heights).
    /// Note this does NOT run the invariant checks in `ClientState::new`.
    fn try_from(raw: RawClientState) -> Result<Self, Self::Error> {
        let trust_level = raw
            .trust_level
            .clone()
            .ok_or_else(|| Kind::InvalidRawClientState.context("missing trusting period"))?;

        Ok(Self {
            chain_id: ChainId::from_str(raw.chain_id.as_str())
                .map_err(|_| Kind::InvalidRawClientState.context("Invalid chain identifier"))?,
            trust_level: TrustThreshold {
                numerator: trust_level.numerator,
                denominator: trust_level.denominator,
            },
            trusting_period: raw
                .trusting_period
                .ok_or_else(|| Kind::InvalidRawClientState.context("missing trusting period"))?
                .try_into()
                .map_err(|_| Kind::InvalidRawClientState.context("negative trusting period"))?,
            unbonding_period: raw
                .unbonding_period
                .ok_or_else(|| Kind::InvalidRawClientState.context("missing unbonding period"))?
                .try_into()
                .map_err(|_| Kind::InvalidRawClientState.context("negative unbonding period"))?,
            max_clock_drift: raw
                .max_clock_drift
                .ok_or_else(|| Kind::InvalidRawClientState.context("missing max clock drift"))?
                .try_into()
                .map_err(|_| Kind::InvalidRawClientState.context("negative max clock drift"))?,
            latest_height: raw
                .latest_height
                .ok_or_else(|| Kind::InvalidRawClientState.context("missing latest height"))?
                .try_into()
                .map_err(|_| Kind::InvalidRawHeight)?,
            frozen_height: raw
                .frozen_height
                .ok_or_else(|| Kind::InvalidRawClientState.context("missing frozen height"))?
                .try_into()
                .map_err(|_| Kind::InvalidRawHeight)?,
            upgrade_path: raw.upgrade_path,
            allow_update: AllowUpdate {
                after_expiry: raw.allow_update_after_expiry,
                after_misbehaviour: raw.allow_update_after_misbehaviour,
            },
        })
    }
}
impl From<ClientState> for RawClientState {
    /// Encodes the domain type back into its raw protobuf representation.
    fn from(value: ClientState) -> Self {
        RawClientState {
            chain_id: value.chain_id.to_string(),
            trust_level: Some(Fraction {
                numerator: value.trust_level.numerator,
                denominator: value.trust_level.denominator,
            }),
            trusting_period: Some(value.trusting_period.into()),
            unbonding_period: Some(value.unbonding_period.into()),
            max_clock_drift: Some(value.max_clock_drift.into()),
            frozen_height: Some(value.frozen_height.into()),
            latest_height: Some(value.latest_height.into()),
            proof_specs: ProofSpecs::cosmos().into(),
            // NOTE(review): these two flags are serialized as hard-coded `false`
            // rather than `value.allow_update.*`, while `TryFrom` above reads them
            // from the raw message — confirm this asymmetry is intentional and not
            // a round-trip data loss.
            allow_update_after_expiry: false,
            allow_update_after_misbehaviour: false,
            upgrade_path: value.upgrade_path,
        }
    }
}
#[cfg(test)]
mod tests {
    use std::time::Duration;

    use tendermint::trust_threshold::TrustThresholdFraction as TrustThreshold;
    use tendermint_rpc::endpoint::abci_query::AbciQuery;

    use crate::ics07_tendermint::client_state::{AllowUpdate, ClientState};
    use crate::ics24_host::identifier::ChainId;
    use crate::test::test_serialization_roundtrip;
    use crate::Height;

    /// JSON round-trip of an `AbciQuery` fixture without a proof payload.
    #[test]
    fn serialization_roundtrip_no_proof() {
        let json_data = include_str!("../../tests/support/query/serialization/client_state.json");
        println!("json_data: {:?}", json_data);
        test_serialization_roundtrip::<AbciQuery>(json_data);
    }

    /// JSON round-trip of an `AbciQuery` fixture that includes a proof payload.
    #[test]
    fn serialization_roundtrip_with_proof() {
        let json_data =
            include_str!("../../tests/support/query/serialization/client_state_proof.json");
        println!("json_data: {:?}", json_data);
        test_serialization_roundtrip::<AbciQuery>(json_data);
    }

    /// Table-driven validation tests for `ClientState::new`.
    #[test]
    fn client_state_new() {
        // Bundle of constructor arguments so each case can tweak a single field.
        #[derive(Clone, Debug, PartialEq)]
        struct ClientStateParams {
            id: ChainId,
            trust_level: TrustThreshold,
            trusting_period: Duration,
            unbonding_period: Duration,
            max_clock_drift: Duration,
            latest_height: Height,
            frozen_height: Height,
            upgrade_path: Vec<String>,
            allow_update: AllowUpdate,
        }

        // Define a "default" set of parameters to reuse throughout these tests.
        let default_params: ClientStateParams = ClientStateParams {
            id: ChainId::default(),
            trust_level: TrustThreshold {
                numerator: 1,
                denominator: 3,
            },
            trusting_period: Duration::new(64000, 0),
            unbonding_period: Duration::new(128000, 0),
            max_clock_drift: Duration::new(3, 0),
            latest_height: Height::new(0, 10),
            frozen_height: Height::default(),
            upgrade_path: vec!["".to_string()],
            allow_update: AllowUpdate {
                after_expiry: false,
                after_misbehaviour: false,
            },
        };

        // One table entry per validation rule exercised.
        struct Test {
            name: String,
            params: ClientStateParams,
            want_pass: bool,
        }

        let tests: Vec<Test> = vec![
            Test {
                name: "Valid parameters".to_string(),
                params: default_params.clone(),
                want_pass: true,
            },
            Test {
                name: "Invalid frozen height parameter (should be 0)".to_string(),
                params: ClientStateParams {
                    frozen_height: Height::new(0, 1),
                    ..default_params.clone()
                },
                want_pass: false,
            },
            Test {
                name: "Invalid unbonding period".to_string(),
                params: ClientStateParams {
                    unbonding_period: Duration::default(),
                    ..default_params.clone()
                },
                want_pass: false,
            },
            Test {
                name: "Invalid (too small) trusting period".to_string(),
                params: ClientStateParams {
                    trusting_period: Duration::default(),
                    ..default_params.clone()
                },
                want_pass: false,
            },
            Test {
                name: "Invalid (too large) trusting period w.r.t. unbonding period".to_string(),
                params: ClientStateParams {
                    trusting_period: Duration::new(11, 0),
                    unbonding_period: Duration::new(10, 0),
                    ..default_params
                },
                want_pass: false,
            },
        ]
        .into_iter()
        .collect();

        for test in tests {
            let p = test.params.clone();

            let cs_result = ClientState::new(
                p.id,
                p.trust_level,
                p.trusting_period,
                p.unbonding_period,
                p.max_clock_drift,
                p.latest_height,
                p.frozen_height,
                p.upgrade_path,
                p.allow_update,
            );

            assert_eq!(
                test.want_pass,
                cs_result.is_ok(),
                "ClientState::new() failed for test {}, \nmsg{:?} with error {:?}",
                test.name,
                test.params.clone(),
                cs_result.err(),
            );
        }
    }
}
#[cfg(any(test, feature = "mocks"))]
pub mod test_util {
    use std::time::Duration;

    use tendermint::block::Header;

    use crate::ics02_client::client_state::AnyClientState;
    use crate::ics02_client::height::Height;
    use crate::ics07_tendermint::client_state::{AllowUpdate, ClientState};
    use crate::ics24_host::identifier::ChainId;

    /// Builds a `ClientState` from a Tendermint header using fixed test
    /// parameters (64000 s trusting / 128000 s unbonding / 3 s drift),
    /// wrapped in the client-agnostic enum. Panics if validation fails.
    pub fn get_dummy_tendermint_client_state(tm_header: Header) -> AnyClientState {
        AnyClientState::Tendermint(
            ClientState::new(
                ChainId::from(tm_header.chain_id.clone()),
                Default::default(),
                Duration::from_secs(64000),
                Duration::from_secs(128000),
                Duration::from_millis(3000),
                Height::new(
                    ChainId::chain_version(tm_header.chain_id.as_str()),
                    u64::from(tm_header.height),
                ),
                Height::zero(),
                vec!["".to_string()],
                AllowUpdate {
                    after_expiry: false,
                    after_misbehaviour: false,
                },
            )
            .unwrap(),
        )
    }
}
| 34.916244 | 98 | 0.573235 |
878b22dc6950a752f8ac95203200e01e94438875 | 7,450 | // Copyright 2019-2021 Parity Technologies (UK) Ltd.
//
// Permission is hereby granted, free of charge, to any
// person obtaining a copy of this software and associated
// documentation files (the "Software"), to deal in the
// Software without restriction, including without
// limitation the rights to use, copy, modify, merge,
// publish, distribute, sublicense, and/or sell copies of
// the Software, and to permit persons to whom the Software
// is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice
// shall be included in all copies or substantial portions
// of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
use crate::transport::HttpTransportClient;
use crate::types::{
traits::{Client, SubscriptionClient},
v2::{Id, NotificationSer, ParamsSer, RequestSer, Response, RpcError},
CertificateStore, Error, RequestIdManager, Subscription, TEN_MB_SIZE_BYTES,
};
use async_trait::async_trait;
use rustc_hash::FxHashMap;
use serde::de::DeserializeOwned;
use std::{sync::Arc, time::Duration};
/// Http Client Builder.
#[derive(Debug)]
pub struct HttpClientBuilder {
    // Maximum accepted size of a request body, in bytes (default 10 MiB).
    max_request_body_size: u32,
    // Timeout applied to each request (default 60 s).
    request_timeout: Duration,
    // Upper bound on concurrently tracked request ids.
    max_concurrent_requests: usize,
    // Certificate store handed to the underlying transport.
    certificate_store: CertificateStore,
}
// Builder methods consume and return `self`, so calls can be chained.
impl HttpClientBuilder {
    /// Sets the maximum size of a request body in bytes (default is 10 MiB).
    pub fn max_request_body_size(mut self, size: u32) -> Self {
        self.max_request_body_size = size;
        self
    }

    /// Set request timeout (default is 60 seconds).
    pub fn request_timeout(mut self, timeout: Duration) -> Self {
        self.request_timeout = timeout;
        self
    }

    /// Set max concurrent requests.
    pub fn max_concurrent_requests(mut self, max: usize) -> Self {
        self.max_concurrent_requests = max;
        self
    }

    /// Set which certificate store to use.
    pub fn certificate_store(mut self, certificate_store: CertificateStore) -> Self {
        self.certificate_store = certificate_store;
        self
    }

    /// Build the HTTP client with target to connect to.
    ///
    /// # Errors
    /// Fails if the transport cannot be constructed for `target`.
    pub fn build(self, target: impl AsRef<str>) -> Result<HttpClient, Error> {
        let transport = HttpTransportClient::new(target, self.max_request_body_size, self.certificate_store)
            .map_err(|e| Error::Transport(e.into()))?;
        Ok(HttpClient {
            transport,
            id_manager: Arc::new(RequestIdManager::new(self.max_concurrent_requests)),
            request_timeout: self.request_timeout,
        })
    }
}
impl Default for HttpClientBuilder {
    /// Defaults: 10 MiB body limit, 60 s request timeout, 256 concurrent
    /// requests, native certificate store.
    fn default() -> Self {
        Self {
            max_request_body_size: TEN_MB_SIZE_BYTES,
            request_timeout: Duration::from_secs(60),
            max_concurrent_requests: 256,
            certificate_store: CertificateStore::Native,
        }
    }
}
/// JSON-RPC HTTP Client that provides functionality to perform method calls and notifications.
#[derive(Debug, Clone)]
pub struct HttpClient {
    /// HTTP transport client.
    transport: HttpTransportClient,
    /// Request timeout. Defaults to 60sec.
    request_timeout: Duration,
    /// Request ID manager.
    ///
    /// Behind an `Arc`, so cloned clients draw ids from the same pool.
    id_manager: Arc<RequestIdManager>,
}
#[async_trait]
impl Client for HttpClient {
    /// Sends a JSON-RPC notification; no response payload is expected.
    async fn notification<'a>(&self, method: &'a str, params: Option<ParamsSer<'a>>) -> Result<(), Error> {
        let notif = NotificationSer::new(method, params);
        let fut = self.transport.send(serde_json::to_string(&notif).map_err(Error::ParseError)?);
        // Outer timeout error -> RequestTimeout; inner transport error -> Transport.
        match tokio::time::timeout(self.request_timeout, fut).await {
            Ok(Ok(ok)) => Ok(ok),
            Err(_) => Err(Error::RequestTimeout),
            Ok(Err(e)) => Err(Error::Transport(e.into())),
        }
    }

    /// Perform a request towards the server.
    async fn request<'a, R>(&self, method: &'a str, params: Option<ParamsSer<'a>>) -> Result<R, Error>
    where
        R: DeserializeOwned,
    {
        let id = self.id_manager.next_request_id()?;
        let request = RequestSer::new(Id::Number(*id.inner()), method, params);

        let fut = self.transport.send_and_read_body(serde_json::to_string(&request).map_err(Error::ParseError)?);
        let body = match tokio::time::timeout(self.request_timeout, fut).await {
            Ok(Ok(body)) => body,
            Err(_e) => {
                return Err(Error::RequestTimeout);
            }
            Ok(Err(e)) => {
                return Err(Error::Transport(e.into()));
            }
        };

        // If the body does not parse as a success response, try to parse it as
        // an RPC error payload before giving up.
        let response: Response<_> = match serde_json::from_slice(&body) {
            Ok(response) => response,
            Err(_) => {
                let err: RpcError = serde_json::from_slice(&body).map_err(Error::ParseError)?;
                return Err(Error::Request(err.to_string()));
            }
        };

        // The server must echo back the numeric id we sent.
        let response_id = response.id.as_number().copied().ok_or(Error::InvalidRequestId)?;

        if response_id == *id.inner() {
            Ok(response.result)
        } else {
            Err(Error::InvalidRequestId)
        }
    }

    /// Performs a batch request; responses are reordered to match the order of
    /// the original calls even if the server answers out of order.
    async fn batch_request<'a, R>(&self, batch: Vec<(&'a str, Option<ParamsSer<'a>>)>) -> Result<Vec<R>, Error>
    where
        R: DeserializeOwned + Default + Clone,
    {
        let mut batch_request = Vec::with_capacity(batch.len());
        // NOTE(niklasad1): `ID` is not necessarily monotonically increasing.
        let mut ordered_requests = Vec::with_capacity(batch.len());
        // Maps a request id back to its position in the caller's batch.
        let mut request_set = FxHashMap::with_capacity_and_hasher(batch.len(), Default::default());

        let ids = self.id_manager.next_request_ids(batch.len())?;
        for (pos, (method, params)) in batch.into_iter().enumerate() {
            batch_request.push(RequestSer::new(Id::Number(ids.inner()[pos]), method, params));
            ordered_requests.push(ids.inner()[pos]);
            request_set.insert(ids.inner()[pos], pos);
        }

        let fut = self.transport.send_and_read_body(serde_json::to_string(&batch_request).map_err(Error::ParseError)?);

        let body = match tokio::time::timeout(self.request_timeout, fut).await {
            Ok(Ok(body)) => body,
            Err(_e) => return Err(Error::RequestTimeout),
            Ok(Err(e)) => return Err(Error::Transport(e.into())),
        };

        let rps: Vec<Response<_>> =
            serde_json::from_slice(&body).map_err(|_| match serde_json::from_slice::<RpcError>(&body) {
                Ok(e) => Error::Request(e.to_string()),
                Err(e) => Error::ParseError(e),
            })?;

        // NOTE: `R::default` is placeholder and will be replaced in loop below.
        let mut responses = vec![R::default(); ordered_requests.len()];
        for rp in rps {
            let response_id = rp.id.as_number().copied().ok_or(Error::InvalidRequestId)?;
            let pos = match request_set.get(&response_id) {
                Some(pos) => *pos,
                None => return Err(Error::InvalidRequestId),
            };
            responses[pos] = rp.result
        }
        Ok(responses)
    }
}
// HTTP is strictly request/response; subscriptions require a stateful
// transport, so both methods unconditionally return `HttpNotImplemented`.
#[async_trait]
impl SubscriptionClient for HttpClient {
    /// Send a subscription request to the server. Not implemented for HTTP; will always return [`Error::HttpNotImplemented`].
    async fn subscribe<'a, N>(
        &self,
        _subscribe_method: &'a str,
        _params: Option<ParamsSer<'a>>,
        _unsubscribe_method: &'a str,
    ) -> Result<Subscription<N>, Error>
    where
        N: DeserializeOwned,
    {
        Err(Error::HttpNotImplemented)
    }

    /// Subscribe to a specific method. Not implemented for HTTP; will always return [`Error::HttpNotImplemented`].
    async fn subscribe_to_method<'a, N>(&self, _method: &'a str) -> Result<Subscription<N>, Error>
    where
        N: DeserializeOwned,
    {
        Err(Error::HttpNotImplemented)
    }
}
| 33.710407 | 123 | 0.710336 |
9116576d6b227da65d367fcb5f9bf9fa288eaaf3 | 3,194 | // Copyright Materialize, Inc. All rights reserved.
//
// Use of this software is governed by the Business Source License
// included in the LICENSE file.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0.
use std::iter;
use std::str;
use differential_dataflow::{AsCollection, Collection};
use regex::Regex;
use timely::dataflow::operators::Operator;
use timely::dataflow::{Scope, Stream};
use dataflow_types::{DataflowError, DecodeError};
use repr::{Datum, Diff, Row, Timestamp};
use crate::operator::CollectionExt;
use crate::source::SourceOutput;
/// Decodes a stream of raw text lines into rows by applying `regex` to each line.
///
/// Each matching line produces one row holding the capture groups (group 0,
/// the whole match, is skipped) followed by the line's position. Lines that
/// do not match are dropped; lines that are not valid UTF-8 are routed to the
/// returned error collection.
pub fn regex<G>(
    stream: &Stream<G, SourceOutput<Vec<u8>, Vec<u8>>>,
    regex: Regex,
    name: &str,
) -> (
    Collection<G, Row, Diff>,
    Option<Collection<G, dataflow_types::DataflowError, Diff>>,
)
where
    G: Scope<Timestamp = Timestamp>,
{
    let name = String::from(name);
    let pact = SourceOutput::<Vec<u8>, Vec<u8>>::position_value_contract();
    let mut row_packer = repr::RowPacker::new();
    let stream = stream.unary(pact, "RegexDecode", |_cap, _op_info| {
        move |input, output| {
            input.for_each(|cap, lines| {
                let mut session = output.session(&cap);
                for SourceOutput {
                    key: _,
                    value: line,
                    position: line_no,
                    upstream_time_millis: _,
                } in &*lines
                {
                    let line = match str::from_utf8(&line) {
                        Ok(line) => line,
                        Err(_) => {
                            // Emit a decode error for this line; `continue` keeps
                            // the rest of the batch flowing.
                            // (Fixed: `line_no` implements Display, so the previous
                            // `line_no.to_string()` allocated needlessly.)
                            session.give((
                                Err(DataflowError::DecodeError(DecodeError::Text(
                                    match line_no {
                                        Some(line_no) => format!("Regex error in source {} at lineno {}: invalid UTF-8", name, line_no),
                                        None => format!("Regex error in source {} at lineno 'unknown': invalid UTF-8", name),
                                    }
                                ))),
                                *cap.time(),
                                1,
                            ));
                            continue;
                        }
                    };
                    let captures = match regex.captures(line) {
                        Some(captures) => captures,
                        // Non-matching lines are silently dropped.
                        None => continue,
                    };
                    // Skip the 0th capture, which is the entire match, so that
                    // we only output the actual capture groups.
                    let datums = captures
                        .iter()
                        .skip(1)
                        .map(|c| Datum::from(c.map(|c| c.as_str())))
                        .chain(iter::once(Datum::from(*line_no)));
                    session.give((Ok(row_packer.pack(datums)), *cap.time(), 1));
                }
            });
        }
    });
    // Split into (ok rows, decode errors); the trailing `return` was replaced
    // by the idiomatic tail expression.
    let (oks, errs) = stream.as_collection().map_fallible(|x| x);
    (oks, Some(errs))
}
| 36.295455 | 148 | 0.482154 |
e9d2a901cb897160e3a17c711935d22992b01380 | 3,517 | use rand::{rngs::StdRng, Rng, SeedableRng};
use std::sync::Arc;
use std::thread;
use std::time::{Duration, Instant};
use txcell::tree::BinarySearchTree;
/// Builds a seedable RNG with the fixed 32-byte seed 1, 2, ..., 32 so that
/// every run — and every thread — draws the same "random" sequence.
fn deterministic_rng() -> StdRng {
    let mut seed = [0u8; 32];
    for (i, byte) in seed.iter_mut().enumerate() {
        *byte = (i + 1) as u8;
    }
    StdRng::from_seed(seed)
}
// Benchmark outline:
// - create an empty binary tree
// - spawn N threads
// - in each thread:
//   - precompute random numbers
//   - start timer
//   - repeat the following transaction `num_iterations` times:
//     - insert one element into the tree or look up one element
//   - stop timer
// - sum timers, compute ops/sec
/// Runs the STM binary-search-tree benchmark and returns the SUM of the
/// per-thread elapsed times (aggregate time, not wall-clock — the threads
/// run concurrently).
///
/// `percent_writes` is the probability in [0, 1] that an iteration performs
/// an insert rather than a lookup.
fn test_stm(
    num_threads: usize,
    percent_writes: f64,
    num_iterations: usize,
) -> Duration {
    // Create an empty binary tree
    let tree = Arc::new(BinarySearchTree::new(0));
    let mut total_time = Duration::new(0, 0);

    // spawn N threads
    let mut handles = vec![];
    for n in 0..num_threads {
        handles.push(thread::spawn({
            let tree_clone = Arc::clone(&tree);
            move || {
                // precompute random numbers (the write/read decision per
                // iteration), so RNG cost stays outside the timed region
                let mut rng = deterministic_rng();
                let mut is_write = Vec::with_capacity(num_iterations);
                for _ in 0..num_iterations {
                    if rng.gen::<f64>() < percent_writes {
                        is_write.push(true);
                    } else {
                        is_write.push(false);
                    }
                }
                // start timer
                let now = Instant::now();
                // repeat the following transaction
                for i in 0..num_iterations {
                    let value = n * num_threads + i;
                    let mut _count = 0;
                    // NOTE(review): `transaction { .. }` is not standard Rust
                    // syntax; presumably rewritten by a txcell preprocessor or
                    // macro pass — confirm how this file is compiled.
                    transaction {
                        if is_write[i] {
                            tree_clone.add(value);
                        } else {
                            if let Some(element) = tree_clone.find(value) {
                                let node = element.borrow();
                                _count += (*node).val;
                            }
                        }
                    }
                }
                // Stop timer
                now.elapsed()
            }
        }));
    }
    // Accumulate each thread's measured duration.
    for handle in handles {
        total_time += handle.join().unwrap();
    }
    total_time
}
/// Converts a total elapsed `duration` into transactions per second.
///
/// Each of the `num_threads` threads executes `num_repeats` transactions
/// (one operation per transaction), so the operation count is
/// `num_threads * num_repeats`.
fn ops_per_second(duration: Duration, num_threads: usize, num_repeats: usize) -> f64 {
    // `Duration::as_secs_f64` replaces the previous manual nanosecond
    // conversion via `as_nanos()` and `10f64.powf(9.0)`.
    let total_ops = (num_threads * num_repeats) as f64;
    total_ops / duration.as_secs_f64()
}
#[test]
fn branching() {
    // Thread counts to benchmark: M cores, one thread per core.
    let thread_counts = vec![1];
    // Write ratios to sweep: Y% of accesses are writes.
    let write_ratios = vec![0.0, 0.1, 0.25, 0.5, 0.75, 1.0];
    // Transactions executed per thread.
    let num_repeats = 1;
    // Measurements are emitted in CSV format.
    println!("num_cores,num_accesses,percent_writes,stm_ops_per_sec,swym_ops_per_sec");
    for &threads in &thread_counts {
        for &ratio in &write_ratios {
            let elapsed = test_stm(threads, ratio, num_repeats);
            let throughput = ops_per_second(elapsed, threads, num_repeats);
            println!("{},{},{},{}", threads, ratio, throughput, 0);
        }
    }
}
| 33.179245 | 98 | 0.517486 |
ab63ee47dd3129c4bbaad132aa8ee2dee252efaa | 18,916 | // Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::HashMap;
use std::collections::HashSet;
use common_datavalues::prelude::*;
use common_exception::ErrorCode;
use common_exception::Result;
use common_functions::scalars::FunctionFactory;
use crate::validate_function_arg;
use crate::Expression;
use crate::ExpressionVisitor;
use crate::Recursion;
/// Resolves an `Expression::Wildcard` to a collection of `Expression::Column`'s.
/// Any non-wildcard expression is returned unchanged as a one-element vector.
pub fn expand_wildcard(expr: &Expression, schema: &DataSchemaRef) -> Vec<Expression> {
    if let Expression::Wildcard = expr {
        // Materialize one column expression per field of the schema.
        schema
            .fields()
            .iter()
            .map(|field| Expression::Column(field.name().to_string()))
            .collect()
    } else {
        vec![expr.clone()]
    }
}
/// Collect all deeply nested `Expression::AggregateFunction` and
/// `Expression::AggregateUDF`. They are returned in order of occurrence (depth
/// first), with duplicates omitted.
pub fn find_aggregate_exprs(exprs: &[Expression]) -> Vec<Expression> {
    let is_aggregate = |e: &Expression| matches!(e, Expression::AggregateFunction { .. });
    find_exprs_in_exprs(exprs, &is_aggregate)
}
/// Like [`find_aggregate_exprs`] but scans a single expression tree.
pub fn find_aggregate_exprs_in_expr(expr: &Expression) -> Vec<Expression> {
    let is_aggregate = |e: &Expression| matches!(e, Expression::AggregateFunction { .. });
    find_exprs_in_expr(expr, &is_aggregate)
}
/// Collect all arguments from aggregation function and append to this exprs
/// [ColumnExpr(b), Aggr(sum(a, b))] ---> [ColumnExpr(b), ColumnExpr(a)]
pub fn expand_aggregate_arg_exprs(exprs: &[Expression]) -> Vec<Expression> {
let mut res = vec![];
for expr in exprs {
match expr {
Expression::AggregateFunction { args, .. } => {
for arg in args {
if !res.contains(arg) {
res.push(arg.clone());
}
}
}
_ => {
if !res.contains(expr) {
res.push(expr.clone());
}
}
}
}
res
}
/// Collect all deeply nested `Expression::Column`'s. They are returned in order
/// of appearance (depth first), with duplicates omitted.
pub fn find_column_exprs(exprs: &[Expression]) -> Vec<Expression> {
    let is_column = |e: &Expression| matches!(e, Expression::Column(_));
    find_exprs_in_exprs(exprs, &is_column)
}
/// Search the provided `Expression`'s, and all of their nested `Expression`,
/// for any that pass the provided test. The returned `Expression`'s are
/// deduplicated and returned in order of appearance (depth first).
fn find_exprs_in_exprs<F>(exprs: &[Expression], test_fn: &F) -> Vec<Expression>
where F: Fn(&Expression) -> bool {
    let mut found = Vec::new();
    for expr in exprs {
        // Matches from each tree are appended in traversal order; an
        // expression already seen in an earlier tree is skipped.
        for matched in find_exprs_in_expr(expr, test_fn) {
            if !found.contains(&matched) {
                found.push(matched);
            }
        }
    }
    found
}
// Visitor that find Expressions that match a particular predicate
struct Finder<'a, F>
where F: Fn(&Expression) -> bool
{
    /// Predicate deciding whether an expression should be collected.
    test_fn: &'a F,
    /// Matches collected so far, depth-first, deduplicated.
    exprs: Vec<Expression>,
}
impl<'a, F> Finder<'a, F>
where F: Fn(&Expression) -> bool
{
    /// Create a new finder with the `test_fn`; starts with an empty match list.
    fn new(test_fn: &'a F) -> Self {
        Self {
            test_fn,
            exprs: Vec::new(),
        }
    }
}
impl<'a, F> ExpressionVisitor for Finder<'a, F>
where F: Fn(&Expression) -> bool
{
    /// Records matching expressions; a match stops recursion into its subtree.
    fn pre_visit(mut self, expr: &Expression) -> Result<Recursion<Self>> {
        // Non-matching nodes are descended into as usual.
        if !(self.test_fn)(expr) {
            return Ok(Recursion::Continue(self));
        }
        // Record the match once and stop recursing down this expr.
        if !self.exprs.contains(expr) {
            self.exprs.push(expr.clone());
        }
        Ok(Recursion::Stop(self))
    }
}
/// Search an `Expression`, and all of its nested `Expression`'s, for any that
/// pass the provided test. The returned `Expression`'s are deduplicated and
/// returned in order of appearance (depth first).
fn find_exprs_in_expr<F>(expr: &Expression, test_fn: &F) -> Vec<Expression>
where F: Fn(&Expression) -> bool {
    let finder = expr
        .accept(Finder::new(test_fn))
        // pre_visit always returns Ok, so the traversal cannot fail.
        .expect("no way to return error during recursion");
    finder.exprs
}
/// Convert any `Expression` to an `Expression::Column`. An expression that is
/// already a column is cloned; anything else is referenced by its column name.
pub fn expr_as_column_expr(expr: &Expression) -> Result<Expression> {
    if matches!(expr, Expression::Column(_)) {
        Ok(expr.clone())
    } else {
        Ok(Expression::Column(expr.column_name()))
    }
}
/// Rebuilds an `expr` as a projection on top of a collection of `Expression`'s.
///
/// For example, the Expression `a + b < 1` would require, as input, the 2
/// individual columns, `a` and `b`. But, if the base exprs already
/// contain the `a + b` result, then that may be used in lieu of the `a` and
/// `b` columns.
///
/// This is useful in the context of a query like:
///
/// SELECT a + b < 1 ... GROUP BY a + b
///
/// where post-aggregation, `a + b` need not be a projection against the
/// individual columns `a` and `b`, but rather it is a projection against the
/// `a + b` found in the GROUP BY.
pub fn rebase_expr(expr: &Expression, base_exprs: &[Expression]) -> Result<Expression> {
    clone_with_replacement(expr, &|nested| {
        // Sub-expressions not present in the base set are left for recursion.
        if !base_exprs.contains(nested) {
            return Ok(None);
        }
        // A sub-expression already computed upstream collapses to a column.
        Ok(Some(expr_as_column_expr(nested)?))
    })
}
// Rebuilds an `expr` to ColumnExpr when some expressions already processed in upstream
// Skip Sort, Alias because we can go into the inner nest_exprs
pub fn rebase_expr_from_input(expr: &Expression, schema: &DataSchemaRef) -> Result<Expression> {
    clone_with_replacement(expr, &|nest_exprs| match nest_exprs {
        // These node kinds are never replaced directly; recursion continues
        // into their children. A literal with no column name has no matching
        // input column either.
        Expression::Sort { .. }
        | Expression::Column(_)
        | Expression::Literal {
            column_name: None, ..
        }
        | Expression::Alias(_, _) => Ok(None),
        _ => {
            // Any other expression already produced by the input schema is
            // collapsed to a column reference to it.
            if schema.field_with_name(&nest_exprs.column_name()).is_ok() {
                Ok(Some(expr_as_column_expr(nest_exprs)?))
            } else {
                Ok(None)
            }
        }
    })
}
/// Unwraps a `Sort` node to its inner expression; any other expression is
/// returned unchanged.
pub fn sort_to_inner_expr(expr: &Expression) -> Expression {
    if let Expression::Sort { expr: inner, .. } = expr {
        (**inner).clone()
    } else {
        expr.clone()
    }
}
/// Determines if the set of `Expression`'s are a valid projection on the input
/// `Expression::Column`'s. Returns the first column referenced by `exprs` that
/// is missing from `columns`, or `None` when all are satisfied.
pub fn find_columns_not_satisfy_exprs(
    columns: &[Expression],
    exprs: &[Expression],
) -> Result<Option<Expression>> {
    // Every entry in `columns` must itself be a bare column reference.
    for column in columns {
        if !matches!(column, Expression::Column(_)) {
            return Err(ErrorCode::SyntaxException(
                "Expression::Column are required".to_string(),
            ));
        }
    }
    let referenced = find_column_exprs(exprs);
    Ok(referenced.into_iter().find(|expr| !columns.contains(expr)))
}
/// Returns a cloned `expr`, but any of the `expr`'s in the tree may be
/// replaced/customized by the replacement function.
///
/// The replacement function is called repeatedly with `expr`, starting with
/// the argument `expr`, then descending depth-first through its
/// descendants. The function chooses to replace or keep (clone) each `expr`.
///
/// The function's return type is `Result<Option<Expression>>>`, where:
///
/// * `Ok(Some(replacement_expr))`: A replacement `expr` is provided; it is
/// swapped in at the particular node in the tree. Any nested `expr` are
/// not subject to cloning/replacement.
/// * `Ok(None)`: A replacement `expr` is not provided. The `expr` is
/// recreated, with all of its nested `expr`'s subject to
/// cloning/replacement.
/// * `Err(err)`: Any error returned by the function is returned as-is by
/// `clone_with_replacement()`.
fn clone_with_replacement<F>(expr: &Expression, replacement_fn: &F) -> Result<Expression>
where F: Fn(&Expression) -> Result<Option<Expression>> {
    let replacement_opt = replacement_fn(expr)?;
    match replacement_opt {
        // If we were provided a replacement, use the replacement. Do not
        // descend further.
        Some(replacement) => Ok(replacement),
        // No replacement was provided, clone the node and recursively call
        // clone_with_replacement() on any nested Expressionessions.
        None => match expr {
            Expression::Wildcard => Ok(Expression::Wildcard),
            Expression::Alias(alias_name, nested_expr) => Ok(Expression::Alias(
                alias_name.clone(),
                Box::new(clone_with_replacement(&**nested_expr, replacement_fn)?),
            )),
            Expression::UnaryExpression {
                op,
                expr: nested_expr,
            } => Ok(Expression::UnaryExpression {
                op: op.clone(),
                expr: Box::new(clone_with_replacement(&**nested_expr, replacement_fn)?),
            }),
            Expression::BinaryExpression { left, op, right } => Ok(Expression::BinaryExpression {
                left: Box::new(clone_with_replacement(&**left, replacement_fn)?),
                op: op.clone(),
                right: Box::new(clone_with_replacement(&**right, replacement_fn)?),
            }),
            Expression::ScalarFunction { op, args } => Ok(Expression::ScalarFunction {
                op: op.clone(),
                args: args
                    .iter()
                    .map(|e| clone_with_replacement(e, replacement_fn))
                    .collect::<Result<Vec<Expression>>>()?,
            }),
            Expression::AggregateFunction {
                op,
                distinct,
                params,
                args,
            } => Ok(Expression::AggregateFunction {
                op: op.clone(),
                distinct: *distinct,
                params: params.clone(),
                args: args
                    .iter()
                    .map(|e| clone_with_replacement(e, replacement_fn))
                    .collect::<Result<Vec<Expression>>>()?,
            }),
            Expression::Sort {
                expr: nested_expr,
                asc,
                nulls_first,
                origin_expr,
            } => Ok(Expression::Sort {
                expr: Box::new(clone_with_replacement(&**nested_expr, replacement_fn)?),
                asc: *asc,
                nulls_first: *nulls_first,
                // Note: origin_expr is cloned as-is, not subject to replacement.
                origin_expr: origin_expr.clone(),
            }),
            Expression::Cast {
                expr: nested_expr,
                data_type,
                pg_style,
            } => Ok(Expression::Cast {
                expr: Box::new(clone_with_replacement(&**nested_expr, replacement_fn)?),
                data_type: data_type.clone(),
                pg_style: *pg_style,
            }),
            Expression::MapAccess { name, args } => Ok(Expression::MapAccess {
                name: name.clone(),
                args: args
                    .iter()
                    .map(|e| clone_with_replacement(e, replacement_fn))
                    .collect::<Result<Vec<Expression>>>()?,
            }),
            // Leaf expressions (and subqueries, which are treated as opaque)
            // carry no rewritable children; clone them unchanged.
            Expression::Column(_)
            | Expression::QualifiedColumn(_)
            | Expression::Literal { .. }
            | Expression::Subquery { .. }
            | Expression::ScalarSubquery { .. } => Ok(expr.clone()),
        },
    }
}
/// Returns mapping of each alias (`String`) to the exprs (`Expression`) it is
/// aliasing. A later duplicate alias overwrites an earlier one.
pub fn extract_aliases(exprs: &[Expression]) -> HashMap<String, Expression> {
    let mut aliases = HashMap::new();
    for expr in exprs {
        if let Expression::Alias(alias_name, aliased) = expr {
            aliases.insert(alias_name.clone(), (**aliased).clone());
        }
    }
    aliases
}
/// Rebuilds an `expr` with columns that refer to aliases replaced by the
/// alias' underlying `expr`.
pub fn resolve_aliases_to_exprs(
    expr: &Expression,
    aliases: &HashMap<String, Expression>,
) -> Result<Expression> {
    clone_with_replacement(expr, &|nested| {
        if let Expression::Column(name) = nested {
            // A column naming a known alias is replaced by the aliased expr;
            // unknown columns are left alone.
            Ok(aliases.get(name).cloned())
        } else {
            Ok(None)
        }
    })
}
/// Rebuilds an `expr` using the inner expr for expression
/// `(a + b) as c` ---> `(a + b)`
pub fn unwrap_alias_exprs(expr: &Expression) -> Result<Expression> {
    clone_with_replacement(expr, &|nested| {
        if let Expression::Alias(_, inner) = nested {
            Ok(Some((**inner).clone()))
        } else {
            Ok(None)
        }
    })
}
/// Visitor that derives the result `DataTypePtr` of an expression tree by
/// simulating a post-order evaluation over a type stack.
pub struct ExpressionDataTypeVisitor {
    // Working stack of the types produced by already-visited sub-expressions.
    stack: Vec<DataTypePtr>,
    // Schema used to resolve column names to field types.
    input_schema: DataSchemaRef,
}
impl ExpressionDataTypeVisitor {
    /// Creates a visitor that resolves column types against `input_schema`.
    pub fn create(input_schema: DataSchemaRef) -> ExpressionDataTypeVisitor {
        ExpressionDataTypeVisitor {
            input_schema,
            stack: vec![],
        }
    }
    /// Returns the resulting type; after a complete traversal exactly one
    /// entry must remain on the stack.
    pub fn finalize(mut self) -> Result<DataTypePtr> {
        match self.stack.len() {
            1 => Ok(self.stack.remove(0)),
            _ => Err(ErrorCode::LogicalError(
                "Stack has too many elements in ExpressionDataTypeVisitor::finalize",
            )),
        }
    }
    /// Pops `args_size` argument types off the stack, resolves function `op`
    /// against them, and pushes its return type.
    ///
    /// NOTE(review): popping yields the arguments in reverse order relative to
    /// the original expression — confirm that function type resolution is
    /// insensitive to argument order.
    fn visit_function(mut self, op: &str, args_size: usize) -> Result<ExpressionDataTypeVisitor> {
        let features = FunctionFactory::instance().get_features(op)?;
        validate_function_arg(
            op,
            args_size,
            features.variadic_arguments,
            features.num_arguments,
        )?;
        let mut arguments = Vec::with_capacity(args_size);
        for index in 0..args_size {
            arguments.push(match self.stack.pop() {
                None => Err(ErrorCode::LogicalError(format!(
                    "Expected {} arguments, actual {}.",
                    args_size, index
                ))),
                Some(element) => Ok(element),
            }?);
        }
        let arguments: Vec<&DataTypePtr> = arguments.iter().collect();
        let function = FunctionFactory::instance().get(op, &arguments)?;
        let return_type = function.return_type();
        self.stack.push(return_type);
        Ok(self)
    }
}
impl ExpressionVisitor for ExpressionDataTypeVisitor {
    fn pre_visit(self, _expr: &Expression) -> Result<Recursion<Self>> {
        Ok(Recursion::Continue(self))
    }
    /// Post-order step: children have already pushed their types, so each arm
    /// consumes the argument types it needs and pushes this node's own type.
    fn post_visit(mut self, expr: &Expression) -> Result<Self> {
        match expr {
            Expression::Column(s) => {
                let field = self.input_schema.field_with_name(s)?;
                self.stack.push(field.data_type().clone());
                Ok(self)
            }
            Expression::Wildcard => Result::Err(ErrorCode::IllegalDataType(
                "Wildcard expressions are not valid to get return type",
            )),
            Expression::QualifiedColumn(_) => Err(ErrorCode::LogicalError(
                "QualifiedColumn should be resolve in analyze.",
            )),
            Expression::Literal { data_type, .. } => {
                self.stack.push(data_type.clone());
                Ok(self)
            }
            Expression::Subquery { query_plan, .. } => {
                let data_type = Expression::to_subquery_type(query_plan);
                self.stack.push(data_type);
                Ok(self)
            }
            Expression::ScalarSubquery { query_plan, .. } => {
                let data_type = Expression::to_subquery_type(query_plan);
                self.stack.push(data_type);
                Ok(self)
            }
            Expression::BinaryExpression { op, .. } => self.visit_function(op, 2),
            Expression::UnaryExpression { op, .. } => self.visit_function(op, 1),
            Expression::ScalarFunction { op, args } => self.visit_function(op, args.len()),
            expr @ Expression::AggregateFunction { args, .. } => {
                // Pop arguments; their types are not needed since the
                // aggregate function resolves its own return type below.
                for index in 0..args.len() {
                    if self.stack.pop().is_none() {
                        return Err(ErrorCode::LogicalError(format!(
                            "Expected {} arguments, actual {}.",
                            args.len(),
                            index
                        )));
                    }
                }
                let aggregate_function = expr.to_aggregate_function(&self.input_schema)?;
                let return_type = aggregate_function.return_type()?;
                self.stack.push(return_type);
                Ok(self)
            }
            Expression::Cast { data_type, .. } => {
                // Replace the operand's type with the cast's target type.
                let inner_type = match self.stack.pop() {
                    None => Err(ErrorCode::LogicalError(
                        "Cast expr expected 1 arguments, actual 0.",
                    )),
                    Some(_) => Ok(data_type),
                }?;
                self.stack.push(inner_type.clone());
                Ok(self)
            }
            Expression::MapAccess { args, .. } => self.visit_function("get_path", args.len()),
            // Alias and Sort are transparent: they forward their child's type,
            // which is already on the stack.
            Expression::Alias(_, _) | Expression::Sort { .. } => Ok(self),
        }
    }
}
// This visitor is for recursively visiting expression tree and collects all columns.
pub struct RequireColumnsVisitor {
    /// Names of every column referenced by the visited expression tree.
    pub required_columns: HashSet<String>,
}
impl RequireColumnsVisitor {
    /// Creates a visitor with no collected columns.
    pub fn default() -> Self {
        Self {
            required_columns: HashSet::new(),
        }
    }
    /// Walks `expr` and returns the set of column names it references.
    pub fn collect_columns_from_expr(expr: &Expression) -> Result<HashSet<String>> {
        let visitor = expr.accept(Self::default())?;
        Ok(visitor.required_columns)
    }
}
impl ExpressionVisitor for RequireColumnsVisitor {
    /// Records every column name encountered; recursion always continues.
    fn pre_visit(mut self, expr: &Expression) -> Result<Recursion<Self>> {
        if let Expression::Column(name) = expr {
            self.required_columns.insert(name.clone());
        }
        Ok(Recursion::Continue(self))
    }
}
| 35.291045 | 98 | 0.568143 |
22054b9b135086f96b11a85cab7b2ba3da758d0f | 7,311 | use sdl2::{pixels, render, video};
use std::mem;
/// Decodes the 1-bit-per-pixel `DATA` bitmap into an ARGB8888 SDL texture.
///
/// A cleared bit becomes an opaque white pixel (`u32::MAX`) and a set bit a
/// fully transparent one (`0`); blend mode is enabled so glyphs can be tinted.
pub fn init(
    creator: &render::TextureCreator<video::WindowContext>,
) -> Result<render::Texture, String> {
    let mut font = creator
        .create_texture_static(pixels::PixelFormatEnum::ARGB8888, WIDTH as u32, HEIGHT as u32)
        .map_err(|e| e.to_string())?;
    // Zero-initialize instead of reading from `MaybeUninit::uninit()`, which
    // was undefined behavior; every element is overwritten below anyway.
    let mut pixels32 = [0u32; (WIDTH * HEIGHT) as usize];
    for (i, byte) in DATA.iter().enumerate() {
        // Expand each source byte into 8 pixels, least-significant bit first.
        for bit in 0..8 {
            pixels32[i * 8 + bit] = if ((*byte as usize) & (1 << bit)) == 0 {
                u32::MAX // bit clear -> opaque white
            } else {
                0 // bit set -> transparent
            };
        }
    }
    // SAFETY: u8 has alignment 1, so reinterpreting the u32 buffer as bytes
    // leaves empty prefix/suffix slices; the middle slice covers every byte.
    let pixel_bytes = unsafe { pixels32.align_to::<u8>().1 };
    font.update(None, pixel_bytes, WIDTH as usize * mem::size_of::<u32>())
        .map_err(|e| e.to_string())?;
    font.set_blend_mode(render::BlendMode::Blend);
    Ok(font)
}
pub fn width(len: usize) -> i32 {
len as i32 * CHAR_WIDTH + CHAR_WIDTH
}
/// Total bitmap width in pixels.
pub const WIDTH: i32 = 128;
/// Total bitmap height in pixels.
pub const HEIGHT: i32 = 64;
/// Number of glyph cells per bitmap row.
pub const CHARS_BY_ROW: i32 = 16;
/// Number of glyph rows in the bitmap.
pub const CHARS_BY_COL: i32 = 8;
/// Width of a single glyph cell in pixels.
pub const CHAR_WIDTH: i32 = WIDTH / CHARS_BY_ROW;
/// Height of a single glyph cell in pixels.
pub const CHAR_HEIGHT: i32 = HEIGHT / CHARS_BY_COL;
// 1-bit-per-pixel bitmap for the 128x64 font sheet (16x8 glyphs of 8x8 px):
// one byte per 8 horizontal pixels, least-significant bit leftmost, as
// decoded by `init` above. Do not edit by hand.
const DATA: &[u8] = &[
    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
    0xff, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0,
    0xff, 0xee, 0xee, 0xee, 0xe6, 0xfe, 0xee, 0xe6, 0xfe, 0xee, 0xe6, 0xfe, 0xee, 0xe6, 0xfe, 0xee,
    0xff, 0xee, 0xee, 0xfe, 0xfe, 0xee, 0xfe, 0xfe, 0xee, 0xfe, 0xfe, 0xee, 0xfe, 0xfe, 0xee, 0xfe,
    0xff, 0xea, 0xee, 0xee, 0xef, 0xef, 0xee, 0xef, 0xef, 0xee, 0xef, 0xef, 0xee, 0xef, 0xef, 0xee,
    0xff, 0xee, 0xee, 0xee, 0xee, 0xee, 0xee, 0xee, 0xee, 0xee, 0xee, 0xee, 0xee, 0xee, 0xee, 0xee,
    0xff, 0xee, 0xee, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0,
    0xff, 0xe0, 0xe0, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
    0xe0, 0xe0, 0xe2, 0xe0, 0xe0, 0xe2, 0xe0, 0xe0, 0xe2, 0xe0, 0xe0, 0xe2, 0xe0, 0xe0, 0xe2, 0xe0,
    0xfe, 0xfe, 0xee, 0xee, 0xe6, 0xee, 0xfe, 0xfe, 0xee, 0xee, 0xe6, 0xee, 0xfe, 0xfe, 0xee, 0xee,
    0xee, 0xee, 0xee, 0xff, 0xfe, 0xee, 0xee, 0xee, 0xee, 0xff, 0xfe, 0xee, 0xee, 0xee, 0xee, 0xff,
    0xef, 0xef, 0xfe, 0xee, 0xef, 0xee, 0xef, 0xef, 0xfe, 0xee, 0xef, 0xfe, 0xef, 0xef, 0xfe, 0xee,
    0xee, 0xee, 0xee, 0xee, 0xfe, 0xee, 0xee, 0xee, 0xee, 0xee, 0xfe, 0xee, 0xee, 0xee, 0xee, 0xee,
    0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0,
    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
    0xff, 0xfe, 0xf5, 0xff, 0xfb, 0xff, 0xf9, 0xfb, 0xef, 0xfe, 0xff, 0xff, 0xff, 0xff, 0xff, 0xef,
    0xff, 0xfe, 0xf5, 0xf5, 0xe0, 0xec, 0xf6, 0xfb, 0xf7, 0xfd, 0xf5, 0xff, 0xff, 0xff, 0xff, 0xef,
    0xff, 0xfe, 0xff, 0xe0, 0xfa, 0xf4, 0xfa, 0xff, 0xf7, 0xfd, 0xfb, 0xfb, 0xff, 0xff, 0xff, 0xf7,
    0xff, 0xfe, 0xff, 0xf5, 0xe0, 0xfb, 0xed, 0xff, 0xf7, 0xfd, 0xf5, 0xf1, 0xff, 0xf1, 0xff, 0xfb,
    0xff, 0xfe, 0xff, 0xe0, 0xeb, 0xe5, 0xea, 0xff, 0xf7, 0xfd, 0xff, 0xfb, 0xff, 0xff, 0xff, 0xfd,
    0xff, 0xff, 0xff, 0xf5, 0xe0, 0xe6, 0xf6, 0xff, 0xf7, 0xfd, 0xff, 0xff, 0xfb, 0xff, 0xff, 0xfe,
    0xff, 0xfe, 0xff, 0xff, 0xfb, 0xff, 0xe9, 0xff, 0xef, 0xfe, 0xff, 0xff, 0xfb, 0xff, 0xfb, 0xfe,
    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
    0xe0, 0xf8, 0xe0, 0xe0, 0xee, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf1,
    0xee, 0xfb, 0xef, 0xef, 0xee, 0xfe, 0xfe, 0xef, 0xee, 0xee, 0xff, 0xff, 0xf7, 0xff, 0xfd, 0xee,
    0xe6, 0xfb, 0xef, 0xef, 0xee, 0xfe, 0xfe, 0xef, 0xee, 0xee, 0xfd, 0xfd, 0xfb, 0xf1, 0xfb, 0xef,
    0xea, 0xfb, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xf7, 0xe0, 0xe0, 0xff, 0xff, 0xfd, 0xff, 0xf7, 0xf7,
    0xec, 0xfb, 0xfe, 0xef, 0xef, 0xef, 0xee, 0xfb, 0xee, 0xef, 0xff, 0xff, 0xfb, 0xf1, 0xfb, 0xfb,
    0xee, 0xfb, 0xfe, 0xef, 0xef, 0xef, 0xee, 0xfb, 0xee, 0xef, 0xfd, 0xfd, 0xf7, 0xff, 0xfd, 0xff,
    0xe0, 0xe0, 0xe0, 0xe0, 0xef, 0xe0, 0xe0, 0xfb, 0xe0, 0xef, 0xff, 0xfd, 0xff, 0xff, 0xff, 0xfb,
    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
    0xf1, 0xf1, 0xf0, 0xf1, 0xf0, 0xe0, 0xe0, 0xf1, 0xee, 0xe0, 0xef, 0xee, 0xfe, 0xee, 0xee, 0xf1,
    0xee, 0xee, 0xee, 0xee, 0xee, 0xfe, 0xfe, 0xee, 0xee, 0xfb, 0xef, 0xf6, 0xfe, 0xe4, 0xec, 0xee,
    0xe2, 0xee, 0xee, 0xfe, 0xee, 0xfe, 0xfe, 0xfe, 0xee, 0xfb, 0xef, 0xfa, 0xfe, 0xea, 0xea, 0xee,
    0xea, 0xe0, 0xf0, 0xfe, 0xee, 0xf0, 0xf0, 0xfe, 0xe0, 0xfb, 0xef, 0xfc, 0xfe, 0xee, 0xe6, 0xee,
    0xe2, 0xee, 0xee, 0xfe, 0xee, 0xfe, 0xfe, 0xe6, 0xee, 0xfb, 0xee, 0xfa, 0xfe, 0xee, 0xee, 0xee,
    0xfe, 0xee, 0xee, 0xee, 0xee, 0xfe, 0xfe, 0xee, 0xee, 0xfb, 0xee, 0xf6, 0xfe, 0xee, 0xee, 0xee,
    0xf1, 0xee, 0xf0, 0xf1, 0xf0, 0xe0, 0xfe, 0xf1, 0xee, 0xe0, 0xf1, 0xee, 0xe0, 0xee, 0xee, 0xf1,
    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
    0xf0, 0xf1, 0xf0, 0xe1, 0xe0, 0xee, 0xee, 0xee, 0xee, 0xee, 0xe0, 0xe7, 0xfe, 0xfc, 0xfb, 0xff,
    0xee, 0xee, 0xee, 0xfe, 0xfb, 0xee, 0xee, 0xee, 0xee, 0xee, 0xef, 0xf7, 0xfe, 0xfd, 0xf5, 0xff,
    0xee, 0xee, 0xee, 0xfe, 0xfb, 0xee, 0xee, 0xee, 0xf5, 0xee, 0xf7, 0xf7, 0xfd, 0xfd, 0xff, 0xff,
    0xf0, 0xee, 0xf0, 0xf1, 0xfb, 0xee, 0xee, 0xee, 0xfb, 0xe1, 0xfb, 0xf7, 0xfb, 0xfd, 0xff, 0xff,
    0xfe, 0xea, 0xf6, 0xef, 0xfb, 0xee, 0xf5, 0xea, 0xf5, 0xef, 0xfd, 0xf7, 0xf7, 0xfd, 0xff, 0xff,
    0xfe, 0xf6, 0xee, 0xef, 0xfb, 0xee, 0xf5, 0xe4, 0xee, 0xef, 0xfe, 0xf7, 0xef, 0xfd, 0xff, 0xff,
    0xfe, 0xe9, 0xee, 0xf0, 0xfb, 0xf1, 0xfb, 0xee, 0xee, 0xf0, 0xe0, 0xe7, 0xef, 0xfc, 0xff, 0xe0,
    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
    0xfe, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfb, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
    0xfe, 0xff, 0xfe, 0xff, 0xef, 0xff, 0xe3, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
    0xff, 0xe1, 0xf0, 0xe1, 0xe1, 0xf1, 0xfd, 0xe0, 0xfe, 0xfb, 0xef, 0xee, 0xfe, 0xe4, 0xf0, 0xf1,
    0xff, 0xee, 0xee, 0xfe, 0xee, 0xee, 0xfd, 0xee, 0xf0, 0xfb, 0xef, 0xf6, 0xfe, 0xea, 0xee, 0xee,
    0xff, 0xee, 0xee, 0xfe, 0xee, 0xe0, 0xe0, 0xe0, 0xee, 0xfb, 0xef, 0xf8, 0xfe, 0xea, 0xee, 0xee,
    0xff, 0xee, 0xee, 0xfe, 0xee, 0xfe, 0xfd, 0xef, 0xee, 0xfb, 0xef, 0xf6, 0xfe, 0xea, 0xee, 0xee,
    0xff, 0xe1, 0xf0, 0xe1, 0xe1, 0xe1, 0xfd, 0xf1, 0xee, 0xf1, 0xf0, 0xee, 0xe1, 0xea, 0xee, 0xf1,
    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xe0, 0xfe, 0xe0, 0xff, 0xff,
    0xff, 0xff, 0xff, 0xff, 0xfd, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xee, 0xfe, 0xee, 0xed, 0xff,
    0xe0, 0xe1, 0xe1, 0xe1, 0xe0, 0xee, 0xee, 0xea, 0xee, 0xee, 0xe0, 0xee, 0xfe, 0xee, 0xf2, 0xff,
    0xee, 0xee, 0xfe, 0xfe, 0xfd, 0xee, 0xee, 0xea, 0xf5, 0xee, 0xf7, 0xee, 0xfe, 0xea, 0xff, 0xff,
    0xee, 0xee, 0xfe, 0xe0, 0xfd, 0xee, 0xee, 0xea, 0xfb, 0xe1, 0xfb, 0xee, 0xfe, 0xee, 0xff, 0xff,
    0xf0, 0xe0, 0xfe, 0xef, 0xfd, 0xee, 0xf5, 0xea, 0xf5, 0xef, 0xfd, 0xee, 0xfe, 0xee, 0xff, 0xff,
    0xfe, 0xef, 0xfe, 0xf0, 0xe3, 0xf1, 0xfb, 0xf5, 0xee, 0xf0, 0xe0, 0xe0, 0xfe, 0xe0, 0xff, 0xff,
    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
];
| 73.11 | 96 | 0.659007 |
e660a28682c2b7cc9a8274f5132c2f7927ac8532 | 363 | mod adventofcode;
mod data;
mod graph;
mod list;
mod nested_integer;
mod read;
mod test_gen;
mod tree;
mod unionfind;
pub use crate::adventofcode::*;
pub use crate::data::*;
pub use crate::graph::*;
pub use crate::list::*;
pub use crate::nested_integer::*;
pub use crate::read::*;
pub use crate::test_gen::*;
pub use crate::tree::*;
pub use crate::unionfind::*;
| 18.15 | 33 | 0.699725 |
4854b3d654283b2b11d7e863f1f18ca53c39f9a9 | 90,697 | //! Query Builder
//!
/// Shorthand type aliases for the fully-spelled-out builder types used below.
pub mod synonym {
    use crate::query_builder as qb;
    /// A select builder with no source, filter, columns, grouping, having,
    /// ordering, limit, or lock mode set yet.
    pub type EmptySelectBuilder = qb::SelectBuilder<
        qb::EmptyFromClause,
        qb::EmptyWhereClause,
        (),
        qb::EmptyGroupByClause,
        qb::EmptyHavingClause,
        qb::EmptyOrderByClause,
        qb::EmptyLimitClause,
        qb::LockModeDefaultBehavior,
    >;
    /// A builder whose FROM clause has just been (re)assigned to `QS`.
    pub type SourceUpdatedBuilder<QS> = qb::SelectBuilder<
        qb::FromClause<QS>,
        qb::EmptyWhereClause,
        (),
        qb::EmptyGroupByClause,
        qb::EmptyHavingClause,
        qb::EmptyOrderByClause,
        qb::EmptyLimitClause,
        qb::LockModeDefaultBehavior,
    >;
    /// INNER JOIN of `L` with (a reference to) `R` under condition `EXP`.
    pub type Join<L, R, EXP> = qb::Join<L, IntoQuerySourceRef<R>, EXP>;
    /// LEFT OUTER JOIN: the right side becomes nullable.
    pub type LeftOuterJoin<L, R, EXP> = qb::Join<L, IntoNullableQuerySourceRef<R>, EXP>;
    /// RIGHT OUTER JOIN: the left side becomes nullable.
    pub type RightOuterJoin<L, R, EXP> =
        qb::Join<<L as qb::QuerySource>::NullableSelf, IntoQuerySourceRef<R>, EXP>;
    /// CROSS JOIN: no join condition.
    pub type CrossJoin<L, R> = qb::Join<L, IntoQuerySourceRef<R>, qb::BlankBoolExpression>;
    pub type IntoQuerySource<QS> = <QS as qb::IntoQuerySource>::QuerySource;
    pub type IntoNullableQuerySource<QS> =
        <<QS as qb::IntoQuerySource>::QuerySource as qb::QuerySource>::NullableSelf;
    pub type IntoQuerySourceRef<QS> = qb::QuerySourceRef<IntoQuerySource<QS>>;
    pub type IntoNullableQuerySourceRef<QS> = qb::QuerySourceRef<IntoNullableQuerySource<QS>>;
}
use std::borrow::{Borrow, Cow};
use std::cell::RefCell;
use std::convert::TryInto;
use std::io::Write;
use std::marker::PhantomData;
use std::rc::Rc;
use std::string::FromUtf8Error;
pub use synonym::EmptySelectBuilder;
// The name is provisional for now.
//
// Each builder step needs to take new arguments and return a new type.
// Expressed naively that means returning a type with a fresh type parameter,
// which would seem to require GATs. Instead we try a `From`-like formulation
// that works without GATs, and fall back to GATs only if that fails (it
// probably won't be necessary).
pub trait NewSelect: Default {
    /// Creates a fresh, empty builder via `Default`.
    fn new() -> Self {
        Self::default()
    }
}
impl NewSelect for synonym::EmptySelectBuilder {}
/// Adds the initial FROM source to an empty select builder.
pub trait SelectSource<T>
where
    T: IntoQuerySource,
    T::QuerySource: QuerySource + Clone,
{
    /// Builder type produced after the source is attached.
    type Next: Sized;
    /// Attaches `source` and returns the new builder together with a
    /// shareable reference to the freshly aliased source.
    fn source(self, source: T) -> (Self::Next, synonym::IntoQuerySourceRef<T>);
}
impl<T> SelectSource<T> for synonym::EmptySelectBuilder
where
    T: IntoQuerySource,
    T::QuerySource: QuerySource + Clone,
{
    type Next = synonym::SourceUpdatedBuilder<synonym::IntoQuerySourceRef<T>>;
    /// Installs the first FROM source, aliased by its position in the query.
    fn source(self, source: T) -> (Self::Next, synonym::IntoQuerySourceRef<T>) {
        let alias_index = self.sources_num + 1;
        let alias = SourceAlias::new(self.sources_alias_name.clone(), alias_index);
        let source_ref = QuerySourceRef::new(source.into_query_source(), alias);
        let builder = SelectBuilder {
            sources: FromClause::new(source_ref.clone()),
            sources_num: alias_index,
            sources_alias_name: self.sources_alias_name,
            filter: self.filter,
            columns: (),
            group_by: self.group_by,
            having: self.having,
            order_by: self.order_by,
            limit: self.limit,
            lock_mode: self.lock_mode,
        };
        (builder, source_ref)
    }
}
/// Adds an INNER JOIN of `T` to a joinable select builder.
pub trait SelectInnerJoin<T, ON, EXP>
where
    T: IntoQuerySource,
    T::QuerySource: QuerySource + Clone,
{
    /// Builder type produced after the join is attached.
    type Next;
    /// Joins `source` using the condition built by `on`, returning the new
    /// builder plus a reference to the joined source.
    fn inner_join(self, source: T, on: ON) -> (Self::Next, synonym::IntoQuerySourceRef<T>);
}
// A builder that may still accept joins: a FROM source is set, but no filter,
// columns, grouping, having, ordering, limit, or lock mode yet.
type JoinableSelect<QS> = SelectBuilder<
    FromClause<QS>,
    EmptyWhereClause,
    (),
    EmptyGroupByClause,
    EmptyHavingClause,
    EmptyOrderByClause,
    EmptyLimitClause,
    LockModeDefaultBehavior,
>;
impl<QS, QS2, ON, EXP> SelectInnerJoin<QS2, ON, EXP> for JoinableSelect<QS>
where
    QS: QuerySource,
    QS2: IntoQuerySource,
    QS2::QuerySource: QuerySource + Clone,
    QS2::Database: Joinable<QS::Database>,
    ON: FnMut(QuerySourceRef<QS2::QuerySource>) -> EXP,
    EXP: Expression<SqlType = SqlTypeBool>,
{
    type Next = synonym::SourceUpdatedBuilder<synonym::Join<QS, QS2, EXP>>;
    /// Attaches `source` as an INNER JOIN with the ON condition produced by
    /// `on` against the freshly aliased source.
    fn inner_join(self, source: QS2, mut on: ON) -> (Self::Next, synonym::IntoQuerySourceRef<QS2>) {
        let alias_index = self.sources_num + 1;
        let source_ref = QuerySourceRef::new(
            source.into_query_source(),
            SourceAlias::new(self.sources_alias_name.clone(), alias_index),
        );
        // Build the ON expression from a clone; another clone is returned to
        // the caller so columns of the joined source can be referenced later.
        let join_condition = on(source_ref.clone());
        let builder = SelectBuilder {
            sources: FromClause::new(Join::Inner(
                self.sources.unwrap(),
                source_ref.clone(),
                join_condition,
            )),
            sources_num: alias_index,
            sources_alias_name: self.sources_alias_name,
            filter: self.filter,
            columns: (),
            group_by: self.group_by,
            having: self.having,
            order_by: self.order_by,
            limit: self.limit,
            lock_mode: self.lock_mode,
        };
        (builder, source_ref)
    }
}
/// Adds a LEFT OUTER JOIN of `QS2` to a joinable select builder; the joined
/// source is exposed through its nullable form.
pub trait SelectLeftOuterJoin<QS2, ON, EXP>
where
    QS2: IntoQuerySource,
    QS2::QuerySource: QuerySource + Clone,
{
    /// Builder type produced after the join is attached.
    type Next;
    /// Joins `source` with the condition built by `on`, returning the new
    /// builder plus a nullable reference to the joined source.
    fn left_outer_join(
        self,
        source: QS2,
        on: ON,
    ) -> (Self::Next, synonym::IntoNullableQuerySourceRef<QS2>);
}
impl<QS, QS2, ON, EXP> SelectLeftOuterJoin<QS2, ON, EXP> for JoinableSelect<QS>
where
    QS: QuerySource,
    QS2: IntoQuerySource,
    QS2::QuerySource: QuerySource + Clone,
    QS2::Database: Joinable<QS::Database>,
    <QS2::QuerySource as QuerySource>::NullableSelf: Clone,
    ON: FnMut(QuerySourceRef<<QS2::QuerySource as QuerySource>::NullableSelf>) -> EXP,
    EXP: Expression<SqlType = SqlTypeBool>,
{
    type Next = synonym::SourceUpdatedBuilder<synonym::LeftOuterJoin<QS, QS2, EXP>>;
    /// Attaches `source` as a LEFT OUTER JOIN. The joined source is
    /// registered through its nullable counterpart since a LEFT OUTER JOIN
    /// may produce NULL rows for it.
    fn left_outer_join(
        self,
        source: QS2,
        mut on: ON,
    ) -> (Self::Next, synonym::IntoNullableQuerySourceRef<QS2>) {
        let alias_index = self.sources_num + 1;
        let source_ref = QuerySourceRef::new(
            source.into_query_source().nullable(),
            SourceAlias::new(self.sources_alias_name.clone(), alias_index),
        );
        let join_condition = on(source_ref.clone());
        let builder = SelectBuilder {
            sources: FromClause::new(Join::LeftOuter(
                self.sources.unwrap(),
                source_ref.clone(),
                join_condition,
            )),
            sources_num: alias_index,
            sources_alias_name: self.sources_alias_name,
            filter: self.filter,
            columns: (),
            group_by: self.group_by,
            having: self.having,
            order_by: self.order_by,
            limit: self.limit,
            lock_mode: self.lock_mode,
        };
        (builder, source_ref)
    }
}
/// Adds a CROSS JOIN (no ON condition) of `QS2` to a joinable select builder.
pub trait SelectCrossJoin<QS2>
where
    QS2: IntoQuerySource,
    QS2::QuerySource: QuerySource + Clone,
{
    /// Builder type produced after the join is attached.
    type Next;
    /// Joins `source`, returning the new builder plus a reference to it.
    fn cross_join(self, source: QS2) -> (Self::Next, synonym::IntoQuerySourceRef<QS2>);
}
impl<QS, QS2> SelectCrossJoin<QS2>
for SelectBuilder<
FromClause<QS>,
EmptyWhereClause,
(),
EmptyGroupByClause,
EmptyHavingClause,
EmptyOrderByClause,
EmptyLimitClause,
LockModeDefaultBehavior,
>
where
QS: QuerySource,
QS2: IntoQuerySource,
QS2::Database: Joinable<QS::Database>,
QS2::QuerySource: QuerySource + Clone,
{
type Next = synonym::SourceUpdatedBuilder<synonym::CrossJoin<QS, QS2>>;
fn cross_join(self, source: QS2) -> (Self::Next, synonym::IntoQuerySourceRef<QS2>) {
let sources_num = self.sources_num + 1;
let src_ref = QuerySourceRef::new(
source.into_query_source(),
SourceAlias::new(self.sources_alias_name.clone(), sources_num),
);
let ret_src_ref = src_ref.clone();
let new_builder = SelectBuilder {
sources: FromClause::new(Join::Cross(self.sources.unwrap(), src_ref)),
sources_num,
sources_alias_name: self.sources_alias_name,
filter: self.filter,
columns: (),
group_by: self.group_by,
having: self.having,
order_by: self.order_by,
limit: self.limit,
lock_mode: self.lock_mode,
};
(new_builder, ret_src_ref)
}
}
/// Assigns the SELECT column list `C` to a builder.
pub trait SelectColumns<C>
where
    C: Columns,
{
    /// Builder type produced after the columns are set.
    type Next;
    /// Installs `columns` as the projection of the query.
    fn select(self, columns: C) -> Self::Next;
}
impl<C, QS, W> SelectColumns<C>
    for SelectBuilder<
        QS,
        W,
        (),
        EmptyGroupByClause,
        EmptyHavingClause,
        EmptyOrderByClause,
        EmptyLimitClause,
        LockModeDefaultBehavior,
    >
where
    C: Columns,
{
    type Next = SelectBuilder<
        QS,
        W,
        C,
        EmptyGroupByClause,
        EmptyHavingClause,
        EmptyOrderByClause,
        EmptyLimitClause,
        LockModeDefaultBehavior,
    >;
    // Carries every clause across unchanged; only the unit column marker `()`
    // is replaced by the chosen column list.
    fn select(self, columns: C) -> Self::Next {
        SelectBuilder {
            sources: self.sources,
            sources_num: self.sources_num,
            sources_alias_name: self.sources_alias_name,
            filter: self.filter,
            columns,
            group_by: self.group_by,
            having: self.having,
            order_by: self.order_by,
            limit: self.limit,
            lock_mode: self.lock_mode,
        }
    }
}
/// Assigns the WHERE predicate `W` (a non-aggregate boolean expression).
pub trait SelectFilter<W>
where
    W: Expression<SqlType = SqlTypeBool, Aggregation = NonAggregate>,
{
    /// Builder type produced after the filter is set.
    type Next;
    /// Installs `expr` as the WHERE clause of the query.
    fn filter(self, expr: W) -> Self::Next;
}
impl<QS, W, C> SelectFilter<W>
    for SelectBuilder<
        FromClause<QS>,
        EmptyWhereClause,
        C,
        EmptyGroupByClause,
        EmptyHavingClause,
        EmptyOrderByClause,
        EmptyLimitClause,
        LockModeDefaultBehavior,
    >
where
    W: Expression<SqlType = SqlTypeBool, Aggregation = NonAggregate>,
{
    type Next = SelectBuilder<
        FromClause<QS>,
        WhereClause<W>,
        C,
        EmptyGroupByClause,
        EmptyHavingClause,
        EmptyOrderByClause,
        EmptyLimitClause,
        LockModeDefaultBehavior,
    >;
    // Carries every clause across unchanged and installs the WHERE expression.
    fn filter(self, expr: W) -> Self::Next {
        SelectBuilder {
            sources: self.sources,
            sources_num: self.sources_num,
            sources_alias_name: self.sources_alias_name,
            filter: WhereClause::new(expr),
            columns: self.columns,
            group_by: self.group_by,
            having: self.having,
            order_by: self.order_by,
            limit: self.limit,
            lock_mode: self.lock_mode,
        }
    }
}
/// Assigns the GROUP BY column list `G` (non-aggregate columns only).
pub trait SelectGroupBy<G>
where
    G: Columns<Aggregation = NonAggregate>,
{
    /// Builder type produced after the grouping is set.
    type Next;
    /// Installs `group` as the GROUP BY clause of the query.
    fn group_by(self, group: G) -> Self::Next;
}
impl<QS, W, C, G> SelectGroupBy<G>
for SelectBuilder<
FromClause<QS>,
W,
C,
EmptyGroupByClause,
EmptyHavingClause,
EmptyOrderByClause,
EmptyLimitClause,
LockModeDefaultBehavior,
>
where
G: Columns<Aggregation = NonAggregate>,
{
type Next = SelectBuilder<
FromClause<QS>,
W,
C,
GroupByClause<G>,
EmptyHavingClause,
EmptyOrderByClause,
EmptyLimitClause,
LockModeDefaultBehavior,
>;
fn group_by(self, group: G) -> Self::Next {
SelectBuilder {
sources: self.sources,
sources_num: self.sources_num,
sources_alias_name: self.sources_alias_name,
filter: self.filter,
columns: self.columns,
group_by: GroupByClause::new(group),
having: self.having,
order_by: self.order_by,
limit: self.limit,
lock_mode: self.lock_mode,
}
}
}
/// Adds the `HAVING` clause to the builder.
pub trait SelectHaving<H>
where
    H: Expression<SqlType = SqlTypeBool>,
{
    type Next;
    fn having(self, having: H) -> Self::Next;
}
// HAVING is only offered once a GROUP BY clause is present (the slot is
// `GroupByClause<G>`, not the empty marker), matching SQL semantics.
impl<QS, W, C, G, H> SelectHaving<H>
    for SelectBuilder<
        FromClause<QS>,
        W,
        C,
        GroupByClause<G>,
        EmptyHavingClause,
        EmptyOrderByClause,
        EmptyLimitClause,
        LockModeDefaultBehavior,
    >
where
    QS: QuerySource,
    G: Columns,
    H: Expression<SqlType = SqlTypeBool>,
{
    type Next = SelectBuilder<
        FromClause<QS>,
        W,
        C,
        GroupByClause<G>,
        HavingClause<H>,
        EmptyOrderByClause,
        EmptyLimitClause,
        LockModeDefaultBehavior,
    >;
    fn having(self, having: H) -> Self::Next {
        SelectBuilder {
            sources: self.sources,
            sources_num: self.sources_num,
            sources_alias_name: self.sources_alias_name,
            filter: self.filter,
            columns: self.columns,
            group_by: self.group_by,
            having: HavingClause::new(having),
            order_by: self.order_by,
            limit: self.limit,
            lock_mode: self.lock_mode,
        }
    }
}
/// Adds the `ORDER BY` clause to the builder.
pub trait SelectOrderBy<O>
where
    O: Orders,
{
    type Next;
    fn order_by(self, order: O) -> Self::Next;
}
// Requires a FROM clause and a still-empty ORDER BY slot.
impl<QS, W, C, G, H, O> SelectOrderBy<O>
    for SelectBuilder<
        FromClause<QS>,
        W,
        C,
        G,
        H,
        EmptyOrderByClause,
        EmptyLimitClause,
        LockModeDefaultBehavior,
    >
where
    QS: QuerySource,
    O: Orders,
{
    type Next = SelectBuilder<
        FromClause<QS>,
        W,
        C,
        G,
        H,
        OrderByClause<O>,
        EmptyLimitClause,
        LockModeDefaultBehavior,
    >;
    fn order_by(self, order: O) -> Self::Next {
        SelectBuilder {
            sources: self.sources,
            sources_num: self.sources_num,
            sources_alias_name: self.sources_alias_name,
            filter: self.filter,
            columns: self.columns,
            group_by: self.group_by,
            having: self.having,
            order_by: OrderByClause::new(order),
            limit: self.limit,
            lock_mode: self.lock_mode,
        }
    }
}
/// Adds the `LIMIT` clause to the builder.
pub trait SelectLimit<L>
where
    L: Into<Limit>,
{
    type Next;
    fn limit(self, limit: L) -> Self::Next;
}
// Accepts anything convertible to `Limit` (`usize` for a bare count, or
// `(offset, count)`); requires a FROM clause and a still-empty LIMIT slot.
impl<QS, W, C, G, H, O, L> SelectLimit<L>
    for SelectBuilder<FromClause<QS>, W, C, G, H, O, EmptyLimitClause, LockModeDefaultBehavior>
where
    QS: QuerySource,
    L: Into<Limit>,
{
    type Next =
        SelectBuilder<FromClause<QS>, W, C, G, H, O, LimitClause<Limit>, LockModeDefaultBehavior>;
    fn limit(self, limit: L) -> Self::Next {
        SelectBuilder {
            sources: self.sources,
            sources_num: self.sources_num,
            sources_alias_name: self.sources_alias_name,
            filter: self.filter,
            columns: self.columns,
            group_by: self.group_by,
            having: self.having,
            order_by: self.order_by,
            limit: LimitClause::new(limit.into()),
            lock_mode: self.lock_mode,
        }
    }
}
/// Appends `FOR UPDATE` (exclusive row lock) to the statement.
pub trait SelectForUpdate {
    type Next;
    fn for_update(self) -> Self::Next;
}
// Only available while the lock-mode slot is still the default behavior,
// so a lock mode can be chosen at most once.
impl<QS, W, C, G, H, O, L> SelectForUpdate
    for SelectBuilder<FromClause<QS>, W, C, G, H, O, L, LockModeDefaultBehavior>
where
    QS: QuerySource,
{
    type Next = SelectBuilder<FromClause<QS>, W, C, G, H, O, L, ForUpdate>;
    fn for_update(self) -> Self::Next {
        self.set_lock_mode(ForUpdate)
    }
}
/// Appends `LOCK IN SHARE MODE` (shared row lock) to the statement.
///
/// NOTE(review): the method is named `for_update`, identical to
/// `SelectForUpdate::for_update` — almost certainly a copy-paste slip
/// (expected something like `lock_in_share_mode`). With both traits in scope
/// callers must disambiguate via UFCS. Renaming would break existing callers,
/// so it is only flagged here; rename in a coordinated change.
pub trait SelectLockInShareMode {
    type Next;
    fn for_update(self) -> Self::Next;
}
impl<QS, W, C, G, H, O, L> SelectLockInShareMode
    for SelectBuilder<FromClause<QS>, W, C, G, H, O, L, LockModeDefaultBehavior>
where
    QS: QuerySource,
{
    type Next = SelectBuilder<FromClause<QS>, W, C, G, H, O, L, LockInShareMode>;
    fn for_update(self) -> Self::Next {
        self.set_lock_mode(LockInShareMode)
    }
}
// Each capability of SelectBuilder is split out into its own trait (the
// Select* traits above) so the type-state transitions stay independent.
/// Generates a clause pair: an empty marker type (`$empty_type`, renders
/// nothing) and a wrapper (`$type_name<T>`) that renders the SQL keyword
/// `$clause` followed by its inner value's SQL.
macro_rules! define_select_clause {
    ( $type_name:ident, $empty_type:tt, $clause:expr ) => {
        #[derive(Debug, Clone, Default)]
        pub struct $empty_type;
        #[derive(Debug, Clone)]
        pub struct $type_name<T>(T);
        impl $empty_type {
            pub fn new() -> Self {
                $empty_type
            }
        }
        impl<T> $type_name<T> {
            pub fn new(source: T) -> Self {
                $type_name(source)
            }
            // Consumes the clause and returns the wrapped value.
            pub fn unwrap(self) -> T {
                self.0
            }
            pub fn inner_ref(&self) -> &T {
                &self.0
            }
        }
        // The empty marker contributes nothing to the generated SQL.
        impl BuildSql for $empty_type {
            fn build_sql(
                &self,
                _buf: &mut Vec<u8>,
                _params: &mut Vec<Value>,
            ) -> Result<(), BuildSqlError> {
                Ok(())
            }
        }
        impl<T> BuildSql for $type_name<T>
        where
            T: BuildSql,
        {
            fn build_sql(
                &self,
                buf: &mut Vec<u8>,
                params: &mut Vec<Value>,
            ) -> Result<(), BuildSqlError> {
                write!(buf, $clause)?;
                self.inner_ref().build_sql(buf, params)
            }
        }
    };
}
// One wrapper/marker pair per SELECT clause; each renders its keyword
// (note the surrounding spaces) followed by the inner value.
define_select_clause!(FromClause, EmptyFromClause, " FROM ");
define_select_clause!(WhereClause, EmptyWhereClause, " WHERE ");
define_select_clause!(GroupByClause, EmptyGroupByClause, " GROUP BY ");
define_select_clause!(HavingClause, EmptyHavingClause, " HAVING ");
define_select_clause!(OrderByClause, EmptyOrderByClause, " ORDER BY ");
define_select_clause!(LimitClause, EmptyLimitClause, " LIMIT ");
/// Type-state SELECT statement builder.
///
/// Each type parameter tracks one clause slot: `QS` = FROM, `W` = WHERE,
/// `C` = column list, `G` = GROUP BY, `H` = HAVING, `O` = ORDER BY,
/// `L` = LIMIT, `LM` = lock mode. Slots start as `Empty*` marker types and are
/// replaced with concrete clause types by the `Select*` trait methods, so each
/// clause can be set at most once and only from a valid state.
#[derive(Clone)]
pub struct SelectBuilder<QS, W, C, G, H, O, L, LM> {
    sources: QS,
    sources_num: u8, // number of sources joined so far; feeds the alias suffix (t1, t2, ...)
    sources_alias_name: SourceAliasName,
    filter: W,
    columns: C,
    group_by: G,
    having: H,
    order_by: O,
    limit: L,
    lock_mode: LM,
}
impl<QS, W, C, G, H, O, L, LM> SelectBuilder<QS, W, C, G, H, O, L, LM> {
    /// Renames the alias prefix shared by all sources (default is `"t"`).
    pub fn change_sources_alias_name(&mut self, new_name: &'static str) {
        self.sources_alias_name.set(new_name)
    }
}
/// Default lock mode: no locking clause is emitted.
#[derive(Debug, Clone, Copy, Ord, PartialOrd, Eq, PartialEq, Hash, Default)]
pub struct LockModeDefaultBehavior;
impl BuildSql for LockModeDefaultBehavior {
    fn build_sql(&self, _buf: &mut Vec<u8>, _params: &mut Vec<Value>) -> Result<(), BuildSqlError> {
        Ok(())
    }
}
/// Renders a trailing ` FOR UPDATE`.
#[derive(Debug, Clone, Copy, Ord, PartialOrd, Eq, PartialEq, Hash, Default)]
pub struct ForUpdate;
impl BuildSql for ForUpdate {
    fn build_sql(&self, buf: &mut Vec<u8>, _params: &mut Vec<Value>) -> Result<(), BuildSqlError> {
        Ok(write!(buf, " FOR UPDATE")?)
    }
}
/// Renders a trailing ` LOCK IN SHARE MODE`.
#[derive(Debug, Clone, Copy, Ord, PartialOrd, Eq, PartialEq, Hash, Default)]
pub struct LockInShareMode;
impl BuildSql for LockInShareMode {
    fn build_sql(&self, buf: &mut Vec<u8>, _params: &mut Vec<Value>) -> Result<(), BuildSqlError> {
        Ok(write!(buf, " LOCK IN SHARE MODE")?)
    }
}
// A fresh builder: every clause slot holds its empty marker, no sources yet.
impl Default for synonym::EmptySelectBuilder {
    fn default() -> Self {
        SelectBuilder {
            sources: EmptyFromClause,
            sources_num: 0,
            sources_alias_name: SourceAliasName::default(),
            filter: EmptyWhereClause,
            columns: (),
            group_by: EmptyGroupByClause,
            having: EmptyHavingClause,
            order_by: EmptyOrderByClause,
            limit: EmptyLimitClause,
            lock_mode: LockModeDefaultBehavior,
        }
    }
}
impl synonym::EmptySelectBuilder {
    /// Creates an empty builder (same as `Default::default()`).
    pub fn new() -> synonym::EmptySelectBuilder {
        Default::default()
    }
}
// RIGHT OUTER JOIN is only offered before columns / WHERE / later clauses are
// chosen (all slots still empty), because joining changes the source aliases
// that column references are built from.
impl<QS>
    SelectBuilder<
        FromClause<QS>,
        EmptyWhereClause,
        (),
        EmptyGroupByClause,
        EmptyHavingClause,
        EmptyOrderByClause,
        EmptyLimitClause,
        LockModeDefaultBehavior,
    >
where
    QS: QuerySource,
{
    /// Joins `source` with `RIGHT OUTER JOIN ... ON on(...)`.
    ///
    /// Returns the updated builder plus an aliased handle to the newly joined
    /// source, from which the caller can derive column references.
    /// The existing (left) side is made nullable, since a right outer join can
    /// produce rows with no match on the left.
    pub fn right_outer_join<QS2, ON, EXP>(
        self,
        source: QS2,
        mut on: ON,
    ) -> (
        synonym::SourceUpdatedBuilder<synonym::RightOuterJoin<QS, QS2, EXP>>,
        synonym::IntoQuerySourceRef<QS2>,
    )
    where
        QS2: IntoQuerySource,
        QS2::Database: Joinable<QS::Database>,
        QS2::QuerySource: QuerySource + Clone,
        ON: FnMut(QuerySourceRef<QS2::QuerySource>) -> EXP,
        EXP: Expression<SqlType = SqlTypeBool>,
    {
        // The new source gets the next alias suffix (t1, t2, ...).
        let sources_num = self.sources_num + 1;
        let src_ref = QuerySourceRef::new(
            source.into_query_source(),
            SourceAlias::new(self.sources_alias_name.clone(), sources_num),
        );
        let ret_src_ref = src_ref.clone();
        // Let the caller build the ON predicate against the aliased source.
        let on_expr = on(src_ref.clone());
        let new_builder = SelectBuilder {
            sources: FromClause::new(Join::RightOuter(
                self.sources.unwrap().nullable(),
                src_ref,
                on_expr,
            )),
            sources_num,
            sources_alias_name: self.sources_alias_name,
            filter: self.filter,
            columns: (),
            group_by: self.group_by,
            having: self.having,
            order_by: self.order_by,
            limit: self.limit,
            lock_mode: self.lock_mode,
        };
        (new_builder, ret_src_ref)
    }
}
impl<QS, W, C, G, H, O, L> SelectBuilder<FromClause<QS>, W, C, G, H, O, L, LockModeDefaultBehavior>
where
    QS: QuerySource,
{
    /// Internal helper shared by `for_update` / `LOCK IN SHARE MODE`:
    /// swaps the lock-mode slot, carrying every other clause over unchanged.
    fn set_lock_mode<LM>(
        self,
        lock_mode: LM,
    ) -> SelectBuilder<FromClause<QS>, W, C, G, H, O, L, LM> {
        SelectBuilder {
            sources: self.sources,
            sources_num: self.sources_num,
            sources_alias_name: self.sources_alias_name,
            filter: self.filter,
            columns: self.columns,
            group_by: self.group_by,
            having: self.having,
            order_by: self.order_by,
            limit: self.limit,
            lock_mode,
        }
    }
}
impl<QS, C, W, G, H, O, L, LM> SelectBuilder<QS, W, C, G, H, O, L, LM>
where
    QS: BuildSql,
    C: BuildSql,
    W: BuildSql,
    G: BuildSql,
    H: BuildSql,
    O: BuildSql,
    L: BuildSql,
    LM: BuildSql,
{
    /// Renders the finished statement: `SELECT <cols><from><where><group
    /// by><having><order by><limit><lock>;` — the emission order below IS the
    /// SQL clause order, so it must not be rearranged. Empty marker slots
    /// render nothing.
    pub fn build(self) -> Result<Query, QueryBuildError> {
        Query::build(|buf, params| {
            write!(buf, "SELECT ")?;
            self.columns.build_sql(buf, params)?;
            self.sources.build_sql(buf, params)?;
            self.filter.build_sql(buf, params)?;
            self.group_by.build_sql(buf, params)?;
            self.having.build_sql(buf, params)?;
            self.order_by.build_sql(buf, params)?;
            self.limit.build_sql(buf, params)?;
            self.lock_mode.build_sql(buf, params)?;
            write!(buf, ";")?;
            Ok(())
        })
    }
}
// As a *part* of a larger statement (subquery) the builder renders the same
// clause sequence as `build`, but parenthesized and without the trailing `;`.
impl<QS, C, W, G, H, O, L, LM> BuildSql for SelectBuilder<QS, W, C, G, H, O, L, LM>
where
    QS: BuildSql,
    C: BuildSql,
    W: BuildSql,
    G: BuildSql,
    H: BuildSql,
    O: BuildSql,
    L: BuildSql,
    LM: BuildSql,
{
    fn build_sql(&self, buf: &mut Vec<u8>, params: &mut Vec<Value>) -> Result<(), BuildSqlError> {
        write!(buf, "(SELECT ")?;
        self.columns.build_sql(buf, params)?;
        self.sources.build_sql(buf, params)?;
        self.filter.build_sql(buf, params)?;
        self.group_by.build_sql(buf, params)?;
        self.having.build_sql(buf, params)?;
        self.order_by.build_sql(buf, params)?;
        self.limit.build_sql(buf, params)?;
        self.lock_mode.build_sql(buf, params)?;
        write!(buf, ")")?;
        Ok(())
    }
}
// A builder used as a subquery is itself an expression whose SQL type is the
// type of its column list.
impl<QS, W, C, G, H, O, L, LM> Expression for SelectBuilder<QS, W, C, G, H, O, L, LM>
where
    C: Columns,
{
    type SqlType = C::SqlType;
    type Term = Monomial;
    type BoolOperation = NonBool;
    type Aggregation = NonAggregate; // a subquery always yields a plain value
}
// A builder without a FROM clause is not tied to any database.
impl<W, C, G, H, O, L, LM> QuerySource for SelectBuilder<EmptyFromClause, W, C, G, H, O, L, LM> {
    type Database = AnyDatabase;
    type NullableSelf = Self;
    fn nullable(self) -> Self::NullableSelf {
        self
    }
}
// With a FROM clause, the builder inherits its source's database.
impl<DB, QS, W, C, G, H, O, L, LM> QuerySource
    for SelectBuilder<FromClause<QS>, W, C, G, H, O, L, LM>
where
    QS: QuerySource<Database = DB>,
{
    type Database = DB;
    type NullableSelf = Self;
    fn nullable(self) -> Self::NullableSelf {
        self
    }
}
// A builder can be used directly as a query source (derived table).
impl<W, C, G, H, O, L, LM> IntoQuerySource
    for SelectBuilder<EmptyFromClause, W, C, G, H, O, L, LM>
{
    type Database = AnyDatabase;
    type QuerySource = Self;
    fn into_query_source(self) -> Self::QuerySource {
        self
    }
}
impl<DB, QS, W, C, G, H, O, L, LM> IntoQuerySource
    for SelectBuilder<FromClause<QS>, W, C, G, H, O, L, LM>
where
    DB: Clone,
    QS: QuerySource<Database = DB>,
{
    type Database = DB;
    type QuerySource = Self;
    fn into_query_source(self) -> Self::QuerySource {
        self
    }
}
/// A finished SQL statement: the text (with `?` placeholders) plus the
/// parameter values bound to them, in order.
#[derive(Debug, Clone)]
pub struct Query {
    sql: String,
    params: Vec<Value>,
}
/// Errors that can occur while assembling a [`Query`].
#[derive(Debug, thiserror::Error)]
pub enum QueryBuildError {
    #[error("QueryBuildError::BuildSqlError: {0}")]
    BuildSqlError(#[from] BuildSqlError),
    // Raised when the byte buffer produced by `BuildSql` is not valid UTF-8.
    #[error("QueryBuildError::EncodeError: {0}")]
    EncodeError(#[from] FromUtf8Error),
}
impl Query {
    /// Builds a query by letting `f` write the SQL text (as bytes) and push the
    /// bound parameters, then validates the buffer as UTF-8.
    ///
    /// # Errors
    /// Propagates any [`BuildSqlError`] from `f`, or an encode error if the
    /// produced bytes are not valid UTF-8.
    //
    // `f` is invoked exactly once, so `FnOnce` is the right bound (and drops
    // the needless `mut f`). Every `FnMut` closure also implements `FnOnce`,
    // so this is strictly more general and existing callers are unaffected.
    pub fn build<F>(f: F) -> Result<Query, QueryBuildError>
    where
        F: FnOnce(&mut Vec<u8>, &mut Vec<Value>) -> Result<(), BuildSqlError>,
    {
        // Rough pre-allocation; the real parameter count is unknown until `f`
        // has run, so this only avoids the earliest reallocations.
        let num_of_params = 32;
        let mut buf: Vec<u8> = Vec::with_capacity(128);
        let mut params: Vec<Value> = Vec::with_capacity(num_of_params);
        f(&mut buf, &mut params)?;
        Ok(Query {
            sql: String::from_utf8(buf)?,
            params,
        })
    }
    /// The generated SQL text.
    pub fn sql(&self) -> &str {
        self.sql.as_str()
    }
    /// The values bound to the `?` placeholders, in order of appearance.
    pub fn params(&self) -> &[Value] {
        &self.params
    }
}
/// A bind-parameter value pushed alongside a `?` placeholder.
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub enum Value {
    Null,
    String(String),
    Int(i64),
    Uint(u64),
}
// A parameter can stand in for any SQL type in an expression.
impl Expression for Value {
    type SqlType = SqlTypeAny;
    type Term = Monomial;
    type BoolOperation = BoolMono;
    type Aggregation = NonAggregate;
}
// Conversions from plain Rust values and the Sql* wrapper types into `Value`.
impl From<&str> for Value {
    fn from(value: &str) -> Self {
        Value::String(value.to_string())
    }
}
impl From<i64> for Value {
    fn from(value: i64) -> Self {
        Value::Int(value)
    }
}
impl From<u64> for Value {
    fn from(value: u64) -> Self {
        Value::Uint(value)
    }
}
// `None` maps to SQL NULL.
impl<T> From<Option<T>> for Value
where
    T: Into<Value>,
{
    fn from(value: Option<T>) -> Self {
        value.map(Into::into).unwrap_or(Value::Null)
    }
}
impl From<SqlString> for Value {
    fn from(value: SqlString) -> Self {
        Value::String(value.0)
    }
}
impl From<SqlInt> for Value {
    fn from(value: SqlInt) -> Self {
        Value::Int(value.0)
    }
}
impl From<SqlUint> for Value {
    fn from(value: SqlUint) -> Self {
        Value::Uint(value.0)
    }
}
// Anything convertible to `Value` renders as a `?` placeholder and pushes its
// (cloned) value onto the parameter list.
impl<T> BuildSql for T
where
    T: Into<Value> + Clone,
{
    fn build_sql(&self, buf: &mut Vec<u8>, params: &mut Vec<Value>) -> Result<(), BuildSqlError> {
        write!(buf, "?")?;
        params.push((*self).clone().into());
        Ok(())
    }
}
/// Newtype marking a Rust string as a SQL string expression.
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub struct SqlString(String);
impl SqlString {
    pub fn new(s: &str) -> SqlString {
        SqlString(s.to_string())
    }
}
impl<T> From<T> for SqlString
where
    T: AsRef<str>,
{
    fn from(value: T) -> Self {
        SqlString::new(value.as_ref())
    }
}
impl Expression for SqlString {
    type SqlType = SqlTypeString;
    type Term = Monomial;
    type BoolOperation = NonBool;
    type Aggregation = NonAggregate;
}
/// Newtype marking an `i64` as a SQL signed-integer expression.
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub struct SqlInt(i64);
impl SqlInt {
    pub fn new(i: i64) -> SqlInt {
        SqlInt(i)
    }
    /// Fallible conversion for integer types wider than (or unsigned relative
    /// to) `i64`.
    pub fn try_from<T: TryInto<i64>>(value: T) -> Result<SqlInt, T::Error> {
        Ok(SqlInt::new(value.try_into()?))
    }
}
impl<T> From<T> for SqlInt
where
    T: Into<i64>,
{
    fn from(value: T) -> Self {
        SqlInt(value.into())
    }
}
impl Expression for SqlInt {
    type SqlType = SqlTypeInt;
    type Term = Monomial;
    type BoolOperation = NonBool;
    type Aggregation = NonAggregate;
}
/// Newtype marking a `u64` as a SQL unsigned-integer expression.
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub struct SqlUint(u64);
impl SqlUint {
    pub fn new(i: u64) -> SqlUint {
        SqlUint(i)
    }
    /// Fallible conversion for types that may not fit in (or be negative for)
    /// `u64`.
    pub fn try_from<T: TryInto<u64>>(value: T) -> Result<SqlUint, T::Error> {
        Ok(SqlUint::new(value.try_into()?))
    }
}
impl<T> From<T> for SqlUint
where
    T: Into<u64>,
{
    fn from(value: T) -> Self {
        SqlUint(value.into())
    }
}
impl Expression for SqlUint {
    type SqlType = SqlTypeUint;
    type Term = Monomial;
    type BoolOperation = NonBool;
    type Aggregation = NonAggregate;
}
// Marker type; presumably stands for the DUAL pseudo-table — unused in this
// chunk, confirm against the rest of the file.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Default, Hash)]
pub struct Dual;
/// Wildcard database marker: joinable with any concrete database (see the
/// `Joinable` impls and `impl_joinable!`).
#[derive(Debug, Clone, Copy, Eq, PartialEq, Default, Hash)]
pub struct AnyDatabase;
/// Something that can appear in a FROM clause.
pub trait QuerySource {
    type Database;
    /// The outer-joined (nullable) version of this source.
    type NullableSelf: QuerySource<Database = Self::Database>;
    fn nullable(self) -> Self::NullableSelf;
}
/// Conversion into a [`QuerySource`] (tables, table names, subqueries...).
pub trait IntoQuerySource {
    type Database;
    type QuerySource: QuerySource<Database = Self::Database>;
    fn into_query_source(self) -> Self::QuerySource;
}
// A bare string is treated as a table name in an unspecified database.
impl<'a> IntoQuerySource for &'a str {
    type Database = AnyDatabase;
    type QuerySource = TableName<'a, AnyDatabase>;
    fn into_query_source(self) -> Self::QuerySource {
        TableName::new(self)
    }
}
/// A table name tagged (at the type level only, via `PhantomData`) with the
/// database it belongs to.
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Default, Hash, derive_more::Deref)]
pub struct TableName<'a, DB> {
    #[deref]
    name: &'a str,
    db: PhantomData<DB>,
}
impl<DB> TableName<'_, DB> {
    pub fn new(name: &str) -> TableName<'_, DB> {
        TableName {
            name,
            db: PhantomData,
        }
    }
}
impl<'a, DB> From<&'a str> for TableName<'a, DB> {
    fn from(value: &'a str) -> Self {
        TableName::new(value)
    }
}
/// Compile-time check that two databases may appear in the same join; the
/// associated type names the resulting database.
pub trait Joinable<DB> {
    type Database;
}
impl Joinable<AnyDatabase> for AnyDatabase {
    type Database = AnyDatabase;
}
/// Declares a concrete database type joinable with itself and (in both
/// directions) with the `AnyDatabase` wildcard.
#[macro_export]
macro_rules! impl_joinable {
    ($ty:ty) => {
        impl $crate::query_builder::Joinable<$ty> for $ty {
            type Database = $ty;
        }
        impl $crate::query_builder::Joinable<$crate::query_builder::AnyDatabase> for $ty {
            type Database = $ty;
        }
        impl $crate::query_builder::Joinable<$ty> for $crate::query_builder::AnyDatabase {
            type Database = $ty;
        }
    };
}
// Concrete table types are expected to expose, via `NullableSelf`, accessors
// returning `Option<column type>`; the builder calls `nullable` when e.g.
// `left_outer_join` is used. (Translated from the original Japanese note.)
impl<DB> QuerySource for TableName<'_, DB> {
    type Database = DB;
    type NullableSelf = Self;
    fn nullable(self) -> Self::NullableSelf {
        self
    }
}
impl<DB> IntoQuerySource for TableName<'_, DB>
where
    DB: Clone,
{
    type Database = DB;
    type QuerySource = Self;
    fn into_query_source(self) -> Self::QuerySource {
        self
    }
}
// Renders as the bare table name; contributes no parameters.
impl<DB> BuildSql for TableName<'_, DB> {
    fn build_sql(&self, buf: &mut Vec<u8>, _params: &mut Vec<Value>) -> Result<(), BuildSqlError> {
        write!(buf, "{}", self.name)?;
        Ok(())
    }
}
/// A column reference qualified by its source alias (`t1.col`), carrying its
/// SQL type as a zero-sized tag (defaults to `SqlTypeAny`).
#[derive(Clone)]
pub struct Column<ST = SqlTypeAny> {
    table_name: SourceAlias,
    column_name: String,
    sql_type: ST,
}
impl<ST> Column<ST>
where
    ST: Default,
{
    pub fn new<T>(table_name: SourceAlias, column_name: T) -> Column<ST>
    where
        T: AsColumnName + Sized,
    {
        Column {
            table_name,
            column_name: column_name.as_column_name().to_string(),
            sql_type: Default::default(), // zero-sized type tag only
        }
    }
}
impl<ST> Expression for Column<ST> {
    type SqlType = ST;
    type Term = Monomial;
    type BoolOperation = NonBool;
    type Aggregation = NonAggregate;
}
/// Wraps a selectable value with an `as <alias>` label.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct ColumnAlias<'a, T> {
    value: T,
    alias: Cow<'a, str>,
}
impl<'a, T> ColumnAlias<'a, T> {
    pub fn new(value: T, alias: &'a str) -> ColumnAlias<'a, T> {
        ColumnAlias {
            value,
            alias: Cow::from(alias),
        }
    }
}
// Aliasing changes neither the SQL type nor the aggregation of the value.
impl<'a, T> Columns for ColumnAlias<'a, T>
where
    T: Expression,
{
    type SqlType = T::SqlType;
    type Aggregation = T::Aggregation;
}
impl<T> BuildSql for ColumnAlias<'_, T>
where
    T: BuildSql,
{
    fn build_sql(&self, buf: &mut Vec<u8>, params: &mut Vec<Value>) -> Result<(), BuildSqlError> {
        self.value.build_sql(buf, params)?;
        write!(buf, " as {}", self.alias)?;
        Ok(())
    }
}
/// Mutable alias prefix shared by every source of a builder (default `"t"`).
/// Clones share the same `Rc<RefCell<...>>` cell, so renaming one handle
/// renames them all.
#[derive(Clone)]
pub struct SourceAliasName {
    name: Rc<RefCell<&'static str>>,
}
impl SourceAliasName {
    pub fn new(name: &'static str) -> Self {
        SourceAliasName {
            name: Rc::new(RefCell::new(name)),
        }
    }
    /// Replaces the prefix for every alias sharing this cell.
    pub fn set(&mut self, new_name: &'static str) {
        self.name.replace(new_name);
    }
    /// Returns the current prefix.
    // NOTE(review): `*self.name.borrow()` (copying the `&'static str` out of
    // the cell) would express this more directly than the double `borrow()` —
    // confirm and simplify.
    pub fn as_str(&self) -> &'static str {
        RefCell::borrow(&self.name).borrow()
    }
}
impl Default for SourceAliasName {
    fn default() -> Self {
        Self::new("t")
    }
}
/// A generated source alias: shared prefix plus per-source suffix number
/// (rendered e.g. as `t1`, `t2`, ... with the default prefix).
#[derive(Clone)]
pub struct SourceAlias {
    name: SourceAliasName,
    suffix_number: u8,
}
impl SourceAlias {
    pub fn new(name: SourceAliasName, suffix_number: u8) -> Self {
        SourceAlias {
            name,
            suffix_number,
        }
    }
    /// Renames the shared prefix (affects all aliases sharing the cell).
    pub fn change_name(&mut self, new_name: &'static str) {
        self.name.set(new_name);
    }
}
// `Display` (rather than a direct `ToString` impl) is the idiomatic way to
// make a type printable; `ToString` still comes for free via the blanket
// `impl<T: Display> ToString for T`, so existing `.to_string()` callers keep
// working, and the alias can now also be used directly in `format!`/`write!`.
impl std::fmt::Display for SourceAlias {
    /// Formats as `<prefix><suffix_number>`, e.g. `t1`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}{}", self.name.as_str(), self.suffix_number)
    }
}
/// A query source paired with its generated alias; `Deref`s to the source.
/// Handed back to callers of the join methods so they can build qualified
/// column references.
#[derive(Clone, derive_more::Deref)]
pub struct QuerySourceRef<QS> {
    #[deref]
    source: QS,
    alias: SourceAlias,
}
impl<QS> QuerySourceRef<QS>
where
    QS: QuerySource,
{
    pub fn new(source: QS, alias: SourceAlias) -> QuerySourceRef<QS> {
        QuerySourceRef { source, alias }
    }
    /// An untyped (`SqlTypeAny`) column of this source.
    pub fn column<T>(&self, column_name: T) -> Column
    where
        T: AsColumnName + Sized,
    {
        Column::new(self.alias.clone(), column_name)
    }
    /// A column carrying an explicit SQL type tag `T`.
    pub fn typed_column<T: Default>(&self, column_name: impl AsColumnName + Sized) -> Column<T> {
        Column::new(self.alias.clone(), column_name)
    }
    /// The rendered alias, e.g. `t1`.
    pub fn alias(&self) -> String {
        self.alias.to_string()
    }
}
impl<QS> QuerySource for QuerySourceRef<QS>
where
    QS: QuerySource,
{
    type Database = QS::Database;
    type NullableSelf = QuerySourceRef<QS::NullableSelf>;
    /// Propagates `nullable` to the wrapped source while keeping the alias.
    fn nullable(self) -> Self::NullableSelf {
        // `self` is consumed here, so `alias` can simply be moved out
        // (partial move); the previous `self.alias.clone()` was redundant.
        QuerySourceRef {
            source: self.source.nullable(),
            alias: self.alias,
        }
    }
}
impl<QS> BuildSql for QuerySourceRef<QS>
where
    QS: QuerySource + BuildSql,
{
    /// Renders the underlying source followed by its alias, e.g. `users as t1`.
    fn build_sql(&self, buf: &mut Vec<u8>, params: &mut Vec<Value>) -> Result<(), BuildSqlError> {
        // `BuildSqlError` already converts from `std::io::Error` (and from
        // itself), so plain `?` suffices; the previous detour through
        // `anyhow::Error::from` wrapped errors needlessly.
        self.source.build_sql(buf, params)?;
        write!(buf, " as {}", self.alias.to_string())?;
        Ok(())
    }
}
/// Anything usable as a column name in a query.
pub trait AsColumnName {
    /// Returns the column name as a string slice.
    fn as_column_name(&self) -> &str;
}
/// A plain string slice names a column directly.
impl AsColumnName for &str {
    fn as_column_name(&self) -> &str {
        self
    }
}
/// Shared references delegate to the value they point at.
impl<U> AsColumnName for &U
where
    U: AsColumnName,
{
    fn as_column_name(&self) -> &str {
        (**self).as_column_name()
    }
}
/// Mutable references delegate to the value they point at.
impl<U> AsColumnName for &mut U
where
    U: AsColumnName,
{
    fn as_column_name(&self) -> &str {
        (**self).as_column_name()
    }
}
/// A join between two query sources; `ON` is absent only for cross joins.
#[derive(Debug, Clone)]
pub enum Join<L, R, ON> {
    Inner(L, R, ON),
    LeftOuter(L, R, ON),
    RightOuter(L, R, ON),
    Cross(L, R),
}
impl<L, R, E> QuerySource for Join<L, R, E>
where
    L: QuerySource,
    R: QuerySource,
    L::Database: Joinable<R::Database>,
    L::NullableSelf: QuerySource,
    R::NullableSelf: QuerySource,
    E: Expression<SqlType = SqlTypeBool> + Clone,
{
    // The joined source lives in the database the `Joinable` check resolves to.
    type Database = <L::Database as Joinable<R::Database>>::Database;
    type NullableSelf = Join<L::NullableSelf, R::NullableSelf, E>;
    // Making a join nullable makes both of its sides nullable.
    fn nullable(self) -> Self::NullableSelf {
        match self {
            Join::Inner(l, r, on) => Join::Inner(l.nullable(), r.nullable(), on),
            Join::LeftOuter(l, r, on) => Join::LeftOuter(l.nullable(), r.nullable(), on),
            Join::RightOuter(l, r, on) => Join::RightOuter(l.nullable(), r.nullable(), on),
            Join::Cross(l, r) => Join::Cross(l.nullable(), r.nullable()),
        }
    }
}
impl<L, R, E> BuildSql for Join<L, R, E>
where
    L: QuerySource + BuildSql,
    R: QuerySource + BuildSql,
    E: Expression<SqlType = SqlTypeBool> + BuildSql,
{
    /// Renders the join as `<left> <JOIN KIND> <right> [ON <expr>]`.
    ///
    /// `BuildSqlError: From<std::io::Error>` makes plain `?` sufficient here;
    /// the previous version funneled every arm through an immediately-invoked
    /// closure and an `anyhow::Error` round-trip for no benefit.
    fn build_sql(&self, buf: &mut Vec<u8>, params: &mut Vec<Value>) -> Result<(), BuildSqlError> {
        match self {
            Join::Inner(l, r, on) => {
                l.build_sql(buf, params)?;
                write!(buf, " JOIN ")?;
                r.build_sql(buf, params)?;
                write!(buf, " ON ")?;
                on.build_sql(buf, params)?;
            }
            Join::LeftOuter(l, r, on) => {
                l.build_sql(buf, params)?;
                write!(buf, " LEFT OUTER JOIN ")?;
                r.build_sql(buf, params)?;
                write!(buf, " ON ")?;
                on.build_sql(buf, params)?;
            }
            Join::RightOuter(l, r, on) => {
                l.build_sql(buf, params)?;
                write!(buf, " RIGHT OUTER JOIN ")?;
                r.build_sql(buf, params)?;
                write!(buf, " ON ")?;
                on.build_sql(buf, params)?;
            }
            // Cross joins have no ON clause.
            Join::Cross(l, r) => {
                l.build_sql(buf, params)?;
                write!(buf, " CROSS JOIN ")?;
                r.build_sql(buf, params)?;
            }
        }
        Ok(())
    }
}
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub struct Monomial;
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub struct Polynomial;
#[derive(Debug, Clone, Copy, Eq, PartialEq, Default, Hash)]
pub struct BoolAnd;
#[derive(Debug, Clone, Copy, Eq, PartialEq, Default, Hash)]
pub struct BoolOr;
#[derive(Debug, Clone, Copy, Eq, PartialEq, Default, Hash)]
pub struct BoolMono;
#[derive(Debug, Clone, Copy, Eq, PartialEq, Default, Hash)]
pub struct NonBool;
#[derive(Debug, Clone, Copy, Eq, PartialEq, Default, Hash)]
pub struct Aggregate;
#[derive(Debug, Clone, Copy, Eq, PartialEq, Default, Hash)]
pub struct NonAggregate;
pub trait Aggregation<T> {
type Output;
}
impl Aggregation<Aggregate> for NonAggregate {
type Output = Aggregate;
}
impl Aggregation<NonAggregate> for Aggregate {
type Output = Aggregate;
}
impl Aggregation<Aggregate> for Aggregate {
type Output = Aggregate;
}
impl Aggregation<NonAggregate> for NonAggregate {
type Output = NonAggregate;
}
impl Aggregation<()> for Aggregate {
type Output = Aggregate;
}
impl Aggregation<()> for NonAggregate {
type Output = NonAggregate;
}
impl<A, B, C> Aggregation<(B, C)> for A
where
A: Aggregation<B>,
A::Output: Aggregation<C>,
{
type Output = <A::Output as Aggregation<C>>::Output;
}
/// A typed SQL expression; the associated types carry compile-time metadata
/// (SQL type, term arity, boolean shape, aggregation) used by the builder.
pub trait Expression {
    type SqlType;
    type Term;
    type BoolOperation;
    type Aggregation;
}
/// Only for cross join.
/// A boolean expression that renders nothing — placeholder for the ON slot of
/// joins that have no ON clause.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Default, Hash)]
pub struct BlankBoolExpression;
impl Expression for BlankBoolExpression {
    type SqlType = SqlTypeBool;
    type Term = Monomial;
    type BoolOperation = ();
    type Aggregation = NonAggregate;
}
impl BuildSql for BlankBoolExpression {
    fn build_sql(&self, _buf: &mut Vec<u8>, _params: &mut Vec<Value>) -> Result<(), BuildSqlError> {
        Ok(())
    }
}
/// A single ORDER BY key with its direction.
#[derive(Debug, Clone)]
pub enum Order<E> {
    Asc(E),
    Desc(E),
}
impl<E> BuildSql for Order<E>
where
    E: BuildSql,
{
    // Renders `<expr> ASC` / `<expr> DESC`.
    fn build_sql(&self, buf: &mut Vec<u8>, params: &mut Vec<Value>) -> Result<(), BuildSqlError> {
        match self {
            Order::Asc(expr) => {
                expr.build_sql(buf, params)?;
                write!(buf, " ASC")?;
            }
            Order::Desc(expr) => {
                expr.build_sql(buf, params)?;
                write!(buf, " DESC")?;
            }
        }
        Ok(())
    }
}
/// Marker for valid ORDER BY contents (single `Order` or tuples of them —
/// tuple impls are generated by `impl_traits_for_tuple!`).
pub trait Orders {}
impl<E> Orders for Order<E> where E: Expression {}
/// A `LIMIT [offset,] row_count` specification.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Limit {
    /// Rows to skip; `None` omits the offset part entirely.
    pub offset: Option<usize>,
    /// Maximum number of rows to return.
    pub row_count: usize,
}
/// A bare count becomes `LIMIT count`.
impl From<usize> for Limit {
    fn from(row_count: usize) -> Self {
        Self {
            offset: None,
            row_count,
        }
    }
}
/// An `(offset, count)` pair becomes `LIMIT offset, count`.
impl From<(usize, usize)> for Limit {
    fn from(pair: (usize, usize)) -> Self {
        let (skip, count) = pair;
        Self {
            offset: Some(skip),
            row_count: count,
        }
    }
}
// Renders `?` (or `?, ?` with an offset) and pushes the numbers as
// parameters rather than inlining them into the SQL text.
impl BuildSql for Limit {
    fn build_sql(&self, buf: &mut Vec<u8>, params: &mut Vec<Value>) -> Result<(), BuildSqlError> {
        if let Some(offset) = self.offset {
            write!(buf, "?, ")?;
            params.push((offset as i64).into());
        }
        write!(buf, "?")?;
        params.push((self.row_count as i64).into());
        Ok(())
    }
}
/// Valid contents for the SELECT column list (single expressions or tuples).
pub trait Columns {
    type SqlType;
    type Aggregation;
}
// Every expression is a valid single-column list.
impl<E: Expression> Columns for E {
    type SqlType = E::SqlType;
    type Aggregation = E::Aggregation;
}
/// Prefixes its inner column list with `DISTINCT`.
#[derive(Debug, Clone)]
pub struct Distinct<T>(T);
impl<T> Distinct<T> {
    pub fn new(x: T) -> Distinct<T> {
        Distinct(x)
    }
}
// DISTINCT only makes sense over non-aggregate column lists.
impl<T> Columns for Distinct<T>
where
    T: Columns<Aggregation = NonAggregate>,
{
    type SqlType = T::SqlType;
    type Aggregation = T::Aggregation;
}
impl<T: BuildSql> BuildSql for Distinct<T> {
    fn build_sql(&self, buf: &mut Vec<u8>, params: &mut Vec<Value>) -> Result<(), BuildSqlError> {
        write!(buf, "DISTINCT ")?;
        self.0.build_sql(buf, params)
    }
}
/// Build SQL string as a part of SQL.
/// Implementors append their SQL fragment to `buf` and push any bound values
/// onto `params` (in placeholder order).
pub trait BuildSql {
    fn build_sql(&self, buf: &mut Vec<u8>, params: &mut Vec<Value>) -> Result<(), BuildSqlError>;
}
/// Errors raised while writing SQL fragments.
#[derive(Debug, thiserror::Error)]
pub enum BuildSqlError {
    #[error("Failed to build sql: {0}")]
    IoError(#[from] std::io::Error),
    #[error("Failed to build sql: {0}")]
    AnyError(#[from] anyhow::Error),
}
// Renders as `<alias>.<column>`, e.g. `t1.id`; contributes no parameters.
impl<ST> BuildSql for Column<ST> {
    fn build_sql(&self, buf: &mut Vec<u8>, _params: &mut Vec<Value>) -> Result<(), BuildSqlError> {
        write!(buf, "{}.{}", self.table_name.to_string(), self.column_name)?;
        Ok(())
    }
}
/// A parenthesized row value, rendered as `(a, b, ...)`.
#[derive(Debug, Clone)]
pub struct Record<T> {
    columns: T,
}
impl<T> Record<T> {
    pub fn new(columns: T) -> Record<T> {
        Record { columns }
    }
}
// 1-tuple base cases; larger arities are generated by `impl_traits_for_tuple!`.
impl<A> Columns for (A,)
where
    A: Expression,
{
    type SqlType = (A::SqlType,);
    type Aggregation = A::Aggregation;
}
impl<A: Expression> Expression for Record<(A,)> {
    type SqlType = A::SqlType;
    type Term = A::Term;
    type BoolOperation = A::BoolOperation;
    type Aggregation = A::Aggregation;
}
impl<A> Orders for (Order<A>,) where A: Expression {}
impl<A> BuildSql for (A,)
where
    A: BuildSql,
{
    fn build_sql(&self, buf: &mut Vec<u8>, params: &mut Vec<Value>) -> Result<(), BuildSqlError> {
        self.0.build_sql(buf, params)
    }
}
/// Nests a flat list of aggregation markers into the right-nested tuple shape
/// `(A, (B, (C, ())))` that the recursive `Aggregation<(B, C)>` impl folds.
macro_rules! recursive_aggregation {
    ( $first_ty:ty, $( $ty:ty ),* $(,)* ) => {
        ($first_ty, recursive_aggregation!( $($ty,)* ))
    };
    () => {
        ()
    };
}
/// Generates `Columns`, `Expression` (via `Record`), `Orders` and `BuildSql`
/// impls for one tuple arity. Invoked with alternating type-parameter names
/// and field indices (`A, 0, B, 1, ...`); the tuple's aggregation marker is
/// folded over all element markers via `recursive_aggregation!`.
macro_rules! impl_traits_for_tuple {
    ($type_paramA:ident, $field0:tt $(,$type_param:ident, $field:tt)*) => {
        impl<$type_paramA $(, $type_param)*> Columns for ($type_paramA $(, $type_param)*)
        where
            $type_paramA: Columns,
            $($type_param: Columns,)*
            <$type_paramA>::Aggregation: Aggregation<recursive_aggregation!( $( <$type_param>::Aggregation, )* )>,
        {
            type SqlType = ( $type_paramA::SqlType, $( $type_param::SqlType, )* );
            type Aggregation = <<$type_paramA>::Aggregation as Aggregation<recursive_aggregation!( $( <$type_param>::Aggregation, )* )>>::Output;
        }
        impl<$type_paramA $(, $type_param)*> Expression for Record<($type_paramA $(, $type_param)*)>
        where
            $type_paramA: Expression,
            $($type_param: Expression,)*
            <$type_paramA>::Aggregation: Aggregation<recursive_aggregation!( $( <$type_param>::Aggregation, )* )>,
        {
            type SqlType = ( $type_paramA::SqlType, $( $type_param::SqlType, )* );
            type Term = Polynomial;
            type BoolOperation = BoolMono;
            type Aggregation = <<$type_paramA>::Aggregation as Aggregation<recursive_aggregation!( $( <$type_param>::Aggregation, )* )>>::Output;
        }
        impl<$type_paramA $(, $type_param)*> Orders for (Order<$type_paramA> $(, Order<$type_param>)*)
        where
            $type_paramA: Expression,
            $($type_param: Expression,)*
        {}
        // Tuples render comma-separated.
        impl<$type_paramA $(, $type_param)*> BuildSql for ($type_paramA $(, $type_param)*)
        where
            $type_paramA: BuildSql,
            $($type_param: BuildSql,)*
        {
            fn build_sql(&self, buf: &mut Vec<u8>, params: &mut Vec<Value>) -> Result<(), BuildSqlError> {
                self.$field0.build_sql(buf, params)?;
                $(
                    write!(buf, ", ")?;
                    self.$field.build_sql(buf, params)?;
                )*
                Ok(())
            }
        }
        // Records render parenthesized and comma-separated.
        impl<$type_paramA $(, $type_param)*> BuildSql for Record<($type_paramA $(, $type_param)*)>
        where
            $type_paramA: BuildSql,
            $($type_param: BuildSql,)*
        {
            fn build_sql(&self, buf: &mut Vec<u8>, params: &mut Vec<Value>) -> Result<(), BuildSqlError> {
                write!(buf, "(")?; // TODO (translated): would a `ROW` prefix be better here?
                self.columns.$field0.build_sql(buf, params)?;
                $(
                    write!(buf, ", ")?;
                    self.columns.$field.build_sql(buf, params)?;
                )*
                write!(buf, ")")?;
                Ok(())
            }
        }
    }
}
// Instantiate the tuple impls for arities 2 through 52. Each invocation lists
// one (type parameter, tuple index) pair per element. Add further invocations
// below if support for larger tuples is ever needed.
impl_traits_for_tuple!(A, 0, B, 1);
impl_traits_for_tuple!(A, 0, B, 1, C, 2);
impl_traits_for_tuple!(A, 0, B, 1, C, 2, D, 3);
impl_traits_for_tuple!(A, 0, B, 1, C, 2, D, 3, E, 4);
impl_traits_for_tuple!(A, 0, B, 1, C, 2, D, 3, E, 4, F, 5);
impl_traits_for_tuple!(A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6);
impl_traits_for_tuple!(A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7);
impl_traits_for_tuple!(A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8);
impl_traits_for_tuple!(A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9);
impl_traits_for_tuple!(A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10);
impl_traits_for_tuple!(A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25, AA, 26
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25, AA, 26, AB, 27
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25, AA, 26, AB, 27,
    AC, 28
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25, AA, 26, AB, 27,
    AC, 28, AD, 29
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25, AA, 26, AB, 27,
    AC, 28, AD, 29, AE, 30
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25, AA, 26, AB, 27,
    AC, 28, AD, 29, AE, 30, AF, 31
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25, AA, 26, AB, 27,
    AC, 28, AD, 29, AE, 30, AF, 31, AG, 32
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25, AA, 26, AB, 27,
    AC, 28, AD, 29, AE, 30, AF, 31, AG, 32, AH, 33
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25, AA, 26, AB, 27,
    AC, 28, AD, 29, AE, 30, AF, 31, AG, 32, AH, 33, AI, 34
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25, AA, 26, AB, 27,
    AC, 28, AD, 29, AE, 30, AF, 31, AG, 32, AH, 33, AI, 34, AJ, 35
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25, AA, 26, AB, 27,
    AC, 28, AD, 29, AE, 30, AF, 31, AG, 32, AH, 33, AI, 34, AJ, 35, AK, 36
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25, AA, 26, AB, 27,
    AC, 28, AD, 29, AE, 30, AF, 31, AG, 32, AH, 33, AI, 34, AJ, 35, AK, 36, AL, 37
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25, AA, 26, AB, 27,
    AC, 28, AD, 29, AE, 30, AF, 31, AG, 32, AH, 33, AI, 34, AJ, 35, AK, 36, AL, 37, AM, 38
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25, AA, 26, AB, 27,
    AC, 28, AD, 29, AE, 30, AF, 31, AG, 32, AH, 33, AI, 34, AJ, 35, AK, 36, AL, 37, AM, 38, AN, 39
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25, AA, 26, AB, 27,
    AC, 28, AD, 29, AE, 30, AF, 31, AG, 32, AH, 33, AI, 34, AJ, 35, AK, 36, AL, 37, AM, 38, AN, 39,
    AO, 40
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25, AA, 26, AB, 27,
    AC, 28, AD, 29, AE, 30, AF, 31, AG, 32, AH, 33, AI, 34, AJ, 35, AK, 36, AL, 37, AM, 38, AN, 39,
    AO, 40, AP, 41
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25, AA, 26, AB, 27,
    AC, 28, AD, 29, AE, 30, AF, 31, AG, 32, AH, 33, AI, 34, AJ, 35, AK, 36, AL, 37, AM, 38, AN, 39,
    AO, 40, AP, 41, AQ, 42
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25, AA, 26, AB, 27,
    AC, 28, AD, 29, AE, 30, AF, 31, AG, 32, AH, 33, AI, 34, AJ, 35, AK, 36, AL, 37, AM, 38, AN, 39,
    AO, 40, AP, 41, AQ, 42, AR, 43
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25, AA, 26, AB, 27,
    AC, 28, AD, 29, AE, 30, AF, 31, AG, 32, AH, 33, AI, 34, AJ, 35, AK, 36, AL, 37, AM, 38, AN, 39,
    AO, 40, AP, 41, AQ, 42, AR, 43, AS, 44
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25, AA, 26, AB, 27,
    AC, 28, AD, 29, AE, 30, AF, 31, AG, 32, AH, 33, AI, 34, AJ, 35, AK, 36, AL, 37, AM, 38, AN, 39,
    AO, 40, AP, 41, AQ, 42, AR, 43, AS, 44, AT, 45
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25, AA, 26, AB, 27,
    AC, 28, AD, 29, AE, 30, AF, 31, AG, 32, AH, 33, AI, 34, AJ, 35, AK, 36, AL, 37, AM, 38, AN, 39,
    AO, 40, AP, 41, AQ, 42, AR, 43, AS, 44, AT, 45, AU, 46
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25, AA, 26, AB, 27,
    AC, 28, AD, 29, AE, 30, AF, 31, AG, 32, AH, 33, AI, 34, AJ, 35, AK, 36, AL, 37, AM, 38, AN, 39,
    AO, 40, AP, 41, AQ, 42, AR, 43, AS, 44, AT, 45, AU, 46, AV, 47
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25, AA, 26, AB, 27,
    AC, 28, AD, 29, AE, 30, AF, 31, AG, 32, AH, 33, AI, 34, AJ, 35, AK, 36, AL, 37, AM, 38, AN, 39,
    AO, 40, AP, 41, AQ, 42, AR, 43, AS, 44, AT, 45, AU, 46, AV, 47, AW, 48
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25, AA, 26, AB, 27,
    AC, 28, AD, 29, AE, 30, AF, 31, AG, 32, AH, 33, AI, 34, AJ, 35, AK, 36, AL, 37, AM, 38, AN, 39,
    AO, 40, AP, 41, AQ, 42, AR, 43, AS, 44, AT, 45, AU, 46, AV, 47, AW, 48, AX, 49
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25, AA, 26, AB, 27,
    AC, 28, AD, 29, AE, 30, AF, 31, AG, 32, AH, 33, AI, 34, AJ, 35, AK, 36, AL, 37, AM, 38, AN, 39,
    AO, 40, AP, 41, AQ, 42, AR, 43, AS, 44, AT, 45, AU, 46, AV, 47, AW, 48, AX, 49, AY, 50
);
impl_traits_for_tuple!(
    A, 0, B, 1, C, 2, D, 3, E, 4, F, 5, G, 6, H, 7, I, 8, J, 9, K, 10, L, 11, M, 12, N, 13, O, 14,
    P, 15, Q, 16, R, 17, S, 18, T, 19, U, 20, V, 21, W, 22, X, 23, Y, 24, Z, 25, AA, 26, AB, 27,
    AC, 28, AD, 29, AE, 30, AF, 31, AG, 32, AH, 33, AI, 34, AJ, 35, AK, 36, AL, 37, AM, 38, AN, 39,
    AO, 40, AP, 41, AQ, 42, AR, 43, AS, 44, AT, 45, AU, 46, AV, 47, AW, 48, AX, 49, AY, 50, AZ, 51
);
/// Wrapper marking a tuple of expressions as a single SQL row value.
#[derive(Debug, Clone)]
pub struct Row<T> {
    // The wrapped tuple of column expressions.
    columns: T,
}
impl<T> Row<T>
where
    T: Columns,
{
    /// Wraps a tuple of column expressions in a `Row`.
    pub fn new(columns: T) -> Self {
        Self { columns }
    }
}
/// SQL type marker used when the SQL-level type cannot be determined
/// statically at the code level; it is declared comparable with every other
/// SQL type (see the `Comparable` impls below).
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, Default)]
pub struct SqlTypeAny;
/// Marker for SQL string types.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, Default)]
pub struct SqlTypeString;
/// Marker for signed SQL integer types.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, Default)]
pub struct SqlTypeInt;
/// Marker for unsigned SQL integer types.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, Default)]
pub struct SqlTypeUint;
/// Marker for SQL boolean types.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, Default)]
pub struct SqlTypeBool;
/// Marker trait: a value of SQL type `Self` may appear on one side of a
/// comparison (`=`, `<`, `LIKE`, ...) against a value of SQL type `T`.
pub trait Comparable<T> {}
// `SqlTypeAny` is comparable with every concrete type, in both directions.
impl Comparable<SqlTypeString> for SqlTypeAny {}
impl Comparable<SqlTypeInt> for SqlTypeAny {}
impl Comparable<SqlTypeUint> for SqlTypeAny {}
impl Comparable<SqlTypeBool> for SqlTypeAny {}
impl Comparable<SqlTypeAny> for SqlTypeString {}
impl Comparable<SqlTypeAny> for SqlTypeInt {}
impl Comparable<SqlTypeAny> for SqlTypeUint {}
impl Comparable<SqlTypeAny> for SqlTypeBool {}
// Signed and unsigned integers are mutually comparable.
impl Comparable<SqlTypeInt> for SqlTypeUint {}
impl Comparable<SqlTypeUint> for SqlTypeInt {}
// Every SQL type is comparable with itself.
impl<T> Comparable<T> for T {}
// Defines one infix boolean comparison operator type (`Eq`, `Lt`, `Like`, ...)
// per `($ty, $op)` pair, plus its Expression / AND / OR / NOT / BuildSql
// impls. `$op` is the literal SQL text emitted between the operands,
// including its surrounding spaces.
macro_rules! impl_bool_binary_operators {
    ( $( ( $ty:ident, $op:expr ) ),* $(,)* ) => {
        $(
            #[derive(Debug, Clone)]
            pub struct $ty<L, R>
            where
                L: Expression,
                R: Expression,
                L::SqlType: Comparable<R::SqlType>,
            {
                lhs: L,
                rhs: R,
            }
            // A comparison yields a boolean; its aggregation state combines
            // the aggregation states of both operands.
            impl<L, R> Expression for $ty<L, R>
            where
                L: Expression,
                R: Expression,
                L::SqlType: Comparable<R::SqlType>,
                L::Aggregation: Aggregation<R::Aggregation>,
            {
                type SqlType = SqlTypeBool;
                type Term = Polynomial;
                type BoolOperation = BoolMono;
                type Aggregation = <L::Aggregation as Aggregation<R::Aggregation>>::Output;
            }
            // Comparison results can be chained with AND / OR / NOT.
            impl<L, R> AndOperatorMethod for $ty<L, R>
            where
                L: Expression,
                R: Expression,
                L::SqlType: Comparable<R::SqlType>,
                Self: Expression<SqlType = SqlTypeBool>,
            {}
            impl<L,R> OrOperatorMethod for $ty<L,R>
            where
                L: Expression,
                R: Expression,
                L::SqlType: Comparable<R::SqlType>,
                Self: Expression<SqlType = SqlTypeBool>,
            {}
            impl<L,R> NotOperatorMethod for $ty<L,R>
            where
                L: Expression,
                R: Expression,
                L::SqlType: Comparable<R::SqlType>,
                Self: Expression<SqlType = SqlTypeBool>,
            {}
            // Emits `<lhs><op><rhs>`, e.g. `a = b`.
            impl<L, R> BuildSql for $ty<L, R>
            where
                L: Expression + BuildSql,
                R: Expression + BuildSql,
                L::SqlType: Comparable<R::SqlType>,
            {
                fn build_sql(
                    &self,
                    buf: &mut Vec<u8>,
                    params: &mut Vec<Value>,
                ) -> Result<(), BuildSqlError> {
                    (|| -> Result<(), anyhow::Error> {
                        self.lhs.build_sql(buf, params)?;
                        write!(buf, $op)?;
                        self.rhs.build_sql(buf, params)?;
                        Ok(())
                    })()
                    .map_err(From::from)
                }
            }
        )*
    };
}
impl_bool_binary_operators!(
    (Eq, " = "),
    (NotEq, " != "),
    (Gt, " > "),
    (Ge, " >= "),
    (Lt, " < "),
    (Le, " <= "),
    (Like, " LIKE "),
    (NotLike, " NOT LIKE "),
);
// Defines the quantified comparison operators against a subquery
// (`= ANY (...)`, `< ALL (...)`, ...). The right-hand side is a full
// `SelectBuilder`; the left-hand side must be comparable with the subquery's
// single projected column set `C`.
macro_rules! impl_subquery_bool_binary_operators {
    ( $( ( $ty:ident, $op:expr ) ),* $(,)* ) => {
        $(
            #[derive(Clone)]
            pub struct $ty<Lhs, QS, W, C, G, H, O, L, LM>
            where
                C: Columns,
                Lhs: Expression,
                Lhs::SqlType: Comparable<C::SqlType>,
            {
                lhs: Lhs,
                rhs: SelectBuilder<QS, W, C, G, H, O, L, LM>,
            }
            // Subquery comparisons are never treated as aggregated.
            impl<Lhs, QS, W, C, G, H, O, L, LM> Expression for $ty<Lhs, QS, W, C, G, H, O, L, LM>
            where
                C: Columns,
                Lhs: Expression,
                Lhs::SqlType: Comparable<C::SqlType>,
            {
                type SqlType = SqlTypeBool;
                type Term = Polynomial;
                type BoolOperation = BoolMono;
                type Aggregation = NonAggregate;
            }
            impl<Lhs, QS, W, C, G, H, O, L, LM> AndOperatorMethod for $ty<Lhs, QS, W, C, G, H, O, L, LM>
            where
                C: Columns,
                Lhs: Expression,
                Lhs::SqlType: Comparable<C::SqlType>,
                Self: Expression<SqlType = SqlTypeBool>,
            {}
            impl<Lhs, QS, W, C, G, H, O, L, LM> OrOperatorMethod for $ty<Lhs, QS, W, C, G, H, O, L, LM>
            where
                C: Columns,
                Lhs: Expression,
                Lhs::SqlType: Comparable<C::SqlType>,
                Self: Expression<SqlType = SqlTypeBool>,
            {}
            impl<Lhs, QS, W, C, G, H, O, L, LM> NotOperatorMethod for $ty<Lhs, QS, W, C, G, H, O, L, LM>
            where
                C: Columns,
                Lhs: Expression,
                Lhs::SqlType: Comparable<C::SqlType>,
                Self: Expression<SqlType = SqlTypeBool>,
            {}
            // Emits `<lhs><op><subquery>`.
            // NOTE(review): assumes SelectBuilder::build_sql emits its own
            // surrounding parentheses — confirm.
            impl<Lhs, QS, W, C, G, H, O, L, LM> BuildSql for $ty<Lhs, QS, W, C, G, H, O, L, LM>
            where
                QS: BuildSql,
                C: BuildSql + Columns,
                W: BuildSql,
                G: BuildSql,
                H: BuildSql,
                O: BuildSql,
                L: BuildSql,
                LM: BuildSql,
                Lhs: Expression + BuildSql,
                Lhs::SqlType: Comparable<C::SqlType>,
            {
                fn build_sql(
                    &self,
                    buf: &mut Vec<u8>,
                    params: &mut Vec<Value>,
                ) -> Result<(), BuildSqlError> {
                    (|| -> Result<(), anyhow::Error> {
                        self.lhs.build_sql(buf, params)?;
                        write!(buf, $op)?;
                        self.rhs.build_sql(buf, params)?;
                        Ok(())
                    })()
                    .map_err(From::from)
                }
            }
        )*
    };
}
impl_subquery_bool_binary_operators!(
    (EqAny, " = ANY "),
    (NotEqAny, " != ANY "),
    (GtAny, " > ANY "),
    (GeAny, " >= ANY "),
    (LtAny, " < ANY "),
    (LeAny, " <= ANY "),
    (EqAll, " = ALL "),
    (NotEqAll, " != ALL "),
    (GtAll, " > ALL "),
    (GeAll, " >= ALL "),
    (LtAll, " < ALL "),
    (LeAll, " <= ALL "),
);
/// SQL `BETWEEN` expression: `target BETWEEN lower_bound AND upper_bound`.
/// Both bounds must be comparable with the target's SQL type.
#[derive(Debug, Clone)]
pub struct Between<T, L, U>
where
    T: Expression,
    L: Expression,
    U: Expression,
    L::SqlType: Comparable<T::SqlType>,
    U::SqlType: Comparable<T::SqlType>,
{
    target: T,
    lower_bound: L,
    upper_bound: U,
}
// A BETWEEN test yields a boolean and is never treated as aggregated.
impl<T, L, U> Expression for Between<T, L, U>
where
    T: Expression,
    L: Expression,
    U: Expression,
    L::SqlType: Comparable<T::SqlType>,
    U::SqlType: Comparable<T::SqlType>,
{
    type SqlType = SqlTypeBool;
    type Term = Polynomial;
    type BoolOperation = BoolMono;
    type Aggregation = NonAggregate;
}
// BETWEEN results can be chained with AND / OR / NOT.
impl<T, L, U> AndOperatorMethod for Between<T, L, U>
where
    T: Expression,
    L: Expression,
    U: Expression,
    L::SqlType: Comparable<T::SqlType>,
    U::SqlType: Comparable<T::SqlType>,
    Self: Expression<SqlType = SqlTypeBool>,
{
}
impl<T, L, U> OrOperatorMethod for Between<T, L, U>
where
    T: Expression,
    L: Expression,
    U: Expression,
    L::SqlType: Comparable<T::SqlType>,
    U::SqlType: Comparable<T::SqlType>,
    Self: Expression<SqlType = SqlTypeBool>,
{
}
impl<T, L, U> NotOperatorMethod for Between<T, L, U>
where
    T: Expression,
    L: Expression,
    U: Expression,
    L::SqlType: Comparable<T::SqlType>,
    U::SqlType: Comparable<T::SqlType>,
    Self: Expression<SqlType = SqlTypeBool>,
{
}
// Emits `<target> BETWEEN <lower> AND <upper>`.
impl<T, L, U> BuildSql for Between<T, L, U>
where
    T: Expression + BuildSql,
    L: Expression + BuildSql,
    U: Expression + BuildSql,
    L::SqlType: Comparable<T::SqlType>,
    U::SqlType: Comparable<T::SqlType>,
{
    fn build_sql(&self, buf: &mut Vec<u8>, params: &mut Vec<Value>) -> Result<(), BuildSqlError> {
        (|| -> Result<(), anyhow::Error> {
            self.target.build_sql(buf, params)?;
            write!(buf, " BETWEEN ")?;
            self.lower_bound.build_sql(buf, params)?;
            write!(buf, " AND ")?;
            self.upper_bound.build_sql(buf, params)?;
            Ok(())
        })()
        .map_err(From::from)
    }
}
// Defines the list-membership operators `IN` / `NOT IN` against an in-memory
// slice of values. Each value is bound as a `?` placeholder parameter rather
// than inlined into the SQL text.
macro_rules! impl_in_operators {
    ( $( ( $ty:ident, $op:expr ) ),* $(,)* ) => {
        $(
            #[derive(Debug, Clone)]
            pub struct $ty<L, R, ARR>
            where
                L: Expression,
                R: Expression,
                R::SqlType: Comparable<L::SqlType>,
                ARR: AsRef<[R]>,
            {
                lhs: L,
                rhs: ARR,
                // Pins the element type `R`, which only appears through `ARR`.
                rhs_value: PhantomData<R>,
            }
            impl<L, R, ARR> Expression for $ty<L, R, ARR>
            where
                L: Expression,
                R: Expression,
                R::SqlType: Comparable<L::SqlType>,
                ARR: AsRef<[R]>,
                R::Aggregation: Aggregation<L::Aggregation>,
            {
                type SqlType = SqlTypeBool;
                type Term = Polynomial;
                type BoolOperation = BoolMono;
                type Aggregation = <R::Aggregation as Aggregation<L::Aggregation>>::Output;
            }
            impl<L, R, ARR> AndOperatorMethod for $ty<L, R, ARR>
            where
                L: Expression,
                R: Expression,
                R::SqlType: Comparable<L::SqlType>,
                ARR: AsRef<[R]>,
                Self: Expression<SqlType = SqlTypeBool>,
            {}
            impl<L, R, ARR> OrOperatorMethod for $ty<L, R, ARR>
            where
                L: Expression,
                R: Expression,
                R::SqlType: Comparable<L::SqlType>,
                ARR: AsRef<[R]>,
                Self: Expression<SqlType = SqlTypeBool>,
            {}
            impl<L, R, ARR> NotOperatorMethod for $ty<L, R, ARR>
            where
                L: Expression,
                R: Expression,
                R::SqlType: Comparable<L::SqlType>,
                ARR: AsRef<[R]>,
                Self: Expression<SqlType = SqlTypeBool>,
            {}
            // Emits `<lhs><op>(?, ?, ...)` and pushes one bound parameter per
            // element. NOTE(review): an empty slice emits `IN ()`, which many
            // databases reject — confirm intended behavior.
            impl<L, R, ARR> BuildSql for $ty<L, R, ARR>
            where
                L: Expression + BuildSql,
                R: Expression + BuildSql + Clone + Into<Value>,
                R::SqlType: Comparable<L::SqlType>,
                ARR: AsRef<[R]>,
            {
                fn build_sql(
                    &self,
                    buf: &mut Vec<u8>,
                    params: &mut Vec<Value>,
                ) -> Result<(), BuildSqlError> {
                    (|| -> Result<(), anyhow::Error> {
                        self.lhs.build_sql(buf, params)?;
                        write!(buf, $op)?;
                        write!(buf, "(")?;
                        let mut is_first = true;
                        for v in self.rhs.as_ref() {
                            if is_first {
                                write!(buf, "?")?;
                                is_first = false;
                            } else {
                                write!(buf, ", ?")?;
                            }
                            params.push((*v).clone().into());
                        }
                        write!(buf, ")")?;
                        Ok(())
                    })()
                    .map_err(From::from)
                }
            }
        )*
    };
}
impl_in_operators!((Any, " IN "), (NotAny, " NOT IN "));
// Defines the postfix NULL-check operators `IS NULL` / `IS NOT NULL`.
// `$op` already contains the leading space, so the output is `<target> IS NULL`.
macro_rules! impl_null_check_operators {
    ( $( ( $ty:ident, $op:expr ) ),* $(,)* ) => {
        $(
            #[derive(Debug, Clone)]
            pub struct $ty<T>
            where
                T: Expression,
            {
                target: T
            }
            // The check yields a boolean; unlike the binary operators it is a
            // Monomial (a single postfix term) and keeps the target's
            // aggregation state.
            impl<T> Expression for $ty<T>
            where
                T: Expression,
            {
                type SqlType = SqlTypeBool;
                type Term = Monomial;
                type BoolOperation = BoolMono;
                type Aggregation = T::Aggregation;
            }
            impl<T> AndOperatorMethod for $ty<T>
            where
                T: Expression,
                Self: Expression<SqlType = SqlTypeBool>,
            {}
            impl<T> OrOperatorMethod for $ty<T>
            where
                T: Expression,
                Self: Expression<SqlType = SqlTypeBool>,
            {}
            impl<T> NotOperatorMethod for $ty<T>
            where
                T: Expression,
                Self: Expression<SqlType = SqlTypeBool>,
            {}
            impl<T> BuildSql for $ty<T>
            where
                T: Expression + BuildSql,
            {
                fn build_sql(
                    &self,
                    buf: &mut Vec<u8>,
                    params: &mut Vec<Value>,
                ) -> Result<(), BuildSqlError> {
                    (|| -> Result<(), anyhow::Error> {
                        self.target.build_sql(buf, params)?;
                        write!(buf, $op)?;
                        Ok(())
                    })()
                    .map_err(From::from)
                }
            }
        )*
    };
}
impl_null_check_operators!((IsNull, " IS NULL"), (IsNotNull, " IS NOT NULL"));
/// Method-call entry points for the infix comparison operators. Blanket-
/// implemented (further below) for every pair of expressions whose SQL types
/// are mutually `Comparable`, so `a.eq(b)` etc. is available directly on any
/// expression.
pub trait CompareBinaryOperatorMethod<R>: Expression + Sized
where
    R: Expression,
    Self::SqlType: Comparable<R::SqlType>,
{
    /// SQL `=`.
    fn eq(self, rhs: R) -> Eq<Self, R> {
        Eq { lhs: self, rhs }
    }
    /// SQL `!=`.
    fn not_eq(self, rhs: R) -> NotEq<Self, R> {
        NotEq { lhs: self, rhs }
    }
    /// SQL `>`.
    fn gt(self, rhs: R) -> Gt<Self, R> {
        Gt { lhs: self, rhs }
    }
    /// SQL `>=`.
    fn ge(self, rhs: R) -> Ge<Self, R> {
        Ge { lhs: self, rhs }
    }
    /// SQL `<`.
    fn lt(self, rhs: R) -> Lt<Self, R> {
        Lt { lhs: self, rhs }
    }
    /// SQL `<=`.
    fn le(self, rhs: R) -> Le<Self, R> {
        Le { lhs: self, rhs }
    }
    /// SQL `LIKE`.
    fn like(self, rhs: R) -> Like<Self, R> {
        Like { lhs: self, rhs }
    }
    /// SQL `NOT LIKE`.
    fn not_like(self, rhs: R) -> NotLike<Self, R> {
        NotLike { lhs: self, rhs }
    }
}
// NOTE(review, translated): this currently has to be defined per
// SelectBuilder. Support for these operators varies by database, so the
// per-builder definition may in fact be the right design.
/// Method-call entry points for the quantified subquery comparisons
/// (`= ANY (...)`, `< ALL (...)`, ...). The left-hand expression must be
/// comparable with the subquery's projected column set `C`.
pub trait SubQueryCompareBinaryOperatorMethod<QS, W, C, G, H, O, L, LM>:
    Expression + Sized
where
    C: Columns,
    Self::SqlType: Comparable<C::SqlType>,
{
    /// SQL `= ANY (...)`.
    fn eq_any(
        self,
        rhs: SelectBuilder<QS, W, C, G, H, O, L, LM>,
    ) -> EqAny<Self, QS, W, C, G, H, O, L, LM> {
        EqAny { lhs: self, rhs }
    }
    /// SQL `!= ANY (...)`.
    fn not_eq_any(
        self,
        rhs: SelectBuilder<QS, W, C, G, H, O, L, LM>,
    ) -> NotEqAny<Self, QS, W, C, G, H, O, L, LM> {
        NotEqAny { lhs: self, rhs }
    }
    /// SQL `> ANY (...)`.
    fn gt_any(
        self,
        rhs: SelectBuilder<QS, W, C, G, H, O, L, LM>,
    ) -> GtAny<Self, QS, W, C, G, H, O, L, LM> {
        GtAny { lhs: self, rhs }
    }
    /// SQL `>= ANY (...)`.
    fn ge_any(
        self,
        rhs: SelectBuilder<QS, W, C, G, H, O, L, LM>,
    ) -> GeAny<Self, QS, W, C, G, H, O, L, LM> {
        GeAny { lhs: self, rhs }
    }
    /// SQL `< ANY (...)`.
    fn lt_any(
        self,
        rhs: SelectBuilder<QS, W, C, G, H, O, L, LM>,
    ) -> LtAny<Self, QS, W, C, G, H, O, L, LM> {
        LtAny { lhs: self, rhs }
    }
    /// SQL `<= ANY (...)`.
    fn le_any(
        self,
        rhs: SelectBuilder<QS, W, C, G, H, O, L, LM>,
    ) -> LeAny<Self, QS, W, C, G, H, O, L, LM> {
        LeAny { lhs: self, rhs }
    }
    /// SQL `= ALL (...)`.
    fn eq_all(
        self,
        rhs: SelectBuilder<QS, W, C, G, H, O, L, LM>,
    ) -> EqAll<Self, QS, W, C, G, H, O, L, LM> {
        EqAll { lhs: self, rhs }
    }
    /// SQL `!= ALL (...)`.
    fn not_eq_all(
        self,
        rhs: SelectBuilder<QS, W, C, G, H, O, L, LM>,
    ) -> NotEqAll<Self, QS, W, C, G, H, O, L, LM> {
        NotEqAll { lhs: self, rhs }
    }
    /// SQL `> ALL (...)`.
    fn gt_all(
        self,
        rhs: SelectBuilder<QS, W, C, G, H, O, L, LM>,
    ) -> GtAll<Self, QS, W, C, G, H, O, L, LM> {
        GtAll { lhs: self, rhs }
    }
    /// SQL `>= ALL (...)`.
    fn ge_all(
        self,
        rhs: SelectBuilder<QS, W, C, G, H, O, L, LM>,
    ) -> GeAll<Self, QS, W, C, G, H, O, L, LM> {
        GeAll { lhs: self, rhs }
    }
    /// SQL `< ALL (...)`.
    fn lt_all(
        self,
        rhs: SelectBuilder<QS, W, C, G, H, O, L, LM>,
    ) -> LtAll<Self, QS, W, C, G, H, O, L, LM> {
        LtAll { lhs: self, rhs }
    }
    /// SQL `<= ALL (...)`.
    fn le_all(
        self,
        rhs: SelectBuilder<QS, W, C, G, H, O, L, LM>,
    ) -> LeAll<Self, QS, W, C, G, H, O, L, LM> {
        LeAll { lhs: self, rhs }
    }
}
/// Method-call entry point for SQL `BETWEEN`; blanket-implemented below for
/// any target whose SQL type both bounds are comparable with.
pub trait BetweenOperatorMethod<L, U>: Expression + Sized
where
    L: Expression,
    U: Expression,
    L::SqlType: Comparable<Self::SqlType>,
    U::SqlType: Comparable<Self::SqlType>,
{
    /// SQL `BETWEEN`: builds `self BETWEEN lower_bound AND upper_bound`.
    fn between(self, lower_bound: L, upper_bound: U) -> Between<Self, L, U> {
        Between {
            target: self,
            lower_bound,
            upper_bound,
        }
    }
}
/// Method-call entry points for SQL `IN` / `NOT IN` against an in-memory
/// slice of values; blanket-implemented below.
pub trait InOperatorMethod<T>: Expression + Sized
where
    T: Expression,
    T::SqlType: Comparable<Self::SqlType>,
{
    /// SQL `IN`: `self IN (?, ?, ...)` with one parameter per element.
    fn any<V: AsRef<[T]>>(self, values: V) -> Any<Self, T, V> {
        Any {
            lhs: self,
            rhs: values,
            rhs_value: PhantomData,
        }
    }
    /// SQL `NOT IN`: `self NOT IN (?, ?, ...)` with one parameter per element.
    fn not_any<V: AsRef<[T]>>(self, values: V) -> NotAny<Self, T, V> {
        NotAny {
            lhs: self,
            rhs: values,
            rhs_value: PhantomData,
        }
    }
}
/// Method-call entry points for SQL `IS NULL` / `IS NOT NULL`;
/// blanket-implemented below for every expression.
pub trait NullCheckOperatorMethod: Expression + Sized {
    /// SQL `IS NULL`.
    #[allow(clippy::wrong_self_convention)]
    fn is_null(self) -> IsNull<Self> {
        IsNull { target: self }
    }
    /// SQL `IS NOT NULL`.
    #[allow(clippy::wrong_self_convention)]
    fn is_not_null(self) -> IsNotNull<Self> {
        IsNotNull { target: self }
    }
}
// Blanket impls: every pair of expressions with mutually comparable SQL types
// automatically gets the comparison / subquery / BETWEEN / IN methods, and
// every expression gets the NULL checks.
impl<L, R> CompareBinaryOperatorMethod<R> for L
where
    L: Expression,
    R: Expression,
    L::SqlType: Comparable<R::SqlType>,
{
}
impl<Lhs, QS, W, C, G, H, O, L, LM> SubQueryCompareBinaryOperatorMethod<QS, W, C, G, H, O, L, LM>
    for Lhs
where
    C: Columns,
    Lhs: Expression,
    Lhs::SqlType: Comparable<C::SqlType>,
{
}
impl<T, L, U> BetweenOperatorMethod<L, U> for T
where
    T: Expression,
    L: Expression,
    U: Expression,
    L::SqlType: Comparable<T::SqlType>,
    U::SqlType: Comparable<T::SqlType>,
{
}
impl<L, T> InOperatorMethod<T> for L
where
    L: Expression,
    T: Expression,
    T::SqlType: Comparable<L::SqlType>,
{
}
impl<T> NullCheckOperatorMethod for T where T: Expression {}
/// SQL `AND` conjunction of two boolean expressions. `LK` / `RK` record each
/// operand's `BoolOperation` kind so the `BuildSql` impls (generated further
/// below) can choose where parentheses are required.
#[derive(Debug, Clone)]
pub struct And<L, LK, R, RK> {
    lhs: L,
    rhs: R,
    // Zero-sized markers pinning the operands' BoolOperation kinds.
    lhs_kind: PhantomData<LK>,
    rhs_kind: PhantomData<RK>,
}
impl<L, R> And<L, L::BoolOperation, R, R::BoolOperation>
where
    L: Expression,
    R: Expression,
{
    /// Builds `lhs AND rhs`, deriving the kind markers from the operands.
    pub fn new(lhs: L, rhs: R) -> Self {
        And {
            lhs,
            rhs,
            lhs_kind: PhantomData,
            rhs_kind: PhantomData,
        }
    }
}
impl<L, R> Expression for And<L, L::BoolOperation, R, R::BoolOperation>
where
    L: Expression,
    R: Expression,
    L::SqlType: Comparable<SqlTypeBool>,
    R::SqlType: Comparable<L::SqlType>,
    L::Aggregation: Aggregation<R::Aggregation>,
{
    type SqlType = SqlTypeBool;
    type Term = Polynomial;
    type BoolOperation = BoolAnd;
    type Aggregation = <L::Aggregation as Aggregation<R::Aggregation>>::Output;
}
// NOTE(review): `And` implements AndOperatorMethod and NotOperatorMethod but
// not OrOperatorMethod, so `.or(...)` cannot be chained directly onto an AND
// result — confirm whether this asymmetry is intentional.
impl<L, R> AndOperatorMethod for And<L, L::BoolOperation, R, R::BoolOperation>
where
    L: Expression,
    R: Expression,
    L::SqlType: Comparable<SqlTypeBool>,
    R::SqlType: Comparable<L::SqlType>,
    Self: Expression<SqlType = SqlTypeBool>,
{
}
impl<L, R> NotOperatorMethod for And<L, L::BoolOperation, R, R::BoolOperation>
where
    L: Expression,
    R: Expression,
    L::SqlType: Comparable<SqlTypeBool>,
    R::SqlType: Comparable<L::SqlType>,
    Self: Expression<SqlType = SqlTypeBool>,
{
}
/// SQL `OR` disjunction of two boolean expressions. `LK` / `RK` record each
/// operand's `BoolOperation` kind so the `BuildSql` impls (generated further
/// below) can choose where parentheses are required.
#[derive(Debug, Clone)]
pub struct Or<L, LK, R, RK> {
    lhs: L,
    rhs: R,
    // Zero-sized markers pinning the operands' BoolOperation kinds.
    lhs_kind: PhantomData<LK>,
    rhs_kind: PhantomData<RK>,
}
impl<L, R> Or<L, L::BoolOperation, R, R::BoolOperation>
where
    L: Expression,
    R: Expression,
{
    /// Builds `lhs OR rhs`, deriving the kind markers from the operands.
    pub fn new(lhs: L, rhs: R) -> Self {
        Or {
            lhs,
            rhs,
            lhs_kind: PhantomData,
            rhs_kind: PhantomData,
        }
    }
}
impl<L, R> Expression for Or<L, L::BoolOperation, R, R::BoolOperation>
where
    L: Expression,
    R: Expression,
    L::SqlType: Comparable<SqlTypeBool>,
    R::SqlType: Comparable<L::SqlType>,
    L::Aggregation: Aggregation<R::Aggregation>,
{
    type SqlType = SqlTypeBool;
    type Term = Polynomial;
    type BoolOperation = BoolOr;
    type Aggregation = <L::Aggregation as Aggregation<R::Aggregation>>::Output;
}
// NOTE(review): `Or` implements OrOperatorMethod and NotOperatorMethod but
// not AndOperatorMethod, mirroring the asymmetry on `And` — confirm intent.
impl<L, R> OrOperatorMethod for Or<L, L::BoolOperation, R, R::BoolOperation>
where
    L: Expression,
    R: Expression,
    L::SqlType: Comparable<SqlTypeBool>,
    R::SqlType: Comparable<L::SqlType>,
    Self: Expression<SqlType = SqlTypeBool>,
{
}
impl<L, R> NotOperatorMethod for Or<L, L::BoolOperation, R, R::BoolOperation>
where
    L: Expression,
    R: Expression,
    L::SqlType: Comparable<SqlTypeBool>,
    R::SqlType: Comparable<L::SqlType>,
    Self: Expression<SqlType = SqlTypeBool>,
{
}
// Generates the BuildSql impls for And / Or, one per combination of operand
// BoolOperation kinds. The arm selector names which operands get wrapped in
// parentheses; the table of invocations below applies parentheses exactly
// when an operand's top-level connective is the *opposite* of the outer one
// (an OR under AND, or an AND under OR). Mono and same-connective operands
// are emitted bare.
macro_rules! impl_build_sql_and_or {
    ( $ty:ident, $op:expr, both_no_parentheses, $l_kind:ty, $r_kind:ty ) => {
        impl<L, R> BuildSql for $ty<L, $l_kind, R, $r_kind>
        where
            L: Expression<BoolOperation = $l_kind> + BuildSql,
            R: Expression<BoolOperation = $r_kind> + BuildSql,
            L::SqlType: Comparable<SqlTypeBool>,
            R::SqlType: Comparable<SqlTypeBool>,
        {
            fn build_sql(
                &self,
                buf: &mut Vec<u8>,
                params: &mut Vec<Value>,
            ) -> Result<(), BuildSqlError> {
                (|| -> Result<(), anyhow::Error> {
                    self.lhs.build_sql(buf, params)?;
                    write!(buf, $op)?;
                    self.rhs.build_sql(buf, params)?;
                    Ok(())
                })()
                .map_err(From::from)
            }
        }
    };
    ( $ty:ident, $op:expr, lhs_parentheses, $l_kind:ty, $r_kind:ty ) => {
        impl<L, R> BuildSql for $ty<L, $l_kind, R, $r_kind>
        where
            L: Expression<BoolOperation = $l_kind> + BuildSql,
            R: Expression<BoolOperation = $r_kind> + BuildSql,
            L::SqlType: Comparable<SqlTypeBool>,
            R::SqlType: Comparable<SqlTypeBool>,
        {
            fn build_sql(
                &self,
                buf: &mut Vec<u8>,
                params: &mut Vec<Value>,
            ) -> Result<(), BuildSqlError> {
                (|| -> Result<(), anyhow::Error> {
                    write!(buf, "(")?;
                    self.lhs.build_sql(buf, params)?;
                    write!(buf, ")")?;
                    write!(buf, $op)?;
                    self.rhs.build_sql(buf, params)?;
                    Ok(())
                })()
                .map_err(From::from)
            }
        }
    };
    ( $ty:ident, $op:expr, rhs_parentheses, $l_kind:ty, $r_kind:ty ) => {
        impl<L, R> BuildSql for $ty<L, $l_kind, R, $r_kind>
        where
            L: Expression<BoolOperation = $l_kind> + BuildSql,
            R: Expression<BoolOperation = $r_kind> + BuildSql,
            L::SqlType: Comparable<SqlTypeBool>,
            R::SqlType: Comparable<SqlTypeBool>,
        {
            fn build_sql(
                &self,
                buf: &mut Vec<u8>,
                params: &mut Vec<Value>,
            ) -> Result<(), BuildSqlError> {
                (|| -> Result<(), anyhow::Error> {
                    self.lhs.build_sql(buf, params)?;
                    write!(buf, $op)?;
                    write!(buf, "(")?;
                    self.rhs.build_sql(buf, params)?;
                    write!(buf, ")")?;
                    Ok(())
                })()
                .map_err(From::from)
            }
        }
    };
    ( $ty:ident, $op:expr, both_parentheses, $l_kind:ty, $r_kind:ty ) => {
        impl<L, R> BuildSql for $ty<L, $l_kind, R, $r_kind>
        where
            L: Expression<BoolOperation = $l_kind> + BuildSql,
            R: Expression<BoolOperation = $r_kind> + BuildSql,
            L::SqlType: Comparable<SqlTypeBool>,
            R::SqlType: Comparable<SqlTypeBool>,
        {
            fn build_sql(
                &self,
                buf: &mut Vec<u8>,
                params: &mut Vec<Value>,
            ) -> Result<(), BuildSqlError> {
                (|| -> Result<(), anyhow::Error> {
                    write!(buf, "(")?;
                    self.lhs.build_sql(buf, params)?;
                    write!(buf, ")")?;
                    write!(buf, $op)?;
                    write!(buf, "(")?;
                    self.rhs.build_sql(buf, params)?;
                    write!(buf, ")")?;
                    Ok(())
                })()
                .map_err(From::from)
            }
        }
    };
}
// Parenthesization table: OR operands are wrapped under AND, AND operands are
// wrapped under OR; everything else is emitted bare.
impl_build_sql_and_or!(And, " AND ", both_no_parentheses, BoolMono, BoolMono);
impl_build_sql_and_or!(And, " AND ", both_no_parentheses, BoolMono, BoolAnd);
impl_build_sql_and_or!(And, " AND ", rhs_parentheses, BoolMono, BoolOr);
impl_build_sql_and_or!(And, " AND ", both_no_parentheses, BoolAnd, BoolMono);
impl_build_sql_and_or!(And, " AND ", both_no_parentheses, BoolAnd, BoolAnd);
impl_build_sql_and_or!(And, " AND ", rhs_parentheses, BoolAnd, BoolOr);
impl_build_sql_and_or!(And, " AND ", lhs_parentheses, BoolOr, BoolMono);
impl_build_sql_and_or!(And, " AND ", lhs_parentheses, BoolOr, BoolAnd);
impl_build_sql_and_or!(And, " AND ", both_parentheses, BoolOr, BoolOr);
impl_build_sql_and_or!(Or, " OR ", both_no_parentheses, BoolMono, BoolMono);
impl_build_sql_and_or!(Or, " OR ", rhs_parentheses, BoolMono, BoolAnd);
impl_build_sql_and_or!(Or, " OR ", both_no_parentheses, BoolMono, BoolOr);
impl_build_sql_and_or!(Or, " OR ", lhs_parentheses, BoolAnd, BoolMono);
impl_build_sql_and_or!(Or, " OR ", both_parentheses, BoolAnd, BoolAnd);
impl_build_sql_and_or!(Or, " OR ", lhs_parentheses, BoolAnd, BoolOr);
impl_build_sql_and_or!(Or, " OR ", both_no_parentheses, BoolOr, BoolMono);
impl_build_sql_and_or!(Or, " OR ", rhs_parentheses, BoolOr, BoolAnd);
impl_build_sql_and_or!(Or, " OR ", both_no_parentheses, BoolOr, BoolOr);
/// SQL `NOT` applied to a boolean expression. `S` pins the operand's `Term`
/// kind so the two `BuildSql` impls below can decide whether parentheses are
/// needed around the operand.
#[derive(Debug, Clone)]
pub struct Not<T, S> {
    expr: T,
    // Zero-sized marker pinning the operand's Term kind.
    expr_term: PhantomData<S>,
}
impl<T> Not<T, T::Term>
where
    T: Expression,
    T::SqlType: Comparable<SqlTypeBool>,
{
    /// Builds `NOT expr`, deriving the term marker from the operand.
    pub fn new(expr: T) -> Not<T, T::Term> {
        Not {
            expr,
            expr_term: PhantomData,
        }
    }
}
// The negation is itself a single (Monomial) boolean term and keeps the
// operand's aggregation state.
impl<T> Expression for Not<T, T::Term>
where
    T: Expression,
    T::SqlType: Comparable<SqlTypeBool>,
{
    type SqlType = SqlTypeBool;
    type Term = Monomial;
    type BoolOperation = BoolMono;
    type Aggregation = T::Aggregation;
}
// Single-term operand: emit `NOT x` without parentheses.
impl<T> BuildSql for Not<T, Monomial>
where
    T: Expression<Term = Monomial> + BuildSql,
    T::SqlType: Comparable<SqlTypeBool>,
{
    fn build_sql(&self, buf: &mut Vec<u8>, params: &mut Vec<Value>) -> Result<(), BuildSqlError> {
        (|| -> Result<(), anyhow::Error> {
            write!(buf, "NOT ")?;
            self.expr.build_sql(buf, params)?;
            Ok(())
        })()
        .map_err(From::from)
    }
}
// Composite operand: emit `NOT (x)` so the negation binds the whole operand.
impl<T> BuildSql for Not<T, Polynomial>
where
    T: Expression<Term = Polynomial> + BuildSql,
    T::SqlType: Comparable<SqlTypeBool>,
{
    fn build_sql(&self, buf: &mut Vec<u8>, params: &mut Vec<Value>) -> Result<(), BuildSqlError> {
        (|| -> Result<(), anyhow::Error> {
            write!(buf, "NOT (")?;
            self.expr.build_sql(buf, params)?;
            write!(buf, ")")?;
            Ok(())
        })()
        .map_err(From::from)
    }
}
/// Adds `.and(rhs)` to boolean-comparable expressions.
pub trait AndOperatorMethod: Expression + Sized
where
    Self::SqlType: Comparable<SqlTypeBool>,
{
    /// Builds `self AND rhs`; operand `BoolOperation` kinds are threaded into
    /// the result type for later parenthesization decisions.
    fn and<R>(self, rhs: R) -> And<Self, Self::BoolOperation, R, R::BoolOperation>
    where
        R: Expression,
        R::SqlType: Comparable<Self::SqlType>,
    {
        And::new(self, rhs)
    }
}
/// Adds `.or(rhs)` to boolean-comparable expressions.
pub trait OrOperatorMethod: Expression + Sized
where
    Self::SqlType: Comparable<SqlTypeBool>,
{
    /// Builds `self OR rhs`.
    fn or<R>(self, rhs: R) -> Or<Self, Self::BoolOperation, R, R::BoolOperation>
    where
        R: Expression,
        R::SqlType: Comparable<Self::SqlType>,
    {
        Or::new(self, rhs)
    }
}
/// Adds `.not()` to boolean-comparable expressions.
pub trait NotOperatorMethod: Expression + Sized
where
    Self::SqlType: Comparable<SqlTypeBool>,
{
    /// Builds `NOT self`.
    fn not(self) -> Not<Self, Self::Term> {
        Not::new(self)
    }
}
/// Free-function spelling of [`NotOperatorMethod::not`], so callers can write
/// `not(expr)` as well as `expr.not()`.
///
/// The bounds were previously written twice — inline on `T` and again in the
/// `where` clause — with identical meaning; they are stated once here.
pub fn not<T>(expr: T) -> Not<T, T::Term>
where
    T: NotOperatorMethod + Expression,
    T::SqlType: Comparable<SqlTypeBool>,
{
    expr.not()
}
/// Defines a SQL function wrapper type plus a lower-case constructor.
///
/// `define_sql_function!(Count, count(t: SqlTypeAny) -> SqlTypeInt, Aggregate)`
/// expands to a `Count<t>` struct, a `count(...)` constructor function, an
/// `Expression` impl whose `SqlType` is the declared return type, and a
/// `BuildSql` impl that renders `count(<args>)`. Each argument expression's
/// SQL type must be `Comparable` with the declared parameter type.
macro_rules! define_sql_function {
    ( $func_type:ident, $func_name:ident ( $( $arg_name:ident : $arg_type:ty ),* ) -> $ret_type:ty, $aggregation:ty ) => {
        #[allow(non_camel_case_types)]
        #[derive(Debug, Clone)]
        pub struct $func_type< $( $arg_name, )* > {
            $( $arg_name: $arg_name, )*
        }
        #[allow(non_camel_case_types)]
        impl<$( $arg_name, )*> $func_type<$( $arg_name, )*> {
            pub fn new( $( $arg_name: $arg_name, )* ) -> $func_type<$( $arg_name, )*> {
                $func_type { $( $arg_name, )* }
            }
        }
        #[allow(non_camel_case_types)]
        pub fn $func_name<$( $arg_name, )*>( $( $arg_name: $arg_name, )* ) -> $func_type<$( $arg_name, )*>
        where
            $(
                $arg_name: Expression,
                <$arg_name>::SqlType: Comparable<$arg_type>,
            )*
        {
            $func_type::new($( $arg_name, )*)
        }
        #[allow(non_camel_case_types)]
        impl<$( $arg_name, )*> Expression for $func_type<$( $arg_name, )*>
        where
            $(
                $arg_name: Expression,
                <$arg_name>::SqlType: Comparable<$arg_type>,
            )*
        {
            type SqlType = $ret_type;
            type Term = Monomial;
            type BoolOperation = NonBool;
            type Aggregation = $aggregation;
        }
        #[allow(non_camel_case_types)]
        impl<$( $arg_name, )*> BuildSql for $func_type<$( $arg_name, )*>
        where
            $( $arg_name: BuildSql, )*
        {
            fn build_sql(
                &self,
                buf: &mut Vec<u8>,
                params: &mut Vec<Value>,
            ) -> Result<(), BuildSqlError> {
                // The SQL function name is the literal macro argument.
                write!(buf, concat!(stringify!($func_name), "("))?;
                build_sql_comma_separated_values!(buf, params, self, $( $arg_name, )*);
                write!(buf, ")")?;
                Ok(())
            }
        }
    };
}
/// Emits `build_sql` calls for each listed field, separated by `", "`.
///
/// NOTE(review): the main arm requires at least one field, so a zero-argument
/// function defined through `define_sql_function!` would not match here (and
/// the second arm's shape does not match that call form either) — confirm
/// whether nullary SQL functions are ever needed.
macro_rules! build_sql_comma_separated_values {
    ( $buf:ident, $params:ident, $x:ident, $first_field:ident $(, $field:ident )* $(,)* ) => {
        $x.$first_field.build_sql($buf, $params)?;
        $(
            write!($buf, ", ")?;
            $x.$field.build_sql($buf, $params)?;
        )*
    };
    ( $x:ident, ) => {};
}
// SQL is dynamically typed, so functions must be able to handle dynamic types.
// For example, `sum` works on both integer and real operands, i.e. it can
// accept several types — so functions may be better implemented as traits.
// define_sql_function!(SumInt, sum(t: SqlTypeInt) -> SqlTypeInt, Aggregate);
// Concrete SQL functions generated from the macro above. `count` is the only
// aggregate; `date` and `left` are plain (non-aggregate) string functions.
define_sql_function!(Count, count(t: SqlTypeAny) -> SqlTypeInt, Aggregate);
define_sql_function!(Date, date(t: SqlTypeString) -> SqlTypeString, NonAggregate);
define_sql_function!(Left, left(t: SqlTypeString, n: SqlTypeInt) -> SqlTypeString, NonAggregate);
/// Marker for SQL types accepted by `sum()` (numeric, or dynamically typed).
pub trait SqlKindNumber {}
impl SqlKindNumber for SqlTypeInt {}
impl SqlKindNumber for SqlTypeUint {}
impl SqlKindNumber for SqlTypeAny {}
/// The `sum(<expr>)` aggregate over a numeric expression.
///
/// Unlike the `define_sql_function!`-generated functions, `sum` is written by
/// hand so its result type can follow the operand (`SqlType = T::SqlType`).
#[derive(Debug, Clone)]
pub struct Sum<T>
where
    T: Expression,
    T::SqlType: SqlKindNumber,
{
    t: T,
}
impl<T> Sum<T>
where
    T: Expression,
    T::SqlType: SqlKindNumber,
{
    /// Wraps `t` in a `sum(...)` aggregate.
    pub fn new(t: T) -> Sum<T> {
        Sum { t }
    }
}
/// Free-function constructor: `sum(expr)`.
pub fn sum<T>(t: T) -> Sum<T>
where
    T: Expression,
    T::SqlType: SqlKindNumber,
{
    Sum::new(t)
}
// The aggregate keeps its operand's SQL type and is a single term.
impl<T> Expression for Sum<T>
where
    T: Expression,
    T::SqlType: SqlKindNumber,
{
    type SqlType = T::SqlType;
    type Term = Monomial;
    type BoolOperation = NonBool;
    type Aggregation = Aggregate;
}
impl<T> BuildSql for Sum<T>
where
    T: BuildSql + Expression,
    T::SqlType: SqlKindNumber,
{
    fn build_sql(&self, buf: &mut Vec<u8>, params: &mut Vec<Value>) -> Result<(), BuildSqlError> {
        write!(buf, "sum(")?;
        self.t.build_sql(buf, params)?;
        write!(buf, ")")?;
        Ok(())
    }
}
| 28.396055 | 145 | 0.534814 |
67c3d4f3597fc405ed360b27d25d0698efb7d5f9 | 3,307 | // Copyright 2020 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
anyhow::Result,
errors::FfxError,
ffx_core::ffx_plugin,
ffx_get_ssh_address_args::GetSshAddressCommand,
fidl_fuchsia_developer_bridge::{
DaemonError, TargetAddrInfo, TargetCollectionProxy, TargetMarker, TargetQuery,
},
fidl_fuchsia_net::{IpAddress, Ipv4Address, Ipv6Address},
netext::scope_id_to_name,
std::io::{stdout, Write},
std::net::IpAddr,
std::time::Duration,
timeout::timeout,
};
// This constant can be removed, and the implementation can assert that a port
// always comes from the daemon after some transition period (~May '21).
const DEFAULT_SSH_PORT: u16 = 22;
/// `ffx target get-ssh-address`: prints the selected target's SSH address to
/// stdout. Thin wrapper around [`get_ssh_address_impl`].
#[ffx_plugin(TargetCollectionProxy = "daemon::protocol")]
pub async fn get_ssh_address(
    collection_proxy: TargetCollectionProxy,
    cmd: GetSshAddressCommand,
) -> Result<()> {
    get_ssh_address_impl(collection_proxy, cmd, &mut stdout()).await
}
/// Resolves the (possibly defaulted) target, asks it for its SSH address, and
/// writes `host:port` to `writer` — IPv6 addresses are bracketed and carry a
/// `%scope` suffix when the scope id is non-zero.
async fn get_ssh_address_impl<W: Write>(
    collection_proxy: TargetCollectionProxy,
    cmd: GetSshAddressCommand,
    writer: &mut W,
) -> Result<()> {
    let timeout_dur = Duration::from_secs_f64(cmd.timeout().await?);
    let (proxy, handle) = fidl::endpoints::create_proxy::<TargetMarker>()?;
    let target: Option<String> = ffx_config::get("target.default").await?;
    let ffx: ffx_lib_args::Ffx = argh::from_env();
    // Whether the user relied on the configured default target (affects the
    // wording of the errors constructed below).
    let is_default_target = ffx.target.is_none();
    let t_clone = target.clone();
    let t_clone_2 = target.clone();
    // The target lookup and the address query share a single timeout budget.
    let res = timeout(timeout_dur, async {
        collection_proxy
            .open_target(TargetQuery { string_matcher: target, ..TargetQuery::EMPTY }, handle)
            .await?
            .map_err(|err| {
                anyhow::Error::from(FfxError::OpenTargetError {
                    err,
                    target: t_clone_2,
                    is_default_target,
                })
            })?;
        proxy.get_ssh_address().await.map_err(anyhow::Error::from)
    })
    .await
    .map_err(|_| FfxError::DaemonError {
        err: DaemonError::Timeout,
        target: t_clone,
        is_default_target,
    })??;
    // Port-less addresses use 0 as a sentinel; it is swapped for
    // DEFAULT_SSH_PORT when printing (see the constant's comment above).
    let (ip, scope, port) = match res {
        TargetAddrInfo::Ip(info) => {
            let ip = match info.ip {
                IpAddress::Ipv6(Ipv6Address { addr }) => IpAddr::from(addr),
                IpAddress::Ipv4(Ipv4Address { addr }) => IpAddr::from(addr),
            };
            (ip, info.scope_id, 0)
        }
        TargetAddrInfo::IpPort(info) => {
            let ip = match info.ip {
                IpAddress::Ipv6(Ipv6Address { addr }) => IpAddr::from(addr),
                IpAddress::Ipv4(Ipv4Address { addr }) => IpAddr::from(addr),
            };
            (ip, info.scope_id, info.port)
        }
    };
    match ip {
        IpAddr::V4(ip) => {
            write!(writer, "{}", ip)?;
        }
        IpAddr::V6(ip) => {
            write!(writer, "[{}", ip)?;
            if scope > 0 {
                // Scoped (e.g. link-local) v6 addresses need the interface name.
                write!(writer, "%{}", scope_id_to_name(scope))?;
            }
            write!(writer, "]")?;
        }
    }
    write!(writer, ":{}", if port == 0 { DEFAULT_SSH_PORT } else { port })?;
    writeln!(writer)?;
    Ok(())
}
| 34.447917 | 94 | 0.585425 |
26487fe1bf22e3829d0d87a9e4609c0a9b526ca2 | 781 | #[doc = "Writer for register OPTKEYR"]
// svd2rust-style register writer API (this file appears machine-generated;
// manual edits may be clobbered by regeneration).
pub type W = crate::W<u32, super::OPTKEYR>;
#[doc = "Register OPTKEYR `reset()`'s with value 0"]
impl crate::ResetValue for super::OPTKEYR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Write proxy for field `OPTKEYR`"]
pub struct OPTKEYR_W<'a> {
    w: &'a mut W,
}
impl<'a> OPTKEYR_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u32) -> &'a mut W {
        // The field spans all 32 bits, so the mask keeps nothing of the old
        // value: this is effectively a full-register write.
        self.w.bits = (self.w.bits & !0xffff_ffff) | ((value as u32) & 0xffff_ffff);
        self.w
    }
}
impl W {
    #[doc = "Bits 0:31 - Option byte key"]
    #[inline(always)]
    pub fn optkeyr(&mut self) -> OPTKEYR_W {
        OPTKEYR_W { w: self }
    }
}
| 26.033333 | 84 | 0.569782 |
612875057d104943a3d1ce4a3da76e2f947a20fb | 1,743 | #[doc = "Writer for register SYSTEMOFF"]
// svd2rust-style register writer API (this file appears machine-generated;
// manual edits may be clobbered by regeneration).
pub type W = crate::W<u32, super::SYSTEMOFF>;
#[doc = "Register SYSTEMOFF `reset()`'s with value 0"]
impl crate::ResetValue for super::SYSTEMOFF {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Enable System OFF mode\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SYSTEMOFF_AW {
    #[doc = "1: Enable System OFF mode"]
    ENABLE,
}
// Write-only field with a single meaningful value: ENABLE maps to bit = 1.
impl From<SYSTEMOFF_AW> for bool {
    #[inline(always)]
    fn from(variant: SYSTEMOFF_AW) -> Self {
        match variant {
            SYSTEMOFF_AW::ENABLE => true,
        }
    }
}
#[doc = "Write proxy for field `SYSTEMOFF`"]
pub struct SYSTEMOFF_W<'a> {
    w: &'a mut W,
}
impl<'a> SYSTEMOFF_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: SYSTEMOFF_AW) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Enable System OFF mode"]
    #[inline(always)]
    pub fn enable(self) -> &'a mut W {
        self.variant(SYSTEMOFF_AW::ENABLE)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at bit 0: clear it, then OR in the new value.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
impl W {
    #[doc = "Bit 0 - Enable System OFF mode"]
    #[inline(always)]
    pub fn systemoff(&mut self) -> SYSTEMOFF_W {
        SYSTEMOFF_W { w: self }
    }
}
| 26.409091 | 70 | 0.55938 |
1a8684eda6a9ed74fdb437af53901fc6c3dc14ba | 21,760 | use crate::api::{WebSocketApiTask, WebSocketServiceExt};
use crate::app;
use crate::audio::AudioService;
use crate::canvas::VirtualCanvas;
use crate::component;
use crate::util::NeqAssign;
use anyhow::{anyhow, Error};
use ferrogallic_shared::api::game::{Canvas, Game, GamePhase, GameReq, GameState, Player};
use ferrogallic_shared::config::{CANVAS_HEIGHT, CANVAS_WIDTH};
use ferrogallic_shared::domain::{
Color, Epoch, Guess, I12Pair, LineWidth, Lobby, Lowercase, Nickname, Tool, UserId,
};
use gloo::events::{EventListener, EventListenerOptions};
use std::collections::BTreeMap;
use std::mem;
use std::sync::Arc;
use time::Duration;
use wasm_bindgen::JsCast;
use web_sys::{CanvasRenderingContext2d, HtmlCanvasElement, HtmlElement, KeyboardEvent};
use yew::services::render::{RenderService, RenderTask};
use yew::services::websocket::WebSocketStatus;
use yew::{
html, Callback, Component, ComponentLink, Html, NodeRef, PointerEvent, Properties, ShouldRender,
};
/// Messages handled by [`InGame`]'s `update`.
pub enum Msg {
    /// Websocket open/closed/error transition.
    ConnStatus(WebSocketStatus),
    /// A server-sent game message.
    Message(Game),
    /// Ask the server to drop a player; the epoch distinguishes sessions of
    /// the same user id.
    RemovePlayer(UserId, Epoch<UserId>),
    /// The local player picked a word to draw.
    ChooseWord(Lowercase),
    /// Canvas pointer gesture (down/move/up).
    Pointer(PointerAction),
    /// Revert the last stroke.
    Undo,
    /// Flush the virtual canvas to the on-screen canvas (animation frame).
    Render,
    /// Guess input changed.
    SetGuess(Lowercase),
    /// Submit the current guess.
    SendGuess,
    /// Switch drawing tool (pen width / fill).
    SetTool(Tool),
    /// Switch drawing color.
    SetColor(Color),
    /// Bubble an error up to the app shell.
    SetGlobalError(Error),
    /// Event deliberately dropped.
    Ignore,
}
/// Pointer gesture phases, in canvas-local coordinates.
pub enum PointerAction {
    Down(I12Pair),
    Move(I12Pair),
    Up(I12Pair),
}
/// Inputs supplied by the parent [`app::App`] component.
#[derive(Clone, Properties)]
pub struct Props {
    pub app_link: ComponentLink<app::App>,
    pub lobby: Lobby,
    pub nick: Nickname,
}
/// Active-game component state.
pub struct InGame {
    link: ComponentLink<Self>,
    app_link: ComponentLink<app::App>,
    lobby: Lobby,
    nick: Nickname,
    // Cached `nick.user_id()`, compared against server-sent ids.
    user_id: UserId,
    // `Some` while the websocket connection is alive.
    active_ws: Option<WebSocketApiTask<Game>>,
    audio: AudioService,
    // `Some` while an animation-frame render is already pending (coalesces
    // repeated render requests — see `schedule_render_to_canvas`).
    scheduled_render: Option<RenderTask>,
    canvas_ref: NodeRef,
    canvas: Option<CanvasState>,
    pointer: PointerState,
    guess: Lowercase,
    tool: Tool,
    color: Color,
    players: Arc<BTreeMap<UserId, Player>>,
    game: Arc<GameState>,
    guesses: Arc<Vec<Guess>>,
}
/// Canvas resources created once the DOM canvas element is mounted.
struct CanvasState {
    // Off-screen model of the drawing; replayed onto `context` when rendering.
    vr: VirtualCanvas,
    context: CanvasRenderingContext2d,
    // Held for its side effect: keeps the touchstart prevent-default listener
    // registered until this state is dropped.
    _disable_touchstart: EventListener,
}
/// Whether the primary pointer is currently pressed on the canvas.
#[derive(Copy, Clone)]
enum PointerState {
    Up,
    Down { at: I12Pair },
}
/// Yew component for an active game session: owns the websocket task, canvas
/// state, and local mirrors of the server's game state.
impl Component for InGame {
    type Message = Msg;
    type Properties = Props;
    fn create(props: Self::Properties, link: ComponentLink<Self>) -> Self {
        Self {
            link,
            app_link: props.app_link,
            lobby: props.lobby,
            // Derive the user id from the nickname before moving the nick in.
            user_id: props.nick.user_id(),
            nick: props.nick,
            active_ws: None,
            audio: AudioService::new(),
            scheduled_render: None,
            canvas_ref: Default::default(),
            canvas: None,
            pointer: PointerState::Up,
            guess: Default::default(),
            tool: Default::default(),
            color: Default::default(),
            players: Default::default(),
            game: Default::default(),
            guesses: Default::default(),
        }
    }
    /// Central message handler. Returns `true` only when the DOM must be
    /// re-rendered (player list, game state, or guess-related changes);
    /// canvas drawing bypasses the vdom via the virtual-canvas path.
    fn update(&mut self, msg: Self::Message) -> ShouldRender {
        match msg {
            Msg::ConnStatus(status) => match status {
                WebSocketStatus::Opened => {
                    // Announce ourselves as soon as the socket is up.
                    if let Some(ws) = &mut self.active_ws {
                        ws.send_api(&GameReq::Join(self.lobby.clone(), self.nick.clone()));
                    }
                    false
                }
                WebSocketStatus::Closed => {
                    self.active_ws = None;
                    self.app_link
                        .send_message(app::Msg::SetError(anyhow!("Lost connection")));
                    false
                }
                WebSocketStatus::Error => {
                    self.active_ws = None;
                    self.app_link
                        .send_message(app::Msg::SetError(anyhow!("Error in websocket")));
                    false
                }
            },
            Msg::Message(msg) => match msg {
                Game::Canvas(event) => {
                    self.render_to_virtual(event);
                    self.schedule_render_to_canvas();
                    false
                }
                Game::CanvasBulk(events) => {
                    for event in events {
                        self.render_to_virtual(event);
                    }
                    self.schedule_render_to_canvas();
                    false
                }
                Game::Players(players) => {
                    self.players = players;
                    true
                }
                Game::Game(game) => {
                    self.game = game;
                    true
                }
                Game::Guess(guess) => {
                    self.play_sound(&guess);
                    // Copy-on-write: clone the shared Vec only if aliased.
                    Arc::make_mut(&mut self.guesses).push(guess);
                    true
                }
                Game::GuessBulk(guesses) => {
                    Arc::make_mut(&mut self.guesses).extend(guesses);
                    true
                }
                Game::ClearGuesses => {
                    self.guesses = Default::default();
                    true
                }
                Game::Heartbeat => false,
            },
            Msg::RemovePlayer(user_id, epoch) => {
                if let Some(ws) = &mut self.active_ws {
                    ws.send_api(&GameReq::Remove(user_id, epoch));
                }
                false
            }
            Msg::ChooseWord(word) => {
                if let Some(ws) = &mut self.active_ws {
                    ws.send_api(&GameReq::Choose(word));
                }
                false
            }
            Msg::SendGuess => {
                if let Some(ws) = &mut self.active_ws {
                    // Empty guesses are swallowed; `mem::take` clears the box.
                    if !self.guess.is_empty() {
                        ws.send_api(&GameReq::Guess(mem::take(&mut self.guess)));
                    }
                }
                false
            }
            // Translate a pointer gesture into zero, one, or two canvas
            // events depending on the active tool and pointer state.
            Msg::Pointer(action) => {
                // Stack buffers so `events` can borrow a slice of 0–2 events
                // without allocating.
                let one_event;
                let two_events;
                let events: &[Canvas] = match (self.tool, action) {
                    (Tool::Pen(_), PointerAction::Down(at)) => {
                        self.pointer = PointerState::Down { at };
                        &[]
                    }
                    (Tool::Pen(width), PointerAction::Move(to)) => match self.pointer {
                        PointerState::Down { at: from } if to != from => {
                            self.pointer = PointerState::Down { at: to };
                            one_event = [Canvas::Line {
                                from,
                                to,
                                width,
                                color: self.color,
                            }];
                            &one_event
                        }
                        PointerState::Down { .. } | PointerState::Up => &[],
                    },
                    (Tool::Pen(width), PointerAction::Up(to)) => match self.pointer {
                        PointerState::Down { at: from } => {
                            self.pointer = PointerState::Up;
                            // Final segment plus an undo checkpoint.
                            two_events = [
                                Canvas::Line {
                                    from,
                                    to,
                                    width,
                                    color: self.color,
                                },
                                Canvas::PushUndo,
                            ];
                            &two_events
                        }
                        PointerState::Up => &[],
                    },
                    (Tool::Fill, PointerAction::Down(at)) => {
                        two_events = [
                            Canvas::Fill {
                                at,
                                color: self.color,
                            },
                            Canvas::PushUndo,
                        ];
                        &two_events
                    }
                    (Tool::Fill, PointerAction::Move(_)) | (Tool::Fill, PointerAction::Up(_)) => {
                        &[]
                    }
                };
                // Apply locally and mirror each event to the server.
                for &event in events {
                    self.render_to_virtual(event);
                    self.schedule_render_to_canvas();
                    if let Some(ws) = &mut self.active_ws {
                        ws.send_api(&GameReq::Canvas(event));
                    }
                }
                false
            }
            Msg::Undo => {
                let event = Canvas::PopUndo;
                self.render_to_virtual(event);
                self.schedule_render_to_canvas();
                if let Some(ws) = &mut self.active_ws {
                    ws.send_api(&GameReq::Canvas(event));
                }
                false
            }
            Msg::Render => {
                self.render_to_canvas();
                false
            }
            Msg::SetGuess(guess) => {
                self.guess = guess;
                true
            }
            Msg::SetTool(tool) => {
                self.tool = tool;
                true
            }
            Msg::SetColor(color) => {
                self.color = color;
                true
            }
            Msg::SetGlobalError(e) => {
                self.app_link.send_message(app::Msg::SetError(e));
                false
            }
            Msg::Ignore => false,
        }
    }
    /// Applies new props; re-renders only when the lobby or nick changed.
    fn change(&mut self, props: Self::Properties) -> ShouldRender {
        let Props {
            app_link,
            lobby,
            nick,
        } = props;
        self.app_link = app_link;
        let new_lobby = self.lobby.neq_assign(lobby);
        let new_nick = if nick != self.nick {
            // Keep the cached user id consistent with the nickname.
            self.user_id = nick.user_id();
            self.nick = nick;
            true
        } else {
            false
        };
        // Plain `|` on two already-computed bools; no short-circuiting needed.
        new_lobby | new_nick
    }
    /// First-render hook: acquires the 2D canvas context, registers a
    /// touchstart listener that calls `prevent_default`, and opens the
    /// websocket connection.
    fn rendered(&mut self, first_render: bool) {
        if first_render {
            if let Some(canvas) = self.canvas_ref.cast::<HtmlCanvasElement>() {
                if let Some(context) = canvas
                    .get_context("2d")
                    .ok()
                    .flatten()
                    .and_then(|c| c.dyn_into::<CanvasRenderingContext2d>().ok())
                {
                    // prevent_default on touchstart; presumably to stop touch
                    // scrolling/gestures while drawing — TODO confirm.
                    let disable_touchstart = EventListener::new_with_options(
                        &canvas.into(),
                        "touchstart",
                        EventListenerOptions::enable_prevent_default(),
                        |e| e.prevent_default(),
                    );
                    self.canvas = Some(CanvasState {
                        vr: VirtualCanvas::new(),
                        context,
                        _disable_touchstart: disable_touchstart,
                    });
                }
            }
            let started_ws = WebSocketServiceExt::connect_api(
                &self.link,
                |res| match res {
                    Ok(msg) => Msg::Message(msg),
                    Err(e) => Msg::SetGlobalError(e.context("Failed to receive from websocket")),
                },
                Msg::ConnStatus,
            );
            match started_ws {
                Ok(task) => self.active_ws = Some(task),
                Err(e) => self.app_link.send_message(app::Msg::SetError(
                    e.context("Failed to connect to websocket"),
                )),
            }
        }
    }
    /// Builds the page: player list, canvas plus toolbars, guess area, and a
    /// status bar. Drawing callbacks are only wired up when this client is
    /// allowed to draw in the current phase.
    fn view(&self) -> Html {
        // Local view-only summary of who is doing what this round.
        enum Status<'a> {
            Waiting,
            Choosing(&'a Player),
            Drawing(&'a Player),
        }
        let mut can_draw = false;
        let mut choose_words = None;
        let mut cur_round = None;
        let mut status = Status::Waiting;
        let mut drawing_started = None;
        let mut guess_template = None;
        // Derive all per-phase UI state in a single pass over the game phase.
        let _: () = match &self.game.phase {
            GamePhase::WaitingToStart => {
                can_draw = true;
            }
            GamePhase::ChoosingWords {
                round,
                choosing,
                words,
            } => {
                cur_round = Some(*round);
                if let Some(player) = self.players.get(choosing) {
                    status = Status::Choosing(player);
                }
                // Only the chooser sees the word options.
                if *choosing == self.user_id {
                    choose_words = Some(words.clone());
                }
            }
            GamePhase::Drawing {
                round,
                drawing,
                correct: _,
                word,
                epoch: _,
                started,
            } => {
                cur_round = Some(*round);
                if let Some(player) = self.players.get(drawing) {
                    status = Status::Drawing(player);
                }
                drawing_started = Some(*started);
                // Drawer sees the full word; guessers see blanks/spaces.
                if *drawing == self.user_id {
                    can_draw = true;
                    guess_template = Some((word.clone(), component::guess_template::Reveal::All));
                } else {
                    guess_template =
                        Some((word.clone(), component::guess_template::Reveal::Spaces));
                }
            }
        };
        let on_keydown;
        let on_pointerdown;
        let on_pointermove;
        let on_pointerup;
        if can_draw {
            // Keyboard shortcuts: 1-5 pen widths, f fill, Ctrl+Z undo.
            on_keydown = self.link.callback(|e: KeyboardEvent| {
                let ctrl = e.ctrl_key();
                let msg = match e.key_code() {
                    49 /* 1 */ if !ctrl => Msg::SetTool(Tool::Pen(LineWidth::R0)),
                    50 /* 2 */ if !ctrl => Msg::SetTool(Tool::Pen(LineWidth::R1)),
                    51 /* 3 */ if !ctrl => Msg::SetTool(Tool::Pen(LineWidth::R2)),
                    52 /* 4 */ if !ctrl => Msg::SetTool(Tool::Pen(LineWidth::R4)),
                    53 /* 5 */ if !ctrl => Msg::SetTool(Tool::Pen(LineWidth::R7)),
                    70 /* f */ if !ctrl => Msg::SetTool(Tool::Fill),
                    90 /* z */ if ctrl => Msg::Undo,
                    _ => return Msg::Ignore,
                };
                e.prevent_default();
                msg
            });
            // Primary button only; grab focus and pointer capture on press.
            on_pointerdown = self.handle_pointer_event_if(
                |e| e.buttons() == 1,
                |e, target, at| {
                    if let Err(e) = target.focus() {
                        log::warn!("Failed to focus canvas: {:?}", e);
                    }
                    if let Err(e) = target.set_pointer_capture(e.pointer_id()) {
                        log::warn!("Failed to set pointer capture: {:?}", e);
                    }
                    PointerAction::Down(at)
                },
            );
            on_pointermove = self.handle_pointer_event(|_, _, at| PointerAction::Move(at));
            on_pointerup = self.handle_pointer_event(|e, target, at| {
                if let Err(e) = target.release_pointer_capture(e.pointer_id()) {
                    log::warn!("Failed to release pointer capture: {:?}", e);
                }
                PointerAction::Up(at)
            });
        } else {
            // Spectators/guessers get inert callbacks.
            on_keydown = Callback::from(|_| {});
            let noop = Callback::from(|_| {});
            on_pointerdown = noop.clone();
            on_pointermove = noop.clone();
            on_pointerup = noop;
        }
        html! {
            <main class="window" style="max-width: 1500px; margin: auto">
                <div class="title-bar">
                    <div class="title-bar-text">{"In Game - "}{&self.lobby}</div>
                </div>
                <article class="window-body" style="display: flex">
                    <section style="flex: 1; height: 804px">
                        <component::Players game_link=self.link.clone() players=self.players.clone()/>
                    </section>
                    <section style="margin: 0 8px; position: relative" onkeydown=on_keydown>
                        <fieldset style="padding-block-start: 2px; padding-block-end: 0px; padding-inline-start: 2px; padding-inline-end: 2px;">
                            <canvas
                                ref=self.canvas_ref.clone()
                                style={"outline: initial" /* disable focus outline */}
                                tabindex={"-1" /* allow focus */}
                                onpointerdown=on_pointerdown
                                onpointermove=on_pointermove
                                onpointerup=on_pointerup
                                width=CANVAS_WIDTH.to_string()
                                height=CANVAS_HEIGHT.to_string()
                            />
                        </fieldset>
                        <div style="position: relative">
                            <component::ColorToolbar game_link=self.link.clone() color=self.color/>
                            <component::ToolToolbar game_link=self.link.clone() tool=self.tool/>
                            <div
                                class="hatched-background"
                                style=if can_draw { "" } else { "position: absolute; top: 0; width: 100%; height: 100%" }
                            />
                        </div>
                        {choose_words.map(|words| html! {
                            <component::ChoosePopup game_link=self.link.clone() words=words />
                        }).unwrap_or_default()}
                    </section>
                    <section style="flex: 1; height: 804px; display: flex; flex-direction: column">
                        <div style="flex: 1; min-height: 0; margin-bottom: 8px">
                            <component::GuessArea players=self.players.clone() guesses=self.guesses.clone()/>
                        </div>
                        <component::GuessInput game_link=self.link.clone() guess=self.guess.clone()/>
                    </section>
                </article>
                <footer class="status-bar">
                    <div>
                        {match status {
                            Status::Waiting => html! { {"Waiting to start"} },
                            Status::Choosing(player) => html! { <>{&player.nick}{" is choosing a word"}</> },
                            Status::Drawing(player) => html! { <>{&player.nick}{" is drawing"}</> },
                        }}
                    </div>
                    <div>
                        {drawing_started.map(|drawing_started| html! {
                            <component::Timer started=drawing_started count_down_from=Duration::seconds(i64::from(self.game.config.guess_seconds))/>
                        }).unwrap_or_default()}
                        {"/"}{self.game.config.guess_seconds}{" seconds"}
                    </div>
                    <div>
                        {cur_round.map(|cur_round| html! {
                            {cur_round}
                        }).unwrap_or_default()}
                        {"/"}{self.game.config.rounds}{" rounds"}
                    </div>
                    <div style="width: calc((min(100vw - 16px, 1500px) - 804px) / 2 - 6px)">
                        {guess_template.map(|(word, reveal)| html! {
                            <component::GuessTemplate word=word reveal=reveal guess=self.guess.clone()/>
                        }).unwrap_or_default()}
                    </div>
                </footer>
            </main>
        }
    }
}
impl InGame {
    /// Unconditional variant of [`Self::handle_pointer_event_if`].
    fn handle_pointer_event(
        &self,
        f: impl Fn(&PointerEvent, &HtmlElement, I12Pair) -> PointerAction + 'static,
    ) -> Callback<PointerEvent> {
        self.handle_pointer_event_if(|_| true, f)
    }
    /// Builds a pointer-event callback: when `pred` accepts the event, it is
    /// translated into canvas-local coordinates and mapped to a
    /// [`PointerAction`] by `f`; otherwise the event is ignored.
    fn handle_pointer_event_if(
        &self,
        pred: impl Fn(&PointerEvent) -> bool + 'static,
        f: impl Fn(&PointerEvent, &HtmlElement, I12Pair) -> PointerAction + 'static,
    ) -> Callback<PointerEvent> {
        self.link.callback(move |e: PointerEvent| {
            if pred(&e) {
                if let Some(target) = e.target().and_then(|t| t.dyn_into::<HtmlElement>().ok()) {
                    e.prevent_default();
                    // Convert viewport coordinates to element-local ones.
                    let origin = target.get_bounding_client_rect();
                    Msg::Pointer(f(
                        &e,
                        &target,
                        I12Pair::new(
                            e.client_x() as i16 - origin.x() as i16,
                            e.client_y() as i16 - origin.y() as i16,
                        ),
                    ))
                } else {
                    Msg::Ignore
                }
            } else {
                Msg::Ignore
            }
        })
    }
    /// Plays the audio cue for a guess; failures are logged, not fatal.
    fn play_sound(&mut self, guess: &Guess) {
        if let Err(e) = self.audio.handle_guess(self.user_id, guess) {
            log::error!("Failed to play sound: {:?}", e);
        }
    }
    /// Applies a canvas event to the off-screen model only.
    fn render_to_virtual(&mut self, event: Canvas) {
        if let Some(canvas) = &mut self.canvas {
            canvas.vr.handle_event(event);
        }
    }
    /// Requests an animation-frame render unless one is already pending.
    fn schedule_render_to_canvas(&mut self) {
        if let scheduled @ None = &mut self.scheduled_render {
            *scheduled = Some(RenderService::request_animation_frame(
                self.link.callback(|_| Msg::Render),
            ));
        }
    }
    /// Flushes the off-screen model onto the visible canvas.
    fn render_to_canvas(&mut self) {
        self.scheduled_render = None;
        if let Some(canvas) = &mut self.canvas {
            if let Err(e) = canvas.vr.render_to(&canvas.context) {
                log::error!("Failed to render to canvas: {:?}", e);
            }
        }
    }
}
| 37.647059 | 148 | 0.439338 |
03efc0456db57d25c285b5081e199825174164bf | 1,129 | use serde::{Deserialize, Serialize};
use crate::{BaseInterface, InterfaceType};
/// A veth (virtual ethernet pair) interface.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct VethInterface {
    #[serde(flatten)]
    pub base: BaseInterface,
    /// Veth-specific settings; omitted from serialized output when absent.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub veth: Option<VethConfig>,
}
/// Veth-specific configuration: the name of the peer interface.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default)]
pub struct VethConfig {
    pub peer: String,
}
impl Default for VethInterface {
    /// Builds an empty veth interface whose `iface_type` is preset to
    /// `InterfaceType::Veth`.
    ///
    /// Bug fix: the previous body used `..Default::default()` on `Self`
    /// inside `Default::default()` itself, which recurses unconditionally
    /// (stack overflow on first use; rustc's `unconditional_recursion`
    /// lint flags this). The remaining field is now initialized explicitly.
    fn default() -> Self {
        Self {
            base: BaseInterface {
                iface_type: InterfaceType::Veth,
                // This is `BaseInterface::default()`, not a self-call.
                ..Default::default()
            },
            veth: None,
        }
    }
}
impl VethInterface {
    /// Overlays `other_iface` onto `self`: base settings are merged, and a
    /// present veth section wholesale replaces the current one.
    pub(crate) fn update(&mut self, other_iface: &VethInterface) {
        // TODO: this should be done by Trait
        self.base.update(&other_iface.base);
        if let Some(veth_conf) = other_iface.veth.as_ref() {
            self.veth = Some(veth_conf.clone());
        }
    }

    /// Normalizes state before verification: a veth interface is compared as
    /// a plain ethernet interface.
    pub(crate) fn pre_verify_cleanup(&mut self) {
        self.base.pre_verify_cleanup();
        self.base.iface_type = InterfaceType::Ethernet;
    }
}
| 25.659091 | 67 | 0.613818 |
50f6079926b70575ac28a44b62a6e56bc1214945 | 5,944 | use crate::{
array::BooleanArray,
bitmap::{utils::BitmapIter, MutableBitmap},
error::{ArrowError, Result},
};
use super::utils;
use parquet2::{
encoding::{hybrid_rle, Encoding},
metadata::{ColumnChunkMetaData, ColumnDescriptor},
read::{Page, PageHeader, StreamingIterator},
};
/// Decodes `length` PLAIN-encoded booleans with no validity stream.
fn read_required(buffer: &[u8], length: u32, values: &mut MutableBitmap) {
    // PLAIN booleans are LSB bit-packed, so the page buffer can be consumed
    // directly as a bitmap of `length` bits.
    let bits = BitmapIter::new(buffer, 0, length as usize);
    values.extend_from_trusted_len_iter(bits);
}
/// Decodes PLAIN booleans that carry a definition-level (validity) stream.
///
/// `validity_buffer` holds hybrid RLE/bit-packed def-levels (max level 1);
/// `values_buffer` holds only the non-null values, LSB bit-packed. Nulls are
/// pushed as `false` placeholder bits and masked by `validity`.
fn read_optional(
    validity_buffer: &[u8],
    values_buffer: &[u8],
    length: u32,
    values: &mut MutableBitmap,
    validity: &mut MutableBitmap,
) {
    let length = length as usize;
    let validity_iterator = hybrid_rle::Decoder::new(&validity_buffer, 1);
    // in PLAIN, booleans are LSB bitpacked and thus we can read them as if they were a bitmap.
    // note that `values_buffer` contains only non-null values.
    // thus, at this point, it is not known how many values this buffer contains
    // values_len is the upper bound. The actual number depends on how many nulls there is.
    let values_len = values_buffer.len() * 8;
    let mut values_iterator = BitmapIter::new(values_buffer, 0, values_len);
    for run in validity_iterator {
        match run {
            hybrid_rle::HybridEncoded::Bitpacked(packed_validity) => {
                // the pack may contain more items than needed.
                let remaining = length - values.len();
                let len = core::cmp::min(packed_validity.len() * 8, remaining);
                for is_valid in BitmapIter::new(packed_validity, 0, len) {
                    validity.push(is_valid);
                    let value = if is_valid {
                        // Non-null: consume the next packed value.
                        values_iterator.next().unwrap()
                    } else {
                        // Null: placeholder bit, masked out by `validity`.
                        false
                    };
                    values.push(value);
                }
            }
            hybrid_rle::HybridEncoded::Rle(value, additional) => {
                // An RLE run repeats one def-level `additional` times.
                let is_set = value[0] == 1;
                validity.extend_constant(additional, is_set);
                if is_set {
                    (0..additional).for_each(|_| {
                        let value = values_iterator.next().unwrap();
                        values.push(value)
                    })
                } else {
                    values.extend_constant(additional, false)
                }
            }
        }
    }
}
/// Materializes all pages of a boolean column chunk into a `BooleanArray`.
///
/// Capacity for both values and validity is reserved up front from the
/// chunk's `num_values`.
pub fn iter_to_array<I, E>(mut iter: I, metadata: &ColumnChunkMetaData) -> Result<BooleanArray>
where
    ArrowError: From<E>,
    E: Clone,
    I: StreamingIterator<Item = core::result::Result<Page, E>>,
{
    let capacity = metadata.num_values() as usize;
    let mut values = MutableBitmap::with_capacity(capacity);
    let mut validity = MutableBitmap::with_capacity(capacity);
    while let Some(page) = iter.next() {
        extend_from_page(
            // `StreamingIterator` yields borrows; clone only the error case.
            page.as_ref().map_err(|x| x.clone())?,
            metadata.descriptor(),
            &mut values,
            &mut validity,
        )?
    }
    Ok(BooleanArray::from_data(values.into(), validity.into()))
}
/// Decodes one data page into the shared `values`/`validity` bitmaps.
///
/// Only PLAIN-encoded, non-dictionary boolean pages with no repetition levels
/// and at most one definition level are supported; anything else yields the
/// `not_implemented` error.
fn extend_from_page(
    page: &Page,
    descriptor: &ColumnDescriptor,
    values: &mut MutableBitmap,
    validity: &mut MutableBitmap,
) -> Result<()> {
    assert_eq!(descriptor.max_rep_level(), 0);
    assert!(descriptor.max_def_level() <= 1);
    // Max def level 1 means the column is nullable and has a validity stream.
    let is_optional = descriptor.max_def_level() == 1;
    match page.header() {
        PageHeader::V1(header) => {
            assert_eq!(header.definition_level_encoding, Encoding::Rle);
            match (&page.encoding(), page.dictionary_page(), is_optional) {
                (Encoding::Plain, None, true) => {
                    let (validity_buffer, values_buffer) = utils::split_buffer_v1(page.buffer());
                    read_optional(
                        validity_buffer,
                        values_buffer,
                        page.num_values() as u32,
                        values,
                        validity,
                    )
                }
                (Encoding::Plain, None, false) => {
                    read_required(page.buffer(), page.num_values() as u32, values)
                }
                _ => {
                    return Err(utils::not_implemented(
                        &page.encoding(),
                        is_optional,
                        page.dictionary_page().is_some(),
                        "V1",
                        "Boolean",
                    ))
                }
            }
        }
        PageHeader::V2(header) => {
            // V2 headers state the definition-levels byte length explicitly.
            let def_level_buffer_length = header.definition_levels_byte_length as usize;
            match (page.encoding(), page.dictionary_page(), is_optional) {
                (Encoding::Plain, None, true) => {
                    let (validity_buffer, values_buffer) =
                        utils::split_buffer_v2(page.buffer(), def_level_buffer_length);
                    read_optional(
                        validity_buffer,
                        values_buffer,
                        page.num_values() as u32,
                        values,
                        validity,
                    )
                }
                (Encoding::Plain, None, false) => {
                    read_required(page.buffer(), page.num_values() as u32, values)
                }
                _ => {
                    return Err(utils::not_implemented(
                        &page.encoding(),
                        is_optional,
                        page.dictionary_page().is_some(),
                        "V2",
                        "Boolean",
                    ))
                }
            }
        }
    };
    Ok(())
}
| 36.243902 | 97 | 0.516992 |
2231994a533eb7603eecfc23d3fc95abdfdca285 | 139 | #![no_std]
#[macro_use]
extern crate block_cipher_trait;
extern crate blowfish;
// `new_test!` (from `block_cipher_trait`) appears to generate a test that
// checks `blowfish::Blowfish` against the "blowfish" test-vector data set.
new_test!(blowfish_test, "blowfish", blowfish::Blowfish);
| 19.857143 | 57 | 0.776978 |
fb2ea92c821eb091e0bb3e272c86a7c106e38554 | 3,420 | // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
/// Service configuration: endpoint resolution, region, and credentials.
pub struct Config {
    pub(crate) endpoint_resolver: ::std::sync::Arc<dyn aws_endpoint::ResolveAwsEndpoint>,
    pub(crate) region: Option<aws_types::region::Region>,
    pub(crate) credentials_provider: aws_types::credentials::SharedCredentialsProvider,
}
impl std::fmt::Debug for Config {
    /// Prints only the struct name: every field (including the credentials
    /// provider) is deliberately omitted from debug output.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Config").finish()
    }
}
impl Config {
    /// Returns a fresh, empty [`Builder`].
    pub fn builder() -> Builder {
        Builder::default()
    }
    /// Builds a service config directly from a shared AWS config.
    pub fn new(config: &aws_types::config::Config) -> Self {
        Builder::from(config).build()
    }
    /// The signature version 4 service signing name to use in the credential scope when signing requests.
    ///
    /// The signing service may be overridden by the `Endpoint`, or by specifying a custom
    /// [`SigningService`](aws_types::SigningService) during operation construction
    pub fn signing_service(&self) -> &'static str {
        "finspace-api"
    }
}
/// Builder for [`Config`]; unset fields fall back to defaults in `build`.
#[derive(Default)]
pub struct Builder {
    endpoint_resolver: Option<::std::sync::Arc<dyn aws_endpoint::ResolveAwsEndpoint>>,
    region: Option<aws_types::region::Region>,
    credentials_provider: Option<aws_types::credentials::SharedCredentialsProvider>,
}
impl Builder {
    /// Same as [`Builder::default`].
    pub fn new() -> Self {
        Self::default()
    }
    /// Overrides endpoint resolution for this service.
    pub fn endpoint_resolver(
        mut self,
        endpoint_resolver: impl aws_endpoint::ResolveAwsEndpoint + 'static,
    ) -> Self {
        self.endpoint_resolver = Some(::std::sync::Arc::new(endpoint_resolver));
        self
    }
    /// Sets (or clears, via `None`) the region.
    pub fn region(mut self, region: impl Into<Option<aws_types::region::Region>>) -> Self {
        self.region = region.into();
        self
    }
    /// Set the credentials provider for this service
    pub fn credentials_provider(
        mut self,
        credentials_provider: impl aws_types::credentials::ProvideCredentials + 'static,
    ) -> Self {
        self.credentials_provider = Some(aws_types::credentials::SharedCredentialsProvider::new(
            credentials_provider,
        ));
        self
    }
    /// By-reference setter used when threading an optional provider through.
    pub fn set_credentials_provider(
        &mut self,
        credentials_provider: Option<aws_types::credentials::SharedCredentialsProvider>,
    ) -> &mut Self {
        self.credentials_provider = credentials_provider;
        self
    }
    /// Finalizes the config; unset fields fall back to the default endpoint
    /// resolver and the `NoCredentials` provider.
    pub fn build(self) -> Config {
        Config {
            endpoint_resolver: self
                .endpoint_resolver
                .unwrap_or_else(|| ::std::sync::Arc::new(crate::aws_endpoint::endpoint_resolver())),
            region: self.region,
            credentials_provider: self.credentials_provider.unwrap_or_else(|| {
                aws_types::credentials::SharedCredentialsProvider::new(
                    crate::no_credentials::NoCredentials,
                )
            }),
        }
    }
}
impl From<&aws_types::config::Config> for Builder {
    /// Seeds a builder from a shared AWS config, copying the region and the
    /// credentials provider when present.
    fn from(input: &aws_types::config::Config) -> Self {
        let mut builder = Builder::new().region(input.region().cloned());
        builder.set_credentials_provider(input.credentials_provider().cloned());
        builder
    }
}
/// Convenience conversion: equivalent to `Builder::from(config).build()`.
impl From<&aws_types::config::Config> for Config {
    fn from(config: &aws_types::config::Config) -> Self {
        Builder::from(config).build()
    }
}
| 35.625 | 106 | 0.639181 |
4bc7a989c35599cff9b7942ec36983774652a5a8 | 10,909 | use crate::debug_span;
use chalk_ir::fold::{Fold, Folder};
use chalk_ir::interner::{HasInterner, Interner};
use chalk_ir::visit::{ControlFlow, Visit, Visitor};
use chalk_ir::*;
use super::InferenceTable;
impl<I: Interner> InferenceTable<I> {
    /// "U-canonicalizes" an already-canonical value: collects every universe
    /// it mentions and renumbers them into a compact contiguous range,
    /// returning the remapped value together with the [`UniverseMap`] needed
    /// to translate universes back afterwards.
    pub fn u_canonicalize<T>(
        &mut self,
        interner: &I,
        value0: &Canonical<T>,
    ) -> UCanonicalized<T::Result>
    where
        T: Clone + HasInterner<Interner = I> + Fold<I> + Visit<I>,
        T::Result: HasInterner<Interner = I>,
    {
        debug_span!("u_canonicalize", "{:#?}", value0);
        // First, find all the universes that appear in `value`.
        let mut universes = UniverseMap::new();
        for universe in value0.binders.iter(interner) {
            universes.add(*universe.skip_kind());
        }
        value0.value.visit_with(
            &mut UCollector {
                universes: &mut universes,
                interner,
            },
            DebruijnIndex::INNERMOST,
        );
        // Now re-map the universes found in value. We have to do this
        // in a second pass because it is only then that we know the
        // full set of universes found in the original value.
        let value1 = value0
            .value
            .clone()
            .fold_with(
                &mut UMapToCanonical {
                    universes: &universes,
                    interner,
                },
                DebruijnIndex::INNERMOST,
            )
            .unwrap();
        // The binder universes are remapped with the same table; `unwrap` is
        // justified because every binder universe was `add`ed above.
        let binders = CanonicalVarKinds::from_iter(
            interner,
            value0
                .binders
                .iter(interner)
                .map(|pk| pk.map_ref(|&ui| universes.map_universe_to_canonical(ui).unwrap())),
        );
        UCanonicalized {
            quantified: UCanonical {
                universes: universes.num_canonical_universes(),
                canonical: Canonical {
                    value: value1,
                    binders,
                },
            },
            universes,
        }
    }
}
#[derive(Debug)]
pub struct UCanonicalized<T: HasInterner> {
/// The canonicalized result.
pub quantified: UCanonical<T>,
/// A map between the universes in `quantified` and the original universes
pub universes: UniverseMap,
}
/// Extension methods on [`UniverseMap`] used by u-canonicalization to
/// translate universes between the "original" and "canonical" spaces.
pub trait UniverseMapExt {
    /// Record `universe` as one that appears in the original value.
    fn add(&mut self, universe: UniverseIndex);
    /// Map an original universe to its compressed canonical counterpart,
    /// or `None` if the universe was never recorded.
    fn map_universe_to_canonical(&self, universe: UniverseIndex) -> Option<UniverseIndex>;
    /// Map a canonical universe back to the original universe it stands for.
    fn map_universe_from_canonical(&self, universe: UniverseIndex) -> UniverseIndex;
    /// Translate every universe in `canonical_value` back into the
    /// original universe space (see the impl for the detailed contract).
    fn map_from_canonical<T, I>(
        &self,
        interner: &I,
        canonical_value: &Canonical<T>,
    ) -> Canonical<T::Result>
    where
        T: Clone + Fold<I> + HasInterner<Interner = I>,
        T::Result: HasInterner<Interner = I>,
        I: Interner;
}
impl UniverseMapExt for UniverseMap {
    /// Record `universe`, keeping `self.universes` sorted and free of
    /// duplicates (binary search locates the insertion point).
    fn add(&mut self, universe: UniverseIndex) {
        if let Err(i) = self.universes.binary_search(&universe) {
            self.universes.insert(i, universe);
        }
    }
    /// Given a universe U that appeared in our original value, return
    /// the universe to use in the u-canonical value. This is done by
    /// looking for the index I of U in `self.universes`. We will
    /// return the universe with "counter" I. This effectively
    /// "compresses" the range of universes to things from
    /// `0..self.universes.len()`. If the universe is not present in the map,
    /// we return `None`.
    fn map_universe_to_canonical(&self, universe: UniverseIndex) -> Option<UniverseIndex> {
        self.universes
            .binary_search(&universe)
            .ok()
            .map(|index| UniverseIndex { counter: index })
    }
    /// Given a "canonical universe" -- one found in the
    /// `u_canonicalize` result -- returns the original universe that
    /// it corresponded to.
    fn map_universe_from_canonical(&self, universe: UniverseIndex) -> UniverseIndex {
        if universe.counter < self.universes.len() {
            self.universes[universe.counter]
        } else {
            // If this universe is out of bounds, we assume an
            // implicit `forall` binder, effectively, and map to a
            // "big enough" universe in the original space. See
            // comments on `map_from_canonical` for a detailed
            // explanation.
            let difference = universe.counter - self.universes.len();
            // NOTE(review): `unwrap` assumes the map is non-empty when an
            // out-of-bounds counter arrives — presumably the root universe
            // is always recorded; confirm this invariant.
            let max_counter = self.universes.last().unwrap().counter;
            let new_counter = max_counter + difference + 1;
            UniverseIndex {
                counter: new_counter,
            }
        }
    }
    /// Returns a mapped version of `value` where the universes have
    /// been translated from canonical universes into the original
    /// universes.
    ///
    /// In some cases, `value` may contain fresh universes that are
    /// not described in the original map. This occurs when we return
    /// region constraints -- for example, if we were to process a
    /// constraint like `for<'a> 'a == 'b`, where `'b` is an inference
    /// variable, that would generate a region constraint that `!2 ==
    /// ?0`. (This constraint is typically not, as it happens,
    /// satisfiable, but it may be, depending on the bounds on `!2`.)
    /// In effect, there is a "for all" binder around the constraint,
    /// but it is not represented explicitly -- only implicitly, by
    /// the presence of a U2 variable.
    ///
    /// If we encounter universes like this, which are "out of bounds"
    /// from our original set of universes, we map them to a distinct
    /// universe in the original space that is greater than all the
    /// other universes in the map. That is, if we encounter a
    /// canonical universe `Ux` where our canonical vector is (say)
    /// `[U0, U3]`, we would compute the difference `d = x - 2` and
    /// then return the universe `3 + d + 1`.
    ///
    /// The important thing is that we preserve (a) the relative order
    /// of universes, since that determines visibility, and (b) that
    /// the universe we produce does not correspond to any of the
    /// other original universes.
    fn map_from_canonical<T, I>(
        &self,
        interner: &I,
        canonical_value: &Canonical<T>,
    ) -> Canonical<T::Result>
    where
        T: Clone + Fold<I> + HasInterner<Interner = I>,
        T::Result: HasInterner<Interner = I>,
        I: Interner,
    {
        debug_span!("map_from_canonical", ?canonical_value, universes = ?self.universes);
        // Translate the universes carried by the binders...
        let binders = canonical_value
            .binders
            .iter(interner)
            .map(|cvk| cvk.map_ref(|&universe| self.map_universe_from_canonical(universe)));
        // ...and then the placeholders inside the value itself.
        let value = canonical_value
            .value
            .clone()
            .fold_with(
                &mut UMapFromCanonical {
                    interner,
                    universes: self,
                },
                DebruijnIndex::INNERMOST,
            )
            .unwrap();
        Canonical {
            binders: CanonicalVarKinds::from_iter(interner, binders),
            value,
        }
    }
}
/// The `UCollector` is a "no-op" in terms of the value, but along the
/// way it collects all universes that were found into a vector.
struct UCollector<'q, 'i, I> {
    universes: &'q mut UniverseMap,
    interner: &'i I,
}
impl<'i, I: Interner> Visitor<'i, I> for UCollector<'_, 'i, I>
where
    I: 'i,
{
    // The visitor never breaks early, so the break type carries no data.
    type BreakTy = ();
    fn as_dyn(&mut self) -> &mut dyn Visitor<'i, I, BreakTy = Self::BreakTy> {
        self
    }
    // Every free placeholder contributes its universe to the map.
    fn visit_free_placeholder(
        &mut self,
        universe: PlaceholderIndex,
        _outer_binder: DebruijnIndex,
    ) -> ControlFlow<()> {
        self.universes.add(universe.ui);
        ControlFlow::CONTINUE
    }
    // The input is expected to be canonicalized already, so any remaining
    // inference variable is treated as an error by the visit machinery.
    fn forbid_inference_vars(&self) -> bool {
        true
    }
    fn interner(&self) -> &'i I {
        self.interner
    }
}
/// Folder that rewrites each free placeholder's universe into its
/// compressed ("canonical") counterpart, as recorded in `universes`.
/// Types, lifetimes and consts are all handled the same way.
struct UMapToCanonical<'q, I> {
    interner: &'q I,
    universes: &'q UniverseMap,
}
impl<'i, I: Interner> Folder<'i, I> for UMapToCanonical<'i, I>
where
    I: 'i,
{
    fn as_dyn(&mut self) -> &mut dyn Folder<'i, I> {
        self
    }
    // Inference variables must have been canonicalized away before this
    // folder runs.
    fn forbid_inference_vars(&self) -> bool {
        true
    }
    fn fold_free_placeholder_ty(
        &mut self,
        universe0: PlaceholderIndex,
        _outer_binder: DebruijnIndex,
    ) -> Fallible<Ty<I>> {
        // Panics if `UCollector` did not record this universe first.
        let ui = self
            .universes
            .map_universe_to_canonical(universe0.ui)
            .expect("Expected UCollector to encounter this universe");
        Ok(PlaceholderIndex {
            ui,
            idx: universe0.idx,
        }
        .to_ty(self.interner()))
    }
    fn fold_free_placeholder_lifetime(
        &mut self,
        universe0: PlaceholderIndex,
        _outer_binder: DebruijnIndex,
    ) -> Fallible<Lifetime<I>> {
        let universe = self
            .universes
            .map_universe_to_canonical(universe0.ui)
            .expect("Expected UCollector to encounter this universe");
        Ok(PlaceholderIndex {
            ui: universe,
            idx: universe0.idx,
        }
        .to_lifetime(self.interner()))
    }
    fn fold_free_placeholder_const(
        &mut self,
        ty: Ty<I>,
        universe0: PlaceholderIndex,
        _outer_binder: DebruijnIndex,
    ) -> Fallible<Const<I>> {
        let universe = self
            .universes
            .map_universe_to_canonical(universe0.ui)
            .expect("Expected UCollector to encounter this universe");
        Ok(PlaceholderIndex {
            ui: universe,
            idx: universe0.idx,
        }
        .to_const(self.interner(), ty.clone()))
    }
    fn interner(&self) -> &'i I {
        self.interner
    }
}
/// Folder that maps placeholder universes in a canonical value back into
/// the original universe space — the inverse of [`UMapToCanonical`].
///
/// NOTE(review): unlike `UMapToCanonical`, there is no
/// `fold_free_placeholder_const` override here, so const placeholders take
/// the default folding path — confirm this is intended.
struct UMapFromCanonical<'q, I> {
    interner: &'q I,
    universes: &'q UniverseMap,
}
impl<'i, I: Interner> Folder<'i, I> for UMapFromCanonical<'i, I>
where
    I: 'i,
{
    fn as_dyn(&mut self) -> &mut dyn Folder<'i, I> {
        self
    }
    fn fold_free_placeholder_ty(
        &mut self,
        universe0: PlaceholderIndex,
        _outer_binder: DebruijnIndex,
    ) -> Fallible<Ty<I>> {
        // Out-of-bounds canonical universes map to a fresh "big enough"
        // original universe — see `map_universe_from_canonical`.
        let ui = self.universes.map_universe_from_canonical(universe0.ui);
        Ok(PlaceholderIndex {
            ui,
            idx: universe0.idx,
        }
        .to_ty(self.interner()))
    }
    fn fold_free_placeholder_lifetime(
        &mut self,
        universe0: PlaceholderIndex,
        _outer_binder: DebruijnIndex,
    ) -> Fallible<Lifetime<I>> {
        let universe = self.universes.map_universe_from_canonical(universe0.ui);
        Ok(PlaceholderIndex {
            ui: universe,
            idx: universe0.idx,
        }
        .to_lifetime(self.interner()))
    }
    fn forbid_inference_vars(&self) -> bool {
        true
    }
    fn interner(&self) -> &'i I {
        self.interner
    }
}
| 30.816384 | 94 | 0.577505 |
feab46216724d685c9da70b6162a9bdfcf8bcd08 | 591 | #![allow(unused_imports)]
use super::*;
use wasm_bindgen::prelude::*;
#[wasm_bindgen]
extern "C" {
# [ wasm_bindgen ( extends = :: js_sys :: Object , js_name = WorkletGlobalScope , typescript_type = "WorkletGlobalScope" ) ]
#[derive(Debug, Clone, PartialEq, Eq)]
#[doc = "The `WorkletGlobalScope` class."]
#[doc = ""]
#[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/WorkletGlobalScope)"]
#[doc = ""]
#[doc = "*This API requires the following crate features to be activated: `WorkletGlobalScope`*"]
pub type WorkletGlobalScope;
}
| 39.4 | 128 | 0.664975 |
fe97fb0b1f6cf12aebf08e70b23aa662d78526a6 | 2,690 | #![no_std]
#![no_main]
#![feature(trait_alias)]
#![feature(min_type_alias_impl_trait)]
#![feature(impl_trait_in_bindings)]
#![feature(type_alias_impl_trait)]
#[path = "../example_common.rs"]
mod example_common;
use embassy_stm32::gpio::{Input, Level, NoPin, Output, Pull};
use embedded_hal::digital::v2::{InputPin, OutputPin};
use example_common::*;
use cortex_m_rt::entry;
//use stm32f4::stm32f429 as pac;
use cortex_m::delay::Delay;
use embassy_stm32::adc::{Adc, Resolution};
use embassy_stm32::dac::{Channel, Dac, Value};
use embassy_stm32::spi::{ByteOrder, Config, Spi, MODE_0};
use embassy_stm32::time::Hertz;
use embedded_hal::blocking::spi::Transfer;
use micromath::F32Ext;
use stm32l4::stm32l4x5 as pac;
use stm32l4xx_hal::gpio::PA4;
use stm32l4xx_hal::rcc::PllSource;
use stm32l4xx_hal::{prelude::*, rcc};
#[entry]
fn main() -> ! {
    // Example entry point: bring up the clocks via the stm32l4xx HAL,
    // poke a few RCC/DBGMCU registers directly, then poll ADC channel PC0
    // forever, logging each 8-bit sample.
    info!("Hello World, dude!");
    //let pp = pac::Peripherals::take().unwrap();
    let cp = cortex_m::Peripherals::take().unwrap();
    let pp = stm32l4xx_hal::stm32::Peripherals::take().unwrap();
    let mut flash = pp.FLASH.constrain();
    let mut rcc = pp.RCC.constrain();
    let mut pwr = pp.PWR.constrain(&mut rcc.apb1r1);
    let mut delay = Delay::new(cp.SYST, 80_000_000);
    // TRY the other clock configuration
    // let clocks = rcc.cfgr.freeze(&mut flash.acr);
    // 80 MHz sysclk/pclk1/pclk2 derived from the HSI16 oscillator via PLL.
    let clocks = rcc
        .cfgr
        .sysclk(80.mhz())
        .pclk1(80.mhz())
        .pclk2(80.mhz())
        .pll_source(PllSource::HSI16)
        .freeze(&mut flash.acr, &mut pwr);
    // NOTE(review): steals the raw PAC peripherals a second time (the HAL
    // already owns them) — assumed acceptable for this example; confirm.
    let pp = unsafe { pac::Peripherals::steal() };
    pp.RCC.ccipr.modify(|_, w| {
        unsafe {
            // ADC clock source select — presumably the system clock
            // (0b11); verify against the reference manual.
            w.adcsel().bits(0b11);
        }
        w
    });
    // Keep debug connectivity alive in sleep/standby/stop modes.
    pp.DBGMCU.cr.modify(|_, w| {
        w.dbg_sleep().set_bit();
        w.dbg_standby().set_bit();
        w.dbg_stop().set_bit()
    });
    // Enable the ADC and all GPIO port clocks on AHB2.
    pp.RCC.ahb2enr.modify(|_, w| {
        w.adcen().set_bit();
        w.gpioaen().set_bit();
        w.gpioben().set_bit();
        w.gpiocen().set_bit();
        w.gpioden().set_bit();
        w.gpioeen().set_bit();
        w.gpiofen().set_bit();
        w
    });
    let p = embassy_stm32::init(Default::default());
    // `delay` is consumed by the ADC constructor and handed back (shadowed).
    let (mut adc, mut delay) = Adc::new(p.ADC1, delay);
    //adc.enable_vref();
    adc.set_resolution(Resolution::EightBit);
    let mut channel = p.PC0;
    // Poll the channel forever; samples are 0..=255 at 8-bit resolution.
    loop {
        let v = adc.read(&mut channel);
        info!("--> {}", v);
    }
}
/// Map a byte `v` onto one period of a sine-shaped byte value.
///
/// Inputs `128..=255` cover the first (positive) half-wave and `0..=127`
/// the second (negative) half-wave; results are centred on 127. The angle
/// uses `3.14` as an approximation of pi, matching the DAC example's
/// original lookup behaviour.
fn to_sine_wave(v: u8) -> u8 {
    // Phase in radians for the requested sample position.
    let phase = if v >= 128 {
        // top half
        3.14 * ((v - 128) as f32 / 128.0)
    } else {
        // bottom half
        3.14 + 3.14 * (v as f32 / 128.0)
    };
    // Scale [-1, 1] onto a byte centred at 127; the `as u8` cast
    // saturates at the 0/255 boundaries.
    (phase.sin() * 128.0 + 127.0) as u8
}
| 26.633663 | 64 | 0.583271 |
03a26f5686685ef71e969915ccc4f603ab897f48 | 209 | #![no_std]
#![deny(unsafe_code)]
#[macro_use]
extern crate static_assertions;
assert_type_eq_all!([u8], [u8]);
#[allow(dead_code)]
type X = u8;
mod m {
assert_type_eq_all!(super::X, u8, (super::X));
}
| 13.933333 | 50 | 0.655502 |
89bbbb6b1669cc0d1b50b988880e76bcd64f18ff | 2,301 | // Copyright 2019 Intel Corporation. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
//
// Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
//
// Portions Copyright 2017 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE-BSD-3-Clause file.
use std::fmt::{Display, Formatter};
use std::io;
use std::result;
use libc::__errno_location;
/// An error number, retrieved from [`errno`](http://man7.org/linux/man-pages/man3/errno.3.html),
/// set by a libc function that returned an error.
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct Error(i32);
pub type Result<T> = result::Result<T, Error>;
impl Error {
    /// Constructs a new error with the given `errno`.
    pub fn new(e: i32) -> Error {
        Error(e)
    }
    /// Constructs an error from the current `errno`.
    ///
    /// The result of this only has any meaning just after a libc call that returned a value
    /// indicating `errno` was set.
    pub fn last() -> Error {
        // SAFETY: `__errno_location` returns a valid pointer to the calling
        // thread's errno storage, which is always readable.
        Error(unsafe { *__errno_location() })
    }
    /// Gets the `errno` for this error.
    pub fn errno(self) -> i32 {
        self.0
    }
}
/// Formats the error using the OS error message for this errno,
/// delegating to `std::io::Error`'s `Display`.
impl Display for Error {
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
        io::Error::from_raw_os_error(self.0).fmt(f)
    }
}
/// Converts an `io::Error` into this errno-based `Error`.
///
/// NOTE(review): an `io::Error` that is not backed by an OS error maps to
/// errno 0 here (via `unwrap_or_default`) — callers should not interpret
/// that as success.
impl From<io::Error> for Error {
    fn from(e: io::Error) -> Self {
        Error::new(e.raw_os_error().unwrap_or_default())
    }
}
/// Returns the last `errno` as a [`Result`] that is always an error.
///
/// [`Result`]: type.Result.html
pub fn errno_result<T>() -> Result<T> {
    Err(Error::last())
}
#[cfg(test)]
mod tests {
use super::*;
use libc;
use std::fs::File;
use std::io::{self, Write};
use std::os::unix::io::FromRawFd;
#[test]
pub fn test_invalid_fd() {
let mut file = unsafe { File::from_raw_fd(-1) };
assert!(file.write(b"test").is_err());
let last_err = errno_result::<i32>().unwrap_err();
assert_eq!(last_err, Error::new(libc::EBADF));
assert_eq!(last_err.errno(), libc::EBADF);
assert_eq!(last_err, Error::from(io::Error::last_os_error()));
assert_eq!(last_err, Error::last());
}
}
| 28.407407 | 97 | 0.627553 |
3306afc6c19746785b0c5db6e457f33a37ddbfda | 15,383 | // Copyright 2018 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Utilities to check the status of all the outputs we have stored in
//! the wallet storage and update them.
use std::collections::HashMap;
use uuid::Uuid;
use crate::error::Error;
use crate::grin_core::consensus::reward;
use crate::grin_core::core::{Output, TxKernel};
use crate::grin_core::global;
use crate::grin_core::libtx::proof::ProofBuilder;
use crate::grin_core::libtx::reward;
use crate::grin_keychain::{Identifier, Keychain, SwitchCommitmentType};
use crate::grin_util as util;
use crate::grin_util::secp::key::SecretKey;
use crate::grin_util::secp::pedersen;
use crate::internal::keys;
use crate::types::{
NodeClient, OutputData, OutputStatus, TxLogEntry, TxLogEntryType, WalletBackend, WalletInfo,
};
use crate::{BlockFees, CbData, OutputCommitMapping};
/// Retrieve all of the outputs (doesn't attempt to update from node).
///
/// * `show_spent` - include outputs already marked `OutputStatus::Spent`
/// * `tx_id` - if set, only outputs belonging to that transaction log entry
/// * `parent_key_id` - if set, only outputs derived from that parent key
///
/// Results are sorted by derivation index (`n_child`) and paired with their
/// commitments; a commitment not cached on the output is recomputed from
/// the keychain.
pub fn retrieve_outputs<'a, T: ?Sized, C, K>(
    wallet: &mut T,
    keychain_mask: Option<&SecretKey>,
    show_spent: bool,
    tx_id: Option<u32>,
    parent_key_id: Option<&Identifier>,
) -> Result<Vec<OutputCommitMapping>, Error>
where
    T: WalletBackend<'a, C, K>,
    C: NodeClient + 'a,
    K: Keychain + 'a,
{
    // just read the wallet here, no need for a write lock
    let mut outputs = wallet
        .iter()
        .filter(|out| show_spent || out.status != OutputStatus::Spent)
        .collect::<Vec<_>>();
    // only include outputs with a given tx_id if provided
    // (in-place `retain` avoids the re-collect + clone of the original code)
    if let Some(id) = tx_id {
        outputs.retain(|out| out.tx_log_entry == Some(id));
    }
    // only include outputs rooted at the given parent key if provided
    if let Some(k) = parent_key_id {
        outputs.retain(|o| o.root_key_id == *k);
    }
    outputs.sort_by_key(|out| out.n_child);
    let keychain = wallet.keychain(keychain_mask)?;
    let res = outputs
        .into_iter()
        .map(|output| {
            let commit = match output.commit.clone() {
                Some(c) => pedersen::Commitment::from_vec(util::from_hex(c).unwrap()),
                None => keychain
                    .commit(output.value, &output.key_id, &SwitchCommitmentType::Regular)
                    .unwrap(), // TODO: proper support for different switch commitment schemes
            };
            OutputCommitMapping { output, commit }
        })
        .collect();
    Ok(res)
}
/// Retrieve all of the transaction entries, or a particular entry.
/// If `parent_key_id` is set, only return entries from that key.
///
/// * `tx_id` / `tx_slate_id` - restrict the result to a single entry
/// * `outstanding_only` - only unconfirmed sent/received transactions
///
/// Entries are returned sorted by creation timestamp.
pub fn retrieve_txs<'a, T: ?Sized, C, K>(
    wallet: &mut T,
    tx_id: Option<u32>,
    tx_slate_id: Option<Uuid>,
    parent_key_id: Option<&Identifier>,
    outstanding_only: bool,
) -> Result<Vec<TxLogEntry>, Error>
where
    T: WalletBackend<'a, C, K>,
    C: NodeClient + 'a,
    K: Keychain + 'a,
{
    let mut txs: Vec<TxLogEntry> = wallet
        .tx_log_iter()
        .filter(|tx_entry| {
            // A `None` filter matches everything.
            let f_pk = parent_key_id.map_or(true, |k| tx_entry.parent_key_id == *k);
            let f_tx_id = tx_id.map_or(true, |i| tx_entry.id == i);
            let f_txs = tx_slate_id.map_or(true, |t| tx_entry.tx_slate_id == Some(t));
            // "Outstanding" means an unconfirmed send or receive.
            let f_outstanding = !outstanding_only
                || (!tx_entry.confirmed
                    && (tx_entry.tx_type == TxLogEntryType::TxReceived
                        || tx_entry.tx_type == TxLogEntryType::TxSent));
            f_pk && f_tx_id && f_txs && f_outstanding
        })
        .collect();
    txs.sort_by_key(|tx| tx.creation_ts);
    Ok(txs)
}
/// Refreshes the outputs in a wallet with the latest information
/// from a node
///
/// Queries the node for its current chain height, then reconciles the
/// wallet's output set against it. `update_all` forces a re-check of every
/// unspent output instead of only those tied to outstanding transactions.
pub fn refresh_outputs<'a, T: ?Sized, C, K>(
    wallet: &mut T,
    keychain_mask: Option<&SecretKey>,
    parent_key_id: &Identifier,
    update_all: bool,
) -> Result<(), Error>
where
    T: WalletBackend<'a, C, K>,
    C: NodeClient + 'a,
    K: Keychain + 'a,
{
    let height = wallet.w2n_client().get_chain_height()?;
    refresh_output_state(wallet, keychain_mask, height, parent_key_id, update_all)?;
    Ok(())
}
/// Build a local map of wallet outputs keyed by commit -- this is the list
/// of outputs we want to query the node for.
///
/// When `update_all` is false, only outputs involved in an outstanding
/// (unconfirmed sent/received) transaction are included.
pub fn map_wallet_outputs<'a, T: ?Sized, C, K>(
    wallet: &mut T,
    keychain_mask: Option<&SecretKey>,
    parent_key_id: &Identifier,
    update_all: bool,
) -> Result<HashMap<pedersen::Commitment, (Identifier, Option<u64>)>, Error>
where
    T: WalletBackend<'a, C, K>,
    C: NodeClient + 'a,
    K: Keychain + 'a,
{
    let mut wallet_outputs: HashMap<pedersen::Commitment, (Identifier, Option<u64>)> =
        HashMap::new();
    let keychain = wallet.keychain(keychain_mask)?;
    // All not-yet-spent outputs belonging to this account.
    let unspents: Vec<OutputData> = wallet
        .iter()
        .filter(|x| x.root_key_id == *parent_key_id && x.status != OutputStatus::Spent)
        .collect();
    let tx_entries = retrieve_txs(wallet, None, None, Some(&parent_key_id), true)?;
    // Unless a full update was requested, only select outputs that are
    // actually involved in an outstanding transaction.
    let unspents: Vec<OutputData> = if update_all {
        unspents
    } else {
        unspents
            .into_iter()
            .filter(|x| match x.tx_log_entry.as_ref() {
                Some(t) => tx_entries.iter().any(|te| te.id == *t),
                None => true,
            })
            .collect()
    };
    for out in unspents {
        // Use the cached commitment when present; otherwise recompute it.
        let commit = match out.commit.clone() {
            Some(c) => pedersen::Commitment::from_vec(util::from_hex(c).unwrap()),
            None => keychain
                .commit(out.value, &out.key_id, &SwitchCommitmentType::Regular)
                .unwrap(), // TODO: proper support for different switch commitment schemes
        };
        wallet_outputs.insert(commit, (out.key_id.clone(), out.mmr_index));
    }
    Ok(wallet_outputs)
}
/// Cancel transaction and associated outputs
///
/// Unconfirmed outputs are deleted outright, locked outputs are released
/// back to `Unspent`, and the log entry itself is rewritten with the
/// matching `...Cancelled` type. All changes commit in a single batch.
pub fn cancel_tx_and_outputs<'a, T: ?Sized, C, K>(
    wallet: &mut T,
    keychain_mask: Option<&SecretKey>,
    tx: TxLogEntry,
    outputs: Vec<OutputData>,
    parent_key_id: &Identifier,
) -> Result<(), Error>
where
    T: WalletBackend<'a, C, K>,
    C: NodeClient + 'a,
    K: Keychain + 'a,
{
    let mut batch = wallet.batch(keychain_mask)?;
    for mut o in outputs {
        // An unconfirmed output never made it on-chain; remove it entirely.
        if o.status == OutputStatus::Unconfirmed {
            batch.delete(&o.key_id, &o.mmr_index)?;
        }
        // A locked output was reserved for this tx; release it for reuse.
        if o.status == OutputStatus::Locked {
            o.status = OutputStatus::Unspent;
            batch.save(o)?;
        }
    }
    // Mark the log entry as cancelled, preserving its direction.
    let mut tx = tx.clone();
    if tx.tx_type == TxLogEntryType::TxSent {
        tx.tx_type = TxLogEntryType::TxSentCancelled;
    }
    if tx.tx_type == TxLogEntryType::TxReceived {
        tx.tx_type = TxLogEntryType::TxReceivedCancelled;
    }
    batch.save_tx_log_entry(tx, parent_key_id)?;
    batch.commit()?;
    Ok(())
}
/// Apply refreshed API output data to the wallet
///
/// For every wallet output the node reported on, updates its status
/// (unspent/spent), records coinbase confirmations in the tx log, and
/// finally bumps the wallet's last-confirmed height. Everything happens in
/// a single batch that is committed at the end.
pub fn apply_api_outputs<'a, T: ?Sized, C, K>(
    wallet: &mut T,
    keychain_mask: Option<&SecretKey>,
    wallet_outputs: &HashMap<pedersen::Commitment, (Identifier, Option<u64>)>,
    api_outputs: &HashMap<pedersen::Commitment, (String, u64, u64)>,
    height: u64,
    parent_key_id: &Identifier,
) -> Result<(), Error>
where
    T: WalletBackend<'a, C, K>,
    C: NodeClient + 'a,
    K: Keychain + 'a,
{
    // now for each commit, find the output in the wallet and the corresponding
    // api output (if it exists) and refresh it in-place in the wallet.
    // Note: minimizing the time we spend holding the wallet lock.
    {
        let last_confirmed_height = wallet.last_confirmed_height()?;
        // If the server height is less than our confirmed height, don't apply
        // these changes as the chain is syncing, incorrect or forking
        if height < last_confirmed_height {
            warn!(
                "Not updating outputs as the height of the node's chain \
                 is less than the last reported wallet update height."
            );
            warn!("Please wait for sync on node to complete or fork to resolve and try again.");
            return Ok(());
        }
        let mut batch = wallet.batch(keychain_mask)?;
        for (commit, (id, mmr_index)) in wallet_outputs.iter() {
            if let Ok(mut output) = batch.get(id, mmr_index) {
                match api_outputs.get(&commit) {
                    Some(o) => {
                        // if this is a coinbase tx being confirmed, it's recordable in tx log
                        if output.is_coinbase && output.status == OutputStatus::Unconfirmed {
                            let log_id = batch.next_tx_log_id(parent_key_id)?;
                            let mut t = TxLogEntry::new(
                                parent_key_id.clone(),
                                TxLogEntryType::ConfirmedCoinbase,
                                log_id,
                            );
                            t.confirmed = true;
                            t.amount_credited = output.value;
                            t.amount_debited = 0;
                            t.num_outputs = 1;
                            t.update_confirmation_ts();
                            output.tx_log_entry = Some(log_id);
                            batch.save_tx_log_entry(t, &parent_key_id)?;
                        }
                        // also mark the transaction in which this output is involved as confirmed
                        // note that one involved input/output confirmation SHOULD be enough
                        // to reliably confirm the tx
                        if !output.is_coinbase && output.status == OutputStatus::Unconfirmed {
                            let tx = batch.tx_log_iter().find(|t| {
                                Some(t.id) == output.tx_log_entry
                                    && t.parent_key_id == *parent_key_id
                            });
                            if let Some(mut t) = tx {
                                t.update_confirmation_ts();
                                t.confirmed = true;
                                batch.save_tx_log_entry(t, &parent_key_id)?;
                            }
                        }
                        // Known to the node: record its height and mark unspent.
                        output.height = o.1;
                        output.mark_unspent();
                    }
                    // Absent from the node's UTXO set: it has been spent.
                    None => output.mark_spent(),
                };
                batch.save(output)?;
            }
        }
        {
            batch.save_last_confirmed_height(parent_key_id, height)?;
        }
        batch.commit()?;
    }
    Ok(())
}
/// Builds a single api query to retrieve the latest output data from the
/// node, so we can refresh the local wallet outputs, then prunes stale
/// unconfirmed coinbase outputs.
fn refresh_output_state<'a, T: ?Sized, C, K>(
    wallet: &mut T,
    keychain_mask: Option<&SecretKey>,
    height: u64,
    parent_key_id: &Identifier,
    update_all: bool,
) -> Result<(), Error>
where
    T: WalletBackend<'a, C, K>,
    C: NodeClient + 'a,
    K: Keychain + 'a,
{
    debug!("Refreshing wallet outputs");
    // build a local map of wallet outputs keyed by commit
    // and a list of outputs we want to query the node for
    let wallet_outputs = map_wallet_outputs(wallet, keychain_mask, parent_key_id, update_all)?;
    // `.cloned()` replaces the explicit `.map(|commit| commit.clone())`
    let wallet_output_keys = wallet_outputs.keys().cloned().collect();
    let api_outputs = wallet
        .w2n_client()
        .get_outputs_from_node(wallet_output_keys)?;
    apply_api_outputs(
        wallet,
        keychain_mask,
        &wallet_outputs,
        &api_outputs,
        height,
        parent_key_id,
    )?;
    clean_old_unconfirmed(wallet, keychain_mask, height)?;
    Ok(())
}
/// Delete coinbase outputs that are still unconfirmed more than 50 blocks
/// after their recorded height.
fn clean_old_unconfirmed<'a, T: ?Sized, C, K>(
    wallet: &mut T,
    keychain_mask: Option<&SecretKey>,
    height: u64,
) -> Result<(), Error>
where
    T: WalletBackend<'a, C, K>,
    C: NodeClient + 'a,
    K: Keychain + 'a,
{
    // Nothing can be 50 blocks stale until the chain is at least that tall.
    if height < 50 {
        return Ok(());
    }
    let cutoff = height - 50;
    // Collect the ids first so the read iterator is finished before we
    // open a write batch.
    let ids_to_del: Vec<_> = wallet
        .iter()
        .filter(|out| {
            out.status == OutputStatus::Unconfirmed
                && out.height > 0
                && out.height < cutoff
                && out.is_coinbase
        })
        .map(|out| out.key_id.clone())
        .collect();
    let mut batch = wallet.batch(keychain_mask)?;
    for id in ids_to_del {
        batch.delete(&id, &None)?;
    }
    batch.commit()?;
    Ok(())
}
/// Retrieve summary info about the wallet
/// caller should refresh first if desired
///
/// Walks every output belonging to `parent_key_id` and buckets its value
/// into spendable / immature / awaiting-confirmation / awaiting-finalization
/// / locked totals, based on status, coinbase maturity and
/// `minimum_confirmations`.
pub fn retrieve_info<'a, T: ?Sized, C, K>(
    wallet: &mut T,
    parent_key_id: &Identifier,
    minimum_confirmations: u64,
) -> Result<WalletInfo, Error>
where
    T: WalletBackend<'a, C, K>,
    C: NodeClient + 'a,
    K: Keychain + 'a,
{
    let current_height = wallet.last_confirmed_height()?;
    let outputs = wallet
        .iter()
        .filter(|out| out.root_key_id == *parent_key_id);
    // Running totals for each spendability category.
    let mut unspent_total = 0;
    let mut immature_total = 0;
    let mut awaiting_finalization_total = 0;
    let mut unconfirmed_total = 0;
    let mut locked_total = 0;
    for out in outputs {
        match out.status {
            OutputStatus::Unspent => {
                if out.is_coinbase && out.lock_height > current_height {
                    // Coinbase that has not reached its maturity height yet.
                    immature_total += out.value;
                } else if out.num_confirmations(current_height) < minimum_confirmations {
                    // Treat anything less than minimum confirmations as "unconfirmed".
                    unconfirmed_total += out.value;
                } else {
                    unspent_total += out.value;
                }
            }
            OutputStatus::Unconfirmed => {
                // We ignore unconfirmed coinbase outputs completely.
                if !out.is_coinbase {
                    if minimum_confirmations == 0 {
                        unconfirmed_total += out.value;
                    } else {
                        awaiting_finalization_total += out.value;
                    }
                }
            }
            OutputStatus::Locked => {
                locked_total += out.value;
            }
            OutputStatus::Spent => {}
        }
    }
    Ok(WalletInfo {
        last_confirmed_height: current_height,
        minimum_confirmations,
        total: unspent_total + unconfirmed_total + immature_total,
        amount_awaiting_finalization: awaiting_finalization_total,
        amount_awaiting_confirmation: unconfirmed_total,
        amount_immature: immature_total,
        amount_locked: locked_total,
        amount_currently_spendable: unspent_total,
    })
}
/// Build a coinbase output and insert into wallet
///
/// Thin wrapper over [`receive_coinbase`] that repackages the resulting
/// output, kernel and key id into a `CbData` for the caller (e.g. a miner).
pub fn build_coinbase<'a, T: ?Sized, C, K>(
    wallet: &mut T,
    keychain_mask: Option<&SecretKey>,
    block_fees: &BlockFees,
    test_mode: bool,
) -> Result<CbData, Error>
where
    T: WalletBackend<'a, C, K>,
    C: NodeClient + 'a,
    K: Keychain + 'a,
{
    let (out, kern, block_fees) = receive_coinbase(wallet, keychain_mask, block_fees, test_mode)?;
    Ok(CbData {
        output: out,
        kernel: kern,
        key_id: block_fees.key_id,
    })
}
//TODO: Split up the output creation and the wallet insertion
/// Build a coinbase output and the corresponding kernel
///
/// Derives (or re-uses) a key for the coinbase, records the pending
/// unconfirmed output in the wallet, then builds the actual output/kernel
/// pair for the reward computed from the block fees.
pub fn receive_coinbase<'a, T: ?Sized, C, K>(
    wallet: &mut T,
    keychain_mask: Option<&SecretKey>,
    block_fees: &BlockFees,
    test_mode: bool,
) -> Result<(Output, TxKernel, BlockFees), Error>
where
    T: WalletBackend<'a, C, K>,
    C: NodeClient + 'a,
    K: Keychain + 'a,
{
    let height = block_fees.height;
    // Coinbase outputs cannot be spent until they mature.
    let lock_height = height + global::coinbase_maturity();
    let key_id = block_fees.key_id();
    let parent_key_id = wallet.parent_key_id();
    // Re-use the key requested in `block_fees` when it still exists,
    // otherwise fall back to deriving the next available key.
    let key_id = match key_id {
        Some(key_id) => match keys::retrieve_existing_key(wallet, key_id, None) {
            Ok(k) => k.0,
            Err(_) => keys::next_available_key(wallet, keychain_mask)?,
        },
        None => keys::next_available_key(wallet, keychain_mask)?,
    };
    {
        // Now acquire the wallet lock and write the new output.
        let amount = reward(block_fees.fees);
        let commit = wallet.calc_commit_for_cache(keychain_mask, amount, &key_id)?;
        let mut batch = wallet.batch(keychain_mask)?;
        batch.save(OutputData {
            root_key_id: parent_key_id,
            key_id: key_id.clone(),
            n_child: key_id.to_path().last_path_index(),
            mmr_index: None,
            commit: commit,
            value: amount,
            status: OutputStatus::Unconfirmed,
            height: height,
            lock_height: lock_height,
            is_coinbase: true,
            tx_log_entry: None,
        })?;
        batch.commit()?;
    }
    // NOTE(review): both format arguments below are the same `key_id`, so
    // it is logged twice — possibly one was meant to be the derivation path.
    debug!(
        "receive_coinbase: built candidate output - {:?}, {}",
        key_id.clone(),
        key_id,
    );
    let mut block_fees = block_fees.clone();
    block_fees.key_id = Some(key_id.clone());
    debug!("receive_coinbase: {:?}", block_fees);
    let keychain = wallet.keychain(keychain_mask)?;
    let (out, kern) = reward::output(
        &keychain,
        &ProofBuilder::new(&keychain),
        &key_id,
        block_fees.fees,
        test_mode,
    )?;
    Ok((out, kern, block_fees))
}
116d9b9699e144ebfccf45ffb07ef03b3b1538dd | 698 | <?xml version="1.0" encoding="UTF-8"?>
<WebElementEntity>
<description>This object defines "the holidays are coming - peek our collection here" media body</description>
<name>theHolidaysAreComing_media-body</name>
<tag></tag>
<elementGuidId>557e97c1-4b96-426b-8fc5-87002871fba3</elementGuidId>
<selectorCollection>
<entry>
<key>BASIC</key>
<value></value>
</entry>
<entry>
<key>XPATH</key>
<value>//*[@id="comp_00390333"]/div</value>
</entry>
</selectorCollection>
<selectorMethod>XPATH</selectorMethod>
<useRalativeImagePath>false</useRalativeImagePath>
</WebElementEntity>
| 34.9 | 124 | 0.654728 |
d9e77bdffaf49e5279138cc23ccc50ab08158039 | 8,481 | use crate::{FromInner, HandleTrait, Inner, IntoInner};
use std::borrow::Cow;
use std::convert::TryFrom;
use std::ffi::{CStr, CString};
use uv::{
uv_fs_event_getpath, uv_fs_event_init, uv_fs_event_start, uv_fs_event_stop, uv_fs_event_t,
};
bitflags! {
/// Flags for FsEventHandle::start()
pub struct FsEventFlags: u32 {
/// By default, if the fs event watcher is given a directory name, we will watch for all
/// events in that directory. This flags overrides this behavior and makes fs_event report
/// only changes to the directory entry itself. This flag does not affect individual files
/// watched.
///
/// This flag is currently not implemented yet on any backend.
const WATCHENTRY = uv::uv_fs_event_flags_UV_FS_EVENT_WATCH_ENTRY as _;
/// By default FsEventHandle will try to use a kernel interface such as inotify or kqueue
/// to detect events. This may not work on remote file systems such as NFS mounts. This
/// flag makes fs_event fall back to calling stat() on a regular interval.
///
/// This flag is currently not implemented yet on any backend.
const STAT = uv::uv_fs_event_flags_UV_FS_EVENT_STAT as _;
/// By default, event watcher, when watching directory, is not registering (is ignoring)
/// changes in its subdirectories.
///
/// This flag will override this behaviour on platforms that support it.
const RECURSIVE = uv::uv_fs_event_flags_UV_FS_EVENT_RECURSIVE as _;
}
}
bitflags! {
/// Event that caused the FsEventHandle callback to be called.
pub struct FsEvent: u32 {
/// File has been renamed
const RENAME = uv::uv_fs_event_UV_RENAME as _;
/// File has changed
const CHANGE = uv::uv_fs_event_UV_CHANGE as _;
}
}
callbacks! {
pub FsEventCB(
handle: FsEventHandle,
filename: Option<Cow<str>>,
events: FsEvent,
status: crate::Result<u32>
);
}
/// Additional data stored on the handle
#[derive(Default)]
pub(crate) struct FsEventDataFields<'a> {
fs_event_cb: FsEventCB<'a>,
}
/// Callback for uv_fs_event_start
///
/// Trampoline invoked by libuv; it recovers the Rust closure stored in the
/// handle's data and forwards the translated arguments to it.
extern "C" fn uv_fs_event_cb(
    handle: *mut uv_fs_event_t,
    filename: *const std::os::raw::c_char,
    events: std::os::raw::c_int,
    status: std::os::raw::c_int,
) {
    // Per-handle data we stashed on the uv handle; may be null if the
    // handle was never initialized with data.
    let dataptr = crate::Handle::get_data(uv_handle!(handle));
    if !dataptr.is_null() {
        unsafe {
            if let super::FsEventData(d) = &mut (*dataptr).addl {
                // libuv passes a NUL-terminated path, or NULL when it could
                // not determine which file triggered the event.
                let filename = if filename.is_null() {
                    None
                } else {
                    Some(CStr::from_ptr(filename).to_string_lossy())
                };
                // Negative status codes are libuv errors.
                let status = if status < 0 {
                    Err(crate::Error::from_inner(status as uv::uv_errno_t))
                } else {
                    Ok(status as _)
                };
                d.fs_event_cb.call(
                    handle.into_inner(),
                    filename,
                    FsEvent::from_bits_truncate(events as _),
                    status,
                );
            }
        }
    }
}
/// FS Event handles allow the user to monitor a given path for changes, for example, if the file
/// was renamed or there was a generic change in it. This handle uses the best backend for the job
/// on each platform.
///
/// Note: For AIX, the non default IBM bos.ahafs package has to be installed. The AIX Event
/// Infrastructure file system (ahafs) has some limitations:
///
/// * ahafs tracks monitoring per process and is not thread safe. A separate process must be
///   spawned for each monitor for the same event.
/// * Events for file modification (writing to a file) are not received if only the containing
///   folder is watched.
///
/// See documentation for more details.
///
/// The z/OS file system events monitoring infrastructure does not notify of file creation/deletion
/// within a directory that is being monitored. See the IBM Knowledge centre for more details.
#[derive(Clone, Copy)]
pub struct FsEventHandle {
    // Raw libuv handle; allocated in `new()` with `std::alloc::alloc`.
    handle: *mut uv_fs_event_t,
}
impl FsEventHandle {
    /// Create and initialize a fs event handle
    pub fn new(r#loop: &crate::Loop) -> crate::Result<FsEventHandle> {
        // Manually allocate the libuv struct; libuv owns it until `close`.
        let layout = std::alloc::Layout::new::<uv_fs_event_t>();
        let handle = unsafe { std::alloc::alloc(layout) as *mut uv_fs_event_t };
        if handle.is_null() {
            return Err(crate::Error::ENOMEM);
        }
        let ret = unsafe { uv_fs_event_init(r#loop.into_inner(), handle) };
        if ret < 0 {
            // Init failed: free the allocation before surfacing the error.
            unsafe { std::alloc::dealloc(handle as _, layout) };
            return Err(crate::Error::from_inner(ret as uv::uv_errno_t));
        }
        // Attach our Rust-side data (callback slot) to the handle.
        crate::Handle::initialize_data(uv_handle!(handle), super::FsEventData(Default::default()));
        Ok(FsEventHandle { handle })
    }

    /// Start the handle with the given callback, which will watch the specified path for changes.
    ///
    /// Note: Currently the only supported flag is RECURSIVE and only on OSX and Windows.
    pub fn start<CB: Into<FsEventCB<'static>>>(
        &mut self,
        path: &str,
        flags: FsEventFlags,
        cb: CB,
    ) -> Result<(), Box<dyn std::error::Error>> {
        // Fails if `path` contains an interior NUL byte.
        let path = CString::new(path)?;

        // uv_cb is either Some(fs_event_cb) or None
        let cb = cb.into();
        let uv_cb = use_c_callback!(uv_fs_event_cb, cb);

        // cb is either Some(closure) or None - it is saved into data
        let dataptr = crate::Handle::get_data(uv_handle!(self.handle));
        if !dataptr.is_null() {
            if let super::FsEventData(d) = unsafe { &mut (*dataptr).addl } {
                d.fs_event_cb = cb;
            }
        }

        crate::uvret(unsafe { uv_fs_event_start(self.handle, uv_cb, path.as_ptr(), flags.bits()) })
            .map_err(|e| Box::new(e) as _)
    }

    /// Stop the handle, the callback will no longer be called.
    pub fn stop(&mut self) -> crate::Result<()> {
        crate::uvret(unsafe { uv_fs_event_stop(self.handle) })
    }

    /// Get the path being monitored by the handle.
    pub fn getpath(&self) -> crate::Result<String> {
        // retrieve the size of the buffer we need to allocate
        let mut size = 0u64;
        let result = crate::uvret(unsafe {
            uv_fs_event_getpath(self.handle, std::ptr::null_mut(), &mut size as _)
        });
        // ENOBUFS is the expected "tell me the size" response; anything else
        // is a real error.
        if let Err(e) = result {
            if e != crate::Error::ENOBUFS {
                return Err(e);
            }
        }

        // On ENOBUFS, size is the length of the required buffer, *including* the null
        let mut buf: Vec<std::os::raw::c_uchar> = Vec::with_capacity(size as _);
        crate::uvret(unsafe {
            uv_fs_event_getpath(self.handle, buf.as_mut_ptr() as _, &mut size as _)
        })
        .map(|_| {
            // size is the length of the string, *not* including the null
            // NOTE(review): assumes the second call always rewrites `size` to
            // strlen and that strlen + 1 <= the capacity reserved above —
            // looks sound per the libuv contract, but worth confirming.
            unsafe { buf.set_len((size as usize) + 1) };
            unsafe { CStr::from_bytes_with_nul_unchecked(&buf) }
                .to_string_lossy()
                .into_owned()
        })
    }
}
impl FromInner<*mut uv_fs_event_t> for FsEventHandle {
    /// Wrap a raw libuv fs-event pointer in the Rust handle type.
    fn from_inner(handle: *mut uv_fs_event_t) -> FsEventHandle {
        Self { handle }
    }
}
// View the fs-event handle as a generic libuv handle pointer.
impl Inner<*mut uv::uv_handle_t> for FsEventHandle {
    fn inner(&self) -> *mut uv::uv_handle_t {
        uv_handle!(self.handle)
    }
}
// Upcast by value into the generic `Handle` wrapper.
impl From<FsEventHandle> for crate::Handle {
    fn from(fs_event: FsEventHandle) -> crate::Handle {
        crate::Handle::from_inner(Inner::<*mut uv::uv_handle_t>::inner(&fs_event))
    }
}
// Upcast by reference into the generic `Handle` wrapper.
impl crate::ToHandle for FsEventHandle {
    fn to_handle(&self) -> crate::Handle {
        crate::Handle::from_inner(Inner::<*mut uv::uv_handle_t>::inner(self))
    }
}
// Checked downcast from the generic handle; fails unless the handle's
// runtime type is FS_EVENT.
impl TryFrom<crate::Handle> for FsEventHandle {
    type Error = crate::ConversionError;

    fn try_from(handle: crate::Handle) -> Result<Self, Self::Error> {
        let t = handle.get_type();
        if t != crate::HandleType::FS_EVENT {
            Err(crate::ConversionError::new(t, crate::HandleType::FS_EVENT))
        } else {
            Ok((handle.inner() as *mut uv_fs_event_t).into_inner())
        }
    }
}
impl HandleTrait for FsEventHandle {}
impl crate::Loop {
    /// Create and initialize a fs event handle
    pub fn fs_event(&self) -> crate::Result<FsEventHandle> {
        FsEventHandle::new(self)
    }
}
| 35.190871 | 99 | 0.610187 |
d782175e3a04b612e550140731d4da5f2a1cbf41 | 895 | use std::borrow::Borrow;
use crate::{core::Node, proc_context::ProcContext};
/// Node that plays back a prerecorded sample buffer, looping forever.
pub struct BufferPlayback<T, B>
where
    T: 'static + Clone + Default,
    B: Borrow<Vec<T>>,
{
    // Sample storage; anything borrowable as `Vec<T>` (owned or shared).
    buffer: B,
    // Ties the otherwise-unused `T` parameter to the struct.
    _t: std::marker::PhantomData<T>,
}
impl<T, B> BufferPlayback<T, B>
where
T: 'static + Clone + Default,
B: Borrow<Vec<T>>,
{
pub fn new(buffer: B) -> Self {
BufferPlayback {
buffer,
_t: Default::default(),
}
}
}
impl<T, B> Node for BufferPlayback<T, B>
where
    T: 'static + Clone + Default,
    B: Borrow<Vec<T>>,
{
    type Output = T;

    /// Emit the sample at the current playback position.
    #[inline]
    fn proc(&mut self, ctx: &ProcContext) -> Self::Output {
        // Map the current time onto a sample index.
        let s = (ctx.current_time * ctx.sample_rate as f64) as usize;
        let buf = self.buffer.borrow();
        // Wrap the index so playback loops. Note: `% buf.len()` panics
        // (remainder by zero) if the buffer is empty.
        buf[s % buf.len()].clone()
    }

    // No resources to acquire or release around processing.
    fn lock(&mut self, _ctx: &ProcContext) {}
    fn unlock(&mut self) {}
}
| 19.888889 | 69 | 0.560894 |
16ca8c7386f19aa2cf1120be2efeca7679ab0693 | 5,524 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
use std::{
collections::HashMap,
convert::TryInto,
fs::{self, File},
path::Path,
};
use ::ndarray::{Array, ArrayD, Axis};
use image::{FilterType, GenericImageView};
use tvm::runtime::ByteArray;
use tvm::*;
// End-to-end TVM graph-runtime demo: preprocess an image, run a compiled
// ResNet module, and print the top-1 imagenet class.
fn main() {
    let ctx = Context::cpu(0);
    let img = image::open(concat!(env!("CARGO_MANIFEST_DIR"), "/cat.png")).unwrap();
    println!("original image dimensions: {:?}", img.dimensions());
    // for bigger size images, one needs to first resize to 256x256
    // with `img.resize_exact` method and then `image.crop` to 224x224
    let img = img.resize(224, 224, FilterType::Nearest).to_rgb();
    println!("resized image dimensions: {:?}", img.dimensions());
    let mut pixels: Vec<f32> = vec![];
    for pixel in img.pixels() {
        let tmp = pixel.data;
        // normalize the RGB channels using mean, std of imagenet1k
        let tmp = [
            (tmp[0] as f32 - 123.0) / 58.395, // R
            (tmp[1] as f32 - 117.0) / 57.12,  // G
            (tmp[2] as f32 - 104.0) / 57.375, // B
        ];
        for e in &tmp {
            pixels.push(*e);
        }
    }
    // HWC -> CHW layout, as expected by the network.
    let arr = Array::from_shape_vec((224, 224, 3), pixels).unwrap();
    let arr: ArrayD<f32> = arr.permuted_axes([2, 0, 1]).into_dyn();
    // make arr shape as [1, 3, 224, 224] acceptable to resnet
    let arr = arr.insert_axis(Axis(0));
    // create input tensor from rust's ndarray
    let input = NDArray::from_rust_ndarray(&arr, Context::cpu(0), DataType::float(32, 1)).unwrap();
    println!(
        "input size is {:?}",
        input.shape().expect("cannot get the input shape")
    );
    let graph =
        fs::read_to_string(concat!(env!("CARGO_MANIFEST_DIR"), "/deploy_graph.json")).unwrap();
    // load the built module
    let lib = Module::load(&Path::new(concat!(
        env!("CARGO_MANIFEST_DIR"),
        "/deploy_lib.so"
    )))
    .unwrap();
    // get the global TVM graph runtime function
    let runtime_create_fn = Function::get("tvm.graph_runtime.create").unwrap();
    let runtime_create_fn_ret = runtime_create_fn.invoke(vec![
        graph.into(),
        (&lib).into(),
        (&ctx.device_type).into(),
        (&ctx.device_id).into(),
    ]);
    // get graph runtime module
    let graph_runtime_module: Module = runtime_create_fn_ret.unwrap().try_into().unwrap();
    // get the registered `load_params` from runtime module
    let ref load_param_fn = graph_runtime_module
        .get_function("load_params", false)
        .unwrap();
    // parse parameters and convert to TVMByteArray
    let params: Vec<u8> =
        fs::read(concat!(env!("CARGO_MANIFEST_DIR"), "/deploy_param.params")).unwrap();
    let barr = ByteArray::from(&params);
    // load the parameters
    load_param_fn.invoke(vec![(&barr).into()]).unwrap();
    // get the set_input function
    let ref set_input_fn = graph_runtime_module
        .get_function("set_input", false)
        .unwrap();
    set_input_fn
        .invoke(vec!["data".into(), (&input).into()])
        .unwrap();
    // get `run` function from runtime module
    let ref run_fn = graph_runtime_module.get_function("run", false).unwrap();
    // execute the run function. Note that it has no argument
    run_fn.invoke(vec![]).unwrap();
    // prepare to get the output
    let output_shape = &mut [1, 1000];
    let output = NDArray::empty(output_shape, Context::cpu(0), DataType::float(32, 1));
    // get the `get_output` function from runtime module
    let ref get_output_fn = graph_runtime_module
        .get_function("get_output", false)
        .unwrap();
    // execute the get output function
    get_output_fn
        .invoke(vec![(&0).into(), (&output).into()])
        .unwrap();
    // flatten the output as Vec<f32>
    let output = output.to_vec::<f32>().unwrap();
    // find the maximum entry in the output and its index
    // NOTE(review): if every score were <= 0, `argmax` would stay -1 and the
    // `synset.get(&argmax)` lookup below would panic via `expect`; fine for a
    // demo assuming post-softmax probabilities — confirm.
    let mut argmax = -1;
    let mut max_prob = 0.;
    for i in 0..output.len() {
        if output[i] > max_prob {
            max_prob = output[i];
            argmax = i as i32;
        }
    }
    // create a hash map of (class id, class name)
    let mut synset: HashMap<i32, String> = HashMap::new();
    let file = File::open("synset.csv").unwrap();
    let mut rdr = csv::ReaderBuilder::new()
        .has_headers(true)
        .from_reader(file);
    for result in rdr.records() {
        let record = result.unwrap();
        let id: i32 = record[0].parse().unwrap();
        let cls = record[1].to_string();
        synset.insert(id, cls);
    }
    println!(
        "input image belongs to the class `{}` with probability {}",
        synset
            .get(&argmax)
            .expect("cannot find the class id for argmax"),
        max_prob
    );
}
| 36.342105 | 99 | 0.620927 |
bbd452b35acdf7732a5cb04d7708f11e28fa2828 | 2,291 | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Helpers for handling cast expressions, used in both
// typeck and trans.
use middle::ty::{self, Ty};
use syntax::ast;
/// Types that are represented as ints.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum IntTy {
    /// An unsigned integer of the given width.
    U(ast::UintTy),
    /// Any signed integer.
    I,
    /// A C-like enum (cast via its discriminant).
    CEnum,
    /// `bool` (cast via its 0/1 representation).
    Bool,
    /// `char` (cast via its scalar value).
    Char
}
// Valid types for the result of a non-coercion cast
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum CastTy<'tcx> {
    /// Various types that are represented as ints and handled mostly
    /// in the same way, merged for easier matching.
    Int(IntTy),
    /// Floating-Point types (`f32`/`f64`)
    Float,
    /// Function Pointers
    FnPtr,
    /// Raw pointers (`*const T` / `*mut T`)
    Ptr(&'tcx ty::TypeAndMut<'tcx>),
    /// References (`&T` / `&mut T`)
    RPtr(&'tcx ty::TypeAndMut<'tcx>),
}
/// Cast Kind. See RFC 401 (or librustc_typeck/check/cast.rs)
#[derive(Copy, Clone, Debug, RustcEncodable, RustcDecodable)]
pub enum CastKind {
    /// A cast that is also a coercion.
    CoercionCast,
    /// Pointer to pointer.
    PtrPtrCast,
    /// Pointer to address (integer).
    PtrAddrCast,
    /// Address (integer) to pointer.
    AddrPtrCast,
    /// Between numeric types.
    NumericCast,
    /// C-like enum to integer.
    EnumCast,
    /// Primitive int-like (`bool`/`char`) to integer.
    PrimIntCast,
    /// `u8` to `char`.
    U8CharCast,
    /// Array to pointer.
    ArrayPtrCast,
    /// Function pointer to function pointer.
    FnPtrPtrCast,
    /// Function pointer to address.
    FnPtrAddrCast
}
impl<'tcx> CastTy<'tcx> {
    /// Classify `t` as a cast source/target, or `None` if `t` cannot
    /// participate in a primitive cast.
    pub fn from_ty(tcx: &ty::ctxt<'tcx>, t: Ty<'tcx>)
                   -> Option<CastTy<'tcx>> {
        match t.sty {
            ty::TyBool => Some(CastTy::Int(IntTy::Bool)),
            ty::TyChar => Some(CastTy::Int(IntTy::Char)),
            ty::TyInt(_) => Some(CastTy::Int(IntTy::I)),
            ty::TyUint(u) => Some(CastTy::Int(IntTy::U(u))),
            ty::TyFloat(_) => Some(CastTy::Float),
            // Only C-like enums (no data-carrying variants) are castable.
            ty::TyEnum(..) if t.is_c_like_enum(tcx) =>
                Some(CastTy::Int(IntTy::CEnum)),
            ty::TyRawPtr(ref mt) => Some(CastTy::Ptr(mt)),
            ty::TyRef(_, ref mt) => Some(CastTy::RPtr(mt)),
            ty::TyBareFn(..) => Some(CastTy::FnPtr),
            _ => None,
        }
    }
}
| 29.371795 | 69 | 0.611087 |
76ab46aac117c74c2822d1ab2b0f2372442e3425 | 2,864 | use num::Zero;
use crate::math::{AngularInertia, Point};
use crate::volumetric::{InertiaTensor, Volumetric};
use na::{self, RealField};
use ncollide::shape::Compound;
impl<N: RealField> Volumetric<N> for Compound<N> {
    /// Total surface area: the sum of each sub-shape's area.
    fn area(&self) -> N {
        let mut stot: N = na::zero();

        for &(_, ref s) in self.shapes().iter() {
            stot += s.area()
        }

        stot
    }

    /// Total volume: the sum of each sub-shape's volume.
    fn volume(&self) -> N {
        let mut vtot: N = na::zero();

        for &(_, ref s) in self.shapes().iter() {
            vtot += s.volume()
        }

        vtot
    }

    /// Mass-weighted center of the compound; falls back to the accumulated
    /// geometric center when the total mass is zero.
    fn center_of_mass(&self) -> Point<N> {
        let mut mtot = N::zero();
        let mut ctot = Point::origin();
        let mut gtot = Point::origin(); // geometric center.

        let shapes = self.shapes();

        for &(ref m, ref s) in shapes.iter() {
            let (mpart, cpart, _) = s.mass_properties(na::one());

            mtot += mpart;
            ctot += (*m * cpart * mpart).coords;
            gtot += (*m * cpart).coords;
        }

        // NOTE(review): `gtot` is the *sum* of transformed part centers, not
        // divided by the shape count — confirm that is the intended
        // zero-mass fallback.
        if mtot.is_zero() {
            gtot
        } else {
            ctot / mtot
        }
    }

    /// Angular inertia per unit mass, expressed about the center of mass.
    fn unit_angular_inertia(&self) -> AngularInertia<N> {
        let mut itot = AngularInertia::zero();

        let com = self.center_of_mass();
        let shapes = self.shapes();

        for &(ref m, ref s) in shapes.iter() {
            let (mpart, cpart, ipart) = s.mass_properties(na::one());

            // Parallel-axis shift of each part's inertia to the compound COM.
            itot += ipart
                .to_world_space(m)
                .to_relative_wrt_point(mpart, &(*m * cpart + (-com.coords)));
        }

        itot
    }

    /// The mass properties of this `CompoundData`.
    ///
    /// If `density` is not zero, it will be multiplied with the density of every object of the
    /// compound shape.
    fn mass_properties(&self, density: N) -> (N, Point<N>, AngularInertia<N>) {
        let mut itot = AngularInertia::zero();
        let mut ctot = Point::origin();
        let mut gtot = Point::origin(); // geometric center.
        let mut mtot = N::zero();

        let shapes = self.shapes();
        // Compute each part's unit-density properties once and reuse them
        // for both the COM pass and the inertia pass.
        let props: Vec<_> = shapes
            .iter()
            .map(|&(_, ref s)| s.mass_properties(na::one()))
            .collect();

        for (&(ref m, _), &(ref mpart, ref cpart, _)) in shapes.iter().zip(props.iter()) {
            mtot += *mpart;
            ctot += (*m * *cpart * *mpart).coords;
            gtot += (*m * *cpart).coords;
        }

        if mtot.is_zero() {
            ctot = gtot;
        } else {
            ctot /= mtot;
        }

        for (&(ref m, _), &(ref mpart, ref cpart, ref ipart)) in shapes.iter().zip(props.iter()) {
            itot += ipart
                .to_world_space(m)
                .to_relative_wrt_point(*mpart, &(*m * *cpart + (-ctot.coords)));
        }

        (mtot * density, ctot, itot * density)
    }
}
| 27.27619 | 98 | 0.49162 |
75619ea358406f792348edd2be9cd92af8782a67 | 225 | pub mod settings;
pub mod my_file_funcs;
pub mod bird_species;
pub mod bird_species_box;
pub mod bird_species_support;
pub mod bird_sightings;
pub mod bird_sightings_box;
pub mod bird_sightings_supp;
pub mod help;
| 10.714286 | 29 | 0.791111 |
d774522d7d5cf3ddb200c6065a3725dcdfccf693 | 3,331 | // This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files.git)
// DO NOT EDIT
use crate::ContentFormats;
use crate::Device;
use crate::Display;
use crate::Drag;
use crate::DragAction;
use crate::Surface;
use glib::object::ObjectType as ObjectType_;
use glib::signal::connect_raw;
use glib::signal::SignalHandlerId;
use glib::translate::*;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem::transmute;
// GObject wrapper type for `GdkDrop` (generated by gir).
glib::wrapper! {
    #[doc(alias = "GdkDrop")]
    pub struct Drop(Object<ffi::GdkDrop>);

    match fn {
        type_ => || ffi::gdk_drop_get_type(),
    }
}
// Generated bindings: each method is a thin FFI wrapper over the
// corresponding `gdk_drop_*` C function.
impl Drop {
    // Ends the drop operation, reporting which action was taken.
    #[doc(alias = "gdk_drop_finish")]
    pub fn finish(&self, action: DragAction) {
        unsafe {
            ffi::gdk_drop_finish(self.to_glib_none().0, action.into_glib());
        }
    }

    #[doc(alias = "gdk_drop_get_actions")]
    #[doc(alias = "get_actions")]
    pub fn actions(&self) -> DragAction {
        unsafe { from_glib(ffi::gdk_drop_get_actions(self.to_glib_none().0)) }
    }

    #[doc(alias = "gdk_drop_get_device")]
    #[doc(alias = "get_device")]
    pub fn device(&self) -> Device {
        unsafe { from_glib_none(ffi::gdk_drop_get_device(self.to_glib_none().0)) }
    }

    #[doc(alias = "gdk_drop_get_display")]
    #[doc(alias = "get_display")]
    pub fn display(&self) -> Display {
        unsafe { from_glib_none(ffi::gdk_drop_get_display(self.to_glib_none().0)) }
    }

    // May be `None`: the C getter is nullable.
    #[doc(alias = "gdk_drop_get_drag")]
    #[doc(alias = "get_drag")]
    pub fn drag(&self) -> Option<Drag> {
        unsafe { from_glib_none(ffi::gdk_drop_get_drag(self.to_glib_none().0)) }
    }

    #[doc(alias = "gdk_drop_get_formats")]
    #[doc(alias = "get_formats")]
    pub fn formats(&self) -> ContentFormats {
        unsafe { from_glib_none(ffi::gdk_drop_get_formats(self.to_glib_none().0)) }
    }

    #[doc(alias = "gdk_drop_get_surface")]
    #[doc(alias = "get_surface")]
    pub fn surface(&self) -> Surface {
        unsafe { from_glib_none(ffi::gdk_drop_get_surface(self.to_glib_none().0)) }
    }

    #[doc(alias = "gdk_drop_status")]
    pub fn status(&self, actions: DragAction, preferred: DragAction) {
        unsafe {
            ffi::gdk_drop_status(
                self.to_glib_none().0,
                actions.into_glib(),
                preferred.into_glib(),
            );
        }
    }

    // Connects a handler for the GObject `notify::display` signal.
    #[doc(alias = "display")]
    pub fn connect_display_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        // C trampoline that forwards the signal to the boxed Rust closure.
        unsafe extern "C" fn notify_display_trampoline<F: Fn(&Drop) + 'static>(
            this: *mut ffi::GdkDrop,
            _param_spec: glib::ffi::gpointer,
            f: glib::ffi::gpointer,
        ) {
            let f: &F = &*(f as *const F);
            f(&from_glib_borrow(this))
        }
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(
                self.as_ptr() as *mut _,
                b"notify::display\0".as_ptr() as *const _,
                Some(transmute::<_, unsafe extern "C" fn()>(
                    notify_display_trampoline::<F> as *const (),
                )),
                Box_::into_raw(f),
            )
        }
    }
}
impl fmt::Display for Drop {
    /// Formats the type's name, matching the other generated wrappers.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "Drop")
    }
}
| 29.741071 | 91 | 0.575203 |
eff83f480df6f545d34ce2c148afedf1336129f1 | 9,170 | use super::*;
// Default fixture app used by the plain `bash` test below.
fn build_app() -> App<'static> {
    build_app_with_name("myapp")
}
// Base fixture: one positional file arg (with a value hint), one choice
// positional with fixed values, and a `test` subcommand with `--case`.
// NOTE: argument order matters — positionals are matched in declaration order.
fn build_app_with_name(s: &'static str) -> App<'static> {
    App::new(s)
        .version("3.0")
        .propagate_version(true)
        .about("Tests completions")
        .arg(
            Arg::new("file")
                .value_hint(ValueHint::FilePath)
                .help("some input file"),
        )
        .arg(Arg::new("choice").possible_values(["first", "second"]))
        .subcommand(
            App::new("test").about("tests things").arg(
                Arg::new("case")
                    .long("case")
                    .takes_value(true)
                    .help("the case to test"),
            ),
        )
}
// Golden test: generated bash completion for the base app must match BASH.
#[test]
fn bash() {
    let mut app = build_app();
    common(Bash, &mut app, "myapp", BASH);
}
// Golden fixture: expected bash completion script for `build_app()`.
// The raw-string content must stay byte-identical to what clap generates.
static BASH: &str = r#"_myapp() {
    local i cur prev opts cmds
    COMPREPLY=()
    cur="${COMP_WORDS[COMP_CWORD]}"
    prev="${COMP_WORDS[COMP_CWORD-1]}"
    cmd=""
    opts=""

    for i in ${COMP_WORDS[@]}
    do
        case "${i}" in
            "$1")
                cmd="myapp"
                ;;
            help)
                cmd+="__help"
                ;;
            test)
                cmd+="__test"
                ;;
            *)
                ;;
        esac
    done

    case "${cmd}" in
        myapp)
            opts="-h -V --help --version <file> first second test help"
            if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then
                COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
                return 0
            fi
            case "${prev}" in
                *)
                    COMPREPLY=()
                    ;;
            esac
            COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
            return 0
            ;;
        myapp__help)
            opts="<SUBCOMMAND>..."
            if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then
                COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
                return 0
            fi
            case "${prev}" in
                *)
                    COMPREPLY=()
                    ;;
            esac
            COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
            return 0
            ;;
        myapp__test)
            opts="-h -V --case --help --version"
            if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then
                COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
                return 0
            fi
            case "${prev}" in
                --case)
                    COMPREPLY=($(compgen -f "${cur}"))
                    return 0
                    ;;
                *)
                    COMPREPLY=()
                    ;;
            esac
            COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
            return 0
            ;;
    esac
}

complete -F _myapp -o bashdefault -o default myapp
"#;
// Golden test: completion for an app with hyphenated/underscored subcommands.
#[test]
fn bash_with_special_commands() {
    let mut app = build_app_special_commands();
    common(Bash, &mut app, "my_app", BASH_SPECIAL_CMDS);
}
// Extends the base fixture with a `some_cmd` subcommand and a hyphenated
// subcommand (plus an alias) to exercise name-mangling in the generator.
fn build_app_special_commands() -> App<'static> {
    build_app_with_name("my_app")
        .subcommand(
            App::new("some_cmd").about("tests other things").arg(
                Arg::new("config")
                    .long("--config")
                    .takes_value(true)
                    .help("the other case to test"),
            ),
        )
        .subcommand(App::new("some-cmd-with-hyphens").alias("hyphen"))
}
// Golden fixture for `build_app_special_commands()`: note hyphens in
// subcommand names are rewritten to `__` in shell function suffixes.
static BASH_SPECIAL_CMDS: &str = r#"_my_app() {
    local i cur prev opts cmds
    COMPREPLY=()
    cur="${COMP_WORDS[COMP_CWORD]}"
    prev="${COMP_WORDS[COMP_CWORD-1]}"
    cmd=""
    opts=""

    for i in ${COMP_WORDS[@]}
    do
        case "${i}" in
            "$1")
                cmd="my_app"
                ;;
            help)
                cmd+="__help"
                ;;
            some-cmd-with-hyphens)
                cmd+="__some__cmd__with__hyphens"
                ;;
            some_cmd)
                cmd+="__some_cmd"
                ;;
            test)
                cmd+="__test"
                ;;
            *)
                ;;
        esac
    done

    case "${cmd}" in
        my_app)
            opts="-h -V --help --version <file> first second test some_cmd some-cmd-with-hyphens help"
            if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then
                COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
                return 0
            fi
            case "${prev}" in
                *)
                    COMPREPLY=()
                    ;;
            esac
            COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
            return 0
            ;;
        my_app__help)
            opts="<SUBCOMMAND>..."
            if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then
                COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
                return 0
            fi
            case "${prev}" in
                *)
                    COMPREPLY=()
                    ;;
            esac
            COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
            return 0
            ;;
        my_app__some__cmd__with__hyphens)
            opts="-h -V --help --version"
            if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then
                COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
                return 0
            fi
            case "${prev}" in
                *)
                    COMPREPLY=()
                    ;;
            esac
            COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
            return 0
            ;;
        my_app__some_cmd)
            opts="-h -V --config --help --version"
            if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then
                COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
                return 0
            fi
            case "${prev}" in
                --config)
                    COMPREPLY=($(compgen -f "${cur}"))
                    return 0
                    ;;
                *)
                    COMPREPLY=()
                    ;;
            esac
            COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
            return 0
            ;;
        my_app__test)
            opts="-h -V --case --help --version"
            if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then
                COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
                return 0
            fi
            case "${prev}" in
                --case)
                    COMPREPLY=($(compgen -f "${cur}"))
                    return 0
                    ;;
                *)
                    COMPREPLY=()
                    ;;
            esac
            COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
            return 0
            ;;
    esac
}

complete -F _my_app -o bashdefault -o default my_app
"#;
// Golden test: completion for an app whose flags/options carry visible aliases.
#[test]
fn bash_with_aliases() {
    let mut app = build_app_with_aliases();
    common(Bash, &mut app, "cmd", BASH_ALIASES);
}
// Fixture with visible short/long aliases on both a flag and an option,
// so the generator must emit every alias spelling in `opts`.
fn build_app_with_aliases() -> App<'static> {
    App::new("cmd")
        .version("3.0")
        .about("testing bash completions")
        .arg(
            Arg::new("flag")
                .short('f')
                .visible_short_alias('F')
                .long("flag")
                .visible_alias("flg")
                .help("cmd flag"),
        )
        .arg(
            Arg::new("option")
                .short('o')
                .visible_short_alias('O')
                .long("option")
                .visible_alias("opt")
                .help("cmd option")
                .takes_value(true),
        )
        .arg(Arg::new("positional"))
}
// Golden fixture for `build_app_with_aliases()`: every alias of `--option`
// gets its own file-completion branch.
static BASH_ALIASES: &str = r#"_cmd() {
    local i cur prev opts cmds
    COMPREPLY=()
    cur="${COMP_WORDS[COMP_CWORD]}"
    prev="${COMP_WORDS[COMP_CWORD-1]}"
    cmd=""
    opts=""

    for i in ${COMP_WORDS[@]}
    do
        case "${i}" in
            "$1")
                cmd="cmd"
                ;;
            *)
                ;;
        esac
    done

    case "${cmd}" in
        cmd)
            opts="-h -V -F -f -O -o --help --version --flg --flag --opt --option <positional>"
            if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then
                COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
                return 0
            fi
            case "${prev}" in
                --option)
                    COMPREPLY=($(compgen -f "${cur}"))
                    return 0
                    ;;
                --opt)
                    COMPREPLY=($(compgen -f "${cur}"))
                    return 0
                    ;;
                -o)
                    COMPREPLY=($(compgen -f "${cur}"))
                    return 0
                    ;;
                -O)
                    COMPREPLY=($(compgen -f "${cur}"))
                    return 0
                    ;;
                *)
                    COMPREPLY=()
                    ;;
            esac
            COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
            return 0
            ;;
    esac
}

complete -F _cmd -o bashdefault -o default cmd
"#;
874eba4699d20202e9244177a43fd275313631f3 | 1,771 | use engine::Engine;
use search::alpha_beta::AlphaBeta;
use std::error::Error;
use std::io;
use uci::parser::{Parser, ParserMessage};
use uci::uci_in::{go, is_ready, position, quit, stop, uci as cmd_uci, ucinewgame};
use uci::uci_out::{best_move, info};
// Number of index bits for the table handed to `AlphaBeta::new`
// (i.e. 2^20 entries).
const TABLE_IDX_BITS: usize = 20;

/// Wire up the engine with its search algorithm and output callbacks,
/// register all supported UCI commands, then run the stdin command loop
/// until a `quit` command is received.
pub fn run() -> Result<(), Box<dyn Error>> {
    let search_algo = AlphaBeta::new(TABLE_IDX_BITS);
    // Search progress and final move are written straight to stdout in
    // UCI format; write failures are considered fatal.
    let search_info_callback =
        Box::new(move |m| info::write(&mut io::stdout(), m).expect("Error writing search info"));
    let best_move_callback =
        Box::new(move |m| best_move::write(&mut io::stdout(), m).expect("Error writing best move"));
    let mut engine = Engine::new(search_algo, search_info_callback, best_move_callback);

    let writer = Box::new(io::stdout());
    let mut parser = Parser::new(writer);
    // One handler per UCI command keyword.
    parser.register_command(String::from("go"), Box::new(go::run_command));
    parser.register_command(String::from("isready"), Box::new(is_ready::run_command));
    parser.register_command(String::from("position"), Box::new(position::run_command));
    parser.register_command(String::from("quit"), Box::new(quit::run_command));
    parser.register_command(String::from("stop"), Box::new(stop::run_command));
    parser.register_command(String::from("uci"), Box::new(cmd_uci::run_command));
    parser.register_command(
        String::from("ucinewgame"),
        Box::new(ucinewgame::run_command),
    );

    let reader = io::stdin();
    let mut buffer = String::new();
    loop {
        reader.read_line(&mut buffer)?;
        // Command errors are reported but do not terminate the loop;
        // only `quit` breaks out.
        match parser.run_command(&buffer, &mut engine) {
            Ok(Some(ParserMessage::Quit)) => break,
            Err(e) => eprintln!("{}", e),
            _ => {}
        }
        buffer.clear();
    }
    Ok(())
}
| 38.5 | 100 | 0.645398 |
08b432d68d5f355d1099bd9dd3f487642543719b | 1,551 | extern crate dialoguer;
extern crate failure;
extern crate open;
extern crate url;
use std::collections::VecDeque;
use std::io;
use dialoguer::Select;
use failure::Error;
use url::Url;
/// Collects every URL passed on the command line (plus any URLs nested in
/// their query strings), lets the user pick one interactively, and opens it
/// with the platform's default handler.
///
/// Exits with an error if no argument parses as a URL, if the dialog fails,
/// or if the opener reports a non-zero exit status.
fn main() -> Result<(), Error> {
    let mut urls: Vec<Url> = Vec::new();
    let mut queue: VecDeque<Url> = VecDeque::new();
    // Seed the queue with every argument that parses as a URL; report the
    // rest without aborting.
    for arg in std::env::args().skip(1) {
        match Url::parse(&arg) {
            Ok(url) => queue.push_back(url),
            Err(_) => println!("Could not parse {:?} as URL", arg),
        }
    }
    // Breadth-first expansion: query-string values that are themselves URLs
    // (e.g. redirect targets) become additional choices.
    while let Some(url) = queue.pop_front() {
        for (_, value) in url.query_pairs() {
            if let Ok(nested) = Url::parse(&value) {
                queue.push_back(nested);
            }
        }
        urls.push(url);
    }
    if urls.is_empty() {
        eprintln!("No URLs detected");
        return Err(io::Error::from_raw_os_error(1).into());
    }
    let options: Vec<&str> = urls.iter().map(Url::as_str).collect();
    let selected_index = Select::new().items(&options).interact()?;
    match options.get(selected_index) {
        Some(selection) => {
            // Propagate launch failures via `?` (instead of the old
            // `or_else(|e| Err(e.into()))` round-trip) and map a non-zero
            // exit status of the opener to an io::Error carrying that code.
            let status = open::that(selection)?;
            match status.code() {
                Some(0) => Ok(()),
                Some(code) => Err(io::Error::from_raw_os_error(code).into()),
                None => Err(io::Error::from_raw_os_error(1).into()),
            }
        }
        None => Err(io::Error::from_raw_os_error(1).into()),
    }
}
| 28.2 | 74 | 0.522888 |
fe2ec30610e8db81edbf1630f98979d72e3f1e2a | 471 | use coinbase::{ASync, Public, SANDBOX_URL};
#[allow(unused)]
async fn get_time() {
let client: Public<ASync> = Public::new_with_keep_alive(SANDBOX_URL, false);
// if keep_alive is not disables - tokio::run will hold the connection without exiting test
let time = client.get_time().await.unwrap();
assert!(!time.iso.is_empty())
}
#[cfg(test)]
mod tests {
    use super::*;

    // Network test: requires reachability of the Coinbase sandbox API.
    #[tokio::test]
    async fn it_works() {
        get_time().await;
    }
}
c147820f28184e0a16793a4526c2e3d6bc69d6af | 284 | // Copyright 2021 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
pub mod config_data;
pub mod create_system;
pub mod create_update;
pub mod image;
pub mod product;
pub mod size_check;
| 25.818182 | 73 | 0.771127 |
f71f4af10373daadd1b53c672cab64dab3a5501e | 606 | extern crate hyper;
use hyper::header::{ContentLength, ContentType};
use hyper::server::{Http, Response, const_service, service_fn};
// Fixed JSON payload served on every request.
static TEXT: &'static str = r#"{"hello": "world"}"#;

/// Minimal hyper server: binds 127.0.0.1:3000 and answers every request
/// with a constant JSON body.
fn main() {
    let addr = ([127, 0, 0, 1], 3000).into();

    // `const_service` reuses one service instance for all connections.
    let hello = const_service(service_fn(|_req| {
        Ok(
            Response::<hyper::Body>::new()
                .with_header(ContentLength(TEXT.len() as u64))
                .with_header(ContentType::json())
                .with_body(TEXT),
        )
    }));

    let server = Http::new().bind(&addr, hello).unwrap();
    // Blocks forever serving requests.
    server.run().unwrap()
}
| 26.347826 | 63 | 0.564356 |
c1f71fbbfa41ad485012b9942faf2ebf766f6779 | 78,272 | //! This module contains `TyKind` and its major components.
#![allow(rustc::usage_of_ty_tykind)]
use self::TyKind::*;
use crate::infer::canonical::Canonical;
use crate::ty::fold::BoundVarsCollector;
use crate::ty::fold::ValidateBoundVars;
use crate::ty::subst::{GenericArg, InternalSubsts, Subst, SubstsRef};
use crate::ty::InferTy::{self, *};
use crate::ty::{
self, AdtDef, DefIdTree, Discr, Ty, TyCtxt, TypeFlags, TypeFoldable, WithConstness,
};
use crate::ty::{DelaySpanBugEmitted, List, ParamEnv, TyS};
use polonius_engine::Atom;
use rustc_data_structures::captures::Captures;
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_index::vec::Idx;
use rustc_macros::HashStable;
use rustc_span::symbol::{kw, Symbol};
use rustc_target::abi::VariantIdx;
use rustc_target::spec::abi;
use std::borrow::Cow;
use std::cmp::Ordering;
use std::marker::PhantomData;
use std::ops::Range;
use ty::util::IntTypeExt;
/// A type paired with its mutability — the payload of raw-pointer
/// (`RawPtr`) and reference (`Ref`) type kinds.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable, Lift)]
pub struct TypeAndMut<'tcx> {
    pub ty: Ty<'tcx>,
    pub mutbl: hir::Mutability,
}
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, TyEncodable, TyDecodable, Copy)]
#[derive(HashStable)]
/// A "free" region `fr` can be interpreted as "some region
/// at least as big as the scope `fr.scope`".
pub struct FreeRegion {
    /// The scope the region is at least as big as.
    pub scope: DefId,
    /// Which bound region of that scope this corresponds to.
    pub bound_region: BoundRegionKind,
}
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, TyEncodable, TyDecodable, Copy)]
#[derive(HashStable)]
pub enum BoundRegionKind {
    /// An anonymous region parameter for a given fn (&T),
    /// identified by its index.
    BrAnon(u32),

    /// Named region parameters for functions (a in &'a T)
    ///
    /// The `DefId` is needed to distinguish free regions in
    /// the event of shadowing.
    BrNamed(DefId, Symbol),

    /// Anonymous region for the implicit env pointer parameter
    /// to a closure
    BrEnv,
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable, Debug, PartialOrd, Ord)]
#[derive(HashStable)]
pub struct BoundRegion {
    /// The bound-variable index of this region.
    pub var: BoundVar,
    /// What kind of bound region it is (anonymous, named, or closure env).
    pub kind: BoundRegionKind,
}
impl BoundRegionKind {
    /// Returns `true` only for a `BrNamed` region whose name is not the
    /// anonymous `'_` lifetime.
    pub fn is_named(&self) -> bool {
        matches!(*self, BoundRegionKind::BrNamed(_, name) if name != kw::UnderscoreLifetime)
    }
}
/// Defines the kinds of types.
///
/// N.B., if you change this, you'll probably want to change the corresponding
/// AST structure in `rustc_ast/src/ast.rs` as well.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable, Debug)]
#[derive(HashStable)]
#[rustc_diagnostic_item = "TyKind"]
pub enum TyKind<'tcx> {
    /// The primitive boolean type. Written as `bool`.
    Bool,

    /// The primitive character type; holds a Unicode scalar value
    /// (a non-surrogate code point). Written as `char`.
    Char,

    /// A primitive signed integer type. For example, `i32`.
    Int(ty::IntTy),

    /// A primitive unsigned integer type. For example, `u32`.
    Uint(ty::UintTy),

    /// A primitive floating-point type. For example, `f64`.
    Float(ty::FloatTy),

    /// Algebraic data types (ADT). For example: structures, enumerations and unions.
    ///
    /// InternalSubsts here, possibly against intuition, *may* contain `Param`s.
    /// That is, even after substitution it is possible that there are type
    /// variables. This happens when the `Adt` corresponds to an ADT
    /// definition and not a concrete use of it.
    Adt(&'tcx AdtDef, SubstsRef<'tcx>),

    /// An unsized FFI type that is opaque to Rust. Written as `extern type T`.
    Foreign(DefId),

    /// The pointee of a string slice. Written as `str`.
    Str,

    /// An array with the given length. Written as `[T; n]`.
    Array(Ty<'tcx>, &'tcx ty::Const<'tcx>),

    /// The pointee of an array slice. Written as `[T]`.
    Slice(Ty<'tcx>),

    /// A raw pointer. Written as `*mut T` or `*const T`
    RawPtr(TypeAndMut<'tcx>),

    /// A reference; a pointer with an associated lifetime. Written as
    /// `&'a mut T` or `&'a T`.
    Ref(Region<'tcx>, Ty<'tcx>, hir::Mutability),

    /// The anonymous type of a function declaration/definition. Each
    /// function has a unique type, which is output (for a function
    /// named `foo` returning an `i32`) as `fn() -> i32 {foo}`.
    ///
    /// For example the type of `bar` here:
    ///
    /// ```rust
    /// fn foo() -> i32 { 1 }
    /// let bar = foo; // bar: fn() -> i32 {foo}
    /// ```
    FnDef(DefId, SubstsRef<'tcx>),

    /// A pointer to a function. Written as `fn() -> i32`.
    ///
    /// For example the type of `bar` here:
    ///
    /// ```rust
    /// fn foo() -> i32 { 1 }
    /// let bar: fn() -> i32 = foo;
    /// ```
    FnPtr(PolyFnSig<'tcx>),

    /// A trait object. Written as `dyn for<'b> Trait<'b, Assoc = u32> + Send + 'a`.
    Dynamic(&'tcx List<Binder<'tcx, ExistentialPredicate<'tcx>>>, ty::Region<'tcx>),

    /// The anonymous type of a closure. Used to represent the type of
    /// `|a| a`.
    Closure(DefId, SubstsRef<'tcx>),

    /// The anonymous type of a generator. Used to represent the type of
    /// `|a| yield a`.
    Generator(DefId, SubstsRef<'tcx>, hir::Movability),

    /// A type representing the types stored inside a generator.
    /// This should only appear in GeneratorInteriors.
    GeneratorWitness(Binder<'tcx, &'tcx List<Ty<'tcx>>>),

    /// The never type `!`.
    Never,

    /// A tuple type. For example, `(i32, bool)`.
    /// Use `TyS::tuple_fields` to iterate over the field types.
    Tuple(SubstsRef<'tcx>),

    /// The projection of an associated type. For example,
    /// `<T as Trait<..>>::N`.
    Projection(ProjectionTy<'tcx>),

    /// Opaque (`impl Trait`) type found in a return type.
    /// The `DefId` comes either from
    /// * the `impl Trait` ast::Ty node,
    /// * or the `type Foo = impl Trait` declaration
    /// The substitutions are for the generics of the function in question.
    /// After typeck, the concrete type can be found in the `types` map.
    Opaque(DefId, SubstsRef<'tcx>),

    /// A type parameter; for example, `T` in `fn f<T>(x: T) {}`.
    Param(ParamTy),

    /// Bound type variable, used only when preparing a trait query.
    Bound(ty::DebruijnIndex, BoundTy),

    /// A placeholder type - universally quantified higher-ranked type.
    Placeholder(ty::PlaceholderType),

    /// A type variable used during type checking.
    Infer(InferTy),

    /// A placeholder for a type which could not be computed; this is
    /// propagated to avoid useless error messages. The token witnesses
    /// that a diagnostic was already (or will be) emitted.
    Error(DelaySpanBugEmitted),
}
impl TyKind<'tcx> {
    /// Returns `true` if this kind is one of the primitive scalar types:
    /// `bool`, `char`, or a built-in integer or floating-point type.
    #[inline]
    pub fn is_primitive(&self) -> bool {
        match self {
            Bool | Char | Int(_) | Uint(_) | Float(_) => true,
            _ => false,
        }
    }
    /// Get the article ("a" or "an") to use with this type.
    pub fn article(&self) -> &'static str {
        match self {
            Adt(def, _) if def.is_enum() => "an",
            Int(_) | Float(_) | Array(_, _) => "an",
            // This should never happen, but ICEing and causing the user's code
            // to not compile felt too harsh.
            Error(_) => "a",
            _ => "a",
        }
    }
}
// `TyKind` is used a lot. Make sure it doesn't unintentionally get bigger.
// (The size is only asserted on x86_64 with 64-bit pointers; other targets
// are not checked.)
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
static_assert_size!(TyKind<'_>, 32);
/// A closure can be modeled as a struct that looks like:
///
/// struct Closure<'l0...'li, T0...Tj, CK, CS, U>(...U);
///
/// where:
///
/// - 'l0...'li and T0...Tj are the generic parameters
/// in scope on the function that defined the closure,
/// - CK represents the *closure kind* (Fn vs FnMut vs FnOnce). This
/// is rather hackily encoded via a scalar type. See
/// `TyS::to_opt_closure_kind` for details.
/// - CS represents the *closure signature*, represented as a `fn()`
/// type. For example, `fn(u32, u32) -> u32` would mean that the closure
/// implements `CK<(u32, u32), Output = u32>`, where `CK` is the trait
/// specified above.
/// - U is a type parameter representing the types of its upvars, tupled up
/// (borrowed, if appropriate; that is, if an U field represents a by-ref upvar,
/// and the up-var has the type `Foo`, then that field of U will be `&Foo`).
///
/// So, for example, given this function:
///
/// fn foo<'a, T>(data: &'a mut T) {
/// do(|| data.count += 1)
/// }
///
/// the type of the closure would be something like:
///
/// struct Closure<'a, T, U>(...U);
///
/// Note that the type of the upvar is not specified in the struct.
/// You may wonder how the impl would then be able to use the upvar,
/// if it doesn't know its type? The answer is that the impl is
/// (conceptually) not fully generic over Closure but rather tied to
/// instances with the expected upvar types:
///
/// impl<'b, 'a, T> FnMut() for Closure<'a, T, (&'b mut &'a mut T,)> {
/// ...
/// }
///
/// You can see that the *impl* fully specified the type of the upvar
/// and thus knows full well that `data` has type `&'b mut &'a mut T`.
/// (Here, I am assuming that `data` is mut-borrowed.)
///
/// Now, the last question you may ask is: Why include the upvar types
/// in an extra type parameter? The reason for this design is that the
/// upvar types can reference lifetimes that are internal to the
/// creating function. In my example above, for example, the lifetime
/// `'b` represents the scope of the closure itself; this is some
/// subset of `foo`, probably just the scope of the call to
/// `do()`. If we just had the lifetime/type parameters from the
/// enclosing function, we couldn't name this lifetime `'b`. Note that
/// there can also be lifetimes in the types of the upvars themselves,
/// if one of them happens to be a reference to something that the
/// creating fn owns.
///
/// OK, you say, so why not create a more minimal set of parameters
/// that just includes the extra lifetime parameters? The answer is
/// primarily that it would be hard --- we don't know at the time when
/// we create the closure type what the full types of the upvars are,
/// nor do we know which are borrowed and which are not. In this
/// design, we can just supply a fresh type parameter and figure that
/// out later.
///
/// All right, you say, but why include the type parameters from the
/// original function then? The answer is that codegen may need them
/// when monomorphizing, and they may not appear in the upvars. A
/// closure could capture no variables but still make use of some
/// in-scope type parameter with a bound (e.g., if our example above
/// had an extra `U: Default`, and the closure called `U::default()`).
///
/// There is another reason. This design (implicitly) prohibits
/// closures from capturing themselves (except via a trait
/// object). This simplifies closure inference considerably, since it
/// means that when we infer the kind of a closure or its upvars, we
/// don't have to handle cycles where the decisions we make for
/// closure C wind up influencing the decisions we ought to make for
/// closure C (which would then require fixed point iteration to
/// handle). Plus it fixes an ICE. :P
///
/// ## Generators
///
/// Generators are handled similarly in `GeneratorSubsts`. The set of
/// type parameters is similar, but `CK` and `CS` are replaced by the
/// following type parameters:
///
/// * `GS`: The generator's "resume type", which is the type of the
/// argument passed to `resume`, and the type of `yield` expressions
/// inside the generator.
/// * `GY`: The "yield type", which is the type of values passed to
/// `yield` inside the generator.
/// * `GR`: The "return type", which is the type of value returned upon
/// completion of the generator.
/// * `GW`: The "generator witness".
#[derive(Copy, Clone, Debug, TypeFoldable)]
pub struct ClosureSubsts<'tcx> {
    /// Lifetime and type parameters from the enclosing function,
    /// concatenated with a tuple containing the types of the upvars.
    ///
    /// These are separated out because codegen wants to pass them around
    /// when monomorphizing.
    pub substs: SubstsRef<'tcx>,
}
/// Struct returned by `split()`.
///
/// Generic over `T` so the same shape can carry either raw `GenericArg`s
/// (as produced by `split`) or `Ty`s (as consumed by `ClosureSubsts::new`).
pub struct ClosureSubstsParts<'tcx, T> {
    /// Generic arguments inherited from the closure's parent item.
    pub parent_substs: &'tcx [GenericArg<'tcx>],
    /// Synthetic type encoding the closure kind (Fn vs FnMut vs FnOnce).
    pub closure_kind_ty: T,
    /// Synthetic type holding the closure's signature as a `fn` pointer type.
    pub closure_sig_as_fn_ptr_ty: T,
    /// Synthetic tuple type of the closure's captured upvars.
    pub tupled_upvars_ty: T,
}
impl<'tcx> ClosureSubsts<'tcx> {
    /// Construct `ClosureSubsts` from `ClosureSubstsParts`, containing `Substs`
    /// for the closure parent, alongside additional closure-specific components.
    pub fn new(
        tcx: TyCtxt<'tcx>,
        parts: ClosureSubstsParts<'tcx, Ty<'tcx>>,
    ) -> ClosureSubsts<'tcx> {
        ClosureSubsts {
            // Parent substs come first, then the three synthetic components in
            // the fixed trailing order that `split` below relies on.
            substs: tcx.mk_substs(
                parts.parent_substs.iter().copied().chain(
                    [parts.closure_kind_ty, parts.closure_sig_as_fn_ptr_ty, parts.tupled_upvars_ty]
                        .iter()
                        .map(|&ty| ty.into()),
                ),
            ),
        }
    }
    /// Divides the closure substs into their respective components.
    /// The ordering assumed here must match that used by `ClosureSubsts::new` above.
    fn split(self) -> ClosureSubstsParts<'tcx, GenericArg<'tcx>> {
        match self.substs[..] {
            // The three synthetic components are always the last three elements.
            [ref parent_substs @ .., closure_kind_ty, closure_sig_as_fn_ptr_ty, tupled_upvars_ty] => {
                ClosureSubstsParts {
                    parent_substs,
                    closure_kind_ty,
                    closure_sig_as_fn_ptr_ty,
                    tupled_upvars_ty,
                }
            }
            // Fewer than three substs means the synthetic components were never added.
            _ => bug!("closure substs missing synthetics"),
        }
    }
    /// Returns `true` only if enough of the synthetic types are known to
    /// allow using all of the methods on `ClosureSubsts` without panicking.
    ///
    /// Used primarily by `ty::print::pretty` to be able to handle closure
    /// types that haven't had their synthetic types substituted in.
    pub fn is_valid(self) -> bool {
        // Needs at least the 3 synthetic components, and the upvars slot must
        // already have been resolved to an actual tuple type.
        self.substs.len() >= 3
            && matches!(self.split().tupled_upvars_ty.expect_ty().kind(), Tuple(_))
    }
    /// Returns the substitutions of the closure's parent.
    pub fn parent_substs(self) -> &'tcx [GenericArg<'tcx>] {
        self.split().parent_substs
    }
    /// Returns an iterator over the list of types of captured paths by the closure.
    /// In case there was a type error in figuring out the types of the captured path, an
    /// empty iterator is returned.
    #[inline]
    pub fn upvar_tys(self) -> impl Iterator<Item = Ty<'tcx>> + 'tcx {
        match self.tupled_upvars_ty().kind() {
            // A type error while computing captures yields no upvars.
            TyKind::Error(_) => None,
            TyKind::Tuple(..) => Some(self.tupled_upvars_ty().tuple_fields()),
            TyKind::Infer(_) => bug!("upvar_tys called before capture types are inferred"),
            ty => bug!("Unexpected representation of upvar types tuple {:?}", ty),
        }
        .into_iter()
        .flatten()
    }
    /// Returns the tuple type representing the upvars for this closure.
    #[inline]
    pub fn tupled_upvars_ty(self) -> Ty<'tcx> {
        self.split().tupled_upvars_ty.expect_ty()
    }
    /// Returns the closure kind for this closure; may return a type
    /// variable during inference. To get the closure kind during
    /// inference, use `infcx.closure_kind(substs)`.
    pub fn kind_ty(self) -> Ty<'tcx> {
        self.split().closure_kind_ty.expect_ty()
    }
    /// Returns the `fn` pointer type representing the closure signature for this
    /// closure.
    // FIXME(eddyb) this should be unnecessary, as the shallowly resolved
    // type is known at the time of the creation of `ClosureSubsts`,
    // see `rustc_typeck::check::closure`.
    pub fn sig_as_fn_ptr_ty(self) -> Ty<'tcx> {
        self.split().closure_sig_as_fn_ptr_ty.expect_ty()
    }
    /// Returns the closure kind for this closure; only usable outside
    /// of an inference context, because in that context we know that
    /// there are no type variables.
    ///
    /// If you have an inference context, use `infcx.closure_kind()`.
    pub fn kind(self) -> ty::ClosureKind {
        self.kind_ty().to_opt_closure_kind().unwrap()
    }
    /// Extracts the signature from the closure.
    pub fn sig(self) -> ty::PolyFnSig<'tcx> {
        let ty = self.sig_as_fn_ptr_ty();
        match ty.kind() {
            ty::FnPtr(sig) => *sig,
            _ => bug!("closure_sig_as_fn_ptr_ty is not a fn-ptr: {:?}", ty.kind()),
        }
    }
}
/// Similar to `ClosureSubsts`; see the above documentation for more.
#[derive(Copy, Clone, Debug, TypeFoldable)]
pub struct GeneratorSubsts<'tcx> {
    /// Parent generics concatenated with the generator's synthetic components
    /// (resume/yield/return types, witness, and tupled upvars).
    pub substs: SubstsRef<'tcx>,
}
/// Struct returned by `GeneratorSubsts::split()`; the generator analogue of
/// `ClosureSubstsParts`.
pub struct GeneratorSubstsParts<'tcx, T> {
    /// Generic arguments inherited from the generator's parent item.
    pub parent_substs: &'tcx [GenericArg<'tcx>],
    /// Type of the argument passed to `resume`.
    pub resume_ty: T,
    /// Type of values yielded by the generator.
    pub yield_ty: T,
    /// Type returned when the generator completes.
    pub return_ty: T,
    /// The generator witness: types live across suspension points.
    pub witness: T,
    /// Synthetic tuple type of the generator's captured upvars.
    pub tupled_upvars_ty: T,
}
impl<'tcx> GeneratorSubsts<'tcx> {
    /// Construct `GeneratorSubsts` from `GeneratorSubstsParts`, containing `Substs`
    /// for the generator parent, alongside additional generator-specific components.
    pub fn new(
        tcx: TyCtxt<'tcx>,
        parts: GeneratorSubstsParts<'tcx, Ty<'tcx>>,
    ) -> GeneratorSubsts<'tcx> {
        GeneratorSubsts {
            // Parent substs come first, then the five synthetic components in
            // the fixed trailing order that `split` below relies on.
            substs: tcx.mk_substs(
                parts.parent_substs.iter().copied().chain(
                    [
                        parts.resume_ty,
                        parts.yield_ty,
                        parts.return_ty,
                        parts.witness,
                        parts.tupled_upvars_ty,
                    ]
                    .iter()
                    .map(|&ty| ty.into()),
                ),
            ),
        }
    }
    /// Divides the generator substs into their respective components.
    /// The ordering assumed here must match that used by `GeneratorSubsts::new` above.
    fn split(self) -> GeneratorSubstsParts<'tcx, GenericArg<'tcx>> {
        match self.substs[..] {
            // The five synthetic components are always the last five elements.
            [ref parent_substs @ .., resume_ty, yield_ty, return_ty, witness, tupled_upvars_ty] => {
                GeneratorSubstsParts {
                    parent_substs,
                    resume_ty,
                    yield_ty,
                    return_ty,
                    witness,
                    tupled_upvars_ty,
                }
            }
            _ => bug!("generator substs missing synthetics"),
        }
    }
    /// Returns `true` only if enough of the synthetic types are known to
    /// allow using all of the methods on `GeneratorSubsts` without panicking.
    ///
    /// Used primarily by `ty::print::pretty` to be able to handle generator
    /// types that haven't had their synthetic types substituted in.
    pub fn is_valid(self) -> bool {
        // Needs at least the 5 synthetic components, and the upvars slot must
        // already have been resolved to an actual tuple type.
        self.substs.len() >= 5
            && matches!(self.split().tupled_upvars_ty.expect_ty().kind(), Tuple(_))
    }
    /// Returns the substitutions of the generator's parent.
    pub fn parent_substs(self) -> &'tcx [GenericArg<'tcx>] {
        self.split().parent_substs
    }
    /// This describes the types that can be contained in a generator.
    /// It will be a type variable initially and unified in the last stages of typeck of a body.
    /// It contains a tuple of all the types that could end up on a generator frame.
    /// The state transformation MIR pass may only produce layouts which mention types
    /// in this tuple. Upvars are not counted here.
    pub fn witness(self) -> Ty<'tcx> {
        self.split().witness.expect_ty()
    }
    /// Returns an iterator over the list of types of captured paths by the generator.
    /// In case there was a type error in figuring out the types of the captured path, an
    /// empty iterator is returned.
    #[inline]
    pub fn upvar_tys(self) -> impl Iterator<Item = Ty<'tcx>> + 'tcx {
        match self.tupled_upvars_ty().kind() {
            // A type error while computing captures yields no upvars.
            TyKind::Error(_) => None,
            TyKind::Tuple(..) => Some(self.tupled_upvars_ty().tuple_fields()),
            TyKind::Infer(_) => bug!("upvar_tys called before capture types are inferred"),
            ty => bug!("Unexpected representation of upvar types tuple {:?}", ty),
        }
        .into_iter()
        .flatten()
    }
    /// Returns the tuple type representing the upvars for this generator.
    #[inline]
    pub fn tupled_upvars_ty(self) -> Ty<'tcx> {
        self.split().tupled_upvars_ty.expect_ty()
    }
    /// Returns the type representing the resume type of the generator.
    pub fn resume_ty(self) -> Ty<'tcx> {
        self.split().resume_ty.expect_ty()
    }
    /// Returns the type representing the yield type of the generator.
    pub fn yield_ty(self) -> Ty<'tcx> {
        self.split().yield_ty.expect_ty()
    }
    /// Returns the type representing the return type of the generator.
    pub fn return_ty(self) -> Ty<'tcx> {
        self.split().return_ty.expect_ty()
    }
    /// Returns the "generator signature", which consists of its yield
    /// and return types.
    ///
    /// N.B., some bits of the code prefers to see this wrapped in a
    /// binder, but it never contains bound regions. Probably this
    /// function should be removed.
    pub fn poly_sig(self) -> PolyGenSig<'tcx> {
        ty::Binder::dummy(self.sig())
    }
    /// Returns the "generator signature", which consists of its resume, yield
    /// and return types.
    pub fn sig(self) -> GenSig<'tcx> {
        ty::GenSig {
            resume_ty: self.resume_ty(),
            yield_ty: self.yield_ty(),
            return_ty: self.return_ty(),
        }
    }
}
impl<'tcx> GeneratorSubsts<'tcx> {
    /// Generator has not been resumed yet.
    pub const UNRESUMED: usize = 0;
    /// Generator has returned or is completed.
    pub const RETURNED: usize = 1;
    /// Generator has been poisoned.
    pub const POISONED: usize = 2;
    // Display names for the three fixed states above.
    const UNRESUMED_NAME: &'static str = "Unresumed";
    const RETURNED_NAME: &'static str = "Returned";
    const POISONED_NAME: &'static str = "Panicked";
    /// The valid variant indices of this generator.
    #[inline]
    pub fn variant_range(&self, def_id: DefId, tcx: TyCtxt<'tcx>) -> Range<VariantIdx> {
        // FIXME requires optimized MIR
        let num_variants = tcx.generator_layout(def_id).unwrap().variant_fields.len();
        VariantIdx::new(0)..VariantIdx::new(num_variants)
    }
    /// The discriminant for the given variant. Panics if the `variant_index` is
    /// out of range.
    #[inline]
    pub fn discriminant_for_variant(
        &self,
        def_id: DefId,
        tcx: TyCtxt<'tcx>,
        variant_index: VariantIdx,
    ) -> Discr<'tcx> {
        // Generators don't support explicit discriminant values, so they are
        // the same as the variant index.
        assert!(self.variant_range(def_id, tcx).contains(&variant_index));
        Discr { val: variant_index.as_usize() as u128, ty: self.discr_ty(tcx) }
    }
    /// The set of all discriminants for the generator, enumerated with their
    /// variant indices.
    #[inline]
    pub fn discriminants(
        self,
        def_id: DefId,
        tcx: TyCtxt<'tcx>,
    ) -> impl Iterator<Item = (VariantIdx, Discr<'tcx>)> + Captures<'tcx> {
        self.variant_range(def_id, tcx).map(move |index| {
            (index, Discr { val: index.as_usize() as u128, ty: self.discr_ty(tcx) })
        })
    }
    /// Calls `f` with a reference to the name of the enumerator for the given
    /// variant `v`.
    pub fn variant_name(v: VariantIdx) -> Cow<'static, str> {
        match v.as_usize() {
            Self::UNRESUMED => Cow::from(Self::UNRESUMED_NAME),
            Self::RETURNED => Cow::from(Self::RETURNED_NAME),
            Self::POISONED => Cow::from(Self::POISONED_NAME),
            // Variants after the three fixed states are suspension points,
            // numbered from zero (index 3 -> "Suspend0").
            _ => Cow::from(format!("Suspend{}", v.as_usize() - 3)),
        }
    }
    /// The type of the state discriminant used in the generator type.
    #[inline]
    pub fn discr_ty(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
        tcx.types.u32
    }
    /// This returns the types of the MIR locals which had to be stored across suspension points.
    /// It is calculated in rustc_mir::transform::generator::StateTransform.
    /// All the types here must be in the tuple in GeneratorInterior.
    ///
    /// The locals are grouped by their variant number. Note that some locals may
    /// be repeated in multiple variants.
    #[inline]
    pub fn state_tys(
        self,
        def_id: DefId,
        tcx: TyCtxt<'tcx>,
    ) -> impl Iterator<Item = impl Iterator<Item = Ty<'tcx>> + Captures<'tcx>> {
        let layout = tcx.generator_layout(def_id).unwrap();
        layout.variant_fields.iter().map(move |variant| {
            // Field types are stored unsubstituted in the layout; apply our substs.
            variant.iter().map(move |field| layout.field_tys[*field].subst(tcx, self.substs))
        })
    }
    /// This is the types of the fields of a generator which are not stored in a
    /// variant.
    #[inline]
    pub fn prefix_tys(self) -> impl Iterator<Item = Ty<'tcx>> {
        self.upvar_tys()
    }
}
/// The substitutions of either a closure or a generator, for code that only
/// cares about the captured upvars and not which of the two it is dealing with.
#[derive(Debug, Copy, Clone, HashStable)]
pub enum UpvarSubsts<'tcx> {
    /// Substitutions of a closure (interpreted via `substs.as_closure()`).
    Closure(SubstsRef<'tcx>),
    /// Substitutions of a generator (interpreted via `substs.as_generator()`).
    Generator(SubstsRef<'tcx>),
}
impl<'tcx> UpvarSubsts<'tcx> {
    /// Returns an iterator over the list of types of captured paths by the closure/generator.
    /// In case there was a type error in figuring out the types of the captured path, an
    /// empty iterator is returned.
    #[inline]
    pub fn upvar_tys(self) -> impl Iterator<Item = Ty<'tcx>> + 'tcx {
        let tupled_tys = match self {
            UpvarSubsts::Closure(substs) => substs.as_closure().tupled_upvars_ty(),
            UpvarSubsts::Generator(substs) => substs.as_generator().tupled_upvars_ty(),
        };
        match tupled_tys.kind() {
            // A type error while computing captures yields no upvars.
            TyKind::Error(_) => None,
            // Reuse the tuple type we already computed above instead of calling
            // `self.tupled_upvars_ty()` again (which would redo the match and
            // the underlying `split()`); the value is identical.
            TyKind::Tuple(..) => Some(tupled_tys.tuple_fields()),
            TyKind::Infer(_) => bug!("upvar_tys called before capture types are inferred"),
            ty => bug!("Unexpected representation of upvar types tuple {:?}", ty),
        }
        .into_iter()
        .flatten()
    }
    /// Returns the tuple type representing the upvars for this closure/generator.
    #[inline]
    pub fn tupled_upvars_ty(self) -> Ty<'tcx> {
        match self {
            UpvarSubsts::Closure(substs) => substs.as_closure().tupled_upvars_ty(),
            UpvarSubsts::Generator(substs) => substs.as_generator().tupled_upvars_ty(),
        }
    }
}
/// A single predicate of a trait object type (see the `Dynamic` variant of
/// `TyKind`, which holds a list of these under a binder).
#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Ord, Eq, Hash, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable)]
pub enum ExistentialPredicate<'tcx> {
    /// E.g., `Iterator`.
    Trait(ExistentialTraitRef<'tcx>),
    /// E.g., `Iterator::Item = T`.
    Projection(ExistentialProjection<'tcx>),
    /// E.g., `Send`.
    AutoTrait(DefId),
}
impl<'tcx> ExistentialPredicate<'tcx> {
    /// Compares via an ordering that will not change if modules are reordered or other changes are
    /// made to the tree. In particular, this ordering is preserved across incremental compilations.
    pub fn stable_cmp(&self, tcx: TyCtxt<'tcx>, other: &Self) -> Ordering {
        use self::ExistentialPredicate::*;
        // Variants order as `Trait < Projection < AutoTrait`; ties within a
        // variant are broken by stable def-path hashes rather than raw `DefId`s,
        // which makes the result independent of item numbering.
        match (*self, *other) {
            // At most one (principal) trait bound can appear, so two `Trait`s
            // compare equal.
            (Trait(_), Trait(_)) => Ordering::Equal,
            (Projection(ref a), Projection(ref b)) => {
                tcx.def_path_hash(a.item_def_id).cmp(&tcx.def_path_hash(b.item_def_id))
            }
            (AutoTrait(ref a), AutoTrait(ref b)) => {
                tcx.trait_def(*a).def_path_hash.cmp(&tcx.trait_def(*b).def_path_hash)
            }
            (Trait(_), _) => Ordering::Less,
            (Projection(_), Trait(_)) => Ordering::Greater,
            (Projection(_), _) => Ordering::Less,
            (AutoTrait(_), _) => Ordering::Greater,
        }
    }
}
impl<'tcx> Binder<'tcx, ExistentialPredicate<'tcx>> {
    /// Reconstitutes a full predicate from this existential predicate by
    /// supplying the erased `Self` type `self_ty`, keeping the same binder.
    pub fn with_self_ty(&self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> ty::Predicate<'tcx> {
        use crate::ty::ToPredicate;
        match self.skip_binder() {
            ExistentialPredicate::Trait(tr) => {
                self.rebind(tr).with_self_ty(tcx, self_ty).without_const().to_predicate(tcx)
            }
            ExistentialPredicate::Projection(p) => {
                self.rebind(p.with_self_ty(tcx, self_ty)).to_predicate(tcx)
            }
            ExistentialPredicate::AutoTrait(did) => {
                // Auto traits carry no generics beyond `Self` (see the docs on
                // `principal` below), so the trait ref needs only `self_ty`.
                let trait_ref = self.rebind(ty::TraitRef {
                    def_id: did,
                    substs: tcx.mk_substs_trait(self_ty, &[]),
                });
                trait_ref.without_const().to_predicate(tcx)
            }
        }
    }
}
impl<'tcx> List<ty::Binder<'tcx, ExistentialPredicate<'tcx>>> {
    /// Returns the "principal `DefId`" of this set of existential predicates.
    ///
    /// A Rust trait object type consists (in addition to a lifetime bound)
    /// of a set of trait bounds, which are separated into any number
    /// of auto-trait bounds, and at most one non-auto-trait bound. The
    /// non-auto-trait bound is called the "principal" of the trait
    /// object.
    ///
    /// Only the principal can have methods or type parameters (because
    /// auto traits can have neither of them). This is important, because
    /// it means the auto traits can be treated as an unordered set (methods
    /// would force an order for the vtable, while relating traits with
    /// type parameters without knowing the order to relate them in is
    /// a rather non-trivial task).
    ///
    /// For example, in the trait object `dyn fmt::Debug + Sync`, the
    /// principal bound is `Some(fmt::Debug)`, while the auto-trait bounds
    /// are the set `{Sync}`.
    ///
    /// It is also possible to have a "trivial" trait object that
    /// consists only of auto traits, with no principal - for example,
    /// `dyn Send + Sync`. In that case, the set of auto-trait bounds
    /// is `{Send, Sync}`, while there is no principal. These trait objects
    /// have a "trivial" vtable consisting of just the size, alignment,
    /// and destructor.
    pub fn principal(&self) -> Option<ty::Binder<'tcx, ExistentialTraitRef<'tcx>>> {
        // NOTE(review): indexes the first predicate unconditionally; this
        // presumes the list is non-empty (the principal, if any, sorts first
        // per `stable_cmp`) — confirm against construction sites.
        self[0]
            .map_bound(|this| match this {
                ExistentialPredicate::Trait(tr) => Some(tr),
                _ => None,
            })
            .transpose()
    }
    /// Like `principal`, but returns only the trait's `DefId`.
    pub fn principal_def_id(&self) -> Option<DefId> {
        self.principal().map(|trait_ref| trait_ref.skip_binder().def_id)
    }
    /// Iterates over the associated-type (`Assoc = T`) bounds of this object type.
    #[inline]
    pub fn projection_bounds<'a>(
        &'a self,
    ) -> impl Iterator<Item = ty::Binder<'tcx, ExistentialProjection<'tcx>>> + 'a {
        self.iter().filter_map(|predicate| {
            predicate
                .map_bound(|pred| match pred {
                    ExistentialPredicate::Projection(projection) => Some(projection),
                    _ => None,
                })
                .transpose()
        })
    }
    /// Iterates over the auto-trait `DefId`s of this object type.
    #[inline]
    pub fn auto_traits<'a>(&'a self) -> impl Iterator<Item = DefId> + 'a {
        self.iter().filter_map(|predicate| match predicate.skip_binder() {
            ExistentialPredicate::AutoTrait(did) => Some(did),
            _ => None,
        })
    }
}
/// A complete reference to a trait. These take numerous guises in syntax,
/// but perhaps the most recognizable form is in a where-clause:
///
/// T: Foo<U>
///
/// This would be represented by a trait-reference where the `DefId` is the
/// `DefId` for the trait `Foo` and the substs define `T` as parameter 0,
/// and `U` as parameter 1.
///
/// Trait references also appear in object types like `Foo<U>`, but in
/// that case the `Self` parameter is absent from the substitutions.
#[derive(Copy, Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable)]
pub struct TraitRef<'tcx> {
    /// The trait being referenced.
    pub def_id: DefId,
    /// The substitutions; `Self` is parameter 0 when present.
    pub substs: SubstsRef<'tcx>,
}
impl<'tcx> TraitRef<'tcx> {
    /// Builds a trait reference from a trait `DefId` and its substitutions.
    pub fn new(def_id: DefId, substs: SubstsRef<'tcx>) -> TraitRef<'tcx> {
        TraitRef { def_id, substs }
    }
    /// Returns a `TraitRef` of the form `P0: Foo<P1..Pn>` where `Pi`
    /// are the parameters defined on trait.
    pub fn identity(tcx: TyCtxt<'tcx>, def_id: DefId) -> TraitRef<'tcx> {
        let substs = InternalSubsts::identity_for_item(tcx, def_id);
        TraitRef { def_id, substs }
    }
    /// The `Self` type of this trait reference (substitution slot 0).
    #[inline]
    pub fn self_ty(&self) -> Ty<'tcx> {
        self.substs.type_at(0)
    }
    /// Builds the trait reference for `trait_id` from a method's substitutions
    /// by truncating them to the trait's own generic parameters.
    pub fn from_method(
        tcx: TyCtxt<'tcx>,
        trait_id: DefId,
        substs: SubstsRef<'tcx>,
    ) -> ty::TraitRef<'tcx> {
        let defs = tcx.generics_of(trait_id);
        let trait_substs = tcx.intern_substs(&substs[..defs.params.len()]);
        ty::TraitRef { def_id: trait_id, substs: trait_substs }
    }
}
pub type PolyTraitRef<'tcx> = Binder<'tcx, TraitRef<'tcx>>;
impl<'tcx> PolyTraitRef<'tcx> {
    /// The (possibly bound) `Self` type of this trait reference.
    pub fn self_ty(&self) -> Binder<'tcx, Ty<'tcx>> {
        self.map_bound_ref(|trait_ref| trait_ref.self_ty())
    }
    /// The `DefId` of the referenced trait; binder-independent data.
    pub fn def_id(&self) -> DefId {
        self.skip_binder().def_id
    }
    /// Wraps this trait reference into a trait predicate under the same binder.
    pub fn to_poly_trait_predicate(&self) -> ty::PolyTraitPredicate<'tcx> {
        self.map_bound(|trait_ref| ty::TraitPredicate { trait_ref })
    }
}
/// An existential reference to a trait, where `Self` is erased.
/// For example, the trait object `Trait<'a, 'b, X, Y>` is:
///
/// exists T. T: Trait<'a, 'b, X, Y>
///
/// The substitutions don't include the erased `Self`, only trait
/// type and lifetime parameters (`[X, Y]` and `['a, 'b]` above).
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable)]
pub struct ExistentialTraitRef<'tcx> {
    /// The trait being referenced.
    pub def_id: DefId,
    /// Substitutions *without* the `Self` parameter (see type docs above).
    pub substs: SubstsRef<'tcx>,
}
impl<'tcx> ExistentialTraitRef<'tcx> {
    /// Builds an `ExistentialTraitRef` from a full `TraitRef` by dropping its
    /// `Self` type (substitution slot 0).
    pub fn erase_self_ty(
        tcx: TyCtxt<'tcx>,
        trait_ref: ty::TraitRef<'tcx>,
    ) -> ty::ExistentialTraitRef<'tcx> {
        // Assert there is a Self. The result is deliberately discarded; the
        // call presumably panics when no `Self` substitution is present.
        trait_ref.substs.type_at(0);
        ty::ExistentialTraitRef {
            def_id: trait_ref.def_id,
            // Keep everything after the `Self` slot.
            substs: tcx.intern_substs(&trait_ref.substs[1..]),
        }
    }
    /// Object types don't have a self type specified. Therefore, when
    /// we convert the principal trait-ref into a normal trait-ref,
    /// you must give *some* self type. A common choice is `mk_err()`
    /// or some placeholder type.
    pub fn with_self_ty(&self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> ty::TraitRef<'tcx> {
        // otherwise the escaping vars would be captured by the binder
        // debug_assert!(!self_ty.has_escaping_bound_vars());
        ty::TraitRef { def_id: self.def_id, substs: tcx.mk_substs_trait(self_ty, self.substs) }
    }
}
pub type PolyExistentialTraitRef<'tcx> = Binder<'tcx, ExistentialTraitRef<'tcx>>;
impl<'tcx> PolyExistentialTraitRef<'tcx> {
    /// The `DefId` of the referenced trait; binder-independent data.
    pub fn def_id(&self) -> DefId {
        self.skip_binder().def_id
    }
    /// Object types don't have a self type specified. Therefore, when
    /// we convert the principal trait-ref into a normal trait-ref,
    /// you must give *some* self type. A common choice is `mk_err()`
    /// or some placeholder type.
    pub fn with_self_ty(&self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> ty::PolyTraitRef<'tcx> {
        self.map_bound(|existential| existential.with_self_ty(tcx, self_ty))
    }
}
/// The kind of a variable bound by a `Binder`.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)]
#[derive(HashStable)]
pub enum BoundVariableKind {
    /// A bound type variable.
    Ty(BoundTyKind),
    /// A bound region (lifetime).
    Region(BoundRegionKind),
    /// A bound const.
    Const,
}
/// Binder is a binder for higher-ranked lifetimes or types. It is part of the
/// compiler's representation for things like `for<'a> Fn(&'a isize)`
/// (which would be represented by the type `PolyTraitRef ==
/// Binder<'tcx, TraitRef>`). Note that when we instantiate,
/// erase, or otherwise "discharge" these bound vars, we change the
/// type from `Binder<'tcx, T>` to just `T` (see
/// e.g., `liberate_late_bound_regions`).
///
/// Field 0 is the bound value; field 1 lists the variables bound by this binder.
///
/// `Decodable` and `Encodable` are implemented for `Binder<T>` using the `impl_binder_encode_decode!` macro.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub struct Binder<'tcx, T>(T, &'tcx List<BoundVariableKind>);
impl<'tcx, T> Binder<'tcx, T>
where
    T: TypeFoldable<'tcx>,
{
    /// Wraps `value` in a binder, asserting that `value` does not
    /// contain any bound vars that would be bound by the
    /// binder. This is commonly used to 'inject' a value T into a
    /// different binding level.
    pub fn dummy(value: T) -> Binder<'tcx, T> {
        debug_assert!(!value.has_escaping_bound_vars());
        // No variables are bound: the var list is empty.
        Binder(value, ty::List::empty())
    }
    /// Wraps `value` in a binder, binding higher-ranked vars (if any).
    pub fn bind(value: T, tcx: TyCtxt<'tcx>) -> Binder<'tcx, T> {
        // Collect the bound variables actually occurring in `value`.
        let mut collector = BoundVarsCollector::new();
        value.visit_with(&mut collector);
        Binder(value, collector.into_vars(tcx))
    }
    /// Wraps `value` in a binder with an explicitly supplied variable list;
    /// in debug builds `value` is validated against `vars`.
    pub fn bind_with_vars(value: T, vars: &'tcx List<BoundVariableKind>) -> Binder<'tcx, T> {
        if cfg!(debug_assertions) {
            let mut validator = ValidateBoundVars::new(vars);
            value.visit_with(&mut validator);
        }
        Binder(value, vars)
    }
}
impl<'tcx, T> Binder<'tcx, T> {
    /// Skips the binder and returns the "bound" value. This is a
    /// risky thing to do because it's easy to get confused about
    /// De Bruijn indices and the like. It is usually better to
    /// discharge the binder using `no_bound_vars` or
    /// `replace_late_bound_regions` or something like
    /// that. `skip_binder` is only valid when you are either
    /// extracting data that has nothing to do with bound vars, you
    /// are doing some sort of test that does not involve bound
    /// regions, or you are being very careful about your depth
    /// accounting.
    ///
    /// Some examples where `skip_binder` is reasonable:
    ///
    /// - extracting the `DefId` from a PolyTraitRef;
    /// - comparing the self type of a PolyTraitRef to see if it is equal to
    /// a type parameter `X`, since the type `X` does not reference any regions
    pub fn skip_binder(self) -> T {
        self.0
    }
    /// Returns the list of variables bound by this binder.
    pub fn bound_vars(&self) -> &'tcx List<BoundVariableKind> {
        self.1
    }
    /// Borrows the bound value, keeping the same bound-variable list.
    pub fn as_ref(&self) -> Binder<'tcx, &T> {
        Binder(&self.0, self.1)
    }
    /// Maps the bound value without validating the result against the
    /// bound-variable list (unlike `map_bound_ref`).
    pub fn map_bound_ref_unchecked<F, U>(&self, f: F) -> Binder<'tcx, U>
    where
        F: FnOnce(&T) -> U,
    {
        let value = f(&self.0);
        Binder(value, self.1)
    }
    /// Maps the bound value by reference; see `map_bound`.
    pub fn map_bound_ref<F, U: TypeFoldable<'tcx>>(&self, f: F) -> Binder<'tcx, U>
    where
        F: FnOnce(&T) -> U,
    {
        self.as_ref().map_bound(f)
    }
    /// Maps the bound value, keeping the same bound-variable list; in debug
    /// builds the result is validated against that list.
    pub fn map_bound<F, U: TypeFoldable<'tcx>>(self, f: F) -> Binder<'tcx, U>
    where
        F: FnOnce(T) -> U,
    {
        let value = f(self.0);
        if cfg!(debug_assertions) {
            let mut validator = ValidateBoundVars::new(self.1);
            value.visit_with(&mut validator);
        }
        Binder(value, self.1)
    }
    /// Wraps a `value` in a binder, using the same bound variables as the
    /// current `Binder`. This should not be used if the new value *changes*
    /// the bound variables. Note: the (old or new) value itself does not
    /// necessarily need to *name* all the bound variables.
    ///
    /// This currently doesn't do anything different than `bind`, because we
    /// don't actually track bound vars. However, semantically, it is different
    /// because bound vars aren't allowed to change here, whereas they are
    /// in `bind`. This may be (debug) asserted in the future.
    pub fn rebind<U>(&self, value: U) -> Binder<'tcx, U>
    where
        U: TypeFoldable<'tcx>,
    {
        if cfg!(debug_assertions) {
            let mut validator = ValidateBoundVars::new(self.bound_vars());
            value.visit_with(&mut validator);
        }
        Binder(value, self.1)
    }
    /// Unwraps and returns the value within, but only if it contains
    /// no bound vars at all. (In other words, if this binder --
    /// and indeed any enclosing binder -- doesn't bind anything at
    /// all.) Otherwise, returns `None`.
    ///
    /// (One could imagine having a method that just unwraps a single
    /// binder, but permits late-bound vars bound by enclosing
    /// binders, but that would require adjusting the debruijn
    /// indices, and given the shallow binding structure we often use,
    /// would not be that useful.)
    pub fn no_bound_vars(self) -> Option<T>
    where
        T: TypeFoldable<'tcx>,
    {
        if self.0.has_escaping_bound_vars() { None } else { Some(self.skip_binder()) }
    }
    /// Splits the contents into two things that share the same binder
    /// level as the original, returning two distinct binders.
    ///
    /// `f` should consider bound regions at depth 1 to be free, and
    /// anything it produces with bound regions at depth 1 will be
    /// bound in the resulting return values.
    pub fn split<U, V, F>(self, f: F) -> (Binder<'tcx, U>, Binder<'tcx, V>)
    where
        F: FnOnce(T) -> (U, V),
    {
        let (u, v) = f(self.0);
        // Both halves share the original bound-variable list.
        (Binder(u, self.1), Binder(v, self.1))
    }
}
impl<'tcx, T> Binder<'tcx, Option<T>> {
pub fn transpose(self) -> Option<Binder<'tcx, T>> {
let bound_vars = self.1;
self.0.map(|v| Binder(v, bound_vars))
}
}
/// Represents the projection of an associated type. In explicit UFCS
/// form this would be written `<T as Trait<..>>::N`.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable)]
pub struct ProjectionTy<'tcx> {
    /// The parameters of the associated item.
    /// Includes the `Self` type as parameter 0 (see `self_ty` below).
    pub substs: SubstsRef<'tcx>,
    /// The `DefId` of the `TraitItem` for the associated type `N`.
    ///
    /// Note that this is not the `DefId` of the `TraitRef` containing this
    /// associated type, which is in `tcx.associated_item(item_def_id).container`.
    pub item_def_id: DefId,
}
impl<'tcx> ProjectionTy<'tcx> {
    /// Returns the `DefId` of the trait that owns the associated item
    /// being projected.
    pub fn trait_def_id(&self, tcx: TyCtxt<'tcx>) -> DefId {
        tcx.associated_item(self.item_def_id).container.id()
    }
    /// Extracts the underlying trait reference and own substs from this projection.
    /// For example, if this is a projection of `<T as StreamingIterator>::Item<'a>`,
    /// then this function would return a `T: Iterator` trait reference and `['a]` as the own substs
    pub fn trait_ref_and_own_substs(
        &self,
        tcx: TyCtxt<'tcx>,
    ) -> (ty::TraitRef<'tcx>, &'tcx [ty::GenericArg<'tcx>]) {
        // Reuse `trait_def_id` instead of duplicating the container lookup.
        let def_id = self.trait_def_id(tcx);
        let trait_generics = tcx.generics_of(def_id);
        (
            // Leading substs belong to the trait itself ...
            ty::TraitRef { def_id, substs: self.substs.truncate_to(tcx, trait_generics) },
            // ... everything past the trait's own generics belongs to the
            // associated item (e.g. GAT parameters).
            &self.substs[trait_generics.count()..],
        )
    }
    /// Extracts the underlying trait reference from this projection.
    /// For example, if this is a projection of `<T as Iterator>::Item`,
    /// then this function would return a `T: Iterator` trait reference.
    ///
    /// WARNING: This will drop the substs for generic associated types
    /// consider calling [Self::trait_ref_and_own_substs] to get those
    /// as well.
    pub fn trait_ref(&self, tcx: TyCtxt<'tcx>) -> ty::TraitRef<'tcx> {
        let def_id = self.trait_def_id(tcx);
        ty::TraitRef { def_id, substs: self.substs.truncate_to(tcx, tcx.generics_of(def_id)) }
    }
    /// The `Self` type of the projection, stored as the first subst.
    pub fn self_ty(&self) -> Ty<'tcx> {
        self.substs.type_at(0)
    }
}
/// The resume, yield, and return types of a generator signature.
#[derive(Copy, Clone, Debug, TypeFoldable)]
pub struct GenSig<'tcx> {
    pub resume_ty: Ty<'tcx>,
    pub yield_ty: Ty<'tcx>,
    pub return_ty: Ty<'tcx>,
}
/// A `GenSig` under a binder for its bound vars.
pub type PolyGenSig<'tcx> = Binder<'tcx, GenSig<'tcx>>;
/// Signature of a function type, which we have arbitrarily
/// decided to use to refer to the input/output types.
///
/// - `inputs`: is the list of arguments and their modes.
/// - `output`: is the return type.
/// - `c_variadic`: indicates whether this is a C-variadic function.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable)]
pub struct FnSig<'tcx> {
    /// All input types followed, last, by the output type
    /// (see the `inputs`/`output` accessors below).
    pub inputs_and_output: &'tcx List<Ty<'tcx>>,
    pub c_variadic: bool,
    pub unsafety: hir::Unsafety,
    pub abi: abi::Abi,
}
impl<'tcx> FnSig<'tcx> {
    /// The argument types: everything except the final entry of
    /// `inputs_and_output`.
    pub fn inputs(&self) -> &'tcx [Ty<'tcx>] {
        &self.inputs_and_output[..self.inputs_and_output.len() - 1]
    }
    /// The return type: the last entry of `inputs_and_output`.
    ///
    /// Panics if `inputs_and_output` is empty (as produced by `FnSig::fake`),
    /// since `len() - 1` underflows.
    pub fn output(&self) -> Ty<'tcx> {
        self.inputs_and_output[self.inputs_and_output.len() - 1]
    }
    // Creates a minimal `FnSig` to be used when encountering a `TyKind::Error` in a fallible
    // method.
    fn fake() -> FnSig<'tcx> {
        FnSig {
            inputs_and_output: List::empty(),
            c_variadic: false,
            unsafety: hir::Unsafety::Normal,
            abi: abi::Abi::Rust,
        }
    }
}
/// A `FnSig` under a binder for its bound vars (e.g. late-bound regions).
pub type PolyFnSig<'tcx> = Binder<'tcx, FnSig<'tcx>>;
impl<'tcx> PolyFnSig<'tcx> {
    /// The argument types, still under the binder.
    #[inline]
    pub fn inputs(&self) -> Binder<'tcx, &'tcx [Ty<'tcx>]> {
        // NOTE(review): uses the `_unchecked` map variant — presumably to skip
        // bound-var validation when re-wrapping the borrowed slice; confirm
        // against `map_bound_ref_unchecked`'s documentation.
        self.map_bound_ref_unchecked(|fn_sig| fn_sig.inputs())
    }
    /// The `index`-th argument type, still under the binder.
    #[inline]
    pub fn input(&self, index: usize) -> ty::Binder<'tcx, Ty<'tcx>> {
        self.map_bound_ref(|fn_sig| fn_sig.inputs()[index])
    }
    pub fn inputs_and_output(&self) -> ty::Binder<'tcx, &'tcx List<Ty<'tcx>>> {
        self.map_bound_ref(|fn_sig| fn_sig.inputs_and_output)
    }
    /// The return type, still under the binder.
    #[inline]
    pub fn output(&self) -> ty::Binder<'tcx, Ty<'tcx>> {
        self.map_bound_ref(|fn_sig| fn_sig.output())
    }
    // The remaining accessors read plain data fields (bool / enum), which
    // cannot reference bound vars, so they read through `skip_binder`.
    pub fn c_variadic(&self) -> bool {
        self.skip_binder().c_variadic
    }
    pub fn unsafety(&self) -> hir::Unsafety {
        self.skip_binder().unsafety
    }
    pub fn abi(&self) -> abi::Abi {
        self.skip_binder().abi
    }
}
/// A canonicalized `PolyFnSig`.
pub type CanonicalPolyFnSig<'tcx> = Canonical<'tcx, Binder<'tcx, FnSig<'tcx>>>;
/// A type parameter (e.g. `T` in `fn foo<T>(..)`), identified by its index
/// into the item's generics plus its name.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)]
#[derive(HashStable)]
pub struct ParamTy {
    pub index: u32,
    pub name: Symbol,
}
impl<'tcx> ParamTy {
    /// Creates a `ParamTy` from its raw index and name.
    pub fn new(index: u32, name: Symbol) -> ParamTy {
        Self { index, name }
    }
    /// Builds the `ParamTy` corresponding to a generic parameter definition.
    pub fn for_def(def: &ty::GenericParamDef) -> ParamTy {
        Self::new(def.index, def.name)
    }
    /// Interns this parameter as a `Ty`.
    #[inline]
    pub fn to_ty(self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
        tcx.mk_ty_param(self.index, self.name)
    }
}
/// A const parameter, the const analogue of `ParamTy`.
#[derive(Copy, Clone, Hash, TyEncodable, TyDecodable, Eq, PartialEq, Ord, PartialOrd)]
#[derive(HashStable)]
pub struct ParamConst {
    pub index: u32,
    pub name: Symbol,
}
impl ParamConst {
    /// Creates a `ParamConst` from its raw index and name.
    pub fn new(index: u32, name: Symbol) -> ParamConst {
        Self { index, name }
    }
    /// Builds the `ParamConst` corresponding to a generic parameter definition.
    pub fn for_def(def: &ty::GenericParamDef) -> ParamConst {
        Self::new(def.index, def.name)
    }
}
/// A region, represented as a reference to a `RegionKind` living for `'tcx`.
pub type Region<'tcx> = &'tcx RegionKind;
/// Representation of regions. Note that the NLL checker uses a distinct
/// representation of regions. For this reason, it internally replaces all the
/// regions with inference variables -- the index of the variable is then used
/// to index into internal NLL data structures. See `rustc_mir::borrow_check`
/// module for more information.
///
/// ## The Region lattice within a given function
///
/// In general, the region lattice looks like
///
/// ```
/// static ----------+-----...------+       (greatest)
/// |                |              |
/// early-bound and  |              |
/// free regions     |              |
/// |                |              |
/// |                |              |
/// empty(root)   placeholder(U1)   |
/// |            /                  |
/// |           /         placeholder(Un)
/// empty(U1) --         /
/// |                   /
/// ...                /
/// |                 /
/// empty(Un) --------                      (smallest)
/// ```
///
/// Early-bound/free regions are the named lifetimes in scope from the
/// function declaration. They have relationships to one another
/// determined based on the declared relationships from the
/// function.
///
/// Note that inference variables and bound regions are not included
/// in this diagram. In the case of inference variables, they should
/// be inferred to some other region from the diagram. In the case of
/// bound regions, they are excluded because they don't make sense to
/// include -- the diagram indicates the relationship between free
/// regions.
///
/// ## Inference variables
///
/// During region inference, we sometimes create inference variables,
/// represented as `ReVar`. These will be inferred by the code in
/// `infer::lexical_region_resolve` to some free region from the
/// lattice above (the minimal region that meets the
/// constraints).
///
/// During NLL checking, where regions are defined differently, we
/// also use `ReVar` -- in that case, the index is used to index into
/// the NLL region checker's data structures. The variable may in fact
/// represent either a free region or an inference variable, in that
/// case.
///
/// ## Bound Regions
///
/// These are regions that are stored behind a binder and must be substituted
/// with some concrete region before being used. There are two kind of
/// bound regions: early-bound, which are bound in an item's `Generics`,
/// and are substituted by a `InternalSubsts`, and late-bound, which are part of
/// higher-ranked types (e.g., `for<'a> fn(&'a ())`), and are substituted by
/// the likes of `liberate_late_bound_regions`. The distinction exists
/// because higher-ranked lifetimes aren't supported in all places. See [1][2].
///
/// Unlike `Param`s, bound regions are not supposed to exist "in the wild"
/// outside their binder, e.g., in types passed to type inference, and
/// should first be substituted (by placeholder regions, free regions,
/// or region variables).
///
/// ## Placeholder and Free Regions
///
/// One often wants to work with bound regions without knowing their precise
/// identity. For example, when checking a function, the lifetime of a borrow
/// can end up being assigned to some region parameter. In these cases,
/// it must be ensured that bounds on the region can't be accidentally
/// assumed without being checked.
///
/// To do this, we replace the bound regions with placeholder markers,
/// which don't satisfy any relation not explicitly provided.
///
/// There are two kinds of placeholder regions in rustc: `ReFree` and
/// `RePlaceholder`. When checking an item's body, `ReFree` is supposed
/// to be used. These also support explicit bounds: both the internally-stored
/// *scope*, which the region is assumed to outlive, as well as other
/// relations stored in the `FreeRegionMap`. Note that these relations
/// aren't checked when you `make_subregion` (or `eq_types`), only by
/// `resolve_regions_and_report_errors`.
///
/// When working with higher-ranked types, some region relations aren't
/// yet known, so you can't just call `resolve_regions_and_report_errors`.
/// `RePlaceholder` is designed for this purpose. In these contexts,
/// there's also the risk that some inference variable laying around will
/// get unified with your placeholder region: if you want to check whether
/// `for<'a> Foo<'_>: 'a`, and you substitute your bound region `'a`
/// with a placeholder region `'%a`, the variable `'_` would just be
/// instantiated to the placeholder region `'%a`, which is wrong because
/// the inference variable is supposed to satisfy the relation
/// *for every value of the placeholder region*. To ensure that doesn't
/// happen, you can use `leak_check`. This is more clearly explained
/// by the [rustc dev guide].
///
/// [1]: http://smallcultfollowing.com/babysteps/blog/2013/10/29/intermingled-parameter-lists/
/// [2]: http://smallcultfollowing.com/babysteps/blog/2013/11/04/intermingled-parameter-lists/
/// [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/traits/hrtb.html
#[derive(Clone, PartialEq, Eq, Hash, Copy, TyEncodable, TyDecodable, PartialOrd, Ord)]
pub enum RegionKind {
    /// Region bound in a type or fn declaration which will be
    /// substituted 'early' -- that is, at the same time when type
    /// parameters are substituted.
    ReEarlyBound(EarlyBoundRegion),
    /// Region bound in a function scope, which will be substituted when the
    /// function is called.
    ReLateBound(ty::DebruijnIndex, BoundRegion),
    /// When checking a function body, the types of all arguments and so forth
    /// that refer to bound region parameters are modified to refer to free
    /// region parameters.
    ReFree(FreeRegion),
    /// Static data that has an "infinite" lifetime. Top in the region lattice.
    ReStatic,
    /// A region variable. Should not exist after typeck.
    ReVar(RegionVid),
    /// A placeholder region -- basically, the higher-ranked version of `ReFree`.
    /// Should not exist after typeck.
    RePlaceholder(ty::PlaceholderRegion),
    /// Empty lifetime is for data that is never accessed. We tag the
    /// empty lifetime with a universe -- the idea is that we don't
    /// want `exists<'a> { forall<'b> { 'b: 'a } }` to be satisfiable.
    /// Therefore, the `'empty` in a universe `U` is less than all
    /// regions visible from `U`, but not less than regions not visible
    /// from `U`.
    ReEmpty(ty::UniverseIndex),
    /// Erased region, used by trait selection, in MIR and during codegen.
    ReErased,
}
/// An early-bound region parameter. `def_id` identifies the parameter
/// itself; the item declaring it is the `DefId`'s parent (see
/// `RegionKind::free_region_binding_scope`).
#[derive(Copy, Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable, Debug, PartialOrd, Ord)]
pub struct EarlyBoundRegion {
    pub def_id: DefId,
    pub index: u32,
    pub name: Symbol,
}
/// A **`const`** **v**ariable **ID**.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)]
pub struct ConstVid<'tcx> {
    pub index: u32,
    /// Zero-sized marker tying this ID to the `'tcx` lifetime.
    pub phantom: PhantomData<&'tcx ()>,
}
rustc_index::newtype_index! {
    /// A **region** (lifetime) **v**ariable **ID**.
    pub struct RegionVid {
        DEBUG_FORMAT = custom,
    }
}
// Lets `RegionVid` be used where the (externally defined) `Atom` trait is
// required, delegating to the `Idx` impl generated by `newtype_index!`.
impl Atom for RegionVid {
    fn index(self) -> usize {
        Idx::index(self)
    }
}
// Identifies a bound variable within a binder (see `BoundTy`).
rustc_index::newtype_index! {
    pub struct BoundVar { .. }
}
/// A bound type variable: `var` identifies it within its binder, and `kind`
/// records whether it was anonymous or written with a name.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)]
#[derive(HashStable)]
pub struct BoundTy {
    pub var: BoundVar,
    pub kind: BoundTyKind,
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)]
#[derive(HashStable)]
pub enum BoundTyKind {
    Anon,
    Param(Symbol),
}
// A bare `BoundVar` converts to an anonymous bound type.
impl From<BoundVar> for BoundTy {
    fn from(var: BoundVar) -> Self {
        BoundTy { var, kind: BoundTyKind::Anon }
    }
}
/// A `ProjectionPredicate` for an `ExistentialTraitRef`.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable)]
pub struct ExistentialProjection<'tcx> {
    pub item_def_id: DefId,
    /// Substs *without* the `Self` type, which is erased for existentials
    /// (see `erase_self_ty` / `with_self_ty` below).
    pub substs: SubstsRef<'tcx>,
    pub ty: Ty<'tcx>,
}
pub type PolyExistentialProjection<'tcx> = Binder<'tcx, ExistentialProjection<'tcx>>;
impl<'tcx> ExistentialProjection<'tcx> {
    /// Extracts the underlying existential trait reference from this projection.
    /// For example, if this is a projection of `exists T. <T as Iterator>::Item == X`,
    /// then this function would return a `exists T. T: Iterator` existential trait
    /// reference.
    pub fn trait_ref(&self, tcx: TyCtxt<'tcx>) -> ty::ExistentialTraitRef<'tcx> {
        let def_id = tcx.associated_item(self.item_def_id).container.id();
        // `- 1`: our substs do not carry the `Self` type (see `erase_self_ty`),
        // while the trait's generics count includes it.
        let subst_count = tcx.generics_of(def_id).count() - 1;
        let substs = tcx.intern_substs(&self.substs[..subst_count]);
        ty::ExistentialTraitRef { def_id, substs }
    }
    /// Re-introduces a concrete `Self` type, producing a full (non-existential)
    /// projection predicate.
    pub fn with_self_ty(
        &self,
        tcx: TyCtxt<'tcx>,
        self_ty: Ty<'tcx>,
    ) -> ty::ProjectionPredicate<'tcx> {
        // otherwise the escaping regions would be captured by the binders
        debug_assert!(!self_ty.has_escaping_bound_vars());
        ty::ProjectionPredicate {
            projection_ty: ty::ProjectionTy {
                item_def_id: self.item_def_id,
                // Prepend `self_ty` as the leading subst again.
                substs: tcx.mk_substs_trait(self_ty, self.substs),
            },
            ty: self.ty,
        }
    }
    /// Inverse of `with_self_ty`: drops the `Self` type from the substs.
    pub fn erase_self_ty(
        tcx: TyCtxt<'tcx>,
        projection_predicate: ty::ProjectionPredicate<'tcx>,
    ) -> Self {
        // Assert there is a Self.
        projection_predicate.projection_ty.substs.type_at(0);
        Self {
            item_def_id: projection_predicate.projection_ty.item_def_id,
            substs: tcx.intern_substs(&projection_predicate.projection_ty.substs[1..]),
            ty: projection_predicate.ty,
        }
    }
}
impl<'tcx> PolyExistentialProjection<'tcx> {
    /// Binder-preserving version of `ExistentialProjection::with_self_ty`.
    pub fn with_self_ty(
        &self,
        tcx: TyCtxt<'tcx>,
        self_ty: Ty<'tcx>,
    ) -> ty::PolyProjectionPredicate<'tcx> {
        self.map_bound(|p| p.with_self_ty(tcx, self_ty))
    }
    /// The associated item's `DefId`. A `DefId` contains no bound vars, so
    /// reading it through `skip_binder` is fine.
    pub fn item_def_id(&self) -> DefId {
        self.skip_binder().item_def_id
    }
}
/// Region utilities
impl RegionKind {
    /// Is this region named by the user?
    pub fn has_name(&self) -> bool {
        match *self {
            ty::ReEarlyBound(ebr) => ebr.has_name(),
            ty::ReLateBound(_, br) => br.kind.is_named(),
            ty::ReFree(fr) => fr.bound_region.is_named(),
            ty::ReStatic => true,
            ty::RePlaceholder(placeholder) => placeholder.name.is_named(),
            ty::ReVar(..) | ty::ReEmpty(_) | ty::ReErased => false,
        }
    }
    #[inline]
    pub fn is_late_bound(&self) -> bool {
        matches!(self, ty::ReLateBound(..))
    }
    #[inline]
    pub fn is_placeholder(&self) -> bool {
        matches!(self, ty::RePlaceholder(..))
    }
    /// Is this a late-bound region whose debruijn index reaches at least
    /// `index` binders outward?
    #[inline]
    pub fn bound_at_or_above_binder(&self, index: ty::DebruijnIndex) -> bool {
        matches!(*self, ty::ReLateBound(debruijn, _) if debruijn >= index)
    }
    /// Computes the `TypeFlags` contributed by this region.
    pub fn type_flags(&self) -> TypeFlags {
        let flags = match *self {
            ty::ReVar(..) => {
                TypeFlags::HAS_FREE_REGIONS
                    | TypeFlags::HAS_FREE_LOCAL_REGIONS
                    | TypeFlags::HAS_RE_INFER
            }
            ty::RePlaceholder(..) => {
                TypeFlags::HAS_FREE_REGIONS
                    | TypeFlags::HAS_FREE_LOCAL_REGIONS
                    | TypeFlags::HAS_RE_PLACEHOLDER
            }
            ty::ReEarlyBound(..) => {
                TypeFlags::HAS_FREE_REGIONS
                    | TypeFlags::HAS_FREE_LOCAL_REGIONS
                    | TypeFlags::HAS_RE_PARAM
            }
            ty::ReFree(_) => {
                TypeFlags::HAS_FREE_REGIONS | TypeFlags::HAS_FREE_LOCAL_REGIONS
            }
            ty::ReEmpty(_) | ty::ReStatic => TypeFlags::HAS_FREE_REGIONS,
            ty::ReLateBound(..) => TypeFlags::HAS_RE_LATE_BOUND,
            ty::ReErased => TypeFlags::HAS_RE_ERASED,
        };
        debug!("type_flags({:?}) = {:?}", self, flags);
        flags
    }
    /// Given an early-bound or free region, returns the `DefId` where it was bound.
    /// For example, consider the regions in this snippet of code:
    ///
    /// ```
    /// impl<'a> Foo {
    ///      ^^ -- early bound, declared on an impl
    ///
    ///     fn bar<'b, 'c>(x: &self, y: &'b u32, z: &'c u64) where 'static: 'c
    ///            ^^  ^^     ^ anonymous, late-bound
    ///            |   early-bound, appears in where-clauses
    ///            late-bound, appears only in fn args
    ///     {..}
    /// }
    /// ```
    ///
    /// Here, `free_region_binding_scope('a)` would return the `DefId`
    /// of the impl, and for all the other highlighted regions, it
    /// would return the `DefId` of the function. In other cases (not shown), this
    /// function might return the `DefId` of a closure.
    pub fn free_region_binding_scope(&self, tcx: TyCtxt<'_>) -> DefId {
        match *self {
            ty::ReEarlyBound(br) => tcx.parent(br.def_id).unwrap(),
            ty::ReFree(fr) => fr.scope,
            _ => bug!("free_region_binding_scope invoked on inappropriate region: {:?}", self),
        }
    }
}
/// Type utilities
impl<'tcx> TyS<'tcx> {
    /// The `TyKind` describing what sort of type this is.
    #[inline(always)]
    pub fn kind(&self) -> &TyKind<'tcx> {
        &self.kind
    }
    /// The precomputed `TypeFlags` for this type.
    #[inline(always)]
    pub fn flags(&self) -> TypeFlags {
        self.flags
    }
    /// Is this the unit type `()` (i.e. an empty tuple)?
    #[inline]
    pub fn is_unit(&self) -> bool {
        match self.kind() {
            Tuple(ref tys) => tys.is_empty(),
            _ => false,
        }
    }
    #[inline]
    pub fn is_never(&self) -> bool {
        matches!(self.kind(), Never)
    }
    #[inline]
    pub fn is_primitive(&self) -> bool {
        self.kind().is_primitive()
    }
    #[inline]
    pub fn is_adt(&self) -> bool {
        matches!(self.kind(), Adt(..))
    }
    #[inline]
    pub fn is_ref(&self) -> bool {
        matches!(self.kind(), Ref(..))
    }
    /// Is this an unresolved general type inference variable?
    #[inline]
    pub fn is_ty_var(&self) -> bool {
        matches!(self.kind(), Infer(TyVar(_)))
    }
    /// Is this any kind of type inference variable?
    #[inline]
    pub fn is_ty_infer(&self) -> bool {
        matches!(self.kind(), Infer(_))
    }
    #[inline]
    pub fn is_phantom_data(&self) -> bool {
        if let Adt(def, _) = self.kind() { def.is_phantom_data() } else { false }
    }
    #[inline]
    pub fn is_bool(&self) -> bool {
        *self.kind() == Bool
    }
    /// Returns `true` if this type is a `str`.
    #[inline]
    pub fn is_str(&self) -> bool {
        *self.kind() == Str
    }
    /// Is this the type parameter with the given `index`?
    #[inline]
    pub fn is_param(&self, index: u32) -> bool {
        match self.kind() {
            ty::Param(ref data) => data.index == index,
            _ => false,
        }
    }
    /// Is this a raw pointer or reference whose pointee is a slice or `str`?
    #[inline]
    pub fn is_slice(&self) -> bool {
        match self.kind() {
            RawPtr(TypeAndMut { ty, .. }) | Ref(_, ty, _) => matches!(ty.kind(), Slice(_) | Str),
            _ => false,
        }
    }
    #[inline]
    pub fn is_array(&self) -> bool {
        matches!(self.kind(), Array(..))
    }
    /// Is this a `repr(simd)` ADT?
    #[inline]
    pub fn is_simd(&self) -> bool {
        match self.kind() {
            Adt(def, _) => def.repr.simd(),
            _ => false,
        }
    }
    /// The element type of an array, slice, or `str` (`u8` for `str`).
    /// Panics (`bug!`) on any other type.
    pub fn sequence_element_type(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
        match self.kind() {
            Array(ty, _) | Slice(ty) => ty,
            Str => tcx.mk_mach_uint(ty::UintTy::U8),
            _ => bug!("`sequence_element_type` called on non-sequence value: {}", self),
        }
    }
    /// The lane count and lane type of a `repr(simd)` ADT.
    /// Panics (`bug!`) on any other type.
    pub fn simd_size_and_type(&self, tcx: TyCtxt<'tcx>) -> (u64, Ty<'tcx>) {
        match self.kind() {
            Adt(def, substs) => {
                let variant = def.non_enum_variant();
                let f0_ty = variant.fields[0].ty(tcx, substs);
                match f0_ty.kind() {
                    Array(f0_elem_ty, f0_len) => {
                        // FIXME(repr_simd): https://github.com/rust-lang/rust/pull/78863#discussion_r522784112
                        // The way we evaluate the `N` in `[T; N]` here only works since we use
                        // `simd_size_and_type` post-monomorphization. It will probably start to ICE
                        // if we use it in generic code. See the `simd-array-trait` ui test.
                        (f0_len.eval_usize(tcx, ParamEnv::empty()) as u64, f0_elem_ty)
                    }
                    // Multi-field form: one lane per field.
                    _ => (variant.fields.len() as u64, f0_ty),
                }
            }
            _ => bug!("`simd_size_and_type` called on invalid type"),
        }
    }
    #[inline]
    pub fn is_region_ptr(&self) -> bool {
        matches!(self.kind(), Ref(..))
    }
    /// Is this a `*mut T` or `&mut T`?
    #[inline]
    pub fn is_mutable_ptr(&self) -> bool {
        matches!(
            self.kind(),
            RawPtr(TypeAndMut { mutbl: hir::Mutability::Mut, .. })
                | Ref(_, _, hir::Mutability::Mut)
        )
    }
    /// Get the mutability of the reference or `None` when not a reference
    #[inline]
    pub fn ref_mutability(&self) -> Option<hir::Mutability> {
        match self.kind() {
            Ref(_, _, mutability) => Some(*mutability),
            _ => None,
        }
    }
    #[inline]
    pub fn is_unsafe_ptr(&self) -> bool {
        matches!(self.kind(), RawPtr(_))
    }
    /// Tests if this is any kind of primitive pointer type (reference, raw pointer, fn pointer).
    #[inline]
    pub fn is_any_ptr(&self) -> bool {
        self.is_region_ptr() || self.is_unsafe_ptr() || self.is_fn_ptr()
    }
    #[inline]
    pub fn is_box(&self) -> bool {
        match self.kind() {
            Adt(def, _) => def.is_box(),
            _ => false,
        }
    }
    /// The pointee type `T` of a `Box<T>`.
    /// Panics if called on any type other than `Box<T>`.
    pub fn boxed_ty(&self) -> Ty<'tcx> {
        match self.kind() {
            Adt(def, substs) if def.is_box() => substs.type_at(0),
            _ => bug!("`boxed_ty` is called on non-box type {:?}", self),
        }
    }
    /// A scalar type is one that denotes an atomic datum, with no sub-components.
    /// (A RawPtr is scalar because it represents a non-managed pointer, so its
    /// contents are abstract to rustc.)
    #[inline]
    pub fn is_scalar(&self) -> bool {
        matches!(
            self.kind(),
            Bool | Char
                | Int(_)
                | Float(_)
                | Uint(_)
                | FnDef(..)
                | FnPtr(_)
                | RawPtr(_)
                | Infer(IntVar(_) | FloatVar(_))
        )
    }
    /// Returns `true` if this type is a floating point type.
    #[inline]
    pub fn is_floating_point(&self) -> bool {
        matches!(self.kind(), Float(_) | Infer(FloatVar(_)))
    }
    /// Is this a trait object (`dyn Trait`)?
    #[inline]
    pub fn is_trait(&self) -> bool {
        matches!(self.kind(), Dynamic(..))
    }
    #[inline]
    pub fn is_enum(&self) -> bool {
        matches!(self.kind(), Adt(adt_def, _) if adt_def.is_enum())
    }
    #[inline]
    pub fn is_union(&self) -> bool {
        matches!(self.kind(), Adt(adt_def, _) if adt_def.is_union())
    }
    #[inline]
    pub fn is_closure(&self) -> bool {
        matches!(self.kind(), Closure(..))
    }
    #[inline]
    pub fn is_generator(&self) -> bool {
        matches!(self.kind(), Generator(..))
    }
    /// Integer type, or an unresolved int inference variable (not float).
    #[inline]
    pub fn is_integral(&self) -> bool {
        matches!(self.kind(), Infer(IntVar(_)) | Int(_) | Uint(_))
    }
    #[inline]
    pub fn is_fresh_ty(&self) -> bool {
        matches!(self.kind(), Infer(FreshTy(_)))
    }
    #[inline]
    pub fn is_fresh(&self) -> bool {
        matches!(self.kind(), Infer(FreshTy(_) | FreshIntTy(_) | FreshFloatTy(_)))
    }
    #[inline]
    pub fn is_char(&self) -> bool {
        matches!(self.kind(), Char)
    }
    #[inline]
    pub fn is_numeric(&self) -> bool {
        self.is_integral() || self.is_floating_point()
    }
    #[inline]
    pub fn is_signed(&self) -> bool {
        matches!(self.kind(), Int(_))
    }
    /// Is this `isize` or `usize`?
    #[inline]
    pub fn is_ptr_sized_integral(&self) -> bool {
        matches!(self.kind(), Int(ty::IntTy::Isize) | Uint(ty::UintTy::Usize))
    }
    /// `false` for params, inference variables, and errors — types whose
    /// concrete structure is not (yet) known.
    #[inline]
    pub fn has_concrete_skeleton(&self) -> bool {
        !matches!(self.kind(), Param(_) | Infer(_) | Error(_))
    }
    /// Returns the type and mutability of `*ty`.
    ///
    /// The parameter `explicit` indicates if this is an *explicit* dereference.
    /// Some types -- notably unsafe ptrs -- can only be dereferenced explicitly.
    pub fn builtin_deref(&self, explicit: bool) -> Option<TypeAndMut<'tcx>> {
        match self.kind() {
            // `Box<T>` derefs to its pointee, immutably at the type level.
            Adt(def, _) if def.is_box() => {
                Some(TypeAndMut { ty: self.boxed_ty(), mutbl: hir::Mutability::Not })
            }
            Ref(_, ty, mutbl) => Some(TypeAndMut { ty, mutbl: *mutbl }),
            // Raw pointers deref only when the dereference is explicit.
            RawPtr(mt) if explicit => Some(*mt),
            _ => None,
        }
    }
    /// Returns the type of `ty[i]`.
    pub fn builtin_index(&self) -> Option<Ty<'tcx>> {
        match self.kind() {
            Array(ty, _) | Slice(ty) => Some(ty),
            _ => None,
        }
    }
    /// The signature of a `fn` item or `fn` pointer; a dummy signature for
    /// error types. Panics (`bug!`) on closures and non-fn types.
    pub fn fn_sig(&self, tcx: TyCtxt<'tcx>) -> PolyFnSig<'tcx> {
        match self.kind() {
            FnDef(def_id, substs) => tcx.fn_sig(*def_id).subst(tcx, substs),
            FnPtr(f) => *f,
            Error(_) => {
                // ignore errors (#54954)
                ty::Binder::dummy(FnSig::fake())
            }
            Closure(..) => bug!(
                "to get the signature of a closure, use `substs.as_closure().sig()` not `fn_sig()`",
            ),
            _ => bug!("Ty::fn_sig() called on non-fn type: {:?}", self),
        }
    }
    #[inline]
    pub fn is_fn(&self) -> bool {
        matches!(self.kind(), FnDef(..) | FnPtr(_))
    }
    #[inline]
    pub fn is_fn_ptr(&self) -> bool {
        matches!(self.kind(), FnPtr(_))
    }
    /// Is this an opaque (`impl Trait`) type?
    #[inline]
    pub fn is_impl_trait(&self) -> bool {
        matches!(self.kind(), Opaque(..))
    }
    /// The `AdtDef` if this is an ADT type, `None` otherwise.
    #[inline]
    pub fn ty_adt_def(&self) -> Option<&'tcx AdtDef> {
        match self.kind() {
            Adt(adt, _) => Some(adt),
            _ => None,
        }
    }
/// Iterates over tuple fields.
/// Panics when called on anything but a tuple.
pub fn tuple_fields(&self) -> impl DoubleEndedIterator<Item = Ty<'tcx>> {
match self.kind() {
Tuple(substs) => substs.iter().map(|field| field.expect_ty()),
_ => bug!("tuple_fields called on non-tuple"),
}
}
/// Get the `i`-th element of a tuple.
/// Panics when called on anything but a tuple.
pub fn tuple_element_ty(&self, i: usize) -> Option<Ty<'tcx>> {
match self.kind() {
Tuple(substs) => substs.iter().nth(i).map(|field| field.expect_ty()),
_ => bug!("tuple_fields called on non-tuple"),
}
}
    /// If the type contains variants, returns the valid range of variant indices.
    //
    // FIXME: This requires the optimized MIR in the case of generators.
    #[inline]
    pub fn variant_range(&self, tcx: TyCtxt<'tcx>) -> Option<Range<VariantIdx>> {
        match self.kind() {
            TyKind::Adt(adt, _) => Some(adt.variant_range()),
            TyKind::Generator(def_id, substs, _) => {
                Some(substs.as_generator().variant_range(*def_id, tcx))
            }
            _ => None,
        }
    }
    /// If the type contains variants, returns the discriminant of the variant
    /// at `variant_index`.
    /// Panics if `variant_index` is out of range.
    //
    // FIXME: This requires the optimized MIR in the case of generators.
    #[inline]
    pub fn discriminant_for_variant(
        &self,
        tcx: TyCtxt<'tcx>,
        variant_index: VariantIdx,
    ) -> Option<Discr<'tcx>> {
        match self.kind() {
            TyKind::Adt(adt, _) if adt.variants.is_empty() => {
                bug!("discriminant_for_variant called on zero variant enum");
            }
            TyKind::Adt(adt, _) if adt.is_enum() => {
                Some(adt.discriminant_for_variant(tcx, variant_index))
            }
            TyKind::Generator(def_id, substs, _) => {
                Some(substs.as_generator().discriminant_for_variant(*def_id, tcx, variant_index))
            }
            // Non-enum ADTs and all other kinds have no discriminant.
            _ => None,
        }
    }
    /// Returns the type of the discriminant of this type.
    pub fn discriminant_ty(&'tcx self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
        match self.kind() {
            ty::Adt(adt, _) if adt.is_enum() => adt.repr.discr_type().to_ty(tcx),
            ty::Generator(_, substs, _) => substs.as_generator().discr_ty(tcx),
            // Not yet known: defer via a projection onto the first associated
            // item of the `DiscriminantKind` lang-item trait.
            ty::Param(_) | ty::Projection(_) | ty::Opaque(..) | ty::Infer(ty::TyVar(_)) => {
                let assoc_items =
                    tcx.associated_items(tcx.lang_items().discriminant_kind_trait().unwrap());
                let discriminant_def_id = assoc_items.in_definition_order().next().unwrap().def_id;
                tcx.mk_projection(discriminant_def_id, tcx.mk_substs([self.into()].iter()))
            }
            // Everything else (including non-enum ADTs) gets `u8`.
            ty::Bool
            | ty::Char
            | ty::Int(_)
            | ty::Uint(_)
            | ty::Float(_)
            | ty::Adt(..)
            | ty::Foreign(_)
            | ty::Str
            | ty::Array(..)
            | ty::Slice(_)
            | ty::RawPtr(_)
            | ty::Ref(..)
            | ty::FnDef(..)
            | ty::FnPtr(..)
            | ty::Dynamic(..)
            | ty::Closure(..)
            | ty::GeneratorWitness(..)
            | ty::Never
            | ty::Tuple(_)
            | ty::Error(_)
            | ty::Infer(IntVar(_) | FloatVar(_)) => tcx.types.u8,
            ty::Bound(..)
            | ty::Placeholder(_)
            | ty::Infer(FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => {
                bug!("`discriminant_ty` applied to unexpected type: {:?}", self)
            }
        }
    }
    /// Returns the type of metadata for (potentially fat) pointers to this type.
    pub fn ptr_metadata_ty(&'tcx self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
        // FIXME: should this normalize?
        let tail = tcx.struct_tail_without_normalization(self);
        match tail.kind() {
            // Sized types
            ty::Infer(ty::IntVar(_) | ty::FloatVar(_))
            | ty::Uint(_)
            | ty::Int(_)
            | ty::Bool
            | ty::Float(_)
            | ty::FnDef(..)
            | ty::FnPtr(_)
            | ty::RawPtr(..)
            | ty::Char
            | ty::Ref(..)
            | ty::Generator(..)
            | ty::GeneratorWitness(..)
            | ty::Array(..)
            | ty::Closure(..)
            | ty::Never
            | ty::Error(_)
            | ty::Foreign(..)
            // If returned by `struct_tail_without_normalization` this is a unit struct
            // without any fields, or not a struct, and therefore is Sized.
            | ty::Adt(..)
            // If returned by `struct_tail_without_normalization` this is the empty tuple,
            // a.k.a. unit type, which is Sized
            | ty::Tuple(..) => tcx.types.unit,
            // Slices and `str`: the metadata is a `usize` (the length).
            ty::Str | ty::Slice(_) => tcx.types.usize,
            // `dyn Trait`: the `DynMetadata` lang item instantiated with the tail.
            ty::Dynamic(..) => {
                let dyn_metadata = tcx.lang_items().dyn_metadata().unwrap();
                tcx.type_of(dyn_metadata).subst(tcx, &[tail.into()])
            },
            ty::Projection(_)
            | ty::Param(_)
            | ty::Opaque(..)
            | ty::Infer(ty::TyVar(_))
            | ty::Bound(..)
            | ty::Placeholder(..)
            | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => {
                bug!("`ptr_metadata_ty` applied to unexpected type: {:?}", tail)
            }
        }
    }
    /// When we create a closure, we record its kind (i.e., what trait
    /// it implements) into its `ClosureSubsts` using a type
    /// parameter. This is kind of a phantom type, except that the
    /// most convenient thing for us to are the integral types. This
    /// function converts such a special type into the closure
    /// kind. To go the other way, use
    /// `tcx.closure_kind_ty(closure_kind)`.
    ///
    /// Note that during type checking, we use an inference variable
    /// to represent the closure kind, because it has not yet been
    /// inferred. Once upvar inference (in `rustc_typeck/src/check/upvar.rs`)
    /// is complete, that type variable will be unified.
    pub fn to_opt_closure_kind(&self) -> Option<ty::ClosureKind> {
        match self.kind() {
            // i8/i16/i32 encode Fn/FnMut/FnOnce respectively.
            Int(int_ty) => match int_ty {
                ty::IntTy::I8 => Some(ty::ClosureKind::Fn),
                ty::IntTy::I16 => Some(ty::ClosureKind::FnMut),
                ty::IntTy::I32 => Some(ty::ClosureKind::FnOnce),
                _ => bug!("cannot convert type `{:?}` to a closure kind", self),
            },
            // "Bound" types appear in canonical queries when the
            // closure type is not yet known
            Bound(..) | Infer(_) => None,
            Error(_) => Some(ty::ClosureKind::Fn),
            _ => bug!("cannot convert type `{:?}` to a closure kind", self),
        }
    }
    /// Fast path helper for testing if a type is `Sized`.
    ///
    /// Returning true means the type is known to be sized. Returning
    /// `false` means nothing -- could be sized, might not be.
    ///
    /// Note that we could never rely on the fact that a type such as `[_]` is
    /// trivially `!Sized` because we could be in a type environment with a
    /// bound such as `[_]: Copy`. A function with such a bound obviously never
    /// can be called, but that doesn't mean it shouldn't typecheck. This is why
    /// this method doesn't return `Option<bool>`.
    pub fn is_trivially_sized(&self, tcx: TyCtxt<'tcx>) -> bool {
        match self.kind() {
            ty::Infer(ty::IntVar(_) | ty::FloatVar(_))
            | ty::Uint(_)
            | ty::Int(_)
            | ty::Bool
            | ty::Float(_)
            | ty::FnDef(..)
            | ty::FnPtr(_)
            | ty::RawPtr(..)
            | ty::Char
            | ty::Ref(..)
            | ty::Generator(..)
            | ty::GeneratorWitness(..)
            | ty::Array(..)
            | ty::Closure(..)
            | ty::Never
            | ty::Error(_) => true,
            ty::Str | ty::Slice(_) | ty::Dynamic(..) | ty::Foreign(..) => false,
            // Tuples are sized iff every element is trivially sized.
            ty::Tuple(tys) => tys.iter().all(|ty| ty.expect_ty().is_trivially_sized(tcx)),
            // ADTs are sized when their sized-constraint is empty.
            ty::Adt(def, _substs) => def.sized_constraint(tcx).is_empty(),
            ty::Projection(_) | ty::Param(_) | ty::Opaque(..) => false,
            ty::Infer(ty::TyVar(_)) => false,
            ty::Bound(..)
            | ty::Placeholder(..)
            | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => {
                bug!("`is_trivially_sized` applied to unexpected type: {:?}", self)
            }
        }
    }
}
| 35.838828 | 111 | 0.594529 |
d9fffd3b9417caab796f51dd10f15f67df88c546 | 2,627 | const INPUT: &str = include_str!("../input/day08.txt");
/// The three opcodes of the hand-held console (Advent of Code 2020, day 8).
#[derive(Clone, Copy)]
enum Inst {
    Nop,
    Acc,
    Jmp,
}
/// One decoded instruction: an opcode plus its signed operand.
struct Instruction {
    pub op: Inst,
    pub nb: i32,
}
impl Instruction {
    /// Bundles an opcode with its operand.
    pub fn new(op: Inst, nb: i32) -> Self {
        Self { op, nb }
    }
}
impl From<&str> for Instruction {
    /// Parses one input line of the form `"<op> <±n>"`, e.g. `"acc +6"`.
    fn from(s: &str) -> Self {
        let mut parts = s.split(' ');
        let op = match parts.next().unwrap() {
            "nop" => Inst::Nop,
            "acc" => Inst::Acc,
            "jmp" => Inst::Jmp,
            _ => unreachable!(),
        };
        let nb = parts.next().unwrap().parse().unwrap();
        Self::new(op, nb)
    }
}
/// Minimal interpreter state: just the accumulator register.
struct VM {
    pub accumulator: i32,
}
// `VM::default()` is now provided through the standard `Default` trait
// rather than an inherent `default` method (clippy: `should_implement_trait`).
// Existing `VM::default()` call sites keep working because `Default` is in
// the prelude.
impl Default for VM {
    fn default() -> Self {
        Self { accumulator: 0 }
    }
}
impl VM {
    /// Executes `code` until it terminates or loops.
    ///
    /// Returns `true` when the program counter runs off the end of `code`
    /// (normal termination) and `false` as soon as an instruction is about
    /// to execute a second time (infinite loop). The accumulator keeps
    /// whatever value it had at that point.
    pub fn run(&mut self, code: &[Instruction]) -> bool {
        // One "already executed" flag per instruction for loop detection.
        let mut visited = vec![false; code.len()];
        let mut pc = 0;
        while pc < code.len() {
            if visited[pc] {
                return false;
            }
            let wrapped_instr = &code[pc];
            visited[pc] = true;
            match wrapped_instr.op {
                Inst::Nop => (),
                Inst::Acc => self.accumulator += wrapped_instr.nb,
                Inst::Jmp => {
                    // A jump below 0 wraps to a huge usize, which ends the
                    // loop and counts as termination; puzzle inputs never
                    // jump before the first instruction.
                    pc = (pc as i64 + wrapped_instr.nb as i64) as usize;
                    continue;
                }
            };
            pc += 1;
        }
        true
    }
}
/// Parses the puzzle input into the instruction list.
fn parse() -> Vec<Instruction> {
    // map + collect sizes the Vec up front instead of growing it push by push.
    INPUT.lines().map(Instruction::from).collect()
}
/// Runs the unmodified program once and reports the accumulator value at
/// the moment the infinite loop is first detected.
fn part_1(code: &[Instruction]) -> i32 {
    let mut vm = VM::default();
    vm.run(code);
    vm.accumulator
}
/// Flips a `Jmp` into a `Nop` and vice versa; leaves `Acc` untouched.
///
/// The previous `if`/`else` turned *any* non-`Jmp` instruction — including
/// `Acc` — into a `Jmp`; the exhaustive match makes the function safe to
/// call on any instruction.
fn part_2_invert(instr: &mut Instruction) {
    instr.op = match instr.op {
        Inst::Jmp => Inst::Nop,
        Inst::Nop => Inst::Jmp,
        Inst::Acc => Inst::Acc,
    };
}
/// Finds the single `Jmp`/`Nop` swap that makes the program terminate and
/// returns the accumulator value after the fixed program runs.
///
/// The previous version indexed with `get_unchecked_mut` on an unbounded
/// counter, which is undefined behavior once the counter passes the end of
/// the slice (i.e. whenever no swap fixes the program). Safe indexing over
/// a bounded range removes the `unsafe` entirely.
fn part_2(code: &mut [Instruction]) -> i32 {
    for i in 0..code.len() {
        if matches!(code[i].op, Inst::Jmp | Inst::Nop) {
            part_2_invert(&mut code[i]);
            let mut machine = VM::default();
            if machine.run(code) {
                return machine.accumulator;
            }
            // Undo the swap before trying the next candidate.
            part_2_invert(&mut code[i]);
        }
    }
    unreachable!("no single-instruction swap makes the program terminate")
}
/// Entry point: solves both parts and renders the answers as strings.
pub fn day08() -> (String, String) {
    let mut instructions = parse();
    // `to_string()` instead of `format!("{}", ..)` for plain stringification.
    let part_one = part_1(&instructions).to_string();
    let part_two = part_2(&mut instructions).to_string();
    (part_one, part_two)
}
| 23.04386 | 72 | 0.483822 |
1cdfc3d175a049fac528ea8f67a3bdbf3278f70a | 6,151 | //! Implementation of `trussed::Platform` for the board,
//! using the specific implementation of our `crate::traits`.
use core::time::Duration;
use crate::hal::{
peripherals::rtc::Rtc,
typestates::init_state,
};
use crate::traits::buttons::{Press, Edge};
use crate::traits::rgb_led::RgbLed;
use trussed::platform::{
ui,
reboot,
consent,
};
// translated from https://stackoverflow.com/a/2284929/2490057
/// Five-term Maclaurin approximation of sin(x):
/// x - x^3/3! + x^5/5! - x^7/7! + x^9/9!.
/// Plenty accurate over [0, pi], where it is used for LED breathing.
fn sin(x: f32) -> f32 {
    let mut sum = 0f32;
    // Running numerator: (-1)^i * x^(2i+1), and denominator: (2i+1)!.
    let mut term = x;
    let mut divisor = 1f32;
    for i in 0..5 {
        sum += term / divisor;
        term *= -1f32 * x * x;
        divisor *= ((2 * (i + 1)) * (2 * (i + 1) + 1)) as f32;
    }
    sum
}
// Used for Ctaphid.keepalive message status: records whether we are
// currently blocked waiting for a user-presence button press.
//
// The original used `static mut bool` plus `unsafe`, which is a data race
// (undefined behavior) if the flag is ever read from another context while
// being written. An `AtomicBool` with relaxed ordering provides the same
// single-flag semantics with no `unsafe` at all.
static WAITING: core::sync::atomic::AtomicBool = core::sync::atomic::AtomicBool::new(false);
/// Zero-sized handle exposing the global "waiting for user presence" flag.
pub struct UserPresenceStatus {}
impl UserPresenceStatus {
    /// Marks whether we are currently waiting for a button press.
    pub(crate) fn set_waiting(waiting: bool) {
        WAITING.store(waiting, core::sync::atomic::Ordering::Relaxed);
    }
    /// Returns whether a user-presence wait is in progress.
    pub fn waiting() -> bool {
        WAITING.load(core::sync::atomic::Ordering::Relaxed)
    }
}
/// Board-level implementation of `trussed::platform::UserInterface`:
/// buttons for user-presence checks, an RGB LED for status display, and
/// the RTC as the uptime source.
pub struct UserInterface<BUTTONS, RGB>
where
    BUTTONS: Press + Edge,
    RGB: RgbLed,
{
    rtc: Rtc<init_state::Enabled>, // enabled RTC; backs `uptime()`
    buttons: Option<BUTTONS>, // `None` when the board runs without buttons (passive NFC)
    rgb: Option<RGB>, // status LED, if the board has one
    wink: Option<core::ops::Range<Duration>>, // uptime window during which `refresh` blinks
    provisioner: bool, // provisioner firmware shows a distinct idle color
}
impl<BUTTONS, RGB> UserInterface<BUTTONS, RGB>
where
    BUTTONS: Press + Edge,
    RGB: RgbLed,
{
    /// Builds the UI from its peripherals.
    ///
    /// With the `no-buttons` feature enabled, the passed-in buttons are
    /// discarded and the UI behaves as if the board had none (user presence
    /// is then assumed; see `check_user_presence`).
    pub fn new(
        rtc: Rtc<init_state::Enabled>,
        _buttons: Option<BUTTONS>,
        rgb: Option<RGB>,
        provisioner: bool,
    ) -> Self {
        // No blink request outstanding at start.
        let wink = None;
        #[cfg(not(feature = "no-buttons"))]
        let ui = Self { rtc, buttons: _buttons, rgb, wink, provisioner };
        #[cfg(feature = "no-buttons")]
        let ui = Self { rtc, buttons: None, rgb, wink, provisioner };
        ui
    }
}
impl<BUTTONS, RGB> trussed::platform::UserInterface for UserInterface<BUTTONS,RGB>
where
    BUTTONS: Press + Edge,
    RGB: RgbLed,
{
    /// Blocks until a new button press (flagging the wait via
    /// `UserPresenceStatus`), or reports presence immediately when the board
    /// has no buttons. Both A and B held together counts as strong consent,
    /// a single press as normal consent, no press as none.
    fn check_user_presence(&mut self) -> consent::Level {
        match &mut self.buttons {
            Some(buttons) => {
                // important to read state before checking for edge,
                // since reading an edge could clear the state.
                let state = buttons.state();
                UserPresenceStatus::set_waiting(true);
                let press_result = buttons.wait_for_any_new_press();
                UserPresenceStatus::set_waiting(false);
                if press_result.is_ok() {
                    if state.a && state.b {
                        consent::Level::Strong
                    } else {
                        consent::Level::Normal
                    }
                } else {
                    consent::Level::None
                }
            }
            None => {
                // With configured with no buttons, that means Solo is operating
                // in passive NFC mode, which means user tapped to indicate presence.
                consent::Level::Normal
            }
        }
    }
    /// Maps a trussed status to an LED color (idle: green, or white for the
    /// provisioner build; processing: teal; waiting for presence: orange;
    /// error: red). Leaving the idle state also cancels any wink animation.
    fn set_status(&mut self, status: ui::Status) {
        if let Some(rgb) = &mut self.rgb {
            match status {
                ui::Status::Idle => {
                    if self.provisioner {
                        // white
                        rgb.set(0xff_ff_ff.into());
                    } else {
                        // green
                        rgb.set(0x00_ff_02.into());
                    }
                },
                ui::Status::Processing => {
                    // teal
                    rgb.set(0x00_ff_5a.into());
                }
                ui::Status::WaitingForUserPresence => {
                    // orange
                    rgb.set(0xff_7e_00.into());
                },
                ui::Status::Error => {
                    // Red
                    rgb.set(0xff_00_00.into());
                },
            }
        }
        // Abort winking if the device is no longer idle
        if status != ui::Status::Idle {
            self.wink = None;
        }
    }
    /// Drives the LED animation; intended to be called periodically.
    /// While a wink is active: 250 ms white / 250 ms off. Otherwise (when
    /// buttons exist): a sine "breathing" pulse — blue while a button is
    /// held, green when not.
    fn refresh(&mut self) {
        if self.rgb.is_none() {
            return;
        }
        if let Some(wink) = self.wink.clone() {
            let time = self.uptime();
            if wink.contains(&time) {
                // 250 ms white, 250 ms off
                let color = if (time - wink.start).as_millis() % 500 < 250 {
                    0xff_ff_ff
                } else {
                    0x00_00_00
                };
                self.rgb.as_mut().unwrap().set(color.into());
                return;
            } else {
                // Wink window has elapsed; fall through to normal animation.
                self.wink = None;
            }
        }
        if self.buttons.is_some() {
            // 1. Get time & pick a period (here 4096).
            // 2. Map it to a value between 0 and pi.
            // 3. Calculate sine and map to amplitude between 0 and 255.
            let time = (self.uptime().as_millis()) % 4096;
            let amplitude = (sin((time as f32) * 3.14159265f32/4096f32) * 255f32) as u32;
            let state = self.buttons.as_mut().unwrap().state();
            let color = if state.a || state.b || state.middle {
                // Use blue if button is pressed.
                0x00_00_01 | (amplitude << 0)
            } else {
                // Use green if no button is pressed.
                0x00_00_01 | (amplitude << 8)
            };
            // use logging::hex::*;
            // use logging::hex;
            // crate::logger::info!("time: {}", time).ok();
            // crate::logger::info!("amp: {}", hex!(amplitude)).ok();
            // crate::logger::info!("color: {}", hex!(color)).ok();
            self.rgb.as_mut().unwrap().set(color.into());
        }
    }
    /// Monotonic time since boot, read from the RTC.
    fn uptime(&mut self) -> Duration {
        self.rtc.uptime()
    }
    // delete this function after trussed is updated
    fn reboot(&mut self, _to: reboot::To) -> ! {
        panic!("this should no longer be called.");
    }
    /// Starts a blink ("wink") animation lasting `duration`, rendered by
    /// `refresh`; the LED turns white immediately.
    fn wink(&mut self, duration: Duration) {
        let time = self.uptime();
        self.wink = Some(time..time + duration);
        self.rgb.as_mut().unwrap().set(0xff_ff_ff.into());
    }
}
| 29.290476 | 89 | 0.492278 |
db053080c0e4d9ab0c805f6f02ac22e9a94e8948 | 11,767 | #[allow(unused_imports)]
use super::preprocessor;
// Data directives: `set`/`DB` emit to the data segment (hex normalized to
// decimal, mnemonics lowercased), duplicate labels are rejected, and
// bracketed initializer lists are accepted.
#[test]
fn test_data_directives() {
    let mut ctx = crate::util::preprocessor_util::Context::default();
    let mut out = crate::util::preprocessor_util::Output::default();
    let p = preprocessor::PreprocessorParser::new();
    let o = p.parse(&mut ctx, &mut out, "set 0x45 set 6");
    assert!(o.is_ok());
    assert_eq!(out.data.len(), 2);
    assert_eq!(out.code.len(), 0);
    assert_eq!(out.data[0], "set 69");
    assert_eq!(out.data[1], "set 6");
    out.clear();
    let o = p.parse(&mut ctx, &mut out, "test: DB 5");
    assert!(o.is_ok());
    assert_eq!(out.data.len(), 1);
    assert_eq!(out.code.len(), 0);
    assert_eq!(out.data[0], "db 5");
    out.clear();
    // Same label defined twice -> error.
    let o = p.parse(&mut ctx, &mut out, "test: DB 5 test: DB 0b1110");
    assert!(o.is_err());
    out.clear();
    ctx.clear();
    let o = p.parse(&mut ctx, &mut out, "test: DB [5]");
    assert!(o.is_ok());
    out.clear();
    ctx.clear();
    let o = p.parse(&mut ctx, &mut out, "test: DB [5,6]");
    assert!(o.is_ok());
}
// Macros: a numeric argument expands into the data segment; a non-numeric
// argument where a number is required is an error.
#[test]
fn test_macro_directives() {
    let mut ctx = crate::util::preprocessor_util::Context::default();
    let mut out = crate::util::preprocessor_util::Output::default();
    let p = crate::preprocessor::preprocessor::PreprocessorParser::new();
    let o = p.parse(
        &mut ctx,
        &mut out,
        "macro mcname (a) ->DB [a]<- \nmcname(5)",
    );
    assert!(o.is_ok());
    assert_eq!(out.data.len(), 1);
    out.clear();
    ctx.clear();
    let o = p.parse(
        &mut ctx,
        &mut out,
        "macro mcname (a) -> DB [a] <-\nmcname(hello)",
    );
    assert!(o.is_err());
    assert_eq!(out.data.len(), 0);
}
// Control opcodes: STC/CMC/HLT accepted (case-insensitive); ESC/LOCK rejected.
#[test]
fn test_control_opcode() {
    let mut ctx = crate::util::preprocessor_util::Context::default();
    let mut out = crate::util::preprocessor_util::Output::default();
    let p = crate::preprocessor::preprocessor::PreprocessorParser::new();
    let o = p.parse(&mut ctx, &mut out, "stc CMC HLT");
    assert!(o.is_ok());
    assert_eq!(out.code.len(), 3);
    out.clear();
    ctx.clear();
    let o = p.parse(&mut ctx, &mut out, "ESC LOCK");
    assert!(o.is_err());
}
// Jumps to (forward-declared) labels parse; RET/INTO outside a procedure and
// jumping to a data label are errors.
#[test]
fn test_transfer_opcode() {
    let mut ctx = crate::util::preprocessor_util::Context::default();
    let mut out = crate::util::preprocessor_util::Output::default();
    let p = crate::preprocessor::preprocessor::PreprocessorParser::new();
    let o = p.parse(&mut ctx, &mut out, "JMP _test JGE go");
    assert!(o.is_ok());
    assert_eq!(out.code.len(), 2);
    out.clear();
    ctx.clear();
    let o = p.parse(&mut ctx, &mut out, "RET INTO");
    assert!(o.is_err());
    out.clear();
    ctx.clear();
    let o = p.parse(&mut ctx,&mut out,"fault:DB 0 JMP fault");
    assert!(o.is_err());
}
// Procedure definitions get an implicit trailing RET; CALL adds one opcode.
#[test]
fn test_procedures() {
    let mut ctx = crate::util::preprocessor_util::Context::default();
    let mut out = crate::util::preprocessor_util::Output::default();
    let p = crate::preprocessor::preprocessor::PreprocessorParser::new();
    let o = p.parse(&mut ctx, &mut out, "def f { STI CMC }");
    assert!(o.is_ok());
    assert_eq!(out.code.len(), 3); // One extra for added ret
    out.clear();
    ctx.clear();
    let o = p.parse(&mut ctx, &mut out, "def f { STI CMC } CALL f");
    assert!(o.is_ok());
    assert_eq!(out.code.len(), 4); // One extra for added ret
}
// OFFSET of a data label resolves to its data-segment position; OFFSET of a
// procedure name is an error.
#[test]
fn test_offset() {
    let mut ctx = crate::util::preprocessor_util::Context::default();
    let mut out = crate::util::preprocessor_util::Output::default();
    let p = crate::preprocessor::preprocessor::PreprocessorParser::new();
    let o = p.parse(&mut ctx, &mut out, "DB [0,5] name: DB [2] DB OFFSET name");
    assert!(o.is_ok());
    assert_eq!(out.data.len(), 3);
    assert_eq!(out.data[2], "db 5");
    out.clear();
    ctx.clear();
    let o = p.parse(&mut ctx, &mut out, "def f { STI CMC } DB offset f");
    assert!(o.is_err());
}
// NOT accepts registers, sized memory operands, and sized data labels.
#[test]
fn test_not() {
    let mut ctx = crate::util::preprocessor_util::Context::default();
    let mut out = crate::util::preprocessor_util::Output::default();
    let p = crate::preprocessor::preprocessor::PreprocessorParser::new();
    let o = p.parse(&mut ctx, &mut out, "NOT AX NOT byte [BX]");
    assert!(o.is_ok());
    assert_eq!(out.code.len(), 2);
    let o = p.parse(&mut ctx, &mut out, "NOT byte [BP,SI] not word [bx,DI,-6]");
    assert!(o.is_ok());
    out.clear();
    ctx.clear();
    let o = p.parse(&mut ctx, &mut out, "l:DB 5 NOT BYTE l");
    assert!(o.is_ok());
    assert_eq!(out.code.len(), 1);
}
// AND/OR/XOR: reg-reg, reg-mem, mem-reg, reg-imm and label-imm parse;
// unsized memory and mem-mem combinations are rejected.
#[test]
fn test_binary_logical() {
    let mut ctx = crate::util::preprocessor_util::Context::default();
    let mut out = crate::util::preprocessor_util::Output::default();
    let p = crate::preprocessor::preprocessor::PreprocessorParser::new();
    let o = p.parse(
        &mut ctx,
        &mut out,
        "AND AX,CX OR AL, BYTE [BX] XOR WORD [BP], AX",
    );
    assert!(o.is_ok());
    assert_eq!(out.code.len(), 3);
    let o = p.parse(&mut ctx, &mut out, "OR AX,0x52");
    assert!(o.is_ok());
    let o = p.parse(&mut ctx, &mut out, "l:DB 8 OR BYTE l,0x52");
    assert!(o.is_ok());
    let o = p.parse(&mut ctx, &mut out, "OR AL,[BX]");
    assert!(o.is_err());
    let o = p.parse(&mut ctx, &mut out, "OR [BP],[BX]");
    assert!(o.is_err());
}
// SAL/SHL/SAR/SHR with immediate or CL counts, on registers, memory and labels.
#[test]
fn test_shift_rotate() {
    let mut ctx = crate::util::preprocessor_util::Context::default();
    let mut out = crate::util::preprocessor_util::Output::default();
    let p = crate::preprocessor::preprocessor::PreprocessorParser::new();
    let o = p.parse(
        &mut ctx,
        &mut out,
        "l:DB 6 SAL AX,5 SHL CX,CL SAR byte [BP],CL SHR BYTE l, 0b1101",
    );
    assert!(o.is_ok());
    assert_eq!(out.code.len(), 4);
}
// `print` debugging directives: flags, registers, and several memory range
// syntaxes; an out-of-range memory window is an error.
#[test]
fn test_print() {
    let mut ctx = crate::util::preprocessor_util::Context::default();
    let mut out = crate::util::preprocessor_util::Output::default();
    let p = crate::preprocessor::preprocessor::PreprocessorParser::new();
    let o = p.parse(
        &mut ctx,
        &mut out,
        "l:DB [7,0] _t:DB 0 print flags print reg print mem 0x0FFFF-> 0x100FF print mem 0xFFF00:50 print mem : offset _t",
    );
    assert!(o.is_ok());
    assert_eq!(out.data.len(), 2);
    assert_eq!(out.code.len(), 5);
    let o = p.parse(&mut ctx, &mut out, "print mem 0xFFFFF:50");
    assert!(o.is_err());
}
// Arithmetic group: ADD with negative immediate, IMUL/DIV, and the
// adjust/convert opcodes DAA/CBW.
#[test]
fn test_arithmetic() {
    let mut ctx = crate::util::preprocessor_util::Context::default();
    let mut out = crate::util::preprocessor_util::Output::default();
    let p = crate::preprocessor::preprocessor::PreprocessorParser::new();
    let o = p.parse(
        &mut ctx,
        &mut out,
        "l:DB [7,0] _t:DB 0 ADD AX , -5 IMUL CX DIV byte l DAA CBW",
    );
    assert!(o.is_ok());
    assert_eq!(out.data.len(), 2);
    assert_eq!(out.code.len(), 5);
}
// String instructions: MOVS/LODS with optional REP/REPZ prefixes; bare REP
// on CMPS is rejected while REPZ is accepted.
#[test]
fn test_string_instructions() {
    let mut ctx = crate::util::preprocessor_util::Context::default();
    let mut out = crate::util::preprocessor_util::Output::default();
    let p = crate::preprocessor::preprocessor::PreprocessorParser::new();
    let o = p.parse(&mut ctx, &mut out, "MOVS byte REP LODS word");
    assert!(o.is_ok());
    assert_eq!(out.code.len(), 2);
    let o = p.parse(&mut ctx, &mut out, "MOVS byte REP LODS word rep cmps byte");
    assert!(o.is_err());
    let o = p.parse(&mut ctx, &mut out, "REPZ cmps byte");
    assert!(o.is_ok());
}
// Operand-less data-transfer opcodes: LAHF/POPF/XLAT.
#[test]
fn test_data_transfer_unary() {
    let mut ctx = crate::util::preprocessor_util::Context::default();
    let mut out = crate::util::preprocessor_util::Output::default();
    let p = crate::preprocessor::preprocessor::PreprocessorParser::new();
    let o = p.parse(&mut ctx, &mut out, "LAHF popf xlat");
    assert!(o.is_ok());
    assert_eq!(out.code.len(), 3);
}
// LEA with label and indexed operands; bare LES/LDS (missing operands) fail.
#[test]
fn test_data_transfer_load() {
    let mut ctx = crate::util::preprocessor_util::Context::default();
    let mut out = crate::util::preprocessor_util::Output::default();
    let p = crate::preprocessor::preprocessor::PreprocessorParser::new();
    let o = p.parse(
        &mut ctx,
        &mut out,
        "l:DW 0 LEA AX , word l lea dx , word [SI,7]",
    );
    assert!(o.is_ok());
    assert_eq!(out.code.len(), 2);
    let o = p.parse(&mut ctx, &mut out, "LES");
    assert!(o.is_err());
    let o = p.parse(&mut ctx, &mut out, "LDS");
    assert!(o.is_err());
}
// PUSH allows CS; POP allows ES and labels but rejects CS.
#[test]
fn test_data_transfer_push_pop() {
    let mut ctx = crate::util::preprocessor_util::Context::default();
    let mut out = crate::util::preprocessor_util::Output::default();
    let p = crate::preprocessor::preprocessor::PreprocessorParser::new();
    let o = p.parse(&mut ctx, &mut out, "l:DW [5,7] push CS push word l");
    assert!(o.is_ok());
    assert_eq!(out.code.len(), 2);
    out.clear();
    ctx.clear();
    let o = p.parse(&mut ctx, &mut out, "l:DW [5,7] pop ES pop word l");
    assert!(o.is_ok());
    assert_eq!(out.code.len(), 2);
    out.clear();
    ctx.clear();
    let o = p.parse(&mut ctx, &mut out, "pop CS");
    assert!(o.is_err());
}
// XCHG with registers and labels; IN/OUT with these port forms are rejected.
#[test]
fn test_data_transfer_xchg_in_out() {
    let mut ctx = crate::util::preprocessor_util::Context::default();
    let mut out = crate::util::preprocessor_util::Output::default();
    let p = crate::preprocessor::preprocessor::PreprocessorParser::new();
    let o = p.parse(&mut ctx, &mut out, "l:DW 5 xchg AL,CL xchg word l, si");
    assert!(o.is_ok());
    assert_eq!(out.code.len(), 2);
    out.clear();
    ctx.clear();
    let o = p.parse(&mut ctx, &mut out, "in AX,0x51");
    assert!(o.is_err());
    let o = p.parse(&mut ctx, &mut out, "out 0x51,AX");
    assert!(o.is_err());
}
// MOV between registers, from labels, and immediate-to-label.
#[test]
fn test_data_transfer_mov() {
    let mut ctx = crate::util::preprocessor_util::Context::default();
    let mut out = crate::util::preprocessor_util::Output::default();
    let p = crate::preprocessor::preprocessor::PreprocessorParser::new();
    let o = p.parse(
        &mut ctx,
        &mut out,
        "l:DW 5 mov AX,CX mov DL,CL mov DX,word l",
    );
    assert!(o.is_ok());
    assert_eq!(out.code.len(), 3);
    out.clear();
    ctx.clear();
    let o = p.parse(&mut ctx, &mut out, "l:DW 5 mov word l, 0x5FFF");
    assert!(o.is_ok());
    assert_eq!(out.code.len(), 1);
    out.clear();
    ctx.clear();
}
// Macro expansion rules: mutual/self recursion is an error, a macro calling
// itself through an argument is an error, but calling another (non-cyclic)
// macro directly or via an argument is allowed.
#[test]
fn test_macro() {
    let mut ctx = crate::util::preprocessor_util::Context::default();
    let mut out = crate::util::preprocessor_util::Output::default();
    let p = crate::preprocessor::preprocessor::PreprocessorParser::new();
    let o = p.parse(
        &mut ctx,
        &mut out,
        "MACRO a(_)-> c(_) <- MACRO b(_) ->c(_)<- macro c(_) ->a(_)b(_)<- c(_)",
    );
    assert!(o.is_err());
    let o = p.parse(
        &mut ctx,
        &mut out,
        "MACRO a(q)-> ADD AX,q <- MACRO b(k,q) -> k (q)<- b(b,5)",
    );
    assert!(o.is_err());
    let o = p.parse(
        &mut ctx,
        &mut out,
        "MACRO a(q)-> ADD AX,q <- MACRO b(k) ->a(k)<- b(5)",
    );
    assert!(o.is_ok());
    let o = p.parse(
        &mut ctx,
        &mut out,
        "MACRO a(q)-> ADD AX,q <- MACRO b(k,q) -> k (q)<- b(a,5)",
    );
    assert!(o.is_ok());
}
// Segment-override prefixes (ES/SS/DS/CS) on memory operands all parse.
// Removed a leftover debug `println!("{:?}", o)` that no other test has and
// that only added noise to test output.
#[test]
fn test_segment_override() {
    let mut ctx = crate::util::preprocessor_util::Context::default();
    let mut out = crate::util::preprocessor_util::Output::default();
    let p = crate::preprocessor::preprocessor::PreprocessorParser::new();
    let o = p.parse(&mut ctx, &mut out, "mov ax,word es[bp]");
    assert!(o.is_ok());
    let o = p.parse(&mut ctx, &mut out, "mov ax,word ss[5]");
    assert!(o.is_ok());
    let o = p.parse(&mut ctx, &mut out, "mov ax,word ds[bp,SI]");
    assert!(o.is_ok());
    let o = p.parse(&mut ctx, &mut out, "mov ax,word CS[bx,di,6]");
    assert!(o.is_ok());
}
| 32.960784 | 122 | 0.58885 |
3889d305e00490ac4c712de8d147751f83ffe468 | 600 | use core::arch::asm;
// Syscall ids understood by the kernel (64 = write, 93 = exit; these match
// the RISC-V Linux numbering convention).
const SYSCALL_WRITE: usize = 64;
const SYSCALL_EXIT: usize = 93;
/// Performs a RISC-V `ecall` with up to three arguments.
///
/// Arguments go in a0-a2 (x10-x12) and the syscall id in a7 (x17); the
/// kernel's return value comes back in a0. The asm block is `unsafe`
/// because it traps into the kernel and clobbers a0.
fn syscall(id: usize, args: [usize; 3]) -> isize {
    let mut ret: isize;
    unsafe {
        asm!(
            "ecall",
            // a0 carries args[0] in and receives the return value.
            inlateout("x10") args[0] => ret,
            in("x11") args[1],
            in("x12") args[2],
            in("x17") id
        );
    }
    ret
}
/// Writes `buffer` to file descriptor `fd` via the `write` syscall; returns
/// the kernel's result (bytes written, or a negative error code).
pub fn sys_write(fd: usize, buffer: &[u8]) -> isize {
    syscall(SYSCALL_WRITE, [fd, buffer.as_ptr() as usize, buffer.len()])
}
/// Requests task exit with `exit_code` via the `exit` syscall. (Returns
/// `isize` only for signature uniformity; the kernel is expected not to
/// return from a successful exit.)
pub fn sys_exit(exit_code: i32) -> isize {
    syscall(SYSCALL_EXIT, [exit_code as usize, 0, 0])
}
| 22.222222 | 72 | 0.538333 |
08262d972c03c74d53d6da636284233b1da44b67 | 5,395 | extern crate cc;
extern crate bindgen;
use std::env;
use std::fs;
use std::path::PathBuf;
/// Emits cargo directives to dynamically link `name` on Windows targets.
/// For bundled libraries on `*-windows-gnu`, also adds a native search path
/// under the manifest directory named after the target architecture.
fn link(name: &str, bundled: bool) {
    let triple = std::env::var("TARGET").unwrap();
    let parts: Vec<&str> = triple.split('-').collect();
    if parts.get(2) != Some(&"windows") {
        return;
    }
    println!("cargo:rustc-link-lib=dylib={}", name);
    if bundled && parts.get(3) == Some(&"gnu") {
        let dir = std::env::var("CARGO_MANIFEST_DIR").unwrap();
        println!("cargo:rustc-link-search=native={}/{}", dir, parts[0]);
    }
}
/// Panics with a helpful message when the submodule directory `name` exists
/// but contains no entries (i.e. git submodules were never fetched).
fn fail_on_empty_directory(name: &str) {
    let entry_count = fs::read_dir(name).unwrap().count();
    if entry_count > 0 {
        return;
    }
    println!(
        "The `{}` directory is empty, did you forget to pull the submodules?",
        name
    );
    println!("Try `git submodule update --init --recursive`");
    panic!();
}
/// Generates Rust FFI bindings for RocksDB's C API into `$OUT_DIR/bindings.rs`.
fn bindgen_rocksdb() {
    let bindings = bindgen::Builder::default()
        .header("rocksdb/include/rocksdb/c.h")
        .blacklist_type("max_align_t") // https://github.com/rust-lang-nursery/rust-bindgen/issues/550
        .ctypes_prefix("libc")
        .generate()
        .expect("unable to generate rocksdb bindings");
    let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
    bindings
        .write_to_file(out_path.join("bindings.rs"))
        .expect("unable to write rocksdb bindings");
}
/// Compiles the bundled RocksDB sources (C++11, snappy support enabled) into
/// `librocksdb.a`, applying per-platform defines and swapping POSIX sources
/// for their Windows counterparts when targeting Windows.
fn build_rocksdb() {
    let mut config = cc::Build::new();
    config.include("rocksdb/include/");
    config.include("rocksdb/");
    config.include("rocksdb/third-party/gtest-1.7.0/fused-src/");
    config.include("snappy/");
    config.include(".");
    config.define("NDEBUG", Some("1"));
    config.define("SNAPPY", Some("1"));
    // The list of sources to compile is maintained in a checked-in text file.
    let mut lib_sources = include_str!("rocksdb_lib_sources.txt")
        .split(" ")
        .collect::<Vec<&'static str>>();
    // We have a pregenerated a version of build_version.cc in the local directory
    lib_sources = lib_sources
        .iter()
        .cloned()
        .filter(|file| *file != "util/build_version.cc")
        .collect::<Vec<&'static str>>();
    if cfg!(target_os = "macos") {
        config.define("OS_MACOSX", Some("1"));
        config.define("ROCKSDB_PLATFORM_POSIX", Some("1"));
        config.define("ROCKSDB_LIB_IO_POSIX", Some("1"));
    }
    if cfg!(target_os = "linux") {
        config.define("OS_LINUX", Some("1"));
        config.define("ROCKSDB_PLATFORM_POSIX", Some("1"));
        config.define("ROCKSDB_LIB_IO_POSIX", Some("1"));
        // COMMON_FLAGS="$COMMON_FLAGS -fno-builtin-memcmp"
    }
    if cfg!(target_os = "freebsd") {
        config.define("OS_FREEBSD", Some("1"));
        config.define("ROCKSDB_PLATFORM_POSIX", Some("1"));
        config.define("ROCKSDB_LIB_IO_POSIX", Some("1"));
    }
    if cfg!(windows) {
        link("rpcrt4", false);
        config.define("OS_WIN", Some("1"));
        // Remove POSIX-specific sources
        lib_sources = lib_sources
            .iter()
            .cloned()
            .filter(|file| match *file {
                "port/port_posix.cc" |
                "env/env_posix.cc" |
                "env/io_posix.cc" => false,
                _ => true,
            })
            .collect::<Vec<&'static str>>();
        // Add Windows-specific sources
        lib_sources.push("port/win/port_win.cc");
        lib_sources.push("port/win/env_win.cc");
        lib_sources.push("port/win/env_default.cc");
        lib_sources.push("port/win/win_logger.cc");
        lib_sources.push("port/win/io_win.cc");
        lib_sources.push("port/win/win_thread.cc");
    }
    if cfg!(target_env = "msvc") {
        config.flag("-EHsc");
    } else {
        config.flag("-std=c++11");
        // this was breaking the build on travis due to
        // > 4mb of warnings emitted.
        config.flag("-Wno-unused-parameter");
    }
    for file in lib_sources {
        let file = "rocksdb/".to_string() + file;
        config.file(&file);
    }
    config.file("build_version.cc");
    config.cpp(true);
    config.compile("librocksdb.a");
}
/// Compiles the bundled snappy sources (C++11 in release mode) into `libsnappy.a`.
fn build_snappy() {
    let mut config = cc::Build::new();
    config.include("snappy/");
    config.include(".");
    config.define("NDEBUG", Some("1"));
    if cfg!(target_env = "msvc") {
        config.flag("-EHsc");
    } else {
        config.flag("-std=c++11");
    }
    config.file("snappy/snappy.cc");
    config.file("snappy/snappy-sinksource.cc");
    config.file("snappy/snappy-c.cc");
    config.cpp(true);
    config.compile("libsnappy.a");
}
/// If `{LIB}_LIB_DIR` is set, emits link directives for a system copy of the
/// library (static when `{LIB}_STATIC` is also set, otherwise dynamic) and
/// returns `true`; returns `false` so the caller can build from source.
fn try_to_find_and_link_lib(lib_name: &str) -> bool {
    let lib_dir = match env::var(format!("{}_LIB_DIR", lib_name)) {
        Ok(dir) => dir,
        Err(_) => return false,
    };
    println!("cargo:rustc-link-search=native={}", lib_dir);
    let mode = if env::var_os(format!("{}_STATIC", lib_name)).is_some() {
        "static"
    } else {
        "dylib"
    };
    println!("cargo:rustc-link-lib={}={}", mode, lib_name.to_lowercase());
    true
}
/// Build-script entry point: verify the submodules were fetched, regenerate
/// the RocksDB bindings, then link system copies of rocksdb/snappy when the
/// corresponding `*_LIB_DIR` env vars are set, else build the bundled sources.
fn main() {
    println!("cargo:rerun-if-changed=build.rs");
    println!("cargo:rerun-if-changed=rocksdb/");
    println!("cargo:rerun-if-changed=snappy/");
    fail_on_empty_directory("rocksdb");
    fail_on_empty_directory("snappy");
    bindgen_rocksdb();
    if !try_to_find_and_link_lib("ROCKSDB") {
        build_rocksdb();
    }
    if !try_to_find_and_link_lib("SNAPPY") {
        build_snappy();
    }
}
26edeebb241f28181a65f188ca8e8f3cc2dae6f8 | 7,323 | use std::collections::HashMap;
use geom::Percent;
use map_gui::load::MapLoader;
use map_gui::tools::{open_browser, PopupMsg};
use map_model::PermanentMapEdits;
use widgetry::{DrawBaselayer, EventCtx, GfxCtx, Key, Line, Outcome, Panel, State, Text, Widget};
use crate::app::{App, Transition};
use crate::edit::apply_map_edits;
use crate::sandbox::{GameplayMode, SandboxMode};
/// Screen listing community map-edit proposals, with one tab per proposal.
pub struct Proposals {
    panel: Panel,
    // Proposal name -> its edits, loaded from system/proposals.
    proposals: HashMap<String, PermanentMapEdits>,
    // Name of the currently expanded proposal tab, if any.
    current: Option<String>,
}
impl Proposals {
    /// Builds the proposals screen. `current` names the proposal whose tab is
    /// expanded (description, optional write-up link, launch button); `None`
    /// shows just the tab row.
    pub fn new(ctx: &mut EventCtx, app: &App, current: Option<String>) -> Box<dyn State<App>> {
        let mut proposals = HashMap::new();
        let mut tab_buttons = Vec::new();
        let mut current_tab_rows = Vec::new();
        // If a proposal has fallen out of date, it'll be skipped with an error logged. Since these
        // are under version control, much more likely to notice when they break (or we could add a
        // step to data/regen.sh).
        for (name, edits) in
            abstio::load_all_objects::<PermanentMapEdits>(abstio::path("system/proposals"))
        {
            if current == Some(name.clone()) {
                // Expanded tab: first description line as a heading, the rest as body text.
                let mut txt = Text::new();
                txt.add_line(Line(&edits.proposal_description[0]).small_heading());
                for l in edits.proposal_description.iter().skip(1) {
                    txt.add_line(l);
                }
                current_tab_rows.push(
                    txt.wrap_to_pct(ctx, 70)
                        .into_widget(ctx)
                        .margin_below(15)
                        .margin_above(15),
                );
                if edits.proposal_link.is_some() {
                    current_tab_rows.push(
                        ctx.style()
                            .btn_plain
                            .btn()
                            .label_underlined_text("Read detailed write-up")
                            .build_def(ctx)
                            .margin_below(10),
                    );
                }
                current_tab_rows.push(
                    ctx.style()
                        .btn_solid_primary
                        .text("Try out this proposal")
                        .build_def(ctx),
                );
                // The selected tab's button is shown disabled.
                tab_buttons.push(
                    ctx.style()
                        .btn_tab
                        .text(&edits.proposal_description[0])
                        .disabled(true)
                        .build_def(ctx)
                        .margin_below(10),
                );
            } else {
                // Clicking an unselected tab (named after the proposal) re-opens
                // this screen with that proposal expanded; see `event`.
                tab_buttons.push(
                    ctx.style()
                        .btn_outline
                        .text(&edits.proposal_description[0])
                        .no_tooltip()
                        .build_widget(ctx, &name)
                        .margin_below(10),
                );
            }
            proposals.insert(name, edits);
        }
        let header = Widget::row(vec![
            ctx.style()
                .btn_back("Home")
                .hotkey(Key::Escape)
                .build_widget(ctx, "back")
                .align_bottom(),
            {
                let mut txt = Text::from(Line("A/B STREET").display_title());
                txt.add_line(Line("PROPOSALS").big_heading_styled());
                txt.into_widget(ctx).centered_horiz().fill_width()
            },
        ]);
        let body = Widget::col(vec![
            {
                let mut txt =
                    Text::from("These are proposed changes to Seattle made by community members.");
                txt.add_line("Contact dabreegster@gmail.com to add your idea here!");
                txt.into_widget(ctx).centered_horiz()
            },
            Widget::custom_row(tab_buttons)
                .flex_wrap(ctx, Percent::int(80))
                .margin_above(60),
            Widget::col(current_tab_rows),
        ])
        .bg(app.cs.panel_bg)
        .padding(16);
        Box::new(Proposals {
            proposals,
            panel: Panel::new(Widget::custom_col(vec![Widget::col(vec![header, body])]))
                .exact_size_percent(90, 85)
                .build_custom(ctx),
            current,
        })
    }
}
impl State<App> for Proposals {
    /// Handles clicks: go back, launch the expanded proposal, open its
    /// write-up in a browser, or switch to another proposal's tab.
    fn event(&mut self, ctx: &mut EventCtx, app: &mut App) -> Transition {
        match self.panel.event(ctx) {
            Outcome::Clicked(x) => match x.as_ref() {
                "back" => {
                    return Transition::Pop;
                }
                "Try out this proposal" => {
                    return launch(
                        ctx,
                        app,
                        self.proposals[self.current.as_ref().unwrap()].clone(),
                    );
                }
                "Read detailed write-up" => {
                    open_browser(
                        self.proposals[self.current.as_ref().unwrap()]
                            .proposal_link
                            .clone()
                            .unwrap(),
                    );
                }
                // Any other click is a tab button, named after its proposal.
                x => {
                    return Transition::Replace(Proposals::new(ctx, app, Some(x.to_string())));
                }
            },
            _ => {}
        }
        Transition::Keep
    }
    fn draw_baselayer(&self) -> DrawBaselayer {
        DrawBaselayer::Custom
    }
    fn draw(&self, g: &mut GfxCtx, app: &App) {
        g.clear(app.cs.dialog_bg);
        self.panel.draw(g);
    }
}
/// Loads the proposal's map (on native builds, prompting to download it if
/// missing), applies its edits, and jumps into the sandbox with the map's
/// default scenario. Shows a popup instead if the edits fail to apply.
fn launch(ctx: &mut EventCtx, app: &App, edits: PermanentMapEdits) -> Transition {
    #[cfg(not(target_arch = "wasm32"))]
    {
        if !abstio::file_exists(edits.map_name.path()) {
            return map_gui::tools::prompt_to_download_missing_data(ctx, edits.map_name.clone());
        }
    }
    Transition::Push(MapLoader::new(
        ctx,
        app,
        edits.map_name.clone(),
        Box::new(move |ctx, app| {
            // Apply edits before setting up the sandbox, for simplicity
            let maybe_err = ctx.loading_screen("apply edits", |ctx, mut timer| {
                match edits.to_edits(&app.primary.map) {
                    Ok(edits) => {
                        apply_map_edits(ctx, app, edits);
                        app.primary
                            .map
                            .recalculate_pathfinding_after_edits(&mut timer);
                        None
                    }
                    Err(err) => Some(err),
                }
            });
            if let Some(err) = maybe_err {
                Transition::Replace(PopupMsg::new(
                    ctx,
                    "Can't load proposal",
                    vec![err.to_string()],
                ))
            } else {
                // Highlight the applied edits as a map layer.
                app.primary.layer = Some(Box::new(crate::layer::map::Static::edits(ctx, app)));
                Transition::Replace(SandboxMode::simple_new(
                    app,
                    GameplayMode::PlayScenario(
                        app.primary.map.get_name().clone(),
                        crate::pregame::default_scenario_for_map(app.primary.map.get_name()),
                        Vec::new(),
                    ),
                ))
            }
        }),
    ))
}
71f5546dbbc9d26e255e37e799468305c6531b32 | 2,268 | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
#![forbid(unsafe_code)]
use crate::account_address::AccountAddress;
use crate::byte_array::ByteArray;
use failure::prelude::*;
use libra_crypto::{ed25519::Ed25519Signature, HashValue};
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
/// Struct that will be persisted on chain to store the information of the current block.
///
/// The flow will look like following:
/// 1. The executor will pass this struct to VM at the end of a block proposal.
/// 2. The VM will use this struct to create a special system transaction that will modify the on
/// chain resource that represents the information of the current block. This transaction can't
/// be emitted by regular users and is generated by each of the validators on the fly. Such
/// transaction will be executed before all of the user-submitted transactions in the blocks.
/// 3. Once that special resource is modified, the other user transactions can read the consensus
/// info by calling into the read method of that resource, which would thus give users the
/// information such as the current leader.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct BlockMetadata {
    // Hash identifying the block this metadata describes.
    id: HashValue,
    // Block timestamp, in microseconds.
    timestamp_usec: u64,
    // Since Move doesn't support hashmaps, this vote map would be stored as a vector of key value
    // pairs in the Move module. Thus we need a BTreeMap here to define how the values are being
    // ordered.
    previous_block_votes: BTreeMap<AccountAddress, Ed25519Signature>,
    // Address of the validator proposing this block.
    proposer: AccountAddress,
}
impl BlockMetadata {
    /// Assembles the metadata for a block proposal.
    pub fn new(
        id: HashValue,
        timestamp_usec: u64,
        previous_block_votes: BTreeMap<AccountAddress, Ed25519Signature>,
        proposer: AccountAddress,
    ) -> Self {
        Self { id, timestamp_usec, previous_block_votes, proposer }
    }

    /// Decomposes the metadata into the tuple handed to the Move module:
    /// (block id bytes, timestamp in microseconds, LCS-serialized vote map,
    /// proposer address). Fails only if serializing the vote map fails.
    pub fn into_inner(self) -> Result<(ByteArray, u64, ByteArray, AccountAddress)> {
        let block_id = ByteArray::new(self.id.to_vec());
        let serialized_votes = ByteArray::new(lcs::to_bytes(&self.previous_block_votes)?);
        Ok((block_id, self.timestamp_usec, serialized_votes, self.proposer))
    }
}
| 40.5 | 98 | 0.704586 |
1d9da4798f5044a6241a42a44720df5571c62e9c | 8,822 | #[doc = r" Value read from the register"]
// Read/write snapshot types for the U1PWRC register (svd2rust-style API).
pub struct R {
    bits: u32, // raw register bits captured by `read()`
}
#[doc = r" Value to write to the register"]
pub struct W {
    bits: u32, // pending raw bits committed by `write()`/`modify()`
}
// Register accessor: read / write / modify / reset, svd2rust-style.
impl super::U1PWRC {
    #[doc = r" Modifies the contents of the register"]
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: one read, closure edits the copy, one write-back.
        let bits = self.register.get();
        let r = R { bits: bits };
        let mut w = W { bits: bits };
        f(&r, &mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Start from the reset value, not the current hardware contents.
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
// Reader for the USBPWR field (bit 0; see `impl R`).
#[doc = r" Value of the field"]
pub struct USBPWRR {
    bits: bool,
}
impl USBPWRR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
// Reader for the USUSPEND field (bit 1; see `impl R`).
#[doc = r" Value of the field"]
pub struct USUSPENDR {
    bits: bool,
}
impl USUSPENDR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
// Reader for the USBBUSY field (bit 3; see `impl R`).
#[doc = r" Value of the field"]
pub struct USBBUSYR {
    bits: bool,
}
impl USBBUSYR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
// Reader for the USLPGRD field (bit 4; see `impl R`).
#[doc = r" Value of the field"]
pub struct USLPGRDR {
    bits: bool,
}
impl USLPGRDR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
// Reader for the UACTPND field (bit 7; see `impl R`).
#[doc = r" Value of the field"]
pub struct UACTPNDR {
    bits: bool,
}
impl UACTPNDR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r" Proxy"]
pub struct _USBPWRW<'a> {
w: &'a mut W,
}
impl<'a> _USBPWRW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _USUSPENDW<'a> {
w: &'a mut W,
}
impl<'a> _USUSPENDW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 1;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _USBBUSYW<'a> {
w: &'a mut W,
}
impl<'a> _USBBUSYW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 3;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
// svd2rust-generated write proxy for the single-bit USLPGRD field (bit 4).
// NOTE(review): generated code — regenerate from the SVD rather than editing.
#[doc = r" Proxy"]
pub struct _USLPGRDW<'a> {
    w: &'a mut W,
}
impl<'a> _USLPGRDW<'a> {
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 4;
        // Clear the field's bit position, then OR in the masked new value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
// svd2rust-generated write proxy for the single-bit UACTPND field (bit 7).
// NOTE(review): generated code — regenerate from the SVD rather than editing.
#[doc = r" Proxy"]
pub struct _UACTPNDW<'a> {
    w: &'a mut W,
}
impl<'a> _UACTPNDW<'a> {
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 7;
        // Clear the field's bit position, then OR in the masked new value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
// svd2rust-generated read accessors: each method extracts one named bit from
// the raw register value. NOTE(review): generated code — do not hand-edit.
impl R {
    #[doc = r" Value of the register as raw bits"]
    #[inline]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    #[doc = "Bit 0"]
    #[inline]
    pub fn usbpwr(&self) -> USBPWRR {
        // Shift the field down to bit 0, mask to one bit, and test non-zero.
        let bits = {
            const MASK: bool = true;
            const OFFSET: u8 = 0;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        };
        USBPWRR { bits }
    }
    #[doc = "Bit 1"]
    #[inline]
    pub fn ususpend(&self) -> USUSPENDR {
        let bits = {
            const MASK: bool = true;
            const OFFSET: u8 = 1;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        };
        USUSPENDR { bits }
    }
    #[doc = "Bit 3"]
    #[inline]
    pub fn usbbusy(&self) -> USBBUSYR {
        let bits = {
            const MASK: bool = true;
            const OFFSET: u8 = 3;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        };
        USBBUSYR { bits }
    }
    #[doc = "Bit 4"]
    #[inline]
    pub fn uslpgrd(&self) -> USLPGRDR {
        let bits = {
            const MASK: bool = true;
            const OFFSET: u8 = 4;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        };
        USLPGRDR { bits }
    }
    #[doc = "Bit 7"]
    #[inline]
    pub fn uactpnd(&self) -> UACTPNDR {
        let bits = {
            const MASK: bool = true;
            const OFFSET: u8 = 7;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        };
        UACTPNDR { bits }
    }
}
// svd2rust-generated write interface: `reset_value` gives the register's
// documented reset state, and each method returns a one-bit field proxy.
// NOTE(review): generated code — do not hand-edit.
impl W {
    #[doc = r" Reset value of the register"]
    #[inline]
    pub fn reset_value() -> W {
        W { bits: 0 }
    }
    #[doc = r" Writes raw bits to the register"]
    #[inline]
    // `unsafe` because arbitrary raw bits may violate hardware invariants.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bit 0"]
    #[inline]
    pub fn usbpwr(&mut self) -> _USBPWRW {
        _USBPWRW { w: self }
    }
    #[doc = "Bit 1"]
    #[inline]
    pub fn ususpend(&mut self) -> _USUSPENDW {
        _USUSPENDW { w: self }
    }
    #[doc = "Bit 3"]
    #[inline]
    pub fn usbbusy(&mut self) -> _USBBUSYW {
        _USBBUSYW { w: self }
    }
    #[doc = "Bit 4"]
    #[inline]
    pub fn uslpgrd(&mut self) -> _USLPGRDW {
        _USLPGRDW { w: self }
    }
    #[doc = "Bit 7"]
    #[inline]
    pub fn uactpnd(&mut self) -> _UACTPNDW {
        _UACTPNDW { w: self }
    }
}
| 24.505556 | 59 | 0.490252 |
50815a3e95d9042f76e66b06560f4d0acbde3ddc | 8,018 | #![allow(dead_code)]
pub mod apu;
pub mod clock;
pub mod components;
pub mod controller;
pub mod cpu;
pub mod ines;
pub mod io;
pub mod mappers;
pub mod memory;
pub mod ppu;
pub mod state;
pub mod util;
#[cfg(test)]
mod test;
use std::cell::RefCell;
use std::rc::Rc;
use crate::emulator::apu::AudioOut;
use crate::emulator::controller::Button;
use crate::emulator::io::event::{EventBus, Key};
use crate::emulator::io::Screen;
use crate::emulator::memory::{IORegisters, Writer};
use crate::emulator::state::{NESState, SaveState};
// Timings (NTSC).
// Master clock = 21.477272 MHz ~= 46.5ns per clock.
// CPU clock = 12 master clocks.
// PPU clock = 4 master clocks.
pub const NES_MASTER_CLOCK_HZ: u64 = 21_477_272;
// Divisors applied to the master clock to derive each component's tick rate.
pub const NES_CPU_CLOCK_FACTOR: u32 = 12;
// APU factor is 24, i.e. one APU tick per two CPU ticks.
pub const NES_APU_CLOCK_FACTOR: u32 = 24;
pub const NES_PPU_CLOCK_FACTOR: u32 = 4;
/// The fully wired console: every hardware component plus the master clock
/// that drives them. Components are `Rc<RefCell<_>>` so they can be shared
/// between the memory maps and the clock tickers.
pub struct NES {
    clock: clock::Clock,
    pub cpu: Rc<RefCell<cpu::CPU>>,
    pub ppu: Rc<RefCell<ppu::PPU>>,
    pub apu: Rc<RefCell<apu::APU>>,
    // Cartridge mapper, shared between the CPU and PPU address spaces.
    pub mapper: Rc<RefCell<dyn memory::Mapper>>,
    pub ram: Rc<RefCell<memory::Memory>>,
    pub sram: Rc<RefCell<memory::Memory>>,
    pub vram: Rc<RefCell<memory::Memory>>,
    pub screen: Rc<RefCell<Screen>>,
    pub joy1: Rc<RefCell<controller::Controller>>,
    pub joy2: Rc<RefCell<controller::Controller>>,
    // Last observed level of the PPU's NMI line, used for edge detection in
    // `tick` so the CPU is only interrupted on a low-to-high transition.
    nmi_pin: bool,
}
impl NES {
pub fn new<A>(
event_bus: Rc<RefCell<EventBus>>,
screen: Rc<RefCell<Screen>>,
audio: A,
rom: ines::ROM,
) -> NES
where
A: AudioOut + 'static,
{
// Create master clock.
let mut clock = clock::Clock::new();
// Load ROM into memory.
let mapper = rom.get_mapper();
// Create RAM modules.
let ram = Rc::new(RefCell::new(memory::Memory::new_ram(0x800)));
let sram = Rc::new(RefCell::new(memory::Memory::new_ram(0x2000)));
let vram = Rc::new(RefCell::new(memory::Memory::new_ram(0x2000)));
// Create graphics output module and PPU.
let ppu_memory = memory::PPUMemory::new(
Box::new(memory::ChrMapper::new(mapper.clone())),
Box::new(mapper.clone()),
Box::new(vram.clone()),
);
let ppu = Rc::new(RefCell::new(ppu::PPU::new(
ppu_memory,
Box::new(screen.clone()),
)));
// Create APU.
let apu = Rc::new(RefCell::new(apu::APU::new(
Box::new(audio),
Box::new(memory::PrgMapper::new(mapper.clone())),
)));
// Create controllers.
let joy1 = Rc::new(RefCell::new(controller::Controller::new(
[
(Key::Z, Button::A),
(Key::X, Button::B),
(Key::A, Button::Start),
(Key::S, Button::Select),
(Key::Up, Button::Up),
(Key::Down, Button::Down),
(Key::Left, Button::Left),
(Key::Right, Button::Right),
]
.iter()
.cloned()
.collect(),
)));
let joy2 = Rc::new(RefCell::new(controller::Controller::new(
[].iter().cloned().collect(),
)));
event_bus.borrow_mut().register(Box::new(joy1.clone()));
event_bus.borrow_mut().register(Box::new(joy2.clone()));
// Create CPU.
let io_registers = Rc::new(RefCell::new(memory::IORegisters::new(
Box::new(apu.clone()),
Box::new(joy1.clone()),
Box::new(joy2.clone()),
)));
let cpu_memory = memory::CPUMemory::new(
Box::new(ram.clone()),
Box::new(ppu.clone()),
Box::new(io_registers.clone()),
Box::new(sram.clone()),
Box::new(memory::PrgMapper::new(mapper.clone())),
);
let cpu = Rc::new(RefCell::new(cpu::new(Box::new(cpu_memory))));
cpu.borrow_mut().disable_bcd();
cpu.borrow_mut().startup_sequence();
let dma_controller = DMAController::new(io_registers.clone(), cpu.clone());
// Wire up the clock timings.
let cpu_ticker = clock::ScaledTicker::new(Box::new(dma_controller), NES_CPU_CLOCK_FACTOR);
let ppu_ticker = clock::ScaledTicker::new(Box::new(ppu.clone()), NES_PPU_CLOCK_FACTOR);
let apu_ticker = clock::ScaledTicker::new(Box::new(apu.clone()), NES_APU_CLOCK_FACTOR);
clock.manage(cpu_ticker);
clock.manage(apu_ticker);
clock.manage(ppu_ticker);
NES {
clock,
cpu,
ppu,
apu,
mapper,
ram,
sram,
vram,
screen,
joy1,
joy2,
nmi_pin: false,
}
}
#[inline]
pub fn tick(&mut self) -> u64 {
let cycles = self.clock.tick();
if self.ppu.borrow().nmi_triggered() {
if self.nmi_pin == false {
self.cpu.borrow_mut().trigger_nmi();
self.nmi_pin = true;
}
} else {
self.nmi_pin = false;
}
if self.apu.borrow().irq_triggered() {
self.cpu.borrow_mut().trigger_irq();
}
if self.mapper.borrow().irq_triggered() {
self.cpu.borrow_mut().trigger_irq();
}
cycles
}
pub fn tick_multi(&mut self, ticks: u32) -> u64 {
let mut cycles = 0u64;
for _ in 0..ticks {
cycles += self.tick()
}
cycles
}
pub fn reset(&mut self) {
// Silence APU.
self.apu.borrow_mut().write(0x4015, 0x00);
// Restart CPU.
self.cpu.borrow_mut().startup_sequence();
}
}
/// Mediates OAM DMA ($4014) transfers: while a transfer is active the CPU is
/// suspended and one byte per two cycles is copied to the PPU.
pub struct DMAController {
    // Bytes still to copy; 0 means no transfer is in progress.
    copies_remaining: u16,
    // Source page start address ($XX00) of the active transfer.
    base_address: u16,
    io_registers: Rc<RefCell<IORegisters>>,
    cpu: Rc<RefCell<cpu::CPU>>,
}
impl DMAController {
    /// Creates a controller in the idle state: no DMA transfer in flight.
    pub fn new(
        io_registers: Rc<RefCell<IORegisters>>,
        cpu: Rc<RefCell<cpu::CPU>>,
    ) -> DMAController {
        DMAController {
            io_registers,
            cpu,
            copies_remaining: 0,
            base_address: 0,
        }
    }
}
impl clock::Ticker for DMAController {
    /// Either copies one byte of an in-flight OAM DMA transfer (CPU suspended,
    /// 2 cycles per byte) or, when idle, forwards the tick to the CPU.
    fn tick(&mut self) -> u32 {
        match self.io_registers.borrow_mut().get_oamdma() {
            None => (),
            Some(byte) => {
                // DMA triggered.
                // The written byte selects the source page: 256 bytes are
                // copied starting at $XX00.
                self.base_address = (byte as u16) << 8;
                self.copies_remaining = 256;
            }
        }
        if self.copies_remaining > 0 {
            // CPU is suspended during copy.
            // `256 - copies_remaining` walks the source page front-to-back.
            let byte = self
                .cpu
                .borrow_mut()
                .load_memory(self.base_address.wrapping_add(256 - self.copies_remaining));
            // 0x2004 is presumably the PPU OAM data port (matches get_oamdma
            // above) — confirm against the CPU memory map.
            self.cpu.borrow_mut().store_memory(0x2004, byte);
            self.copies_remaining -= 1;
            2
        } else {
            self.cpu.borrow_mut().tick()
        }
    }
}
impl<'de> SaveState<'de, NESState> for NES {
    /// Snapshots every stateful component into a serializable `NESState`.
    fn freeze(&mut self) -> NESState {
        NESState {
            cpu: self.cpu.borrow_mut().freeze(),
            ppu: self.ppu.borrow_mut().freeze(),
            mapper: self.mapper.borrow_mut().freeze(),
            ram: self.ram.borrow_mut().freeze(),
            sram: self.sram.borrow_mut().freeze(),
            vram: self.vram.borrow_mut().freeze(),
            screen: self.screen.borrow_mut().freeze(),
            joy1: self.joy1.borrow_mut().freeze(),
            joy2: self.joy2.borrow_mut().freeze(),
        }
    }
    /// Restores every component from a previously frozen `NESState`.
    /// Note: `nmi_pin` is not part of the snapshot, so the edge detector
    /// keeps whatever level it currently holds.
    fn hydrate(&mut self, state: NESState) {
        self.cpu.borrow_mut().hydrate(state.cpu);
        self.ppu.borrow_mut().hydrate(state.ppu);
        self.mapper.borrow_mut().hydrate(state.mapper);
        self.ram.borrow_mut().hydrate(state.ram);
        self.sram.borrow_mut().hydrate(state.sram);
        self.vram.borrow_mut().hydrate(state.vram);
        self.screen.borrow_mut().hydrate(state.screen);
        self.joy1.borrow_mut().hydrate(state.joy1);
        self.joy2.borrow_mut().hydrate(state.joy2);
    }
}
| 29.262774 | 98 | 0.543901 |
75cf35654818c638951b9ef438c5366663801b83 | 1,687 | //! Serialize and deserialize part_set_header.total (from string or u32), (into u32 in
//! part_set_header.total).
//!
//! The deserializer is created for backwards compatibility: `total` was changed from a
//! string-quoted integer value into an integer value without quotes in Tendermint Core v0.34.0.
//! This deserializer allows backwards-compatibility by deserializing both ways.
//! See also: <https://github.com/informalsystems/tendermint-rs/issues/679>
use serde::{de::Error, de::Visitor, Deserializer, Serialize, Serializer};
use std::convert::TryFrom;
use std::fmt::Formatter;
struct PartSetHeaderTotalStringOrU32;
/// Deserialize (string or u32) into u32(part_set_header.total)
pub fn deserialize<'de, D>(deserializer: D) -> Result<u32, D::Error>
where
    D: Deserializer<'de>,
{
    // `deserialize_any` lets the visitor accept both integer and string input.
    let visitor = PartSetHeaderTotalStringOrU32;
    deserializer.deserialize_any(visitor)
}
/// Serialize from u32(part_set_header.total) into u32
pub fn serialize<S>(value: &u32, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
value.serialize(serializer)
}
impl<'de> Visitor<'de> for PartSetHeaderTotalStringOrU32 {
    type Value = u32;

    fn expecting(&self, formatter: &mut Formatter<'_>) -> std::fmt::Result {
        formatter.write_str("an u32 integer or string between 0 and 2^32")
    }

    /// Integer input (the v0.34+ wire format): narrow u64 -> u32, reporting
    /// out-of-range values as a custom error.
    fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E>
    where
        E: Error,
    {
        match u32::try_from(v) {
            Ok(total) => Ok(total),
            Err(e) => Err(E::custom(format!("part_set_header.total {}", e))),
        }
    }

    /// String input (the pre-v0.34 wire format): parse the digits as u32.
    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
    where
        E: Error,
    {
        match v.parse::<u32>() {
            Ok(total) => Ok(total),
            Err(e) => Err(E::custom(format!("part_set_header.total {}", e))),
        }
    }
}
| 32.442308 | 96 | 0.679905 |
1d4ec11762afd77eaa710d2990613062100565d6 | 601 | // Regression test for issue #76740.
// run-pass
// compile-flags: -Zmir-opt-level=4
// This is a compiler regression test: the exact shape of the code below is
// load-bearing, so do not "clean it up".
#[derive(Copy, Clone)]
pub struct V([usize; 4]);
impl V {
    fn new() -> Self {
        V([0; 4])
    }
    // Takes `self` by value (a copy) and mutates it; the caller's original
    // must be unaffected. `inline(never)` keeps this a real call so the MIR
    // copy-propagation pass under -Zmir-opt-level=4 is actually exercised.
    #[inline(never)]
    fn check(mut self) {
        assert_eq!(self.0[0], 0);
        self.0[0] = 1;
    }
}
fn main() {
    let v = V::new();
    let mut i = 0;
    // Loop so the optimizer sees repeated moves of `v`; with the bug, the
    // mutation inside one V::check call leaked into the next iteration and
    // the assert in `check` would fail.
    while i != 10 {
        // Copy propagation incorrectly assumed that Operand::Move does not
        // mutate the local, and used the same v for each V::check call,
        // rather than a copy.
        v.check();
        i += 1;
    }
}
| 19.387097 | 75 | 0.514143 |
39926f946e6ff2d887b04a553a65f26f37d20a83 | 1,543 |
/// Yew component rendering the "settings_ethernet" SVG icon; all sizing and
/// styling comes from the shared `Props` type.
pub struct IconSettingsEthernet {
    props: crate::Props,
}
impl yew::Component for IconSettingsEthernet {
    type Properties = crate::Props;
    type Message = ();

    // Stateless component: just store the props.
    fn create(props: Self::Properties, _: yew::prelude::ComponentLink<Self>) -> Self
    {
        Self { props }
    }

    // No messages are ever sent; returning true is harmless here.
    fn update(&mut self, _: Self::Message) -> yew::prelude::ShouldRender
    {
        true
    }

    // NOTE(review): prop changes are discarded (returns false without storing
    // the new props), so the icon never re-renders on prop updates — confirm
    // this is intentional for these generated icon components.
    fn change(&mut self, _: Self::Properties) -> yew::prelude::ShouldRender
    {
        false
    }

    // Renders an outer <svg> driven by props, wrapping the fixed icon path.
    fn view(&self) -> yew::prelude::Html
    {
        yew::prelude::html! {
            <svg
                class=self.props.class.unwrap_or("")
                width=self.props.size.unwrap_or(24).to_string()
                height=self.props.size.unwrap_or(24).to_string()
                viewBox="0 0 24 24"
                fill=self.props.fill.unwrap_or("none")
                stroke=self.props.color.unwrap_or("currentColor")
                stroke-width=self.props.stroke_width.unwrap_or(2).to_string()
                stroke-linecap=self.props.stroke_linecap.unwrap_or("round")
                stroke-linejoin=self.props.stroke_linejoin.unwrap_or("round")
            >
            <svg xmlns="http://www.w3.org/2000/svg" height="24" viewBox="0 0 24 24" width="24"><path d="M0 0h24v24H0V0z" fill="none"/><path d="M7.77 6.76L6.23 5.48.82 12l5.41 6.52 1.54-1.28L3.42 12l4.35-5.24zM7 13h2v-2H7v2zm10-2h-2v2h2v-2zm-6 2h2v-2h-2v2zm6.77-7.52l-1.54 1.28L20.58 12l-4.35 5.24 1.54 1.28L23.18 12l-5.41-6.52z"/></svg>
            </svg>
        }
    }
}
| 33.543478 | 336 | 0.585872 |
bf76f49a2ada4dd060e35e0ca5f68ddb7955330a | 15,651 | use bstr::ByteSlice;
use crossbeam_utils::atomic::AtomicCell;
use std::mem::size_of;
use std::ops::Deref;
use std::str::FromStr;
use super::objbyteinner::{
ByteInnerFindOptions, ByteInnerNewOptions, ByteInnerPaddingOptions, ByteInnerSplitOptions,
ByteInnerTranslateOptions, PyByteInner,
};
use super::objint::PyIntRef;
use super::objiter;
use super::objslice::PySliceRef;
use super::objstr::{PyString, PyStringRef};
use super::objtype::PyClassRef;
use super::pystr::{self, PyCommonString};
use crate::cformat::CFormatString;
use crate::function::{OptionalArg, OptionalOption};
use crate::obj::objstr::do_cformat_string;
use crate::pyhash;
use crate::pyobject::{
Either, IntoPyObject,
PyArithmaticValue::{self, *},
PyClassImpl, PyComparisonValue, PyContext, PyIterable, PyObjectRef, PyRef, PyResult, PyValue,
ThreadSafe, TryFromObject, TypeProtocol,
};
use crate::vm::VirtualMachine;
/// "bytes(iterable_of_ints) -> bytes\n\
/// bytes(string, encoding[, errors]) -> bytes\n\
/// bytes(bytes_or_buffer) -> immutable copy of bytes_or_buffer\n\
/// bytes(int) -> bytes object of size given by the parameter initialized with null bytes\n\
/// bytes() -> empty bytes object\n\nConstruct an immutable array of bytes from:\n \
/// - an iterable yielding integers in range(256)\n \
/// - a text string encoded using the specified encoding\n \
/// - any object implementing the buffer API.\n \
/// - an integer";
#[pyclass(name = "bytes")]
#[derive(Clone, Debug)]
pub struct PyBytes {
    // Backing byte storage; the Python-level methods delegate to PyByteInner.
    inner: PyByteInner,
}
// Marker impl flagging bytes objects as safe for cross-thread use.
impl ThreadSafe for PyBytes {}
// Convenience alias for a reference-counted bytes object.
pub type PyBytesRef = PyRef<PyBytes>;
impl PyBytes {
    /// Builds a bytes object owning `elements`.
    pub fn new(elements: Vec<u8>) -> Self {
        let inner = PyByteInner { elements };
        PyBytes { inner }
    }

    /// Borrows the underlying byte buffer.
    pub fn get_value(&self) -> &[u8] {
        self.inner.elements.as_slice()
    }
}
impl From<Vec<u8>> for PyBytes {
fn from(elements: Vec<u8>) -> Self {
Self::new(elements)
}
}
impl IntoPyObject for Vec<u8> {
    /// Wraps a raw byte vector as an interpreter-level bytes object.
    fn into_pyobject(self, vm: &VirtualMachine) -> PyResult {
        let obj = vm.ctx.new_bytes(self);
        Ok(obj)
    }
}
impl Deref for PyBytes {
    type Target = [u8];

    /// Lets a `PyBytes` be used anywhere a byte slice is expected.
    fn deref(&self) -> &[u8] {
        self.inner.elements.as_slice()
    }
}
impl PyValue for PyBytes {
    /// The interpreter class backing this value: the built-in `bytes` type.
    fn class(vm: &VirtualMachine) -> PyClassRef {
        let ctx = &vm.ctx;
        ctx.bytes_type()
    }
}
/// Registers the `bytes` and bytes-iterator types (and their methods) with
/// the interpreter context at startup.
pub(crate) fn init(context: &PyContext) {
    PyBytes::extend_class(context, &context.types.bytes_type);
    // `maketrans` lives on the type itself rather than in the #[pyimpl] block.
    let bytes_type = &context.types.bytes_type;
    extend_class!(context, bytes_type, {
        "maketrans" => context.new_method(PyByteInner::maketrans),
    });
    PyBytesIterator::extend_class(context, &context.types.bytesiterator_type);
}
// Python-level methods of the `bytes` type. Nearly every method is a thin
// wrapper that delegates to PyByteInner (shared with bytearray) and converts
// the result back into interpreter values. Comments use `//` (not `///`) so
// they stay invisible to the #[pyimpl] proc macro.
#[pyimpl(flags(BASETYPE))]
impl PyBytes {
    // Slot constructor backing `bytes(...)`; `cls` keeps subclassing working.
    #[pyslot]
    fn tp_new(
        cls: PyClassRef,
        options: ByteInnerNewOptions,
        vm: &VirtualMachine,
    ) -> PyResult<PyBytesRef> {
        PyBytes {
            inner: options.get_value(vm)?,
        }
        .into_ref_with_type(vm, cls)
    }

    #[pymethod(name = "__repr__")]
    fn repr(&self, vm: &VirtualMachine) -> PyResult {
        Ok(vm.new_str(format!("b'{}'", self.inner.repr()?)))
    }

    #[pymethod(name = "__len__")]
    pub(crate) fn len(&self) -> usize {
        self.inner.len()
    }

    // Rich-comparison methods all defer to PyByteInner's comparisons.
    #[pymethod(name = "__eq__")]
    fn eq(&self, other: PyObjectRef, vm: &VirtualMachine) -> PyComparisonValue {
        self.inner.eq(other, vm)
    }
    #[pymethod(name = "__ge__")]
    fn ge(&self, other: PyObjectRef, vm: &VirtualMachine) -> PyComparisonValue {
        self.inner.ge(other, vm)
    }
    #[pymethod(name = "__le__")]
    fn le(&self, other: PyObjectRef, vm: &VirtualMachine) -> PyComparisonValue {
        self.inner.le(other, vm)
    }
    #[pymethod(name = "__gt__")]
    fn gt(&self, other: PyObjectRef, vm: &VirtualMachine) -> PyComparisonValue {
        self.inner.gt(other, vm)
    }
    #[pymethod(name = "__lt__")]
    fn lt(&self, other: PyObjectRef, vm: &VirtualMachine) -> PyComparisonValue {
        self.inner.lt(other, vm)
    }

    #[pymethod(name = "__hash__")]
    fn hash(&self) -> pyhash::PyHash {
        self.inner.hash()
    }

    #[pymethod(name = "__iter__")]
    fn iter(zelf: PyRef<Self>) -> PyBytesIterator {
        PyBytesIterator {
            position: AtomicCell::new(0),
            bytes: zelf,
        }
    }

    #[pymethod(name = "__sizeof__")]
    fn sizeof(&self) -> PyResult<usize> {
        Ok(size_of::<Self>() + self.inner.elements.len() * size_of::<u8>())
    }

    // Concatenation; non-bytes-like operands fall through to NotImplemented.
    #[pymethod(name = "__add__")]
    fn add(&self, other: PyObjectRef, vm: &VirtualMachine) -> PyArithmaticValue<PyBytes> {
        if let Ok(other) = PyByteInner::try_from_object(vm, other) {
            Implemented(self.inner.add(other).into())
        } else {
            NotImplemented
        }
    }

    // `in` accepts either a bytes-like needle or a single integer byte value.
    #[pymethod(name = "__contains__")]
    fn contains(
        &self,
        needle: Either<PyByteInner, PyIntRef>,
        vm: &VirtualMachine,
    ) -> PyResult<bool> {
        self.inner.contains(needle, vm)
    }

    // Supports both integer indexing and slicing.
    #[pymethod(name = "__getitem__")]
    fn getitem(&self, needle: Either<i32, PySliceRef>, vm: &VirtualMachine) -> PyResult {
        self.inner.getitem(needle, vm)
    }

    // Character-class predicates, mirroring Python's bytes API.
    #[pymethod(name = "isalnum")]
    fn isalnum(&self) -> bool {
        self.inner.isalnum()
    }
    #[pymethod(name = "isalpha")]
    fn isalpha(&self) -> bool {
        self.inner.isalpha()
    }
    #[pymethod(name = "isascii")]
    fn isascii(&self) -> bool {
        self.inner.isascii()
    }
    #[pymethod(name = "isdigit")]
    fn isdigit(&self) -> bool {
        self.inner.isdigit()
    }
    #[pymethod(name = "islower")]
    fn islower(&self) -> bool {
        self.inner.islower()
    }
    #[pymethod(name = "isspace")]
    fn isspace(&self) -> bool {
        self.inner.isspace()
    }
    #[pymethod(name = "isupper")]
    fn isupper(&self) -> bool {
        self.inner.isupper()
    }
    #[pymethod(name = "istitle")]
    fn istitle(&self) -> bool {
        self.inner.istitle()
    }

    // Case-mapping methods; each returns a new bytes object.
    #[pymethod(name = "lower")]
    fn lower(&self) -> PyBytes {
        self.inner.lower().into()
    }
    #[pymethod(name = "upper")]
    fn upper(&self) -> PyBytes {
        self.inner.upper().into()
    }
    #[pymethod(name = "capitalize")]
    fn capitalize(&self) -> PyBytes {
        self.inner.capitalize().into()
    }
    #[pymethod(name = "swapcase")]
    fn swapcase(&self) -> PyBytes {
        self.inner.swapcase().into()
    }

    #[pymethod(name = "hex")]
    fn hex(&self) -> String {
        self.inner.hex()
    }

    // Inverse of `hex`: parse a hex-digit string into bytes.
    #[pymethod]
    fn fromhex(string: PyStringRef, vm: &VirtualMachine) -> PyResult<PyBytes> {
        Ok(PyByteInner::fromhex(string.as_str(), vm)?.into())
    }

    // Padding helpers.
    #[pymethod(name = "center")]
    fn center(&self, options: ByteInnerPaddingOptions, vm: &VirtualMachine) -> PyResult<PyBytes> {
        Ok(self.inner.center(options, vm)?.into())
    }
    #[pymethod(name = "ljust")]
    fn ljust(&self, options: ByteInnerPaddingOptions, vm: &VirtualMachine) -> PyResult<PyBytes> {
        Ok(self.inner.ljust(options, vm)?.into())
    }
    #[pymethod(name = "rjust")]
    fn rjust(&self, options: ByteInnerPaddingOptions, vm: &VirtualMachine) -> PyResult<PyBytes> {
        Ok(self.inner.rjust(options, vm)?.into())
    }

    #[pymethod(name = "count")]
    fn count(&self, options: ByteInnerFindOptions, vm: &VirtualMachine) -> PyResult<usize> {
        self.inner.count(options, vm)
    }

    #[pymethod(name = "join")]
    fn join(&self, iter: PyIterable<PyByteInner>, vm: &VirtualMachine) -> PyResult<PyBytes> {
        Ok(self.inner.join(iter, vm)?.into())
    }

    #[pymethod(name = "endswith")]
    fn endswith(&self, options: pystr::StartsEndsWithArgs, vm: &VirtualMachine) -> PyResult<bool> {
        self.inner.elements[..].py_startsendswith(
            options,
            "endswith",
            "bytes",
            |s, x: &PyByteInner| s.ends_with(&x.elements[..]),
            vm,
        )
    }
    #[pymethod(name = "startswith")]
    fn startswith(
        &self,
        options: pystr::StartsEndsWithArgs,
        vm: &VirtualMachine,
    ) -> PyResult<bool> {
        self.inner.elements[..].py_startsendswith(
            options,
            "startswith",
            "bytes",
            |s, x: &PyByteInner| s.starts_with(&x.elements[..]),
            vm,
        )
    }

    // find/rfind return -1 on a miss; index/rindex raise ValueError instead.
    #[pymethod(name = "find")]
    fn find(&self, options: ByteInnerFindOptions, vm: &VirtualMachine) -> PyResult<isize> {
        let index = self.inner.find(options, |h, n| h.find(n), vm)?;
        Ok(index.map_or(-1, |v| v as isize))
    }
    #[pymethod(name = "index")]
    fn index(&self, options: ByteInnerFindOptions, vm: &VirtualMachine) -> PyResult<usize> {
        let index = self.inner.find(options, |h, n| h.find(n), vm)?;
        index.ok_or_else(|| vm.new_value_error("substring not found".to_owned()))
    }
    #[pymethod(name = "rfind")]
    fn rfind(&self, options: ByteInnerFindOptions, vm: &VirtualMachine) -> PyResult<isize> {
        let index = self.inner.find(options, |h, n| h.rfind(n), vm)?;
        Ok(index.map_or(-1, |v| v as isize))
    }
    #[pymethod(name = "rindex")]
    fn rindex(&self, options: ByteInnerFindOptions, vm: &VirtualMachine) -> PyResult<usize> {
        let index = self.inner.find(options, |h, n| h.rfind(n), vm)?;
        index.ok_or_else(|| vm.new_value_error("substring not found".to_owned()))
    }

    #[pymethod(name = "translate")]
    fn translate(
        &self,
        options: ByteInnerTranslateOptions,
        vm: &VirtualMachine,
    ) -> PyResult<PyBytes> {
        Ok(self.inner.translate(options, vm)?.into())
    }

    #[pymethod(name = "strip")]
    fn strip(&self, chars: OptionalOption<PyByteInner>) -> PyBytes {
        self.inner.strip(chars).into()
    }
    #[pymethod(name = "lstrip")]
    fn lstrip(&self, chars: OptionalOption<PyByteInner>) -> PyBytes {
        self.inner.lstrip(chars).into()
    }
    #[pymethod(name = "rstrip")]
    fn rstrip(&self, chars: OptionalOption<PyByteInner>) -> PyBytes {
        self.inner.rstrip(chars).into()
    }

    #[pymethod(name = "split")]
    fn split(&self, options: ByteInnerSplitOptions, vm: &VirtualMachine) -> PyResult {
        self.inner
            .split(options, |s, vm| vm.ctx.new_bytes(s.to_vec()), vm)
    }
    #[pymethod(name = "rsplit")]
    fn rsplit(&self, options: ByteInnerSplitOptions, vm: &VirtualMachine) -> PyResult {
        self.inner
            .rsplit(options, |s, vm| vm.ctx.new_bytes(s.to_vec()), vm)
    }

    // partition/rpartition always return a 3-tuple; when the separator is not
    // found the middle element is an empty bytes object.
    #[pymethod(name = "partition")]
    fn partition(&self, sep: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        let sub = PyByteInner::try_from_object(vm, sep.clone())?;
        let (front, has_mid, back) = self.inner.partition(&sub, vm)?;
        Ok(vm.ctx.new_tuple(vec![
            vm.ctx.new_bytes(front),
            if has_mid {
                sep
            } else {
                vm.ctx.new_bytes(Vec::new())
            },
            vm.ctx.new_bytes(back),
        ]))
    }
    #[pymethod(name = "rpartition")]
    fn rpartition(&self, sep: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        let sub = PyByteInner::try_from_object(vm, sep.clone())?;
        let (front, has_mid, back) = self.inner.rpartition(&sub, vm)?;
        Ok(vm.ctx.new_tuple(vec![
            vm.ctx.new_bytes(front),
            if has_mid {
                sep
            } else {
                vm.ctx.new_bytes(Vec::new())
            },
            vm.ctx.new_bytes(back),
        ]))
    }

    #[pymethod(name = "expandtabs")]
    fn expandtabs(&self, options: pystr::ExpandTabsArgs) -> PyBytes {
        self.inner.expandtabs(options).into()
    }

    #[pymethod(name = "splitlines")]
    fn splitlines(&self, options: pystr::SplitLinesArgs, vm: &VirtualMachine) -> PyResult {
        let as_bytes = self
            .inner
            .splitlines(options)
            .iter()
            .map(|x| vm.ctx.new_bytes(x.to_vec()))
            .collect::<Vec<PyObjectRef>>();
        Ok(vm.ctx.new_list(as_bytes))
    }

    #[pymethod(name = "zfill")]
    fn zfill(&self, width: isize) -> PyBytes {
        self.inner.zfill(width).into()
    }

    #[pymethod(name = "replace")]
    fn replace(
        &self,
        old: PyByteInner,
        new: PyByteInner,
        count: OptionalArg<isize>,
        vm: &VirtualMachine,
    ) -> PyResult<PyBytes> {
        Ok(self.inner.replace(old, new, count, vm)?.into())
    }

    #[pymethod(name = "title")]
    fn title(&self) -> PyBytes {
        self.inner.title().into()
    }

    // `bytes * n` / `n * bytes`. Guard against length overflow before
    // materialising the repeated buffer.
    #[pymethod(name = "__mul__")]
    #[pymethod(name = "__rmul__")]
    fn repeat(&self, value: isize, vm: &VirtualMachine) -> PyResult<PyBytes> {
        if value > 0 && self.inner.len() as isize > std::isize::MAX / value {
            return Err(vm.new_overflow_error("repeated bytes are too long".to_owned()));
        }
        Ok(self.inner.repeat(value).into())
    }

    // Shared helper for printf-style `%` formatting.
    fn do_cformat(
        &self,
        vm: &VirtualMachine,
        format_string: CFormatString,
        values_obj: PyObjectRef,
    ) -> PyResult {
        let final_string = do_cformat_string(vm, format_string, values_obj)?;
        Ok(vm
            .ctx
            .new_bytes(final_string.as_str().as_bytes().to_owned()))
    }

    // `bytes % values` — parse self as a C-format string, then interpolate.
    #[pymethod(name = "__mod__")]
    fn modulo(&self, values: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        let format_string_text = std::str::from_utf8(&self.inner.elements).unwrap();
        let format_string = CFormatString::from_str(format_string_text)
            .map_err(|err| vm.new_value_error(err.to_string()))?;
        self.do_cformat(vm, format_string, values.clone())
    }

    #[pymethod(name = "__rmod__")]
    fn rmod(&self, _values: PyObjectRef, vm: &VirtualMachine) -> PyObjectRef {
        vm.ctx.not_implemented()
    }

    /// Return a string decoded from the given bytes.
    /// Default encoding is 'utf-8'.
    /// Default errors is 'strict', meaning that encoding errors raise a UnicodeError.
    /// Other possible values are 'ignore', 'replace'
    /// For a list of possible encodings,
    /// see https://docs.python.org/3/library/codecs.html#standard-encodings
    /// currently, only 'utf-8' and 'ascii' emplemented
    #[pymethod(name = "decode")]
    fn decode(
        zelf: PyRef<Self>,
        encoding: OptionalArg<PyStringRef>,
        errors: OptionalArg<PyStringRef>,
        vm: &VirtualMachine,
    ) -> PyResult<PyStringRef> {
        let encoding = encoding.into_option();
        vm.decode(zelf.into_object(), encoding.clone(), errors.into_option())?
            .downcast::<PyString>()
            .map_err(|obj| {
                vm.new_type_error(format!(
                    "'{}' decoder returned '{}' instead of 'str'; use codecs.encode() to \
                     encode arbitrary types",
                    encoding.as_ref().map_or("utf-8", |s| s.as_str()),
                    obj.class().name,
                ))
            })
    }
}
#[pyclass]
#[derive(Debug)]
pub struct PyBytesIterator {
    // Next index to yield; AtomicCell lets `__next__` advance it through &self.
    position: AtomicCell<usize>,
    bytes: PyBytesRef,
}
impl PyValue for PyBytesIterator {
    /// The interpreter class backing this value: the bytes-iterator type.
    fn class(vm: &VirtualMachine) -> PyClassRef {
        let ctx = &vm.ctx;
        ctx.bytesiterator_type()
    }
}
#[pyimpl]
impl PyBytesIterator {
    // Yields the next byte, or raises StopIteration once exhausted. The
    // position is bumped unconditionally via fetch_add.
    #[pymethod(name = "__next__")]
    fn next(&self, vm: &VirtualMachine) -> PyResult<u8> {
        let pos = self.position.fetch_add(1);
        match self.bytes.get_value().get(pos) {
            Some(byte) => Ok(*byte),
            None => Err(objiter::new_stop_iteration(vm)),
        }
    }

    // Iterators are their own iterators.
    #[pymethod(name = "__iter__")]
    fn iter(zelf: PyRef<Self>) -> PyRef<Self> {
        zelf
    }
}
| 30.040307 | 99 | 0.586161 |
22939ebbfcb34dc93dd03110b55efcfcef092c17 | 5,249 | // Generated from definition io.k8s.api.apps.v1beta1.RollingUpdateDeployment
/// Spec to control the desired behavior of rolling update.
// Both fields are Option: a None field is omitted entirely when serialized
// (see the Serialize impl below), letting the API server apply its defaults.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct RollingUpdateDeployment {
    /// The maximum number of pods that can be scheduled above the desired number of pods. Value can be an absolute number (ex: 5) or a percentage of desired pods (ex: 10%). This can not be 0 if MaxUnavailable is 0. Absolute number is calculated from percentage by rounding up. Defaults to 25%. Example: when this is set to 30%, the new RC can be scaled up immediately when the rolling update starts, such that the total number of old and new pods do not exceed 130% of desired pods. Once old pods have been killed, new RC can be scaled up further, ensuring that total number of pods running at any time during the update is atmost 130% of desired pods.
    pub max_surge: Option<crate::v1_8::apimachinery::pkg::util::intstr::IntOrString>,

    /// The maximum number of pods that can be unavailable during the update. Value can be an absolute number (ex: 5) or a percentage of desired pods (ex: 10%). Absolute number is calculated from percentage by rounding down. This can not be 0 if MaxSurge is 0. Defaults to 25%. Example: when this is set to 30%, the old RC can be scaled down to 70% of desired pods immediately when the rolling update starts. Once new pods are ready, old RC can be scaled down further, followed by scaling up the new RC, ensuring that the total number of pods available at all times during the update is at least 70% of desired pods.
    pub max_unavailable: Option<crate::v1_8::apimachinery::pkg::util::intstr::IntOrString>,
}
// Hand-rolled (generator-produced) serde deserializer: maps the JSON keys
// "maxSurge"/"maxUnavailable" onto the snake_case fields and silently skips
// any unknown keys for forward compatibility.
impl<'de> serde::Deserialize<'de> for RollingUpdateDeployment {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
        // Internal key discriminant; `Other` swallows unrecognised fields.
        #[allow(non_camel_case_types)]
        enum Field {
            Key_max_surge,
            Key_max_unavailable,
            Other,
        }

        impl<'de> serde::Deserialize<'de> for Field {
            fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
                struct Visitor;

                impl<'de> serde::de::Visitor<'de> for Visitor {
                    type Value = Field;

                    fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                        write!(f, "field identifier")
                    }

                    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: serde::de::Error {
                        Ok(match v {
                            "maxSurge" => Field::Key_max_surge,
                            "maxUnavailable" => Field::Key_max_unavailable,
                            _ => Field::Other,
                        })
                    }
                }

                deserializer.deserialize_identifier(Visitor)
            }
        }

        struct Visitor;

        impl<'de> serde::de::Visitor<'de> for Visitor {
            type Value = RollingUpdateDeployment;

            fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                write!(f, "struct RollingUpdateDeployment")
            }

            fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: serde::de::MapAccess<'de> {
                let mut value_max_surge: Option<crate::v1_8::apimachinery::pkg::util::intstr::IntOrString> = None;
                let mut value_max_unavailable: Option<crate::v1_8::apimachinery::pkg::util::intstr::IntOrString> = None;

                while let Some(key) = serde::de::MapAccess::next_key::<Field>(&mut map)? {
                    match key {
                        Field::Key_max_surge => value_max_surge = serde::de::MapAccess::next_value(&mut map)?,
                        Field::Key_max_unavailable => value_max_unavailable = serde::de::MapAccess::next_value(&mut map)?,
                        // Unknown key: consume and discard its value.
                        Field::Other => { let _: serde::de::IgnoredAny = serde::de::MapAccess::next_value(&mut map)?; },
                    }
                }

                Ok(RollingUpdateDeployment {
                    max_surge: value_max_surge,
                    max_unavailable: value_max_unavailable,
                })
            }
        }

        deserializer.deserialize_struct(
            "RollingUpdateDeployment",
            &[
                "maxSurge",
                "maxUnavailable",
            ],
            Visitor,
        )
    }
}
impl serde::Serialize for RollingUpdateDeployment {
    /// Emits only the fields that are `Some`, under their camelCase wire names.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer {
        // The struct serializer needs the field count up front.
        let field_count =
            usize::from(self.max_surge.is_some()) + usize::from(self.max_unavailable.is_some());
        let mut state = serializer.serialize_struct("RollingUpdateDeployment", field_count)?;
        if let Some(max_surge) = &self.max_surge {
            serde::ser::SerializeStruct::serialize_field(&mut state, "maxSurge", max_surge)?;
        }
        if let Some(max_unavailable) = &self.max_unavailable {
            serde::ser::SerializeStruct::serialize_field(&mut state, "maxUnavailable", max_unavailable)?;
        }
        serde::ser::SerializeStruct::end(state)
    }
}
| 51.970297 | 653 | 0.597066 |
abdd5dde4980f5280a28d5a583dc9e7cdf21fbd9 | 26,989 | use super::*;
use libs::fastrand;
pub fn add_intrinsics(impls: &mut Implementations) {
add! { impls,
(get, Pipeline)
(
".",
ast::DotOperatorBlock::instrinsic,
Pipeline,
ast::DotOperatorBlock::help
)
("\\", in, Pipeline)
(len, Pipeline)
(let, Pipeline)
(nth, Pipeline)
(rand, Pipeline)
(range, Pipeline)
(Table, table, Pipeline)
("to-str", to_str, Pipeline)
(Tuple, tuple, Pipeline)
};
}
// ------ Get ------------------------------------------------------------------
/// Builds the help text shown for the `get` intrinsic.
fn get_help() -> HelpMessage {
    let params = vec![
        HelpParameter::Required("field".into()),
        HelpParameter::Optional("default".into()),
    ];

    let examples = vec![
        HelpExample {
            desc: "get the x field of a user defined Point type",
            code: "Point 1 3 | get x",
        },
        HelpExample {
            desc: "get the entry of a table row under the column 'size'",
            code: "ls | filter { get size | > 100 }",
        },
        HelpExample {
            desc: "get all files in the directory, using the --Str flag",
            code: "ls | filter { get type --Str | = 'file' }",
        },
        HelpExample {
            desc: "sum the size of files, using a default of zero",
            code: "ls | fold 0 { + {\\$row | get size 0} }",
        },
    ];

    let flags = vec![(
        "<type>",
        "assert that the entry is of type. defaults to Num if not specified",
    )];

    HelpMessage {
        desc: "extract a value out of a data structure
optionally specify a default value if the get type does not match"
            .into(),
        params,
        examples,
        flags,
        ..HelpMessage::new("get")
    }
}
/// Compiles the `get` intrinsic. Two paths: table rows fetch a named column
/// entry (with optional default or `--<type>` flag), any other input is
/// treated as a structured value whose field is extracted positionally.
/// NOTE: argument-consumption order on `blk` is significant — do not reorder.
fn get_intrinsic(mut blk: Block) -> Result<Step> {
    match blk.in_ty().clone() {
        Ty::TabRow => {
            // First argument: the column name, which must resolve to a Str.
            let colarg = blk
                .next_arg()?
                .supplied(Type::Nil)?
                .returns(Ty::Str)?
                .concrete()?;
            // this is the default arg: 'get foo 0'
            let get_type = match blk.args_len() {
                1 => blk
                    .next_arg()?
                    .supplied(Type::Nil)?
                    .concrete()
                    .map(TableGetType::Default)?,
                // use the type flag
                _ => type_flag(&mut blk)
                    .and_then(|ty| {
                        // otherwise try to infer the output
                        ty.map(Ok).unwrap_or_else(|| {
                            blk.output_ty()
                                .ok_or_else(|| Error::unknown_blk_output_type(blk.blk_tag()))
                        })
                    })
                    .map(TableGetType::Flag)?,
            };
            blk.eval(get_type.ty().clone(), move |x, cx| {
                let trow: TableRow = x.try_into()?;
                table_row_get(&trow, &colarg, &get_type, cx)
            })
        }
        t => {
            // Non-table input: resolve the field at compile time so the eval
            // step is just an indexed access.
            let field_arg = blk.next_arg()?.supplied(None)?.concrete()?;
            let (facc, out_ty) = FieldAccessor::construct(&t, &field_arg, blk.op_tag())?;
            blk.eval(out_ty, move |input, cx| {
                facc.get(input).and_then(|x| cx.done(x))
            })
        }
    }
}
/// How a table-row lookup (`get` / `.`) determines its output type.
enum TableGetType {
    /// A default-value argument (`get foo 0`); its output type drives the
    /// lookup's type, and the argument is resolved as a fallback when the
    /// entry's runtime type does not match.
    Default(eng::Argument),
    /// A type from the `--<type>` flag or inferred block output; the entry is
    /// converted to this type.
    Flag(Type),
}
impl TableGetType {
fn ty(&self) -> &Type {
match self {
TableGetType::Default(x) => x.out_ty(),
TableGetType::Flag(x) => x,
}
}
}
/// Evaluate a `get`/`.` lookup on a table row.
///
/// Resolves the column name from `colarg`, fetches the entry at the row's
/// index, then either:
/// * `Default`: if the entry's runtime type differs from the default
///   argument's output type, resolve and return the default instead;
/// * `Flag`: convert the entry into the asserted/inferred type (see
///   `TableRow::cnv_value`).
fn table_row_get(
    trow: &TableRow,
    colarg: &eng::Argument,
    ty: &TableGetType,
    cx: Context,
) -> Result<(Value, eng::Environment)> {
    let colname = colarg.resolve(|| Value::Nil, &cx).and_then(Str::try_from)?;
    let idx = trow.idx;
    let entry = trow.entry(colname.as_str(), &colarg.tag)?;
    let v = match ty {
        TableGetType::Default(x) => {
            let entry: Value = entry.into();
            // type mismatch -> fall back to the supplied default value
            if &entry.ty() != x.out_ty() {
                x.resolve(|| Value::Nil, &cx)
            } else {
                Ok(entry)
            }
        }
        TableGetType::Flag(x) => TableRow::cnv_value(entry, x, idx, &colname, &colarg.tag),
    };
    v.and_then(|x| cx.done(x))
}
/// Index of a named field within a product type's backing data vector.
struct FieldAccessor(usize);
impl FieldAccessor {
    /// Construct a field accessor for the type `ty`. Returns the accessor and the return type of
    /// the field.
    ///
    /// Only `Ty::Def` product types can be queried; sum types and primitive
    /// types produce a wrong-input-type error.
    fn construct(ty: &Type, field_arg: &eng::Argument, err_tag: &Tag) -> Result<(Self, Type)> {
        match ty {
            Ty::Def(tydef) => {
                // TypeDefs can use `get` to access a field, so only works for product types.
                // The field is checked, then the accessor index is passed through for the eval Step
                if !matches!(tydef.structure(), types::TypeVariant::Product(_)) {
                    let mut err = Error::wrong_op_input_type(ty, err_tag);
                    err.help_msg = Some("types with `sum` structure cannot be queried into".into());
                    return Err(err);
                }
                let fields = match tydef.structure() {
                    types::TypeVariant::Product(fields) => fields,
                    _ => unreachable!("just checked that we are on Product type"),
                };
                // the field name must be a literal so it can be checked here
                let field_name = field_arg.extract_literal::<Str>()?.as_str();
                let (idx, field) = fields
                    .iter()
                    .enumerate()
                    .find(|(_, f)| f.name().str() == field_name)
                    .ok_or_else(|| Error::field_not_found(&field_arg.tag, tydef))?;
                let out_ty = field.ty().clone();
                Ok((FieldAccessor(idx), out_ty))
            }
            x => Err(Error::wrong_op_input_type(x, err_tag)),
        }
    }
    /// Extract the field value at the stored index from `val`.
    ///
    /// NOTE(review): when `get_mut` yields mutable access the field is moved
    /// out with `remove` (avoiding a clone); otherwise it is cloned out of the
    /// shared data. Presumably `get_mut` succeeds only for uniquely-held
    /// data — confirm against `OgmaData`'s definition.
    fn get(&self, val: Value) -> Result<Value> {
        let mut x: OgmaData = val.try_into()?;
        Ok(if let Some(x) = x.get_mut() {
            x.data.remove(self.0)
        } else {
            x.data()[self.0].clone()
        })
    }
}
// ------ Dot Op ---------------------------------------------------------------
impl ast::DotOperatorBlock {
    /// Help message for the infix `.` operator.
    fn help() -> HelpMessage {
        HelpMessage {
            desc: "extract a value out of a structure using an infix operator".into(),
            params: vec![HelpParameter::Required("=> $foo.bar".into())],
            examples: vec![
                HelpExample {
                    desc: "extract the x coord of a point structure",
                    code: "$point.x",
                },
                HelpExample {
                    desc: "get the value of a column entry in a TableRow",
                    code: "$table-row.col-name",
                },
                HelpExample {
                    desc: "explicitly constrain output type of a column",
                    code: "$table-row.col-name:Str",
                },
            ],
            ..HelpMessage::new(".")
        }
    }
    /// Consists of 2 terms: `input.field`.
    /// For TableRow input we handle separately
    ///
    /// NOTE(review): the spelling `instrinsic` (sic) is retained — the fn is
    /// referenced by this exact name elsewhere in this file.
    fn instrinsic(mut blk: Block) -> Result<Step> {
        let input = blk.next_arg()?.supplied(None)?.concrete()?;
        let field = blk.next_arg()?.supplied(Ty::Nil)?;
        match input.out_ty() {
            Ty::TabRow => {
                // the column name must resolve to a Str; the output type must
                // be known/inferable since no default value can be given here
                let colarg = field.returns(Ty::Str)?.concrete()?;
                let ty = blk
                    .output_ty()
                    .ok_or_else(|| Error::unknown_blk_output_type(blk.blk_tag()))?;
                let ty = TableGetType::Flag(ty);
                blk.eval(ty.ty().clone(), move |lhs_input, cx| {
                    let trow: TableRow = input.resolve(|| lhs_input, &cx)?.try_into()?;
                    table_row_get(&trow, &colarg, &ty, cx)
                })
            }
            x => {
                // field access on a user-defined product type
                let field = field.concrete()?;
                let (facc, out_ty) = FieldAccessor::construct(x, &field, blk.op_tag())?;
                blk.eval(out_ty, move |lhs_input, cx| {
                    let input = input.resolve(|| lhs_input, &cx)?;
                    facc.get(input).and_then(|x| cx.done(x))
                })
            }
        }
    }
}
// ------ Input ----------------------------------------------------------------
/// Help message for `\` (input): seeds the next pipeline block with a value.
fn in_help() -> HelpMessage {
    let examples = vec![
        HelpExample {
            desc: "feed in a number",
            code: "\\ 3.14",
        },
        HelpExample {
            desc: "feed in a string",
            code: "\\ 'hello, world!'",
        },
    ];
    HelpMessage {
        desc: "sets the input value for the next pipeline block".into(),
        params: vec![HelpParameter::Required("input".into())],
        examples,
        ..HelpMessage::new("\\")
    }
}
/// `\` implementation: resolve the single argument and pass it through as the
/// block's output; the output type is the argument's type.
fn in_intrinsic(mut blk: Block) -> Result<Step> {
    let input_arg = blk.next_arg()?.supplied(None)?.concrete()?;
    let out_ty = input_arg.out_ty().clone();
    blk.eval(out_ty, move |value, cx| {
        let resolved = input_arg.resolve(|| value, &cx)?;
        cx.done(resolved)
    })
}
// ------ Length ---------------------------------------------------------------
/// Help message for `len`: table/string length, with a `--cols` flag.
fn len_help() -> HelpMessage {
    let desc = "return the length of a table or string (chars)
table length **does not include header row**"
        .into();
    let flags = vec![("cols", "return the number of columns in a table")];
    let examples = vec![
        HelpExample {
            desc: "return the number of files on the filesystem",
            code: "ls | filter type --Str eq file | len",
        },
        HelpExample {
            desc: "columns in the ls table",
            code: "ls | len --cols",
        },
        HelpExample {
            desc: "length of a string",
            code: "\\ 'Hello, 🌎!' | len",
        },
    ];
    HelpMessage {
        desc,
        flags,
        examples,
        ..HelpMessage::new("len")
    }
}
/// `len` implementation.
///
/// * Str input: number of chars (not bytes).
/// * Tab input: row count excluding the header, or column count with `--cols`.
/// * anything else: wrong-input-type error.
fn len_intrinsic(mut blk: Block) -> Result<Step> {
    match blk.in_ty() {
        Ty::Str => blk.eval_o(|input, cx| {
            let s = Str::try_from(input)?;
            let chars = s.chars().count();
            cx.done_o(Number::from(chars))
        }),
        Ty::Tab => {
            let count_cols = blk.get_flag("cols").is_some();
            blk.eval_o(move |input, cx| {
                let table = Table::try_from(input)?;
                let n = if count_cols {
                    table.cols_len()
                } else {
                    // exclude the header row
                    table.rows_len().saturating_sub(1)
                };
                cx.done_o(Number::from(n))
            })
        }
        x => Err(Error::wrong_op_input_type(x, blk.op_tag())),
    }
}
// ------ Let ------------------------------------------------------------------
/// Help message for `let`: variable binding.
fn let_help() -> HelpMessage {
    let desc = "assign variable identifiers to expression results
each binding takes the form `<expr> $var`
optionally a final `$var` can be specified which assigns the input
to `$var` and throughputs the input as the output
variables are scoped to within the expression they are defined"
        .into();
    let params = vec![
        HelpParameter::Optional("<expr-1> $var-1".into()),
        HelpParameter::Optional("<expr-2> $var-2".into()),
        HelpParameter::Required("...".into()),
        HelpParameter::Optional("$var-final".into()),
    ];
    let examples = vec![
        HelpExample {
            desc: "assign $x to the number 5",
            code: "\\ 5 | let $x",
        },
        HelpExample {
            desc: "assign $x to 1, $y to 2, $z to 3",
            code: "\\ 6 | let {- 5} $x {/ 3} $y {* 0.5} $z",
        },
        HelpExample {
            desc: "assign $x to double input, assign $y to input and pass through",
            code: "let {* 2} $x $y",
        },
    ];
    HelpMessage {
        desc,
        params,
        examples,
        ..HelpMessage::new("let")
    }
}
/// `let` implementation: bind expression results to variables.
///
/// Arguments are consumed pairwise as `<expr> $var`; an optional trailing
/// `$var` binds the block input itself. The input value is passed through as
/// the output, so the output type equals the input type.
fn let_intrinsic(mut blk: Block) -> Result<Step> {
    type Binding = (eng::Variable, eng::Argument);
    let mut bindings = Vec::with_capacity(blk.args_len() / 2);
    while blk.args_len() > 1 {
        let e = blk.next_arg()?.supplied(None)?.concrete()?;
        let argnode = blk.next_arg()?.node();
        // the variable takes the bound expression's output type
        let v = blk.create_var_ref(argnode, e.out_ty().clone())?;
        bindings.push((v, e));
    }
    // if there is a trailing binding, the input is bound to that variable, and passed through the
    // block as the output. if not, `let` returns the input type!
    let ty = blk.in_ty().clone();
    let trailing_binding = if blk.args_len() > 0 {
        let argnode = blk.next_arg()?.node();
        let v = blk.create_var_ref(argnode, ty.clone())?;
        Some(v)
    } else {
        None
    };
    // resolve each bound expression against the input and store the result in
    // the matching variable within the evaluation environment
    fn bind_vars(bindings: &[Binding], value: &Value, cx: &mut Context) -> Result<()> {
        for (var, e) in bindings {
            let v = e.resolve(|| value.clone(), cx)?;
            var.set_data(&mut cx.env, v);
        }
        Ok(())
    }
    blk.eval(ty, move |value, mut cx| {
        bind_vars(&bindings, &value, &mut cx)?;
        if let Some(trailing_var) = &trailing_binding {
            trailing_var.set_data(&mut cx.env, value.clone());
        }
        // throughput: the input value is returned unchanged
        cx.done(value)
    })
}
// ------ Nth ------------------------------------------------------------------
/// Help message for `nth`: indexed element access.
fn nth_help() -> HelpMessage {
    let desc = "retrieve the nth element of a data structure
String: retrieves the nth character
Table: retrieves the nth row and applies the expression"
        .into();
    let params = vec![
        HelpParameter::Required("index".into()),
        HelpParameter::Optional("expr".into()),
    ];
    let examples = vec![
        HelpExample {
            desc: "get the first row of a table",
            code: "nth 0 {get col-name}",
        },
        HelpExample {
            desc: "get the 2nd last row of a table",
            code: "nth {len | - 2} {get col-name}",
        },
        HelpExample {
            desc: "get the 10th character of a string",
            code: "\\ 'Hello, world!' | nth 10",
        },
    ];
    HelpMessage {
        desc,
        params,
        examples,
        ..HelpMessage::new("nth")
    }
}
/// `nth` implementation.
///
/// * Table input: resolve the index, bounds-check against the data rows, then
///   evaluate the supplied expression with the selected `TableRow` as input.
/// * Str input: return the nth character (char-indexed, not byte-indexed).
/// * anything else: wrong-input-type error.
fn nth_intrinsic(mut blk: Block) -> Result<Step> {
    match blk.in_ty() {
        Ty::Tab => {
            let n = blk
                .next_arg()?
                .supplied(None)?
                .returns(Ty::Num)?
                .concrete()?;
            // the expression receives the selected row; its output type is the
            // block's output type
            let expr = blk.next_arg()?.supplied(Ty::TabRow)?.concrete()?;
            let oty = expr.out_ty().clone();
            blk.eval(oty, move |table, cx| {
                // nth is adj by one to account for header
                let nth = n
                    .resolve(|| table.clone(), &cx)
                    .and_then(|v| cnv_num_to_uint::<usize>(v, &n.tag))?;
                let table = Table::try_from(table)?;
                if nth + 1 >= table.rows_len() {
                    return Err(Error::eval(
                        &n.tag,
                        "index is outside table bounds",
                        format!("this resolves to `{}`", nth),
                        None,
                    ));
                }
                let trow = TableRow::new(table, Default::default(), nth + 1);
                expr.resolve(|| trow.into(), &cx).and_then(|v| cx.done(v))
            })
        }
        Ty::Str => {
            let n = blk
                .next_arg()?
                .supplied(None)?
                .returns(Ty::Num)?
                .concrete()?;
            blk.eval_o::<_, Str>(move |string, cx| {
                let nth = n
                    .resolve(|| string.clone(), &cx)
                    .and_then(|v| cnv_num_to_uint::<usize>(v, &n.tag))?;
                Str::try_from(string)
                    .and_then(|s| {
                        // chars().nth walks char-wise; missing index is an error
                        s.chars().nth(nth).ok_or_else(|| {
                            Error::eval(
                                &n.tag,
                                "index is outside string bounds",
                                format!("this resolves to `{}`", nth),
                                None,
                            )
                        })
                    })
                    .map(Str::from)
                    .and_then(|x| cx.done_o(x))
            })
        }
        x => Err(Error::wrong_op_input_type(x, blk.op_tag())),
    }
}
// ------ Rand -----------------------------------------------------------------
/// Help message for `rand`: random number (or table of numbers) generation.
fn rand_help() -> HelpMessage {
    let desc = "return a random number
rand has four ways of calling:
1. Without arguments: this returns a number (0,1],
2. With one argument: this returns a number (0,to],
3. With two arguments: this returns a number (from,to],
4. With three arguments: this returns a table populated with random numbers (from,to]"
        .into();
    let params = vec![
        HelpParameter::Optional("from".into()),
        HelpParameter::Optional("to".into()),
        HelpParameter::Optional("length".into()),
    ];
    let examples = vec![
        HelpExample {
            desc: "random integer from 0 to 9",
            code: "rand 0 10 | floor",
        },
        HelpExample {
            desc: "create 10 random numbers",
            code: "rand 0 1 10",
        },
    ];
    HelpMessage {
        desc,
        params,
        examples,
        ..HelpMessage::new("rand")
    }
}
/// `rand` implementation; behaviour keys off the argument count (see
/// [`rand_help`]): 0 args -> (0,1], 1 arg -> (0,to], 2 args -> (from,to],
/// 3 args -> a table of `length` random numbers in (from,to].
fn rand_intrinsic(mut blk: Block) -> Result<Step> {
    let args = blk.args_len();
    // consume the next argument, asserting it resolves to a Num
    let mut next_num = || {
        blk.next_arg()
            .and_then(|x| x.supplied(None))
            .and_then(|x| x.returns(Ty::Num))
            .and_then(|x| x.concrete())
            .map(Some)
    };
    let (from, to, len) = match args {
        1 => (None, next_num()?, None),
        2 => (next_num()?, next_num()?, None),
        3 => (next_num()?, next_num()?, next_num()?),
        _ => (None, None, None),
    };
    // resolve an optional bound argument against the input, falling back to `def`
    fn bnd(arg: Option<&eng::Argument>, i: &mut Value, cx: &Context, def: f64) -> Result<f64> {
        match arg {
            Some(x) => Ok(Number::try_from(x.resolve(|| i.clone(), cx)?)?.as_f64()),
            None => Ok(def),
        }
    }
    let tag = blk.op_tag().clone();
    if args == 3 {
        // three args: build a single-column 'rand' table of `len` numbers
        let len = len.unwrap();
        blk.eval_o(move |mut i, cx| {
            let f = bnd(from.as_ref(), &mut i, &cx, 0.0)?;
            let t = bnd(to.as_ref(), &mut i, &cx, 1.0)?;
            let d = t - f;
            let len: usize = cnv_num_to_uint(len.resolve(|| i, &cx)?, &len.tag)?;
            check_from_lt_to(f, t, &tag)?;
            let mut table = InnerTable::new();
            let rng = fastrand::Rng::new();
            table
                .add_col(once(o("rand")).chain(repeat_with(|| rng.f64() * d + f).take(len).map(n)));
            cx.done_o(Table::from(table))
        })
    } else {
        blk.eval_o(move |mut i, cx| {
            let f = bnd(from.as_ref(), &mut i, &cx, 0.0)?;
            let t = bnd(to.as_ref(), &mut i, &cx, 1.0)?;
            let d = t - f;
            check_from_lt_to(f, t, &tag)?;
            // scale the unit random sample into the (from,to] range
            cx.done_o(Number::from(fastrand::f64() * d + f))
        })
    }
}
fn check_from_lt_to(from: f64, to: f64, tag: &Tag) -> Result<()> {
if from >= to {
Err(Error::eval(
tag,
format!("from must be less than to. found from: {} to: {}", from, to),
None,
None,
))
} else {
Ok(())
}
}
// ------ Range ----------------------------------------------------------------
/// Help message for `range`: integer table construction.
fn range_help() -> HelpMessage {
    let desc = "create a single column table of integers (from,to]
`from` is inclusive, `to` is exclusive
`to` can be omitted if input is a number"
        .into();
    let params = vec![
        HelpParameter::Required("from".into()),
        HelpParameter::Optional("to".into()),
    ];
    let examples = vec![
        HelpExample {
            desc: "integers from 0 to 9",
            code: "range 0 10",
        },
        HelpExample {
            desc: "the five preceding numbers",
            code: "\\ 10 | range - 5",
        },
    ];
    HelpMessage {
        desc,
        params,
        examples,
        ..HelpMessage::new("range")
    }
}
/// `range` implementation: builds a single-column (`i`) table of integers,
/// `from` inclusive and `to` exclusive.
///
/// When only `from` is supplied and the input is a Num, the block input
/// provides `to`.
fn range_intrinsic(mut blk: Block) -> Result<Step> {
    // build the table rows in parallel from the integer range
    fn table_range(from: u128, to: u128) -> Table {
        let mut t = vec![vec![o("i")]];
        t.par_extend((from..to).into_par_iter().map(|x| vec![n(x)]));
        Table::from(::table::Table::from(t))
    }
    blk.assert_output(Type::Tab);
    let from = blk
        .next_arg()?
        .supplied(None)?
        .returns(Type::Num)?
        .concrete()?;
    let alen = blk.args_len();
    match (alen, blk.in_ty()) {
        // no remaining args and Num input: the input supplies `to`
        (0, Ty::Num) => {
            let blktag = blk.blk_tag().clone();
            blk.eval_o(move |input, cx| {
                let from = from
                    .resolve(|| input.clone(), &cx)
                    .and_then(|n| cnv_num_to_uint(n, &from.tag))?;
                let to = cnv_num_to_uint(input, &blktag)?;
                cx.done_o(table_range(from, to))
            })
        }
        _ => {
            let to = blk
                .next_arg()?
                .supplied(None)?
                .returns(Type::Num)?
                .concrete()?;
            blk.eval_o(move |input, cx| {
                let from = from
                    .resolve(|| input.clone(), &cx)
                    .and_then(|n| cnv_num_to_uint(n, &from.tag))?;
                let to = to
                    .resolve(|| input.clone(), &cx)
                    .and_then(|n| cnv_num_to_uint(n, &to.tag))?;
                cx.done_o(table_range(from, to))
            })
        }
    }
}
// ------ Table ctor -----------------------------------------------------------
/// Help message for the `Table` constructor (variadic header names).
fn table_help() -> HelpMessage {
    let examples = vec![
        HelpExample {
            desc: "create an empty table",
            code: "Table",
        },
        HelpExample {
            desc: "create table with the headers 'Foo' and 'Bar'",
            code: "Table 'Foo' 'Bar'",
        },
    ];
    variadic_help(
        "Table",
        "create an empty table with the given table headers",
        examples,
    )
}
/// `Table` implementation: each argument is a header name resolving to a Str;
/// the result is a table with those headers and no data rows.
fn table_intrinsic(mut blk: Block) -> Result<Step> {
    // table takes zero or more arguments that resolve to Str (header name)
    let arg_count = blk.args_len();
    let mut headers = Vec::with_capacity(arg_count);
    for _ in 0..arg_count {
        let arg = blk.next_arg()?.supplied(None)?.returns(Ty::Str)?.concrete()?;
        headers.push(arg);
    }
    blk.eval_o(move |input, cx| {
        let mut table = table::Table::new();
        for header in &headers {
            let name = header.resolve(|| input.clone(), &cx)?;
            table.add_col(once(name));
        }
        cx.done_o(Table::from(table))
    })
}
// ------ To Str ---------------------------------------------------------------
/// Help message for `to-str`: stringify, with an optional number format.
fn to_str_help() -> HelpMessage {
    let example = HelpExample {
        desc: "format a number as a percentage",
        code: "\\ 0.4123 | to-str '[.2%]'",
    };
    HelpMessage {
        desc: "convert the input into a string".into(),
        params: vec![HelpParameter::Optional("fmt".into())],
        examples: vec![example],
        ..HelpMessage::new("to-str")
    }
}
/// `to-str` implementation: stringify the block input.
///
/// * Bool / Str: direct conversion.
/// * Num: an optional *literal* format string is parsed into a
///   `numfmt::Formatter` up-front; a parse failure is a hard error.
/// * anything else: falls back to the table-cell printer with a default
///   formatter.
fn to_str_intrinsic(mut blk: Block) -> Result<Step> {
    blk.assert_output(Ty::Str);
    match blk.in_ty() {
        Ty::Bool => blk.eval_o(|v, c| c.done_o(Str::from(bool::try_from(v)?.to_string()))),
        Ty::Num => {
            let fmt = if blk.args_len() == 0 {
                None
            } else {
                let f = blk
                    .next_arg()?
                    .supplied(None)?
                    .returns(Ty::Str)?
                    .concrete()?;
                // the format string must be a literal so it can be parsed here
                Some(f.extract_literal::<Str>()?.parse::<numfmt::Formatter>().map_err(|e| {
                    Error {
                        cat: err::Category::Parsing,
                        desc: format!("invalid format string: {}", e),
                        traces: vec![err::Trace::from_tag(&f.tag, Some("invalid format string".into()))],
                        help_msg: Some("Number formatting syntax can be found at
            <https://daedalus.report/d/docs/ogma.book/05%20syntax%20and%20semantics/5.4%20number%20formatting.md?pwd-raw=docs>".into()),
                        hard: true,
                    }
                })?)
            };
            blk.eval_o(move |v, cx| {
                let n = Number::try_from(v)?;
                // clone to obtain a mutable formatter per evaluation (`fmt`
                // is captured immutably by the closure)
                let s = fmt
                    .clone()
                    .as_mut()
                    .map(|f| f.fmt(n.as_f64()).to_string())
                    .unwrap_or_else(|| n.to_string());
                cx.done_o(Str::from(s))
            })
        }
        Ty::Str => blk.eval_o(|v, c| c.done_o(Str::try_from(v)?)),
        _ => blk.eval_o(|v, cx| {
            cx.done_o(print::fmt_cell(
                &Entry::from(v),
                &mut numfmt::Formatter::default(),
            ))
        }),
    }
}
// ------ Tuple ----------------------------------------------------------------
/// Help message for the `Tuple` constructor (variadic field expressions).
fn tuple_help() -> HelpMessage {
    let examples = vec![
        HelpExample {
            desc: "create a two element tuple of numbers. type: U_Num-Num_",
            code: "Tuple 1 2",
        },
        HelpExample {
            desc: "create 3 numbers after input. type: U_Num-Num-Num_",
            code: "\\ 3 | Tuple {+ 1} {+ 2} {+ 3}",
        },
        HelpExample {
            desc: "tuples are heterogeneous. type: U_Num-Str-Bool_",
            code: "Tuple 1 'foo' #t",
        },
        HelpExample {
            desc: "get the first and third element",
            code: "Tuple 1 'foo' 2 | + {get t0} {get t2}",
        },
    ];
    variadic_help(
        "Tuple",
        "construct a tuple of the result of each expression
tuples impl `eq` and `cmp` if all its fields also implement `eq` and `cmp`
tuples have unique types: `U_<t0_Ty>-<t1_Ty>_`
access of the fields is using `get t#` with the field number",
        examples,
    )
}
fn tuple_intrinsic(mut blk: Block) -> Result<Step> {
let len = blk.args_len();
if len < 2 {
return Err(Error::insufficient_args(blk.blk_tag(), len as u8, None));
}
let mut v = Vec::with_capacity(len);
for _ in 0..len {
v.push(blk.next_arg()?.supplied(None)?.concrete()?);
}
let ty = Arc::new(Tuple::ty(v.iter().map(|x| x.out_ty().clone()).collect()));
let oty = Type::Def(ty.clone());
blk.assert_output(oty.clone());
blk.eval(oty, move |input, cx| {
let mut data = Vec::with_capacity(v.len());
for arg in &v {
data.push(arg.resolve(|| input.clone(), &cx)?);
}
cx.done(OgmaData::new(ty.clone(), None, data))
})
}
| 33.905779 | 134 | 0.449776 |
eb3288a28e2bccb9ac918d049cfa6c786a449141 | 328 | #![feature(box_syntax)]
// Borrow-check compile-fail test: `y` is moved into `x` inside nested loops
// while still being read by `println!` on later iterations of the outer loop,
// so the assignment must be rejected as a use of a moved value.
fn main() {
    let y: Box<isize> = box 42;
    let mut x: Box<isize>;
    loop {
        println!("{}", y);
        loop {
            loop {
                loop {
                    // this assignment may execute more than once, moving `y`
                    // repeatedly — hence the expected diagnostic on this line
                    x = y; //~ ERROR use of moved value
                    x.clone();
                }
            }
        }
    }
}
| 18.222222 | 55 | 0.32622 |
1468c85f5916abdd5efbd50e8a66d154e1439b8b | 115 | fn main() {
macro_rules! try {
($e:expr) => (match $e { Ok(e) => e, Err(e) => return Err(e) })
}
}
| 19.166667 | 71 | 0.426087 |
11aa307d5fa6a9243f6c38c1e4e3d4b1ecd0a146 | 12,819 | use simple_error::{SimpleResult, bail};
use serde::{Serialize, Deserialize};
use std::fmt;
use cipher::{NewStreamCipher, SyncStreamCipher};
use salsa20::Salsa20;
use chacha20::ChaCha20Legacy;
// These aren't implemented in the standard cipher crate
use nettle::cipher::Salsa20_128;
use nettle::cipher::insecure_do_not_use::ArcFour;
use crate::{Transformation, TransformerTrait, KeyOrIV};
/// Which stream cipher should we use?
#[allow(non_camel_case_types)]
#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Copy, Serialize, Deserialize)]
pub enum StreamCipherType {
    /// Salsa20 (20 rounds, 128 or 256-bit key, 64-bit IV)
    Salsa20,
    /// ChaCha20 (20 rounds, 256-bit key, 64-bit IV)
    ChaCha,
    /// Arc4 / RC4 (any size key (though we only support powers of 2), no IV)
    Arc4,
}
/// Configures a stream cipher.
///
/// Configure all the settings for a stream cipher in a serializable place. Note
/// that the settings must match standards or an error will be returned when
/// creating (or transforming).
#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Copy, Serialize, Deserialize)]
pub struct TransformStreamCipher {
    // which stream cipher algorithm to apply
    cipher: StreamCipherType,
    // validated key material (the length is encoded in the KeyOrIV variant)
    key: KeyOrIV,
    // optional IV/nonce; required by Salsa20/ChaCha, rejected for Arc4
    // (see validate_settings)
    iv: Option<KeyOrIV>,
    // always initialised to 0 in `new` and not consulted by any code in this
    // file — presumably reserved for a keystream offset; TODO confirm
    offset: u64,
}
impl fmt::Display for TransformStreamCipher {
    /// Human-readable form; simply emits the derived `Debug` representation.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_fmt(format_args!("{:?}", self))
    }
}
impl TransformStreamCipher {
    /// Create a new instance of [`TransformStreamCipher`].
    ///
    /// The key and optional IV are length-validated via [`KeyOrIV::new`], and
    /// the (cipher, key, iv) combination is checked by `validate_settings`
    /// before the [`Transformation`] is returned.
    pub fn new(cipher: StreamCipherType, key: Vec<u8>, iv: Option<Vec<u8>>) -> SimpleResult<Transformation> {
        // Validate and store the key / iv
        let key = KeyOrIV::new(key)?;
        let iv = match iv {
            Some(iv) => Some(KeyOrIV::new(iv)?),
            None => None,
        };
        // Create the result so we can validate it
        let result = TransformStreamCipher {
            cipher: cipher,
            key: key,
            iv: iv,
            offset: 0,
        };
        // This validates the key length and iv and other characteristics
        result.validate_settings()?;
        Ok(Transformation::FromStreamCipher(result))
    }
    /// Internal function to decrypt with Salsa20 (64-bit nonce required;
    /// 128- or 256-bit key).
    fn decrypt_salsa20(self, buffer: &Vec<u8>) -> SimpleResult<Vec<u8>> {
        // Make sure the nonce is sane
        let nonce = match self.iv {
            Some(iv) => iv.get64()?,
            None => bail!("Salsa20 requires an IV/Nonce"),
        };
        match self.key {
            // The stream-cipher library pack doesn't seem to support
            // Salsa20-128, so we use the nettle library (which is simpler to
            // use, but seems to have some bugs)
            KeyOrIV::Bits128(k) => {
                // Create a buffer for the output
                let mut encrypted = vec![0; buffer.len()];
                let mut c = match Salsa20_128::with_key_and_nonce(&k, &nonce) {
                    Ok(c) => c,
                    Err(e) => bail!("Salsa20_128 cipher failed: {}", e),
                };
                c.crypt(&mut encrypted, buffer);
                Ok(encrypted)
            },
            KeyOrIV::Bits256(k) => {
                let key = salsa20::Key::from_slice(&k);
                let nonce = salsa20::Nonce::from_slice(&nonce);
                let mut cipher = Salsa20::new(&key, &nonce);
                // Clone the buffer to something mutable
                let mut buffer = buffer.clone();
                // Apply the keystream to decrypt it
                cipher.apply_keystream(&mut buffer);
                // And I guess that's it
                Ok(buffer)
            },
            _ => bail!("Invalid key size for Salsa20"),
        }
    }
    /// Internal function to encrypt
    fn encrypt_salsa20(self, buffer: &Vec<u8>) -> SimpleResult<Vec<u8>> {
        // Encrypting is literally identical to decrypting
        self.decrypt_salsa20(buffer)
    }
    /// Internal function to decrypt with the legacy (64-bit nonce) ChaCha20
    /// construction; requires a 256-bit key.
    fn decrypt_chacha(self, buffer: &Vec<u8>) -> SimpleResult<Vec<u8>> {
        // Get the nonce
        // Make sure the nonce is sane
        let nonce = match self.iv {
            Some(iv) => iv.get64()?,
            None => bail!("ChaCha requires an IV/Nonce"),
        };
        let nonce = chacha20::LegacyNonce::from_slice(&nonce);
        // Get the key
        let key = self.key.get256()?;
        let key = chacha20::Key::from_slice(&key);
        let mut cipher = ChaCha20Legacy::new(&key, &nonce);
        // Clone the buffer to something mutable
        let mut buffer = buffer.clone();
        // Apply the keystream to decrypt it
        cipher.apply_keystream(&mut buffer);
        // And I guess that's it
        Ok(buffer)
    }
    /// Internal function to encrypt
    fn encrypt_chacha(self, buffer: &Vec<u8>) -> SimpleResult<Vec<u8>> {
        // Encrypting is literally identical to decrypting
        self.decrypt_chacha(buffer)
    }
    /// Internal function to decrypt with RC4; accepts any supported key
    /// variant, and uses no IV.
    fn decrypt_arc4(self, buffer: &Vec<u8>) -> SimpleResult<Vec<u8>> {
        let mut c = match self.key {
            KeyOrIV::Bits64(k) => ArcFour::with_key(&k),
            KeyOrIV::Bits128(k) => ArcFour::with_key(&k),
            KeyOrIV::Bits192(k) => ArcFour::with_key(&k),
            KeyOrIV::Bits256(k) => ArcFour::with_key(&k),
        };
        // Create a buffer for the output
        let mut encrypted = vec![0; buffer.len()];
        // Do the encryption
        c.crypt(&mut encrypted, buffer);
        Ok(encrypted)
    }
    /// Internal function to encrypt
    fn encrypt_arc4(self, buffer: &Vec<u8>) -> SimpleResult<Vec<u8>> {
        // Encrypting is literally identical to decrypting
        self.decrypt_arc4(buffer)
    }
    /// Sanity check settings (key size, IV, etc).
    ///
    /// The match tuple order is `(cipher, iv, key)`.
    fn validate_settings(self) -> SimpleResult<()> {
        match (self.cipher, self.iv, self.key) {
            (StreamCipherType::Salsa20, Some(KeyOrIV::Bits64(_)), KeyOrIV::Bits128(_)) => (),
            (StreamCipherType::Salsa20, Some(KeyOrIV::Bits64(_)), KeyOrIV::Bits256(_)) => (),
            (StreamCipherType::Salsa20, _, _ ) => bail!("Invalid stream cipher settings for Salsa20"),
            (StreamCipherType::ChaCha, Some(KeyOrIV::Bits64(_)), KeyOrIV::Bits256(_)) => (),
            (StreamCipherType::ChaCha, Some(KeyOrIV::Bits64(_)), _ ) => bail!("Invalid key size for ChaCha"),
            (StreamCipherType::ChaCha, _, KeyOrIV::Bits256(_)) => bail!("Invalid iv size for ChaCha"),
            (StreamCipherType::ChaCha, _, _ ) => bail!("Invalid key and iv sizes for ChaCha"),
            (StreamCipherType::Arc4, None, _ ) => (),
            (StreamCipherType::Arc4, _, _ ) => bail!("Arc4 does not support an IV"),
        }
        Ok(())
    }
}
impl TransformerTrait for TransformStreamCipher {
    /// transform() =~ decrypt
    ///
    /// Settings are re-validated before dispatching to the cipher-specific
    /// routine.
    fn transform(&self, buffer: &Vec<u8>) -> SimpleResult<Vec<u8>> {
        self.validate_settings()?;
        match self.cipher {
            StreamCipherType::Salsa20 => self.decrypt_salsa20(buffer),
            StreamCipherType::ChaCha => self.decrypt_chacha(buffer),
            StreamCipherType::Arc4 => self.decrypt_arc4(buffer),
        }
    }
    /// untransform() =~ encrypt
    fn untransform(&self, buffer: &Vec<u8>) -> SimpleResult<Vec<u8>> {
        self.validate_settings()?;
        match self.cipher {
            StreamCipherType::Salsa20 => self.encrypt_salsa20(buffer),
            StreamCipherType::ChaCha => self.encrypt_chacha(buffer),
            StreamCipherType::Arc4 => self.encrypt_arc4(buffer),
        }
    }
    // encrypt and decrypt use the same machinery, so the transform is
    // always reversible
    fn is_two_way(&self) -> bool {
        true
    }
    // no automatic detection is attempted for stream-cipher output
    fn detect(_buffer: &Vec<u8>) -> Vec<Transformation> where Self: Sized {
        vec![]
    }
    // Unfortunately, we can never tell whether a stream cipher is valid
    fn can_transform(&self, _buffer: &Vec<u8>) -> bool {
        true
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;
    // Round-trip Salsa20 against fixed vectors: 256-bit then 128-bit keys.
    #[test]
    fn test_salsa20() -> SimpleResult<()> {
        // tuples: (plaintext, key, iv, ciphertext)
        let tests: Vec<(Vec<u8>, Vec<u8>, Vec<u8>, Vec<u8>)> = vec![
            (
                b"Testing Salsa20".to_vec(), // Plaintext
                b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA".to_vec(), // Key
                b"BBBBBBBB".to_vec(), // IV
                // Ciphertext
                b"\x96\xb9\x31\xa2\x3b\xff\x65\x11\xe4\xba\x52\x79\xb1\xaa\x57".to_vec(),
            ),
            (
                b"Testing Salsa20".to_vec(), // Plaintext
                b"AAAAAAAAAAAAAAAA".to_vec(), // Key
                b"BBBBBBBB".to_vec(), // IV
                // Ciphertext
                b"\xe8\xc2\x42\x85\x3d\x40\x9b\xa6\x06\xe6\x83\xe0\x0d\x37\xd6".to_vec(),
            ),
        ];
        for (plaintext, key, iv, ciphertext) in tests {
            let transformation = TransformStreamCipher::new(
                StreamCipherType::Salsa20,
                key,
                Some(iv),
            )?;
            // decrypt then re-encrypt; both directions must round-trip
            let result = transformation.transform(&ciphertext)?;
            assert_eq!(plaintext, result, "salsa20 transform {}", std::str::from_utf8(&plaintext).unwrap());
            let result = transformation.untransform(&result)?;
            assert_eq!(ciphertext, result, "salsa20 untransform {}", std::str::from_utf8(&plaintext).unwrap());
        }
        Ok(())
    }
    // Decrypting these ciphertexts must yield all-zero plaintexts.
    #[test]
    fn test_chacha() -> SimpleResult<()> {
        // Test vectors from https://tools.ietf.org/html/draft-strombergson-chacha-test-vectors-00
        // tuples: (key, iv, ciphertext)
        let tests: Vec<(Vec<u8>, Vec<u8>, Vec<u8>)> = vec![
            (
                b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
                  \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00".to_vec(), // Key
                b"\x00\x00\x00\x00\x00\x00\x00\x00".to_vec(), // IV
                // Ciphertext
                b"\x76\xb8\xe0\xad\xa0\xf1\x3d\x90".to_vec(),
            ),
            (
                b"\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
                  \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00".to_vec(), // Key
                b"\x00\x00\x00\x00\x00\x00\x00\x00".to_vec(), // IV
                // Ciphertext
                b"\xc5\xd3\x0a\x7c\xe1\xec\x11\x93".to_vec(),
            ),
        ];
        for (key, iv, ciphertext) in tests {
            let transformation = TransformStreamCipher::new(
                StreamCipherType::ChaCha,
                key,
                Some(iv),
            )?;
            let result = transformation.transform(&ciphertext)?;
            assert_eq!(vec![0; ciphertext.len()], result);
            let result = transformation.untransform(&result)?;
            assert_eq!(ciphertext, result);
        }
        Ok(())
    }
    // Round-trip RC4 with 64-, 128- and 192-bit keys (no IV permitted).
    #[test]
    fn test_arc4() -> SimpleResult<()> {
        // tuples: (plaintext, key, ciphertext)
        let tests: Vec<(Vec<u8>, Vec<u8>, Vec<u8>)> = vec![
            (
                b"My fun RC4 test".to_vec(), // Plaintext
                b"AAAAAAAA".to_vec(), // Key
                // Ciphertext
                b"\x8f\xb2\xc6\x5b\xb5\xcd\xed\xf3\xf9\x9f\x49\x28\x81\x83\x41".to_vec(),
            ),
            (
                b"Testing RC4 with longer key".to_vec(), // Plaintext
                b"AAAAAAAAAAAAAAAA".to_vec(), // Key
                // Ciphertext
                b"\x96\xae\x95\x49\xa9\xcd\xaa\x81\xe8\xe8\x5d\x7c\x93\x99\x41\x43\x57\x29\xfd\xdc\x73\xcc\x77\x59\x01\x0e\xd0".to_vec(),
            ),
            (
                b"Testing 192-bit key".to_vec(), // Plaintext
                b"AAAAAAAAAAAAAAAAAAAAAAAA".to_vec(), // Key
                // Ciphertext
                b"\x96\xae\x95\x49\xa9\xcd\xaa\x81\x8b\x92\x5b\x71\x86\x99\x41\x0b\x1c\x20\xeb".to_vec(),
            ),
        ];
        for (plaintext, key, ciphertext) in tests {
            let transformation = TransformStreamCipher::new(
                StreamCipherType::Arc4,
                key,
                None,
            )?;
            let result = transformation.transform(&ciphertext)?;
            assert_eq!(plaintext, result, "Arc4 transform {}", std::str::from_utf8(&plaintext).unwrap());
            let result = transformation.untransform(&result)?;
            assert_eq!(ciphertext, result, "Arc4 untransform {}", std::str::from_utf8(&plaintext).unwrap());
        }
        Ok(())
    }
}
| 36.314448 | 142 | 0.535221 |
1ca6b6976b38f6a6485b089bd86304feb58d2843 | 668 | // run-pass
#![allow(dead_code, unused_variables)]
/// Should not trigger an ICE in `SpanlessEq` / `consts::constant`
///
/// Issue: https://github.com/rust-lang/rust-clippy/issues/1782
use std::{mem, ptr};
// Regression body for the ICE: never invoked (dead_code is allowed at crate
// level); it only needs to survive lint analysis. Both match arms build a
// transmuted [u8; 4] constant, the pattern that previously ICE'd the lint.
fn spanless_eq_ice() {
    let txt = "something";
    match txt {
        "something" => unsafe {
            ptr::write(
                ptr::null_mut() as *mut u32,
                mem::transmute::<[u8; 4], _>([0, 0, 0, 255]),
            )
        },
        _ => unsafe {
            ptr::write(
                ptr::null_mut() as *mut u32,
                mem::transmute::<[u8; 4], _>([13, 246, 24, 255]),
            )
        },
    }
}
// Empty entry point: this run-pass test only needs to compile and run without
// the lint ICE-ing; `spanless_eq_ice` itself is never called.
fn main() {}
| 23.034483 | 66 | 0.468563 |
01e5a05f68d40af5a7a3e6d6e88412a4c5b06d24 | 2,881 | /*
Attempt at hot pixel detection and removal.
Method:
For each pixel (excluding image border pixels):
1. Compute the standard deviation of a window of pixels (3x3, say)
2. Compute the z-score for the target pixel
    3. If the z-score exceeds a threshold
       we replace the pixel value with the mean of the surrounding window
*/
use crate::{
error,
imagebuffer::ImageBuffer
};
//https://rust-lang-nursery.github.io/rust-cookbook/science/mathematics/statistics.html
/// Arithmetic mean of `data`, or `None` for an empty slice.
///
/// Rewritten from the cookbook version: drops the redundant `as f32` cast on
/// the sum and replaces the match-guard on `usize` with an explicit
/// empty-slice check.
fn mean(data: &[f32]) -> Option<f32> {
    if data.is_empty() {
        return None;
    }
    let sum: f32 = data.iter().sum();
    Some(sum / data.len() as f32)
}
/// Population standard deviation of `data`; `None` when the slice is empty.
fn std_deviation(data: &[f32]) -> Option<f32> {
    // `mean` only succeeds for non-empty slices, so the count guard below is
    // a belt-and-braces check matching the original's match guard.
    let avg = mean(data)?;
    let count = data.len();
    if count == 0 {
        return None;
    }
    let variance = data
        .iter()
        .map(|value| {
            let diff = avg - *value;
            diff * diff
        })
        .sum::<f32>()
        / count as f32;
    Some(variance.sqrt())
}
/// Z-score of `pixel_value` relative to the sample statistics of `data`;
/// `None` when the statistics cannot be computed (empty slice).
fn z_score(pixel_value: f32, data: &[f32]) -> Option<f32> {
    match (mean(data), std_deviation(data)) {
        (Some(avg), Some(sd)) => Some((pixel_value - avg) / sd),
        _ => None,
    }
}
/// Collect the pixel values of a `window_size` x `window_size` window centred
/// on `(x, y)`. Out-of-bounds cells are skipped, so windows near the image
/// border return fewer values.
///
/// Changes from the original: the capacity is derived from `window_size`
/// instead of the hard-coded `36`, and `window_size / 2 * -1` is written as a
/// unary negation.
fn isolate_window(buffer:&ImageBuffer, window_size:i32, x:usize, y:usize) -> error::Result<Vec<f32>> {
    // exact upper bound on the number of collected values
    let mut v: Vec<f32> = Vec::with_capacity((window_size * window_size) as usize);
    let start = -(window_size / 2);
    let end = window_size / 2 + 1;
    for _y in start..end {
        for _x in start..end {
            let get_x = x as i32 + _x;
            let get_y = y as i32 + _y;
            // only sample cells that fall inside the image
            if get_x >= 0 && get_x < buffer.width as i32 && get_y >= 0 && get_y < buffer.height as i32 {
                v.push(buffer.get(get_x as usize, get_y as usize).unwrap());
            }
        }
    }
    Ok(v)
}
/// Detect hot pixels by local z-score and replace them.
///
/// For every non-border pixel, the z-score of the pixel against its
/// `window_size` neighbourhood is computed; if it exceeds `threshold` the
/// pixel is replaced with the window *mean*, otherwise copied unchanged.
///
/// NOTE(review): the file header mentions a median filter, but the
/// implementation substitutes the window mean. Border pixels (the 1px frame)
/// are never written, so they keep whatever `ImageBuffer::new` initialises
/// them to — presumably zero; confirm.
pub fn hot_pixel_detection(buffer:&ImageBuffer, window_size:i32, threshold:f32) -> error::Result<ImageBuffer> {
    let mut map = ImageBuffer::new(buffer.width, buffer.height).unwrap();
    for y in 1..buffer.height - 1 {
        for x in 1..buffer.width -1 {
            let pixel_value = buffer.get(x, y).unwrap();
            let window = isolate_window(buffer, window_size, x, y).unwrap();
            let zscore = z_score(pixel_value, &window[0..]);
            // exceeding the threshold marks a hot pixel -> replace with mean
            if zscore.unwrap() > threshold {
                let m = mean(&window[0..]).unwrap();
                map.put(x, y, m).unwrap();
            } else {
                map.put(x, y, buffer.get(x, y).unwrap()).unwrap();
            }
        }
    }
    Ok(map)
}
7abe5a84c5fff80fde3206c6e0987aa91316cb5a | 151,350 | // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
#![cfg_attr(stage0, feature(custom_attribute))]
#![crate_name = "rustc_resolve"]
#![unstable(feature = "rustc_private")]
#![staged_api]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
html_root_url = "http://doc.rust-lang.org/nightly/")]
#![feature(alloc)]
#![feature(associated_consts)]
#![feature(collections)]
#![feature(rustc_diagnostic_macros)]
#![feature(rustc_private)]
#![feature(staged_api)]
#[macro_use] extern crate log;
#[macro_use] extern crate syntax;
#[macro_use] #[no_link] extern crate rustc_bitflags;
extern crate rustc;
use self::PatternBindingMode::*;
use self::Namespace::*;
use self::NamespaceResult::*;
use self::NameDefinition::*;
use self::ResolveResult::*;
use self::FallbackSuggestion::*;
use self::TypeParameters::*;
use self::RibKind::*;
use self::UseLexicalScopeFlag::*;
use self::ModulePrefixResult::*;
use self::AssocItemResolveResult::*;
use self::NameSearchType::*;
use self::BareIdentifierPatternResolution::*;
use self::ParentLink::*;
use self::ModuleKind::*;
use self::FallbackChecks::*;
use rustc::session::Session;
use rustc::lint;
use rustc::metadata::csearch;
use rustc::metadata::decoder::{DefLike, DlDef, DlField, DlImpl};
use rustc::middle::def::*;
use rustc::middle::lang_items::LanguageItems;
use rustc::middle::pat_util::pat_bindings;
use rustc::middle::privacy::*;
use rustc::middle::subst::{ParamSpace, FnSpace, TypeSpace};
use rustc::middle::ty::{Freevar, FreevarMap, TraitMap, GlobMap};
use rustc::util::nodemap::{NodeMap, NodeSet, DefIdSet, FnvHashMap};
use rustc::util::lev_distance::lev_distance;
use syntax::ast::{Arm, BindByRef, BindByValue, BindingMode, Block};
use syntax::ast::{ConstImplItem, Crate, CrateNum};
use syntax::ast::{DefId, Expr, ExprAgain, ExprBreak, ExprField};
use syntax::ast::{ExprLoop, ExprWhile, ExprMethodCall};
use syntax::ast::{ExprPath, ExprStruct, FnDecl};
use syntax::ast::{ForeignItemFn, ForeignItemStatic, Generics};
use syntax::ast::{Ident, ImplItem, Item, ItemConst, ItemEnum, ItemExternCrate};
use syntax::ast::{ItemFn, ItemForeignMod, ItemImpl, ItemMac, ItemMod, ItemStatic, ItemDefaultImpl};
use syntax::ast::{ItemStruct, ItemTrait, ItemTy, ItemUse};
use syntax::ast::{Local, MethodImplItem, Name, NodeId};
use syntax::ast::{Pat, PatEnum, PatIdent, PatLit, PatQPath};
use syntax::ast::{PatRange, PatStruct, Path, PrimTy};
use syntax::ast::{TraitRef, Ty, TyBool, TyChar, TyF32};
use syntax::ast::{TyF64, TyFloat, TyIs, TyI8, TyI16, TyI32, TyI64, TyInt};
use syntax::ast::{TyPath, TyPtr};
use syntax::ast::{TyRptr, TyStr, TyUs, TyU8, TyU16, TyU32, TyU64, TyUint};
use syntax::ast::TypeImplItem;
use syntax::ast;
use syntax::ast_map;
use syntax::ast_util::{local_def, walk_pat};
use syntax::attr::AttrMetaMethods;
use syntax::ext::mtwt;
use syntax::parse::token::{self, special_names, special_idents};
use syntax::ptr::P;
use syntax::codemap::{self, Span, Pos};
use syntax::visit::{self, Visitor};
use std::collections::{HashMap, HashSet};
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::cell::{Cell, RefCell};
use std::fmt;
use std::mem::replace;
use std::rc::{Rc, Weak};
use std::usize;
use resolve_imports::{Target, ImportDirective, ImportResolution};
use resolve_imports::Shadowable;
// NB: This module needs to be declared first so diagnostics are
// registered before they are used.
pub mod diagnostics;
mod check_unused;
mod record_exports;
mod build_reduced_graph;
mod resolve_imports;
/// Metadata recorded for a single name bound by a pattern.
#[derive(Copy, Clone)]
struct BindingInfo {
    span: Span,                // where the binding occurs in the source
    binding_mode: BindingMode, // by-ref vs. by-value (see `ast::BindingMode`)
}
// Map from the name in a pattern to its binding mode (and span, via
// `BindingInfo`).
type BindingMap = HashMap<Name, BindingInfo>;
/// How a pattern is allowed to bind names, depending on where it occurs.
#[derive(Copy, Clone, PartialEq)]
enum PatternBindingMode {
    RefutableMode,           // binding in a refutable position
    LocalIrrefutableMode,    // irrefutable binding introduced by a local
    ArgumentIrrefutableMode, // irrefutable binding introduced by an argument
}
/// The two independent namespaces a name can be defined in.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
enum Namespace {
    TypeNS,  // the type namespace (also carries modules; see `TypeNsDef`)
    ValueNS  // the value namespace
}
/// A NamespaceResult represents the result of resolving an import in
/// a particular namespace. The result is either definitely-resolved,
/// definitely- unresolved, or unknown.
#[derive(Clone)]
enum NamespaceResult {
    /// Means that resolve hasn't gathered enough information yet to determine
    /// whether the name is bound in this namespace. (That is, it hasn't
    /// resolved all `use` directives yet.)
    UnknownResult,
    /// Means that resolve has determined that the name is definitely
    /// not bound in the namespace.
    UnboundResult,
    /// Means that resolve has determined that the name is bound in the Module
    /// argument, and specified by the NameBindings argument.
    BoundResult(Rc<Module>, Rc<NameBindings>)
}
impl NamespaceResult {
    /// True while resolution has not yet produced an answer for this
    /// name (unresolved `use` directives remain).
    fn is_unknown(&self) -> bool {
        if let UnknownResult = *self { true } else { false }
    }
    /// True once the name is known to be unbound in this namespace.
    fn is_unbound(&self) -> bool {
        if let UnboundResult = *self { true } else { false }
    }
}
/// Outcome of looking a name up directly within one module.
enum NameDefinition {
    // The name was unbound.
    NoNameDefinition,
    // The name identifies an immediate child.
    ChildNameDefinition(Def, LastPrivate),
    // The name identifies an import.
    ImportNameDefinition(Def, LastPrivate),
}
// AST walk driving name resolution: each visit_* hook dispatches to the
// corresponding resolve_* method on the resolver, which records defs and
// manages the rib stacks.
impl<'a, 'v, 'tcx> Visitor<'v> for Resolver<'a, 'tcx> {
    fn visit_item(&mut self, item: &Item) {
        self.resolve_item(item);
    }
    fn visit_arm(&mut self, arm: &Arm) {
        self.resolve_arm(arm);
    }
    fn visit_block(&mut self, block: &Block) {
        self.resolve_block(block);
    }
    fn visit_expr(&mut self, expr: &Expr) {
        self.resolve_expr(expr);
    }
    fn visit_local(&mut self, local: &Local) {
        self.resolve_local(local);
    }
    fn visit_ty(&mut self, ty: &Ty) {
        self.resolve_type(ty);
    }
    fn visit_generics(&mut self, generics: &Generics) {
        self.resolve_generics(generics);
    }
    fn visit_poly_trait_ref(&mut self,
                            tref: &ast::PolyTraitRef,
                            m: &ast::TraitBoundModifier) {
        match self.resolve_trait_reference(tref.trait_ref.ref_id, &tref.trait_ref.path, 0) {
            Ok(def) => self.record_def(tref.trait_ref.ref_id, def),
            Err(_) => { /* error already reported */ }
        }
        visit::walk_poly_trait_ref(self, tref, m);
    }
    fn visit_variant(&mut self, variant: &ast::Variant, generics: &Generics) {
        if let Some(ref dis_expr) = variant.node.disr_expr {
            // resolve the discriminator expr as a constant
            self.with_constant_rib(|this| {
                this.visit_expr(&**dis_expr);
            });
        }
        // `visit::walk_variant` without the discriminant expression.
        match variant.node.kind {
            ast::TupleVariantKind(ref variant_arguments) => {
                for variant_argument in variant_arguments.iter() {
                    self.visit_ty(&*variant_argument.ty);
                }
            }
            ast::StructVariantKind(ref struct_definition) => {
                self.visit_struct_def(&**struct_definition,
                                      variant.node.name,
                                      generics,
                                      variant.node.id);
            }
        }
    }
    fn visit_foreign_item(&mut self, foreign_item: &ast::ForeignItem) {
        // Foreign fns carry their own generics; statics have none.
        let type_parameters = match foreign_item.node {
            ForeignItemFn(_, ref generics) => {
                HasTypeParameters(generics, FnSpace, ItemRibKind)
            }
            ForeignItemStatic(..) => NoTypeParameters
        };
        self.with_type_parameter_rib(type_parameters, |this| {
            visit::walk_foreign_item(this, foreign_item);
        });
    }
    fn visit_fn(&mut self,
                function_kind: visit::FnKind<'v>,
                declaration: &'v FnDecl,
                block: &'v Block,
                _: Span,
                node_id: NodeId) {
        // Pick the rib kind from the flavor of function: free items get
        // ItemRibKind, methods MethodRibKind, closures ClosureRibKind so
        // upvars can be translated.
        let rib_kind = match function_kind {
            visit::FkItemFn(_, generics, _, _, _) => {
                self.visit_generics(generics);
                ItemRibKind
            }
            visit::FkMethod(_, sig, _) => {
                self.visit_generics(&sig.generics);
                self.visit_explicit_self(&sig.explicit_self);
                MethodRibKind
            }
            visit::FkFnBlock(..) => ClosureRibKind(node_id)
        };
        self.resolve_function(rib_kind, declaration, block);
    }
}
// Optional (span, message) pair attached to a resolution failure.
type ErrorMessage = Option<(Span, String)>;
/// Generic outcome of one resolution step.
enum ResolveResult<T> {
    Failed(ErrorMessage),   // Failed to resolve the name, optional helpful error message.
    Indeterminate,          // Couldn't determine due to unresolved globs.
    Success(T)              // Successfully resolved the import.
}
impl<T> ResolveResult<T> {
    /// True when resolution could not be decided yet because of
    /// unresolved glob imports.
    fn indeterminate(&self) -> bool {
        if let Indeterminate = *self { true } else { false }
    }
}
/// Hints offered when a name fails to resolve. The `String` payloads
/// presumably carry the suggested path text — confirm at the use site.
enum FallbackSuggestion {
    NoSuggestion,
    Field,
    Method,
    TraitItem,
    StaticMethod(String),
    TraitMethod(String),
}
/// Describes the generic parameters in scope for the item being visited.
#[derive(Copy, Clone)]
enum TypeParameters<'a> {
    NoTypeParameters,
    HasTypeParameters(
        // Type parameters.
        &'a Generics,

        // Identifies the things that these parameters
        // were declared on (type, fn, etc)
        ParamSpace,

        // The kind of the rib used for type parameters.
        RibKind)
}
// The rib kind controls the translation of local
// definitions (`DefLocal`) to upvars (`DefUpvar`).
#[derive(Copy, Clone, Debug)]
enum RibKind {
    // No translation needs to be applied.
    NormalRibKind,

    // We passed through a closure scope at the given node ID.
    // Translate upvars as appropriate.
    ClosureRibKind(NodeId /* func id */),

    // We passed through an impl or trait and are now in one of its
    // methods. Allow references to ty params that impl or trait
    // binds. Disallow any other upvars (including other ty params that are
    // upvars).
    MethodRibKind,

    // We passed through an item scope. Disallow upvars.
    ItemRibKind,

    // We're in a constant item. Can't refer to dynamic stuff.
    ConstantItemRibKind
}
/// Whether `resolve_module_path` may consult the lexical scope chain for
/// the first path segment, or must treat the path as crate-relative.
#[derive(Copy, Clone)]
enum UseLexicalScopeFlag {
    DontUseLexicalScope, // resolve from the crate root (index 0)
    UseLexicalScope      // resolve the first segment lexically, then descend
}
/// Result of resolving a `self::`/`super::`-style module prefix.
enum ModulePrefixResult {
    NoPrefixFound,
    // Module the prefix resolved to, plus the index of the first path
    // segment still to be resolved.
    PrefixFound(Rc<Module>, usize)
}
#[derive(Copy, Clone)]
enum AssocItemResolveResult {
    /// Syntax such as `<T>::item`, which can't be resolved until type
    /// checking.
    TypecheckRequired,
    /// We should have been able to resolve the associated item.
    ResolveAttempt(Option<PathResolution>),
}
/// Why a name search is being performed; import resolution and path
/// resolution are distinguished by callers.
#[derive(Copy, Clone, PartialEq)]
enum NameSearchType {
    /// We're doing a name search in order to resolve a `use` directive.
    ImportSearch,

    /// We're doing a name search in order to resolve a path type, a path
    /// expression, or a path pattern.
    PathSearch,
}
/// What a bare identifier in a pattern turned out to refer to, if anything.
#[derive(Copy, Clone)]
enum BareIdentifierPatternResolution {
    FoundStructOrEnumVariant(Def, LastPrivate),
    FoundConst(Def, LastPrivate),
    BareIdentifierPatternUnresolved
}
/// One local scope.
#[derive(Debug)]
struct Rib {
    bindings: HashMap<Name, DefLike>, // names bound in this scope
    kind: RibKind,                    // how defs cross this scope boundary
}
impl Rib {
    /// Creates a rib of the given kind with no bindings yet.
    fn new(kind: RibKind) -> Rib {
        let bindings = HashMap::new();
        Rib { bindings: bindings, kind: kind }
    }
}
/// The link from a module up to its nearest parent node.
#[derive(Clone,Debug)]
enum ParentLink {
    NoParentLink,
    // Weak back-edges avoid Rc reference cycles in the module tree.
    ModuleParentLink(Weak<Module>, Name),
    BlockParentLink(Weak<Module>, NodeId)
}
/// The type of module this is. Besides real (`mod`) modules, traits, enums,
/// types and blocks also act as module-like scopes during resolution.
#[derive(Copy, Clone, PartialEq, Debug)]
enum ModuleKind {
    NormalModuleKind,
    TraitModuleKind,
    EnumModuleKind,
    TypeModuleKind,
    AnonymousModuleKind,
}
/// One node in the tree of modules.
pub struct Module {
    // Link to the parent scope (weak, to avoid Rc cycles).
    parent_link: ParentLink,
    // The def ID of the item this module corresponds to, if known.
    def_id: Cell<Option<DefId>>,
    // What kind of module-like scope this is.
    kind: Cell<ModuleKind>,
    // Whether the module itself is `pub`.
    is_public: bool,

    // Immediate named children and their bindings.
    children: RefCell<HashMap<Name, Rc<NameBindings>>>,
    // The `use` directives declared in this module.
    imports: RefCell<Vec<ImportDirective>>,

    // The external module children of this node that were declared with
    // `extern crate`.
    external_module_children: RefCell<HashMap<Name, Rc<Module>>>,

    // The anonymous children of this node. Anonymous children are pseudo-
    // modules that are implicitly created around items contained within
    // blocks.
    //
    // For example, if we have this:
    //
    //  fn f() {
    //      fn g() {
    //          ...
    //      }
    //  }
    //
    // There will be an anonymous module created around `g` with the ID of the
    // entry block for `f`.
    anonymous_children: RefCell<NodeMap<Rc<Module>>>,

    // The status of resolving each import in this module.
    import_resolutions: RefCell<HashMap<Name, ImportResolution>>,

    // The number of unresolved globs that this module exports.
    glob_count: Cell<usize>,

    // The index of the import we're resolving.
    resolved_import_count: Cell<usize>,

    // Whether this module is populated. If not populated, any attempt to
    // access the children must be preceded with a
    // `populate_module_if_necessary` call.
    populated: Cell<bool>,
}
impl Module {
    /// Creates an empty module node. External (cross-crate) modules start
    /// out unpopulated; local ones count as populated from the start.
    fn new(parent_link: ParentLink,
           def_id: Option<DefId>,
           kind: ModuleKind,
           external: bool,
           is_public: bool)
           -> Module {
        Module {
            parent_link: parent_link,
            def_id: Cell::new(def_id),
            kind: Cell::new(kind),
            is_public: is_public,
            children: RefCell::new(HashMap::new()),
            imports: RefCell::new(Vec::new()),
            external_module_children: RefCell::new(HashMap::new()),
            anonymous_children: RefCell::new(NodeMap()),
            import_resolutions: RefCell::new(HashMap::new()),
            glob_count: Cell::new(0),
            resolved_import_count: Cell::new(0),
            populated: Cell::new(!external),
        }
    }

    /// True once every import directive recorded here has been resolved.
    fn all_imports_resolved(&self) -> bool {
        self.imports.borrow().len() == self.resolved_import_count.get()
    }
}
impl fmt::Debug for Module {
    /// Renders as "<def_id>, kind: <kind>, public|private".
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let visibility = if self.is_public { "public" } else { "private" };
        write!(f, "{:?}, kind: {:?}, {}", self.def_id, self.kind, visibility)
    }
}
// Modifier bits attached to a definition: whether it is `pub`, and whether
// it may be the target of a `use` import.
bitflags! {
    #[derive(Debug)]
    flags DefModifiers: u8 {
        const PUBLIC     = 1 << 0,
        const IMPORTABLE = 1 << 1,
    }
}
// Records a possibly-private type definition. A single name may carry both
// a module and a type def (e.g. an enum is both), hence two Options.
#[derive(Clone,Debug)]
struct TypeNsDef {
    modifiers: DefModifiers, // see note in ImportResolution about how to use this
    module_def: Option<Rc<Module>>,
    type_def: Option<Def>,
    type_span: Option<Span>
}
// Records a possibly-private value definition.
#[derive(Clone, Copy, Debug)]
struct ValueNsDef {
    modifiers: DefModifiers, // see note in ImportResolution about how to use this
    def: Def,
    value_span: Option<Span>,
}
// Records the definitions (at most one for each namespace) that a name is
// bound to.
#[derive(Debug)]
pub struct NameBindings {
    type_def: RefCell<Option<TypeNsDef>>,   //< Meaning in type namespace.
    value_def: RefCell<Option<ValueNsDef>>, //< Meaning in value namespace.
}
impl NameBindings {
    /// Creates empty bindings: the name means nothing in either namespace.
    fn new() -> NameBindings {
        NameBindings {
            type_def: RefCell::new(None),
            value_def: RefCell::new(None),
        }
    }

    /// Creates a new module in this set of name bindings.
    fn define_module(&self,
                     parent_link: ParentLink,
                     def_id: Option<DefId>,
                     kind: ModuleKind,
                     external: bool,
                     is_public: bool,
                     sp: Span) {
        // Merges the module with the existing type def or creates a new one.
        let modifiers = if is_public {
            DefModifiers::PUBLIC
        } else {
            DefModifiers::empty()
        } | DefModifiers::IMPORTABLE;
        let module_ = Rc::new(Module::new(parent_link,
                                          def_id,
                                          kind,
                                          external,
                                          is_public));
        let type_def = self.type_def.borrow().clone();
        match type_def {
            None => {
                *self.type_def.borrow_mut() = Some(TypeNsDef {
                    modifiers: modifiers,
                    module_def: Some(module_),
                    type_def: None,
                    type_span: Some(sp)
                });
            }
            Some(type_def) => {
                // A type was already recorded here: keep it, attach the
                // new module alongside it.
                *self.type_def.borrow_mut() = Some(TypeNsDef {
                    modifiers: modifiers,
                    module_def: Some(module_),
                    type_span: Some(sp),
                    type_def: type_def.type_def
                });
            }
        }
    }

    /// Sets the kind of the module, creating a new one if necessary.
    fn set_module_kind(&self,
                       parent_link: ParentLink,
                       def_id: Option<DefId>,
                       kind: ModuleKind,
                       external: bool,
                       is_public: bool,
                       _sp: Span) {
        let modifiers = if is_public {
            DefModifiers::PUBLIC
        } else {
            DefModifiers::empty()
        } | DefModifiers::IMPORTABLE;
        let type_def = self.type_def.borrow().clone();
        match type_def {
            None => {
                // No type entry yet: create a fresh module of this kind.
                let module = Module::new(parent_link,
                                         def_id,
                                         kind,
                                         external,
                                         is_public);
                *self.type_def.borrow_mut() = Some(TypeNsDef {
                    modifiers: modifiers,
                    module_def: Some(Rc::new(module)),
                    type_def: None,
                    type_span: None,
                });
            }
            Some(type_def) => {
                match type_def.module_def {
                    None => {
                        // Type entry exists but has no module yet.
                        let module = Module::new(parent_link,
                                                 def_id,
                                                 kind,
                                                 external,
                                                 is_public);
                        *self.type_def.borrow_mut() = Some(TypeNsDef {
                            modifiers: modifiers,
                            module_def: Some(Rc::new(module)),
                            type_def: type_def.type_def,
                            type_span: None,
                        });
                    }
                    // Module already present: just update its kind in place.
                    Some(module_def) => module_def.kind.set(kind),
                }
            }
        }
    }

    /// Records a type definition.
    fn define_type(&self, def: Def, sp: Span, modifiers: DefModifiers) {
        debug!("defining type for def {:?} with modifiers {:?}", def, modifiers);
        // Merges the type with the existing type def or creates a new one.
        let type_def = self.type_def.borrow().clone();
        match type_def {
            None => {
                *self.type_def.borrow_mut() = Some(TypeNsDef {
                    module_def: None,
                    type_def: Some(def),
                    type_span: Some(sp),
                    modifiers: modifiers,
                });
            }
            Some(type_def) => {
                // Preserve any module already attached to this name.
                *self.type_def.borrow_mut() = Some(TypeNsDef {
                    module_def: type_def.module_def,
                    type_def: Some(def),
                    type_span: Some(sp),
                    modifiers: modifiers,
                });
            }
        }
    }

    /// Records a value definition.
    fn define_value(&self, def: Def, sp: Span, modifiers: DefModifiers) {
        debug!("defining value for def {:?} with modifiers {:?}", def, modifiers);
        *self.value_def.borrow_mut() = Some(ValueNsDef {
            def: def,
            value_span: Some(sp),
            modifiers: modifiers,
        });
    }

    /// Returns the module node if applicable.
    fn get_module_if_available(&self) -> Option<Rc<Module>> {
        match *self.type_def.borrow() {
            Some(ref type_def) => type_def.module_def.clone(),
            None => None
        }
    }

    /// Returns the module node. Panics if this node does not have a module
    /// definition.
    fn get_module(&self) -> Rc<Module> {
        match self.get_module_if_available() {
            None => {
                panic!("get_module called on a node with no module \
                       definition!")
            }
            Some(module_def) => module_def
        }
    }

    /// Whether the name has any definition in the given namespace.
    fn defined_in_namespace(&self, namespace: Namespace) -> bool {
        match namespace {
            TypeNS   => return self.type_def.borrow().is_some(),
            ValueNS  => return self.value_def.borrow().is_some()
        }
    }

    /// Whether the name has a `pub` definition in the given namespace.
    fn defined_in_public_namespace(&self, namespace: Namespace) -> bool {
        self.defined_in_namespace_with(namespace, DefModifiers::PUBLIC)
    }

    /// Whether the definition in `namespace` carries all of `modifiers`.
    fn defined_in_namespace_with(&self, namespace: Namespace, modifiers: DefModifiers) -> bool {
        match namespace {
            TypeNS => match *self.type_def.borrow() {
                Some(ref def) => def.modifiers.contains(modifiers), None => false
            },
            ValueNS => match *self.value_def.borrow() {
                Some(ref def) => def.modifiers.contains(modifiers), None => false
            }
        }
    }

    /// Returns the `Def` the name resolves to in `namespace`, if any. A
    /// module with a def ID but no explicit type def yields `DefMod`.
    fn def_for_namespace(&self, namespace: Namespace) -> Option<Def> {
        match namespace {
            TypeNS => {
                match *self.type_def.borrow() {
                    None => None,
                    Some(ref type_def) => {
                        match type_def.type_def {
                            Some(type_def) => Some(type_def),
                            None => {
                                match type_def.module_def {
                                    Some(ref module) => {
                                        match module.def_id.get() {
                                            Some(did) => Some(DefMod(did)),
                                            None => None,
                                        }
                                    }
                                    None => None,
                                }
                            }
                        }
                    }
                }
            }
            ValueNS => {
                match *self.value_def.borrow() {
                    None => None,
                    Some(value_def) => Some(value_def.def)
                }
            }
        }
    }

    /// The span recorded for the definition in `namespace`, if any.
    fn span_for_namespace(&self, namespace: Namespace) -> Option<Span> {
        if self.defined_in_namespace(namespace) {
            match namespace {
                TypeNS  => {
                    match *self.type_def.borrow() {
                        None => None,
                        Some(ref type_def) => type_def.type_span
                    }
                }
                ValueNS => {
                    match *self.value_def.borrow() {
                        None => None,
                        Some(ref value_def) => value_def.value_span
                    }
                }
            }
        } else {
            None
        }
    }

    /// Whether the definition in `namespace` is `pub`. Panics (via unwrap)
    /// if the name is not defined in that namespace; callers must check
    /// with `defined_in_namespace` first.
    fn is_public(&self, namespace: Namespace) -> bool {
        match namespace {
            TypeNS  => {
                let type_def = self.type_def.borrow();
                type_def.as_ref().unwrap().modifiers.contains(DefModifiers::PUBLIC)
            }
            ValueNS => {
                let value_def = self.value_def.borrow();
                value_def.as_ref().unwrap().modifiers.contains(DefModifiers::PUBLIC)
            }
        }
    }
}
/// Interns the names of the primitive types.
struct PrimitiveTypeTable {
    // Interned primitive-type name -> its `PrimTy`.
    primitive_types: HashMap<Name, PrimTy>,
}
impl PrimitiveTypeTable {
    /// Builds the table mapping every primitive type name to its `PrimTy`.
    fn new() -> PrimitiveTypeTable {
        let mut table = PrimitiveTypeTable {
            primitive_types: HashMap::new()
        };

        // Data-driven registration of all primitive types.
        let primitives = [
            ("bool",  TyBool),
            ("char",  TyChar),
            ("f32",   TyFloat(TyF32)),
            ("f64",   TyFloat(TyF64)),
            ("isize", TyInt(TyIs)),
            ("i8",    TyInt(TyI8)),
            ("i16",   TyInt(TyI16)),
            ("i32",   TyInt(TyI32)),
            ("i64",   TyInt(TyI64)),
            ("str",   TyStr),
            ("usize", TyUint(TyUs)),
            ("u8",    TyUint(TyU8)),
            ("u16",   TyUint(TyU16)),
            ("u32",   TyUint(TyU32)),
            ("u64",   TyUint(TyU64)),
        ];
        for &(name, prim_ty) in primitives.iter() {
            table.intern(name, prim_ty);
        }
        table
    }

    /// Interns `string` and records its mapping to `primitive_type`.
    fn intern(&mut self, string: &str, primitive_type: PrimTy) {
        self.primitive_types.insert(token::intern(string), primitive_type);
    }
}
/// The main resolver class.
pub struct Resolver<'a, 'tcx:'a> {
    // Active compiler session (diagnostics, crate store, lints).
    session: &'a Session,

    // AST map, used to look items up by node ID.
    ast_map: &'a ast_map::Map<'tcx>,

    // Bindings at the crate root: the top of the reduced module graph.
    graph_root: NameBindings,

    // Keyed by (item name, trait DefId); populated during graph building —
    // see build_reduced_graph.
    trait_item_map: FnvHashMap<(Name, DefId), DefId>,

    // Per-struct name lists (presumably field names; confirm against
    // build_reduced_graph).
    structs: FnvHashMap<DefId, Vec<Name>>,

    // The number of imports that are currently unresolved.
    unresolved_imports: usize,

    // The module that represents the current item scope.
    current_module: Rc<Module>,

    // The current set of local scopes, for values.
    // FIXME #4948: Reuse ribs to avoid allocation.
    value_ribs: Vec<Rib>,

    // The current set of local scopes, for types.
    type_ribs: Vec<Rib>,

    // The current set of local scopes, for labels.
    label_ribs: Vec<Rib>,

    // The trait that the current context can refer to.
    current_trait_ref: Option<(DefId, TraitRef)>,

    // The current self type if inside an impl (used for better errors).
    current_self_type: Option<Ty>,

    // The idents for the primitive types.
    primitive_type_table: PrimitiveTypeTable,

    // Outputs of resolution, consumed by later compiler phases.
    def_map: DefMap,
    freevars: RefCell<FreevarMap>,
    freevars_seen: RefCell<NodeMap<NodeSet>>,
    export_map: ExportMap,
    trait_map: TraitMap,
    external_exports: ExternalExports,

    // Whether or not to print error messages. Can be set to true
    // when getting additional info for error message suggestions,
    // so as to avoid printing duplicate errors
    emit_errors: bool,

    make_glob_map: bool,
    // Maps imports to the names of items actually imported (this actually maps
    // all imports, but only glob imports are actually interesting).
    glob_map: GlobMap,

    // Tracking used imports/crates for unused-import and unused-extern-crate
    // lints.
    used_imports: HashSet<(NodeId, Namespace)>,
    used_crates: HashSet<CrateNum>,
}
/// How thorough the fallback-suggestion search should be.
#[derive(PartialEq)]
enum FallbackChecks {
    Everything,
    OnlyTraitAndStatics
}
impl<'a, 'tcx> Resolver<'a, 'tcx> {
    /// Constructs a resolver for the crate: builds the root module node
    /// (def ID 0) and starts with every table empty.
    fn new(session: &'a Session,
           ast_map: &'a ast_map::Map<'tcx>,
           crate_span: Span,
           make_glob_map: MakeGlobMap) -> Resolver<'a, 'tcx> {
        let graph_root = NameBindings::new();

        graph_root.define_module(NoParentLink,
                                 Some(DefId { krate: 0, node: 0 }),
                                 NormalModuleKind,
                                 false,
                                 true,
                                 crate_span);

        let current_module = graph_root.get_module();

        Resolver {
            session: session,

            ast_map: ast_map,

            // The outermost module has def ID 0; this is not reflected in the
            // AST.

            graph_root: graph_root,

            trait_item_map: FnvHashMap(),
            structs: FnvHashMap(),

            unresolved_imports: 0,

            current_module: current_module,
            value_ribs: Vec::new(),
            type_ribs: Vec::new(),
            label_ribs: Vec::new(),

            current_trait_ref: None,
            current_self_type: None,

            primitive_type_table: PrimitiveTypeTable::new(),

            def_map: RefCell::new(NodeMap()),
            freevars: RefCell::new(NodeMap()),
            freevars_seen: RefCell::new(NodeMap()),
            export_map: NodeMap(),
            trait_map: NodeMap(),
            used_imports: HashSet::new(),
            used_crates: HashSet::new(),
            external_exports: DefIdSet(),

            emit_errors: true,
            make_glob_map: make_glob_map == MakeGlobMap::Yes,
            glob_map: HashMap::new(),
        }
    }
#[inline]
fn record_import_use(&mut self, import_id: NodeId, name: Name) {
if !self.make_glob_map {
return;
}
if self.glob_map.contains_key(&import_id) {
self.glob_map.get_mut(&import_id).unwrap().insert(name);
return;
}
let mut new_set = HashSet::new();
new_set.insert(name);
self.glob_map.insert(import_id, new_set);
}
fn get_trait_name(&self, did: DefId) -> Name {
if did.krate == ast::LOCAL_CRATE {
self.ast_map.expect_item(did.node).ident.name
} else {
csearch::get_trait_name(&self.session.cstore, did)
}
}
fn create_name_bindings_from_module(module: Rc<Module>) -> NameBindings {
NameBindings {
type_def: RefCell::new(Some(TypeNsDef {
modifiers: DefModifiers::IMPORTABLE,
module_def: Some(module),
type_def: None,
type_span: None
})),
value_def: RefCell::new(None),
}
}
    /// Checks that the names of external crates don't collide with other
    /// external crates. Emits E0259 on conflict.
    fn check_for_conflicts_between_external_crates(&self,
                                                   module: &Module,
                                                   name: Name,
                                                   span: Span) {
        if module.external_module_children.borrow().contains_key(&name) {
            span_err!(self.session, span, E0259,
                      "an external crate named `{}` has already \
                       been imported into this module",
                      &token::get_name(name));
        }
    }
    /// Checks that the names of items don't collide with external crates.
    /// Emits E0260 on conflict.
    fn check_for_conflicts_between_external_crates_and_items(&self,
                                                             module: &Module,
                                                             name: Name,
                                                             span: Span) {
        if module.external_module_children.borrow().contains_key(&name) {
            span_err!(self.session, span, E0260,
                      "the name `{}` conflicts with an external \
                       crate that has been imported into this \
                       module",
                      &token::get_name(name));
        }
    }
    /// Resolves the given module path from the given root `module_`.
    /// Walks segments `module_path[index..]`, descending one module per
    /// segment (in the type namespace, where modules live), and tracks the
    /// closest private module crossed for privacy checking.
    fn resolve_module_path_from_root(&mut self,
                                     module_: Rc<Module>,
                                     module_path: &[Name],
                                     index: usize,
                                     span: Span,
                                     name_search_type: NameSearchType,
                                     lp: LastPrivate)
                                -> ResolveResult<(Rc<Module>, LastPrivate)> {
        // Walks up the parent chain looking for a scope that has `needle`
        // as an `extern crate` child; used only to build a suggestion.
        fn search_parent_externals(needle: Name, module: &Rc<Module>)
                                   -> Option<Rc<Module>> {
            match module.external_module_children.borrow().get(&needle) {
                Some(_) => Some(module.clone()),
                None => match module.parent_link {
                    ModuleParentLink(ref parent, _) => {
                        search_parent_externals(needle, &parent.upgrade().unwrap())
                    }
                   _ => None
                }
            }
        }

        let mut search_module = module_;
        let mut index = index;
        let module_path_len = module_path.len();
        let mut closest_private = lp;

        // Resolve the module part of the path. This does not involve looking
        // upward though scope chains; we simply resolve names directly in
        // modules as we go.
        while index < module_path_len {
            let name = module_path[index];
            match self.resolve_name_in_module(search_module.clone(),
                                              name,
                                              TypeNS,
                                              name_search_type,
                                              false) {
                Failed(None) => {
                    // Build a helpful message: either a `self::`/full-path
                    // suggestion or a missing-`extern crate` hint.
                    let segment_name = token::get_name(name);
                    let module_name = module_to_string(&*search_module);
                    let mut span = span;
                    let msg = if "???" == &module_name[..] {
                        span.hi = span.lo + Pos::from_usize(segment_name.len());

                        match search_parent_externals(name,
                                                     &self.current_module) {
                            Some(module) => {
                                let path_str = names_to_string(module_path);
                                let target_mod_str = module_to_string(&*module);
                                let current_mod_str =
                                    module_to_string(&*self.current_module);

                                let prefix = if target_mod_str == current_mod_str {
                                    "self::".to_string()
                                } else {
                                    format!("{}::", target_mod_str)
                                };

                                format!("Did you mean `{}{}`?", prefix, path_str)
                            },
                            None => format!("Maybe a missing `extern crate {}`?",
                                            segment_name),
                        }
                    } else {
                        format!("Could not find `{}` in `{}`",
                                segment_name,
                                module_name)
                    };

                    return Failed(Some((span, msg)));
                }
                Failed(err) => return Failed(err),
                Indeterminate => {
                    debug!("(resolving module path for import) module \
                            resolution is indeterminate: {}",
                            token::get_name(name));
                    return Indeterminate;
                }
                Success((target, used_proxy)) => {
                    // Check to see whether there are type bindings, and, if
                    // so, whether there is a module within.
                    match *target.bindings.type_def.borrow() {
                        Some(ref type_def) => {
                            match type_def.module_def {
                                None => {
                                    let msg = format!("Not a module `{}`",
                                                       token::get_name(name));

                                    return Failed(Some((span, msg)));
                                }
                                Some(ref module_def) => {
                                    search_module = module_def.clone();

                                    // track extern crates for unused_extern_crate lint
                                    if let Some(did) = module_def.def_id.get() {
                                        self.used_crates.insert(did.krate);
                                    }

                                    // Keep track of the closest
                                    // private module used when
                                    // resolving this import chain.
                                    if !used_proxy && !search_module.is_public {
                                        if let Some(did) = search_module.def_id.get() {
                                            closest_private = LastMod(DependsOn(did));
                                        }
                                    }
                                }
                            }
                        }
                        None => {
                            // There are no type bindings at all.
                            let msg = format!("Not a module `{}`",
                                              token::get_name(name));
                            return Failed(Some((span, msg)));
                        }
                    }
                }
            }

            index += 1;
        }

        return Success((search_module, closest_private));
    }
    /// Attempts to resolve the module part of an import directive or path
    /// rooted at the given module.
    ///
    /// On success, returns the resolved module, and the closest *private*
    /// module found to the destination when resolving this path.
    fn resolve_module_path(&mut self,
                           module_: Rc<Module>,
                           module_path: &[Name],
                           use_lexical_scope: UseLexicalScopeFlag,
                           span: Span,
                           name_search_type: NameSearchType)
                           -> ResolveResult<(Rc<Module>, LastPrivate)> {
        let module_path_len = module_path.len();
        assert!(module_path_len > 0);

        debug!("(resolving module path for import) processing `{}` rooted at `{}`",
               names_to_string(module_path),
               module_to_string(&*module_));

        // Resolve the module prefix, if any.
        let module_prefix_result = self.resolve_module_prefix(module_.clone(),
                                                              module_path);

        let search_module;
        let start_index;
        let last_private;
        match module_prefix_result {
            Failed(None) => {
                // Craft a "Could not find `X` in `Y`" message by splitting
                // the rendered path at its last `::`.
                let mpath = names_to_string(module_path);
                let mpath = &mpath[..];
                match mpath.rfind(':') {
                    Some(idx) => {
                        let msg = format!("Could not find `{}` in `{}`",
                                            // idx +- 1 to account for the
                                            // colons on either side
                                            &mpath[idx + 1..],
                                            &mpath[..idx - 1]);
                        return Failed(Some((span, msg)));
                    },
                    None => {
                        return Failed(None)
                    }
                }
            }
            Failed(err) => return Failed(err),
            Indeterminate => {
                debug!("(resolving module path for import) indeterminate; \
                        bailing");
                return Indeterminate;
            }
            Success(NoPrefixFound) => {
                // There was no prefix, so we're considering the first element
                // of the path. How we handle this depends on whether we were
                // instructed to use lexical scope or not.
                match use_lexical_scope {
                    DontUseLexicalScope => {
                        // This is a crate-relative path. We will start the
                        // resolution process at index zero.
                        search_module = self.graph_root.get_module();
                        start_index = 0;
                        last_private = LastMod(AllPublic);
                    }
                    UseLexicalScope => {
                        // This is not a crate-relative path. We resolve the
                        // first component of the path in the current lexical
                        // scope and then proceed to resolve below that.
                        match self.resolve_module_in_lexical_scope(module_,
                                                                   module_path[0]) {
                            Failed(err) => return Failed(err),
                            Indeterminate => {
                                debug!("(resolving module path for import) \
                                        indeterminate; bailing");
                                return Indeterminate;
                            }
                            Success(containing_module) => {
                                search_module = containing_module;
                                start_index = 1;
                                last_private = LastMod(AllPublic);
                            }
                        }
                    }
                }
            }
            Success(PrefixFound(ref containing_module, index)) => {
                // A `self`/`super` prefix consumed `index` segments.
                search_module = containing_module.clone();
                start_index = index;
                last_private = LastMod(DependsOn(containing_module.def_id
                                                                  .get()
                                                                  .unwrap()));
            }
        }

        self.resolve_module_path_from_root(search_module,
                                           module_path,
                                           start_index,
                                           span,
                                           name_search_type,
                                           last_private)
    }
    /// Invariant: This must only be called during main resolution, not during
    /// import resolution.
    ///
    /// Search order: the module's own children, then its import
    /// resolutions, then (for TypeNS) its `extern crate` children, then —
    /// only through trait/enum/type/block scopes, never past a normal
    /// module — the parent chain.
    fn resolve_item_in_lexical_scope(&mut self,
                                     module_: Rc<Module>,
                                     name: Name,
                                     namespace: Namespace)
                                    -> ResolveResult<(Target, bool)> {
        debug!("(resolving item in lexical scope) resolving `{}` in \
                namespace {:?} in `{}`",
               token::get_name(name),
               namespace,
               module_to_string(&*module_));

        // The current module node is handled specially. First, check for
        // its immediate children.
        build_reduced_graph::populate_module_if_necessary(self, &module_);

        match module_.children.borrow().get(&name) {
            Some(name_bindings)
                    if name_bindings.defined_in_namespace(namespace) => {
                debug!("top name bindings succeeded");
                return Success((Target::new(module_.clone(),
                                            name_bindings.clone(),
                                            Shadowable::Never),
                               false));
            }
            Some(_) | None => { /* Not found; continue. */ }
        }

        // Now check for its import directives. We don't have to have resolved
        // all its imports in the usual way; this is because chains of
        // adjacent import statements are processed as though they mutated the
        // current scope.
        if let Some(import_resolution) = module_.import_resolutions.borrow().get(&name) {
            match (*import_resolution).target_for_namespace(namespace) {
                None => {
                    // Not found; continue.
                    debug!("(resolving item in lexical scope) found \
                            import resolution, but not in namespace {:?}",
                           namespace);
                }
                Some(target) => {
                    debug!("(resolving item in lexical scope) using \
                            import resolution");
                    // track used imports and extern crates as well
                    let id = import_resolution.id(namespace);
                    self.used_imports.insert((id, namespace));
                    self.record_import_use(id, name);
                    if let Some(DefId{krate: kid, ..}) = target.target_module.def_id.get() {
                         self.used_crates.insert(kid);
                    }
                    return Success((target, false));
                }
            }
        }

        // Search for external modules.
        if namespace == TypeNS {
            // FIXME (21114): In principle unclear `child` *has* to be lifted.
            let child = module_.external_module_children.borrow().get(&name).cloned();
            if let Some(module) = child {
                let name_bindings =
                    Rc::new(Resolver::create_name_bindings_from_module(module));
                debug!("lower name bindings succeeded");
                return Success((Target::new(module_,
                                            name_bindings,
                                            Shadowable::Never),
                                false));
            }
        }

        // Finally, proceed up the scope chain looking for parent modules.
        let mut search_module = module_;
        loop {
            // Go to the next parent.
            match search_module.parent_link.clone() {
                NoParentLink => {
                    // No more parents. This module was unresolved.
                    debug!("(resolving item in lexical scope) unresolved \
                            module");
                    return Failed(None);
                }
                ModuleParentLink(parent_module_node, _) => {
                    match search_module.kind.get() {
                        NormalModuleKind => {
                            // We stop the search here.
                            debug!("(resolving item in lexical \
                                    scope) unresolved module: not \
                                    searching through module \
                                    parents");
                            return Failed(None);
                        }
                        TraitModuleKind |
                        EnumModuleKind |
                        TypeModuleKind |
                        AnonymousModuleKind => {
                            search_module = parent_module_node.upgrade().unwrap();
                        }
                    }
                }
                BlockParentLink(ref parent_module_node, _) => {
                    search_module = parent_module_node.upgrade().unwrap();
                }
            }

            // Resolve the name in the parent module.
            match self.resolve_name_in_module(search_module.clone(),
                                              name,
                                              namespace,
                                              PathSearch,
                                              true) {
                Failed(Some((span, msg))) =>
                    self.resolve_error(span, &format!("failed to resolve. {}",
                                                     msg)),
                Failed(None) => (), // Continue up the search chain.
                Indeterminate => {
                    // We couldn't see through the higher scope because of an
                    // unresolved import higher up. Bail.
                    debug!("(resolving item in lexical scope) indeterminate \
                            higher scope; bailing");
                    return Indeterminate;
                }
                Success((target, used_reexport)) => {
                    // We found the module.
                    debug!("(resolving item in lexical scope) found name \
                            in module, done");
                    return Success((target, used_reexport));
                }
            }
        }
    }
/// Resolves a module name in the current lexical scope.
fn resolve_module_in_lexical_scope(&mut self,
module_: Rc<Module>,
name: Name)
-> ResolveResult<Rc<Module>> {
// If this module is an anonymous module, resolve the item in the
// lexical scope. Otherwise, resolve the item from the crate root.
let resolve_result = self.resolve_item_in_lexical_scope(module_, name, TypeNS);
match resolve_result {
Success((target, _)) => {
let bindings = &*target.bindings;
match *bindings.type_def.borrow() {
Some(ref type_def) => {
match type_def.module_def {
None => {
debug!("!!! (resolving module in lexical \
scope) module wasn't actually a \
module!");
return Failed(None);
}
Some(ref module_def) => {
return Success(module_def.clone());
}
}
}
None => {
debug!("!!! (resolving module in lexical scope) module
wasn't actually a module!");
return Failed(None);
}
}
}
Indeterminate => {
debug!("(resolving module in lexical scope) indeterminate; \
bailing");
return Indeterminate;
}
Failed(err) => {
debug!("(resolving module in lexical scope) failed to resolve");
return Failed(err);
}
}
}
/// Returns the nearest normal module parent of the given module.
fn get_nearest_normal_module_parent(&mut self, module_: Rc<Module>)
-> Option<Rc<Module>> {
let mut module_ = module_;
loop {
match module_.parent_link.clone() {
NoParentLink => return None,
ModuleParentLink(new_module, _) |
BlockParentLink(new_module, _) => {
let new_module = new_module.upgrade().unwrap();
match new_module.kind.get() {
NormalModuleKind => return Some(new_module),
TraitModuleKind |
EnumModuleKind |
TypeModuleKind |
AnonymousModuleKind => module_ = new_module,
}
}
}
}
}
/// Returns the nearest normal module parent of the given module, or the
/// module itself if it is a normal module.
fn get_nearest_normal_module_parent_or_self(&mut self, module_: Rc<Module>)
-> Rc<Module> {
match module_.kind.get() {
NormalModuleKind => return module_,
TraitModuleKind |
EnumModuleKind |
TypeModuleKind |
AnonymousModuleKind => {
match self.get_nearest_normal_module_parent(module_.clone()) {
None => module_,
Some(new_module) => new_module
}
}
}
}
/// Resolves a "module prefix". A module prefix is one or both of (a) `self::`;
/// (b) some chain of `super::`.
/// grammar: (SELF MOD_SEP ) ? (SUPER MOD_SEP) *
fn resolve_module_prefix(&mut self,
module_: Rc<Module>,
module_path: &[Name])
-> ResolveResult<ModulePrefixResult> {
// Start at the current module if we see `self` or `super`, or at the
// top of the crate otherwise.
let mut containing_module;
let mut i;
let first_module_path_string = token::get_name(module_path[0]);
if "self" == &first_module_path_string[..] {
containing_module =
self.get_nearest_normal_module_parent_or_self(module_);
i = 1;
} else if "super" == &first_module_path_string[..] {
containing_module =
self.get_nearest_normal_module_parent_or_self(module_);
i = 0; // We'll handle `super` below.
} else {
return Success(NoPrefixFound);
}
// Now loop through all the `super`s we find.
while i < module_path.len() {
let string = token::get_name(module_path[i]);
if "super" != &string[..] {
break
}
debug!("(resolving module prefix) resolving `super` at {}",
module_to_string(&*containing_module));
match self.get_nearest_normal_module_parent(containing_module) {
None => return Failed(None),
Some(new_module) => {
containing_module = new_module;
i += 1;
}
}
}
debug!("(resolving module prefix) finished resolving prefix at {}",
module_to_string(&*containing_module));
return Success(PrefixFound(containing_module, i));
}
    /// Attempts to resolve the supplied name in the given module for the
    /// given namespace. If successful, returns the target corresponding to
    /// the name.
    ///
    /// The boolean returned on success is an indicator of whether this lookup
    /// passed through a public re-export proxy.
    ///
    /// Search order: direct children of the module, then resolved imports,
    /// then (for `TypeNS` only) external module children.
    fn resolve_name_in_module(&mut self,
                              module_: Rc<Module>,
                              name: Name,
                              namespace: Namespace,
                              name_search_type: NameSearchType,
                              allow_private_imports: bool)
                              -> ResolveResult<(Target, bool)> {
        debug!("(resolving name in module) resolving `{}` in `{}`",
               &token::get_name(name),
               module_to_string(&*module_));
        // First, check the direct children of the module.
        build_reduced_graph::populate_module_if_necessary(self, &module_);
        match module_.children.borrow().get(&name) {
            Some(name_bindings)
                if name_bindings.defined_in_namespace(namespace) => {
                debug!("(resolving name in module) found node as child");
                // Direct children are never shadowable and reaching one does
                // not pass through a re-export (hence `false`).
                return Success((Target::new(module_.clone(),
                                            name_bindings.clone(),
                                            Shadowable::Never),
                                false));
            }
            Some(_) | None => {
                // Continue.
            }
        }
        // Next, check the module's imports if necessary.
        // If this is a search of all imports, we should be done with glob
        // resolution at this point.
        if name_search_type == PathSearch {
            assert_eq!(module_.glob_count.get(), 0);
        }
        // Check the list of resolved imports.
        match module_.import_resolutions.borrow().get(&name) {
            Some(import_resolution) if allow_private_imports ||
                                       import_resolution.is_public => {
                // A public import with outstanding references has not
                // finished resolving, so we cannot answer either way yet.
                if import_resolution.is_public &&
                        import_resolution.outstanding_references != 0 {
                    debug!("(resolving name in module) import \
                            unresolved; bailing out");
                    return Indeterminate;
                }
                match import_resolution.target_for_namespace(namespace) {
                    None => {
                        debug!("(resolving name in module) name found, \
                                but not in namespace {:?}",
                               namespace);
                    }
                    Some(target) => {
                        debug!("(resolving name in module) resolved to \
                                import");
                        // track used imports and extern crates as well
                        let id = import_resolution.id(namespace);
                        self.used_imports.insert((id, namespace));
                        self.record_import_use(id, name);
                        if let Some(DefId{krate: kid, ..}) = target.target_module.def_id.get() {
                            self.used_crates.insert(kid);
                        }
                        // `true`: this resolution went through a re-export.
                        return Success((target, true));
                    }
                }
            }
            Some(..) | None => {} // Continue.
        }
        // Finally, search through external children.
        if namespace == TypeNS {
            // FIXME (21114): In principle unclear `child` *has* to be lifted.
            let child = module_.external_module_children.borrow().get(&name).cloned();
            if let Some(module) = child {
                let name_bindings =
                    Rc::new(Resolver::create_name_bindings_from_module(module));
                return Success((Target::new(module_,
                                            name_bindings,
                                            Shadowable::Never),
                                false));
            }
        }
        // We're out of luck.
        debug!("(resolving name in module) failed to resolve `{}`",
               &token::get_name(name));
        return Failed(None);
    }
fn report_unresolved_imports(&mut self, module_: Rc<Module>) {
let index = module_.resolved_import_count.get();
let imports = module_.imports.borrow();
let import_count = imports.len();
if index != import_count {
let sn = self.session
.codemap()
.span_to_snippet((*imports)[index].span)
.unwrap();
if sn.contains("::") {
self.resolve_error((*imports)[index].span,
"unresolved import");
} else {
let err = format!("unresolved import (maybe you meant `{}::*`?)",
sn);
self.resolve_error((*imports)[index].span, &err[..]);
}
}
// Descend into children and anonymous children.
build_reduced_graph::populate_module_if_necessary(self, &module_);
for (_, child_node) in &*module_.children.borrow() {
match child_node.get_module_if_available() {
None => {
// Continue.
}
Some(child_module) => {
self.report_unresolved_imports(child_module);
}
}
}
for (_, module_) in &*module_.anonymous_children.borrow() {
self.report_unresolved_imports(module_.clone());
}
}
// AST resolution
//
// We maintain a list of value ribs and type ribs.
//
// Simultaneously, we keep track of the current position in the module
// graph in the `current_module` pointer. When we go to resolve a name in
// the value or type namespaces, we first look through all the ribs and
// then query the module graph. When we resolve a name in the module
// namespace, we can skip all the ribs (since nested modules are not
// allowed within blocks in Rust) and jump straight to the current module
// graph node.
//
// Named implementations are handled separately. When we find a method
// call, we consult the module node to find all of the implementations in
// scope. This information is lazily cached in the module node. We then
// generate a fake "implementation scope" containing all the
// implementations thus found, for compatibility with old resolve pass.
fn with_scope<F>(&mut self, name: Option<Name>, f: F) where
F: FnOnce(&mut Resolver),
{
let orig_module = self.current_module.clone();
// Move down in the graph.
match name {
None => {
// Nothing to do.
}
Some(name) => {
build_reduced_graph::populate_module_if_necessary(self, &orig_module);
match orig_module.children.borrow().get(&name) {
None => {
debug!("!!! (with scope) didn't find `{}` in `{}`",
token::get_name(name),
module_to_string(&*orig_module));
}
Some(name_bindings) => {
match (*name_bindings).get_module_if_available() {
None => {
debug!("!!! (with scope) didn't find module \
for `{}` in `{}`",
token::get_name(name),
module_to_string(&*orig_module));
}
Some(module_) => {
self.current_module = module_;
}
}
}
}
}
}
f(self);
self.current_module = orig_module;
}
    /// Wraps the given definition in the appropriate number of `DefUpvar`
    /// wrappers.
    ///
    /// `ribs` are the scopes *inside* the one that produced `def_like`.
    /// Crossing a closure rib turns a local into an upvar (recorded in
    /// `freevars`); crossing an item or constant rib makes the use illegal
    /// and returns `None` after reporting an error.
    fn upvarify(&self,
                ribs: &[Rib],
                def_like: DefLike,
                span: Span)
                -> Option<DefLike> {
        // Only proper definitions participate; other def-likes pass through
        // unchanged.
        let mut def = match def_like {
            DlDef(def) => def,
            _ => return Some(def_like)
        };
        match def {
            DefUpvar(..) => {
                // Bindings never start out as upvars, so finding one here is
                // an internal invariant violation.
                self.session.span_bug(span,
                    &format!("unexpected {:?} in bindings", def))
            }
            DefLocal(node_id) => {
                for rib in ribs {
                    match rib.kind {
                        NormalRibKind => {
                            // Nothing to do. Continue.
                        }
                        ClosureRibKind(function_id) => {
                            // The local is used across a closure boundary:
                            // rewrap as an upvar of that closure.
                            let prev_def = def;
                            def = DefUpvar(node_id, function_id);
                            // Record each free variable at most once per
                            // closure (`freevars_seen` is the dedup set).
                            let mut seen = self.freevars_seen.borrow_mut();
                            let seen = match seen.entry(function_id) {
                                Occupied(v) => v.into_mut(),
                                Vacant(v) => v.insert(NodeSet()),
                            };
                            if seen.contains(&node_id) {
                                continue;
                            }
                            match self.freevars.borrow_mut().entry(function_id) {
                                Occupied(v) => v.into_mut(),
                                Vacant(v) => v.insert(vec![]),
                            }.push(Freevar { def: prev_def, span: span });
                            seen.insert(node_id);
                        }
                        ItemRibKind | MethodRibKind => {
                            // This was an attempt to access an upvar inside a
                            // named function item. This is not allowed, so we
                            // report an error.
                            self.resolve_error(span,
                                               "can't capture dynamic environment in a fn item; \
                                                use the || { ... } closure form instead");
                            return None;
                        }
                        ConstantItemRibKind => {
                            // Still doesn't deal with upvars
                            self.resolve_error(span,
                                               "attempt to use a non-constant \
                                                value in a constant");
                            return None;
                        }
                    }
                }
            }
            DefTyParam(..) | DefSelfTy(..) => {
                // Type parameters and `Self` may cross closures freely, but
                // never item or constant boundaries.
                for rib in ribs {
                    match rib.kind {
                        NormalRibKind | MethodRibKind | ClosureRibKind(..) => {
                            // Nothing to do. Continue.
                        }
                        ItemRibKind => {
                            // This was an attempt to use a type parameter outside
                            // its scope.
                            self.resolve_error(span,
                                               "can't use type parameters from \
                                                outer function; try using a local \
                                                type parameter instead");
                            return None;
                        }
                        ConstantItemRibKind => {
                            // see #9186
                            self.resolve_error(span,
                                               "cannot use an outer type \
                                                parameter in this context");
                            return None;
                        }
                    }
                }
            }
            _ => {}
        }
        Some(DlDef(def))
    }
/// Searches the current set of local scopes and
/// applies translations for closures.
fn search_ribs(&self,
ribs: &[Rib],
name: Name,
span: Span)
-> Option<DefLike> {
// FIXME #4950: Try caching?
for (i, rib) in ribs.iter().enumerate().rev() {
if let Some(def_like) = rib.bindings.get(&name).cloned() {
return self.upvarify(&ribs[i + 1..], def_like, span);
}
}
None
}
/// Searches the current set of local scopes for labels.
/// Stops after meeting a closure.
fn search_label(&self, name: Name) -> Option<DefLike> {
for rib in self.label_ribs.iter().rev() {
match rib.kind {
NormalRibKind => {
// Continue
}
_ => {
// Do not resolve labels across function boundary
return None
}
}
let result = rib.bindings.get(&name).cloned();
if result.is_some() {
return result
}
}
None
}
    /// Top-level entry point: walks the whole crate through this visitor.
    fn resolve_crate(&mut self, krate: &ast::Crate) {
        debug!("(resolving crate) starting");
        visit::walk_crate(self, krate);
    }
fn check_if_primitive_type_name(&self, name: Name, span: Span) {
if let Some(_) = self.primitive_type_table.primitive_types.get(&name) {
span_err!(self.session, span, E0317,
"user-defined types or type parameters cannot shadow the primitive types");
}
}
    /// Resolves a single item, setting up whichever ribs/scopes its kind
    /// requires before walking into it.
    fn resolve_item(&mut self, item: &Item) {
        let name = item.ident.name;
        debug!("(resolving item) resolving {}",
               token::get_name(name));
        match item.node {
            // Type-defining items: forbid shadowing primitive names, then
            // resolve under a rib holding their type parameters.
            ItemEnum(_, ref generics) |
            ItemTy(_, ref generics) |
            ItemStruct(_, ref generics) => {
                self.check_if_primitive_type_name(name, item.span);
                self.with_type_parameter_rib(HasTypeParameters(generics,
                                                               TypeSpace,
                                                               ItemRibKind),
                                             |this| visit::walk_item(this, item));
            }
            ItemFn(_, _, _, ref generics, _) => {
                self.with_type_parameter_rib(HasTypeParameters(generics,
                                                               FnSpace,
                                                               ItemRibKind),
                                             |this| visit::walk_item(this, item));
            }
            ItemDefaultImpl(_, ref trait_ref) => {
                // `impl Trait for ..` — only the trait reference needs work.
                self.with_optional_trait_ref(Some(trait_ref), |_, _| {});
            }
            ItemImpl(_,
                     _,
                     ref generics,
                     ref opt_trait_ref,
                     ref self_type,
                     ref impl_items) => {
                self.resolve_implementation(generics,
                                            opt_trait_ref,
                                            &**self_type,
                                            item.id,
                                            &impl_items[..]);
            }
            ItemTrait(_, ref generics, ref bounds, ref trait_items) => {
                self.check_if_primitive_type_name(name, item.span);
                // Create a new rib for the trait-wide type parameters.
                self.with_type_parameter_rib(HasTypeParameters(generics,
                                                               TypeSpace,
                                                               ItemRibKind),
                                             |this| {
                    // Inside the trait body, `Self` refers to the trait.
                    this.with_self_rib(DefSelfTy(Some(local_def(item.id)), None), |this| {
                        this.visit_generics(generics);
                        visit::walk_ty_param_bounds_helper(this, bounds);
                        for trait_item in trait_items {
                            // Create a new rib for the trait_item-specific type
                            // parameters.
                            //
                            // FIXME #4951: Do we need a node ID here?
                            match trait_item.node {
                                ast::ConstTraitItem(_, ref default) => {
                                    // Only impose the restrictions of
                                    // ConstRibKind if there's an actual constant
                                    // expression in a provided default.
                                    if default.is_some() {
                                        this.with_constant_rib(|this| {
                                            visit::walk_trait_item(this, trait_item)
                                        });
                                    } else {
                                        visit::walk_trait_item(this, trait_item)
                                    }
                                }
                                ast::MethodTraitItem(ref sig, _) => {
                                    let type_parameters =
                                        HasTypeParameters(&sig.generics,
                                                          FnSpace,
                                                          MethodRibKind);
                                    this.with_type_parameter_rib(type_parameters, |this| {
                                        visit::walk_trait_item(this, trait_item)
                                    });
                                }
                                ast::TypeTraitItem(..) => {
                                    this.check_if_primitive_type_name(trait_item.ident.name,
                                                                      trait_item.span);
                                    this.with_type_parameter_rib(NoTypeParameters, |this| {
                                        visit::walk_trait_item(this, trait_item)
                                    });
                                }
                            };
                        }
                    });
                });
            }
            ItemMod(_) | ItemForeignMod(_) => {
                // Modules get their own scope in the module graph.
                self.with_scope(Some(name), |this| {
                    visit::walk_item(this, item);
                });
            }
            ItemConst(..) | ItemStatic(..) => {
                self.with_constant_rib(|this| {
                    visit::walk_item(this, item);
                });
            }
            ItemUse(ref view_path) => {
                // check for imports shadowing primitive types
                if let ast::ViewPathSimple(ident, _) = view_path.node {
                    match self.def_map.borrow().get(&item.id).map(|d| d.full_def()) {
                        Some(DefTy(..)) | Some(DefStruct(..)) | Some(DefTrait(..)) | None => {
                            self.check_if_primitive_type_name(ident.name, item.span);
                        }
                        _ => {}
                    }
                }
            }
            ItemExternCrate(_) | ItemMac(..) => {
                // do nothing, these are just around to be encoded
            }
        }
    }
fn with_type_parameter_rib<F>(&mut self, type_parameters: TypeParameters, f: F) where
F: FnOnce(&mut Resolver),
{
match type_parameters {
HasTypeParameters(generics, space, rib_kind) => {
let mut function_type_rib = Rib::new(rib_kind);
let mut seen_bindings = HashSet::new();
for (index, type_parameter) in generics.ty_params.iter().enumerate() {
let name = type_parameter.ident.name;
debug!("with_type_parameter_rib: {}", type_parameter.id);
if seen_bindings.contains(&name) {
self.resolve_error(type_parameter.span,
&format!("the name `{}` is already \
used for a type \
parameter in this type \
parameter list",
token::get_name(name)))
}
seen_bindings.insert(name);
// plain insert (no renaming)
function_type_rib.bindings.insert(name,
DlDef(DefTyParam(space,
index as u32,
local_def(type_parameter.id),
name)));
}
self.type_ribs.push(function_type_rib);
}
NoTypeParameters => {
// Nothing to do.
}
}
f(self);
match type_parameters {
HasTypeParameters(..) => { self.type_ribs.pop(); }
NoTypeParameters => { }
}
}
    /// Runs `f` with a fresh normal label rib pushed, popping it afterwards.
    fn with_label_rib<F>(&mut self, f: F) where
        F: FnOnce(&mut Resolver),
    {
        self.label_ribs.push(Rib::new(NormalRibKind));
        f(self);
        self.label_ribs.pop();
    }
    /// Runs `f` inside constant-item ribs for both the value and type
    /// namespaces; uses of non-constant outer state are then rejected
    /// (see the `ConstantItemRibKind` arms in `upvarify`).
    fn with_constant_rib<F>(&mut self, f: F) where
        F: FnOnce(&mut Resolver),
    {
        self.value_ribs.push(Rib::new(ConstantItemRibKind));
        self.type_ribs.push(Rib::new(ConstantItemRibKind));
        f(self);
        self.type_ribs.pop();
        self.value_ribs.pop();
    }
fn resolve_function(&mut self,
rib_kind: RibKind,
declaration: &FnDecl,
block: &Block) {
// Create a value rib for the function.
self.value_ribs.push(Rib::new(rib_kind));
// Create a label rib for the function.
self.label_ribs.push(Rib::new(rib_kind));
// Add each argument to the rib.
let mut bindings_list = HashMap::new();
for argument in &declaration.inputs {
self.resolve_pattern(&*argument.pat,
ArgumentIrrefutableMode,
&mut bindings_list);
self.visit_ty(&*argument.ty);
debug!("(resolving function) recorded argument");
}
visit::walk_fn_ret_ty(self, &declaration.output);
// Resolve the function body.
self.visit_block(&*block);
debug!("(resolving function) leaving function");
self.label_ribs.pop();
self.value_ribs.pop();
}
fn resolve_trait_reference(&mut self,
id: NodeId,
trait_path: &Path,
path_depth: usize)
-> Result<PathResolution, ()> {
if let Some(path_res) = self.resolve_path(id, trait_path, path_depth, TypeNS, true) {
if let DefTrait(_) = path_res.base_def {
debug!("(resolving trait) found trait def: {:?}", path_res);
Ok(path_res)
} else {
self.resolve_error(trait_path.span,
&format!("`{}` is not a trait",
path_names_to_string(trait_path, path_depth)));
// If it's a typedef, give a note
if let DefTy(..) = path_res.base_def {
self.session.span_note(trait_path.span,
"`type` aliases cannot be used for traits");
}
Err(())
}
} else {
let msg = format!("use of undeclared trait name `{}`",
path_names_to_string(trait_path, path_depth));
self.resolve_error(trait_path.span, &msg);
Err(())
}
}
fn resolve_generics(&mut self, generics: &Generics) {
for type_parameter in &*generics.ty_params {
self.check_if_primitive_type_name(type_parameter.ident.name, type_parameter.span);
}
for predicate in &generics.where_clause.predicates {
match predicate {
&ast::WherePredicate::BoundPredicate(_) |
&ast::WherePredicate::RegionPredicate(_) => {}
&ast::WherePredicate::EqPredicate(ref eq_pred) => {
let path_res = self.resolve_path(eq_pred.id, &eq_pred.path, 0, TypeNS, true);
if let Some(PathResolution { base_def: DefTyParam(..), .. }) = path_res {
self.record_def(eq_pred.id, path_res.unwrap());
} else {
self.resolve_error(eq_pred.path.span, "undeclared associated type");
}
}
}
}
visit::walk_generics(self, generics);
}
    /// Runs `f` with `current_self_type` set to `self_type`, restoring the
    /// previous value afterwards and returning `f`'s result.
    fn with_current_self_type<T, F>(&mut self, self_type: &Ty, f: F) -> T
        where F: FnOnce(&mut Resolver) -> T
    {
        // Handle nested impls (inside fn bodies)
        let previous_value = replace(&mut self.current_self_type, Some(self_type.clone()));
        let result = f(self);
        self.current_self_type = previous_value;
        result
    }
    /// Resolves `opt_trait_ref` (if any), installs it as the current trait
    /// reference while running `f`, and passes `f` the trait's `DefId`
    /// (`None` when there is no trait or it failed to resolve).
    ///
    /// The previous `current_trait_ref` is saved and restored, so nested
    /// impls behave correctly.
    fn with_optional_trait_ref<T, F>(&mut self,
                                     opt_trait_ref: Option<&TraitRef>,
                                     f: F)
                                     -> T
        where F: FnOnce(&mut Resolver, Option<DefId>) -> T
    {
        let mut new_val = None;
        let mut new_id = None;
        if let Some(trait_ref) = opt_trait_ref {
            if let Ok(path_res) = self.resolve_trait_reference(trait_ref.ref_id,
                                                              &trait_ref.path, 0) {
                // A trait reference must resolve fully (depth 0).
                assert!(path_res.depth == 0);
                self.record_def(trait_ref.ref_id, path_res);
                new_val = Some((path_res.base_def.def_id(), trait_ref.clone()));
                new_id = Some(path_res.base_def.def_id());
            }
            // Walk the trait ref unconditionally (even on failure).
            visit::walk_trait_ref(self, trait_ref);
        }
        let original_trait_ref = replace(&mut self.current_trait_ref, new_val);
        let result = f(self, new_id);
        self.current_trait_ref = original_trait_ref;
        result
    }
fn with_self_rib<F>(&mut self, self_def: Def, f: F)
where F: FnOnce(&mut Resolver)
{
let mut self_type_rib = Rib::new(NormalRibKind);
// plain insert (no renaming, types are not currently hygienic....)
let name = special_names::type_self;
self_type_rib.bindings.insert(name, DlDef(self_def));
self.type_ribs.push(self_type_rib);
f(self);
self.type_ribs.pop();
}
    /// Resolves an `impl` block: its generics, optional trait reference,
    /// self type, and every contained item, with `Self` and the current
    /// trait reference installed for the duration.
    fn resolve_implementation(&mut self,
                              generics: &Generics,
                              opt_trait_reference: &Option<TraitRef>,
                              self_type: &Ty,
                              item_id: NodeId,
                              impl_items: &[P<ImplItem>]) {
        // If applicable, create a rib for the type parameters.
        self.with_type_parameter_rib(HasTypeParameters(generics,
                                                       TypeSpace,
                                                       ItemRibKind),
                                     |this| {
            // Resolve the type parameters.
            this.visit_generics(generics);
            // Resolve the trait reference, if necessary.
            this.with_optional_trait_ref(opt_trait_reference.as_ref(), |this, trait_id| {
                // Resolve the self type.
                this.visit_ty(self_type);
                // Within the impl body, `Self` names the impl's self type.
                this.with_self_rib(DefSelfTy(trait_id, Some((item_id, self_type.id))), |this| {
                    this.with_current_self_type(self_type, |this| {
                        for impl_item in impl_items {
                            match impl_item.node {
                                ConstImplItem(..) => {
                                    // If this is a trait impl, ensure the method
                                    // exists in trait
                                    this.check_trait_item(impl_item.ident.name,
                                                          impl_item.span);
                                    this.with_constant_rib(|this| {
                                        visit::walk_impl_item(this, impl_item);
                                    });
                                }
                                MethodImplItem(ref sig, _) => {
                                    // If this is a trait impl, ensure the method
                                    // exists in trait
                                    this.check_trait_item(impl_item.ident.name,
                                                          impl_item.span);
                                    // We also need a new scope for the method-
                                    // specific type parameters.
                                    let type_parameters =
                                        HasTypeParameters(&sig.generics,
                                                          FnSpace,
                                                          MethodRibKind);
                                    this.with_type_parameter_rib(type_parameters, |this| {
                                        visit::walk_impl_item(this, impl_item);
                                    });
                                }
                                TypeImplItem(ref ty) => {
                                    // If this is a trait impl, ensure the method
                                    // exists in trait
                                    this.check_trait_item(impl_item.ident.name,
                                                          impl_item.span);
                                    this.visit_ty(ty);
                                }
                                ast::MacImplItem(_) => {}
                            }
                        }
                    });
                });
            });
        });
    }
    /// When a trait is currently being implemented, verifies that `name` is
    /// actually a member of that trait and reports an error otherwise.
    fn check_trait_item(&self, name: Name, span: Span) {
        // If there is a TraitRef in scope for an impl, then the method must be in the trait.
        if let Some((did, ref trait_ref)) = self.current_trait_ref {
            if !self.trait_item_map.contains_key(&(name, did)) {
                let path_str = path_names_to_string(&trait_ref.path, 0);
                // NOTE(review): `resolve_implementation` also calls this for
                // associated consts and types, where the "method" wording is
                // imprecise — confirm intent before changing the diagnostic.
                self.resolve_error(span,
                                   &format!("method `{}` is not a member of trait `{}`",
                                            token::get_name(name),
                                            path_str));
            }
        }
    }
fn resolve_local(&mut self, local: &Local) {
// Resolve the type.
visit::walk_ty_opt(self, &local.ty);
// Resolve the initializer.
visit::walk_expr_opt(self, &local.init);
// Resolve the pattern.
self.resolve_pattern(&*local.pat,
LocalIrrefutableMode,
&mut HashMap::new());
}
// build a map from pattern identifiers to binding-info's.
// this is done hygienically. This could arise for a macro
// that expands into an or-pattern where one 'x' was from the
// user and one 'x' came from the macro.
fn binding_mode_map(&mut self, pat: &Pat) -> BindingMap {
let mut result = HashMap::new();
pat_bindings(&self.def_map, pat, |binding_mode, _id, sp, path1| {
let name = mtwt::resolve(path1.node);
result.insert(name, BindingInfo {
span: sp,
binding_mode: binding_mode
});
});
return result;
}
// check that all of the arms in an or-pattern have exactly the
// same set of bindings, with the same binding modes for each.
fn check_consistent_bindings(&mut self, arm: &Arm) {
if arm.pats.is_empty() {
return
}
let map_0 = self.binding_mode_map(&*arm.pats[0]);
for (i, p) in arm.pats.iter().enumerate() {
let map_i = self.binding_mode_map(&**p);
for (&key, &binding_0) in &map_0 {
match map_i.get(&key) {
None => {
self.resolve_error(
p.span,
&format!("variable `{}` from pattern #1 is \
not bound in pattern #{}",
token::get_name(key),
i + 1));
}
Some(binding_i) => {
if binding_0.binding_mode != binding_i.binding_mode {
self.resolve_error(
binding_i.span,
&format!("variable `{}` is bound with different \
mode in pattern #{} than in pattern #1",
token::get_name(key),
i + 1));
}
}
}
}
for (&key, &binding) in &map_i {
if !map_0.contains_key(&key) {
self.resolve_error(
binding.span,
&format!("variable `{}` from pattern {}{} is \
not bound in pattern {}1",
token::get_name(key),
"#", i + 1, "#"));
}
}
}
}
fn resolve_arm(&mut self, arm: &Arm) {
self.value_ribs.push(Rib::new(NormalRibKind));
let mut bindings_list = HashMap::new();
for pattern in &arm.pats {
self.resolve_pattern(&**pattern, RefutableMode, &mut bindings_list);
}
// This has to happen *after* we determine which
// pat_idents are variants
self.check_consistent_bindings(arm);
visit::walk_expr_opt(self, &arm.guard);
self.visit_expr(&*arm.body);
self.value_ribs.pop();
}
    /// Resolves a block: pushes a value rib, enters the block's anonymous
    /// module (when one exists), rejects imports after non-item statements
    /// (E0154), and walks the statements.
    fn resolve_block(&mut self, block: &Block) {
        debug!("(resolving block) entering block");
        self.value_ribs.push(Rib::new(NormalRibKind));
        // Move down in the graph, if there's an anonymous module rooted here.
        let orig_module = self.current_module.clone();
        match orig_module.anonymous_children.borrow().get(&block.id) {
            None => { /* Nothing to do. */ }
            Some(anonymous_module) => {
                debug!("(resolving block) found anonymous module, moving \
                        down");
                self.current_module = anonymous_module.clone();
            }
        }
        // Check for imports appearing after non-item statements.
        let mut found_non_item = false;
        for statement in &block.stmts {
            if let ast::StmtDecl(ref declaration, _) = statement.node {
                if let ast::DeclItem(ref i) = declaration.node {
                    match i.node {
                        // Only imports *after* some non-item statement are
                        // rejected; leading imports are fine.
                        ItemExternCrate(_) | ItemUse(_) if found_non_item => {
                            span_err!(self.session, i.span, E0154,
                                "imports are not allowed after non-item statements");
                        }
                        _ => {}
                    }
                } else {
                    found_non_item = true
                }
            } else {
                found_non_item = true;
            }
        }
        // Descend into the block.
        visit::walk_block(self, block);
        // Move back up.
        self.current_module = orig_module;
        self.value_ribs.pop();
        debug!("(resolving block) leaving block");
    }
    /// Resolves a type. For `TyPath` nodes, records the resolution in the
    /// def map or reports an undeclared-name error; embedded types are
    /// always walked.
    fn resolve_type(&mut self, ty: &Ty) {
        match ty.node {
            TyPath(ref maybe_qself, ref path) => {
                let resolution =
                    match self.resolve_possibly_assoc_item(ty.id,
                                                           maybe_qself.as_ref(),
                                                           path,
                                                           TypeNS,
                                                           true) {
                        // `<T>::a::b::c` is resolved by typeck alone.
                        TypecheckRequired => {
                            // Resolve embedded types.
                            visit::walk_ty(self, ty);
                            return;
                        }
                        ResolveAttempt(resolution) => resolution,
                    };
                // This is a path in the type namespace. Walk through scopes
                // looking for it.
                match resolution {
                    Some(def) => {
                        // Write the result into the def map.
                        debug!("(resolving type) writing resolution for `{}` \
                               (id {}) = {:?}",
                               path_names_to_string(path, 0),
                               ty.id, def);
                        self.record_def(ty.id, def);
                    }
                    None => {
                        // Keep reporting some errors even if they're ignored above.
                        self.resolve_path(ty.id, path, 0, TypeNS, true);
                        let kind = if maybe_qself.is_some() {
                            "associated type"
                        } else {
                            "type name"
                        };
                        let msg = format!("use of undeclared {} `{}`", kind,
                                          path_names_to_string(path, 0));
                        self.resolve_error(ty.span, &msg[..]);
                    }
                }
            }
            _ => {}
        }
        // Resolve embedded types.
        visit::walk_ty(self, ty);
    }
    /// Resolves every sub-pattern of `pattern`, classifying bare identifiers
    /// as variants/constants/fresh bindings, resolving enum/struct/qualified
    /// paths, and recording bindings into `bindings_list`.
    fn resolve_pattern(&mut self,
                       pattern: &Pat,
                       mode: PatternBindingMode,
                       // Maps idents to the node ID for the (outermost)
                       // pattern that binds them
                       bindings_list: &mut HashMap<Name, NodeId>) {
        let pat_id = pattern.id;
        walk_pat(pattern, |pattern| {
            match pattern.node {
                PatIdent(binding_mode, ref path1, _) => {
                    // The meaning of pat_ident with no type parameters
                    // depends on whether an enum variant or unit-like struct
                    // with that name is in scope. The probing lookup has to
                    // be careful not to emit spurious errors. Only matching
                    // patterns (match) can match nullary variants or
                    // unit-like structs. For binding patterns (let), matching
                    // such a value is simply disallowed (since it's rarely
                    // what you want).
                    let ident = path1.node;
                    // Hygienic name: macro-expanded idents resolve distinctly.
                    let renamed = mtwt::resolve(ident);
                    match self.resolve_bare_identifier_pattern(ident.name, pattern.span) {
                        FoundStructOrEnumVariant(def, lp)
                                if mode == RefutableMode => {
                            debug!("(resolving pattern) resolving `{}` to \
                                    struct or enum variant",
                                   token::get_name(renamed));
                            self.enforce_default_binding_mode(
                                pattern,
                                binding_mode,
                                "an enum variant");
                            self.record_def(pattern.id, PathResolution {
                                base_def: def,
                                last_private: lp,
                                depth: 0
                            });
                        }
                        FoundStructOrEnumVariant(..) => {
                            // Irrefutable context: shadowing a variant name
                            // is rejected outright.
                            self.resolve_error(
                                pattern.span,
                                &format!("declaration of `{}` shadows an enum \
                                          variant or unit-like struct in \
                                          scope",
                                         token::get_name(renamed)));
                        }
                        FoundConst(def, lp) if mode == RefutableMode => {
                            debug!("(resolving pattern) resolving `{}` to \
                                    constant",
                                   token::get_name(renamed));
                            self.enforce_default_binding_mode(
                                pattern,
                                binding_mode,
                                "a constant");
                            self.record_def(pattern.id, PathResolution {
                                base_def: def,
                                last_private: lp,
                                depth: 0
                            });
                        }
                        FoundConst(..) => {
                            self.resolve_error(pattern.span,
                                                  "only irrefutable patterns \
                                                   allowed here");
                        }
                        BareIdentifierPatternUnresolved => {
                            // Not a variant or constant: this identifier
                            // introduces a fresh local binding.
                            debug!("(resolving pattern) binding `{}`",
                                   token::get_name(renamed));
                            let def = DefLocal(pattern.id);
                            // Record the definition so that later passes
                            // will be able to distinguish variants from
                            // locals in patterns.
                            self.record_def(pattern.id, PathResolution {
                                base_def: def,
                                last_private: LastMod(AllPublic),
                                depth: 0
                            });
                            // Add the binding to the local ribs, if it
                            // doesn't already exist in the bindings list. (We
                            // must not add it if it's in the bindings list
                            // because that breaks the assumptions later
                            // passes make about or-patterns.)
                            if !bindings_list.contains_key(&renamed) {
                                let this = &mut *self;
                                let last_rib = this.value_ribs.last_mut().unwrap();
                                last_rib.bindings.insert(renamed, DlDef(def));
                                bindings_list.insert(renamed, pat_id);
                            } else if mode == ArgumentIrrefutableMode &&
                                    bindings_list.contains_key(&renamed) {
                                // Forbid duplicate bindings in the same
                                // parameter list.
                                self.resolve_error(pattern.span,
                                                   &format!("identifier `{}` \
                                                             is bound more \
                                                             than once in \
                                                             this parameter \
                                                             list",
                                                            token::get_ident(
                                                                ident))
                                                   )
                            } else if bindings_list.get(&renamed) ==
                                    Some(&pat_id) {
                                // Then this is a duplicate variable in the
                                // same disjunction, which is an error.
                                self.resolve_error(pattern.span,
                                    &format!("identifier `{}` is bound \
                                              more than once in the same \
                                              pattern",
                                             token::get_ident(ident)));
                            }
                            // Else, not bound in the same pattern: do
                            // nothing.
                        }
                    }
                }
                PatEnum(ref path, _) => {
                    // This must be an enum variant, struct or const.
                    let resolution =
                        match self.resolve_possibly_assoc_item(pat_id, None,
                                                               path, ValueNS,
                                                               false) {
                            // The below shouldn't happen because all
                            // qualified paths should be in PatQPath.
                            TypecheckRequired =>
                                self.session.span_bug(
                                    path.span,
                                    "resolve_possibly_assoc_item claimed
                                     that a path in PatEnum requires typecheck
                                     to resolve, but qualified paths should be
                                     PatQPath"),
                            ResolveAttempt(resolution) => resolution,
                        };
                    if let Some(path_res) = resolution {
                        match path_res.base_def {
                            DefVariant(..) | DefStruct(..) | DefConst(..) => {
                                self.record_def(pattern.id, path_res);
                            }
                            DefStatic(..) => {
                                self.resolve_error(path.span,
                                                   "static variables cannot be \
                                                    referenced in a pattern, \
                                                    use a `const` instead");
                            }
                            _ => {
                                // If anything ends up here entirely resolved,
                                // it's an error. If anything ends up here
                                // partially resolved, that's OK, because it may
                                // be a `T::CONST` that typeck will resolve to
                                // an inherent impl.
                                if path_res.depth == 0 {
                                    self.resolve_error(
                                        path.span,
                                        &format!("`{}` is not an enum variant, struct or const",
                                                 token::get_ident(
                                                     path.segments.last().unwrap().identifier)));
                                } else {
                                    self.record_def(pattern.id, path_res);
                                }
                            }
                        }
                    } else {
                        self.resolve_error(path.span,
                            &format!("unresolved enum variant, struct or const `{}`",
                                     token::get_ident(path.segments.last().unwrap().identifier)));
                    }
                    visit::walk_path(self, path);
                }
                PatQPath(ref qself, ref path) => {
                    // Associated constants only.
                    let resolution =
                        match self.resolve_possibly_assoc_item(pat_id, Some(qself),
                                                               path, ValueNS,
                                                               false) {
                            TypecheckRequired => {
                                // All `<T>::CONST` should end up here, and will
                                // require use of the trait map to resolve
                                // during typechecking.
                                let const_name = path.segments.last().unwrap()
                                                     .identifier.name;
                                let traits = self.get_traits_containing_item(const_name);
                                self.trait_map.insert(pattern.id, traits);
                                visit::walk_pat(self, pattern);
                                return true;
                            }
                            ResolveAttempt(resolution) => resolution,
                        };
                    if let Some(path_res) = resolution {
                        match path_res.base_def {
                            // All `<T as Trait>::CONST` should end up here, and
                            // have the trait already selected.
                            DefAssociatedConst(..) => {
                                self.record_def(pattern.id, path_res);
                            }
                            _ => {
                                self.resolve_error(path.span,
                                    &format!("`{}` is not an associated const",
                                             token::get_ident(
                                                 path.segments.last().unwrap().identifier)));
                            }
                        }
                    } else {
                        self.resolve_error(path.span,
                            &format!("unresolved associated const `{}`",
                                     token::get_ident(path.segments.last().unwrap().identifier)));
                    }
                    visit::walk_pat(self, pattern);
                }
                PatStruct(ref path, _, _) => {
                    // Struct patterns resolve their path in the type namespace.
                    match self.resolve_path(pat_id, path, 0, TypeNS, false) {
                        Some(definition) => {
                            self.record_def(pattern.id, definition);
                        }
                        result => {
                            debug!("(resolving pattern) didn't find struct \
                                    def: {:?}", result);
                            let msg = format!("`{}` does not name a structure",
                                              path_names_to_string(path, 0));
                            self.resolve_error(path.span, &msg[..]);
                        }
                    }
                    visit::walk_path(self, path);
                }
                PatLit(_) | PatRange(..) => {
                    // Literal/range endpoints are expressions; walk them.
                    visit::walk_pat(self, pattern);
                }
                _ => {
                    // Nothing to do.
                }
            }
            // `true`: keep walking into sub-patterns.
            true
        });
    }
fn resolve_bare_identifier_pattern(&mut self, name: Name, span: Span)
-> BareIdentifierPatternResolution {
let module = self.current_module.clone();
match self.resolve_item_in_lexical_scope(module,
name,
ValueNS) {
Success((target, _)) => {
debug!("(resolve bare identifier pattern) succeeded in \
finding {} at {:?}",
token::get_name(name),
target.bindings.value_def.borrow());
match *target.bindings.value_def.borrow() {
None => {
panic!("resolved name in the value namespace to a \
set of name bindings with no def?!");
}
Some(def) => {
// For the two success cases, this lookup can be
// considered as not having a private component because
// the lookup happened only within the current module.
match def.def {
def @ DefVariant(..) | def @ DefStruct(..) => {
return FoundStructOrEnumVariant(def, LastMod(AllPublic));
}
def @ DefConst(..) | def @ DefAssociatedConst(..) => {
return FoundConst(def, LastMod(AllPublic));
}
DefStatic(..) => {
self.resolve_error(span,
"static variables cannot be \
referenced in a pattern, \
use a `const` instead");
return BareIdentifierPatternUnresolved;
}
_ => {
return BareIdentifierPatternUnresolved;
}
}
}
}
}
Indeterminate => {
panic!("unexpected indeterminate result");
}
Failed(err) => {
match err {
Some((span, msg)) => {
self.resolve_error(span, &format!("failed to resolve: {}",
msg));
}
None => ()
}
debug!("(resolve bare identifier pattern) failed to find {}",
token::get_name(name));
return BareIdentifierPatternUnresolved;
}
}
}
/// Handles paths that may refer to associated items
fn resolve_possibly_assoc_item(&mut self,
id: NodeId,
maybe_qself: Option<&ast::QSelf>,
path: &Path,
namespace: Namespace,
check_ribs: bool)
-> AssocItemResolveResult
{
match maybe_qself {
Some(&ast::QSelf { position: 0, .. }) =>
return TypecheckRequired,
_ => {}
}
let max_assoc_types = if let Some(qself) = maybe_qself {
// Make sure the trait is valid.
let _ = self.resolve_trait_reference(id, path, 1);
path.segments.len() - qself.position
} else {
path.segments.len()
};
let mut resolution = self.with_no_errors(|this| {
this.resolve_path(id, path, 0, namespace, check_ribs)
});
for depth in 1..max_assoc_types {
if resolution.is_some() {
break;
}
self.with_no_errors(|this| {
resolution = this.resolve_path(id, path, depth,
TypeNS, true);
});
}
if let Some(DefMod(_)) = resolution.map(|r| r.base_def) {
// A module is not a valid type or value.
resolution = None;
}
ResolveAttempt(resolution)
}
/// If `check_ribs` is true, checks the local definitions first; i.e.
/// doesn't skip straight to the containing module.
/// Skips `path_depth` trailing segments, which is also reflected in the
/// returned value. See `middle::def::PathResolution` for more info.
fn resolve_path(&mut self,
id: NodeId,
path: &Path,
path_depth: usize,
namespace: Namespace,
check_ribs: bool) -> Option<PathResolution> {
let span = path.span;
let segments = &path.segments[..path.segments.len()-path_depth];
let mk_res = |(def, lp)| PathResolution::new(def, lp, path_depth);
if path.global {
let def = self.resolve_crate_relative_path(span, segments, namespace);
return def.map(mk_res);
}
// Try to find a path to an item in a module.
let unqualified_def =
self.resolve_identifier(segments.last().unwrap().identifier,
namespace,
check_ribs,
span);
if segments.len() <= 1 {
return unqualified_def.map(mk_res);
}
let def = self.resolve_module_relative_path(span, segments, namespace);
match (def, unqualified_def) {
(Some((ref d, _)), Some((ref ud, _))) if *d == *ud => {
self.session
.add_lint(lint::builtin::UNUSED_QUALIFICATIONS,
id, span,
"unnecessary qualification".to_string());
}
_ => {}
}
def.map(mk_res)
}
// Resolve a single identifier.
fn resolve_identifier(&mut self,
identifier: Ident,
namespace: Namespace,
check_ribs: bool,
span: Span)
-> Option<(Def, LastPrivate)> {
// First, check to see whether the name is a primitive type.
if namespace == TypeNS {
if let Some(&prim_ty) = self.primitive_type_table
.primitive_types
.get(&identifier.name) {
return Some((DefPrimTy(prim_ty), LastMod(AllPublic)));
}
}
if check_ribs {
if let Some(def) = self.resolve_identifier_in_local_ribs(identifier,
namespace,
span) {
return Some((def, LastMod(AllPublic)));
}
}
self.resolve_item_by_name_in_lexical_scope(identifier.name, namespace)
}
// FIXME #4952: Merge me with resolve_name_in_module?
fn resolve_definition_of_name_in_module(&mut self,
containing_module: Rc<Module>,
name: Name,
namespace: Namespace)
-> NameDefinition {
// First, search children.
build_reduced_graph::populate_module_if_necessary(self, &containing_module);
match containing_module.children.borrow().get(&name) {
Some(child_name_bindings) => {
match child_name_bindings.def_for_namespace(namespace) {
Some(def) => {
// Found it. Stop the search here.
let p = child_name_bindings.defined_in_public_namespace(namespace);
let lp = if p {LastMod(AllPublic)} else {
LastMod(DependsOn(def.def_id()))
};
return ChildNameDefinition(def, lp);
}
None => {}
}
}
None => {}
}
// Next, search import resolutions.
match containing_module.import_resolutions.borrow().get(&name) {
Some(import_resolution) if import_resolution.is_public => {
if let Some(target) = (*import_resolution).target_for_namespace(namespace) {
match target.bindings.def_for_namespace(namespace) {
Some(def) => {
// Found it.
let id = import_resolution.id(namespace);
// track imports and extern crates as well
self.used_imports.insert((id, namespace));
self.record_import_use(id, name);
match target.target_module.def_id.get() {
Some(DefId{krate: kid, ..}) => {
self.used_crates.insert(kid);
},
_ => {}
}
return ImportNameDefinition(def, LastMod(AllPublic));
}
None => {
// This can happen with external impls, due to
// the imperfect way we read the metadata.
}
}
}
}
Some(..) | None => {} // Continue.
}
// Finally, search through external children.
if namespace == TypeNS {
if let Some(module) = containing_module.external_module_children.borrow()
.get(&name).cloned() {
if let Some(def_id) = module.def_id.get() {
// track used crates
self.used_crates.insert(def_id.krate);
let lp = if module.is_public {LastMod(AllPublic)} else {
LastMod(DependsOn(def_id))
};
return ChildNameDefinition(DefMod(def_id), lp);
}
}
}
return NoNameDefinition;
}
// resolve a "module-relative" path, e.g. a::b::c
fn resolve_module_relative_path(&mut self,
span: Span,
segments: &[ast::PathSegment],
namespace: Namespace)
-> Option<(Def, LastPrivate)> {
let module_path = segments.init().iter()
.map(|ps| ps.identifier.name)
.collect::<Vec<_>>();
let containing_module;
let last_private;
let current_module = self.current_module.clone();
match self.resolve_module_path(current_module,
&module_path[..],
UseLexicalScope,
span,
PathSearch) {
Failed(err) => {
let (span, msg) = match err {
Some((span, msg)) => (span, msg),
None => {
let msg = format!("Use of undeclared type or module `{}`",
names_to_string(&module_path));
(span, msg)
}
};
self.resolve_error(span, &format!("failed to resolve. {}",
msg));
return None;
}
Indeterminate => panic!("indeterminate unexpected"),
Success((resulting_module, resulting_last_private)) => {
containing_module = resulting_module;
last_private = resulting_last_private;
}
}
let name = segments.last().unwrap().identifier.name;
let def = match self.resolve_definition_of_name_in_module(containing_module.clone(),
name,
namespace) {
NoNameDefinition => {
// We failed to resolve the name. Report an error.
return None;
}
ChildNameDefinition(def, lp) | ImportNameDefinition(def, lp) => {
(def, last_private.or(lp))
}
};
if let Some(DefId{krate: kid, ..}) = containing_module.def_id.get() {
self.used_crates.insert(kid);
}
return Some(def);
}
/// Invariant: This must be called only during main resolution, not during
/// import resolution.
fn resolve_crate_relative_path(&mut self,
span: Span,
segments: &[ast::PathSegment],
namespace: Namespace)
-> Option<(Def, LastPrivate)> {
let module_path = segments.init().iter()
.map(|ps| ps.identifier.name)
.collect::<Vec<_>>();
let root_module = self.graph_root.get_module();
let containing_module;
let last_private;
match self.resolve_module_path_from_root(root_module,
&module_path[..],
0,
span,
PathSearch,
LastMod(AllPublic)) {
Failed(err) => {
let (span, msg) = match err {
Some((span, msg)) => (span, msg),
None => {
let msg = format!("Use of undeclared module `::{}`",
names_to_string(&module_path[..]));
(span, msg)
}
};
self.resolve_error(span, &format!("failed to resolve. {}",
msg));
return None;
}
Indeterminate => {
panic!("indeterminate unexpected");
}
Success((resulting_module, resulting_last_private)) => {
containing_module = resulting_module;
last_private = resulting_last_private;
}
}
let name = segments.last().unwrap().identifier.name;
match self.resolve_definition_of_name_in_module(containing_module,
name,
namespace) {
NoNameDefinition => {
// We failed to resolve the name. Report an error.
return None;
}
ChildNameDefinition(def, lp) | ImportNameDefinition(def, lp) => {
return Some((def, last_private.or(lp)));
}
}
}
fn resolve_identifier_in_local_ribs(&mut self,
ident: Ident,
namespace: Namespace,
span: Span)
-> Option<Def> {
// Check the local set of ribs.
let search_result = match namespace {
ValueNS => {
let renamed = mtwt::resolve(ident);
self.search_ribs(&self.value_ribs, renamed, span)
}
TypeNS => {
let name = ident.name;
self.search_ribs(&self.type_ribs, name, span)
}
};
match search_result {
Some(DlDef(def)) => {
debug!("(resolving path in local ribs) resolved `{}` to local: {:?}",
token::get_ident(ident),
def);
Some(def)
}
Some(DlField) | Some(DlImpl(_)) | None => {
None
}
}
}
fn resolve_item_by_name_in_lexical_scope(&mut self,
name: Name,
namespace: Namespace)
-> Option<(Def, LastPrivate)> {
// Check the items.
let module = self.current_module.clone();
match self.resolve_item_in_lexical_scope(module,
name,
namespace) {
Success((target, _)) => {
match (*target.bindings).def_for_namespace(namespace) {
None => {
// This can happen if we were looking for a type and
// found a module instead. Modules don't have defs.
debug!("(resolving item path by identifier in lexical \
scope) failed to resolve {} after success...",
token::get_name(name));
return None;
}
Some(def) => {
debug!("(resolving item path in lexical scope) \
resolved `{}` to item",
token::get_name(name));
// This lookup is "all public" because it only searched
// for one identifier in the current module (couldn't
// have passed through reexports or anything like that.
return Some((def, LastMod(AllPublic)));
}
}
}
Indeterminate => {
panic!("unexpected indeterminate result");
}
Failed(err) => {
debug!("(resolving item path by identifier in lexical scope) \
failed to resolve {}", token::get_name(name));
if let Some((span, msg)) = err {
self.resolve_error(span, &format!("failed to resolve. {}", msg))
}
return None;
}
}
}
fn with_no_errors<T, F>(&mut self, f: F) -> T where
F: FnOnce(&mut Resolver) -> T,
{
self.emit_errors = false;
let rs = f(self);
self.emit_errors = true;
rs
}
fn resolve_error(&self, span: Span, s: &str) {
if self.emit_errors {
self.session.span_err(span, s);
}
}
fn find_fallback_in_self_type(&mut self, name: Name) -> FallbackSuggestion {
fn extract_path_and_node_id(t: &Ty, allow: FallbackChecks)
-> Option<(Path, NodeId, FallbackChecks)> {
match t.node {
TyPath(None, ref path) => Some((path.clone(), t.id, allow)),
TyPtr(ref mut_ty) => extract_path_and_node_id(&*mut_ty.ty, OnlyTraitAndStatics),
TyRptr(_, ref mut_ty) => extract_path_and_node_id(&*mut_ty.ty, allow),
// This doesn't handle the remaining `Ty` variants as they are not
// that commonly the self_type, it might be interesting to provide
// support for those in future.
_ => None,
}
}
fn get_module(this: &mut Resolver, span: Span, name_path: &[ast::Name])
-> Option<Rc<Module>> {
let root = this.current_module.clone();
let last_name = name_path.last().unwrap();
if name_path.len() == 1 {
match this.primitive_type_table.primitive_types.get(last_name) {
Some(_) => None,
None => {
match this.current_module.children.borrow().get(last_name) {
Some(child) => child.get_module_if_available(),
None => None
}
}
}
} else {
match this.resolve_module_path(root,
&name_path[..],
UseLexicalScope,
span,
PathSearch) {
Success((module, _)) => Some(module),
_ => None
}
}
}
fn is_static_method(this: &Resolver, did: DefId) -> bool {
if did.krate == ast::LOCAL_CRATE {
let sig = match this.ast_map.get(did.node) {
ast_map::NodeTraitItem(trait_item) => match trait_item.node {
ast::MethodTraitItem(ref sig, _) => sig,
_ => return false
},
ast_map::NodeImplItem(impl_item) => match impl_item.node {
ast::MethodImplItem(ref sig, _) => sig,
_ => return false
},
_ => return false
};
sig.explicit_self.node == ast::SelfStatic
} else {
csearch::is_static_method(&this.session.cstore, did)
}
}
let (path, node_id, allowed) = match self.current_self_type {
Some(ref ty) => match extract_path_and_node_id(ty, Everything) {
Some(x) => x,
None => return NoSuggestion,
},
None => return NoSuggestion,
};
if allowed == Everything {
// Look for a field with the same name in the current self_type.
match self.def_map.borrow().get(&node_id).map(|d| d.full_def()) {
Some(DefTy(did, _)) |
Some(DefStruct(did)) |
Some(DefVariant(_, did, _)) => match self.structs.get(&did) {
None => {}
Some(fields) => {
if fields.iter().any(|&field_name| name == field_name) {
return Field;
}
}
},
_ => {} // Self type didn't resolve properly
}
}
let name_path = path.segments.iter().map(|seg| seg.identifier.name).collect::<Vec<_>>();
// Look for a method in the current self type's impl module.
if let Some(module) = get_module(self, path.span, &name_path) {
if let Some(binding) = module.children.borrow().get(&name) {
if let Some(DefMethod(did, _)) = binding.def_for_namespace(ValueNS) {
if is_static_method(self, did) {
return StaticMethod(path_names_to_string(&path, 0))
}
if self.current_trait_ref.is_some() {
return TraitItem;
} else if allowed == Everything {
return Method;
}
}
}
}
// Look for a method in the current trait.
if let Some((trait_did, ref trait_ref)) = self.current_trait_ref {
if let Some(&did) = self.trait_item_map.get(&(name, trait_did)) {
if is_static_method(self, did) {
return TraitMethod(path_names_to_string(&trait_ref.path, 0));
} else {
return TraitItem;
}
}
}
NoSuggestion
}
fn find_best_match_for_name(&mut self, name: &str, max_distance: usize)
-> Option<String> {
let this = &mut *self;
let mut maybes: Vec<token::InternedString> = Vec::new();
let mut values: Vec<usize> = Vec::new();
for rib in this.value_ribs.iter().rev() {
for (&k, _) in &rib.bindings {
maybes.push(token::get_name(k));
values.push(usize::MAX);
}
}
let mut smallest = 0;
for (i, other) in maybes.iter().enumerate() {
values[i] = lev_distance(name, &other);
if values[i] <= values[smallest] {
smallest = i;
}
}
if !values.is_empty() &&
values[smallest] != usize::MAX &&
values[smallest] < name.len() + 2 &&
values[smallest] <= max_distance &&
name != &maybes[smallest][..] {
Some(maybes[smallest].to_string())
} else {
None
}
}
fn resolve_expr(&mut self, expr: &Expr) {
// First, record candidate traits for this expression if it could
// result in the invocation of a method call.
self.record_candidate_traits_for_expr_if_necessary(expr);
// Next, resolve the node.
match expr.node {
ExprPath(ref maybe_qself, ref path) => {
let resolution =
match self.resolve_possibly_assoc_item(expr.id,
maybe_qself.as_ref(),
path,
ValueNS,
true) {
// `<T>::a::b::c` is resolved by typeck alone.
TypecheckRequired => {
let method_name = path.segments.last().unwrap().identifier.name;
let traits = self.get_traits_containing_item(method_name);
self.trait_map.insert(expr.id, traits);
visit::walk_expr(self, expr);
return;
}
ResolveAttempt(resolution) => resolution,
};
// This is a local path in the value namespace. Walk through
// scopes looking for it.
if let Some(path_res) = resolution {
// Check if struct variant
if let DefVariant(_, _, true) = path_res.base_def {
let path_name = path_names_to_string(path, 0);
self.resolve_error(expr.span,
&format!("`{}` is a struct variant name, but \
this expression \
uses it like a function name",
path_name));
let msg = format!("did you mean to write: \
`{} {{ /* fields */ }}`?",
path_name);
if self.emit_errors {
self.session.fileline_help(expr.span, &msg);
} else {
self.session.span_help(expr.span, &msg);
}
} else {
// Write the result into the def map.
debug!("(resolving expr) resolved `{}`",
path_names_to_string(path, 0));
// Partial resolutions will need the set of traits in scope,
// so they can be completed during typeck.
if path_res.depth != 0 {
let method_name = path.segments.last().unwrap().identifier.name;
let traits = self.get_traits_containing_item(method_name);
self.trait_map.insert(expr.id, traits);
}
self.record_def(expr.id, path_res);
}
} else {
// Be helpful if the name refers to a struct
// (The pattern matching def_tys where the id is in self.structs
// matches on regular structs while excluding tuple- and enum-like
// structs, which wouldn't result in this error.)
let path_name = path_names_to_string(path, 0);
let type_res = self.with_no_errors(|this| {
this.resolve_path(expr.id, path, 0, TypeNS, false)
});
match type_res.map(|r| r.base_def) {
Some(DefTy(struct_id, _))
if self.structs.contains_key(&struct_id) => {
self.resolve_error(expr.span,
&format!("`{}` is a structure name, but \
this expression \
uses it like a function name",
path_name));
let msg = format!("did you mean to write: \
`{} {{ /* fields */ }}`?",
path_name);
if self.emit_errors {
self.session.fileline_help(expr.span, &msg);
} else {
self.session.span_help(expr.span, &msg);
}
}
_ => {
// Keep reporting some errors even if they're ignored above.
self.resolve_path(expr.id, path, 0, ValueNS, true);
let mut method_scope = false;
self.value_ribs.iter().rev().all(|rib| {
method_scope = match rib.kind {
MethodRibKind => true,
ItemRibKind | ConstantItemRibKind => false,
_ => return true, // Keep advancing
};
false // Stop advancing
});
if method_scope &&
&token::get_name(special_names::self_)[..] == path_name {
self.resolve_error(
expr.span,
"`self` is not available \
in a static method. Maybe a \
`self` argument is missing?");
} else {
let last_name = path.segments.last().unwrap().identifier.name;
let mut msg = match self.find_fallback_in_self_type(last_name) {
NoSuggestion => {
// limit search to 5 to reduce the number
// of stupid suggestions
self.find_best_match_for_name(&path_name, 5)
.map_or("".to_string(),
|x| format!("`{}`", x))
}
Field => format!("`self.{}`", path_name),
Method |
TraitItem =>
format!("to call `self.{}`", path_name),
TraitMethod(path_str) |
StaticMethod(path_str) =>
format!("to call `{}::{}`", path_str, path_name)
};
if !msg.is_empty() {
msg = format!(". Did you mean {}?", msg)
}
self.resolve_error(
expr.span,
&format!("unresolved name `{}`{}",
path_name, msg));
}
}
}
}
visit::walk_expr(self, expr);
}
ExprStruct(ref path, _, _) => {
// Resolve the path to the structure it goes to. We don't
// check to ensure that the path is actually a structure; that
// is checked later during typeck.
match self.resolve_path(expr.id, path, 0, TypeNS, false) {
Some(definition) => self.record_def(expr.id, definition),
None => {
debug!("(resolving expression) didn't find struct def",);
let msg = format!("`{}` does not name a structure",
path_names_to_string(path, 0));
self.resolve_error(path.span, &msg[..]);
}
}
visit::walk_expr(self, expr);
}
ExprLoop(_, Some(label)) | ExprWhile(_, _, Some(label)) => {
self.with_label_rib(|this| {
let def_like = DlDef(DefLabel(expr.id));
{
let rib = this.label_ribs.last_mut().unwrap();
let renamed = mtwt::resolve(label);
rib.bindings.insert(renamed, def_like);
}
visit::walk_expr(this, expr);
})
}
ExprBreak(Some(label)) | ExprAgain(Some(label)) => {
let renamed = mtwt::resolve(label);
match self.search_label(renamed) {
None => {
self.resolve_error(
expr.span,
&format!("use of undeclared label `{}`",
token::get_ident(label)))
}
Some(DlDef(def @ DefLabel(_))) => {
// Since this def is a label, it is never read.
self.record_def(expr.id, PathResolution {
base_def: def,
last_private: LastMod(AllPublic),
depth: 0
})
}
Some(_) => {
self.session.span_bug(expr.span,
"label wasn't mapped to a \
label def!")
}
}
}
_ => {
visit::walk_expr(self, expr);
}
}
}
fn record_candidate_traits_for_expr_if_necessary(&mut self, expr: &Expr) {
match expr.node {
ExprField(_, ident) => {
// FIXME(#6890): Even though you can't treat a method like a
// field, we need to add any trait methods we find that match
// the field name so that we can do some nice error reporting
// later on in typeck.
let traits = self.get_traits_containing_item(ident.node.name);
self.trait_map.insert(expr.id, traits);
}
ExprMethodCall(ident, _, _) => {
debug!("(recording candidate traits for expr) recording \
traits for {}",
expr.id);
let traits = self.get_traits_containing_item(ident.node.name);
self.trait_map.insert(expr.id, traits);
}
_ => {
// Nothing to do.
}
}
}
fn get_traits_containing_item(&mut self, name: Name) -> Vec<DefId> {
debug!("(getting traits containing item) looking for '{}'",
token::get_name(name));
fn add_trait_info(found_traits: &mut Vec<DefId>,
trait_def_id: DefId,
name: Name) {
debug!("(adding trait info) found trait {}:{} for method '{}'",
trait_def_id.krate,
trait_def_id.node,
token::get_name(name));
found_traits.push(trait_def_id);
}
let mut found_traits = Vec::new();
let mut search_module = self.current_module.clone();
loop {
// Look for the current trait.
match self.current_trait_ref {
Some((trait_def_id, _)) => {
if self.trait_item_map.contains_key(&(name, trait_def_id)) {
add_trait_info(&mut found_traits, trait_def_id, name);
}
}
None => {} // Nothing to do.
}
// Look for trait children.
build_reduced_graph::populate_module_if_necessary(self, &search_module);
{
for (_, child_names) in &*search_module.children.borrow() {
let def = match child_names.def_for_namespace(TypeNS) {
Some(def) => def,
None => continue
};
let trait_def_id = match def {
DefTrait(trait_def_id) => trait_def_id,
_ => continue,
};
if self.trait_item_map.contains_key(&(name, trait_def_id)) {
add_trait_info(&mut found_traits, trait_def_id, name);
}
}
}
// Look for imports.
for (_, import) in &*search_module.import_resolutions.borrow() {
let target = match import.target_for_namespace(TypeNS) {
None => continue,
Some(target) => target,
};
let did = match target.bindings.def_for_namespace(TypeNS) {
Some(DefTrait(trait_def_id)) => trait_def_id,
Some(..) | None => continue,
};
if self.trait_item_map.contains_key(&(name, did)) {
add_trait_info(&mut found_traits, did, name);
let id = import.type_id;
self.used_imports.insert((id, TypeNS));
let trait_name = self.get_trait_name(did);
self.record_import_use(id, trait_name);
if let Some(DefId{krate: kid, ..}) = target.target_module.def_id.get() {
self.used_crates.insert(kid);
}
}
}
match search_module.parent_link.clone() {
NoParentLink | ModuleParentLink(..) => break,
BlockParentLink(parent_module, _) => {
search_module = parent_module.upgrade().unwrap();
}
}
}
found_traits
}
fn record_def(&mut self, node_id: NodeId, resolution: PathResolution) {
debug!("(recording def) recording {:?} for {}", resolution, node_id);
assert!(match resolution.last_private {LastImport{..} => false, _ => true},
"Import should only be used for `use` directives");
if let Some(prev_res) = self.def_map.borrow_mut().insert(node_id, resolution) {
let span = self.ast_map.opt_span(node_id).unwrap_or(codemap::DUMMY_SP);
self.session.span_bug(span, &format!("path resolved multiple times \
({:?} before, {:?} now)",
prev_res, resolution));
}
}
fn enforce_default_binding_mode(&mut self,
pat: &Pat,
pat_binding_mode: BindingMode,
descr: &str) {
match pat_binding_mode {
BindByValue(_) => {}
BindByRef(..) => {
self.resolve_error(pat.span,
&format!("cannot use `ref` binding mode \
with {}",
descr));
}
}
}
//
// Diagnostics
//
// Diagnostics are not particularly efficient, because they're rarely
// hit.
//
#[allow(dead_code)] // useful for debugging
fn dump_module(&mut self, module_: Rc<Module>) {
debug!("Dump of module `{}`:", module_to_string(&*module_));
debug!("Children:");
build_reduced_graph::populate_module_if_necessary(self, &module_);
for (&name, _) in &*module_.children.borrow() {
debug!("* {}", token::get_name(name));
}
debug!("Import resolutions:");
let import_resolutions = module_.import_resolutions.borrow();
for (&name, import_resolution) in &*import_resolutions {
let value_repr;
match import_resolution.target_for_namespace(ValueNS) {
None => { value_repr = "".to_string(); }
Some(_) => {
value_repr = " value:?".to_string();
// FIXME #4954
}
}
let type_repr;
match import_resolution.target_for_namespace(TypeNS) {
None => { type_repr = "".to_string(); }
Some(_) => {
type_repr = " type:?".to_string();
// FIXME #4954
}
}
debug!("* {}:{}{}", token::get_name(name), value_repr, type_repr);
}
}
}
fn names_to_string(names: &[Name]) -> String {
let mut first = true;
let mut result = String::new();
for name in names {
if first {
first = false
} else {
result.push_str("::")
}
result.push_str(&token::get_name(*name));
};
result
}
fn path_names_to_string(path: &Path, depth: usize) -> String {
let names: Vec<ast::Name> = path.segments[..path.segments.len()-depth]
.iter()
.map(|seg| seg.identifier.name)
.collect();
names_to_string(&names[..])
}
/// A somewhat inefficient routine to obtain the name of a module.
fn module_to_string(module: &Module) -> String {
let mut names = Vec::new();
fn collect_mod(names: &mut Vec<ast::Name>, module: &Module) {
match module.parent_link {
NoParentLink => {}
ModuleParentLink(ref module, name) => {
names.push(name);
collect_mod(names, &*module.upgrade().unwrap());
}
BlockParentLink(ref module, _) => {
// danger, shouldn't be ident?
names.push(special_idents::opaque.name);
collect_mod(names, &*module.upgrade().unwrap());
}
}
}
collect_mod(&mut names, module);
if names.is_empty() {
return "???".to_string();
}
names_to_string(&names.into_iter().rev().collect::<Vec<ast::Name>>())
}
pub struct CrateMap {
pub def_map: DefMap,
pub freevars: RefCell<FreevarMap>,
pub export_map: ExportMap,
pub trait_map: TraitMap,
pub external_exports: ExternalExports,
pub glob_map: Option<GlobMap>
}
#[derive(PartialEq,Copy, Clone)]
pub enum MakeGlobMap {
Yes,
No
}
/// Entry point to crate resolution.
pub fn resolve_crate<'a, 'tcx>(session: &'a Session,
ast_map: &'a ast_map::Map<'tcx>,
_: &LanguageItems,
krate: &Crate,
make_glob_map: MakeGlobMap)
-> CrateMap {
let mut resolver = Resolver::new(session, ast_map, krate.span, make_glob_map);
build_reduced_graph::build_reduced_graph(&mut resolver, krate);
session.abort_if_errors();
resolve_imports::resolve_imports(&mut resolver);
session.abort_if_errors();
record_exports::record(&mut resolver);
session.abort_if_errors();
resolver.resolve_crate(krate);
session.abort_if_errors();
check_unused::check_crate(&mut resolver, krate);
CrateMap {
def_map: resolver.def_map,
freevars: resolver.freevars,
export_map: resolver.export_map,
trait_map: resolver.trait_map,
external_exports: resolver.external_exports,
glob_map: if resolver.make_glob_map {
Some(resolver.glob_map)
} else {
None
},
}
}
#[cfg(stage0)]
__build_diagnostic_array! { DIAGNOSTICS }
#[cfg(not(stage0))]
__build_diagnostic_array! { librustc_resolve, DIAGNOSTICS }
| 40.751212 | 99 | 0.448358 |
5dc448a3e6cbe27a8776712f7e3dd4b954a41c55 | 1,236 | fn iter<T>(v: ~[T], it: fn(T) -> bool) {
let mut i = 0u, l = v.len();
while i < l {
if !it(v[i]) { break; }
i += 1u;
}
}
fn find_pos<T>(n: T, h: ~[T]) -> option<uint> {
let mut i = 0u;
for iter(h) |e| {
if e == n { ret some(i); }
i += 1u;
}
none
}
fn bail_deep(x: ~[~[bool]]) {
let mut seen = false;
for iter(x) |x| {
for iter(x) |x| {
assert !seen;
if x { seen = true; ret; }
}
}
assert !seen;
}
fn ret_deep() -> ~str {
for iter(~[1, 2]) |e| {
for iter(~[3, 4]) |x| {
if e + x > 4 { ret ~"hi"; }
}
}
ret ~"bye";
}
fn main() {
let mut last = 0;
for vec::all(~[1, 2, 3, 4, 5, 6, 7]) |e| {
last = e;
if e == 5 { break; }
if e % 2 == 1 { again; }
assert e % 2 == 0;
};
assert last == 5;
assert find_pos(1, ~[0, 1, 2, 3]) == some(1u);
assert find_pos(1, ~[0, 4, 2, 3]) == none;
assert find_pos(~"hi", ~[~"foo", ~"bar", ~"baz", ~"hi"]) == some(3u);
bail_deep(~[~[false, false], ~[true, true], ~[false, true]]);
bail_deep(~[~[true]]);
bail_deep(~[~[false, false, false]]);
assert ret_deep() == ~"hi";
}
| 21.310345 | 73 | 0.400485 |
7acbdfebc10d2433f17075187678e35760d0c4ca | 3,279 | use std::{
fs,
io::Read,
net::TcpStream,
process::{Command, Stdio},
};
use ssh2::Session;
use ssh_config::SSHConfig;
pub struct SSHSession {
host: String,
session: Option<ssh2::Session>,
}
pub enum SSHErrorCode {
AgentError,
FileNotFound,
PubKeyAuthenticationDisabled,
AuthenticationFailed,
}
impl SSHSession {
pub fn new(host: String) -> Result<SSHSession, SSHErrorCode> {
let mut session = SSHSession {
host,
session: (None),
};
match session.reset() {
Ok(()) => Ok(session),
Err(e) => Err(e),
}
}
pub fn reset(&mut self) -> Result<(), SSHErrorCode> {
// ssh-add
// run ssh-add if no identities found
let status = Command::new("bash")
.arg("-c")
.arg("ssh-add -l; if [ $? -ne 0 ]; then ssh-add; fi")
.stdout(Stdio::null())
.stderr(Stdio::null())
.status()
.expect("failed to execute process");
if status.code().unwrap() != 0 {
return Err(SSHErrorCode::AgentError);
}
// Connect to the SSH server
let tcp = TcpStream::connect(format!("{}:22", self.host)).unwrap();
let mut sess = Session::new().unwrap();
sess.set_tcp_stream(tcp);
sess.handshake().unwrap();
if self.host == "localhost" {
let username = whoami::username();
// Try to authenticate with the first identity in the agent.
sess.userauth_agent(&username).unwrap();
} else {
// Parse ~/.ssh/config
let mut ssh_config_path = dirs::home_dir().expect("Could not find home dir");
ssh_config_path.push(".ssh");
ssh_config_path.push("config");
if !ssh_config_path.exists() {
return Err(SSHErrorCode::FileNotFound);
}
let ssh_config =
fs::read_to_string(ssh_config_path).expect("Could not read ~/.ssh/config");
let ssh_config =
SSHConfig::parse_str(&ssh_config).expect("Could not parse ~/.ssh/config");
let host = ssh_config.query(self.host.clone());
if host["PubKeyAuthentication"] != "yes" {
return Err(SSHErrorCode::PubKeyAuthenticationDisabled);
}
let username = host["User"].to_string();
// Try to authenticate with the first identity in the agent.
sess.userauth_agent(&username).unwrap();
}
// Make sure we succeeded
if !sess.authenticated() {
return Err(SSHErrorCode::AuthenticationFailed);
}
self.session = Some(sess);
Ok(())
}
pub fn shell(&mut self, cmd: &str) -> (i32, String) {
if self.session.is_none() {
let result = self.reset();
assert!(result.is_ok());
assert!(self.session.is_some());
}
let mut channel = self.session.as_ref().unwrap().channel_session().unwrap();
channel.exec(cmd).unwrap();
let mut s = String::new();
channel.read_to_string(&mut s).unwrap();
assert!(channel.wait_close().is_ok());
(channel.exit_status().unwrap(), s)
}
}
| 29.809091 | 91 | 0.541324 |
e6672f26af5da15883d68deb66b5b222459d4192 | 506 | #[allow(dead_code)]
use std::error::Error;
mod app;
mod args;
mod command;
mod config;
mod event_loop;
mod os_commands;
mod parse;
mod stateful_table;
mod template;
mod terminal_manager;
mod ui;
use app::App;
use args::Args;
use config::storage;
fn main() -> Result<(), Box<dyn Error>> {
let args = Args::new();
let config_path = storage::config_path()?;
let config = storage::prepare_config(&config_path)?;
let app = App::new(&config, config_path, args);
event_loop::run(app)?;
Ok(())
}
| 16.322581 | 54 | 0.683794 |
485930d665351db10720491f6949716ea7dd194b | 4,335 | //use async_trait::async_trait;
use crate::structs::wsfeed::{
InputMDMessage, InputOrderMessage, MarketDataMessage, OrderMessage, OrderStatus,
};
use crate::types::GError;
use crate::{structs::private::Payload, Private};
use futures::{future, Sink, Stream};
use futures_util::{sink::SinkExt, stream::TryStreamExt};
use serde::Serialize;
use tokio_tungstenite::{connect_async, tungstenite::Message as TMessage};
/// Zero-sized namespace type grouping the websocket connection constructors.
pub struct WSFeed;
/// Kinds of market-data subscriptions supported by the feed.
#[derive(Serialize, Debug, Clone, Copy)]
#[serde(rename_all = "snake_case")]
pub enum SubscriptionType {
    // Level-2 order book updates; serialized as "l2".
    L2,
}
/// One subscription entry: a feed type plus the symbols to watch.
#[derive(Serialize, Debug, Clone)]
pub struct Subscription {
    pub name: SubscriptionType,
    pub symbols: Vec<String>,
}
/// Top-level "subscribe" frame sent immediately after connecting.
#[derive(Serialize, Debug)]
struct Subscribe {
    // Serialized under the key "type"; always "subscribe" in this crate.
    #[serde(rename = "type")]
    sub_type: String,
    subscriptions: Vec<Subscription>,
}
fn convert_md_msg(msg: TMessage) -> MarketDataMessage {
match msg {
TMessage::Text(str) => serde_json::from_str::<InputMDMessage>(&str)
.map(|x| x.into())
.unwrap_or_else(|e| {
MarketDataMessage::InternalError(GError::SerdeDe {
error: e,
data: str,
})
}),
_ => unreachable!(), // filtered in stream
}
}
fn convert_order_msg(msg: TMessage) -> OrderMessage {
match msg {
TMessage::Text(str) => {
if let Ok(des) = serde_json::from_str::<InputOrderMessage>(&str) {
des.into()
} else {
// try to deserialize a list of order status messages
match serde_json::from_str::<Vec<OrderStatus>>(&str) {
Ok(orders) => OrderMessage::Orders(orders),
Err(e) => OrderMessage::InternalError(GError::SerdeDe {
error: e,
data: str,
}),
}
}
}
_ => unreachable!(), // filtered in stream
}
}
/// Alias trait for any sendable, unpinned stream of Gemini results.
pub trait GeminiStream<A: Sized>: Stream<Item = Result<A, GError>> + Unpin + Send {}

/// Blanket impl: every qualifying stream is automatically a `GeminiStream`.
impl<T, A> GeminiStream<A> for T where T: Stream<Item = Result<A, GError>> + Unpin + Send {}

/// Alias trait for any sendable, unpinned sink of websocket messages.
pub trait GeminiSink: Sink<TMessage> + Unpin + Send {}

/// Blanket impl: every qualifying sink is automatically a `GeminiSink`.
impl<T> GeminiSink for T where T: Sink<TMessage> + Unpin + Send {}
impl WSFeed {
    /// Open the public market-data websocket (`/v2/marketdata`) and send the
    /// subscription request before handing the combined stream/sink back.
    pub async fn connect_public_data(
        uri: &str,
        subscriptions: &[Subscription],
    ) -> Result<impl GeminiStream<MarketDataMessage> + GeminiSink, GError> {
        let url = uri.to_string() + "/v2/marketdata";
        let sub = Subscribe {
            sub_type: "subscribe".to_string(),
            subscriptions: subscriptions.to_vec(),
        };
        let (stream, _resp) = connect_async(url).await.map_err(GError::Websocket)?;
        let mut stream = stream
            // Only text frames carry data; drop ping/pong/binary frames.
            .try_filter(|msg| future::ready(msg.is_text()))
            .map_ok(convert_md_msg)
            .sink_map_err(GError::Websocket)
            .map_err(GError::Websocket);
        let subscribe_msg = serde_json::to_string(&sub).unwrap();
        stream.send(TMessage::text(subscribe_msg)).await?;
        Ok(stream)
    }
    /// Open the authenticated order-events websocket (`/v1/order/events`).
    ///
    /// The request is signed with the supplied API key/secret via the
    /// `X-GEMINI-PAYLOAD` / `X-GEMINI-SIGNATURE` headers.
    pub async fn connect_private_order_events(
        uri: &str,
        api_key: &str,
        api_secret: &str,
    ) -> Result<impl GeminiStream<OrderMessage>, GError> {
        let endpoint = "/v1/order/events";
        let url = uri.to_string() + endpoint;
        let payload = {
            let body = Payload::empty(&endpoint);
            let payload_str = serde_json::to_string(&body).map_err(GError::SerdeSer)?;
            base64::encode(&payload_str)
        };
        let signature = Private::sign(api_secret, &payload);
        let req = hyper::Request::builder()
            .uri(url)
            .header("Content-Type", "text/plain")
            .header("X-GEMINI-APIKEY", api_key)
            .header("X-GEMINI-PAYLOAD", &payload)
            .header("X-GEMINI-SIGNATURE", &signature)
            .body(())
            .unwrap();
        // BUGFIX: this previously mapped the connect error into `GError` and
        // then `.expect()`ed it, panicking on any connection failure even
        // though the function returns `Result`. Propagate it with `?` instead.
        let (stream, _resp) = connect_async(req).await.map_err(GError::Websocket)?;
        let stream = stream
            .try_filter(|msg| future::ready(msg.is_text()))
            .map_ok(convert_order_msg)
            .sink_map_err(GError::Websocket)
            .map_err(GError::Websocket);
        Ok(stream)
    }
}
| 32.111111 | 92 | 0.576009 |
1eb6b3aedd2719a9d5c5848377c1f599d8f07d2a | 14,597 | // Copyright (c) Microsoft. All rights reserved.
#![deny(rust_2018_idioms)]
#![warn(clippy::all, clippy::pedantic)]
#![allow(
clippy::let_and_return,
clippy::missing_errors_doc,
clippy::must_use_candidate,
clippy::shadow_unrelated
)]
/// Synchronous HTTP client for the aziot Keys service.
pub struct Client {
    // API version string appended as `?api-version=` to every request URI.
    api_version: aziot_key_common_http::ApiVersion,
    // Connection factory; a fresh connection is opened for each request.
    connector: http_common::Connector,
}
// Every method below opens a fresh connection via `self.connector`, issues a
// single HTTP request with the free function `request`, and returns the
// relevant field of the parsed JSON response.
impl Client {
    /// Construct a client speaking `api_version` over the given connector.
    pub fn new(
        api_version: aziot_key_common_http::ApiVersion,
        connector: http_common::Connector,
    ) -> Self {
        Client {
            api_version,
            connector,
        }
    }
    /// `POST /keypair` — create the asymmetric key pair `id` if it does not
    /// already exist, and return a handle to it.
    pub fn create_key_pair_if_not_exists(
        &self,
        id: &str,
        preferred_algorithms: Option<&str>,
    ) -> std::io::Result<aziot_key_common::KeyHandle> {
        let mut stream = self.connector.connect()?;
        let body = aziot_key_common_http::create_key_pair_if_not_exists::Request {
            id: id.to_owned(),
            preferred_algorithms: preferred_algorithms.map(ToOwned::to_owned),
        };
        let res: aziot_key_common_http::create_key_pair_if_not_exists::Response = request(
            &mut stream,
            &http::Method::POST,
            format_args!("/keypair?api-version={}", self.api_version),
            Some(&body),
        )?;
        Ok(res.handle)
    }
    /// `GET /keypair/{id}` — fetch a handle to an existing key pair.
    pub fn load_key_pair(&self, id: &str) -> std::io::Result<aziot_key_common::KeyHandle> {
        let mut stream = self.connector.connect()?;
        let res: aziot_key_common_http::load::Response = request::<_, (), _>(
            &mut stream,
            &http::Method::GET,
            format_args!(
                "/keypair/{}?api-version={}",
                // The id is user-supplied, so percent-encode it for the path.
                percent_encoding::percent_encode(
                    id.as_bytes(),
                    http_common::PATH_SEGMENT_ENCODE_SET
                ),
                self.api_version,
            ),
            None,
        )?;
        Ok(res.handle)
    }
    /// `POST /parameters/{name}` — read a public parameter (e.g. a public key
    /// component) of the key pair behind `handle`.
    pub fn get_key_pair_public_parameter(
        &self,
        handle: &aziot_key_common::KeyHandle,
        parameter_name: &str,
    ) -> std::io::Result<String> {
        let mut stream = self.connector.connect()?;
        let body = aziot_key_common_http::get_key_pair_public_parameter::Request {
            key_handle: handle.clone(),
        };
        let res: aziot_key_common_http::get_key_pair_public_parameter::Response = request(
            &mut stream,
            &http::Method::POST,
            format_args!(
                "/parameters/{}?api-version={}",
                percent_encoding::percent_encode(
                    parameter_name.as_bytes(),
                    http_common::PATH_SEGMENT_ENCODE_SET
                ),
                self.api_version,
            ),
            Some(&body),
        )?;
        Ok(res.value)
    }
    /// `POST /key` — create the symmetric key `id` if it does not exist.
    ///
    /// The key material is either generated service-side or imported from
    /// the bytes supplied in `value`.
    pub fn create_key_if_not_exists(
        &self,
        id: &str,
        value: aziot_key_common::CreateKeyValue,
        usage: &[aziot_key_common::KeyUsage],
    ) -> std::io::Result<aziot_key_common::KeyHandle> {
        let mut stream = self.connector.connect()?;
        let body = match value {
            aziot_key_common::CreateKeyValue::Generate => {
                aziot_key_common_http::create_key_if_not_exists::Request {
                    id: id.to_owned(),
                    import_key_bytes: None,
                    usage: usage.to_owned(),
                }
            }
            aziot_key_common::CreateKeyValue::Import { bytes } => {
                aziot_key_common_http::create_key_if_not_exists::Request {
                    id: id.to_owned(),
                    import_key_bytes: Some(http_common::ByteString(bytes)),
                    usage: usage.to_owned(),
                }
            }
        };
        let res: aziot_key_common_http::create_key_if_not_exists::Response = request(
            &mut stream,
            &http::Method::POST,
            format_args!("/key?api-version={}", self.api_version),
            Some(&body),
        )?;
        Ok(res.handle)
    }
    /// `GET /key/{id}` — fetch a handle to an existing symmetric key.
    pub fn load_key(&self, id: &str) -> std::io::Result<aziot_key_common::KeyHandle> {
        let mut stream = self.connector.connect()?;
        let res: aziot_key_common_http::load::Response = request::<_, (), _>(
            &mut stream,
            &http::Method::GET,
            format_args!(
                "/key/{}?api-version={}",
                percent_encoding::percent_encode(
                    id.as_bytes(),
                    http_common::PATH_SEGMENT_ENCODE_SET
                ),
                self.api_version,
            ),
            None,
        )?;
        Ok(res.handle)
    }
    /// `POST /derivedkey` — derive a new key from `base_handle` and the given
    /// derivation data, returning a handle to the derived key.
    pub fn create_derived_key(
        &self,
        base_handle: &aziot_key_common::KeyHandle,
        derivation_data: &[u8],
    ) -> std::io::Result<aziot_key_common::KeyHandle> {
        let mut stream = self.connector.connect()?;
        let body = aziot_key_common_http::create_derived_key::Request {
            base_handle: base_handle.clone(),
            derivation_data: http_common::ByteString(derivation_data.to_owned()),
        };
        let res: aziot_key_common_http::create_derived_key::Response = request(
            &mut stream,
            &http::Method::POST,
            format_args!("/derivedkey?api-version={}", self.api_version),
            Some(&body),
        )?;
        Ok(res.handle)
    }
    /// `POST /derivedkey/export` — export the raw bytes of a derived key.
    pub fn export_derived_key(
        &self,
        handle: &aziot_key_common::KeyHandle,
    ) -> std::io::Result<Vec<u8>> {
        let mut stream = self.connector.connect()?;
        let body = aziot_key_common_http::export_derived_key::Request {
            handle: handle.clone(),
        };
        let res: aziot_key_common_http::export_derived_key::Response = request(
            &mut stream,
            &http::Method::POST,
            format_args!("/derivedkey/export?api-version={}", self.api_version),
            Some(&body),
        )?;
        Ok(res.key.0)
    }
    /// `POST /sign` — sign `digest` with the key behind `handle` using the
    /// requested mechanism (ECDSA over a digest, or HMAC-SHA256 over a
    /// message), returning the raw signature bytes.
    pub fn sign(
        &self,
        handle: &aziot_key_common::KeyHandle,
        mechanism: aziot_key_common::SignMechanism,
        digest: &[u8],
    ) -> std::io::Result<Vec<u8>> {
        let mut stream = self.connector.connect()?;
        let body = aziot_key_common_http::sign::Request {
            key_handle: handle.clone(),
            parameters: match mechanism {
                aziot_key_common::SignMechanism::Ecdsa => {
                    aziot_key_common_http::sign::Parameters::Ecdsa {
                        digest: http_common::ByteString(digest.to_owned()),
                    }
                }
                aziot_key_common::SignMechanism::HmacSha256 => {
                    aziot_key_common_http::sign::Parameters::HmacSha256 {
                        message: http_common::ByteString(digest.to_owned()),
                    }
                }
            },
        };
        let res: aziot_key_common_http::sign::Response = request(
            &mut stream,
            &http::Method::POST,
            format_args!("/sign?api-version={}", self.api_version),
            Some(&body),
        )?;
        let signature = res.signature.0;
        Ok(signature)
    }
    /// `POST /encrypt` — encrypt `plaintext` with the key behind `handle`
    /// using the requested mechanism, returning the ciphertext bytes.
    pub fn encrypt(
        &self,
        handle: &aziot_key_common::KeyHandle,
        mechanism: aziot_key_common::EncryptMechanism,
        plaintext: &[u8],
    ) -> std::io::Result<Vec<u8>> {
        let mut stream = self.connector.connect()?;
        let body = aziot_key_common_http::encrypt::Request {
            key_handle: handle.clone(),
            parameters: match mechanism {
                aziot_key_common::EncryptMechanism::Aead { iv, aad } => {
                    aziot_key_common_http::encrypt::Parameters::Aead {
                        iv: http_common::ByteString(iv),
                        aad: http_common::ByteString(aad),
                    }
                }
                aziot_key_common::EncryptMechanism::RsaPkcs1 => {
                    aziot_key_common_http::encrypt::Parameters::RsaPkcs1
                }
                aziot_key_common::EncryptMechanism::RsaNoPadding => {
                    aziot_key_common_http::encrypt::Parameters::RsaNoPadding
                }
            },
            plaintext: http_common::ByteString(plaintext.to_owned()),
        };
        let res: aziot_key_common_http::encrypt::Response = request(
            &mut stream,
            &http::Method::POST,
            format_args!("/encrypt?api-version={}", self.api_version),
            Some(&body),
        )?;
        let ciphertext = res.ciphertext.0;
        Ok(ciphertext)
    }
    /// `POST /decrypt` — decrypt `ciphertext` with the key behind `handle`
    /// using the requested mechanism, returning the plaintext bytes.
    pub fn decrypt(
        &self,
        handle: &aziot_key_common::KeyHandle,
        mechanism: aziot_key_common::EncryptMechanism,
        ciphertext: &[u8],
    ) -> std::io::Result<Vec<u8>> {
        let mut stream = self.connector.connect()?;
        let body = aziot_key_common_http::decrypt::Request {
            key_handle: handle.clone(),
            parameters: match mechanism {
                aziot_key_common::EncryptMechanism::Aead { iv, aad } => {
                    aziot_key_common_http::encrypt::Parameters::Aead {
                        iv: http_common::ByteString(iv),
                        aad: http_common::ByteString(aad),
                    }
                }
                aziot_key_common::EncryptMechanism::RsaPkcs1 => {
                    aziot_key_common_http::encrypt::Parameters::RsaPkcs1
                }
                aziot_key_common::EncryptMechanism::RsaNoPadding => {
                    aziot_key_common_http::encrypt::Parameters::RsaNoPadding
                }
            },
            ciphertext: http_common::ByteString(ciphertext.to_owned()),
        };
        let res: aziot_key_common_http::decrypt::Response = request(
            &mut stream,
            &http::Method::POST,
            format_args!("/decrypt?api-version={}", self.api_version),
            Some(&body),
        )?;
        let plaintext = res.plaintext.0;
        Ok(plaintext)
    }
}
impl std::fmt::Debug for Client {
    /// Hand-written so no field contents appear in debug output; prints just
    /// the type name, exactly like `debug_struct("Client").finish()`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "Client")
    }
}
/// Write a single HTTP/1.1 request over `stream` and parse the JSON response.
///
/// When `body` is present it is serialized as JSON and sent with
/// `content-length`/`content-type` headers. The response buffer is grown and
/// re-parsed (via `try_parse_response`) until a complete message arrives.
/// A 200 response is deserialized into `TResponse`; 4xx/5xx responses are
/// turned into `std::io::Error`s carrying the server's error message.
fn request<TUri, TRequest, TResponse>(
    stream: &mut http_common::Stream,
    method: &http::Method,
    uri: TUri,
    body: Option<&TRequest>,
) -> std::io::Result<TResponse>
where
    TUri: std::fmt::Display,
    TRequest: serde::Serialize,
    TResponse: serde::de::DeserializeOwned,
{
    use std::io::{Read, Write};
    write!(
        stream,
        "{method} {uri} HTTP/1.1\r\n",
        method = method,
        uri = uri
    )?;
    if let Some(body) = body {
        let body =
            serde_json::to_string(body).expect("serializing request body to JSON cannot fail");
        let body_len = body.len();
        write!(
            stream,
            "\
            content-length: {body_len}\r\n\
            content-type: application/json\r\n\
            \r\n\
            {body}\
            ",
            body_len = body_len,
            body = body,
        )?;
    } else {
        stream.write_all(b"\r\n")?;
    }
    let mut buf = vec![0_u8; 512];
    let mut read_so_far = 0;
    let (res_status_code, body) = loop {
        // Retry reads interrupted by signals; propagate real I/O errors.
        let new_read = loop {
            match stream.read(&mut buf[read_so_far..]) {
                Ok(new_read) => break new_read,
                Err(err) if err.kind() == std::io::ErrorKind::Interrupted => (),
                Err(err) => return Err(err),
            }
        };
        read_so_far += new_read;
        if let Some((res_status_code, body)) = try_parse_response(&buf[..read_so_far], new_read)? {
            break (res_status_code, body);
        }
        // Buffer is full but the response is incomplete: double it and
        // keep reading.
        if read_so_far == buf.len() {
            buf.resize(buf.len() * 2, 0_u8);
        }
    };
    let res: TResponse = match res_status_code {
        Some(200) => {
            let res = serde_json::from_slice(body)
                .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))?;
            res
        }
        // Client/server errors carry a JSON error body; surface its message.
        Some(400..=499) | Some(500..=599) => {
            let res: http_common::ErrorBody<'static> = serde_json::from_slice(body)
                .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))?;
            return Err(std::io::Error::new(std::io::ErrorKind::Other, res.message));
        }
        Some(_) | None => {
            return Err(std::io::Error::new(
                std::io::ErrorKind::Other,
                "malformed HTTP response",
            ))
        }
    };
    Ok(res)
}
/// Try to parse `buf` as a complete HTTP response.
///
/// Returns `Ok(None)` when more data is needed, or `Ok(Some((status, body)))`
/// once the headers are complete and the body (per `content-length`, or until
/// EOF when absent) has been fully received. `new_read` is the number of
/// bytes the caller obtained on its most recent read; `0` indicates EOF.
fn try_parse_response(
    buf: &[u8],
    new_read: usize,
) -> std::io::Result<Option<(Option<u16>, &[u8])>> {
    let mut headers = [httparse::EMPTY_HEADER; 16];
    let mut res = httparse::Response::new(&mut headers);
    let body_start_pos = match res.parse(&buf) {
        Ok(httparse::Status::Complete(body_start_pos)) => body_start_pos,
        // BUGFIX: these two arms were swapped. EOF (`new_read == 0`) while
        // the headers are still incomplete means the response was truncated
        // and must be an error, whereas a partial parse with more data still
        // flowing just means the caller should read more. The old order made
        // any response split across reads fail with UnexpectedEof and made
        // EOF-mid-headers loop forever.
        Ok(httparse::Status::Partial) if new_read == 0 => {
            return Err(std::io::ErrorKind::UnexpectedEof.into())
        }
        Ok(httparse::Status::Partial) => return Ok(None),
        Err(err) => return Err(std::io::Error::new(std::io::ErrorKind::Other, err)),
    };
    let res_status_code = res.code;
    let mut content_length = None;
    let mut is_json = false;
    for header in &headers {
        if header.name.eq_ignore_ascii_case("content-length") {
            let value = std::str::from_utf8(header.value)
                .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))?;
            let value: usize = value
                .parse()
                .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))?;
            content_length = Some(value);
        } else if header.name.eq_ignore_ascii_case("content-type") {
            let value = std::str::from_utf8(header.value)
                .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))?;
            if value == "application/json" {
                is_json = true;
            }
        }
    }
    // Every response from this service is expected to be JSON.
    if !is_json {
        return Err(std::io::Error::new(
            std::io::ErrorKind::Other,
            "malformed HTTP response",
        ));
    }
    let body = &buf[body_start_pos..];
    let body = if let Some(content_length) = content_length {
        if body.len() < content_length {
            // Headers complete, but the declared body has not fully arrived.
            return Ok(None);
        } else {
            &body[..content_length]
        }
    } else {
        // Without a content-length, read until there's no more to read.
        if new_read == 0 {
            body
        } else {
            return Ok(None);
        }
    };
    Ok(Some((res_status_code, body)))
}
| 32.151982 | 99 | 0.536001 |
e684ac1fec9f599af54e28f14eec8a39b5046902 | 4,716 | use ndarray::prelude::*;
use num_traits::AsPrimitive;
use crate::ops::prelude::*;
/// Operator that returns the shape of its input as a rank-1 tensor.
#[derive(Debug, Clone, new)]
pub struct Shape {
    // Integer datum type the resulting shape tensor is emitted as.
    dt: DatumType,
}
impl Shape {
    /// Build a rank-1 tensor holding `shape`, with each dimension cast to `T`.
    pub fn coerce_to<T>(shape: &[usize]) -> TractResult<SharedTensor>
    where
        T: Copy + Datum,
        usize: AsPrimitive<T>,
    {
        let casted: Vec<T> = shape.iter().map(|dim| dim.as_()).collect();
        Ok(Array1::from_vec(casted).into())
    }
}
impl Op for Shape {
fn name(&self) -> Cow<str> {
"Shape".into()
}
}
impl StatelessOp for Shape {
    /// Evaluates the operation given the input tensors.
    ///
    /// Reads the first input's shape and materializes it as a rank-1 tensor
    /// of type `self.dt` via `coerce_to`.
    fn eval(&self, inputs: TVec<SharedTensor>) -> TractResult<TVec<SharedTensor>> {
        let shape = inputs[0].shape();
        // dispatch_numbers! selects the concrete `coerce_to::<T>`
        // instantiation matching `self.dt` (macro defined elsewhere in the
        // crate — presumably covering the numeric datum types).
        Ok(tvec![dispatch_numbers!(Self::coerce_to(self.dt)(&shape))?])
    }
}
impl InferenceRulesOp for Shape {
    /// Register type/shape inference rules relating the input tensor to the
    /// emitted shape tensor.
    fn rules<'r, 'p: 'r, 's: 'r>(
        &'s self,
        s: &mut Solver<'r>,
        inputs: &'p [TensorProxy],
        outputs: &'p [TensorProxy],
    ) -> InferenceResult {
        check_input_arity(&inputs, 1)?;
        check_output_arity(&outputs, 1)?;
        // The output is always rank 1...
        s.equals(&outputs[0].rank, 1)?;
        // ...its length equals the input's rank (forward direction)...
        s.given(&inputs[0].rank, move |s, r| {
            s.equals(&outputs[0].shape[0], r.to_dim())
        })?;
        // ...and conversely a concrete output length fixes the input rank.
        s.given(&outputs[0].shape[0], move |s, r| {
            if let Ok(d) = r.to_integer() {
                s.equals(&inputs[0].rank, d)?;
            }
            Ok(())
        })?;
        // Once the full input shape is known, the output *value* is known.
        s.given(&inputs[0].shape, move |s, shape| {
            if shape.iter().any(|&d| d.to_integer().is_err()) {
                // Symbolic dimensions present: emit TDim values regardless
                // of the requested datum type.
                s.equals(&outputs[0].datum_type, DatumType::TDim)?;
                let array1: Array1<TDim> = Array1::from_iter(shape);
                let tensor: SharedTensor = array1.into();
                s.equals(&outputs[0].value, tensor)
            } else if self.dt == DatumType::I64 {
                s.equals(&outputs[0].datum_type, DatumType::I64)?;
                let array1: Array1<i64> = Array1::from_vec(
                    shape
                        .iter()
                        .map(|&i| i.to_integer().unwrap() as i64)
                        .collect(),
                );
                let tensor: SharedTensor = array1.into();
                s.equals(&outputs[0].value, tensor)
            } else {
                // Any other requested datum type falls back to i32.
                s.equals(&outputs[0].datum_type, DatumType::I32)?;
                let array1: Array1<i32> = Array1::from_vec(
                    shape
                        .iter()
                        .map(|&i| i.to_integer().unwrap() as i32)
                        .collect(),
                );
                let tensor: SharedTensor = array1.into();
                s.equals(&outputs[0].value, tensor)
            }
        })
    }
}
/*
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn shape_inference_1() {
let input = TensorFact {
datum_type: typefact!(DatumType::F32),
shape: shapefact![1, _, _; ..],
value: valuefact!(_),
};
let output = TensorFact {
datum_type: typefact!(DatumType::TDim),
shape: shapefact![_],
value: valuefact!(_),
};
assert_forward!(Shape::new(DatumType::I32), input, output);
}
#[test]
fn shape_inference_2() {
let input = TensorFact {
datum_type: typefact!(DatumType::F32),
shape: shapefact![1, _, _],
value: valuefact!(_),
};
let output = TensorFact {
datum_type: typefact!(DatumType::TDim),
shape: shapefact![3],
value: valuefact!(_),
};
assert_forward!(Shape::new(DatumType::I32), input, output);
}
#[test]
fn shape_inference_3() {
let input = TensorFact {
datum_type: typefact!(DatumType::F32),
shape: shapefact![1, 2, 3],
value: valuefact!(_),
};
let output = TensorFact {
datum_type: typefact!(DatumType::TDim),
shape: shapefact![3],
value: valuefact!(Tensor::dims(&[3], &[1.to_dim(), 2.to_dim(), 3.to_dim()]).unwrap()),
};
assert_forward!(Shape::new(DatumType::I32), input, output);
}
#[test]
fn shape_inference_4() {
let input = TensorFact {
datum_type: typefact!(_),
shape: shapefact![1, 2, 3],
value: valuefact!(_),
};
let output = TensorFact {
datum_type: typefact!(DatumType::TDim),
shape: shapefact![3],
value: valuefact!(Tensor::dims(&[3], &[1.to_dim(), 2.to_dim(), 3.to_dim()]).unwrap()),
};
assert_backward!(Shape::new(DatumType::I32), input, output);
}
}
*/
| 29.475 | 98 | 0.493851 |
112b4e262bf0446dbf1ce95ba53550ac27de2226 | 4,017 | use std::borrow::Cow;
use std::fmt;
use crate::asn::{AsPath, Asn};
use crate::record::MergeUpdate;
use super::PrefixNlri;
/// BGP message metadata.
#[derive(Clone, Debug)]
pub struct BgpNlriMeta<'a> {
    // Network-layer reachability information this metadata belongs to.
    pub nlri: PrefixNlri,
    // Path attributes; `Cow` lets a borrowed attribute set be shared until a
    // merge needs an owned, mutable copy (via `to_mut`).
    pub attributes: Cow<'a, ExampleBgpPathAttributes>,
}
impl<'a> std::fmt::Display for BgpNlriMeta<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}, {}", self.nlri, self.attributes)
}
}
impl<'a> MergeUpdate for BgpNlriMeta<'a> {
    /// Merge `update_meta`'s path attributes into this record in place.
    /// The NLRI itself is left untouched.
    fn merge_update(
        &mut self,
        update_meta: Self,
    ) -> Result<(), Box<dyn std::error::Error>> {
        let incoming = update_meta.attributes.into_owned();
        self.attributes.to_mut().merge_update(incoming)
    }
    /// Non-destructive variant: clone this record, then merge the update
    /// into the clone.
    fn clone_merge_update(
        &self,
        update_meta: &Self,
    ) -> Result<Self, Box<dyn std::error::Error>>
    where
        Self: std::marker::Sized,
    {
        let mut merged = self.clone();
        merged.merge_update(update_meta.clone())?;
        Ok(merged)
    }
}
/// Example BGP Path Attributes
/// <https://tools.ietf.org/html/rfc4271#section-4.3>
/// TODO TODO!
#[derive(Clone, Debug)]
pub struct ExampleBgpPathAttributes {
    pub origin: Asn,
    pub as_path: AsPath<Vec<Asn>>,
    pub next_hop: std::net::IpAddr,
    // Multi-exit discriminator.
    pub med: u32,
    pub local_pref: u32,
    pub atomic_aggregate: bool,
    pub aggregator: Option<(Asn, u32)>,
    pub community: Vec<u32>,
    pub ext_community: Vec<u32>,
    pub large_community: Vec<u32>,
    pub originator_id: Option<String>,
    pub cluster_list: Vec<u32>,
    pub mp_reach_nlri: Option<PrefixNlri>,
    pub mp_unreach_nlri: Option<PrefixNlri>,
    // Accumulated IGP metric attribute value.
    pub aigp: u64,
    // Raw bytes of attributes this example model does not interpret.
    pub unknown: Vec<u8>,
}
impl fmt::Display for ExampleBgpPathAttributes {
    // Renders every attribute on a single line; the format string must stay
    // in sync with the field list of `ExampleBgpPathAttributes`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "Origin: {}, AS_PATH: {}, NEXT_HOP: {}, MED: {}, LOCAL_PREF: {}, ATOMIC_AGGREGATE: {}, AGGREGATOR: {:?}, COMMUNITY: {:?}, EXT_COMMUNITY: {:?}, LARGE_COMMUNITY: {:?}, ORIGINATOR_ID: {:?}, CLUSTER_LIST: {:?}, MP_REACH_NLRI: {:?}, MP_UNREACH_NLRI: {:?}, AIGP: {}, UNKNOWN: {:?}",
            self.origin,
            self.as_path,
            self.next_hop,
            self.med,
            self.local_pref,
            self.atomic_aggregate,
            self.aggregator,
            self.community,
            self.ext_community,
            self.large_community,
            self.originator_id,
            self.cluster_list,
            self.mp_reach_nlri,
            self.mp_unreach_nlri,
            self.aigp,
            self.unknown
        )
    }
}
impl MergeUpdate for ExampleBgpPathAttributes {
    /// Replace every attribute with the values from `update_meta`.
    ///
    /// The original assigned all sixteen fields one by one, which is exactly
    /// a whole-struct assignment — expressed as such here.
    fn merge_update(
        &mut self,
        update_meta: Self,
    ) -> Result<(), Box<dyn std::error::Error>> {
        *self = update_meta;
        Ok(())
    }
    /// Non-destructive variant: clone this attribute set, then merge the
    /// update into the clone.
    fn clone_merge_update(
        &self,
        update_meta: &Self,
    ) -> Result<Self, Box<dyn std::error::Error>>
    where
        Self: std::marker::Sized,
    {
        let mut merged = self.clone();
        merged.merge_update(update_meta.clone())?;
        Ok(merged)
    }
}
| 30.431818 | 288 | 0.60244 |
677c54667be82065673b4dfc85935d1443ec1156 | 376 | #![deny(warnings)]
extern crate version_check as rustc;
/// Build script: probe the active toolchain and, when feature flags are
/// usable (nightly/dev), turn on the `map_first_last` cfg.
fn main() {
    println!("cargo:rerun-if-changed=build.rs");
    // `is_feature_flaggable()` yields `Some(true)` only when the compiler
    // accepts `#![feature(...)]` attributes.
    if rustc::is_feature_flaggable() == Some(true) {
        println!("cargo:rustc-cfg=feature=\"map_first_last\"");
    }
}
| 26.857143 | 71 | 0.646277 |
16ecbf4afedc0bfa863dbd4514e4cbb03a67df5f | 11,073 | use super::{types::*, PK_LEN, SECRET_PREFIX};
use crate::Error;
use account_utils::ZeroizeString;
use bytes::Bytes;
use libsecp256k1::{Message, PublicKey, Signature};
use reqwest::{
header::{HeaderMap, HeaderValue},
IntoUrl,
};
use ring::digest::{digest, SHA256};
use sensitive_url::SensitiveUrl;
use serde::{de::DeserializeOwned, Serialize};
pub use reqwest;
pub use reqwest::{Response, StatusCode, Url};
/// A wrapper around `reqwest::Client` which provides convenience methods for interfacing with a
/// Lighthouse Validator Client HTTP server (`validator_client/src/http_api`).
#[derive(Clone)]
pub struct ValidatorClientHttpClient {
    client: reqwest::Client,
    // Base URL of the VC HTTP server (wrapped so it is not logged verbatim).
    server: SensitiveUrl,
    // API token sent in the `Authorization: Basic` header.
    secret: ZeroizeString,
    // secp256k1 public key (derived from the token) used to verify the
    // `Signature` header on every response.
    server_pubkey: PublicKey,
    // Test hook: when false, the `Authorization` header is omitted.
    send_authorization_header: bool,
}
/// Parse an API token and return a secp256k1 public key.
pub fn parse_pubkey(secret: &str) -> Result<PublicKey, Error> {
let secret = if !secret.starts_with(SECRET_PREFIX) {
return Err(Error::InvalidSecret(format!(
"secret does not start with {}",
SECRET_PREFIX
)));
} else {
&secret[SECRET_PREFIX.len()..]
};
eth2_serde_utils::hex::decode(secret)
.map_err(|e| Error::InvalidSecret(format!("invalid hex: {:?}", e)))
.and_then(|bytes| {
if bytes.len() != PK_LEN {
return Err(Error::InvalidSecret(format!(
"expected {} bytes not {}",
PK_LEN,
bytes.len()
)));
}
let mut arr = [0; PK_LEN];
arr.copy_from_slice(&bytes);
PublicKey::parse_compressed(&arr)
.map_err(|e| Error::InvalidSecret(format!("invalid secp256k1 pubkey: {:?}", e)))
})
}
impl ValidatorClientHttpClient {
    /// Build a client with a fresh `reqwest::Client`; the secret is parsed
    /// into the server's expected public key up front.
    pub fn new(server: SensitiveUrl, secret: String) -> Result<Self, Error> {
        Ok(Self {
            client: reqwest::Client::new(),
            server,
            server_pubkey: parse_pubkey(&secret)?,
            secret: secret.into(),
            send_authorization_header: true,
        })
    }
    /// Build a client from a caller-supplied `reqwest::Client` (e.g. one
    /// with custom timeouts or proxies).
    pub fn from_components(
        server: SensitiveUrl,
        client: reqwest::Client,
        secret: String,
    ) -> Result<Self, Error> {
        Ok(Self {
            client,
            server,
            server_pubkey: parse_pubkey(&secret)?,
            secret: secret.into(),
            send_authorization_header: true,
        })
    }
    /// Set to `false` to disable sending the `Authorization` header on requests.
    ///
    /// Failing to send the `Authorization` header will cause the VC to reject requests with a 403.
    /// This function is intended only for testing purposes.
    pub fn send_authorization_header(&mut self, should_send: bool) {
        self.send_authorization_header = should_send;
    }
    /// Read the response body and verify the server's `Signature` header
    /// (a DER secp256k1 signature over the SHA-256 of the body) against
    /// `self.server_pubkey` before returning the bytes.
    async fn signed_body(&self, response: Response) -> Result<Bytes, Error> {
        let sig = response
            .headers()
            .get("Signature")
            .ok_or(Error::MissingSignatureHeader)?
            .to_str()
            .map_err(|_| Error::InvalidSignatureHeader)?
            .to_string();
        let body = response.bytes().await.map_err(Error::Reqwest)?;
        let message =
            Message::parse_slice(digest(&SHA256, &body).as_ref()).expect("sha256 is 32 bytes");
        eth2_serde_utils::hex::decode(&sig)
            .ok()
            .and_then(|bytes| {
                let sig = Signature::parse_der(&bytes).ok()?;
                Some(libsecp256k1::verify(&message, &sig, &self.server_pubkey))
            })
            // Any decode/parse failure or a failed verification is rejected.
            .filter(|is_valid| *is_valid)
            .ok_or(Error::InvalidSignatureHeader)?;
        Ok(body)
    }
    /// Verify the response signature, then deserialize the body as JSON.
    async fn signed_json<T: DeserializeOwned>(&self, response: Response) -> Result<T, Error> {
        let body = self.signed_body(response).await?;
        serde_json::from_slice(&body).map_err(Error::InvalidJson)
    }
    /// Build the headers for a request: the `Authorization: Basic <token>`
    /// header unless it has been disabled for testing.
    fn headers(&self) -> Result<HeaderMap, Error> {
        let mut headers = HeaderMap::new();
        if self.send_authorization_header {
            let header_value = HeaderValue::from_str(&format!("Basic {}", self.secret.as_str()))
                .map_err(|e| {
                    Error::InvalidSecret(format!("secret is invalid as a header value: {}", e))
                })?;
            headers.insert("Authorization", header_value);
        }
        Ok(headers)
    }
    /// Perform a HTTP GET request.
    async fn get<T: DeserializeOwned, U: IntoUrl>(&self, url: U) -> Result<T, Error> {
        let response = self
            .client
            .get(url)
            .headers(self.headers()?)
            .send()
            .await
            .map_err(Error::Reqwest)?;
        let response = ok_or_error(response).await?;
        self.signed_json(response).await
    }
    /// Perform a HTTP GET request, returning `None` on a 404 error.
    async fn get_opt<T: DeserializeOwned, U: IntoUrl>(&self, url: U) -> Result<Option<T>, Error> {
        let response = self
            .client
            .get(url)
            .headers(self.headers()?)
            .send()
            .await
            .map_err(Error::Reqwest)?;
        match ok_or_error(response).await {
            Ok(resp) => self.signed_json(resp).await.map(Option::Some),
            Err(err) => {
                if err.status() == Some(StatusCode::NOT_FOUND) {
                    Ok(None)
                } else {
                    Err(err)
                }
            }
        }
    }
    /// Perform a HTTP POST request.
    async fn post<T: Serialize, U: IntoUrl, V: DeserializeOwned>(
        &self,
        url: U,
        body: &T,
    ) -> Result<V, Error> {
        let response = self
            .client
            .post(url)
            .headers(self.headers()?)
            .json(body)
            .send()
            .await
            .map_err(Error::Reqwest)?;
        let response = ok_or_error(response).await?;
        self.signed_json(response).await
    }
    /// Perform a HTTP PATCH request.
    async fn patch<T: Serialize, U: IntoUrl>(&self, url: U, body: &T) -> Result<(), Error> {
        let response = self
            .client
            .patch(url)
            .headers(self.headers()?)
            .json(body)
            .send()
            .await
            .map_err(Error::Reqwest)?;
        let response = ok_or_error(response).await?;
        // No payload expected; still verify the response signature.
        self.signed_body(response).await?;
        Ok(())
    }
    /// `GET lighthouse/version`
    pub async fn get_lighthouse_version(&self) -> Result<GenericResponse<VersionData>, Error> {
        let mut path = self.server.full.clone();
        path.path_segments_mut()
            .map_err(|()| Error::InvalidUrl(self.server.clone()))?
            .push("lighthouse")
            .push("version");
        self.get(path).await
    }
    /// `GET lighthouse/health`
    pub async fn get_lighthouse_health(&self) -> Result<GenericResponse<Health>, Error> {
        let mut path = self.server.full.clone();
        path.path_segments_mut()
            .map_err(|()| Error::InvalidUrl(self.server.clone()))?
            .push("lighthouse")
            .push("health");
        self.get(path).await
    }
    /// `GET lighthouse/spec`
    pub async fn get_lighthouse_spec(&self) -> Result<GenericResponse<ConfigAndPreset>, Error> {
        let mut path = self.server.full.clone();
        path.path_segments_mut()
            .map_err(|()| Error::InvalidUrl(self.server.clone()))?
            .push("lighthouse")
            .push("spec");
        self.get(path).await
    }
    /// `GET lighthouse/validators`
    pub async fn get_lighthouse_validators(
        &self,
    ) -> Result<GenericResponse<Vec<ValidatorData>>, Error> {
        let mut path = self.server.full.clone();
        path.path_segments_mut()
            .map_err(|()| Error::InvalidUrl(self.server.clone()))?
            .push("lighthouse")
            .push("validators");
        self.get(path).await
    }
    /// `GET lighthouse/validators/{validator_pubkey}`
    pub async fn get_lighthouse_validators_pubkey(
        &self,
        validator_pubkey: &PublicKeyBytes,
    ) -> Result<Option<GenericResponse<ValidatorData>>, Error> {
        let mut path = self.server.full.clone();
        path.path_segments_mut()
            .map_err(|()| Error::InvalidUrl(self.server.clone()))?
            .push("lighthouse")
            .push("validators")
            .push(&validator_pubkey.to_string());
        self.get_opt(path).await
    }
    /// `POST lighthouse/validators`
    pub async fn post_lighthouse_validators(
        &self,
        validators: Vec<ValidatorRequest>,
    ) -> Result<GenericResponse<PostValidatorsResponseData>, Error> {
        let mut path = self.server.full.clone();
        path.path_segments_mut()
            .map_err(|()| Error::InvalidUrl(self.server.clone()))?
            .push("lighthouse")
            .push("validators");
        self.post(path, &validators).await
    }
    /// `POST lighthouse/validators/mnemonic`
    pub async fn post_lighthouse_validators_mnemonic(
        &self,
        request: &CreateValidatorsMnemonicRequest,
    ) -> Result<GenericResponse<Vec<CreatedValidator>>, Error> {
        let mut path = self.server.full.clone();
        path.path_segments_mut()
            .map_err(|()| Error::InvalidUrl(self.server.clone()))?
            .push("lighthouse")
            .push("validators")
            .push("mnemonic");
        self.post(path, &request).await
    }
    /// `POST lighthouse/validators/keystore`
    pub async fn post_lighthouse_validators_keystore(
        &self,
        request: &KeystoreValidatorsPostRequest,
    ) -> Result<GenericResponse<ValidatorData>, Error> {
        let mut path = self.server.full.clone();
        path.path_segments_mut()
            .map_err(|()| Error::InvalidUrl(self.server.clone()))?
            .push("lighthouse")
            .push("validators")
            .push("keystore");
        self.post(path, &request).await
    }
    /// `PATCH lighthouse/validators/{validator_pubkey}`
    pub async fn patch_lighthouse_validators(
        &self,
        voting_pubkey: &PublicKeyBytes,
        enabled: bool,
    ) -> Result<(), Error> {
        let mut path = self.server.full.clone();
        path.path_segments_mut()
            .map_err(|()| Error::InvalidUrl(self.server.clone()))?
            .push("lighthouse")
            .push("validators")
            .push(&voting_pubkey.to_string());
        self.patch(path, &ValidatorPatchRequest { enabled }).await
    }
}
/// Returns `Ok(response)` if the response is a `200 OK` response. Otherwise, creates an
/// appropriate error message.
async fn ok_or_error(response: Response) -> Result<Response, Error> {
let status = response.status();
if status == StatusCode::OK {
Ok(response)
} else if let Ok(message) = response.json().await {
Err(Error::ServerMessage(message))
} else {
Err(Error::StatusCode(status))
}
}
| 31.910663 | 99 | 0.571661 |
561f7be7862c1eb1ea3ba5de57898d1c6652a9a0 | 3,091 | use crate::model::BinaryContent;
use crate::repository::db::DBFileStoreBinaryRepository;
use c3p0::postgres::*;
use lightspeed_core::error::{ErrorCodes, LightSpeedError};
use std::borrow::Cow;
use tokio::fs::File;
use tokio::io::AsyncReadExt;
/// Postgres-backed repository storing file binary contents in a single table.
#[derive(Clone)]
pub struct PgFileStoreBinaryRepository {
    // Table holding (repository, filepath, data) rows.
    table_name: &'static str,
}
impl Default for PgFileStoreBinaryRepository {
fn default() -> Self {
PgFileStoreBinaryRepository { table_name: "LS_FILE_STORE_BINARY" }
}
}
#[async_trait::async_trait]
impl DBFileStoreBinaryRepository for PgFileStoreBinaryRepository {
    type Conn = PgConnection;
    /// Load a file's bytes from the table, keyed by (repository, filepath).
    ///
    /// Always returns the in-memory variant; errors if the row is missing.
    async fn read_file<'a>(
        &self,
        conn: &mut Self::Conn,
        repository_name: &str,
        file_path: &str,
    ) -> Result<BinaryContent<'a>, LightSpeedError> {
        let sql = &format!("SELECT DATA FROM {} WHERE repository = $1 AND filepath = $2", self.table_name);
        let content = conn
            .fetch_one(sql, &[&repository_name, &file_path], |row| {
                let content: Vec<u8> = row.try_get(0).map_err(into_c3p0_error)?;
                Ok(content)
            })
            .await?;
        Ok(BinaryContent::InMemory { content: Cow::Owned(content) })
    }
    /// Insert a file's bytes, reading them from disk first when the caller
    /// passed a filesystem reference. Returns the number of affected rows.
    async fn save_file<'a>(
        &self,
        conn: &mut Self::Conn,
        repository_name: &str,
        file_path: &str,
        content: &'a BinaryContent<'a>,
    ) -> Result<u64, LightSpeedError> {
        // `binary_content` is a Cow over the inner `Cow<[u8]>` so the
        // in-memory case borrows without copying, while the from-fs case
        // owns the bytes it just read.
        let binary_content = match content {
            BinaryContent::InMemory { content } => Cow::Borrowed(content),
            BinaryContent::FromFs { file_path } => {
                let mut file = File::open(file_path).await.map_err(|err| LightSpeedError::BadRequest {
                    message: format!(
                        "PgFileStoreBinaryRepository - Cannot open file [{}]. Err: {:?}",
                        file_path.display(),
                        err
                    ),
                    code: ErrorCodes::IO_ERROR,
                })?;
                let mut contents = vec![];
                file.read_to_end(&mut contents).await.map_err(|err| LightSpeedError::BadRequest {
                    message: format!(
                        "PgFileStoreBinaryRepository - Cannot read file [{}]. Err: {:?}",
                        file_path.display(),
                        err
                    ),
                    code: ErrorCodes::IO_ERROR,
                })?;
                Cow::Owned(Cow::Owned(contents))
            }
        };
        let sql = &format!("INSERT INTO {} (repository, filepath, data) VALUES ($1, $2, $3)", self.table_name);
        Ok(conn.execute(sql, &[&repository_name, &file_path, &binary_content.as_ref().as_ref()]).await?)
    }
    /// Delete a file row; returns the number of rows removed (0 or 1).
    async fn delete_file(
        &self,
        conn: &mut Self::Conn,
        repository_name: &str,
        file_path: &str,
    ) -> Result<u64, LightSpeedError> {
        let sql = &format!("DELETE FROM {} WHERE repository = $1 AND filepath = $2", self.table_name);
        Ok(conn.execute(sql, &[&repository_name, &file_path]).await?)
    }
}
| 35.528736 | 111 | 0.556131 |
718f7c3caf6f545de9f989d5db3e36ccc24dedf5 | 1,663 | use gl::types::*;
/// A single 2D vertex as laid out in the GPU vertex buffer:
/// position (vec2), color (vec3), texture coordinates (vec2).
#[derive(Debug, Copy, Clone)]
// `repr(C, packed)`: field order and tight packing must match the attribute
// offsets declared in `vertex_specification`.
#[repr(C, packed)]
pub struct Vertex {
    position: [GLfloat; 2],   // shader layout (location = 0)
    color: [GLfloat; 3],      // shader layout (location = 1)
    tex_coords: [GLfloat; 2], // shader layout (location = 2)
}
impl Vertex {
    /// Construct a vertex from its position, color, and texture coordinates.
    pub fn new(position: [GLfloat; 2], color: [GLfloat; 3], tex_coords: [GLfloat; 2]) -> Self {
        Vertex {
            position,
            color,
            tex_coords,
        }
    }

    /// Enable and describe one vertex attribute on `vao` (DSA style).
    ///
    /// `size` is the component count and `float_offset` the attribute's
    /// offset into the vertex expressed in `GLfloat`s; all attributes read
    /// from binding index 0.
    fn attrib(vao: GLuint, location: GLuint, size: GLint, float_offset: usize) {
        let offset = (float_offset * std::mem::size_of::<GLfloat>()) as GLuint;
        unsafe {
            gl::EnableVertexArrayAttrib(vao, location);
            gl::VertexArrayAttribFormat(vao, location, size, gl::FLOAT, gl::FALSE, offset);
            gl::VertexArrayAttribBinding(vao, location, 0);
        }
    }

    /// Bind `vbo` to `vao` and declare the three attributes matching the
    /// `Vertex` memory layout.
    pub fn vertex_specification(vao: GLuint, vbo: GLuint) {
        unsafe {
            // Bind vao and vbo together; stride is the packed vertex size.
            gl::VertexArrayVertexBuffer(vao, 0, vbo, 0, std::mem::size_of::<Self>() as GLint);
        }
        // layout (location = 0) in vec2 in_position;
        Self::attrib(vao, 0, 2, 0);
        // layout (location = 1) in vec3 in_color;
        Self::attrib(vao, 1, 3, 2);
        // layout (location = 2) in vec2 in_tex_coords;
        Self::attrib(vao, 2, 2, 5);
    }
}
| 35.382979 | 95 | 0.561034 |
72e35117d8ac52c8aef22d24c29b1d3b398c8b6f | 7,192 | use std::sync::Arc;
use std::time::Instant;
use std::io::Cursor;
use std::convert::TryFrom;
use anyhow::{Result, Error};
use tracing::debug;
use wasmtime::{Caller, Extern, Func, Instance, Trap, TypedFunc, Store};
use dataplane::batch::Batch;
use dataplane::batch::MemoryRecords;
use dataplane::smartstream::{
SmartStreamInput, SmartStreamOutput, SmartStreamRuntimeError, SmartStreamInternalError,
};
use fluvio_protocol::{Encoder, Decoder};
use crate::smart_stream::{RecordsCallBack, RecordsMemory, SmartStreamModule, SmartStreamEngine};
use crate::smart_stream::file_batch::FileBatchIterator;
// Name of the exported WASM function invoked for filtering.
const FILTER_FN_NAME: &str = "filter";
// Typed signature of that export: (input ptr, input len) -> status code,
// where a negative return value is an internal error code.
type FilterFn = TypedFunc<(i32, i32), i32>;
/// A WASM-backed smartstream filter: feeds encoded record batches into a
/// compiled WASM module and collects the records the module keeps.
pub struct SmartStreamFilter {
    store: Store<()>,                 // wasmtime store owning all runtime state
    instance: Instance,               // instantiated WASM module
    filter_fn: FilterFn,              // typed handle to the exported `filter` fn
    records_cb: Arc<RecordsCallBack>, // slot the WASM callback writes output into
}
impl SmartStreamFilter {
    /// Instantiate the WASM filter module and wire up the host-side
    /// `copy_records` callback the module uses to hand results back.
    pub fn new(engine: &SmartStreamEngine, module: &SmartStreamModule) -> Result<Self> {
        let mut store = Store::new(&engine.0, ());
        let cb = Arc::new(RecordsCallBack::new());
        let callback = cb.clone();
        // Host function imported by the module: records the (ptr, len) of the
        // module's output inside its own linear memory for later copy-out.
        let copy_records = Func::wrap(
            &mut store,
            move |mut caller: Caller<'_, ()>, ptr: i32, len: i32| {
                debug!(len, "callback from wasm filter");
                let memory = match caller.get_export("memory") {
                    Some(Extern::Memory(mem)) => mem,
                    _ => return Err(Trap::new("failed to find host memory")),
                };
                let records = RecordsMemory { ptr, len, memory };
                cb.set(records);
                Ok(())
            },
        );
        let instance = Instance::new(&mut store, &module.0, &[copy_records.into()])?;
        let filter_fn: FilterFn = instance.get_typed_func(&mut store, FILTER_FN_NAME)?;
        Ok(Self {
            store,
            instance,
            filter_fn,
            records_cb: callback,
        })
    }
    /// Filter batches, returning at most `max_bytes` worth of accepted
    /// records to send back to the consumer, plus any runtime error the
    /// module reported.
    pub fn filter(
        &mut self,
        iter: &mut FileBatchIterator,
        max_bytes: usize,
    ) -> Result<(Batch, Option<SmartStreamRuntimeError>), Error> {
        let mut memory_filter_batch = Batch::<MemoryRecords>::default();
        memory_filter_batch.base_offset = -1; // indicate this is uninitialized
        memory_filter_batch.set_offset_delta(-1); // make add_to_offset_delta count correctly
        let mut total_bytes = 0;
        loop {
            let file_batch = match iter.next() {
                // we filter-map entire batches; each batch is processed as a group.
                // if we can't fit the current batch into max bytes then it is discarded
                Some(batch_result) => batch_result?,
                None => {
                    debug!(
                        total_records = memory_filter_batch.records().len(),
                        "no more batches filter end"
                    );
                    return Ok((memory_filter_batch, None));
                }
            };
            debug!(
                current_batch_offset = file_batch.batch.base_offset,
                current_batch_offset_delta = file_batch.offset_delta(),
                filter_offset_delta = memory_filter_batch.get_header().last_offset_delta,
                filter_base_offset = memory_filter_batch.base_offset,
                filter_records = memory_filter_batch.records().len(),
                "starting filter processing"
            );
            let now = Instant::now();
            // Encode (base offset + raw record bytes) into the wire format the
            // WASM module expects as input.
            let mut input_data = Vec::new();
            let smartstream_input = SmartStreamInput {
                base_offset: file_batch.batch.base_offset,
                record_data: file_batch.records.clone(),
            };
            fluvio_protocol::Encoder::encode(&smartstream_input, &mut input_data, 0)?;
            // Clear output left over from the previous invocation, then copy
            // the encoded input into the module's linear memory and call it.
            self.records_cb.clear();
            let array_ptr = super::memory::copy_memory_to_instance(
                &mut self.store,
                &self.instance,
                &input_data,
            )?;
            let filter_output = self
                .filter_fn
                .call(&mut self.store, (array_ptr as i32, input_data.len() as i32))?;
            debug!(filter_output,filter_execution_time = %now.elapsed().as_millis());
            // Negative return values are module-internal error codes.
            if filter_output < 0 {
                let internal_error = SmartStreamInternalError::try_from(filter_output)
                    .unwrap_or(SmartStreamInternalError::UnknownError);
                return Err(internal_error.into());
            }
            // Copy whatever the module handed back through the callback out of
            // WASM memory (empty if the callback was never invoked).
            let bytes = self
                .records_cb
                .get()
                .and_then(|m| m.copy_memory_from(&mut self.store).ok())
                .unwrap_or_default();
            debug!(out_filter_bytes = bytes.len());
            // this is inefficient for now
            let mut output = SmartStreamOutput::default();
            output.decode(&mut Cursor::new(bytes), 0)?;
            let maybe_error = output.error;
            let mut records = output.successes;
            // records that survived the filter
            if records.is_empty() {
                debug!("filters records empty");
            } else {
                // set base offset if this is the first kept batch
                if memory_filter_batch.base_offset == -1 {
                    memory_filter_batch.base_offset = file_batch.base_offset();
                }
                // difference between the filter batch and the current batch:
                // since the bases differ, each record's delta offset is rebased
                let relative_base_offset =
                    memory_filter_batch.base_offset - file_batch.base_offset();
                for record in &mut records {
                    record.add_base_offset(relative_base_offset);
                }
                let record_bytes = records.write_size(0);
                // if filter bytes exceed max bytes then we skip this batch
                if total_bytes + record_bytes > max_bytes {
                    debug!(
                        total_bytes = total_bytes + record_bytes,
                        max_bytes, "total filter bytes reached"
                    );
                    return Ok((memory_filter_batch, maybe_error));
                }
                total_bytes += record_bytes;
                debug!(
                    filter_records = records.len(),
                    total_bytes, "finished filtering"
                );
                memory_filter_batch.mut_records().append(&mut records);
            }
            // only increment filter offset delta if filter_batch has been initialized
            if memory_filter_batch.base_offset != -1 {
                debug!(
                    offset_delta = file_batch.offset_delta(),
                    "adding to offset delta"
                );
                memory_filter_batch.add_to_offset_delta(file_batch.offset_delta() + 1);
            }
            // If we had a filtering error, return current batch and error
            if maybe_error.is_some() {
                return Ok((memory_filter_batch, maybe_error));
            }
        }
    }
}
| 37.264249 | 96 | 0.556452 |
b9bc8e6d6955acdf9716f949d4cd168b5e68b8a6 | 690 | //! Definition of Deferred Call tasks.
//!
//! Deferred calls allow peripheral drivers to register pseudo interrupts.
//! These are the definitions of which deferred calls this chip needs.
use core::convert::Into;
use core::convert::TryFrom;
/// A type of task to defer a call for
#[derive(Copy, Clone)]
pub enum Task {
    // Explicit discriminants: these values round-trip through `usize`
    // via the conversion impls below.
    Flashcalw = 0,
    CRCCU = 1,
}
impl TryFrom<usize> for Task {
    type Error = ();

    /// Map a raw task index back to its `Task`, rejecting unknown values.
    fn try_from(value: usize) -> Result<Task, ()> {
        let task = match value {
            0 => Task::Flashcalw,
            1 => Task::CRCCU,
            _ => return Err(()),
        };
        Ok(task)
    }
}
impl Into<usize> for Task {
fn into(self) -> usize {
self as usize
}
}
| 20.909091 | 74 | 0.582609 |
11770dd582dca829d54e0185542a6b2b7ca24ed6 | 1,553 | extern crate clipboard_ext;
extern crate clap;
use std::fs;
use clipboard_ext::prelude::*;
use clipboard_ext::osc52::Osc52ClipboardContext;
use clap::Parser;
use std::io;
use std::io::Read;
use exitcode;
// Command-line interface definition, parsed via clap's derive API.
// NOTE(review): the `///` doc comment on `files` doubles as the clap help
// text shown to users, so it must not be edited casually.
#[derive(Parser)]
#[clap(author, version, about = "Copy files' content to clipboard.", long_about = None)]
struct Cli {
    /// Optional files to operate on. '-' means `stdin` (Can only be used once).
    /// If no file is given, `stdin` is assumed.
    files: Vec<String>,
}
/// Read the given files (or stdin for `-`), concatenate their contents
/// separated by single newlines, and place the result on the clipboard
/// via the OSC 52 escape sequence.
fn main() {
    let stdin = io::stdin();
    let cli = Cli::parse();
    let mut files: Vec<String> = cli.files;

    // stdin can only be consumed once, so "-" may appear at most one time.
    let num_stdin_arg = files.iter().filter(|x| *x == "-").count();
    if num_stdin_arg > 1 {
        eprintln!("At most one stdin arg (-) may be specified.");
        std::process::exit(exitcode::USAGE);
    }
    // With no files given, default to reading stdin.
    if files.is_empty() {
        files.push("-".into());
    }

    let mut contents = String::new();
    for filename in &files {
        let mut tmp = String::new();
        if "-" == filename {
            stdin
                .lock()
                .read_to_string(&mut tmp)
                .expect("Read from stdin failed");
        } else {
            // `unwrap_or_else` avoids building the panic message eagerly on
            // every successful read (clippy: expect_fun_call).
            tmp = fs::read_to_string(filename).unwrap_or_else(|err| {
                panic!(
                    "Something went wrong reading the file '{}': {:?}",
                    filename, err
                )
            });
        }
        // Join inputs with a single newline, without doubling one up after
        // an input that already ends in a newline.
        if !contents.ends_with('\n') && !contents.is_empty() {
            contents.push('\n');
        }
        contents.push_str(&tmp);
    }

    let mut ctx = Osc52ClipboardContext::new().unwrap();
    ctx.set_contents(contents).unwrap();
}
| 26.775862 | 91 | 0.576948 |