code
stringlengths
3
1.01M
repo_name
stringlengths
5
116
path
stringlengths
3
311
language
stringclasses
30 values
license
stringclasses
15 values
size
int64
3
1.01M
/* * Licensed to DuraSpace under one or more contributor license agreements. * See the NOTICE file distributed with this work for additional information * regarding copyright ownership. * * DuraSpace licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.fcrepo.persistence.ocfl.impl; import org.fcrepo.kernel.api.operations.ResourceOperation; import org.fcrepo.persistence.api.PersistentStorageSession; import org.fcrepo.persistence.api.exceptions.PersistentStorageException; import org.fcrepo.persistence.ocfl.api.FedoraToOCFLObjectIndex; import org.fcrepo.persistence.ocfl.api.OCFLObjectSessionFactory; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; import static java.util.UUID.randomUUID; /** * Test class for {@link OCFLPersistentSessionManager} * * @author dbernstein */ @RunWith(MockitoJUnitRunner.class) public class OCFLPersistentSessionManagerTest { @InjectMocks private OCFLPersistentSessionManager sessionManager; private PersistentStorageSession readWriteSession; private PersistentStorageSession readOnlySession; private final String testSessionId = randomUUID().toString(); @Mock private ResourceOperation mockOperation; @Mock private FedoraToOCFLObjectIndex index; @Mock private FedoraOCFLMapping mapping; @Mock private OCFLObjectSessionFactory objectSessionFactory; @Before public void setUp() { readWriteSession = 
this.sessionManager.getSession(testSessionId); readOnlySession = this.sessionManager.getReadOnlySession(); } @Test(expected = UnsupportedOperationException.class) public void testUnsupportedOperationOnUnrecognizedOperation() throws Exception { readWriteSession.persist(mockOperation); } @Test(expected = PersistentStorageException.class) public void testPersistNoSession() throws Exception { readOnlySession.persist(mockOperation); } @Test(expected = IllegalArgumentException.class) public void testNullSessionId() { this.sessionManager.getSession(null); } }
dbernstein/fcrepo4
fcrepo-persistence-ocfl/src/test/java/org/fcrepo/persistence/ocfl/impl/OCFLPersistentSessionManagerTest.java
Java
apache-2.0
2,724
/// Copyright (c) 2012 Ecma International. All rights reserved. /// Ecma International makes this code available under the terms and conditions set /// forth on http://hg.ecmascript.org/tests/test262/raw-file/tip/LICENSE (the /// "Use Terms"). Any redistribution of this code must retain the above /// copyright and this notice and otherwise comply with the Use Terms. /** * @path ch15/15.4/15.4.4/15.4.4.22/15.4.4.22-9-c-ii-32.js * @description Array.prototype.reduceRight - RegExp Object can be used as accumulator */ function testcase() { var accessed = false; var objRegExp = new RegExp(); function callbackfn(prevVal, curVal, idx, obj) { accessed = true; return prevVal === objRegExp; } var obj = { 0: 11, length: 1 }; return Array.prototype.reduceRight.call(obj, callbackfn, objRegExp) === true && accessed; } runTestCase(testcase);
hippich/typescript
tests/Fidelity/test262/suite/ch15/15.4/15.4.4/15.4.4.22/15.4.4.22-9-c-ii-32.js
JavaScript
apache-2.0
954
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!--NewPage--> <HTML> <HEAD> <!-- Generated by javadoc (build 1.6.0_13) on Wed Aug 05 08:53:18 ICT 2009 --> <META http-equiv="Content-Type" content="text/html; charset=utf8"> <TITLE> Uses of Interface org.jgentleframework.core.handling.IAbstractDefinitionExceptionCatcher </TITLE> <META NAME="date" CONTENT="2009-08-05"> <LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../stylesheet.css" TITLE="Style"> <SCRIPT type="text/javascript"> function windowTitle() { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Interface org.jgentleframework.core.handling.IAbstractDefinitionExceptionCatcher"; } } </SCRIPT> <NOSCRIPT> </NOSCRIPT> </HEAD> <BODY BGCOLOR="white" onload="windowTitle();"> <HR> <!-- ========= START OF TOP NAVBAR ======= --> <A NAME="navbar_top"><!-- --></A> <A HREF="#skip-navbar_top" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_top_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../org/jgentleframework/core/handling/IAbstractDefinitionExceptionCatcher.html" title="interface in org.jgentleframework.core.handling"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT 
CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-files/index-1.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;PREV&nbsp; &nbsp;NEXT</FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../../index.html?org/jgentleframework/core/handling/\class-useIAbstractDefinitionExceptionCatcher.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="IAbstractDefinitionExceptionCatcher.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_top"></A> <!-- ========= END OF TOP NAVBAR ========= --> <HR> <CENTER> <H2> <B>Uses of Interface<br>org.jgentleframework.core.handling.IAbstractDefinitionExceptionCatcher</B></H2> </CENTER> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2"> Packages that use <A HREF="../../../../../org/jgentleframework/core/handling/IAbstractDefinitionExceptionCatcher.html" title="interface in org.jgentleframework.core.handling">IAbstractDefinitionExceptionCatcher</A></FONT></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD><A 
HREF="#org.jgentleframework.core.handling"><B>org.jgentleframework.core.handling</B></A></TD> <TD>&nbsp;&nbsp;</TD> </TR> </TABLE> &nbsp; <P> <A NAME="org.jgentleframework.core.handling"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2"> Uses of <A HREF="../../../../../org/jgentleframework/core/handling/IAbstractDefinitionExceptionCatcher.html" title="interface in org.jgentleframework.core.handling">IAbstractDefinitionExceptionCatcher</A> in <A HREF="../../../../../org/jgentleframework/core/handling/package-summary.html">org.jgentleframework.core.handling</A></FONT></TH> </TR> </TABLE> &nbsp; <P> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor"> <TH ALIGN="left" COLSPAN="2">Subinterfaces of <A HREF="../../../../../org/jgentleframework/core/handling/IAbstractDefinitionExceptionCatcher.html" title="interface in org.jgentleframework.core.handling">IAbstractDefinitionExceptionCatcher</A> in <A HREF="../../../../../org/jgentleframework/core/handling/package-summary.html">org.jgentleframework.core.handling</A></FONT></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;interface</CODE></FONT></TD> <TD><CODE><B><A HREF="../../../../../org/jgentleframework/core/handling/DefinitionManager.html" title="interface in org.jgentleframework.core.handling">DefinitionManager</A></B></CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;<A HREF="../../../../../org/jgentleframework/core/handling/DefinitionManager.html" title="interface in org.jgentleframework.core.handling"><CODE>DefinitionManager</CODE></A> is responsible for <A HREF="../../../../../org/jgentleframework/core/reflection/metadata/Definition.html" title="interface in 
org.jgentleframework.core.reflection.metadata"><CODE>Definition</CODE></A> management.</TD> </TR> </TABLE> &nbsp; <P> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor"> <TH ALIGN="left" COLSPAN="2">Classes in <A HREF="../../../../../org/jgentleframework/core/handling/package-summary.html">org.jgentleframework.core.handling</A> that implement <A HREF="../../../../../org/jgentleframework/core/handling/IAbstractDefinitionExceptionCatcher.html" title="interface in org.jgentleframework.core.handling">IAbstractDefinitionExceptionCatcher</A></FONT></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;class</CODE></FONT></TD> <TD><CODE><B><A HREF="../../../../../org/jgentleframework/core/handling/AbstractDefinitionController.html" title="class in org.jgentleframework.core.handling">AbstractDefinitionController</A></B></CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;Quản lý các phương thức điều khiển các <i>module</i> là các <i>extension-point</i> trong khi thực thi xử lý annotation, bao gồm việc diễn dịch <code>annotation</code> thành <A HREF="../../../../../org/jgentleframework/core/reflection/metadata/AnnoMeta.html" title="interface in org.jgentleframework.core.reflection.metadata"><CODE>AnnoMeta</CODE></A>, quản lý <A HREF="../../../../../org/jgentleframework/core/reflection/DefinitionPostProcessor.html" title="interface in org.jgentleframework.core.reflection"><CODE>DefinitionPostProcessor</CODE></A>, <A HREF="../../../../../org/jgentleframework/core/reflection/annohandler/AnnotationBeanProcessor.html" title="interface in org.jgentleframework.core.reflection.annohandler"><CODE>AnnotationBeanProcessor</CODE></A>, ...</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;class</CODE></FONT></TD> <TD><CODE><B><A 
HREF="../../../../../org/jgentleframework/core/handling/AbstractDefinitionExceptionCatcher.html" title="class in org.jgentleframework.core.handling">AbstractDefinitionExceptionCatcher</A></B></CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;The Class AbstractDefinitionExceptionCatcher.</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;class</CODE></FONT></TD> <TD><CODE><B><A HREF="../../../../../org/jgentleframework/core/handling/DefinitionManagerImpl.html" title="class in org.jgentleframework.core.handling">DefinitionManagerImpl</A></B></CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;An implementation of <A HREF="../../../../../org/jgentleframework/core/handling/DefinitionManager.html" title="interface in org.jgentleframework.core.handling"><CODE>DefinitionManager</CODE></A>.</TD> </TR> </TABLE> &nbsp; <P> <HR> <!-- ======= START OF BOTTOM NAVBAR ====== --> <A NAME="navbar_bottom"><!-- --></A> <A HREF="#skip-navbar_bottom" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_bottom_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../org/jgentleframework/core/handling/IAbstractDefinitionExceptionCatcher.html" title="interface in org.jgentleframework.core.handling"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT 
CLASS="NavBarFont1Rev"><B>Use</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-files/index-1.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;PREV&nbsp; &nbsp;NEXT</FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../../index.html?org/jgentleframework/core/handling/\class-useIAbstractDefinitionExceptionCatcher.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="IAbstractDefinitionExceptionCatcher.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_bottom"></A> <!-- ======== END OF BOTTOM NAVBAR ======= --> <HR> </BODY> </HTML>
OldRepoPreservation/jgentle
doc/org/jgentleframework/core/handling/class-use/IAbstractDefinitionExceptionCatcher.html
HTML
apache-2.0
11,812
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.spi.security.authorization.permission; import org.apache.jackrabbit.oak.api.Tree; import org.apache.jackrabbit.oak.plugins.memory.PropertyStates; import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants; import org.junit.Test; import org.mockito.Mockito; import javax.jcr.Session; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; public class EmptyPermissionProviderTest { private final PermissionProvider emptyProvider = EmptyPermissionProvider.getInstance(); private final Tree tree = Mockito.mock(Tree.class); @Test public void testGetPrivileges() { assertTrue(emptyProvider.getPrivileges(tree).isEmpty()); } @Test public void testHasPrivileges() { assertFalse(emptyProvider.hasPrivileges(tree, PrivilegeConstants.JCR_READ)); } @Test public void testGetRepositoryPermission() { assertSame(RepositoryPermission.EMPTY, emptyProvider.getRepositoryPermission()); } @Test public void testGetTreePermission() { assertSame(TreePermission.EMPTY, emptyProvider.getTreePermission(tree, TreePermission.EMPTY)); } @Test public void testIsGranted() { assertFalse(emptyProvider.isGranted(tree, 
null, Permissions.READ)); assertFalse(emptyProvider.isGranted(tree, PropertyStates.createProperty("prop", "value"), Permissions.READ)); } @Test public void testIsGrantedActions() { assertFalse(emptyProvider.isGranted("/", Session.ACTION_READ)); } @Test public void testRefreshMustNotFail() { emptyProvider.refresh(); } }
trekawek/jackrabbit-oak
oak-security-spi/src/test/java/org/apache/jackrabbit/oak/spi/security/authorization/permission/EmptyPermissionProviderTest.java
Java
apache-2.0
2,508
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.catalyst.expressions import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback import org.apache.spark.sql.types._ /** * A placeholder expression for cube/rollup, which will be replaced by analyzer */ trait GroupingSet extends Expression with CodegenFallback { def groupByExprs: Seq[Expression] override def children: Seq[Expression] = groupByExprs // this should be replaced first override lazy val resolved: Boolean = false override def dataType: DataType = throw new UnsupportedOperationException override def foldable: Boolean = false override def nullable: Boolean = true override def eval(input: InternalRow): Any = throw new UnsupportedOperationException } // scalastyle:off line.size.limit @ExpressionDescription( usage = """ _FUNC_([col1[, col2 ..]]) - create a multi-dimensional cube using the specified columns so that we can run aggregation on them. 
""", examples = """ Examples: > SELECT name, age, count(*) FROM VALUES (2, 'Alice'), (5, 'Bob') people(age, name) GROUP BY _FUNC_(name, age); NULL 2 1 NULL NULL 2 Alice 2 1 Bob 5 1 NULL 5 1 Bob NULL 1 Alice NULL 1 """, since = "2.0.0") // scalastyle:on line.size.limit case class Cube(groupByExprs: Seq[Expression]) extends GroupingSet {} // scalastyle:off line.size.limit @ExpressionDescription( usage = """ _FUNC_([col1[, col2 ..]]) - create a multi-dimensional rollup using the specified columns so that we can run aggregation on them. """, examples = """ Examples: > SELECT name, age, count(*) FROM VALUES (2, 'Alice'), (5, 'Bob') people(age, name) GROUP BY _FUNC_(name, age); NULL NULL 2 Alice 2 1 Bob 5 1 Bob NULL 1 Alice NULL 1 """, since = "2.0.0") // scalastyle:on line.size.limit case class Rollup(groupByExprs: Seq[Expression]) extends GroupingSet {} /** * Indicates whether a specified column expression in a GROUP BY list is aggregated or not. * GROUPING returns 1 for aggregated or 0 for not aggregated in the result set. */ // scalastyle:off line.size.limit @ExpressionDescription( usage = """ _FUNC_(col) - indicates whether a specified column in a GROUP BY is aggregated or not, returns 1 for aggregated or 0 for not aggregated in the result set.", """, examples = """ Examples: > SELECT name, _FUNC_(name), sum(age) FROM VALUES (2, 'Alice'), (5, 'Bob') people(age, name) GROUP BY cube(name); Alice 0 2 NULL 1 7 Bob 0 5 """, since = "2.0.0") // scalastyle:on line.size.limit case class Grouping(child: Expression) extends Expression with Unevaluable { @transient override lazy val references: AttributeSet = AttributeSet(VirtualColumn.groupingIdAttribute :: Nil) override def children: Seq[Expression] = child :: Nil override def dataType: DataType = ByteType override def nullable: Boolean = false } /** * GroupingID is a function that computes the level of grouping. * * If groupByExprs is empty, it means all grouping expressions in GroupingSets. 
*/ // scalastyle:off line.size.limit @ExpressionDescription( usage = """ _FUNC_([col1[, col2 ..]]) - returns the level of grouping, equals to `(grouping(c1) << (n-1)) + (grouping(c2) << (n-2)) + ... + grouping(cn)` """, examples = """ Examples: > SELECT name, _FUNC_(), sum(age), avg(height) FROM VALUES (2, 'Alice', 165), (5, 'Bob', 180) people(age, name, height) GROUP BY cube(name, height); NULL 2 2 165.0 Alice 0 2 165.0 NULL 2 5 180.0 NULL 3 7 172.5 Bob 0 5 180.0 Bob 1 5 180.0 Alice 1 2 165.0 """, note = """ Input columns should match with grouping columns exactly, or empty (means all the grouping columns). """, since = "2.0.0") // scalastyle:on line.size.limit case class GroupingID(groupByExprs: Seq[Expression]) extends Expression with Unevaluable { @transient override lazy val references: AttributeSet = AttributeSet(VirtualColumn.groupingIdAttribute :: Nil) override def children: Seq[Expression] = groupByExprs override def dataType: DataType = IntegerType override def nullable: Boolean = false override def prettyName: String = "grouping_id" }
aosagie/spark
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/grouping.scala
Scala
apache-2.0
5,380
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.default = getISODay; var _index = _interopRequireDefault(require("../toDate/index.js")); var _index2 = _interopRequireDefault(require("../_lib/requiredArgs/index.js")); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } /** * @name getISODay * @category Weekday Helpers * @summary Get the day of the ISO week of the given date. * * @description * Get the day of the ISO week of the given date, * which is 7 for Sunday, 1 for Monday etc. * * ISO week-numbering year: http://en.wikipedia.org/wiki/ISO_week_date * * ### v2.0.0 breaking changes: * * - [Changes that are common for the whole library](https://github.com/date-fns/date-fns/blob/master/docs/upgradeGuide.md#Common-Changes). * * @param {Date|Number} date - the given date * @returns {Number} the day of ISO week * @throws {TypeError} 1 argument required * * @example * // Which day of the ISO week is 26 February 2012? * var result = getISODay(new Date(2012, 1, 26)) * //=> 7 */ function getISODay(dirtyDate) { (0, _index2.default)(1, arguments); var date = (0, _index.default)(dirtyDate); var day = date.getDay(); if (day === 0) { day = 7; } return day; } module.exports = exports.default;
BigBoss424/portfolio
v8/development/node_modules/date-fns/getISODay/index.js
JavaScript
apache-2.0
1,327
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // run-pass fn test1() { let mut ints = [0; 32]; ints[0] += 1; assert_eq!(ints[0], 1); } fn test2() { let mut ints = [0; 32]; for i in &mut ints { *i += 22; } for i in &ints { assert_eq!(*i, 22); } } pub fn main() { test1(); test2(); }
GBGamer/rust
src/test/run-pass/array-slice-vec/mutability-inherits-through-fixed-length-vec.rs
Rust
apache-2.0
739
package storage import ( "encoding/hex" "fmt" "time" "github.com/docker/go/canonical/json" "github.com/docker/notary" "github.com/docker/notary/storage" "github.com/docker/notary/tuf/data" ) // TUFMetaStorage wraps a MetaStore in order to walk the TUF tree for GetCurrent in a consistent manner, // by always starting from a current timestamp and then looking up other data by hash type TUFMetaStorage struct { MetaStore // cached metadata by checksum cachedMeta map[string]*storedMeta } // NewTUFMetaStorage instantiates a TUFMetaStorage instance func NewTUFMetaStorage(m MetaStore) *TUFMetaStorage { return &TUFMetaStorage{ MetaStore: m, cachedMeta: make(map[string]*storedMeta), } } type storedMeta struct { data []byte createupdate *time.Time } // GetCurrent gets a specific TUF record, by walking from the current Timestamp to other metadata by checksum func (tms TUFMetaStorage) GetCurrent(gun data.GUN, tufRole data.RoleName) (*time.Time, []byte, error) { timestampTime, timestampJSON, err := tms.MetaStore.GetCurrent(gun, data.CanonicalTimestampRole) if err != nil { return nil, nil, err } // If we wanted data for the timestamp role, we're done here if tufRole == data.CanonicalTimestampRole { return timestampTime, timestampJSON, nil } // If we want to lookup another role, walk to it via current timestamp --> snapshot by checksum --> desired role timestampMeta := &data.SignedTimestamp{} if err := json.Unmarshal(timestampJSON, timestampMeta); err != nil { return nil, nil, fmt.Errorf("could not parse current timestamp") } snapshotChecksums, err := timestampMeta.GetSnapshot() if err != nil || snapshotChecksums == nil { return nil, nil, fmt.Errorf("could not retrieve latest snapshot checksum") } snapshotSHA256Bytes, ok := snapshotChecksums.Hashes[notary.SHA256] if !ok { return nil, nil, fmt.Errorf("could not retrieve latest snapshot sha256") } snapshotSHA256Hex := hex.EncodeToString(snapshotSHA256Bytes[:]) // Check the cache if we have our snapshot data var snapshotTime 
*time.Time var snapshotJSON []byte if cachedSnapshotData, ok := tms.cachedMeta[snapshotSHA256Hex]; ok { snapshotTime = cachedSnapshotData.createupdate snapshotJSON = cachedSnapshotData.data } else { // Get the snapshot from the underlying store by checksum if it isn't cached yet snapshotTime, snapshotJSON, err = tms.GetChecksum(gun, data.CanonicalSnapshotRole, snapshotSHA256Hex) if err != nil { return nil, nil, err } // cache for subsequent lookups tms.cachedMeta[snapshotSHA256Hex] = &storedMeta{data: snapshotJSON, createupdate: snapshotTime} } // If we wanted data for the snapshot role, we're done here if tufRole == data.CanonicalSnapshotRole { return snapshotTime, snapshotJSON, nil } // If it's a different role, we should have the checksum in snapshot metadata, and we can use it to GetChecksum() snapshotMeta := &data.SignedSnapshot{} if err := json.Unmarshal(snapshotJSON, snapshotMeta); err != nil { return nil, nil, fmt.Errorf("could not parse current snapshot") } roleMeta, err := snapshotMeta.GetMeta(tufRole) if err != nil { return nil, nil, err } roleSHA256Bytes, ok := roleMeta.Hashes[notary.SHA256] if !ok { return nil, nil, fmt.Errorf("could not retrieve latest %s sha256", tufRole) } roleSHA256Hex := hex.EncodeToString(roleSHA256Bytes[:]) // check if we can retrieve this data from cache if cachedRoleData, ok := tms.cachedMeta[roleSHA256Hex]; ok { return cachedRoleData.createupdate, cachedRoleData.data, nil } roleTime, roleJSON, err := tms.MetaStore.GetChecksum(gun, tufRole, roleSHA256Hex) if err != nil { return nil, nil, err } // cache for subsequent lookups tms.cachedMeta[roleSHA256Hex] = &storedMeta{data: roleJSON, createupdate: roleTime} return roleTime, roleJSON, nil } // GetChecksum gets a specific TUF record by checksum, also checking the internal cache func (tms TUFMetaStorage) GetChecksum(gun data.GUN, tufRole data.RoleName, checksum string) (*time.Time, []byte, error) { if cachedRoleData, ok := tms.cachedMeta[checksum]; ok { return 
cachedRoleData.createupdate, cachedRoleData.data, nil } roleTime, roleJSON, err := tms.MetaStore.GetChecksum(gun, tufRole, checksum) if err != nil { return nil, nil, err } // cache for subsequent lookups tms.cachedMeta[checksum] = &storedMeta{data: roleJSON, createupdate: roleTime} return roleTime, roleJSON, nil } // Bootstrap the store with tables if possible func (tms TUFMetaStorage) Bootstrap() error { if s, ok := tms.MetaStore.(storage.Bootstrapper); ok { return s.Bootstrap() } return fmt.Errorf("store does not support bootstrapping") }
umayr/notary
server/storage/tuf_store.go
GO
apache-2.0
4,661
<!--- Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> Create Cluster ===== [Back to Resources](index.md#resources) **Summary** Create a cluster identified by ":name". POST /clusters/:name **Response** <table> <tr> <th>HTTP CODE</th> <th>Description</th> </tr> <tr> <td>201</td> <td>Created</td> </tr> <tr> <td>202</td> <td>Accepted</td> </tr> <tr> <td>400</td> <td>Bad Request</td> </tr> <tr> <td>401</td> <td>Unauthorized</td> </tr> <tr> <td>403</td> <td>Forbidden</td> </tr> <tr> <td>500</td> <td>Internal Server Error</td> </tr> </table> **Example** Create a cluster named ‘c1’ with the property ‘Clusters/version’ = ‘HDP-1.2.0’. POST /clusters/c1 { "Clusters": { "version" : "HDP-1.2.0” } } 201 Created
zouzhberk/ambaridemo
demo-server/docs/api/v1/create-cluster.md
Markdown
apache-2.0
1,583
/** * * Copyright (c) Microsoft and contributors. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. * */ // Warning: This code was generated by a tool. // // Changes to this file may cause incorrect behavior and will be lost if the // code is regenerated. package com.microsoft.azure.management.notificationhubs.models; import com.microsoft.windowsazure.core.LazyArrayList; import com.microsoft.windowsazure.core.OperationResponse; import java.util.ArrayList; /** * The response of the List Namespace operation. */ public class AuthorizationRulesListResponse extends OperationResponse { private String nextLink; /** * Optional. Gets or sets link to the next set of results. Not empty if * Value contains incomplete list of AuthorizationRules * @return The NextLink value. */ public String getNextLink() { return this.nextLink; } /** * Optional. Gets or sets link to the next set of results. Not empty if * Value contains incomplete list of AuthorizationRules * @param nextLinkValue The NextLink value. */ public void setNextLink(final String nextLinkValue) { this.nextLink = nextLinkValue; } private ArrayList<AuthorizationRulesResource> value; /** * Optional. Gets or sets result of the List AuthorizationRules operation. * @return The Value value. */ public ArrayList<AuthorizationRulesResource> getValue() { return this.value; } /** * Optional. Gets or sets result of the List AuthorizationRules operation. * @param valueValue The Value value. 
*/ public void setValue(final ArrayList<AuthorizationRulesResource> valueValue) { this.value = valueValue; } /** * Initializes a new instance of the AuthorizationRulesListResponse class. * */ public AuthorizationRulesListResponse() { super(); this.setValue(new LazyArrayList<AuthorizationRulesResource>()); } }
flydream2046/azure-sdk-for-java
service-management/azure-svc-mgmt-notificationhubs/src/main/java/com/microsoft/azure/management/notificationhubs/models/AuthorizationRulesListResponse.java
Java
apache-2.0
2,514
// Copyright 2015 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. import replicationControllerDetailModule from 'replicationcontrollerdetail/replicationcontrollerdetail_module'; import UpdateReplicasDialogController from 'replicationcontrollerdetail/updatereplicas_controller'; describe('Update Replicas controller', () => { /** * Replication Controller Detail controller. * @type {!UpdateReplicasDialogController} */ let ctrl; /** @type {!md.$dialog} */ let mdDialog; /** @type {!ui.router.$state} */ let state; /** @type {!angular.$resource} */ let resource; /** @type {!angular.$httpBackend} */ let httpBackend; /** @type {!angular.$log} */ let log; /** @type {string} */ let namespaceMock = 'foo-namespace'; /** @type {string} */ let replicationControllerMock = 'foo-name'; beforeEach(() => { angular.mock.module(replicationControllerDetailModule.name); angular.mock.inject(($log, $state, $mdDialog, $controller, $httpBackend, $resource) => { mdDialog = $mdDialog; state = $state; resource = $resource; httpBackend = $httpBackend; log = $log; ctrl = $controller( UpdateReplicasDialogController, { $resource: resource, namespace: namespaceMock, replicationController: replicationControllerMock, currentPods: 1, desiredPods: 1, }, {updateReplicasForm: {$valid: true}}); }); }); it('should update controller replicas to given number and log success', () => { // given let replicaSpec = { replicas: 5, }; spyOn(log, 'info'); spyOn(state, 'reload'); 
httpBackend.whenPOST('api/v1/replicationcontroller/foo-namespace/foo-name/update/pod') .respond(200, replicaSpec); // when ctrl.updateReplicas(); httpBackend.flush(); // then expect(log.info).toHaveBeenCalledWith( `Successfully updated replicas number to ${replicaSpec.replicas}`); expect(state.reload).toHaveBeenCalled(); }); it('should log error on failed update', () => { // given spyOn(log, 'error'); httpBackend.whenPOST('api/v1/replicationcontroller/foo-namespace/foo-name/update/pod') .respond(404); // when ctrl.updateReplicas(); httpBackend.flush(); // then expect(log.error).toHaveBeenCalled(); }); it('should close the dialog on cancel', () => { spyOn(state, 'reload'); // given spyOn(mdDialog, 'cancel'); // when ctrl.cancel(); // then expect(mdDialog.cancel).toHaveBeenCalled(); expect(state.reload).not.toHaveBeenCalled(); }); });
tangfeixiong/dashboard
src/test/frontend/replicationcontrollerdetail/updatereplicas_controller_test.js
JavaScript
apache-2.0
3,190
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.tools;

import java.io.*;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.attribute.FileTime;
import java.util.Arrays;
import java.util.List;

import org.apache.cassandra.io.sstable.Component;
import org.apache.cassandra.io.sstable.Descriptor;
import org.apache.cassandra.service.ActiveRepairService;

/**
 * Set repairedAt status on a given set of sstables.
 *
 * If you pass --is-repaired, it will set the repairedAt time to the last modified time.
 *
 * If you know you ran repair 2 weeks ago, you can do something like
 *
 * sstablerepairedset --really-set --is-repaired -f <(find /var/lib/cassandra/data/.../ -iname "*Data.db*" -mtime +14)
 */
public class SSTableRepairedAtSetter
{
    /** Single shared usage string so the two error paths cannot drift apart. */
    private static final String USAGE =
        "Usage: sstablerepairedset --really-set [--is-repaired | --is-unrepaired] [-f <sstable-list> | <sstables>]";

    /**
     * Entry point. Mutates the repairedAt field of the given sstables' metadata.
     *
     * @param args a list of sstables whose metadata we are changing
     * @throws IOException if a metadata file cannot be read or rewritten
     */
    public static void main(final String[] args) throws IOException
    {
        PrintStream out = System.out;
        if (args.length == 0)
        {
            out.println("This command should be run with Cassandra stopped!");
            // Fixed: the usage previously omitted the mandatory --really-set flag.
            out.println(USAGE);
            System.exit(1);
        }

        if (args.length < 3 || !args[0].equals("--really-set") || (!args[1].equals("--is-repaired") && !args[1].equals("--is-unrepaired")))
        {
            out.println("This command should be run with Cassandra stopped, otherwise you will get very strange behavior");
            out.println("Verify that Cassandra is not running and then execute the command like this:");
            // Fixed: this line previously named the wrong tool ("sstablelevelreset").
            out.println(USAGE);
            System.exit(1);
        }

        boolean setIsRepaired = args[1].equals("--is-repaired");

        List<String> fileNames;
        if (args[2].equals("-f"))
        {
            // Fixed: guard against "-f" with no following file, which used to
            // throw ArrayIndexOutOfBoundsException instead of printing usage.
            if (args.length < 4)
            {
                out.println("Missing sstable list file after -f");
                out.println(USAGE);
                System.exit(1);
            }
            fileNames = Files.readAllLines(Paths.get(args[3]), Charset.defaultCharset());
        }
        else
        {
            fileNames = Arrays.asList(args).subList(2, args.length);
        }

        for (String fname: fileNames)
        {
            Descriptor descriptor = Descriptor.fromFilename(fname);
            if (descriptor.version.hasRepairedAt)
            {
                if (setIsRepaired)
                {
                    // Use the data file's mtime as the repair timestamp.
                    FileTime f = Files.getLastModifiedTime(new File(descriptor.filenameFor(Component.DATA)).toPath());
                    descriptor.getMetadataSerializer().mutateRepairedAt(descriptor, f.toMillis());
                }
                else
                {
                    descriptor.getMetadataSerializer().mutateRepairedAt(descriptor, ActiveRepairService.UNREPAIRED_SSTABLE);
                }
            }
            else
            {
                // Older sstable formats cannot carry repairedAt; tell the operator how to fix.
                System.err.println("SSTable " + fname + " does not have repaired property, run upgradesstables");
            }
        }
    }
}
guanxi55nba/db-improvement
src/java/org/apache/cassandra/tools/SSTableRepairedAtSetter.java
Java
apache-2.0
3,787
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.flowable.app.engine.impl.cmd; import org.flowable.app.api.repository.AppDefinition; import org.flowable.app.api.repository.AppModel; import org.flowable.app.engine.impl.deployer.AppDeploymentManager; import org.flowable.app.engine.impl.util.CommandContextUtil; import org.flowable.common.engine.api.FlowableIllegalArgumentException; import org.flowable.common.engine.impl.interceptor.Command; import org.flowable.common.engine.impl.interceptor.CommandContext; /** * @author Tijs Rademakers */ public class GetAppModelCmd implements Command<AppModel> { protected String appDefinitionId; public GetAppModelCmd(String appDefinitionId) { this.appDefinitionId = appDefinitionId; } @Override public AppModel execute(CommandContext commandContext) { if (appDefinitionId == null) { throw new FlowableIllegalArgumentException("appDefinitionId is null"); } AppDeploymentManager deploymentManager = CommandContextUtil.getAppEngineConfiguration(commandContext).getDeploymentManager(); AppDefinition appDefinition = deploymentManager.findDeployedAppDefinitionById(appDefinitionId); if (appDefinition != null) { return deploymentManager.resolveAppDefinition(appDefinition).getAppModel(); } return null; } }
dbmalkovsky/flowable-engine
modules/flowable-app-engine/src/main/java/org/flowable/app/engine/impl/cmd/GetAppModelCmd.java
Java
apache-2.0
1,894
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.ignite.internal.processors.query; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.internal.IgniteEx; /** * Check query history metrics from client node. */ public class SqlQueryHistoryFromClientSelfTest extends SqlQueryHistorySelfTest { private int idx; /** {@inheritDoc} */ @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception { IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName); if (idx++ == 2) cfg.setClientMode(true); return cfg; } /** {@inheritDoc} */ @Override protected IgniteEx queryNode() { IgniteEx node = grid(2); assertTrue(node.context().clientNode()); return node; } /** {@inheritDoc} */ @Override protected void startTestGrid() throws Exception { startGrids(3); } }
shroman/ignite
modules/indexing/src/test/java/org/apache/ignite/internal/processors/query/SqlQueryHistoryFromClientSelfTest.java
Java
apache-2.0
1,732
var q = require('q') var apigeetool = require('apigeetool') var async = require('async') var gutil = require('gulp-util') function createApp(app){ var defer = q.defer() var sdk = apigeetool.getPromiseSDK() var opts=baseopts() opts.name = app.name opts.apiProducts = app.apiProducts opts.email = app.email opts.callback = app.callback sdk.createApp(opts) .then(function(appresponse){ defer.resolve(appresponse) },function(err){ defer.reject(err) }) return defer.promise } function createApps(app,cb){ var sdk = apigeetool.getPromiseSDK() var opts=baseopts() opts.name = app.name opts.apiProducts = app.apiProducts opts.email = app.email opts.callback = app.callback cb(null,sdk.createApp(opts)) } function deleteApps(app,cb){ var sdk = apigeetool.getPromiseSDK() var opts=baseopts() opts.name=app.name opts.email = app.email cb(null,sdk.deleteApp(opts)) } function deleteProducts(prod,cb) { var sdk = apigeetool.getPromiseSDK() var opts = baseopts() opts.productName = prod.name, cb(null,sdk.deleteProduct(opts)) } function createProducts (prod,cb) { var sdk = apigeetool.getPromiseSDK() var opts = baseopts() prod.productName = prod.name, opts.productDesc = prod.displayName var proxies = '' for(var p in prod.proxies) proxies += prod.proxies[p] +',' opts.proxies = proxies var env = '' for(var e in prod.environments) env += prod.environments[e] + ',' opts.environments = env var scopes = '' for(var s in prod.scopes) scopes += prod.scopes[s] + ',' opts.scopes =s opts.productName = prod.name cb(null,sdk.createProduct(opts)) } function createDevelopers (dev,cb) { var sdk = apigeetool.getPromiseSDK() var opts = baseopts() for(k in dev) opts[k]=dev[k] console.log(opts) cb(null,sdk.createDeveloper(opts)) } function deleteDevelopers (dev,cb) { var sdk = apigeetool.getPromiseSDK() var opts = baseopts() opts.email = dev.email cb(null,sdk.deleteDeveloper(opts)) } function deleteApis(it,cb){ var sdk = apigeetool.getPromiseSDK() var opts = baseopts() opts.directory = it.dir opts.api = it.proxy 
console.log('undeploying ' + opts.api) sdk.undeploy(opts) .then(function(){ console.log('undeployed ' + opts.api) return sdk.delete(opts) },function(err){ console.log(err) return sdk.delete(opts) }) .then(function(){ console.log('deleted ' + opts.api) cb(null, 'done') },function(err){ console.log('delete failed ' + opts.api) cb(err) }) } function deployApis(it,cb) { var sdk = apigeetool.getPromiseSDK() var opts = baseopts() opts.directory = it.dir opts.api = it.proxy cb(null, sdk.deployProxy(opts)) } function createCaches(c,cb){ var sdk = apigeetool.getPromiseSDK() var opts = baseopts() opts.cache = c.name cb(null,sdk.createcache(opts)) } function deleteCaches(c,cb){ var sdk = apigeetool.getPromiseSDK() var opts = baseopts() console.log('deleting cache ' + c.name) opts.cache = c.name console.log(opts) cb(null,sdk.deletecache(opts)) } function run(arr, func){ var defer=q.defer(); async.mapSeries(arr,function(c,cb){ func(c,cb) },function(err,results){ if(err){ console.log(err) defer.reject(err) } q.all(results) .then(function(){ console.log('done') defer.resolve() },function(err){ console.log(err) defer.reject(err) }) }) return defer.promise } function baseopts () { var opts = { organization: gutil.env.org, token: gutil.env.token, environments: gutil.env.env, environment: gutil.env.env, debug: gutil.env.debug , usergrid_org: gutil.env.ug_org, usergrid_app: gutil.env.ug_app, usergrid_client_id: gutil.env.ug_client_id, usergrid_secret: gutil.env.ug_secret } return opts } module.exports = { run:run, createCaches: createCaches, deleteCaches: deleteCaches, deployApis: deployApis, deleteApis: deleteApis, createProducts: createProducts, createDevelopers: createDevelopers, createApp: createApp, createApps: createApps, deleteProducts: deleteProducts, deleteDevelopers: deleteDevelopers, deleteApps: deleteApps }
rupamrai/openbank
lib/edge.js
JavaScript
apache-2.0
4,712
// +build linux /* Copyright 2014 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package hostutil import ( "fmt" "os" "path" "path/filepath" "strings" "syscall" "golang.org/x/sys/unix" "k8s.io/klog/v2" "k8s.io/utils/mount" utilpath "k8s.io/utils/path" ) const ( // Location of the mountinfo file procMountInfoPath = "/proc/self/mountinfo" ) // HostUtil implements HostUtils for Linux platforms. type HostUtil struct { } // NewHostUtil returns a struct that implements the HostUtils interface on // linux platforms func NewHostUtil() *HostUtil { return &HostUtil{} } // DeviceOpened checks if block device in use by calling Open with O_EXCL flag. // If pathname is not a device, log and return false with nil error. // If open returns errno EBUSY, return true with nil error. // If open returns nil, return false with nil error. // Otherwise, return false with error func (hu *HostUtil) DeviceOpened(pathname string) (bool, error) { return ExclusiveOpenFailsOnDevice(pathname) } // PathIsDevice uses FileInfo returned from os.Stat to check if path refers // to a device. 
func (hu *HostUtil) PathIsDevice(pathname string) (bool, error) { pathType, err := hu.GetFileType(pathname) isDevice := pathType == FileTypeCharDev || pathType == FileTypeBlockDev return isDevice, err } // ExclusiveOpenFailsOnDevice is shared with NsEnterMounter func ExclusiveOpenFailsOnDevice(pathname string) (bool, error) { var isDevice bool finfo, err := os.Stat(pathname) if os.IsNotExist(err) { isDevice = false } // err in call to os.Stat if err != nil { return false, fmt.Errorf( "PathIsDevice failed for path %q: %v", pathname, err) } // path refers to a device if finfo.Mode()&os.ModeDevice != 0 { isDevice = true } if !isDevice { klog.Errorf("Path %q is not referring to a device.", pathname) return false, nil } fd, errno := unix.Open(pathname, unix.O_RDONLY|unix.O_EXCL|unix.O_CLOEXEC, 0) // If the device is in use, open will return an invalid fd. // When this happens, it is expected that Close will fail and throw an error. defer unix.Close(fd) if errno == nil { // device not in use return false, nil } else if errno == unix.EBUSY { // device is in use return true, nil } // error during call to Open return false, errno } // GetDeviceNameFromMount given a mount point, find the device name from its global mount point func (hu *HostUtil) GetDeviceNameFromMount(mounter mount.Interface, mountPath, pluginMountDir string) (string, error) { return getDeviceNameFromMount(mounter, mountPath, pluginMountDir) } // getDeviceNameFromMountLinux find the device name from /proc/mounts in which // the mount path reference should match the given plugin mount directory. 
In case no mount path reference // matches, returns the volume name taken from its given mountPath func getDeviceNameFromMount(mounter mount.Interface, mountPath, pluginMountDir string) (string, error) { refs, err := mounter.GetMountRefs(mountPath) if err != nil { klog.V(4).Infof("GetMountRefs failed for mount path %q: %v", mountPath, err) return "", err } if len(refs) == 0 { klog.V(4).Infof("Directory %s is not mounted", mountPath) return "", fmt.Errorf("directory %s is not mounted", mountPath) } for _, ref := range refs { if strings.HasPrefix(ref, pluginMountDir) { volumeID, err := filepath.Rel(pluginMountDir, ref) if err != nil { klog.Errorf("Failed to get volume id from mount %s - %v", mountPath, err) return "", err } return volumeID, nil } } return path.Base(mountPath), nil } // MakeRShared checks that given path is on a mount with 'rshared' mount // propagation. If not, it bind-mounts the path as rshared. func (hu *HostUtil) MakeRShared(path string) error { return DoMakeRShared(path, procMountInfoPath) } // GetFileType checks for file/directory/socket/block/character devices. func (hu *HostUtil) GetFileType(pathname string) (FileType, error) { return getFileType(pathname) } // PathExists tests if the given path already exists // Error is returned on any other error than "file not found". func (hu *HostUtil) PathExists(pathname string) (bool, error) { return utilpath.Exists(utilpath.CheckFollowSymlink, pathname) } // EvalHostSymlinks returns the path name after evaluating symlinks. // TODO once the nsenter implementation is removed, this method can be removed // from the interface and filepath.EvalSymlinks used directly func (hu *HostUtil) EvalHostSymlinks(pathname string) (string, error) { return filepath.EvalSymlinks(pathname) } // isShared returns true, if given path is on a mount point that has shared // mount propagation. 
func isShared(mount string, mountInfoPath string) (bool, error) { info, err := findMountInfo(mount, mountInfoPath) if err != nil { return false, err } // parse optional parameters for _, opt := range info.OptionalFields { if strings.HasPrefix(opt, "shared:") { return true, nil } } return false, nil } func findMountInfo(path, mountInfoPath string) (mount.MountInfo, error) { infos, err := mount.ParseMountInfo(mountInfoPath) if err != nil { return mount.MountInfo{}, err } // process /proc/xxx/mountinfo in backward order and find the first mount // point that is prefix of 'path' - that's the mount where path resides var info *mount.MountInfo for i := len(infos) - 1; i >= 0; i-- { if mount.PathWithinBase(path, infos[i].MountPoint) { info = &infos[i] break } } if info == nil { return mount.MountInfo{}, fmt.Errorf("cannot find mount point for %q", path) } return *info, nil } // DoMakeRShared is common implementation of MakeRShared on Linux. It checks if // path is shared and bind-mounts it as rshared if needed. mountCmd and // mountArgs are expected to contain mount-like command, DoMakeRShared will add // '--bind <path> <path>' and '--make-rshared <path>' to mountArgs. func DoMakeRShared(path string, mountInfoFilename string) error { shared, err := isShared(path, mountInfoFilename) if err != nil { return err } if shared { klog.V(4).Infof("Directory %s is already on a shared mount", path) return nil } klog.V(2).Infof("Bind-mounting %q with shared mount propagation", path) // mount --bind /var/lib/kubelet /var/lib/kubelet if err := syscall.Mount(path, path, "" /*fstype*/, syscall.MS_BIND, "" /*data*/); err != nil { return fmt.Errorf("failed to bind-mount %s: %v", path, err) } // mount --make-rshared /var/lib/kubelet if err := syscall.Mount(path, path, "" /*fstype*/, syscall.MS_SHARED|syscall.MS_REC, "" /*data*/); err != nil { return fmt.Errorf("failed to make %s rshared: %v", path, err) } return nil } // GetSELinux is common implementation of GetSELinuxSupport on Linux. 
func GetSELinux(path string, mountInfoFilename string) (bool, error) { info, err := findMountInfo(path, mountInfoFilename) if err != nil { return false, err } // "seclabel" can be both in mount options and super options. for _, opt := range info.SuperOptions { if opt == "seclabel" { return true, nil } } for _, opt := range info.MountOptions { if opt == "seclabel" { return true, nil } } return false, nil } // GetSELinuxSupport returns true if given path is on a mount that supports // SELinux. func (hu *HostUtil) GetSELinuxSupport(pathname string) (bool, error) { return GetSELinux(pathname, procMountInfoPath) } // GetOwner returns the integer ID for the user and group of the given path func (hu *HostUtil) GetOwner(pathname string) (int64, int64, error) { realpath, err := filepath.EvalSymlinks(pathname) if err != nil { return -1, -1, err } return GetOwnerLinux(realpath) } // GetMode returns permissions of the path. func (hu *HostUtil) GetMode(pathname string) (os.FileMode, error) { return GetModeLinux(pathname) } // GetOwnerLinux is shared between Linux and NsEnterMounter // pathname must already be evaluated for symlinks func GetOwnerLinux(pathname string) (int64, int64, error) { info, err := os.Stat(pathname) if err != nil { return -1, -1, err } stat := info.Sys().(*syscall.Stat_t) return int64(stat.Uid), int64(stat.Gid), nil } // GetModeLinux is shared between Linux and NsEnterMounter func GetModeLinux(pathname string) (os.FileMode, error) { info, err := os.Stat(pathname) if err != nil { return 0, err } return info.Mode(), nil }
maciaszczykm/kubernetes
pkg/volume/util/hostutil/hostutil_linux.go
GO
apache-2.0
8,822
<?php /* * @version $Id$ ------------------------------------------------------------------------- GLPI - Gestionnaire Libre de Parc Informatique Copyright (C) 2015 Teclib'. http://glpi-project.org based on GLPI - Gestionnaire Libre de Parc Informatique Copyright (C) 2003-2014 by the INDEPNET Development Team. ------------------------------------------------------------------------- LICENSE This file is part of GLPI. GLPI is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. GLPI is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with GLPI. If not, see <http://www.gnu.org/licenses/>. -------------------------------------------------------------------------- */ /** @file * @brief */ if (!defined('GLPI_ROOT')) { die("Sorry. 
You can't access directly to this file"); } /** * KnowbaseItemTranslation Class * * @since version 0.85 **/ class KnowbaseItemTranslation extends CommonDBChild { static public $itemtype = 'KnowbaseItem'; static public $items_id = 'knowbaseitems_id'; public $dohistory = true; static $rightname = 'knowbase'; static function getTypeName($nb = 0) { return _n('Translation', 'Translations', $nb); } function getForbiddenStandardMassiveAction() { $forbidden = parent::getForbiddenStandardMassiveAction(); $forbidden[] = 'update'; return $forbidden; } /** * @see CommonGLPI::getTabNameForItem() **/ function getTabNameForItem(CommonGLPI $item, $withtemplate=0) { if (self::canBeTranslated($item)) { $nb = 0; if ($_SESSION['glpishow_count_on_tabs']) { $nb = self::getNumberOfTranslationsForItem($item); } return self::createTabEntry(self::getTypeName(Session::getPluralNumber()), $nb); } return ''; } /** * @param $item CommonGLPI object * @param $tabnum (default 1) * @param $withtemplate (default 0) **/ static function displayTabContentForItem(CommonGLPI $item, $tabnum=1, $withtemplate=0) { if (self::canBeTranslated($item)) { self::showTranslations($item); } return true; } /** * Display all translated field for an KnowbaseItem * * @param $item a KnowbaseItem item * * @return true; **/ static function showTranslations(KnowbaseItem $item) { global $DB, $CFG_GLPI; $canedit = $item->can($item->getID(), UPDATE); $rand = mt_rand(); if ($canedit) { echo "<div id='viewtranslation" . $item->getID() . "$rand'></div>\n"; echo "<script type='text/javascript' >\n"; echo "function addTranslation" . $item->getID() . "$rand() {\n"; $params = array('type' => __CLASS__, 'parenttype' => get_class($item), 'knowbaseitems_id' => $item->fields['id'], 'id' => -1); Ajax::updateItemJsCode("viewtranslation" . $item->getID() . "$rand", $CFG_GLPI["root_doc"]."/ajax/viewsubitem.php", $params); echo "};"; echo "</script>\n"; echo "<div class='center'>". 
"<a class='vsubmit' href='javascript:addTranslation".$item->getID()."$rand();'>". __('Add a new translation')."</a></div><br>"; } $obj = new self; $found = $obj->find("`knowbaseitems_id`='".$item->getID()."'", "`language` ASC"); if (count($found) > 0) { if ($canedit) { Html::openMassiveActionsForm('mass'.__CLASS__.$rand); $massiveactionparams = array('container' => 'mass'.__CLASS__.$rand); Html::showMassiveActions($massiveactionparams); } echo "<div class='center'>"; echo "<table class='tab_cadre_fixehov'><tr class='tab_bg_2'>"; echo "<th colspan='4'>".__("List of translations")."</th></tr>"; if ($canedit) { echo "<th width='10'>"; Html::checkAllAsCheckbox('mass'.__CLASS__.$rand); echo "</th>"; } echo "<th>".__("Language")."</th>"; echo "<th>".__("Subject")."</th>"; foreach ($found as $data) { echo "<tr class='tab_bg_1' ".($canedit ? "style='cursor:pointer' onClick=\"viewEditTranslation".$data['id']."$rand();\"" : '') . ">"; if ($canedit) { echo "<td class='center'>"; Html::showMassiveActionCheckBox(__CLASS__, $data["id"]); echo "</td>"; } echo "<td>"; if ($canedit) { echo "\n<script type='text/javascript' >\n"; echo "function viewEditTranslation". $data["id"]."$rand() {\n"; $params = array('type' => __CLASS__, 'parenttype' => get_class($item), 'knowbaseitems_id' => $item->getID(), 'id' => $data["id"]); Ajax::updateItemJsCode("viewtranslation" . $item->getID() . "$rand", $CFG_GLPI["root_doc"]."/ajax/viewsubitem.php", $params); echo "};"; echo "</script>\n"; } echo Dropdown::getLanguageName($data['language']); echo "</td><td>"; echo $data["name"]; if (isset($data['answer']) && !empty($data['answer'])) { echo "&nbsp;"; Html::showToolTip(Toolbox::unclean_html_cross_side_scripting_deep($data['answer'])); } echo "</td></tr>"; } echo "</table>"; if ($canedit) { $massiveactionparams['ontop'] = false; Html::showMassiveActions($massiveactionparams); Html::closeForm(); } } else { echo "<table class='tab_cadre_fixe'><tr class='tab_bg_2'>"; echo "<th class='b'>" . 
__("No translation found")."</th></tr></table>"; } return true; } /** * Display translation form * * @param $ID field (default -1) * @param $options array */ function showForm($ID=-1, $options=array()) { global $CFG_GLPI; if (isset($options['parent']) && !empty($options['parent'])) { $item = $options['parent']; } if ($ID > 0) { $this->check($ID, READ); } else { // Create item $options['itemtype'] = get_class($item); $options['knowbaseitems_id'] = $item->getID(); $this->check(-1 , CREATE, $options); } Html::initEditorSystem('answer'); $this->showFormHeader($options); echo "<tr class='tab_bg_1'>"; echo "<td>".__('Language')."&nbsp;:</td>"; echo "<td>"; echo "<input type='hidden' name='knowbaseitems_id' value='".$item->getID()."'>"; if ($ID > 0) { echo Dropdown::getLanguageName($this->fields['language']); } else { Dropdown::showLanguages("language", array('display_none' => false, 'value' => $_SESSION['glpilanguage'], 'used' => self::getAlreadyTranslatedForItem($item))); } echo "</td><td colspan='2'>&nbsp;</td></tr>"; echo "<tr class='tab_bg_1'>"; echo "<td>".__('Subject')."</td>"; echo "<td colspan='3'>"; echo "<textarea cols='100' rows='1' name='name'>".$this->fields["name"]."</textarea>"; echo "</td></tr>\n"; echo "<tr class='tab_bg_1'>"; echo "<td>".__('Content')."</td>"; echo "<td colspan='3'>"; echo "<textarea cols='100' rows='30' id='answer' name='answer'>".$this->fields["answer"]; echo "</textarea>"; echo "</td></tr>\n"; $this->showFormButtons($options); return true; } /** * Get a translation for a value * * @param $item item to translate * @param $field field to return (default 'name') * * @return the field translated if a translation is available, or the original field if not **/ static function getTranslatedValue(KnowbaseItem $item, $field="name") { global $DB; $obj = new self; $found = $obj->find("`knowbaseitems_id` = '".$item->getID(). 
"' AND `language` = '".$_SESSION['glpilanguage']."'"); if ((count($found) > 0) && in_array($field, array('name', 'answer'))) { $first = array_shift($found); return $first[$field]; } return $item->fields[$field]; } /** * Is kb item translation functionnality active * * @return true if active, false if not **/ static function isKbTranslationActive() { global $CFG_GLPI; return $CFG_GLPI['translate_kb']; } /** * Check if an item can be translated * It be translated if translation if globally on and item is an instance of CommonDropdown * or CommonTreeDropdown and if translation is enabled for this class * * @param item the item to check * * @return true if item can be translated, false otherwise **/ static function canBeTranslated(CommonGLPI $item) { return (self::isKbTranslationActive() && $item instanceof KnowbaseItem); } /** * Return the number of translations for an item * * @param item * * @return the number of translations for this item **/ static function getNumberOfTranslationsForItem($item) { return countElementsInTable(getTableForItemType(__CLASS__), "`knowbaseitems_id`='".$item->getID()."'"); } /** * Get already translated languages for item * * @param item * * @return array of already translated languages **/ static function getAlreadyTranslatedForItem($item) { global $DB; $tab = array(); foreach ($DB->request(getTableForItemType(__CLASS__), "`knowbaseitems_id`='".$item->getID()."'") as $data) { $tab[$data['language']] = $data['language']; } return $tab; } } ?>
ojedawinder/glpi_seducla
inc/knowbaseitemtranslation.class.php
PHP
apache-2.0
10,745
// bslmf_istriviallydefaultconstructible.t.cpp -*-C++-*- #include <bslmf_istriviallydefaultconstructible.h> #include <bslmf_nestedtraitdeclaration.h> #include <bsls_asserttest.h> #include <bsls_bsltestutil.h> #include <stdio.h> // 'printf' #include <stdlib.h> // 'atoi' using namespace BloombergLP; //============================================================================= // TEST PLAN //----------------------------------------------------------------------------- // Overview // -------- // The component under test defines a meta-function, // 'bsl::is_trivially_default_constructible', that determines whether a // template parameter type is trivially default-constructible. By default, the // meta-function supports a restricted set of type categories, but can be // extended to support other types through either template specialization or // use of the 'BSLMF_NESTED_TRAIT_DECLARATION' macro. // // Thus, we need to ensure that the natively-supported types are correctly // identified by the meta-function by testing the meta-function with each of // the type categories. We also need to verify that the meta-function can be // correctly extended to support other types through either of the two // supported mechanisms. // // ---------------------------------------------------------------------------- // PUBLIC CLASS DATA // [ 1] bsl::is_trivially_default_constructible::value // // ---------------------------------------------------------------------------- // [ 3] USAGE EXAMPLE // [ 2] EXTENDING bsl::is_trivially_default_constructible //============================================================================= // STANDARD BDE ASSERT TEST MACRO //----------------------------------------------------------------------------- // NOTE: THIS IS A LOW-LEVEL COMPONENT AND MAY NOT USE ANY C++ LIBRARY // FUNCTIONS, INCLUDING IOSTREAMS. 
static int testStatus = 0; void aSsErT(bool b, const char *s, int i) { if (b) { printf("Error " __FILE__ "(%d): %s (failed)\n", i, s); if (testStatus >= 0 && testStatus <= 100) ++testStatus; } } # define ASSERT(X) { aSsErT(!(X), #X, __LINE__); } //============================================================================= // STANDARD BDE TEST DRIVER MACROS //----------------------------------------------------------------------------- #define LOOP_ASSERT BSLS_BSLTESTUTIL_LOOP_ASSERT #define LOOP2_ASSERT BSLS_BSLTESTUTIL_LOOP2_ASSERT #define LOOP3_ASSERT BSLS_BSLTESTUTIL_LOOP3_ASSERT #define LOOP4_ASSERT BSLS_BSLTESTUTIL_LOOP4_ASSERT #define LOOP5_ASSERT BSLS_BSLTESTUTIL_LOOP5_ASSERT #define LOOP6_ASSERT BSLS_BSLTESTUTIL_LOOP6_ASSERT #define Q BSLS_BSLTESTUTIL_Q // Quote identifier literally. #define P BSLS_BSLTESTUTIL_P // Print identifier and value. #define P_ BSLS_BSLTESTUTIL_P_ // P(X) without '\n'. #define T_ BSLS_BSLTESTUTIL_T_ // Print a tab (w/o newline). #define L_ BSLS_BSLTESTUTIL_L_ // current Line number //============================================================================= // SEMI-STANDARD NEGATIVE-TESTING MACROS //----------------------------------------------------------------------------- #define ASSERT_SAFE_PASS(EXPR) BSLS_ASSERTTEST_ASSERT_SAFE_PASS(EXPR) #define ASSERT_SAFE_FAIL(EXPR) BSLS_ASSERTTEST_ASSERT_SAFE_FAIL(EXPR) #define ASSERT_PASS(EXPR) BSLS_ASSERTTEST_ASSERT_PASS(EXPR) #define ASSERT_FAIL(EXPR) BSLS_ASSERTTEST_ASSERT_FAIL(EXPR) #define ASSERT_OPT_PASS(EXPR) BSLS_ASSERTTEST_ASSERT_OPT_PASS(EXPR) #define ASSERT_OPT_FAIL(EXPR) BSLS_ASSERTTEST_ASSERT_OPT_FAIL(EXPR) //============================================================================= // GLOBAL TYPEDEFS/CONSTANTS FOR TESTING //----------------------------------------------------------------------------- namespace { class MyTriviallyDefaultConstructibleType { }; struct MyNonTriviallyDefaultConstructibleType { int d_data; MyNonTriviallyDefaultConstructibleType() : 
d_data(1) { } }; } // close unnamed namespace namespace bsl { template <> struct is_trivially_default_constructible< MyTriviallyDefaultConstructibleType> : bsl::true_type { // This template specialization for // 'is_trivially_default_constructible' indicates that // 'MyTriviallyDefaultConstructibleType' is a trivially // default-constructible type. }; } // close namespace bsl namespace { struct UserDefinedTdcTestType { // This user-defined type, which is marked to be trivially // default-constructible using template specialization (below), is used for // testing. }; struct UserDefinedTdcTestType2 { // This user-defined type, which is marked to be trivially // default-constructible using the 'BSLMF_NESTED_TRAIT_DECLARATION' macro, // is used for testing. BSLMF_NESTED_TRAIT_DECLARATION(UserDefinedTdcTestType2, bsl::is_trivially_default_constructible); }; struct UserDefinedNonTdcTestType { // This user-defined type, which is not marked to be trivially // default-constructible, is used for testing. }; enum EnumTestType { // This 'enum' type is used for testing. }; typedef int *PointerTestType; // This pointer type is used for testing. typedef int& ReferenceTestType; // This reference type is used for testing. typedef int (UserDefinedNonTdcTestType::*MethodPtrTestType) (); // This pointer to non-static function member type is used for testing. } // close unnamed namespace namespace bsl { template <> struct is_trivially_default_constructible< UserDefinedTdcTestType> : bsl::true_type { }; } // close namespace bsl //============================================================================= // MAIN PROGRAM //----------------------------------------------------------------------------- int main(int argc, char *argv[]) { int test = argc > 1 ? 
atoi(argv[1]) : 0; int verbose = argc > 2; int veryVerbose = argc > 3; int veryVeryVerbose = argc > 4; int veryVeryVeryVerbose = argc > 5; (void) veryVerbose; (void) veryVeryVerbose; (void) veryVeryVeryVerbose; printf("TEST " __FILE__ " CASE %d\n", test); switch (test) { case 0: case 3: { // -------------------------------------------------------------------- // USAGE EXAMPLE // // Concerns: //: 1 The usage example provided in the component header file compiles, //: links, and runs as shown. // // Plan: //: 1 Incorporate usage example from header into test driver, remove //: leading comment characters, and replace 'assert' with 'ASSERT'. //: (C-1) // // Testing: // USAGE EXAMPLE // -------------------------------------------------------------------- if (verbose) printf("\nUSAGE EXAMPLE" "\n=============\n"); ///Usage ///----- // In this section we show intended use of this component. // ///Example 1: Verify Whether Types are Trivially Default-Constructible ///- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Suppose that we want to assert whether a type is trivially // default-constructible. // // First, we define a set of types to evaluate: //.. typedef int MyFundamentalType; typedef int& MyFundamentalTypeReference; // // class MyTriviallyDefaultConstructibleType { // }; // // struct MyNonTriviallyDefaultConstructibleType { // // int d_data; // // MyNonTriviallyDefaultConstructibleType() // : d_data(1) // { // } // }; //.. // Then, since user-defined types cannot be automatically evaluated by // 'is_trivially_default_constructible', we define a template specialization to // specify that 'MyTriviallyDefaultConstructibleType' is trivially // default-constructible: //.. 
// namespace bsl { // // template <> // struct is_trivially_default_constructible< // MyTriviallyDefaultConstructibleType> : bsl::true_type { // // This template specialization for // // 'is_trivially_default_constructible' indicates that // // 'MyTriviallyDefaultConstructibleType' is a trivially // // default-constructible type. // }; // // } // close namespace bsl //.. // Now, we verify whether each type is trivially default-constructible using // 'bsl::is_trivially_default_constructible': //.. ASSERT(true == bsl::is_trivially_default_constructible<MyFundamentalType>::value); ASSERT(false == bsl::is_trivially_default_constructible< MyFundamentalTypeReference>::value); ASSERT(true == bsl::is_trivially_default_constructible< MyTriviallyDefaultConstructibleType>::value); ASSERT(false == bsl::is_trivially_default_constructible< MyNonTriviallyDefaultConstructibleType>::value); //.. } break; case 2: { // -------------------------------------------------------------------- // Extending 'bsl::is_trivially_default_constructible' // Ensure the 'bsl::is_trivially_default_constructible' meta-function // returns the correct value for types explicitly specified to be // trivially default-constructible. // // Concerns: //: 1 The meta-function returns 'false' for general user-defined types. //: //: 2 The meta-function returns 'true' for a user-defined type if a //: specialization for 'bsl::is_trivially_default_constructible' on //: that type is defined to inherit from 'bsl::true_type'. //: //: 3 The meta-function returns 'true' for a user-defined type that //: specifies it has the trait using the //: 'BSLMF_NESTED_TRAIT_DECLARATION' macro. // // Plan: // Verify that 'bsl::is_trivially_default_constructible' returns // the correct value for each type listed in the concerns. 
// // Testing: // Extending bsl::is_trivially_default_constructible // -------------------------------------------------------------------- if (verbose) printf("\nExtending 'bsl::is_trivially_default_constructible'\n" "\n===================================================\n"); // C-1 ASSERT(!bsl::is_trivially_default_constructible< UserDefinedNonTdcTestType>::value); // C-2 ASSERT( bsl::is_trivially_default_constructible< UserDefinedTdcTestType>::value); // C-3 ASSERT( bsl::is_trivially_default_constructible< UserDefinedTdcTestType2>::value); } break; case 1: { // -------------------------------------------------------------------- // 'bsl::is_trivially_default_constructible::value' // Ensure the 'bsl::is_trivially_default_constructible' meta-function // returns the correct value for intrinsically supported types. // // Concerns: //: 1 The meta-function returns 'false' for reference types. //: //: 2 The meta-function returns 'true' for fundamental types. //: //: 3 The meta-function returns 'true' for enum types. //: //: 4 The meta-function returns 'true' for pointer types. //: //: 5 The meta-function returns 'true' for pointer to member types. // // Plan: // Verify that 'bsl::is_trivially_default_constructible' returns the // correct value for each type category listed in the concerns. 
// // Testing: // bsl::is_trivially_default_constructible::value // -------------------------------------------------------------------- if (verbose) printf("\n'bsl::is_trivially_default_constructible::value'\n" "\n================================================\n"); // C-1 ASSERT(!bsl::is_trivially_default_constructible< ReferenceTestType>::value); // C-2 ASSERT( bsl::is_trivially_default_constructible<int>::value); ASSERT( bsl::is_trivially_default_constructible<char>::value); ASSERT( bsl::is_trivially_default_constructible<void>::value); // C-3 ASSERT( bsl::is_trivially_default_constructible<EnumTestType>::value); // C-4 ASSERT( bsl::is_trivially_default_constructible< PointerTestType>::value); // C-5 ASSERT( bsl::is_trivially_default_constructible< MethodPtrTestType>::value); } break; default: { fprintf(stderr, "WARNING: CASE `%d' NOT FOUND.\n", test); testStatus = -1; } } if (testStatus > 0) { fprintf(stderr, "Error, non-zero test status = %d.\n", testStatus); } return testStatus; } // ---------------------------------------------------------------------------- // Copyright 2013 Bloomberg Finance L.P. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // ----------------------------- END-OF-FILE ----------------------------------
idispatch/bde
groups/bsl/bslmf/bslmf_istriviallydefaultconstructible.t.cpp
C++
apache-2.0
14,203
package com.google.api.ads.adwords.jaxws.v201502.cm; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlType; /** * * Operations for adding/removing labels from Campaign. * * * <p>Java class for CampaignLabelOperation complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType name="CampaignLabelOperation"> * &lt;complexContent> * &lt;extension base="{https://adwords.google.com/api/adwords/cm/v201502}Operation"> * &lt;sequence> * &lt;element name="operand" type="{https://adwords.google.com/api/adwords/cm/v201502}CampaignLabel" minOccurs="0"/> * &lt;/sequence> * &lt;/extension> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "CampaignLabelOperation", propOrder = { "operand" }) public class CampaignLabelOperation extends Operation { protected CampaignLabel operand; /** * Gets the value of the operand property. * * @return * possible object is * {@link CampaignLabel } * */ public CampaignLabel getOperand() { return operand; } /** * Sets the value of the operand property. * * @param value * allowed object is * {@link CampaignLabel } * */ public void setOperand(CampaignLabel value) { this.operand = value; } }
stoksey69/googleads-java-lib
modules/adwords_appengine/src/main/java/com/google/api/ads/adwords/jaxws/v201502/cm/CampaignLabelOperation.java
Java
apache-2.0
1,590
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use std::io; fn main() { let stdout = &io::stdout() as &io::WriterUtil; stdout.write_line("Hello!"); }
j16r/rust
src/test/run-pass/issue-4333.rs
Rust
apache-2.0
579
package org.onepf.oms; import junit.framework.Assert; import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; import org.onepf.oms.appstore.SamsungSkuFormatException; import org.onepf.oms.appstore.nokiaUtils.NokiaSkuFormatException; import org.robolectric.RobolectricTestRunner; import org.robolectric.annotation.Config; import java.util.List; /** * Created by krozov on 01.09.14. */ @Config(emulateSdk = 18, manifest = Config.NONE) @RunWith(RobolectricTestRunner.class) public class SkuManagerTest { private static final String STORE_SKU_GOOGLE = "test_sku_google"; private static final String STORE_SKU_SAMSUNG = "2014/test_sku_samsung"; private static final String STORE_SKU_NOKIA = "12451"; private static final String ITEM_SKU = "test_sku"; @Test public void testMapSku() throws Exception { SkuManager sm = SkuManager.getInstance(); sm.mapSku(ITEM_SKU, OpenIabHelper.NAME_GOOGLE, STORE_SKU_GOOGLE); String storeSku = sm.getStoreSku(OpenIabHelper.NAME_GOOGLE, ITEM_SKU); Assert.assertNotNull(storeSku); Assert.assertEquals(STORE_SKU_GOOGLE, storeSku); String sku = sm.getSku(OpenIabHelper.NAME_GOOGLE, STORE_SKU_GOOGLE); Assert.assertNotNull(sku); Assert.assertEquals(ITEM_SKU, sku); List<String> googlePlaySkus = sm.getAllStoreSkus(OpenIabHelper.NAME_GOOGLE); Assert.assertNotNull(googlePlaySkus); Assert.assertEquals(1, googlePlaySkus.size()); Assert.assertNotNull(googlePlaySkus.get(0)); Assert.assertEquals(STORE_SKU_GOOGLE, googlePlaySkus.get(0)); } @Test public void testMapNokiaStoreSku() { SkuManager sm = SkuManager.getInstance(); sm.mapSku(ITEM_SKU, OpenIabHelper.NAME_NOKIA, STORE_SKU_NOKIA); String storeSku = sm.getStoreSku(OpenIabHelper.NAME_NOKIA, ITEM_SKU); Assert.assertNotNull(storeSku); Assert.assertEquals(STORE_SKU_NOKIA, storeSku); String sku = sm.getSku(OpenIabHelper.NAME_NOKIA, STORE_SKU_NOKIA); Assert.assertNotNull(sku); Assert.assertEquals(ITEM_SKU, sku); List<String> samsungAppsSkus = sm.getAllStoreSkus(OpenIabHelper.NAME_NOKIA); 
Assert.assertNotNull(samsungAppsSkus); Assert.assertEquals(1, samsungAppsSkus.size()); Assert.assertNotNull(samsungAppsSkus.get(0)); Assert.assertEquals(STORE_SKU_NOKIA, samsungAppsSkus.get(0)); } @Test public void testMapSamsungSku() { SkuManager sm = SkuManager.getInstance(); sm.mapSku(ITEM_SKU, OpenIabHelper.NAME_SAMSUNG, STORE_SKU_SAMSUNG); String storeSku = sm.getStoreSku(OpenIabHelper.NAME_SAMSUNG, ITEM_SKU); Assert.assertNotNull(storeSku); Assert.assertEquals(STORE_SKU_SAMSUNG, storeSku); String sku = sm.getSku(OpenIabHelper.NAME_SAMSUNG, STORE_SKU_SAMSUNG); Assert.assertNotNull(sku); Assert.assertEquals(ITEM_SKU, sku); List<String> samsungAppsSkus = sm.getAllStoreSkus(OpenIabHelper.NAME_SAMSUNG); Assert.assertNotNull(samsungAppsSkus); Assert.assertEquals(1, samsungAppsSkus.size()); Assert.assertNotNull(samsungAppsSkus.get(0)); Assert.assertEquals(STORE_SKU_SAMSUNG, samsungAppsSkus.get(0)); } @Test(expected = SamsungSkuFormatException.class) public void testIllegalFormatSamsungSKUMapping() { SkuManager sm = SkuManager.getInstance(); sm.mapSku("wrong_sku", OpenIabHelper.NAME_SAMSUNG, "test_group/test_item_id"); } @Test(expected = NokiaSkuFormatException.class) public void testIllegalFormatNokiaSKUMapping() { SkuManager sm = SkuManager.getInstance(); sm.mapSku("wrong_sku", OpenIabHelper.NAME_NOKIA, "test_nokia_store_sku"); } @Test(expected = SkuMappingException.class) public void testMapNullSkuMapping() { final SkuManager sm = SkuManager.getInstance(); sm.mapSku(null, OpenIabHelper.NAME_GOOGLE, STORE_SKU_GOOGLE); } @Test(expected = SkuMappingException.class) public void testMapNullStoreNameMapping() { final SkuManager sm = SkuManager.getInstance(); sm.mapSku(ITEM_SKU, null, STORE_SKU_GOOGLE); } @Test(expected = SkuMappingException.class) public void testMapEmptyStoreSkuMapping() { final SkuManager sm = SkuManager.getInstance(); sm.mapSku(ITEM_SKU, OpenIabHelper.NAME_GOOGLE, ""); } }
ptornhult/OpenIAB
library/src/androidTest/java/org/onepf/oms/SkuManagerTest.java
Java
apache-2.0
4,440
<html lang="or"> <p><span style="font-weight:400">କୁନ୍ତଳା</span></p> </html>
googlei18n/noto-source
test/Oriya/fontdiff-or-20180314.html
HTML
apache-2.0
93
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.doplgangr.secrecy.filesystem; public interface CryptStateListener { void updateProgress(int progress); void setMax(int max); void onFailed(int statCode); void Finished(); }
liangie/chooserTest
app/src/main/java/com/doplgangr/secrecy/filesystem/CryptStateListener.java
Java
apache-2.0
1,014
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Functions to bridge `Distribution`s and `tf.contrib.learn.estimator` APIs.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.contrib.learn.python.learn.estimators.head import _compute_weighted_loss from tensorflow.contrib.learn.python.learn.estimators.head import _RegressionHead from tensorflow.python.framework import ops from tensorflow.python.framework import tensor_util from tensorflow.python.ops import array_ops __all__ = [ "estimator_head_distribution_regression", ] def estimator_head_distribution_regression(make_distribution_fn, label_dimension=1, logits_dimension=None, label_name=None, weight_column_name=None, enable_centered_bias=False, head_name=None): """Creates a `Head` for regression under a generic distribution. Args: make_distribution_fn: Python `callable` which returns a `tf.Distribution` instance created using only logits. label_dimension: Number of regression labels per example. This is the size of the last dimension of the labels `Tensor` (typically, this has shape `[batch_size, label_dimension]`). logits_dimension: Number of logits per example. This is the size of the last dimension of the logits `Tensor` (typically, this has shape `[batch_size, logits_dimension]`). 
Default value: `label_dimension`. label_name: Python `str`, name of the key in label `dict`. Can be `None` if label is a `Tensor` (single headed models). weight_column_name: Python `str` defining feature column name representing weights. It is used to down weight or boost examples during training. It will be multiplied by the loss of the example. enable_centered_bias: Python `bool`. If `True`, estimator will learn a centered bias variable for each class. Rest of the model structure learns the residual after centered bias. head_name: Python `str`, name of the head. Predictions, summary and metrics keys are suffixed by `"/" + head_name` and the default variable scope is `head_name`. Returns: An instance of `Head` for generic regression. """ return _DistributionRegressionHead( make_distribution_fn=make_distribution_fn, label_dimension=label_dimension, logits_dimension=logits_dimension, label_name=label_name, weight_column_name=weight_column_name, enable_centered_bias=enable_centered_bias, head_name=head_name) class _DistributionRegressionHead(_RegressionHead): """Creates a _RegressionHead instance from an arbitrary `Distribution`.""" def __init__(self, make_distribution_fn, label_dimension, logits_dimension=None, label_name=None, weight_column_name=None, enable_centered_bias=False, head_name=None): """`Head` for regression. Args: make_distribution_fn: Python `callable` which returns a `tf.Distribution` instance created using only logits. label_dimension: Number of regression labels per example. This is the size of the last dimension of the labels `Tensor` (typically, this has shape `[batch_size, label_dimension]`). logits_dimension: Number of logits per example. This is the size of the last dimension of the logits `Tensor` (typically, this has shape `[batch_size, logits_dimension]`). Default value: `label_dimension`. label_name: Python `str`, name of the key in label `dict`. Can be `None` if label is a tensor (single headed models). 
weight_column_name: Python `str` defining feature column name representing weights. It is used to down weight or boost examples during training. It will be multiplied by the loss of the example. enable_centered_bias: Python `bool`. If `True`, estimator will learn a centered bias variable for each class. Rest of the model structure learns the residual after centered bias. head_name: Python `str`, name of the head. Predictions, summary and metrics keys are suffixed by `"/" + head_name` and the default variable scope is `head_name`. Raises: TypeError: if `make_distribution_fn` is not `callable`. """ if not callable(make_distribution_fn): raise TypeError("`make_distribution_fn` must be a callable function.") self._distributions = {} self._make_distribution_fn = make_distribution_fn def static_value(x): """Returns the static value of a `Tensor` or `None`.""" return tensor_util.constant_value(ops.convert_to_tensor(x)) def concat_vectors(*args): """Concatenates input vectors, statically if possible.""" args_ = [static_value(x) for x in args] if any(vec is None for vec in args_): return array_ops.concat(args, axis=0) return [val for vec in args_ for val in vec] def loss_fn(labels, logits, weights=None): """Returns the loss of using `logits` to predict `labels`.""" d = self.distribution(logits) labels_batch_shape = labels.shape.with_rank_at_least(1)[:-1] labels_batch_shape = ( labels_batch_shape.as_list() if labels_batch_shape.is_fully_defined() else array_ops.shape(labels)[:-1]) labels = array_ops.reshape( labels, shape=concat_vectors(labels_batch_shape, d.event_shape_tensor())) return _compute_weighted_loss( loss_unweighted=-d.log_prob(labels), weight=weights) def link_fn(logits): """Returns the inverse link function at `logits`.""" # Note: What the API calls a "link function" is really the inverse-link # function, i.e., the "mean". 
d = self.distribution(logits) return d.mean() super(_DistributionRegressionHead, self).__init__( label_dimension=label_dimension, loss_fn=loss_fn, link_fn=link_fn, logits_dimension=logits_dimension, label_name=label_name, weight_column_name=weight_column_name, enable_centered_bias=enable_centered_bias, head_name=head_name) @property def distributions(self): """Returns all distributions created by `DistributionRegressionHead`.""" return self._distributions def distribution(self, logits, name=None): """Retrieves a distribution instance, parameterized by `logits`. Args: logits: `float`-like `Tensor` representing the parameters of the underlying distribution. name: The Python `str` name to given to this op. Default value: "distribution". Returns: distribution: `tf.Distribution` instance parameterized by `logits`. """ with ops.name_scope(name, "distribution", [logits]): d = self._distributions.get(logits, None) if d is None: d = self._make_distribution_fn(logits) self._distributions[logits] = d return d
allenlavoie/tensorflow
tensorflow/contrib/distributions/python/ops/estimator.py
Python
apache-2.0
7,908
/* * Copyright 2004-2009 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.compass.core.engine; /** * Optimizes search engine index data. * * Can control a scheduled optimizer that will run periodically using {@link #start()} and {@link #stop()}. * * @author kimchy */ public interface SearchEngineOptimizer { /** * Starts the given optimizer. Will start a scheduled optimizer if * configured. If not, does nothing. */ void start() throws SearchEngineException; /** * Stops the given optimizer. Will stop the scheduled optimizer if * configured. If not, does nothing. * * <p>Note that if the optimizer is stopped while optimizing, it might take * some time till the optimizer will actually stop. */ void stop() throws SearchEngineException; /** * Returns <code>true</code> if the optimizer is running a scheduled optimizer. */ boolean isRunning(); /** * Optimizes the search engine index if it requires optimization. The optimization will be perfomed on all * sub indexes and based on configuration. For example, the default optimizer will use the configured * <code>maxNumberOfSegments</code> in order to perform the optimization. */ void optimize() throws SearchEngineException; /** * Optimizes all the sub indexes down to the required maximum number of segments. */ void optimize(int maxNumberOfSegments) throws SearchEngineException; /** * Optimizes the sub index does to a configured max number of segments. 
*/ void optimize(String subIndex) throws SearchEngineException; /** * Optimizes a specific sub index down to a required maximum number of segments. */ void optimize(String subIndex, int maxNumberOfSegments) throws SearchEngineException; }
baboune/compass
src/main/src/org/compass/core/engine/SearchEngineOptimizer.java
Java
apache-2.0
2,389
/* Copyright 2014 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package cmd import ( "fmt" "io" "regexp" "strings" "github.com/renstrom/dedent" "github.com/spf13/cobra" "k8s.io/kubernetes/pkg/kubectl" cmdutil "k8s.io/kubernetes/pkg/kubectl/cmd/util" "k8s.io/kubernetes/pkg/kubectl/resource" "k8s.io/kubernetes/pkg/runtime" "k8s.io/kubernetes/pkg/util/validation" ) // ExposeOptions is the start of the data required to perform the operation. As new fields are added, add them here instead of // referencing the cmd.Flags() type ExposeOptions struct { Filenames []string Recursive bool } var ( expose_resources = dedent.Dedent(` pod (po), service (svc), replicationcontroller (rc), deployment (deploy), replicaset (rs) `) expose_long = dedent.Dedent(` Expose a resource as a new Kubernetes service. Looks up a deployment, service, replica set, replication controller or pod by name and uses the selector for that resource as the selector for a new service on the specified port. A deployment or replica set will be exposed as a service only if its selector is convertible to a selector that service supports, i.e. when the selector contains only the matchLabels component. Note that if no port is specified via --port and the exposed resource has multiple ports, all will be re-used by the new service. Also if no labels are specified, the new service will re-use the labels from the resource it exposes. 
	Possible resources include (case insensitive): `) + expose_resources

	// Help examples shown by `kubectl expose --help`.
	expose_example = dedent.Dedent(`
		# Create a service for a replicated nginx, which serves on port 80 and connects to the containers on port 8000.
		kubectl expose rc nginx --port=80 --target-port=8000

		# Create a service for a replication controller identified by type and name specified in "nginx-controller.yaml", which serves on port 80 and connects to the containers on port 8000.
		kubectl expose -f nginx-controller.yaml --port=80 --target-port=8000

		# Create a service for a pod valid-pod, which serves on port 444 with the name "frontend"
		kubectl expose pod valid-pod --port=444 --name=frontend

		# Create a second service based on the above service, exposing the container port 8443 as port 443 with the name "nginx-https"
		kubectl expose service nginx --port=443 --target-port=8443 --name=nginx-https

		# Create a service for a replicated streaming application on port 4100 balancing UDP traffic and named 'video-stream'.
		kubectl expose rc streamer --port=4100 --protocol=udp --name=video-stream

		# Create a service for a replicated nginx using replica set, which serves on port 80 and connects to the containers on port 8000.
		kubectl expose rs nginx --port=80 --target-port=8000

		# Create a service for an nginx deployment, which serves on port 80 and connects to the containers on port 8000.
		kubectl expose deployment nginx --port=80 --target-port=8000`)
)

// NewCmdExposeService builds the cobra command for `kubectl expose`, wiring
// all flags and delegating execution to RunExpose.
func NewCmdExposeService(f *cmdutil.Factory, out io.Writer) *cobra.Command {
	options := &ExposeOptions{}

	// Derive shell-completion valid args (first word of each entry in
	// expose_resources) and their short aliases.
	validArgs, argAliases := []string{}, []string{}
	resources := regexp.MustCompile(`\s*,`).Split(expose_resources, -1)
	for _, r := range resources {
		validArgs = append(validArgs, strings.Fields(r)[0])
		// NOTE(review): recomputed on every iteration over the growing
		// validArgs slice — only the final value matters; confirm intended.
		argAliases = kubectl.ResourceAliases(validArgs)
	}

	cmd := &cobra.Command{
		Use:     "expose (-f FILENAME | TYPE NAME) [--port=port] [--protocol=TCP|UDP] [--target-port=number-or-name] [--name=name] [--external-ip=external-ip-of-service] [--type=type]",
		Short:   "Take a replication controller, service, deployment or pod and expose it as a new Kubernetes Service",
		Long:    expose_long,
		Example: expose_example,
		Run: func(cmd *cobra.Command, args []string) {
			err := RunExpose(f, out, cmd, args, options)
			cmdutil.CheckErr(err)
		},
		ValidArgs:  validArgs,
		ArgAliases: argAliases,
	}
	cmdutil.AddPrinterFlags(cmd)
	cmd.Flags().String("generator", "service/v2", "The name of the API generator to use. There are 2 generators: 'service/v1' and 'service/v2'. The only difference between them is that service port in v1 is named 'default', while it is left unnamed in v2. Default is 'service/v2'.")
	cmd.Flags().String("protocol", "", "The network protocol for the service to be created. Default is 'TCP'.")
	cmd.Flags().String("port", "", "The port that the service should serve on. Copied from the resource being exposed, if unspecified")
	cmd.Flags().String("type", "", "Type for this service: ClusterIP, NodePort, or LoadBalancer. Default is 'ClusterIP'.")
	// TODO: remove create-external-load-balancer in code on or after Aug 25, 2016.
	cmd.Flags().Bool("create-external-load-balancer", false, "If true, create an external load balancer for this service (trumped by --type). Implementation is cloud provider dependent. Default is 'false'.")
	cmd.Flags().MarkDeprecated("create-external-load-balancer", "use --type=\"LoadBalancer\" instead")
	cmd.Flags().String("load-balancer-ip", "", "IP to assign to the Load Balancer. If empty, an ephemeral IP will be created and used (cloud-provider specific).")
	cmd.Flags().String("selector", "", "A label selector to use for this service. Only equality-based selector requirements are supported. If empty (the default) infer the selector from the replication controller or replica set.")
	cmd.Flags().StringP("labels", "l", "", "Labels to apply to the service created by this call.")
	cmd.Flags().String("container-port", "", "Synonym for --target-port")
	cmd.Flags().MarkDeprecated("container-port", "--container-port will be removed in the future, please use --target-port instead")
	cmd.Flags().String("target-port", "", "Name or number for the port on the container that the service should direct traffic to. Optional.")
	cmd.Flags().String("external-ip", "", "Additional external IP address (not managed by Kubernetes) to accept for the service. If this IP is routed to a node, the service can be accessed by this IP in addition to its generated service IP.")
	cmd.Flags().String("overrides", "", "An inline JSON override for the generated object. If this is non-empty, it is used to override the generated object. Requires that the object supply a valid apiVersion field.")
	cmd.Flags().String("name", "", "The name for the newly created object.")
	cmd.Flags().String("session-affinity", "", "If non-empty, set the session affinity for the service to this; legal values: 'None', 'ClientIP'")
	cmd.Flags().String("cluster-ip", "", "ClusterIP to be assigned to the service. Leave empty to auto-allocate, or set to 'None' to create a headless service.")

	usage := "Filename, directory, or URL to a file identifying the resource to expose a service"
	kubectl.AddJsonFilenameFlag(cmd, &options.Filenames, usage)
	cmdutil.AddDryRunFlag(cmd)
	cmdutil.AddRecursiveFlag(cmd, &options.Recursive)
	cmdutil.AddApplyAnnotationFlags(cmd)
	cmdutil.AddRecordFlag(cmd)
	return cmd
}

// RunExpose implements `kubectl expose`. It resolves the target resource(s)
// from args/filenames, fills in any service parameters the user did not pass
// (selector, port(s), protocols, labels) by introspecting the exposed object,
// runs the configured generator to build the Service, applies optional JSON
// overrides, and finally prints (dry-run) or creates the object.
func RunExpose(f *cmdutil.Factory, out io.Writer, cmd *cobra.Command, args []string, options *ExposeOptions) error {
	namespace, enforceNamespace, err := f.DefaultNamespace()
	if err != nil {
		return err
	}

	mapper, typer := f.Object(false)
	r := resource.NewBuilder(mapper, typer, resource.ClientMapperFunc(f.ClientForMapping), f.Decoder(true)).
		ContinueOnError().
		NamespaceParam(namespace).DefaultNamespace().
		FilenameParam(enforceNamespace, options.Recursive, options.Filenames...).
		ResourceTypeOrNameArgs(false, args...).
		Flatten().
		Do()
	err = r.Err()
	if err != nil {
		return cmdutil.UsageError(cmd, err.Error())
	}

	// Get the generator, setup and validate all required parameters
	generatorName := cmdutil.GetFlagString(cmd, "generator")
	generators := f.Generators("expose")
	generator, found := generators[generatorName]
	if !found {
		return cmdutil.UsageError(cmd, fmt.Sprintf("generator %q not found.", generatorName))
	}
	names := generator.ParamNames()

	err = r.Visit(func(info *resource.Info, err error) error {
		if err != nil {
			return err
		}

		mapping := info.ResourceMapping()
		// Reject kinds that cannot be exposed (e.g. anything without a pod template).
		if err := f.CanBeExposed(mapping.GroupVersionKind.GroupKind()); err != nil {
			return err
		}

		params := kubectl.MakeParams(cmd, names)
		name := info.Name
		// Service names must be valid DNS-1035 labels; truncate if too long.
		if len(name) > validation.DNS1035LabelMaxLength {
			name = name[:validation.DNS1035LabelMaxLength]
		}
		params["default-name"] = name

		// For objects that need a pod selector, derive it from the exposed object in case a user
		// didn't explicitly specify one via --selector
		if s, found := params["selector"]; found && kubectl.IsZero(s) {
			s, err := f.MapBasedSelectorForObject(info.Object)
			if err != nil {
				return cmdutil.UsageError(cmd, fmt.Sprintf("couldn't retrieve selectors via --selector flag or introspection: %s", err))
			}
			params["selector"] = s
		}

		// For objects that need a port, derive it from the exposed object in case a user
		// didn't explicitly specify one via --port
		if port, found := params["port"]; found && kubectl.IsZero(port) {
			ports, err := f.PortsForObject(info.Object)
			if err != nil {
				return cmdutil.UsageError(cmd, fmt.Sprintf("couldn't find port via --port flag or introspection: %s", err))
			}
			switch len(ports) {
			case 0:
				return cmdutil.UsageError(cmd, "couldn't find port via --port flag or introspection")
			case 1:
				params["port"] = ports[0]
			default:
				params["ports"] = strings.Join(ports, ",")
			}
		}

		// Always try to derive protocols from the exposed object, may use
		// different protocols for different ports.
		if _, found := params["protocol"]; found {
			protocolsMap, err := f.ProtocolsForObject(info.Object)
			if err != nil {
				return cmdutil.UsageError(cmd, fmt.Sprintf("couldn't find protocol via introspection: %s", err))
			}
			if protocols := kubectl.MakeProtocols(protocolsMap); !kubectl.IsZero(protocols) {
				params["protocols"] = protocols
			}
		}

		// Fall back to the exposed object's labels when --labels was not given.
		if kubectl.IsZero(params["labels"]) {
			labels, err := f.LabelsForObject(info.Object)
			if err != nil {
				return err
			}
			params["labels"] = kubectl.MakeLabels(labels)
		}
		if err = kubectl.ValidateParams(names, params); err != nil {
			return err
		}
		// Check for invalid flags used against the present generator.
		if err := kubectl.EnsureFlagsValid(cmd, generators, generatorName); err != nil {
			return err
		}

		// Generate new object
		object, err := generator.Generate(params)
		if err != nil {
			return err
		}

		// Apply --overrides (inline JSON merged over the generated object).
		if inline := cmdutil.GetFlagString(cmd, "overrides"); len(inline) > 0 {
			codec := runtime.NewCodec(f.JSONEncoder(), f.Decoder(true))
			object, err = cmdutil.Merge(codec, object, inline, mapping.GroupVersionKind.Kind)
			if err != nil {
				return err
			}
		}

		resourceMapper := &resource.Mapper{
			ObjectTyper:  typer,
			RESTMapper:   mapper,
			ClientMapper: resource.ClientMapperFunc(f.ClientForMapping),
			Decoder:      f.Decoder(true),
		}
		// Re-map info to the generated Service (not the exposed source object).
		info, err = resourceMapper.InfoForObject(object, nil)
		if err != nil {
			return err
		}
		if cmdutil.ShouldRecord(cmd, info) {
			if err := cmdutil.RecordChangeCause(object, f.Command()); err != nil {
				return err
			}
		}
		info.Refresh(object, true)
		// Dry run: print what would be created and stop before any API write.
		if cmdutil.GetDryRunFlag(cmd) {
			return f.PrintObject(cmd, mapper, object, out)
		}
		if err := kubectl.CreateOrUpdateAnnotation(cmdutil.GetFlagBool(cmd, cmdutil.ApplyAnnotationsFlag), info, f.JSONEncoder()); err != nil {
			return err
		}

		// Serialize the object with the annotation applied.
		object, err = resource.NewHelper(info.Client, info.Mapping).Create(namespace, false, object)
		if err != nil {
			return err
		}

		if len(cmdutil.GetFlagString(cmd, "output")) > 0 {
			return f.PrintObject(cmd, mapper, object, out)
		}

		cmdutil.PrintSuccess(mapper, false, out, info.Mapping.Resource, info.Name, "exposed")
		return nil
	})
	if err != nil {
		return err
	}
	return nil
}
inlandsee/origin
vendor/k8s.io/kubernetes/pkg/kubectl/cmd/expose.go
GO
apache-2.0
12,252
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.wicket.examples.tree;

import java.util.ArrayList;
import java.util.List;

import org.apache.wicket.Page;
import org.apache.wicket.protocol.http.WebApplication;
import org.apache.wicket.response.filter.ServerAndClientTimeFilter;

/**
 * Application class for tree examples.
 *
 * @author svenmeier
 */
public class TreeApplication extends WebApplication
{
	/**
	 * All root {@link Foo}s.
	 */
	public List<Foo> foos = new ArrayList<>();

	/**
	 * Constructor - populates the sample tree of {@link Foo}s ("A", "B" and "C"
	 * with their descendants).
	 */
	public TreeApplication()
	{
		Foo rootA = new Foo("A");
		Foo nodeAA = new Foo(rootA, "AA");
		new Foo(nodeAA, "AAA");
		new Foo(nodeAA, "AAB");
		Foo nodeAB = new Foo(rootA, "AB");
		new Foo(nodeAB, "ABA");
		Foo nodeABB = new Foo(nodeAB, "ABB");
		new Foo(nodeABB, "ABBA");
		Foo nodeABBB = new Foo(nodeABB, "ABBB");
		new Foo(nodeABBB, "ABBBA");
		new Foo(nodeAB, "ABC");
		new Foo(nodeAB, "ABD");
		Foo nodeAC = new Foo(rootA, "AC");
		new Foo(nodeAC, "ACA");
		new Foo(nodeAC, "ACB");
		foos.add(rootA);

		Foo rootB = new Foo("B");
		new Foo(rootB, "BA");
		new Foo(rootB, "BB");
		foos.add(rootB);

		foos.add(new Foo("C"));
	}

	@Override
	protected void init()
	{
		// enable development utilities and render-time statistics for the examples
		getDebugSettings().setDevelopmentUtilitiesEnabled(true);
		getRequestCycleSettings().addResponseFilter(new ServerAndClientTimeFilter());
	}

	@Override
	public Class<? extends Page> getHomePage()
	{
		return BeginnersTreePage.class;
	}

	/**
	 * Get a {@link Foo} by its id.
	 *
	 * @param id
	 *            id to look up
	 * @return matching {@link Foo} anywhere in the tree, or {@code null} if absent
	 */
	public Foo getFoo(String id)
	{
		return findFoo(foos, id);
	}

	/**
	 * Depth-first search through the given nodes and all of their descendants.
	 */
	private static Foo findFoo(List<Foo> candidates, String id)
	{
		for (Foo candidate : candidates)
		{
			if (candidate.getId().equals(id))
			{
				return candidate;
			}

			Foo match = findFoo(candidate.getFoos(), id);
			if (match != null)
			{
				return match;
			}
		}

		return null;
	}

	public static TreeApplication get()
	{
		return (TreeApplication)WebApplication.get();
	}
}
dashorst/wicket
wicket-examples/src/main/java/org/apache/wicket/examples/tree/TreeApplication.java
Java
apache-2.0
2,757
# Copyright 2012 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from migrate.changeset import UniqueConstraint from migrate import ForeignKeyConstraint from oslo_log import log as logging from sqlalchemy import Boolean, BigInteger, Column, DateTime, Enum, Float from sqlalchemy import dialects from sqlalchemy import ForeignKey, Index, Integer, MetaData, String, Table from sqlalchemy import Text from sqlalchemy.types import NullType from nova.i18n import _LE LOG = logging.getLogger(__name__) # Note on the autoincrement flag: this is defaulted for primary key columns # of integral type, so is no longer set explicitly in such cases. # NOTE(dprince): This wrapper allows us to easily match the Folsom MySQL # Schema. In Folsom we created tables as latin1 and converted them to utf8 # later. This conversion causes some of the Text columns on MySQL to get # created as mediumtext instead of just text. 
def MediumText():
    """Text type that becomes MEDIUMTEXT on MySQL, plain Text elsewhere.

    Keeps the schema matching the Folsom-era latin1->utf8 conversion
    described in the module comment above.
    """
    return Text().with_variant(dialects.mysql.MEDIUMTEXT(), 'mysql')


def Inet():
    # String(43) on generic backends, native INET on PostgreSQL.
    return String(length=43).with_variant(dialects.postgresql.INET(),
                                          'postgresql')


def InetSmall():
    # String(39) on generic backends, native INET on PostgreSQL.
    return String(length=39).with_variant(dialects.postgresql.INET(),
                                          'postgresql')


def _create_shadow_tables(migrate_engine):
    # Create a 'shadow_<name>' copy of every reflected table, used to hold
    # archived (soft-deleted) rows.
    meta = MetaData(migrate_engine)
    meta.reflect(migrate_engine)
    table_names = list(meta.tables.keys())

    meta.bind = migrate_engine

    for table_name in table_names:
        table = Table(table_name, meta, autoload=True)

        columns = []
        for column in table.columns:
            column_copy = None
            # NOTE(boris-42): BigInteger is not supported by sqlite, so
            #                 after copy it will have NullType, other
            #                 types that are used in Nova are supported by
            #                 sqlite.
            if isinstance(column.type, NullType):
                column_copy = Column(column.name, BigInteger(), default=0)
            # instances.locked_by needs a distinctly named enum so the shadow
            # table does not collide with the 'instances0locked_by' type.
            # NOTE(review): the else branch below overwrites the BigInteger
            # substitution made above for any NullType column other than
            # instances.locked_by -- looks suspicious; confirm intent against
            # the original (non-flattened) migration before relying on it.
            if table_name == 'instances' and column.name == 'locked_by':
                enum = Enum('owner', 'admin',
                            name='shadow_instances0locked_by')
                column_copy = Column(column.name, enum)
            else:
                column_copy = column.copy()
            columns.append(column_copy)

        shadow_table_name = 'shadow_' + table_name
        shadow_table = Table(shadow_table_name, meta, *columns,
                             mysql_engine='InnoDB')
        try:
            shadow_table.create()
        except Exception:
            # Log the failing table definition before re-raising so the
            # migration failure is diagnosable.
            LOG.info(repr(shadow_table))
            LOG.exception(_LE('Exception while creating table.'))
            raise

# NOTE(dprince): we add these here so our schema contains dump tables
# which were added in migration 209 (in Havana).
We can drop these in # Icehouse: https://bugs.launchpad.net/nova/+bug/1266538 def _create_dump_tables(migrate_engine): meta = MetaData(migrate_engine) meta.reflect(migrate_engine) table_names = ['compute_node_stats', 'compute_nodes', 'instance_actions', 'instance_actions_events', 'instance_faults', 'migrations'] for table_name in table_names: table = Table(table_name, meta, autoload=True) dump_table_name = 'dump_' + table.name columns = [] for column in table.columns: # NOTE(dprince): The dump_ tables were originally created from an # earlier schema version so we don't want to add the pci_stats # column so that schema diffs are exactly the same. if column.name == 'pci_stats': continue else: columns.append(column.copy()) table_dump = Table(dump_table_name, meta, *columns, mysql_engine='InnoDB') table_dump.create() def upgrade(migrate_engine): meta = MetaData() meta.bind = migrate_engine agent_builds = Table('agent_builds', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('hypervisor', String(length=255)), Column('os', String(length=255)), Column('architecture', String(length=255)), Column('version', String(length=255)), Column('url', String(length=255)), Column('md5hash', String(length=255)), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) aggregate_hosts = Table('aggregate_hosts', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('host', String(length=255)), Column('aggregate_id', Integer, ForeignKey('aggregates.id'), nullable=False), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) aggregate_metadata = Table('aggregate_metadata', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), 
Column('aggregate_id', Integer, ForeignKey('aggregates.id'), nullable=False), Column('key', String(length=255), nullable=False), Column('value', String(length=255), nullable=False), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) aggregates = Table('aggregates', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('name', String(length=255)), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) block_device_mapping = Table('block_device_mapping', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('device_name', String(length=255), nullable=True), Column('delete_on_termination', Boolean), Column('snapshot_id', String(length=36), nullable=True), Column('volume_id', String(length=36), nullable=True), Column('volume_size', Integer), Column('no_device', Boolean), Column('connection_info', MediumText()), Column('instance_uuid', String(length=36)), Column('deleted', Integer), Column('source_type', String(length=255), nullable=True), Column('destination_type', String(length=255), nullable=True), Column('guest_format', String(length=255), nullable=True), Column('device_type', String(length=255), nullable=True), Column('disk_bus', String(length=255), nullable=True), Column('boot_index', Integer), Column('image_id', String(length=36), nullable=True), mysql_engine='InnoDB', mysql_charset='utf8' ) bw_usage_cache = Table('bw_usage_cache', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('start_period', DateTime, nullable=False), Column('last_refreshed', DateTime), Column('bw_in', BigInteger), Column('bw_out', BigInteger), Column('mac', String(length=255)), Column('uuid', String(length=36)), 
Column('last_ctr_in', BigInteger()), Column('last_ctr_out', BigInteger()), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) cells = Table('cells', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('api_url', String(length=255)), Column('weight_offset', Float), Column('weight_scale', Float), Column('name', String(length=255)), Column('is_parent', Boolean), Column('deleted', Integer), Column('transport_url', String(length=255), nullable=False), mysql_engine='InnoDB', mysql_charset='utf8' ) certificates = Table('certificates', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('user_id', String(length=255)), Column('project_id', String(length=255)), Column('file_name', String(length=255)), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) compute_node_stats = Table('compute_node_stats', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('compute_node_id', Integer, nullable=False), Column('key', String(length=255), nullable=False), Column('value', String(length=255)), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) compute_nodes = Table('compute_nodes', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('service_id', Integer, nullable=False), Column('vcpus', Integer, nullable=False), Column('memory_mb', Integer, nullable=False), Column('local_gb', Integer, nullable=False), Column('vcpus_used', Integer, nullable=False), Column('memory_mb_used', Integer, nullable=False), Column('local_gb_used', Integer, nullable=False), Column('hypervisor_type', 
MediumText(), nullable=False), Column('hypervisor_version', Integer, nullable=False), Column('cpu_info', MediumText(), nullable=False), Column('disk_available_least', Integer), Column('free_ram_mb', Integer), Column('free_disk_gb', Integer), Column('current_workload', Integer), Column('running_vms', Integer), Column('hypervisor_hostname', String(length=255)), Column('deleted', Integer), Column('host_ip', InetSmall()), Column('supported_instances', Text), Column('pci_stats', Text, nullable=True), mysql_engine='InnoDB', mysql_charset='utf8' ) console_pools = Table('console_pools', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('address', InetSmall()), Column('username', String(length=255)), Column('password', String(length=255)), Column('console_type', String(length=255)), Column('public_hostname', String(length=255)), Column('host', String(length=255)), Column('compute_host', String(length=255)), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) consoles_instance_uuid_column_args = ['instance_uuid', String(length=36)] consoles_instance_uuid_column_args.append( ForeignKey('instances.uuid', name='consoles_instance_uuid_fkey')) consoles = Table('consoles', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('instance_name', String(length=255)), Column('password', String(length=255)), Column('port', Integer), Column('pool_id', Integer, ForeignKey('console_pools.id')), Column(*consoles_instance_uuid_column_args), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) dns_domains = Table('dns_domains', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('deleted', Boolean), Column('domain', String(length=255), primary_key=True, nullable=False), 
Column('scope', String(length=255)), Column('availability_zone', String(length=255)), Column('project_id', String(length=255)), mysql_engine='InnoDB', mysql_charset='utf8' ) fixed_ips = Table('fixed_ips', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('address', InetSmall()), Column('network_id', Integer), Column('allocated', Boolean), Column('leased', Boolean), Column('reserved', Boolean), Column('virtual_interface_id', Integer), Column('host', String(length=255)), Column('instance_uuid', String(length=36)), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) floating_ips = Table('floating_ips', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('address', InetSmall()), Column('fixed_ip_id', Integer), Column('project_id', String(length=255)), Column('host', String(length=255)), Column('auto_assigned', Boolean), Column('pool', String(length=255)), Column('interface', String(length=255)), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) instance_faults = Table('instance_faults', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('instance_uuid', String(length=36)), Column('code', Integer, nullable=False), Column('message', String(length=255)), Column('details', MediumText()), Column('host', String(length=255)), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) instance_id_mappings = Table('instance_id_mappings', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('uuid', String(36), nullable=False), Column('deleted', Integer), 
mysql_engine='InnoDB', mysql_charset='utf8' ) instance_info_caches = Table('instance_info_caches', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('network_info', MediumText()), Column('instance_uuid', String(length=36), nullable=False), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) groups = Table('instance_groups', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('deleted', Integer), Column('id', Integer, primary_key=True, nullable=False), Column('user_id', String(length=255)), Column('project_id', String(length=255)), Column('uuid', String(length=36), nullable=False), Column('name', String(length=255)), UniqueConstraint('uuid', 'deleted', name='uniq_instance_groups0uuid0deleted'), mysql_engine='InnoDB', mysql_charset='utf8', ) group_metadata = Table('instance_group_metadata', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('deleted', Integer), Column('id', Integer, primary_key=True, nullable=False), Column('key', String(length=255)), Column('value', String(length=255)), Column('group_id', Integer, ForeignKey('instance_groups.id'), nullable=False), mysql_engine='InnoDB', mysql_charset='utf8', ) group_policy = Table('instance_group_policy', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('deleted', Integer), Column('id', Integer, primary_key=True, nullable=False), Column('policy', String(length=255)), Column('group_id', Integer, ForeignKey('instance_groups.id'), nullable=False), mysql_engine='InnoDB', mysql_charset='utf8', ) group_member = Table('instance_group_member', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('deleted', Integer), Column('id', Integer, primary_key=True, 
nullable=False), Column('instance_id', String(length=255)), Column('group_id', Integer, ForeignKey('instance_groups.id'), nullable=False), mysql_engine='InnoDB', mysql_charset='utf8', ) instance_metadata = Table('instance_metadata', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('key', String(length=255)), Column('value', String(length=255)), Column('instance_uuid', String(length=36), nullable=True), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) instance_system_metadata = Table('instance_system_metadata', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('instance_uuid', String(length=36), nullable=False), Column('key', String(length=255), nullable=False), Column('value', String(length=255)), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) instance_type_extra_specs = Table('instance_type_extra_specs', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('instance_type_id', Integer, ForeignKey('instance_types.id'), nullable=False), Column('key', String(length=255)), Column('value', String(length=255)), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) instance_type_projects = Table('instance_type_projects', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('instance_type_id', Integer, nullable=False), Column('project_id', String(length=255)), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) instance_types = Table('instance_types', meta, Column('created_at', DateTime), Column('updated_at', DateTime), 
Column('deleted_at', DateTime), Column('name', String(length=255)), Column('id', Integer, primary_key=True, nullable=False), Column('memory_mb', Integer, nullable=False), Column('vcpus', Integer, nullable=False), Column('swap', Integer, nullable=False), Column('vcpu_weight', Integer), Column('flavorid', String(length=255)), Column('rxtx_factor', Float), Column('root_gb', Integer), Column('ephemeral_gb', Integer), Column('disabled', Boolean), Column('is_public', Boolean), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) inst_lock_enum = Enum('owner', 'admin', name='instances0locked_by') instances = Table('instances', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('internal_id', Integer), Column('user_id', String(length=255)), Column('project_id', String(length=255)), Column('image_ref', String(length=255)), Column('kernel_id', String(length=255)), Column('ramdisk_id', String(length=255)), Column('launch_index', Integer), Column('key_name', String(length=255)), Column('key_data', MediumText()), Column('power_state', Integer), Column('vm_state', String(length=255)), Column('memory_mb', Integer), Column('vcpus', Integer), Column('hostname', String(length=255)), Column('host', String(length=255)), Column('user_data', MediumText()), Column('reservation_id', String(length=255)), Column('scheduled_at', DateTime), Column('launched_at', DateTime), Column('terminated_at', DateTime), Column('display_name', String(length=255)), Column('display_description', String(length=255)), Column('availability_zone', String(length=255)), Column('locked', Boolean), Column('os_type', String(length=255)), Column('launched_on', MediumText()), Column('instance_type_id', Integer), Column('vm_mode', String(length=255)), Column('uuid', String(length=36)), Column('architecture', String(length=255)), Column('root_device_name', String(length=255)), 
Column('access_ip_v4', InetSmall()), Column('access_ip_v6', InetSmall()), Column('config_drive', String(length=255)), Column('task_state', String(length=255)), Column('default_ephemeral_device', String(length=255)), Column('default_swap_device', String(length=255)), Column('progress', Integer), Column('auto_disk_config', Boolean), Column('shutdown_terminate', Boolean), Column('disable_terminate', Boolean), Column('root_gb', Integer), Column('ephemeral_gb', Integer), Column('cell_name', String(length=255)), Column('node', String(length=255)), Column('deleted', Integer), Column('locked_by', inst_lock_enum), Column('cleaned', Integer, default=0), mysql_engine='InnoDB', mysql_charset='utf8' ) instance_actions = Table('instance_actions', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('action', String(length=255)), Column('instance_uuid', String(length=36)), Column('request_id', String(length=255)), Column('user_id', String(length=255)), Column('project_id', String(length=255)), Column('start_time', DateTime), Column('finish_time', DateTime), Column('message', String(length=255)), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8', ) instance_actions_events = Table('instance_actions_events', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('event', String(length=255)), Column('action_id', Integer, ForeignKey('instance_actions.id')), Column('start_time', DateTime), Column('finish_time', DateTime), Column('result', String(length=255)), Column('traceback', Text), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8', ) iscsi_targets = Table('iscsi_targets', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, 
nullable=False), Column('target_num', Integer), Column('host', String(length=255)), Column('volume_id', String(length=36), nullable=True), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) key_pairs = Table('key_pairs', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('name', String(length=255)), Column('user_id', String(length=255)), Column('fingerprint', String(length=255)), Column('public_key', MediumText()), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) migrations = Table('migrations', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('source_compute', String(length=255)), Column('dest_compute', String(length=255)), Column('dest_host', String(length=255)), Column('status', String(length=255)), Column('instance_uuid', String(length=36)), Column('old_instance_type_id', Integer), Column('new_instance_type_id', Integer), Column('source_node', String(length=255)), Column('dest_node', String(length=255)), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) networks = Table('networks', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('injected', Boolean), Column('cidr', Inet()), Column('netmask', InetSmall()), Column('bridge', String(length=255)), Column('gateway', InetSmall()), Column('broadcast', InetSmall()), Column('dns1', InetSmall()), Column('vlan', Integer), Column('vpn_public_address', InetSmall()), Column('vpn_public_port', Integer), Column('vpn_private_address', InetSmall()), Column('dhcp_start', InetSmall()), Column('project_id', String(length=255)), Column('host', String(length=255)), Column('cidr_v6', Inet()), Column('gateway_v6', 
InetSmall()), Column('label', String(length=255)), Column('netmask_v6', InetSmall()), Column('bridge_interface', String(length=255)), Column('multi_host', Boolean), Column('dns2', InetSmall()), Column('uuid', String(length=36)), Column('priority', Integer), Column('rxtx_base', Integer), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) pci_devices_uc_name = 'uniq_pci_devices0compute_node_id0address0deleted' pci_devices = Table('pci_devices', meta, Column('created_at', DateTime(timezone=False)), Column('updated_at', DateTime(timezone=False)), Column('deleted_at', DateTime(timezone=False)), Column('deleted', Integer, default=0, nullable=False), Column('id', Integer, primary_key=True), Column('compute_node_id', Integer, nullable=False), Column('address', String(12), nullable=False), Column('product_id', String(4)), Column('vendor_id', String(4)), Column('dev_type', String(8)), Column('dev_id', String(255)), Column('label', String(255), nullable=False), Column('status', String(36), nullable=False), Column('extra_info', Text, nullable=True), Column('instance_uuid', String(36), nullable=True), Index('ix_pci_devices_compute_node_id_deleted', 'compute_node_id', 'deleted'), Index('ix_pci_devices_instance_uuid_deleted', 'instance_uuid', 'deleted'), UniqueConstraint('compute_node_id', 'address', 'deleted', name=pci_devices_uc_name), mysql_engine='InnoDB', mysql_charset='utf8') provider_fw_rules = Table('provider_fw_rules', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('protocol', String(length=5)), Column('from_port', Integer), Column('to_port', Integer), Column('cidr', Inet()), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) quota_classes = Table('quota_classes', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, 
nullable=False), Column('class_name', String(length=255)), Column('resource', String(length=255)), Column('hard_limit', Integer), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) quota_usages = Table('quota_usages', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('project_id', String(length=255)), Column('resource', String(length=255)), Column('in_use', Integer, nullable=False), Column('reserved', Integer, nullable=False), Column('until_refresh', Integer), Column('deleted', Integer), Column('user_id', String(length=255)), mysql_engine='InnoDB', mysql_charset='utf8' ) quotas = Table('quotas', meta, Column('id', Integer, primary_key=True, nullable=False), Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('project_id', String(length=255)), Column('resource', String(length=255), nullable=False), Column('hard_limit', Integer), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) uniq_name = "uniq_project_user_quotas0user_id0project_id0resource0deleted" project_user_quotas = Table('project_user_quotas', meta, Column('id', Integer, primary_key=True, nullable=False), Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('deleted', Integer), Column('user_id', String(length=255), nullable=False), Column('project_id', String(length=255), nullable=False), Column('resource', String(length=255), nullable=False), Column('hard_limit', Integer, nullable=True), UniqueConstraint('user_id', 'project_id', 'resource', 'deleted', name=uniq_name), mysql_engine='InnoDB', mysql_charset='utf8', ) reservations = Table('reservations', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('uuid', String(length=36), 
nullable=False), Column('usage_id', Integer, nullable=False), Column('project_id', String(length=255)), Column('resource', String(length=255)), Column('delta', Integer, nullable=False), Column('expire', DateTime), Column('deleted', Integer), Column('user_id', String(length=255)), mysql_engine='InnoDB', mysql_charset='utf8' ) s3_images = Table('s3_images', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('uuid', String(length=36), nullable=False), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) security_group_instance_association = \ Table('security_group_instance_association', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('security_group_id', Integer), Column('instance_uuid', String(length=36)), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) security_group_rules = Table('security_group_rules', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('parent_group_id', Integer, ForeignKey('security_groups.id')), Column('protocol', String(length=255)), Column('from_port', Integer), Column('to_port', Integer), Column('cidr', Inet()), Column('group_id', Integer, ForeignKey('security_groups.id')), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) security_groups = Table('security_groups', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('name', String(length=255)), Column('description', String(length=255)), Column('user_id', String(length=255)), Column('project_id', String(length=255)), Column('deleted', Integer), mysql_engine='InnoDB', 
mysql_charset='utf8' ) security_group_default_rules = Table('security_group_default_rules', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('deleted', Integer, default=0), Column('id', Integer, primary_key=True, nullable=False), Column('protocol', String(length=5)), Column('from_port', Integer), Column('to_port', Integer), Column('cidr', Inet()), mysql_engine='InnoDB', mysql_charset='utf8', ) services = Table('services', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('host', String(length=255)), Column('binary', String(length=255)), Column('topic', String(length=255)), Column('report_count', Integer, nullable=False), Column('disabled', Boolean), Column('deleted', Integer), Column('disabled_reason', String(length=255)), mysql_engine='InnoDB', mysql_charset='utf8' ) snapshot_id_mappings = Table('snapshot_id_mappings', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('uuid', String(length=36), nullable=False), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) snapshots = Table('snapshots', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', String(length=36), primary_key=True, nullable=False), Column('volume_id', String(length=36), nullable=False), Column('user_id', String(length=255)), Column('project_id', String(length=255)), Column('status', String(length=255)), Column('progress', String(length=255)), Column('volume_size', Integer), Column('scheduled_at', DateTime), Column('display_name', String(length=255)), Column('display_description', String(length=255)), Column('deleted', String(length=36)), mysql_engine='InnoDB', mysql_charset='utf8' ) task_log = Table('task_log', meta, 
Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('task_name', String(length=255), nullable=False), Column('state', String(length=255), nullable=False), Column('host', String(length=255), nullable=False), Column('period_beginning', DateTime, nullable=False), Column('period_ending', DateTime, nullable=False), Column('message', String(length=255), nullable=False), Column('task_items', Integer), Column('errors', Integer), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) virtual_interfaces = Table('virtual_interfaces', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('address', String(length=255)), Column('network_id', Integer), Column('uuid', String(length=36)), Column('instance_uuid', String(length=36), nullable=True), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) volume_id_mappings = Table('volume_id_mappings', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', Integer, primary_key=True, nullable=False), Column('uuid', String(length=36), nullable=False), Column('deleted', Integer), mysql_engine='InnoDB', mysql_charset='utf8' ) volumes = Table('volumes', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('id', String(length=36), primary_key=True, nullable=False), Column('ec2_id', String(length=255)), Column('user_id', String(length=255)), Column('project_id', String(length=255)), Column('host', String(length=255)), Column('size', Integer), Column('availability_zone', String(length=255)), Column('mountpoint', String(length=255)), Column('status', String(length=255)), Column('attach_status', String(length=255)), Column('scheduled_at', DateTime), Column('launched_at', 
DateTime), Column('terminated_at', DateTime), Column('display_name', String(length=255)), Column('display_description', String(length=255)), Column('provider_location', String(length=256)), Column('provider_auth', String(length=256)), Column('snapshot_id', String(length=36)), Column('volume_type_id', Integer), Column('instance_uuid', String(length=36)), Column('attach_time', DateTime), Column('deleted', String(length=36)), mysql_engine='InnoDB', mysql_charset='utf8' ) volume_usage_cache = Table('volume_usage_cache', meta, Column('created_at', DateTime(timezone=False)), Column('updated_at', DateTime(timezone=False)), Column('deleted_at', DateTime(timezone=False)), Column('id', Integer(), primary_key=True, nullable=False), Column('volume_id', String(36), nullable=False), Column('tot_last_refreshed', DateTime(timezone=False)), Column('tot_reads', BigInteger(), default=0), Column('tot_read_bytes', BigInteger(), default=0), Column('tot_writes', BigInteger(), default=0), Column('tot_write_bytes', BigInteger(), default=0), Column('curr_last_refreshed', DateTime(timezone=False)), Column('curr_reads', BigInteger(), default=0), Column('curr_read_bytes', BigInteger(), default=0), Column('curr_writes', BigInteger(), default=0), Column('curr_write_bytes', BigInteger(), default=0), Column('deleted', Integer), Column("instance_uuid", String(length=36)), Column("project_id", String(length=36)), Column("user_id", String(length=36)), Column("availability_zone", String(length=255)), mysql_engine='InnoDB', mysql_charset='utf8' ) instances.create() Index('project_id', instances.c.project_id).create() Index('uuid', instances.c.uuid, unique=True).create() # create all tables tables = [aggregates, console_pools, instance_types, security_groups, snapshots, volumes, # those that are children and others later agent_builds, aggregate_hosts, aggregate_metadata, block_device_mapping, bw_usage_cache, cells, certificates, compute_node_stats, compute_nodes, consoles, dns_domains, fixed_ips, 
floating_ips, instance_faults, instance_id_mappings, instance_info_caches, instance_metadata, instance_system_metadata, instance_type_extra_specs, instance_type_projects, instance_actions, instance_actions_events, groups, group_metadata, group_policy, group_member, iscsi_targets, key_pairs, migrations, networks, pci_devices, provider_fw_rules, quota_classes, quota_usages, quotas, project_user_quotas, reservations, s3_images, security_group_instance_association, security_group_rules, security_group_default_rules, services, snapshot_id_mappings, task_log, virtual_interfaces, volume_id_mappings, volume_usage_cache] for table in tables: try: table.create() except Exception: LOG.info(repr(table)) LOG.exception(_LE('Exception while creating table.')) raise # task log unique constraint task_log_uc = "uniq_task_log0task_name0host0period_beginning0period_ending" task_log_cols = ('task_name', 'host', 'period_beginning', 'period_ending') uc = UniqueConstraint(*task_log_cols, table=task_log, name=task_log_uc) uc.create() # networks unique constraint UniqueConstraint('vlan', 'deleted', table=networks, name='uniq_networks0vlan0deleted').create() # instance_type_name constraint UniqueConstraint('name', 'deleted', table=instance_types, name='uniq_instance_types0name0deleted').create() # flavorid unique constraint UniqueConstraint('flavorid', 'deleted', table=instance_types, name='uniq_instance_types0flavorid0deleted').create() # keypair contraint UniqueConstraint('user_id', 'name', 'deleted', table=key_pairs, name='uniq_key_pairs0user_id0name0deleted').create() # instance_type_projects constraint inst_type_uc_name = 'uniq_instance_type_projects0instance_type_id0' + \ 'project_id0deleted' UniqueConstraint('instance_type_id', 'project_id', 'deleted', table=instance_type_projects, name=inst_type_uc_name).create() # floating_ips unique constraint UniqueConstraint('address', 'deleted', table=floating_ips, name='uniq_floating_ips0address0deleted').create() # instance_info_caches 
UniqueConstraint('instance_uuid', table=instance_info_caches, name='uniq_instance_info_caches0instance_uuid').create() UniqueConstraint('address', 'deleted', table=virtual_interfaces, name='uniq_virtual_interfaces0address0deleted').create() # cells UniqueConstraint('name', 'deleted', table=cells, name='uniq_cells0name0deleted').create() # security_groups uc = UniqueConstraint('project_id', 'name', 'deleted', table=security_groups, name='uniq_security_groups0project_id0name0deleted') uc.create() # quotas UniqueConstraint('project_id', 'resource', 'deleted', table=quotas, name='uniq_quotas0project_id0resource0deleted').create() # fixed_ips UniqueConstraint('address', 'deleted', table=fixed_ips, name='uniq_fixed_ips0address0deleted').create() # services UniqueConstraint('host', 'topic', 'deleted', table=services, name='uniq_services0host0topic0deleted').create() UniqueConstraint('host', 'binary', 'deleted', table=services, name='uniq_services0host0binary0deleted').create() # agent_builds uc_name = 'uniq_agent_builds0hypervisor0os0architecture0deleted' UniqueConstraint('hypervisor', 'os', 'architecture', 'deleted', table=agent_builds, name=uc_name).create() uc_name = 'uniq_console_pools0host0console_type0compute_host0deleted' UniqueConstraint('host', 'console_type', 'compute_host', 'deleted', table=console_pools, name=uc_name).create() uc_name = 'uniq_aggregate_hosts0host0aggregate_id0deleted' UniqueConstraint('host', 'aggregate_id', 'deleted', table=aggregate_hosts, name=uc_name).create() uc_name = 'uniq_aggregate_metadata0aggregate_id0key0deleted' UniqueConstraint('aggregate_id', 'key', 'deleted', table=aggregate_metadata, name=uc_name).create() uc_name = 'uniq_instance_type_extra_specs0instance_type_id0key0deleted' UniqueConstraint('instance_type_id', 'key', 'deleted', table=instance_type_extra_specs, name=uc_name).create() # created first (to preserve ordering for schema diffs) mysql_pre_indexes = [ Index('instance_type_id', 
instance_type_projects.c.instance_type_id), Index('project_id', dns_domains.c.project_id), Index('fixed_ip_id', floating_ips.c.fixed_ip_id), Index('network_id', virtual_interfaces.c.network_id), Index('network_id', fixed_ips.c.network_id), Index('fixed_ips_virtual_interface_id_fkey', fixed_ips.c.virtual_interface_id), Index('address', fixed_ips.c.address), Index('fixed_ips_instance_uuid_fkey', fixed_ips.c.instance_uuid), Index('instance_uuid', instance_system_metadata.c.instance_uuid), Index('iscsi_targets_volume_id_fkey', iscsi_targets.c.volume_id), Index('snapshot_id', block_device_mapping.c.snapshot_id), Index('usage_id', reservations.c.usage_id), Index('virtual_interfaces_instance_uuid_fkey', virtual_interfaces.c.instance_uuid), Index('volume_id', block_device_mapping.c.volume_id), Index('security_group_id', security_group_instance_association.c.security_group_id), ] # Common indexes (indexes we apply to all databases) # NOTE: order specific for MySQL diff support common_indexes = [ # aggregate_metadata Index('aggregate_metadata_key_idx', aggregate_metadata.c.key), # agent_builds Index('agent_builds_hypervisor_os_arch_idx', agent_builds.c.hypervisor, agent_builds.c.os, agent_builds.c.architecture), # block_device_mapping Index('block_device_mapping_instance_uuid_idx', block_device_mapping.c.instance_uuid), Index('block_device_mapping_instance_uuid_device_name_idx', block_device_mapping.c.instance_uuid, block_device_mapping.c.device_name), # NOTE(dprince): This is now a duplicate index on MySQL and needs to # be removed there. We leave it here so the Index ordering # matches on schema diffs (for MySQL). # See Havana migration 186_new_bdm_format where we dropped the # virtual_name column. 
# IceHouse fix is here: https://bugs.launchpad.net/nova/+bug/1265839 Index( 'block_device_mapping_instance_uuid_virtual_name_device_name_idx', block_device_mapping.c.instance_uuid, block_device_mapping.c.device_name), Index('block_device_mapping_instance_uuid_volume_id_idx', block_device_mapping.c.instance_uuid, block_device_mapping.c.volume_id), # bw_usage_cache Index('bw_usage_cache_uuid_start_period_idx', bw_usage_cache.c.uuid, bw_usage_cache.c.start_period), Index('certificates_project_id_deleted_idx', certificates.c.project_id, certificates.c.deleted), Index('certificates_user_id_deleted_idx', certificates.c.user_id, certificates.c.deleted), # compute_node_stats Index('ix_compute_node_stats_compute_node_id', compute_node_stats.c.compute_node_id), Index('compute_node_stats_node_id_and_deleted_idx', compute_node_stats.c.compute_node_id, compute_node_stats.c.deleted), # consoles Index('consoles_instance_uuid_idx', consoles.c.instance_uuid), # dns_domains Index('dns_domains_domain_deleted_idx', dns_domains.c.domain, dns_domains.c.deleted), # fixed_ips Index('fixed_ips_host_idx', fixed_ips.c.host), Index('fixed_ips_network_id_host_deleted_idx', fixed_ips.c.network_id, fixed_ips.c.host, fixed_ips.c.deleted), Index('fixed_ips_address_reserved_network_id_deleted_idx', fixed_ips.c.address, fixed_ips.c.reserved, fixed_ips.c.network_id, fixed_ips.c.deleted), Index('fixed_ips_deleted_allocated_idx', fixed_ips.c.address, fixed_ips.c.deleted, fixed_ips.c.allocated), # floating_ips Index('floating_ips_host_idx', floating_ips.c.host), Index('floating_ips_project_id_idx', floating_ips.c.project_id), Index('floating_ips_pool_deleted_fixed_ip_id_project_id_idx', floating_ips.c.pool, floating_ips.c.deleted, floating_ips.c.fixed_ip_id, floating_ips.c.project_id), # group_member Index('instance_group_member_instance_idx', group_member.c.instance_id), # group_metadata Index('instance_group_metadata_key_idx', group_metadata.c.key), # group_policy 
Index('instance_group_policy_policy_idx', group_policy.c.policy), # instances Index('instances_reservation_id_idx', instances.c.reservation_id), Index('instances_terminated_at_launched_at_idx', instances.c.terminated_at, instances.c.launched_at), Index('instances_task_state_updated_at_idx', instances.c.task_state, instances.c.updated_at), Index('instances_host_deleted_idx', instances.c.host, instances.c.deleted), Index('instances_uuid_deleted_idx', instances.c.uuid, instances.c.deleted), Index('instances_host_node_deleted_idx', instances.c.host, instances.c.node, instances.c.deleted), Index('instances_host_deleted_cleaned_idx', instances.c.host, instances.c.deleted, instances.c.cleaned), # instance_actions Index('instance_uuid_idx', instance_actions.c.instance_uuid), Index('request_id_idx', instance_actions.c.request_id), # instance_faults Index('instance_faults_host_idx', instance_faults.c.host), Index('instance_faults_instance_uuid_deleted_created_at_idx', instance_faults.c.instance_uuid, instance_faults.c.deleted, instance_faults.c.created_at), # instance_id_mappings Index('ix_instance_id_mappings_uuid', instance_id_mappings.c.uuid), # instance_metadata Index('instance_metadata_instance_uuid_idx', instance_metadata.c.instance_uuid), # instance_type_extra_specs Index('instance_type_extra_specs_instance_type_id_key_idx', instance_type_extra_specs.c.instance_type_id, instance_type_extra_specs.c.key), # iscsi_targets Index('iscsi_targets_host_idx', iscsi_targets.c.host), Index('iscsi_targets_host_volume_id_deleted_idx', iscsi_targets.c.host, iscsi_targets.c.volume_id, iscsi_targets.c.deleted), # migrations Index('migrations_by_host_nodes_and_status_idx', migrations.c.deleted, migrations.c.source_compute, migrations.c.dest_compute, migrations.c.source_node, migrations.c.dest_node, migrations.c.status), Index('migrations_instance_uuid_and_status_idx', migrations.c.deleted, migrations.c.instance_uuid, migrations.c.status), # networks Index('networks_host_idx', 
networks.c.host), Index('networks_cidr_v6_idx', networks.c.cidr_v6), Index('networks_bridge_deleted_idx', networks.c.bridge, networks.c.deleted), Index('networks_project_id_deleted_idx', networks.c.project_id, networks.c.deleted), Index('networks_uuid_project_id_deleted_idx', networks.c.uuid, networks.c.project_id, networks.c.deleted), Index('networks_vlan_deleted_idx', networks.c.vlan, networks.c.deleted), # project_user_quotas Index('project_user_quotas_project_id_deleted_idx', project_user_quotas.c.project_id, project_user_quotas.c.deleted), Index('project_user_quotas_user_id_deleted_idx', project_user_quotas.c.user_id, project_user_quotas.c.deleted), # reservations Index('ix_reservations_project_id', reservations.c.project_id), Index('ix_reservations_user_id_deleted', reservations.c.user_id, reservations.c.deleted), Index('reservations_uuid_idx', reservations.c.uuid), # security_group_instance_association Index('security_group_instance_association_instance_uuid_idx', security_group_instance_association.c.instance_uuid), # task_log Index('ix_task_log_period_beginning', task_log.c.period_beginning), Index('ix_task_log_host', task_log.c.host), Index('ix_task_log_period_ending', task_log.c.period_ending), # quota_classes Index('ix_quota_classes_class_name', quota_classes.c.class_name), # quota_usages Index('ix_quota_usages_project_id', quota_usages.c.project_id), Index('ix_quota_usages_user_id_deleted', quota_usages.c.user_id, quota_usages.c.deleted), # volumes Index('volumes_instance_uuid_idx', volumes.c.instance_uuid), ] # MySQL specific indexes if migrate_engine.name == 'mysql': for index in mysql_pre_indexes: index.create(migrate_engine) # mysql-specific index by leftmost 100 chars. (mysql gets angry if the # index key length is too long.) 
sql = ("create index migrations_by_host_nodes_and_status_idx ON " "migrations (deleted, source_compute(100), dest_compute(100), " "source_node(100), dest_node(100), status)") migrate_engine.execute(sql) # PostgreSQL specific indexes if migrate_engine.name == 'postgresql': Index('address', fixed_ips.c.address).create() # NOTE(dprince): PostgreSQL doesn't allow duplicate indexes # so we skip creation of select indexes (so schemas match exactly). POSTGRES_INDEX_SKIPS = [ # See Havana migration 186_new_bdm_format where we dropped the # virtual_name column. # IceHouse fix is here: https://bugs.launchpad.net/nova/+bug/1265839 'block_device_mapping_instance_uuid_virtual_name_device_name_idx' ] MYSQL_INDEX_SKIPS = [ # we create this one manually for MySQL above 'migrations_by_host_nodes_and_status_idx' ] for index in common_indexes: if ((migrate_engine.name == 'postgresql' and index.name in POSTGRES_INDEX_SKIPS) or (migrate_engine.name == 'mysql' and index.name in MYSQL_INDEX_SKIPS)): continue else: index.create(migrate_engine) Index('project_id', dns_domains.c.project_id).drop # Common foreign keys fkeys = [ [[instance_type_projects.c.instance_type_id], [instance_types.c.id], 'instance_type_projects_ibfk_1'], [[iscsi_targets.c.volume_id], [volumes.c.id], 'iscsi_targets_volume_id_fkey'], [[reservations.c.usage_id], [quota_usages.c.id], 'reservations_ibfk_1'], [[security_group_instance_association.c.security_group_id], [security_groups.c.id], 'security_group_instance_association_ibfk_1'], [[compute_node_stats.c.compute_node_id], [compute_nodes.c.id], 'fk_compute_node_stats_compute_node_id'], [[compute_nodes.c.service_id], [services.c.id], 'fk_compute_nodes_service_id'], ] secgroup_instance_association_instance_uuid_fkey = ( 'security_group_instance_association_instance_uuid_fkey') fkeys.extend( [ [[fixed_ips.c.instance_uuid], [instances.c.uuid], 'fixed_ips_instance_uuid_fkey'], [[block_device_mapping.c.instance_uuid], [instances.c.uuid], 
'block_device_mapping_instance_uuid_fkey'], [[instance_info_caches.c.instance_uuid], [instances.c.uuid], 'instance_info_caches_instance_uuid_fkey'], [[instance_metadata.c.instance_uuid], [instances.c.uuid], 'instance_metadata_instance_uuid_fkey'], [[instance_system_metadata.c.instance_uuid], [instances.c.uuid], 'instance_system_metadata_ibfk_1'], [[security_group_instance_association.c.instance_uuid], [instances.c.uuid], secgroup_instance_association_instance_uuid_fkey], [[virtual_interfaces.c.instance_uuid], [instances.c.uuid], 'virtual_interfaces_instance_uuid_fkey'], [[instance_actions.c.instance_uuid], [instances.c.uuid], 'fk_instance_actions_instance_uuid'], [[instance_faults.c.instance_uuid], [instances.c.uuid], 'fk_instance_faults_instance_uuid'], [[migrations.c.instance_uuid], [instances.c.uuid], 'fk_migrations_instance_uuid'] ]) for fkey_pair in fkeys: if migrate_engine.name in ('mysql'): # For MySQL we name our fkeys explicitly # so they match Havana fkey = ForeignKeyConstraint(columns=fkey_pair[0], refcolumns=fkey_pair[1], name=fkey_pair[2]) fkey.create() elif migrate_engine.name == 'postgresql': # PostgreSQL names things like it wants (correct and compatible!) fkey = ForeignKeyConstraint(columns=fkey_pair[0], refcolumns=fkey_pair[1]) fkey.create() if migrate_engine.name == 'mysql': # In Folsom we explicitly converted migrate_version to UTF8. migrate_engine.execute( 'ALTER TABLE migrate_version CONVERT TO CHARACTER SET utf8') # Set default DB charset to UTF8. migrate_engine.execute( 'ALTER DATABASE %s DEFAULT CHARACTER SET utf8' % migrate_engine.url.database) _create_shadow_tables(migrate_engine) _create_dump_tables(migrate_engine)
hanlind/nova
nova/db/sqlalchemy/migrate_repo/versions/216_havana.py
Python
apache-2.0
62,236
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.platform.http.spi;

import org.apache.camel.Consumer;
import org.apache.camel.Processor;
import org.apache.camel.component.platform.http.PlatformHttpEndpoint;

/**
 * An abstraction of an HTTP Server engine on which HTTP endpoints can be deployed.
 * <p>
 * Implementations bridge a concrete HTTP server (e.g. a servlet container or Vert.x)
 * to Camel by producing {@link Consumer} instances for platform-http endpoints.
 */
public interface PlatformHttpEngine {

    /**
     * Creates a new {@link Consumer} for the given {@link PlatformHttpEndpoint}.
     *
     * @param platformHttpEndpoint the {@link PlatformHttpEndpoint} to create a consumer for
     * @param processor            the {@link Processor} the created consumer hands incoming
     *                             messages to (standard Camel consumer wiring)
     * @return a new {@link Consumer}
     */
    Consumer createConsumer(PlatformHttpEndpoint platformHttpEndpoint, Processor processor);
}
DariusX/camel
components/camel-platform-http/src/main/java/org/apache/camel/component/platform/http/spi/PlatformHttpEngine.java
Java
apache-2.0
1,510
/*
 * Copyright 1999-2006 University of Chicago
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * close barrier test
 * ------------------
 *
 * verifies that all successful callbacks are delivered before
 * any eof callbacks.
 *
 * working options
 *  -i        call finish inline
 *  -r <int>  number of outstanding reads that can be out at once
 *  -R <int>  total number of bytes to "read"
 *  -w <int>  number of outstanding writes that can be out at once
 *  -W <int>  total number of bytes to "write"
 *  -c <int>  chunk size to finish at once
 *  -b <int>  buffer size to post
 *
 * test suite
 * ----------
 * - should be called w/ and w/o -i                : * 2
 * - different amounts of reads and writes.
 *   (0,1,2,4,8) * (0,1,2,4,8) - 1                 : * 24
 *   [not doing 0x0]
 * - called with -c < -b and with                  : * 3 ( / 2; / 2.3; / 1)
 *   numbers that do not end in nice math
 * - different drivers                             : * 6
 *   1) transport
 *   2) transport simple
 *   3) transport bounce
 *   4) transport simple bounce
 *   5) transport simple bounce simple
 *   6) transport bounce simple bounce
 *                                                   864
 */
#include "globus_xio.h"
#include "globus_common.h"
#include "test_common.h"
#include "globus_xio_test_transport.h"

/* Guards globus_l_closed and serializes the close/signal handshake. */
static globus_mutex_t                   globus_l_mutex;
static globus_cond_t                    globus_l_cond;
/* NOTE(review): written in main() but never read in this file; kept for
 * compatibility with the test harness layout. */
static globus_bool_t                    globus_l_close_called = GLOBUS_FALSE;
/* Set once globus_xio_close() has been registered; main() waits on it. */
static globus_bool_t                    globus_l_closed = GLOBUS_FALSE;

/* Number of reads or writes posted from the open callback. */
#define OP_COUNT 8
#define SLEEP_TIME 10000

/*
 * Data (read/write) completion callback.
 *
 * The first completion registers a close on the handle and signals the
 * main thread.  Subsequent completions take the same path; the test
 * framework (test_res) flags any unexpected failure.
 */
static
void
data_cb(
    globus_xio_handle_t                 handle,
    globus_result_t                     result,
    globus_byte_t *                     buffer,
    globus_size_t                       len,
    globus_size_t                       nbytes,
    globus_xio_data_descriptor_t        data_desc,
    void *                              user_arg)
{
    globus_result_t                     res;

    globus_mutex_lock(&globus_l_mutex);
    {
        res = globus_xio_close(
                handle,
                NULL);
        test_res(GLOBUS_XIO_TEST_FAIL_NONE, res, __LINE__, __FILE__);
        globus_l_closed = GLOBUS_TRUE;
        globus_cond_signal(&globus_l_cond);
    }
    globus_mutex_unlock(&globus_l_mutex);
}

/*
 * Open completion callback.
 *
 * Posts OP_COUNT outstanding operations on the freshly opened handle:
 * writes when the parsed test parameters request any writes, reads
 * otherwise.  Each operation completes via data_cb above.
 */
static
void
open_cb(
    globus_xio_handle_t                 handle,
    globus_result_t                     result,
    void *                              user_arg)
{
    globus_result_t                     res;
    int                                 ctr;
    globus_byte_t *                     buffer;
    globus_size_t                       buffer_length;

    buffer = globus_l_test_info.buffer;
    buffer_length = globus_l_test_info.buffer_length;

    globus_mutex_lock(&globus_l_mutex);
    {
        for(ctr = 0; ctr < OP_COUNT; ctr++)
        {
            if(globus_l_test_info.write_count > 0)
            {
                res = globus_xio_register_write(
                        handle,
                        buffer,
                        buffer_length,
                        buffer_length,
                        NULL,
                        data_cb,
                        user_arg);
            }
            else
            {
                res = globus_xio_register_read(
                        handle,
                        buffer,
                        buffer_length,
                        buffer_length,
                        NULL,
                        data_cb,
                        user_arg);
            }
            test_res(GLOBUS_XIO_TEST_FAIL_NONE, res, __LINE__, __FILE__);
        }
    }
    globus_mutex_unlock(&globus_l_mutex);
}

/*
 * Test entry point.
 *
 * Activates the XIO module, builds a stack/attr from the command line,
 * registers an asynchronous open (open_cb -> data_cb -> close), waits
 * for the close to be registered, then tears everything down.  Returns
 * 0 on success; any XIO failure aborts via test_res/globus_assert.
 */
int
close_barrier2_main(
    int                                 argc,
    char **                             argv)
{
    int                                 rc;
    globus_xio_stack_t                  stack;
    globus_xio_handle_t                 handle;
    globus_result_t                     res;
    globus_xio_attr_t                   attr;
    /* removed: unused local `globus_abstime_t end_time;` (dead code,
     * compiler warning) */

    globus_l_close_called = GLOBUS_FALSE;
    globus_l_closed = GLOBUS_FALSE;

    rc = globus_module_activate(GLOBUS_XIO_MODULE);
    globus_assert(rc == 0);

    res = globus_xio_attr_init(&attr);
    test_res(GLOBUS_XIO_TEST_FAIL_NONE, res, __LINE__, __FILE__);

    res = globus_xio_stack_init(&stack, NULL);
    test_res(GLOBUS_XIO_TEST_FAIL_NONE, res, __LINE__, __FILE__);

    parse_parameters(argc, argv, stack, attr);

    globus_mutex_init(&globus_l_mutex, NULL);
    globus_cond_init(&globus_l_cond, NULL);

    res = globus_xio_handle_create(&handle, stack);
    test_res(GLOBUS_XIO_TEST_FAIL_NONE, res, __LINE__, __FILE__);

    res = globus_xio_register_open(
            handle,
            "whatever",
            attr,
            open_cb,
            NULL);
    test_res(GLOBUS_XIO_TEST_FAIL_NONE, res, __LINE__, __FILE__);

    /* Wait until data_cb has registered the close. */
    globus_mutex_lock(&globus_l_mutex);
    {
        while(!globus_l_closed)
        {
            globus_cond_wait(&globus_l_cond, &globus_l_mutex);
        }
    }
    globus_mutex_unlock(&globus_l_mutex);

    res = globus_xio_attr_destroy(attr);
    test_res(GLOBUS_XIO_TEST_FAIL_NONE, res, __LINE__, __FILE__);

    res = globus_xio_stack_destroy(stack);
    test_res(GLOBUS_XIO_TEST_FAIL_NONE, res, __LINE__, __FILE__);

    test_common_end();

    globus_mutex_destroy(&globus_l_mutex);
    globus_cond_destroy(&globus_l_cond);

    rc = globus_module_deactivate(GLOBUS_XIO_MODULE);
    globus_assert(rc == 0);

    fprintf(stdout, "Success.\n");

    return 0;
}
gridcf/gct
xio/src/test/close_barrier2_test.c
C
apache-2.0
6,404
/* * Copyright 2004-2009 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.compass.gps.device.jdbc; import org.compass.gps.device.jdbc.snapshot.JdbcSnapshotEventListener; import org.compass.gps.device.jdbc.snapshot.JdbcSnapshotPersister; import org.compass.gps.device.jdbc.snapshot.RAMJdbcSnapshotPersister; /** * A helper base class for Jdbc active mirror gps device. * * @author kimchy */ public abstract class AbstractJdbcActiveMirrorGpsDevice extends AbstractJdbcGpsDevice implements JdbcActiveMirrorGpsDevice { private boolean mirrorDataChanges = true; private JdbcSnapshotPersister snapshotPersister = new RAMJdbcSnapshotPersister(); private JdbcSnapshotEventListener snapshotEventListener = new ResultSetSnapshotEventListener(); private boolean saveSnapshotAfterMirror = false; public boolean isMirrorDataChanges() { return mirrorDataChanges; } public void setMirrorDataChanges(boolean mirrorDataChanges) { this.mirrorDataChanges = mirrorDataChanges; } public JdbcSnapshotEventListener getSnapshotEventListener() { return snapshotEventListener; } public void setSnapshotEventListener(JdbcSnapshotEventListener snapshotEventListener) { this.snapshotEventListener = snapshotEventListener; } public JdbcSnapshotPersister getSnapshotPersister() { return snapshotPersister; } public void setSnapshotPersister(JdbcSnapshotPersister snapshotPersister) { this.snapshotPersister = snapshotPersister; } public boolean isSaveSnapshotAfterMirror() { return 
saveSnapshotAfterMirror; } public void setSaveSnapshotAfterMirror(boolean saveSnapshotAfterMirror) { this.saveSnapshotAfterMirror = saveSnapshotAfterMirror; } }
baboune/compass
src/main/src/org/compass/gps/device/jdbc/AbstractJdbcActiveMirrorGpsDevice.java
Java
apache-2.0
2,346
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.engine.impl.util; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.ByteArrayOutputStream; import java.io.Closeable; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.nio.charset.Charset; import org.camunda.bpm.engine.impl.ProcessEngineLogger; /** * @author Tom Baeyens * @author Frederik Heremans * @author Joram Barrez */ public class IoUtil { private static final EngineUtilLogger LOG = ProcessEngineLogger.UTIL_LOGGER; public static byte[] readInputStream(InputStream inputStream, String inputStreamName) { ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); byte[] buffer = new byte[16*1024]; try { int bytesRead = inputStream.read(buffer); while (bytesRead!=-1) { outputStream.write(buffer, 0, bytesRead); bytesRead = inputStream.read(buffer); } } catch (Exception e) { throw LOG.exceptionWhileReadingStream(inputStreamName, e); } return outputStream.toByteArray(); } public static String readFileAsString(String filePath) { byte[] buffer = new byte[(int) getFile(filePath).length()]; BufferedInputStream inputStream = null; try { inputStream = new BufferedInputStream(new FileInputStream(getFile(filePath))); inputStream.read(buffer); } catch(Exception e) { throw LOG.exceptionWhileReadingFile(filePath, e); } finally { 
IoUtil.closeSilently(inputStream); } return new String(buffer, Charset.forName("UTF-8")); } public static File getFile(String filePath) { URL url = IoUtil.class.getClassLoader().getResource(filePath); try { return new File(url.toURI()); } catch (Exception e) { throw LOG.exceptionWhileGettingFile(filePath, e); } } public static void writeStringToFile(String content, String filePath) { BufferedOutputStream outputStream = null; try { outputStream = new BufferedOutputStream(new FileOutputStream(getFile(filePath))); outputStream.write(content.getBytes()); outputStream.flush(); } catch(Exception e) { throw LOG.exceptionWhileWritingToFile(filePath, e); } finally { IoUtil.closeSilently(outputStream); } } /** * Closes the given stream. The same as calling {@link Closeable#close()}, but * errors while closing are silently ignored. */ public static void closeSilently(Closeable closeable) { try { if(closeable != null) { closeable.close(); } } catch(IOException ignore) { LOG.debugCloseException(ignore); } } }
hawky-4s-/camunda-bpm-platform
engine/src/main/java/org/camunda/bpm/engine/impl/util/IoUtil.java
Java
apache-2.0
3,307
--- license: > Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --- cameraOptions ============= カメラの設定をカスタマイズするのためのオプションパラメーターです。 { quality : 75, destinationType : Camera.DestinationType.DATA_URL, sourceType : Camera.PictureSourceType.CAMERA, allowEdit : true, encodingType: Camera.EncodingType.JPEG, targetWidth: 100, targetHeight: 100, popoverOptions: CameraPopoverOptions, saveToPhotoAlbum: false }; オプション ------- - __quality:__ イメージの画質を指定します。 範囲: 0から100 (`Number`) - __destinationType:__ 返り値のフォーマットを指定します。フォーマットは navigator.camera.DestinationType で定義されています。 (`Number`) Camera.DestinationType = { DATA_URL : 0, // 画像を Base64 形式で取得 FILE_URI : 1 // 画像をファイル URI として取得 }; - __sourceType:__ 取得ソースを指定します。ソースは nagivator.camera.PictureSourceType で定義されています。 (`Number`) Camera.PictureSourceType = { PHOTOLIBRARY : 0, CAMERA : 1, SAVEDPHOTOALBUM : 2 }; - __allowEdit:__ イメージ選択の前に、簡単な編集を許可します。 (`Boolean`) - __encodingType:__ 画像ファイルのエンコード形式を選択します。形式は navigator.camera.EncodingType で定義されています。 (`Number`) Camera.EncodingType = { JPEG : 0, // 画像を JPEG 形式で取得 PNG : 1 // 画像を PNG 形式で取得 }; - __targetWidth:__ 画像をスケールするための幅をピクセルで指定します。 targetHeight と同時に使用してください。アスペクト比は保持されます。 (`Number`) - __targetHeight:__ 画像をスケールするための高さをピクセルで指定します。 targetWidth と同時に使用してください。アスペクト比は保持されます。 (`Number`) - __mediaType:__ 画像の取得元を指定します。 
PictureSourceType に PHOTOLIBRARY もしくは SAVEPHOTOALBUM が指定されている場合にのみ有効です。取得元は nagivator.camera.MediaType で定義されています。 (`Number`) Camera.MediaType = { PICTURE: 0, // 取得元は静止画像のみとします。デフォルトです。返り値のフォーマットは DestinationType によって指定されたものになります。 VIDEO: 1, // 取得元はビデオのみとします。戻り値のフォーマットは常にファイル URI となります。 ALLMEDIA : 2 // 全てのメディアタイプからの取得を許可します。 }; - __correctOrientation:__ 写真が撮影されたときと同じ向きになるよう写真を回転させます。 (`Boolean`) - __saveToPhotoAlbum:__ 写真が撮影された後、デバイスのフォトアルバムに画像を保存します。 (`Boolean`) - __popoverOptions:__ iPad でのポップオーバーの位置を指定します。iOS のみのオプションです。 CameraPopoverOptions で定義されます。 Android に関する注意点 -------------- - `allowEdit` は無視されます。 - Camera.PictureSourceType.PHOTOLIBRARY と Camera.PictureSourceType.SAVEDPHOTOALBUM は同じフォトアルバムを表示します。 BlackBerry に関する注意点 ----------------- - `quality` パラメーターは無視されます。 - `sourceType` パラメーターは無視されます。 - `allowEdit` は無視されます。 - 撮影後にカメラアプリを閉じるためには、アプリケーションにキー入力許可の権限が付与されている必要があります。 - 大きなサイズで撮影を行った場合、高画質カメラを搭載したデバイスでエンコードすることができない場合があります。 (Torch 9800 など) - Camera.MediaType はサポートされていません。 - `correctOrientation` パラメーターは無視されます。 WebOS に関する注意点 ----------- - `quality` パラメーターは無視されます。 - `sourceType` パラメーターは無視されます。 - `allowEdit` は無視されます。 - Camera.MediaType はサポートされていません。 - `correctOrientation` パラメーターは無視されます。 - `saveToPhotoAlbum` パラメーターは無視されます。 iOS に関する注意点 -------------- - メモリエラーを防ぐには、 `quality` パラメーターを50以下に設定してください。 - `destinationType.FILE_URI` が使用された場合、撮影された写真や編集された写真はアプリケーションの temporary ディレクトリに保存されます。もしストレージの空きが少ない場合、このディレクトリは navigator.fileMgr API を使って消去できます。 Windows Phone 7 に関する注意点 -------------- - `allowEdit` は無視されます。 - `correctOrientation` パラメーターは無視されます。 Bada 1.2 に関する注意点 -------------- - オプションはサポートされていません。 - 常に FILE URI を返します。
drbeermann/cordova-docs
docs/ja/2.0.0/cordova/camera/parameter/cameraOptions.md
Markdown
apache-2.0
6,162
/* * Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.client.listeners; import com.hazelcast.core.EntryAdapter; import com.hazelcast.core.EntryEvent; import com.hazelcast.multimap.MultiMap; import com.hazelcast.multimap.impl.MultiMapService; import com.hazelcast.test.HazelcastParallelClassRunner; import com.hazelcast.test.annotation.ParallelJVMTest; import com.hazelcast.test.annotation.QuickTest; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import java.util.UUID; @RunWith(HazelcastParallelClassRunner.class) @Category({QuickTest.class, ParallelJVMTest.class}) public class MultiMapEntryListenerOnReconnectTest extends AbstractListenersOnReconnectTest { private MultiMap<String, String> multiMap; @Override String getServiceName() { return MultiMapService.SERVICE_NAME; } @Override protected UUID addListener() { multiMap = client.getMultiMap(randomString()); EntryAdapter<String, String> listener = new EntryAdapter<String, String>() { @Override public void onEntryEvent(EntryEvent<String, String> event) { onEvent(event.getKey()); } }; return multiMap.addEntryListener(listener, true); } @Override public void produceEvent(String event) { multiMap.put(event, randomString()); } @Override public boolean removeListener(UUID registrationId) { return multiMap.removeEntryListener(registrationId); } }
emre-aydin/hazelcast
hazelcast/src/test/java/com/hazelcast/client/listeners/MultiMapEntryListenerOnReconnectTest.java
Java
apache-2.0
2,107
var LoadingScreen = { startLoad: function(successCallback, errorCallback, options) { if(!options) { options = {}; } var _options = { label: options.label ? options.label : 'Loading...', style: options.style ? options.style : LoadingScreen.STYLEWHITELARGE, bgColor: options.isBlackBackground ? 'black' : 'clear' }; if(options.frame) { jQuery.extend(_options, {frame: options.frame}); } DynamicApp.exec(successCallback, errorCallback, 'LoadingScreen', 'startLoad', [_options]); }, stopLoad: function(successCallback, errorCallback) { DynamicApp.exec(successCallback, errorCallback, 'LoadingScreen', 'stopLoad', [{}]); } }; LoadingScreen.STYLEWHITELARGE = 0; LoadingScreen.STYLEWHITE = 1; LoadingScreen.STYLEGRAY = 2;
dynamicapp/dynamicapp
lib/www/js/loadingScreen.js
JavaScript
apache-2.0
870
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Generic; using System.Linq; using System.Runtime.CompilerServices; using System.Threading; using Roslyn.Utilities; namespace Microsoft.CodeAnalysis.Shared.Extensions { internal static partial class INamespaceOrTypeSymbolExtensions { private static readonly ConditionalWeakTable<INamespaceOrTypeSymbol, List<string>> s_namespaceOrTypeToNameMap = new ConditionalWeakTable<INamespaceOrTypeSymbol, List<string>>(); public static readonly ConditionalWeakTable<INamespaceOrTypeSymbol, List<string>>.CreateValueCallback s_getNamePartsCallBack = namespaceSymbol => { var result = new List<string>(); GetNameParts(namespaceSymbol, result); return result; }; private static readonly SymbolDisplayFormat s_shortNameFormat = new SymbolDisplayFormat( miscellaneousOptions: SymbolDisplayMiscellaneousOptions.UseSpecialTypes | SymbolDisplayMiscellaneousOptions.ExpandNullable); public static string GetShortName(this INamespaceOrTypeSymbol symbol) { return symbol.ToDisplayString(s_shortNameFormat); } public static IEnumerable<IPropertySymbol> GetIndexers(this INamespaceOrTypeSymbol symbol) { return symbol == null ? 
SpecializedCollections.EmptyEnumerable<IPropertySymbol>() : symbol.GetMembers(WellKnownMemberNames.Indexer).OfType<IPropertySymbol>().Where(p => p.IsIndexer); } public static IReadOnlyList<string> GetNameParts(this INamespaceOrTypeSymbol symbol) => s_namespaceOrTypeToNameMap.GetValue(symbol, s_getNamePartsCallBack); public static int CompareNameParts( IReadOnlyList<string> names1, IReadOnlyList<string> names2, bool placeSystemNamespaceFirst) { for (var i = 0; i < Math.Min(names1.Count, names2.Count); i++) { var name1 = names1[i]; var name2 = names2[i]; if (i == 0 && placeSystemNamespaceFirst) { var name1IsSystem = name1 == nameof(System); var name2IsSystem = name2 == nameof(System); if (name1IsSystem && !name2IsSystem) { return -1; } else if (!name1IsSystem && name2IsSystem) { return 1; } } var comp = name1.CompareTo(name2); if (comp != 0) { return comp; } } return names1.Count - names2.Count; } private static void GetNameParts(INamespaceOrTypeSymbol namespaceOrTypeSymbol, List<string> result) { if (namespaceOrTypeSymbol == null || (namespaceOrTypeSymbol.IsNamespace && ((INamespaceSymbol)namespaceOrTypeSymbol).IsGlobalNamespace)) { return; } GetNameParts(namespaceOrTypeSymbol.ContainingNamespace, result); result.Add(namespaceOrTypeSymbol.Name); } /// <summary> /// Lazily returns all nested types contained (recursively) within this namespace or type. /// In case of a type, it is included itself as the first result. /// </summary> public static IEnumerable<INamedTypeSymbol> GetAllTypes( this INamespaceOrTypeSymbol namespaceOrTypeSymbol, CancellationToken cancellationToken) { var stack = new Stack<INamespaceOrTypeSymbol>(); stack.Push(namespaceOrTypeSymbol); while (stack.Count > 0) { cancellationToken.ThrowIfCancellationRequested(); var current = stack.Pop(); if (current is INamespaceSymbol currentNs) { stack.Push(currentNs.GetMembers()); } else { var namedType = (INamedTypeSymbol)current; stack.Push(namedType.GetTypeMembers()); yield return namedType; } } } } }
DustinCampbell/roslyn
src/Workspaces/Core/Portable/Shared/Extensions/INamespaceOrTypeSymbolExtensions.cs
C#
apache-2.0
4,500
/** @file @brief Test Implementation @date 2015 @author Sensics, Inc. <http://sensics.com/osvr> */ // Copyright 2015 Sensics, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Internal Includes #include "TypePackTestShared.h" // Yes, I know these are all static (compile-time) tests, but using the gtest // structure to split them into logical units. TEST(TypePack, splitList) { static_assert(is_same<tp::head<mylist>, myhead>::value, "Correct head"); static_assert(is_same<tp::tail<mylist>, mytail>::value, "Correct tail"); }
ccccjason/OSVR_device_plugin
tests/cplusplus/TypePack/SplitList.cpp
C++
apache-2.0
1,078
""" Support for Vera sensors. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/sensor.vera/ """ import logging from homeassistant.const import ( TEMP_CELSIUS, TEMP_FAHRENHEIT) from homeassistant.helpers.entity import Entity from homeassistant.components.sensor import ENTITY_ID_FORMAT from homeassistant.components.vera import ( VERA_CONTROLLER, VERA_DEVICES, VeraDevice) DEPENDENCIES = ['vera'] _LOGGER = logging.getLogger(__name__) def setup_platform(hass, config, add_devices, discovery_info=None): """Perform the setup for Vera controller devices.""" add_devices( VeraSensor(device, VERA_CONTROLLER) for device in VERA_DEVICES['sensor']) class VeraSensor(VeraDevice, Entity): """Representation of a Vera Sensor.""" def __init__(self, vera_device, controller): """Initialize the sensor.""" self.current_value = None self._temperature_units = None VeraDevice.__init__(self, vera_device, controller) self.entity_id = ENTITY_ID_FORMAT.format(self.vera_id) @property def state(self): """Return the name of the sensor.""" return self.current_value @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" if self.vera_device.category == "Temperature Sensor": return self._temperature_units elif self.vera_device.category == "Light Sensor": return 'lux' elif self.vera_device.category == "Humidity Sensor": return '%' def update(self): """Update the state.""" if self.vera_device.category == "Temperature Sensor": self.current_value = self.vera_device.temperature vera_temp_units = ( self.vera_device.vera_controller.temperature_units) if vera_temp_units == 'F': self._temperature_units = TEMP_FAHRENHEIT else: self._temperature_units = TEMP_CELSIUS elif self.vera_device.category == "Light Sensor": self.current_value = self.vera_device.light elif self.vera_device.category == "Humidity Sensor": self.current_value = self.vera_device.humidity elif self.vera_device.category == "Sensor": 
tripped = self.vera_device.is_tripped self.current_value = 'Tripped' if tripped else 'Not Tripped' else: self.current_value = 'Unknown'
morphis/home-assistant
homeassistant/components/sensor/vera.py
Python
apache-2.0
2,532
import Enumerator from './enumerator'; import { makeSettledResult } from './enumerator'; import Promise from './promise'; import { o_create } from './utils'; function AllSettled(Constructor, entries, label) { this._superConstructor(Constructor, entries, false /* don't abort on reject */, label); } AllSettled.prototype = o_create(Enumerator.prototype); AllSettled.prototype._superConstructor = Enumerator; AllSettled.prototype._makeResult = makeSettledResult; AllSettled.prototype._validationError = function() { return new Error("allSettled must be called with an array"); }; /** `RSVP.allSettled` is similar to `RSVP.all`, but instead of implementing a fail-fast method, it waits until all the promises have returned and shows you all the results. This is useful if you want to handle multiple promises' failure states together as a set. Returns a promise that is fulfilled when all the given promises have been settled. The return promise is fulfilled with an array of the states of the promises passed into the `promises` array argument. Each state object will either indicate fulfillment or rejection, and provide the corresponding value or reason. The states will take one of the following formats: ```javascript { state: 'fulfilled', value: value } or { state: 'rejected', reason: reason } ``` Example: ```javascript var promise1 = RSVP.Promise.resolve(1); var promise2 = RSVP.Promise.reject(new Error('2')); var promise3 = RSVP.Promise.reject(new Error('3')); var promises = [ promise1, promise2, promise3 ]; RSVP.allSettled(promises).then(function(array){ // array == [ // { state: 'fulfilled', value: 1 }, // { state: 'rejected', reason: Error }, // { state: 'rejected', reason: Error } // ] // Note that for the second item, reason.message will be "2", and for the // third item, reason.message will be "3". }, function(error) { // Not run. (This block would only be called if allSettled had failed, // for instance if passed an incorrect argument type.) 
}); ``` @method allSettled @static @for RSVP @param {Array} promises @param {String} label - optional string that describes the promise. Useful for tooling. @return {Promise} promise that is fulfilled with an array of the settled states of the constituent promises. */ export default function allSettled(entries, label) { return new AllSettled(Promise, entries, label).promise; }
drdrej/webapi-skeleton
source/node_modules/rsvp/lib/rsvp/all-settled.js
JavaScript
apache-2.0
2,474
using Shouldly; using System.Diagnostics; using Xunit; namespace StructureMap.Testing.Bugs { public class container_configuration_with_generics_inheritance { public class Base { } public class Derived1 : Base { } public class Derived2 : Derived1 { } public class Derived3 : Derived2 { } public class Derived4 : Derived3 { } public class Derived5 : Derived4 { } public class Derived6 : Derived5 { } private static bool CanResolve<T>(IContainer container) { return container.TryGetInstance(typeof(T)) != null; } public class Register_generic_types_with_contravariance { [Fact] public void configure_container_during_construction() { var container = new Container(new TestRegistry()); AssertConfigurationIsCorrect(container); } [Fact] public void configure_container_after_construction() { var container = new Container(); container.Configure(x => x.IncludeRegistry(new TestRegistry())); AssertConfigurationIsCorrect(container); } private static void AssertConfigurationIsCorrect(Container container) { // can resolve explicitly registered generics container.GetInstance<IGenericContravariant<Derived1>>().ShouldBeOfType(typeof(GenericContravariant<Derived1>)); container.GetInstance<IGenericContravariant<Derived3>>().ShouldBeOfType(typeof(GenericContravariant<Derived3>)); container.GetInstance<IGenericContravariant<Derived5>>().ShouldBeOfType(typeof(GenericContravariant<Derived5>)); // contravariance - can resolve generics with type param which inherits from an explicitly registered one // KNOWING WHICH TYPE EXACTLY IT WILL BE IS NOT SUPPORTED, AND NOT GOING TO. 
BUT AT LEAST CHECK IT'S ONE OF THE CORRECT ONES CanResolve<IGenericContravariant<Base>>(container).ShouldBeFalse(); container.GetInstance<IGenericContravariant<Derived2>>().ShouldBeOfType(typeof(GenericContravariant<Derived1>)); container.GetInstance<IGenericContravariant<Derived4>>().GetType().ShouldBeOneOf(typeof(GenericContravariant<Derived1>), typeof(GenericContravariant<Derived3>)); container.GetInstance<IGenericContravariant<Derived6>>().GetType().ShouldBeOneOf(typeof(GenericContravariant<Derived1>), typeof(GenericContravariant<Derived3>), typeof(GenericContravariant<Derived5>)); } public interface IGenericContravariant<in T> { } public class GenericContravariant<T> : IGenericContravariant<T> { } public class TestRegistry : Registry { public TestRegistry() { // registration order: // Derived1 before Derived5 - base class before inheriting class // Derived5 before Derived3 - base class after inheriting class For<IGenericContravariant<Derived1>>().Use<GenericContravariant<Derived1>>(); For<IGenericContravariant<Derived5>>().Use<GenericContravariant<Derived5>>(); For<IGenericContravariant<Derived3>>().Use<GenericContravariant<Derived3>>(); } } } public class Register_generic_types_with_covariance { [Fact] public void configure_container_during_construction() { var container = new Container(new TestRegistry()); AssertConfigurationIsCorrect(container); } [Fact] public void configure_container_after_construction() { var container = new Container(); container.Configure(x => x.IncludeRegistry(new TestRegistry())); AssertConfigurationIsCorrect(container); Debug.WriteLine(container.WhatDoIHave()); } private static void AssertConfigurationIsCorrect(Container container) { // can resolve explicitly registered generics container.GetInstance<IGenericCovariant<Derived1>>().ShouldBeOfType(typeof(GenericCovariant<Derived1>)); container.GetInstance<IGenericCovariant<Derived3>>().ShouldBeOfType(typeof(GenericCovariant<Derived3>)); 
container.GetInstance<IGenericCovariant<Derived5>>().ShouldBeOfType(typeof(GenericCovariant<Derived5>)); // covariance - can resolve generics with type param which is base of an explicitly registered one // KNOWING WHICH TYPE EXACTLY IT WILL BE IS NOT SUPPORTED, AND NOT GOING TO. BUT AT LEAST CHECK IT'S ONE OF THE CORRECT ONES container.GetInstance<IGenericCovariant<Base>>().GetType().ShouldBeOneOf(typeof(GenericCovariant<Derived5>), typeof(GenericCovariant<Derived3>), typeof(GenericCovariant<Derived1>)); container.GetInstance<IGenericCovariant<Derived2>>().GetType().ShouldBeOneOf(typeof(GenericCovariant<Derived5>), typeof(GenericCovariant<Derived3>)); container.GetInstance<IGenericCovariant<Derived4>>().ShouldBeOfType(typeof(GenericCovariant<Derived5>)); CanResolve<IGenericCovariant<Derived6>>(container).ShouldBeFalse(); } public interface IGenericCovariant<out T> { } public class GenericCovariant<T> : IGenericCovariant<T> { } public class TestRegistry : Registry { public TestRegistry() { // registration order: // Derived1 before Derived5 - base class before inheriting class // Derived5 before Derived3 - base class after inheriting class For<IGenericCovariant<Derived1>>().Use<GenericCovariant<Derived1>>(); For<IGenericCovariant<Derived5>>().Use<GenericCovariant<Derived5>>(); For<IGenericCovariant<Derived3>>().Use<GenericCovariant<Derived3>>(); } } } public class Register_generic_types_with_no_variance { [Fact] public void configure_container_during_construction() { var container = new Container(new TestRegistry()); AssertConfigurationIsCorrect(container); } [Fact] public void configure_container_after_construction() { var container = new Container(); container.Configure(x => x.IncludeRegistry(new TestRegistry())); AssertConfigurationIsCorrect(container); } private static void AssertConfigurationIsCorrect(Container container) { // can resolve explicitly registered generics container.GetInstance<IGeneric<Derived1>>().ShouldBeOfType(typeof(Generic<Derived1>)); 
container.GetInstance<IGeneric<Derived3>>().ShouldBeOfType(typeof(Generic<Derived3>)); container.GetInstance<IGeneric<Derived5>>().ShouldBeOfType(typeof(Generic<Derived5>)); // no variance - can't resolve generics with base or inheriting classes CanResolve<IGeneric<Base>>(container).ShouldBeFalse(); CanResolve<IGeneric<Derived2>>(container).ShouldBeFalse(); CanResolve<IGeneric<Derived4>>(container).ShouldBeFalse(); CanResolve<IGeneric<Derived6>>(container).ShouldBeFalse(); } public interface IGeneric<T> { } public class Generic<T> : IGeneric<T> { } public class TestRegistry : Registry { public TestRegistry() { // registration order: // Derived1 before Derived5 - base class before inheriting class // Derived5 before Derived3 - base class after inheriting class For<IGeneric<Derived1>>().Use<Generic<Derived1>>(); For<IGeneric<Derived5>>().Use<Generic<Derived5>>(); For<IGeneric<Derived3>>().Use<Generic<Derived3>>(); } } } public class Register_generic_types_with_inheritance { [Fact] public void configure_container_during_construction() { var container = new Container(new TestRegistry()); AssertConfigurationIsCorrect(container); } [Fact] public void configure_container_after_construction() { var container = new Container(); container.Configure(x => x.IncludeRegistry(new TestRegistry())); AssertConfigurationIsCorrect(container); } private static void AssertConfigurationIsCorrect(Container container) { // can resolve explicitly registered generics container.GetInstance<IGeneric<string>>().ShouldBeOfType(typeof(GenericImpl<string>)); container.GetInstance<IGeneric<string>>().Value.ShouldBe("base"); container.GetInstance<IMoreSpecificGeneric<string>>().ShouldBeOfType(typeof(GenericImpl<string>)); container.GetInstance<IMoreSpecificGeneric<string>>().Value.ShouldBe("derived"); container.GetInstance<IMostSpecificGeneric<string>>().ShouldBeOfType(typeof(GenericImpl<string>)); container.GetInstance<IMostSpecificGeneric<string>>().Value.ShouldBe("most derived"); } public 
interface IGeneric<T> { T Value { get; } } public interface IMoreSpecificGeneric<T> : IGeneric<T> { } public interface IMostSpecificGeneric<T> : IMoreSpecificGeneric<T> { } public class GenericImpl<T> : IMostSpecificGeneric<string> { public string Value { get; set; } } public class TestRegistry : Registry { public TestRegistry() { // registration order: // IGeneric before IMostSpecificGeneric - base class before inheriting class // IMostSpecificGeneric before IMoreSpecificGeneric - base class after inheriting class For<IGeneric<string>>().Use(new GenericImpl<string> { Value = "base" }); For<IMostSpecificGeneric<string>>().Use(new GenericImpl<string> { Value = "most derived" }); For<IMoreSpecificGeneric<string>>().Use(new GenericImpl<string> { Value = "derived" }); } } } } }
DixonD-git/structuremap
src/StructureMap.Testing/Bugs/container_configuration_with_generics_inheritance.cs
C#
apache-2.0
10,985
/* * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms. * * * * * * * * * * * * * * * * * * * * */ /* * * * * * * Written by Doug Lea with assistance from members of JCP JSR-166 * Expert Group and released to the public domain, as explained at * http://creativecommons.org/publicdomain/zero/1.0/ */ package java.util.concurrent; /** * An object that executes submitted {@link Runnable} tasks. This * interface provides a way of decoupling task submission from the * mechanics of how each task will be run, including details of thread * use, scheduling, etc. An {@code Executor} is normally used * instead of explicitly creating threads. For example, rather than * invoking {@code new Thread(new(RunnableTask())).start()} for each * of a set of tasks, you might use: * * <pre> * Executor executor = <em>anExecutor</em>; * executor.execute(new RunnableTask1()); * executor.execute(new RunnableTask2()); * ... * </pre> * * However, the {@code Executor} interface does not strictly * require that execution be asynchronous. In the simplest case, an * executor can run the submitted task immediately in the caller's * thread: * * <pre> {@code * class DirectExecutor implements Executor { * public void execute(Runnable r) { * r.run(); * } * }}</pre> * * More typically, tasks are executed in some thread other * than the caller's thread. The executor below spawns a new thread * for each task. * * <pre> {@code * class ThreadPerTaskExecutor implements Executor { * public void execute(Runnable r) { * new Thread(r).start(); * } * }}</pre> * * Many {@code Executor} implementations impose some sort of * limitation on how and when tasks are scheduled. The executor below * serializes the submission of tasks to a second executor, * illustrating a composite executor. 
* * <pre> {@code * class SerialExecutor implements Executor { * final Queue<Runnable> tasks = new ArrayDeque<Runnable>(); * final Executor executor; * Runnable active; * * SerialExecutor(Executor executor) { * this.executor = executor; * } * * public synchronized void execute(final Runnable r) { * tasks.offer(new Runnable() { * public void run() { * try { * r.run(); * } finally { * scheduleNext(); * } * } * }); * if (active == null) { * scheduleNext(); * } * } * * protected synchronized void scheduleNext() { * if ((active = tasks.poll()) != null) { * executor.execute(active); * } * } * }}</pre> * * The {@code Executor} implementations provided in this package * implement {@link ExecutorService}, which is a more extensive * interface. The {@link ThreadPoolExecutor} class provides an * extensible thread pool implementation. The {@link Executors} class * provides convenient factory methods for these Executors. * * <p>Memory consistency effects: Actions in a thread prior to * submitting a {@code Runnable} object to an {@code Executor} * <a href="package-summary.html#MemoryVisibility"><i>happen-before</i></a> * its execution begins, perhaps in another thread. * * @since 1.5 * @author Doug Lea */ public interface Executor { /** * Executes the given command at some time in the future. The command * may execute in a new thread, in a pooled thread, or in the calling * thread, at the discretion of the {@code Executor} implementation. * * @param command the runnable task * @throws RejectedExecutionException if this task cannot be * accepted for execution * @throws NullPointerException if command is null */ void execute(Runnable command); }
shun634501730/java_source_cn
src_en/java/util/concurrent/Executor.java
Java
apache-2.0
3,748
/*
 * sesadmin.c - an sesman administration tool
 * (c) 2008 Simone Fedele
 */

#include "arch.h"
#include "tcp.h"
#include "libscp.h"
#include "parse.h"
#include "log.h"
#include "libscp.h"

#include <stdio.h>
#include <unistd.h>

/* command-line option buffers: 256 usable characters plus NUL terminator */
char user[257];
char pass[257];
char cmnd[257];
char serv[257];
char port[257];

struct log_config logging;

void cmndList(struct SCP_CONNECTION *c);
void cmndKill(struct SCP_CONNECTION *c, struct SCP_SESSION *s);
void cmndHelp(void);
int inputSession(struct SCP_SESSION *s);
unsigned int menuSelect(unsigned int choices);

/*
 * Entry point. Parses -u= -p= -s= -i= -c= options, connects to the sesman
 * management port and dispatches the requested command ("list" or
 * "kill:<sid>"). Returns 0 on success, 1 on usage or connection error.
 */
int main(int argc, char **argv)
{
    struct SCP_SESSION *s;
    struct SCP_CONNECTION *c;
    enum SCP_CLIENT_STATES_E e;
    int idx;
    int sock;
    char *pwd;

    user[0] = '\0';
    pass[0] = '\0';
    cmnd[0] = '\0';
    serv[0] = '\0';
    port[0] = '\0';

    logging.program_name = g_strdup("sesadmin");
    logging.log_file = g_strdup("xrdp-sesadmin.log");
    logging.log_level = LOG_LEVEL_DEBUG;
    logging.enable_syslog = 0;
    log_start_from_param(&logging);

    /* starts at 0 so argv[0] is scanned too; harmless because a program
       name never matches a "-X=" prefix */
    for (idx = 0; idx < argc; idx++)
    {
        if (0 == g_strncmp(argv[idx], "-u=", 3))
        {
            g_strncpy(user, (argv[idx]) + 3, 256);
        }
        else if (0 == g_strncmp(argv[idx], "-p=", 3))
        {
            g_strncpy(pass, (argv[idx]) + 3, 256);
        }
        else if (0 == g_strncmp(argv[idx], "-s=", 3))
        {
            g_strncpy(serv, (argv[idx]) + 3, 256);
        }
        else if (0 == g_strncmp(argv[idx], "-i=", 3))
        {
            g_strncpy(port, (argv[idx]) + 3, 256);
        }
        else if (0 == g_strncmp(argv[idx], "-c=", 3))
        {
            g_strncpy(cmnd, (argv[idx]) + 3, 256);
        }
    }

    /* FIX: print the usage screen and bail out when no command was
       requested; cmndHelp() previously existed but was never called, and
       the tool silently connected and did nothing */
    if ('\0' == cmnd[0])
    {
        cmndHelp();
        return 1;
    }

    if (0 == g_strncmp(serv, "", 1))
    {
        g_strncpy(serv, "localhost", 256);
    }

    if (0 == g_strncmp(port, "", 1))
    {
        g_strncpy(port, "3350", 256);
    }

    if (0 == g_strncmp(user, "", 1))
    {
        g_strncpy(user, "root", 256);
    }

    if (0 == g_strncmp(pass, "", 1))
    {
        pwd = getpass("password:");
        g_strncpy(pass, pwd, 256);

        /* zero the static buffer returned by getpass() so the password
           does not linger in memory */
        while ((*pwd) != '\0')
        {
            (*pwd) = 0x00;
            pwd++;
        }
    }

    scp_init(&logging);

    sock = g_tcp_socket();

    if (sock < 0)
    {
        LOG_DBG("Socket open error, g_tcp_socket() failed\n");
        return 1;
    }

    s = scp_session_create();
    c = scp_connection_create(sock);

    LOG_DBG("Connecting to %s:%s with user %s (%s)\n", serv, port, user, pass);

    if (0 != g_tcp_connect(sock, serv, port))
    {
        LOG_DBG("g_tcp_connect() error\n");
        return 1;
    }

    scp_session_set_type(s, SCP_SESSION_TYPE_MANAGE);
    scp_session_set_version(s, 1);
    scp_session_set_username(s, user);
    scp_session_set_password(s, pass);

    e = scp_v1c_mng_connect(c, s);

    if (SCP_CLIENT_STATE_OK != e)
    {
        LOG_DBG("libscp error connecting: %s %d\n", s->errstr, (int)e);
    }

    if (0 == g_strncmp(cmnd, "list", 5))
    {
        cmndList(c);
    }
    else if (0 == g_strncmp(cmnd, "kill:", 5))
    {
        cmndKill(c, s);
    }
    else
    {
        /* FIX: report unknown commands instead of silently doing nothing */
        cmndHelp();
    }

    g_tcp_close(sock);
    scp_session_destroy(s);
    scp_connection_destroy(c);
    log_end();

    return 0;
}

/* Print usage information on stderr. */
void cmndHelp(void)
{
    /* FIX: corrected "adminitration" and "sysntax" typos */
    fprintf(stderr, "sesadmin - a console sesman administration tool\n");
    fprintf(stderr, "syntax: sesadmin [] COMMAND [OPTIONS]\n\n");
    fprintf(stderr, "-u=<username>: username to connect to sesman [MANDATORY]\n");
    fprintf(stderr, "-p=<password>: password to connect to sesman [MANDATORY]\n");
    fprintf(stderr, "-s=<hostname>: sesman host (default is localhost)\n");
    fprintf(stderr, "-i=<port> : sesman port (default 3350)\n");
    fprintf(stderr, "-c=<command> : command to execute on the server [MANDATORY]\n");
    fprintf(stderr, " it can be one of those:\n");
    fprintf(stderr, " LIST\n");
    fprintf(stderr, " KILL:<sid>\n");
}

/* Fetch the disconnected-session list from sesman and print one line
 * per session (sid, type, geometry, idle time, connect time). */
void cmndList(struct SCP_CONNECTION *c)
{
    /* FIX: initialize to 0 -- when the RPC below fails, the pointer was
       previously read uninitialized by the g_free() guard at the end */
    struct SCP_DISCONNECTED_SESSION *dsl = 0;
    enum SCP_CLIENT_STATES_E e;
    int scnt;
    int idx;

    e = scp_v1c_mng_get_session_list(c, &scnt, &dsl);

    if ((SCP_CLIENT_STATE_LIST_OK == e) && (scnt > 0))
    {
        for (idx = 0; idx < scnt; idx++)
        {
            printf("%d\t%d\t%dx%dx%d\t%d-%d-%d\t%04d/%02d/%02d@%02d:%02d\n",
                   (dsl[idx]).SID, (dsl[idx]).type, (dsl[idx]).width,
                   (dsl[idx]).height, (dsl[idx]).bpp,
                   (dsl[idx]).idle_days, (dsl[idx]).idle_hours, (dsl[idx]).idle_minutes,
                   (dsl[idx]).conn_year, (dsl[idx]).conn_month, (dsl[idx]).conn_day,
                   (dsl[idx]).conn_hour, (dsl[idx]).conn_minute);
        }
    }
    else
    {
        printf("No sessions.\n");
    }

    if (0 != dsl)
    {
        g_free(dsl);
    }
}
void cmndKill(struct SCP_CONNECTION *c, struct SCP_SESSION *s) { }
lmcro/xrdp
sesman/tools/sesadmin.c
C
apache-2.0
4,922
//======================================================================== //Copyright 2007-2009 David Yu dyuproject@gmail.com //------------------------------------------------------------------------ //Licensed under the Apache License, Version 2.0 (the "License"); //you may not use this file except in compliance with the License. //You may obtain a copy of the License at //http://www.apache.org/licenses/LICENSE-2.0 //Unless required by applicable law or agreed to in writing, software //distributed under the License is distributed on an "AS IS" BASIS, //WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //See the License for the specific language governing permissions and //limitations under the License. //======================================================================== package io.protostuff; /** * The serializable object where its {@link Schema schema} handles its serialization and deserialization. * * @author David Yu * @created Nov 9, 2009 */ public interface Message<T> { /** * Gets the cached schema of this message. */ public Schema<T> cachedSchema(); }
Shvid/protostuff
protostuff-api/src/main/java/io/protostuff/Message.java
Java
apache-2.0
1,166
using System.Threading; using Microsoft.CodeAnalysis.Internal.Log; namespace Roslyn.Hosting.Diagnostics.PerfMargin { // A slightly modified version of Roslyn.Services.Internal.Log.EtwLogger. // This version updates the DataModel whenever an operations starts or stops. There // isn't an efficient way to listen to ETW events within the same process unless // running as admin, so we need to add our logic to the logger instead. internal class PerfEventActivityLogger : ILogger { private readonly DataModel model; public PerfEventActivityLogger(DataModel model) { this.model = model; } public bool IsEnabled(FunctionId functionId) { return true; } public void Log(FunctionId functionId, LogMessage logMessage) { // do nothing } public void LogBlockStart(FunctionId functionId, LogMessage logMessage, int uniquePairId, CancellationToken cancellationToken) { model.BlockStart(functionId); } public void LogBlockEnd(FunctionId functionId, LogMessage logMessage, int uniquePairId, int delta, CancellationToken cancellationToken) { model.BlockDisposed(functionId); } } }
DavidKarlas/roslyn
src/Test/Diagnostics/PerfMargin/PerfEventActivityLogger.cs
C#
apache-2.0
1,288
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.sdk.io.gcp.firestore; import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Lists.newArrayList; import static org.junit.Assert.assertEquals; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.firestore.v1.FirestoreClient.ListDocumentsPage; import com.google.cloud.firestore.v1.FirestoreClient.ListDocumentsPagedResponse; import com.google.cloud.firestore.v1.stub.FirestoreStub; import com.google.firestore.v1.Document; import com.google.firestore.v1.ListDocumentsRequest; import com.google.firestore.v1.ListDocumentsResponse; import com.google.firestore.v1.Value; import java.util.Iterator; import java.util.List; import org.apache.beam.sdk.io.gcp.firestore.FirestoreV1ReadFn.ListDocumentsFn; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.AbstractIterator; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList; import 
org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;

/**
 * Unit tests for {@link FirestoreV1ReadFn.ListDocumentsFn}: drives the fn
 * against mocked ListDocuments paged responses and verifies page iteration
 * and resumption after a retryable error.
 */
@SuppressWarnings("initialization.fields.uninitialized") // mockito fields are initialized via the Mockito Runner
public final class FirestoreV1FnListDocumentsTest
    extends BaseFirestoreV1ReadFnTest<ListDocumentsRequest, ListDocumentsResponse> {

  // Mocks for the paged ListDocuments callable and two result pages.
  @Mock private UnaryCallable<ListDocumentsRequest, ListDocumentsPagedResponse> callable;
  @Mock private ListDocumentsPagedResponse pagedResponse1;
  @Mock private ListDocumentsPage page1;
  @Mock private ListDocumentsPagedResponse pagedResponse2;
  @Mock private ListDocumentsPage page2;

  /** Happy path: both pages are emitted, in order, as separate outputs. */
  @Test
  public void endToEnd() throws Exception {
    // First page of the response
    ListDocumentsRequest request1 =
        ListDocumentsRequest.newBuilder()
            .setParent(String.format("projects/%s/databases/(default)/document", projectId))
            .build();
    ListDocumentsResponse response1 =
        ListDocumentsResponse.newBuilder()
            .addDocuments(
                Document.newBuilder()
                    .setName("doc_1-1")
                    .putAllFields(
                        ImmutableMap.of("foo", Value.newBuilder().setStringValue("bar").build()))
                    .build())
            .addDocuments(
                Document.newBuilder()
                    .setName("doc_1-2")
                    .putAllFields(
                        ImmutableMap.of("foo", Value.newBuilder().setStringValue("bar").build()))
                    .build())
            .addDocuments(
                Document.newBuilder()
                    .setName("doc_1-3")
                    .putAllFields(
                        ImmutableMap.of("foo", Value.newBuilder().setStringValue("bar").build()))
                    .build())
            .setNextPageToken("page2")
            .build();
    when(page1.getNextPageToken()).thenReturn(response1.getNextPageToken());
    when(page1.getResponse()).thenReturn(response1);
    when(page1.hasNextPage()).thenReturn(true);

    // Second page of the response
    ListDocumentsResponse response2 =
        ListDocumentsResponse.newBuilder()
            .addDocuments(
                Document.newBuilder()
                    .setName("doc_2-1")
                    .putAllFields(
                        ImmutableMap.of("foo", Value.newBuilder().setStringValue("bar").build()))
                    .build())
            .build();
    when(page2.getResponse()).thenReturn(response2);
    when(page2.hasNextPage()).thenReturn(false);

    when(pagedResponse1.iteratePages()).thenReturn(ImmutableList.of(page1, page2));
    when(callable.call(request1)).thenReturn(pagedResponse1);
    when(stub.listDocumentsPagedCallable()).thenReturn(callable);
    when(ff.getFirestoreStub(any())).thenReturn(stub);
    RpcQosOptions options = RpcQosOptions.defaultOptions();
    when(ff.getRpcQos(any()))
        .thenReturn(FirestoreStatefulComponentFactory.INSTANCE.getRpcQos(options));

    // Capture every response the fn outputs to the process context.
    ArgumentCaptor<ListDocumentsResponse> responses =
        ArgumentCaptor.forClass(ListDocumentsResponse.class);
    doNothing().when(processContext).output(responses.capture());
    when(processContext.element()).thenReturn(request1);

    ListDocumentsFn fn = new ListDocumentsFn(clock, ff, options);
    runFunction(fn);

    List<ListDocumentsResponse> expected = newArrayList(response1, response2);
    List<ListDocumentsResponse> allValues = responses.getAllValues();
    assertEquals(expected, allValues);
  }

  /**
   * Page 1 is delivered, then the page iterator throws RETRYABLE_ERROR; the
   * fn must re-issue the request carrying the "page2" token (request2 below)
   * and still emit both responses.
   */
  @Override
  public void resumeFromLastReadValue() throws Exception {
    when(ff.getFirestoreStub(any())).thenReturn(stub);
    when(ff.getRpcQos(any())).thenReturn(rpcQos);
    when(rpcQos.newReadAttempt(any())).thenReturn(attempt);
    when(attempt.awaitSafeToProceed(any())).thenReturn(true);

    // First page of the response
    ListDocumentsRequest request1 =
        ListDocumentsRequest.newBuilder()
            .setParent(String.format("projects/%s/databases/(default)/document", projectId))
            .build();
    ListDocumentsResponse response1 =
        ListDocumentsResponse.newBuilder()
            .addDocuments(
                Document.newBuilder()
                    .setName("doc_1-1")
                    .putAllFields(
                        ImmutableMap.of("foo", Value.newBuilder().setStringValue("bar").build()))
                    .build())
            .addDocuments(
                Document.newBuilder()
                    .setName("doc_1-2")
                    .putAllFields(
                        ImmutableMap.of("foo", Value.newBuilder().setStringValue("bar").build()))
                    .build())
            .addDocuments(
                Document.newBuilder()
                    .setName("doc_1-3")
                    .putAllFields(
                        ImmutableMap.of("foo", Value.newBuilder().setStringValue("bar").build()))
                    .build())
            .setNextPageToken("page2")
            .build();
    when(page1.getNextPageToken()).thenReturn(response1.getNextPageToken());
    when(page1.getResponse()).thenReturn(response1);
    when(page1.hasNextPage()).thenReturn(true);
    when(callable.call(request1)).thenReturn(pagedResponse1);
    doNothing().when(attempt).checkCanRetry(any(), eq(RETRYABLE_ERROR));
    // Iterator yields page1 once, then fails with the retryable error.
    when(pagedResponse1.iteratePages())
        .thenAnswer(
            invocation ->
                new Iterable<ListDocumentsPage>() {
                  @Override
                  public Iterator<ListDocumentsPage> iterator() {
                    return new AbstractIterator<ListDocumentsPage>() {
                      private boolean first = true;

                      @Override
                      protected ListDocumentsPage computeNext() {
                        if (first) {
                          first = false;
                          return page1;
                        } else {
                          throw RETRYABLE_ERROR;
                        }
                      }
                    };
                  }
                });

    // Second page of the response
    ListDocumentsRequest request2 =
        ListDocumentsRequest.newBuilder()
            .setParent(String.format("projects/%s/databases/(default)/document", projectId))
            .setPageToken("page2")
            .build();
    ListDocumentsResponse response2 =
        ListDocumentsResponse.newBuilder()
            .addDocuments(
                Document.newBuilder()
                    .setName("doc_2-1")
                    .putAllFields(
                        ImmutableMap.of("foo", Value.newBuilder().setStringValue("bar").build()))
                    .build())
            .build();
    when(page2.getResponse()).thenReturn(response2);
    when(page2.hasNextPage()).thenReturn(false);
    when(callable.call(request2)).thenReturn(pagedResponse2);
    when(pagedResponse2.iteratePages()).thenReturn(ImmutableList.of(page2));
    when(stub.listDocumentsPagedCallable()).thenReturn(callable);
    when(ff.getFirestoreStub(any())).thenReturn(stub);

    ArgumentCaptor<ListDocumentsResponse> responses =
        ArgumentCaptor.forClass(ListDocumentsResponse.class);
    doNothing().when(processContext).output(responses.capture());
    when(processContext.element()).thenReturn(request1);

    ListDocumentsFn fn = new ListDocumentsFn(clock, ff, rpcQosOptions);
    runFunction(fn);

    List<ListDocumentsResponse> expected = newArrayList(response1, response2);
    List<ListDocumentsResponse> allValues = responses.getAllValues();
    assertEquals(expected, allValues);
  }

  /** RPC-level fixture consumed by the base class's shared tests. */
  @Override
  protected V1RpcFnTestCtx newCtx() {
    return new V1RpcFnTestCtx() {
      @Override
      public ListDocumentsRequest getRequest() {
        return ListDocumentsRequest.newBuilder()
            .setParent(String.format("projects/%s/databases/(default)/document", projectId))
            .build();
      }

      @Override
      public void mockRpcToCallable(FirestoreStub stub) {
        when(stub.listDocumentsPagedCallable()).thenReturn(callable);
      }

      @Override
      public void whenCallableCall(ListDocumentsRequest in, Throwable... throwables) {
        when(callable.call(in)).thenThrow(throwables);
      }

      @Override
      public void verifyNoInteractionsWithCallable() {
        verifyNoMoreInteractions(callable);
      }
    };
  }

  @Override
  protected ListDocumentsFn getFn(
      JodaClock clock,
      FirestoreStatefulComponentFactory firestoreStatefulComponentFactory,
      RpcQosOptions rpcQosOptions) {
    return new ListDocumentsFn(clock, firestoreStatefulComponentFactory, rpcQosOptions);
  }
}
lukecwik/incubator-beam
sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/FirestoreV1FnListDocumentsTest.java
Java
apache-2.0
10,718
/* * © Copyright IBM Corp. 2011 * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.ibm.domino.das.resources; import static com.ibm.domino.commons.model.IGatekeeperProvider.FEATURE_REST_API_DATA_DOCUMENT; import static com.ibm.domino.das.service.DataService.STAT_DOCUMENT; import static com.ibm.domino.das.servlet.DasServlet.DAS_LOGGER; import static com.ibm.domino.services.HttpServiceConstants.*; import static com.ibm.domino.services.rest.RestParameterConstants.*; import static com.ibm.domino.services.rest.RestServiceConstants.*; import java.io.IOException; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.StringReader; import java.net.URI; import java.util.Date; import java.util.List; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.HeaderParam; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.ResponseBuilder; import javax.ws.rs.core.StreamingOutput; import javax.ws.rs.core.UriInfo; import lotus.domino.Database; import lotus.domino.DateTime; import lotus.domino.Document; import lotus.domino.NotesException; import org.apache.http.impl.cookie.DateParseException; import com.ibm.commons.util.StringUtil; import 
com.ibm.commons.util.io.json.JsonJavaFactory;
import com.ibm.commons.util.io.json.JsonJavaObject;
import com.ibm.commons.util.io.json.JsonParser;
import com.ibm.domino.commons.util.UriHelper;
import com.ibm.domino.das.service.DataService;
import com.ibm.domino.das.utils.ErrorHelper;
import com.ibm.domino.httpmethod.PATCH;
import com.ibm.domino.services.ServiceException;
import com.ibm.domino.services.content.JsonDocumentContent;
import com.ibm.domino.services.rest.das.document.DocumentParameters;
import com.ibm.domino.services.util.JsonWriter;

/**
 * REST resource for a single Domino document addressed by UNID.
 * Supports GET (read as JSON), PUT (replace items), PATCH (merge items)
 * and DELETE, with If-Modified-Since / If-Unmodified-Since handling.
 */
@Path(PARAM_DATA + PARAM_SEPERATOR + PARAM_DOCUMENTS + PARAM_SEPERATOR + PARAM_UNID + PARAM_SEPERATOR + UNID_RESOURCE_PATH)
public class DocumentResource extends AbstractDasResource {

    /**
     * Streams the document identified by {@code unid} as JSON.
     * Throws 404 (wrapped NotesException) when the UNID does not resolve,
     * 400 for other Notes/service errors, 304 via ifModifiedSince().
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    public Response getDocumentByUnid(@Context final UriInfo uriInfo,
            @PathParam(PARAM_UNID) final String unid,
            @HeaderParam(HEADER_IF_MODIFIED_SINCE) final String ifModifiedSince,
            @QueryParam(PARAM_COMPACT) final boolean compact,
            @QueryParam(PARAM_DOC_STRONGTYPE) final boolean strongType,
            @QueryParam(PARAM_DOC_HIDDEN) final boolean hidden,
            @QueryParam(PARAM_DOC_MARKREAD) final String markRead,
            @QueryParam(PARAM_DOC_MULTIPART) final String multipart,
            @QueryParam(PARAM_DOC_LOWERCASEFIELDS) final boolean lowercaseItems,
            @QueryParam(PARAM_DOC_FIELDS) final String items) {
        DAS_LOGGER.traceEntry(this, "getDocumentByUnid"); // $NON-NLS-1$
        DataService.beforeRequest(FEATURE_REST_API_DATA_DOCUMENT, STAT_DOCUMENT);
        final ResponseBuilder builder = Response.ok();

        // Local abstract subtype so the streaming body can add the
        // Last-Modified header onto the already-built Response.
        abstract class StreamingOutputImpl implements StreamingOutput {
            Response response = null;

            public void setResponse(Response response) {
                this.response = response;
            }
        }

        StreamingOutputImpl streamJsonEntity = new StreamingOutputImpl() {
            // @Override
            public void write(OutputStream outputStream) throws IOException {
                Document document = null;

                try {
                    Database database = getDatabase(DB_ACCESS_VIEWS_DOCS);

                    try {
                        document = database.getDocumentByUNID(unid);
                    }
                    catch (NotesException e) {
                        throw new WebApplicationException(ErrorHelper.createErrorResponse(e, Response.Status.NOT_FOUND));
                    }

                    URI baseUri = UriHelper.copy(uriInfo.getAbsolutePath(), DataService.isUseRelativeUrls());
                    // Unid don't contain special characters, so don't need encode
                    baseUri = UriHelper.trimAtLast(baseUri, PARAM_SEPERATOR + unid);

                    OutputStreamWriter streamWriter = new OutputStreamWriter(outputStream);

                    // 304 short-circuit happens inside ifModifiedSince().
                    String lastModifiedHeader = ifModifiedSince(document, ifModifiedSince);
                    if (lastModifiedHeader != null) {
                        response.getMetadata().add(HEADER_LAST_MODIFIED, lastModifiedHeader);
                    }

                    JsonWriter jsonWriter = new JsonWriter(streamWriter, compact);
                    JsonDocumentContent content = new JsonDocumentContent(document);
                    int sysItems = DocumentParameters.SYS_ITEM_ALL;
                    String rtType = TYPE_MULTIPART;

                    // Handle parameters
                    if (!hidden) {
                        sysItems &= ~DocumentParameters.SYS_ITEM_HIDDEN;
                    }

                    if (multipart != null && multipart.compareToIgnoreCase(PARAM_VALUE_FALSE) == 0) {
                        rtType = TYPE_RICHTEXT; // Deprecated
                    }

                    List<String> defItemFilter = null;
                    if (StringUtil.isNotEmpty(items)) {
                        defItemFilter = DataService.getParameterStringList(PARAM_DOC_FIELDS, items);
                    }

                    try {
                        content.writeDocumentAsJson(jsonWriter, sysItems, true, defItemFilter, lowercaseItems, null, strongType, rtType, baseUri.toString());
                    }
                    finally {
                        jsonWriter.flush();
                    }

                    // Handle parameters. Default (markRead absent) marks the doc read.
                    if (markRead == null || markRead.compareToIgnoreCase(PARAM_VALUE_TRUE) == 0) {
                        document.markRead();
                    }
                    else if (markRead.compareToIgnoreCase(PARAM_VALUE_FALSE) != 0) {
                        throw new WebApplicationException(ErrorHelper.createErrorResponse("Invalid parameter.", Response.Status.BAD_REQUEST)); // $NLX-DocumentResource.Invalidparameter-1$
                    }

                    streamWriter.close();
                }
                catch (NotesException e) {
                    throw new WebApplicationException(ErrorHelper.createErrorResponse(e, Response.Status.BAD_REQUEST));
                }
                catch (ServiceException e) {
                    throw new WebApplicationException(ErrorHelper.createErrorResponse(e, Response.Status.BAD_REQUEST));
                }
                finally {
                    if (document != null) {
                        try {
                            document.recycle();
                        }
                        catch (NotesException e) {
                            // Ignore
                        }
                    }
                }
            }
        };

        builder.type(MediaType.APPLICATION_JSON_TYPE).entity(streamJsonEntity);
        Response response = builder.build();
        streamJsonEntity.setResponse(response);

        DAS_LOGGER.traceExit(this, "getDocumentByUnid", response); // $NON-NLS-1$
        return response;
    }

    /** Full replace of the document's items (delegates with put=true). */
    @PUT
    @Consumes(MediaType.APPLICATION_JSON)
    @Produces(MediaType.APPLICATION_JSON)
    public Response putDocumentByUnid(String requestEntity,
            @HeaderParam(HEADER_IF_UNMODIFIED_SINCE) final String ifUnmodifiedSince,
            @PathParam(PARAM_UNID) String unid,
            @QueryParam(PARAM_DOC_FORM) String form,
            @QueryParam(PARAM_DOC_COMPUTEWITHFORM) String computeWithForm) {
        DAS_LOGGER.traceEntry(this, "putDocumentByUnid"); // $NON-NLS-1$
        DataService.beforeRequest(FEATURE_REST_API_DATA_DOCUMENT, STAT_DOCUMENT);

        Response response = updateDocumentByUnid(requestEntity, ifUnmodifiedSince, unid, form, computeWithForm, true);

        DAS_LOGGER.traceExit(this, "putDocumentByUnid", response); // $NON-NLS-1$
        return response;
    }

    /** Partial update of the document's items (delegates with put=false). */
    @PATCH
    @Consumes(MediaType.APPLICATION_JSON)
    @Produces(MediaType.APPLICATION_JSON)
    public Response patchDocumentByUnid(String requestEntity,
            @HeaderParam(HEADER_IF_UNMODIFIED_SINCE) final String ifUnmodifiedSince,
            @PathParam(PARAM_UNID) String unid,
            @QueryParam(PARAM_DOC_FORM) String form,
            @QueryParam(PARAM_DOC_COMPUTEWITHFORM) String computeWithForm) {
        DAS_LOGGER.traceEntry(this, "patchDocumentByUnid"); // $NON-NLS-1$
        DataService.beforeRequest(FEATURE_REST_API_DATA_DOCUMENT, STAT_DOCUMENT);

        Response response = updateDocumentByUnid(requestEntity, ifUnmodifiedSince, unid, form, computeWithForm, false);

        DAS_LOGGER.traceExit(this, "patchDocumentByUnid", response); // $NON-NLS-1$
        return response;
    }

    /**
     * Deletes the document; 404 when the UNID does not resolve, 412 via
     * ifUnmodifiedSince(), 409 when the soft remove fails.
     */
    @DELETE
    public Response deleteDocumentByUnid(
            @HeaderParam(HEADER_IF_UNMODIFIED_SINCE) String ifUnmodifiedSince,
            @PathParam(PARAM_UNID) String unid) {
        DAS_LOGGER.traceEntry(this, "deleteDocumentByUnid"); // $NON-NLS-1$
        DataService.beforeRequest(FEATURE_REST_API_DATA_DOCUMENT, STAT_DOCUMENT);

        // String jsonEntity = null;

        try {
            Database database = this.getDatabase(DB_ACCESS_VIEWS_DOCS);
            Document document = null;

            try {
                document = database.getDocumentByUNID(unid);
            }
            catch (NotesException e) {
                throw new WebApplicationException(ErrorHelper.createErrorResponse(e, Response.Status.NOT_FOUND));
            }

            ifUnmodifiedSince(document, ifUnmodifiedSince);

            if (!document.remove(true)) {
                throw new WebApplicationException(ErrorHelper.createErrorResponse("Document is not deleted because another user modified it.", Response.Status.CONFLICT)); // $NLX-DocumentResource.Documentisnotdeletedbecauseanothe-1$
            }
        }
        catch (NotesException e) {
            throw new WebApplicationException(ErrorHelper.createErrorResponse(e, Response.Status.BAD_REQUEST));
        }

        ResponseBuilder builder = Response.ok();
        // builder.type(MediaType.APPLICATION_JSON_TYPE).entity(jsonEntity);
        Response response = builder.build();

        DAS_LOGGER.traceExit(this, "deleteDocumentByUnid", response); // $NON-NLS-1$
        return response;
    }

    /**
     * Shared implementation behind PUT and PATCH. Parses the JSON request
     * body, optionally sets the Form item, updates fields (replace when
     * {@code put} is true, merge otherwise), optionally computes with form,
     * then saves. Responds 200 with an empty JSON entity.
     */
    public Response updateDocumentByUnid(String requestEntity,
            @HeaderParam(HEADER_IF_UNMODIFIED_SINCE) final String ifUnmodifiedSince,
            @PathParam(PARAM_UNID) String unid,
            @QueryParam(PARAM_DOC_FORM) String form,
            @QueryParam(PARAM_DOC_COMPUTEWITHFORM) String computeWithForm,
            boolean put) {
        String jsonEntity = null;
        Document document = null;

        try {
            // Write JSON content
            Database database = this.getDatabase(DB_ACCESS_VIEWS_DOCS);

            try {
                document = database.getDocumentByUNID(unid);
            }
            catch (NotesException e) {
                throw new WebApplicationException(ErrorHelper.createErrorResponse(e, Response.Status.NOT_FOUND));
            }

            ifUnmodifiedSince(document, ifUnmodifiedSince);

            JsonDocumentContent content = new JsonDocumentContent(document);
            JsonJavaObject jsonItems;
            JsonJavaFactory factory = JsonJavaFactory.instanceEx;

            try {
                StringReader reader = new StringReader(requestEntity);

                try {
                    jsonItems = (JsonJavaObject) JsonParser.fromJson(factory, reader);
                }
                finally {
                    reader.close();
                }
            }
            catch (Exception ex) {
                throw new ServiceException(ex, "Error while parsing the JSON content"); // $NLX-DocumentResource.ErrorwhileparsingtheJSONcontent-1$
            }

            // Handle parameters.
            if (StringUtil.isNotEmpty(form)) {
                document.replaceItemValue(ITEM_FORM, form);
            }

            content.updateFields(jsonItems, put);

            if (computeWithForm != null && computeWithForm.compareToIgnoreCase(PARAM_VALUE_TRUE) == 0) {
                document.computeWithForm(true, true);
            }

            document.save();
        }
        catch (NotesException e) {
            throw new WebApplicationException(ErrorHelper.createErrorResponse(e, Response.Status.BAD_REQUEST));
        }
        catch (ServiceException e) {
            throw new WebApplicationException(ErrorHelper.createErrorResponse(e, Response.Status.BAD_REQUEST));
        }
        finally {
            if (document != null) {
                try {
                    document.recycle();
                }
                catch (NotesException e) {
                    // Ignore
                }
            }
        }

        ResponseBuilder builder = Response.ok();
        builder.type(MediaType.APPLICATION_JSON_TYPE).entity(jsonEntity);
        Response response = builder.build();
        return response;
    }

    /**
     * Throws 412 (PRECONDITION_FAILED) when the document changed after the
     * If-Unmodified-Since date, or when that header cannot be parsed.
     * No-op when the header is absent or matches the RFC 1123 form exactly.
     */
    static private void ifUnmodifiedSince(Document document, final String ifUnmodifiedSince) throws NotesException {
        DateTime lastModifiedDateTime = document.getLastModified();
        if (lastModifiedDateTime != null) {
            Date lastModifiedDate = lastModifiedDateTime.toJavaDate();
            if (lastModifiedDate != null) {
                // Formats the given date according to the RFC 1123 pattern.
                String lastModifiedHeader = org.apache.http.impl.cookie.DateUtils.formatDate(lastModifiedDate);
                if (lastModifiedHeader != null) {
                    if (ifUnmodifiedSince != null) {
                        if (!ifUnmodifiedSince.equalsIgnoreCase(lastModifiedHeader)) {
                            try {
                                Date ifUnmodifiedSinceDate = org.apache.http.impl.cookie.DateUtils.parseDate(ifUnmodifiedSince);
                                if (lastModifiedDate.after(ifUnmodifiedSinceDate)) {
                                    throw new WebApplicationException(Response.Status.PRECONDITION_FAILED);
                                }
                            }
                            catch (DateParseException e) {
                                throw new WebApplicationException(Response.Status.PRECONDITION_FAILED);
                            }
                        }
                    }
                }
            }
        }
    }

    /**
     * Returns the RFC 1123 Last-Modified header value for the document, and
     * throws 304 (NOT_MODIFIED) when the If-Modified-Since header matches or
     * is later than the document's last-modified date. An unparseable header
     * is ignored (request proceeds).
     */
    static private String ifModifiedSince(Document document, final String ifModifiedSince) throws NotesException {
        String lastModifiedHeader = null;

        DateTime lastModifiedDateTime = document.getLastModified();
        if (lastModifiedDateTime != null) {
            Date lastModifiedDate = lastModifiedDateTime.toJavaDate();
            if (lastModifiedDate != null) {
                // Formats the given date according to the RFC 1123 pattern.
                lastModifiedHeader = org.apache.http.impl.cookie.DateUtils.formatDate(lastModifiedDate);
                if (lastModifiedHeader != null) {
                    if (ifModifiedSince != null) {
                        if (ifModifiedSince.equalsIgnoreCase(lastModifiedHeader)) {
                            throw new WebApplicationException(Response.Status.NOT_MODIFIED);
                        }

                        try {
                            Date ifModifiedSinceDate = org.apache.http.impl.cookie.DateUtils.parseDate(ifModifiedSince);
                            if (ifModifiedSinceDate.equals(lastModifiedDate) || ifModifiedSinceDate.after(lastModifiedDate)) {
                                throw new WebApplicationException(Response.Status.NOT_MODIFIED);
                            }
                        }
                        catch (DateParseException e) {
                            // If we can not parse the If-Modified-Since then continue.
                            DAS_LOGGER.info("Can not parse the If-Modified-Since header."); // $NON-NLS-1$
                        }
                    }
                }
            }
        }

        return lastModifiedHeader;
    }
}
iharkhukhrakou/XPagesExtensionLibrary
extlib/lwp/product/runtime/eclipse/plugins/com.ibm.domino.das/src/com/ibm/domino/das/resources/DocumentResource.java
Java
apache-2.0
16,715
package com.facebook.presto.operator.scalar; /* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import com.facebook.presto.metadata.BoundVariables; import com.facebook.presto.metadata.FunctionRegistry; import com.facebook.presto.metadata.SqlOperator; import com.facebook.presto.spi.block.Block; import com.facebook.presto.spi.type.StandardTypes; import com.facebook.presto.spi.type.Type; import com.facebook.presto.spi.type.TypeManager; import com.google.common.collect.ImmutableList; import java.lang.invoke.MethodHandle; import static com.facebook.presto.metadata.Signature.comparableTypeParameter; import static com.facebook.presto.metadata.Signature.internalOperator; import static com.facebook.presto.spi.function.OperatorType.HASH_CODE; import static com.facebook.presto.spi.type.BigintType.BIGINT; import static com.facebook.presto.spi.type.TypeSignature.parseTypeSignature; import static com.facebook.presto.type.TypeUtils.hashPosition; import static com.facebook.presto.util.Reflection.methodHandle; public class MapHashCodeOperator extends SqlOperator { public static final MapHashCodeOperator MAP_HASH_CODE = new MapHashCodeOperator(); private static final MethodHandle METHOD_HANDLE = methodHandle(MapHashCodeOperator.class, "hash", MethodHandle.class, MethodHandle.class, Type.class, Type.class, Block.class); private MapHashCodeOperator() { super(HASH_CODE, ImmutableList.of(comparableTypeParameter("K"), comparableTypeParameter("V")), ImmutableList.of(), 
parseTypeSignature(StandardTypes.BIGINT), ImmutableList.of(parseTypeSignature("map(K,V)"))); } @Override public ScalarFunctionImplementation specialize(BoundVariables boundVariables, int arity, TypeManager typeManager, FunctionRegistry functionRegistry) { Type keyType = boundVariables.getTypeVariable("K"); Type valueType = boundVariables.getTypeVariable("V"); MethodHandle keyHashCodeFunction = functionRegistry.getScalarFunctionImplementation(internalOperator(HASH_CODE, BIGINT, ImmutableList.of(keyType))).getMethodHandle(); MethodHandle valueHashCodeFunction = functionRegistry.getScalarFunctionImplementation(internalOperator(HASH_CODE, BIGINT, ImmutableList.of(valueType))).getMethodHandle(); MethodHandle method = METHOD_HANDLE.bindTo(keyHashCodeFunction).bindTo(valueHashCodeFunction).bindTo(keyType).bindTo(valueType); return new ScalarFunctionImplementation(false, ImmutableList.of(false), method, isDeterministic()); } public static long hash(MethodHandle keyHashCodeFunction, MethodHandle valueHashCodeFunction, Type keyType, Type valueType, Block block) { long result = 0; for (int position = 0; position < block.getPositionCount(); position += 2) { result += hashPosition(keyHashCodeFunction, keyType, block, position); result += hashPosition(valueHashCodeFunction, valueType, block, position + 1); } return result; } }
propene/presto
presto-main/src/main/java/com/facebook/presto/operator/scalar/MapHashCodeOperator.java
Java
apache-2.0
3,557
' Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. Imports System.Runtime.InteropServices Imports System.Windows Imports Microsoft.CodeAnalysis Imports Microsoft.CodeAnalysis.Notification Imports Microsoft.VisualStudio.ComponentModelHost Imports Microsoft.VisualStudio.LanguageServices.Implementation.Options Imports Microsoft.VisualStudio.LanguageServices.Implementation.Options.Style Namespace Microsoft.VisualStudio.LanguageServices.VisualBasic.Options <Guid(Guids.VisualBasicOptionPageNamingStyleIdString)> Friend Class NamingStylesOptionPage Inherits AbstractOptionPage Private _grid As NamingStyleOptionPageControl Private _notificationService As INotificationService Protected Overrides Function CreateOptionPage(serviceProvider As IServiceProvider, optionStore As OptionStore) As AbstractOptionPageControl Dim componentModel = DirectCast(serviceProvider.GetService(GetType(SComponentModel)), IComponentModel) Dim workspace = componentModel.GetService(Of VisualStudioWorkspace) _notificationService = workspace.Services.GetService(Of INotificationService) _grid = New NamingStyleOptionPageControl(optionStore, _notificationService, LanguageNames.VisualBasic) Return _grid End Function Protected Overrides Sub OnApply(e As PageApplyEventArgs) If _grid.ContainsErrors() Then _notificationService.SendNotification(ServicesVSResources.Some_naming_rules_are_incomplete_Please_complete_or_remove_them) e.ApplyBehavior = ApplyKind.Cancel Return End If MyBase.OnApply(e) End Sub End Class End Namespace
VSadov/roslyn
src/VisualStudio/VisualBasic/Impl/Options/NamingStylesOptionPage.vb
Visual Basic
apache-2.0
1,833
/* * Copyright (c) 2015 ARM Limited * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "mbed-drivers/mbed.h" #include "mbed-drivers/test_env.h" #include "core-util/Event.h" #include <stdio.h> using namespace mbed::util; /****************************************************************************** * Generic helpers *****************************************************************************/ template<typename R, typename Arg> static void call_fp1(const char* name, FunctionPointer1<R, Arg>& fptr, const Arg& arg) { printf(">>>>>>>> Testing '%s' (one arg) <<<<<<<<\r\n", name); printf("[Direct call] "); fptr(arg); Event e(fptr.bind(arg)); printf("[Event call] "); e.call(); } template<typename R, typename Arg1, typename Arg2> static void call_fp2(const char* name, FunctionPointer2<R, Arg1, Arg2>& fptr, const Arg1& arg1, const Arg2& arg2) { printf(">>>>>>>> Testing '%s' (2 args)<<<<<<<<\r\n", name); printf("[Direct call] "); fptr(arg1, arg2); Event e(fptr.bind(arg1, arg2)); printf("[Event call] "); e.call(); } template<typename R, typename Arg1, typename Arg2, typename Arg3> static void call_fp3(const char* name, FunctionPointer3<R, Arg1, Arg2, Arg3>& fptr, const Arg1& arg1, const Arg2& arg2, const Arg3& arg3) { printf(">>>>>>>> Testing '%s' (3 args) <<<<<<<<\r\n", name); printf("[Direct call] "); fptr(arg1, arg2, arg3); Event e(fptr.bind(arg1, arg2, arg3)); printf("[Event call] "); e.call(); } template<typename R> static void call_fp0(const char* name, FunctionPointer0<R>& 
fptr) { printf(">>>>>>>> Testing '%s' (no args) <<<<<<<<\r\n", name); printf("[Direct call] "); fptr(); Event e(fptr.bind()); printf("[Event call] "); e(); } static void call_event(const char* name, Event& e) { printf("[Call event '%s'] ", name); e.call(); } /****************************************************************************** * Test with functions that are not part of a class *****************************************************************************/ static void sa_func_1(const char *msg) { printf("Calling sa_func_1 with msg=%s\r\n", msg); } static void sa_func_2(int arg) { printf("Calling sa_func_2 with arg=%d\r\n", arg); } static void sa_func_3() { printf("Calling sa_func_3 (no arguments)\r\n"); } static void sa_func_4(int arg1, const char* arg2, double arg3) { printf("Calling sa_func_4 with arg1=%d, arg=%s, arg3=%f\r\n", arg1, arg2, arg3); } static void test_standalone_funcs() { printf("\r\n********** Starting test_standalone_funcs **********\r\n"); const char *testmsg1 = "Test message 1"; const char *testmsg2 = "Test message 2"; const int testint1 = 13; const double testdouble = 100.0; // First call function pointers directly FunctionPointer1<void, const char*> fp1(sa_func_1); FunctionPointer1<void, int> fp2(sa_func_2); FunctionPointer0<void> fp3(sa_func_3); FunctionPointer3<void, int, const char*, double> fp4(sa_func_4); call_fp1("ptr to standalone func(char*)", fp1, testmsg1); call_fp1("ptr to standalone func(char*)", fp1, testmsg2); call_fp1("ptr to standalone func(int)", fp2, testint1); call_fp0("ptr to standalone func(void)", fp3); call_fp3("ptr to standalong func(int, const char*, double)", fp4, testint1, testmsg1, testdouble); } /****************************************************************************** * Test with functions that are part of a class (trivially copyable arguments) *****************************************************************************/ class VBase { public: VBase(int arg): _arg(arg) {} void print_baseonly(const 
char * msg) { printf("VBase::print_baseonly: %s, _arg=%d\r\n", msg, _arg); } virtual void print_virtual_str(const char * msg) { printf("VBase::print_virtual_str: %s, _arg=%d\r\n", msg, _arg); } virtual void print_virtual_noargs() { printf("VBase::print_virtual_noargs, _arg=%d\r\n", _arg); } void print_non_virtual(const char* msg, int farg) { printf("VBase::print_non_virtual, msg=%s, farg=%d, _arg=%d\r\n", msg, farg, _arg); } protected: int _arg; }; class VDerived : public VBase { public: VDerived(int arg1, int arg2): VBase(arg1), _arg2(arg2) {} void print_non_virtual(const char* msg) { printf("VDerived::print_non_virtual, _msg=%s, _arg=%d, _arg2=%d\r\n", msg, _arg, _arg2); } virtual void print_virtual_str(const char * msg) { printf("VDerived::print_virtual_str: %s, _arg=%d, _arg2=%d\r\n", msg, _arg, _arg2); } virtual void print_virtual_noargs() { printf("VDerived::print_virtual_noargs, _arg=%d, _arg2=%d\r\n", _arg, _arg2); } private: int _arg2; }; static void test_class_funcs_tca() { printf("\r\n********** Starting test_class_funcs_tca **********\r\n"); VBase base(10); VDerived derived(20, 100); const char *testmsg1 = "Test message 1"; const char *testmsg2 = "Test message 2"; const int testint1 = 17; printf("---- Part 1: test virtual functions\r\n"); FunctionPointer1<void, const char*> p1_fp1(&base, &VBase::print_virtual_str); FunctionPointer1<void, const char*> p1_fp2(&derived, &VDerived::print_virtual_str); FunctionPointer1<void, const char*> p1_fp3((VBase*)&derived, &VBase::print_virtual_str); FunctionPointer0<void> p1_fp4((VBase*)&derived, &VBase::print_virtual_noargs); call_fp1("ptr to base::print_virtual_str", p1_fp1, testmsg1); call_fp1("ptr to derived::print_virtual_str", p1_fp2, testmsg2); call_fp1("ptr to derived::print_virtual_str via VBase* pointer", p1_fp3, testmsg2); call_fp0("ptr to derived::print_virtual_noargs via VBase* pointer", p1_fp4); printf("---- Part 2: call base-only function from base and derived\r\n"); FunctionPointer1<void, const char*> 
p2_fp1(&base, &VBase::print_baseonly); FunctionPointer1<void, const char*> p2_fp2((VBase*)&derived, &VBase::print_baseonly); call_fp1("ptr to base::print_baseonly", p2_fp1, testmsg1); call_fp1("ptr to base::print_baseonly using VDerived instance", p2_fp2, testmsg1); printf("---- Part 3: call non-virtual function from base and derived\r\n"); FunctionPointer2<void, const char*, int> p3_fp1(&base, &VBase::print_non_virtual); FunctionPointer1<void, const char*> p3_fp2(&derived, &VDerived::print_non_virtual); FunctionPointer2<void, const char*, int> p3_fp3((VBase*)&derived, &VBase::print_non_virtual); call_fp2("ptr to base::print_non_virtual", p3_fp1, testmsg1, testint1); call_fp1("ptr to derived::print_non_virtual", p3_fp2, testmsg2); call_fp2("ptr to base::print_non_virtual via Derived* pointer", p3_fp3, testmsg2, testint1); } /****************************************************************************** * Mixed test (stand alone functions and class function) using non-trivially * copyable arguments *****************************************************************************/ class MyArg { public: MyArg(const char* id = "(none)", int arg1 = 0, int arg2 = 0): _id(id), _arg1(arg1), _arg2(arg2) { instcount ++; } MyArg(const MyArg& arg): _arg1(arg._arg1), _arg2(arg._arg2) { _id = !strcmp(arg._id, "test") ? 
"(copy)" : arg._id; instcount ++; } ~MyArg() { instcount --; } void print() const { printf("Instance '%s'[%p] of MyArg, arg1=%d, arg2=%d\r\n", _id, this, _arg1, _arg2); } const char *_id; int _arg1, _arg2; static int instcount; }; int MyArg::instcount = 0; static void sa_ntc(MyArg arg) { printf("Called sa_ntc with arg '%s': ", arg._id); arg.print(); } class ABase { public: ABase(int arg): _arg(arg) {} virtual void print_virtual_arg(MyArg a) { printf("ABase::print_virtual_arg: %s, _arg=%d\r\n", a._id, _arg); } protected: int _arg; }; class ADerived : public ABase { public: ADerived(int arg1, int arg2): ABase(arg1), _arg2(arg2) {} virtual void print_virtual_arg(MyArg a) { printf("ADerived::print_virtual_arg: %s, _arg=%d, _arg2=%d\r\n", a._id, _arg, _arg2); } private: int _arg2; }; static void test_funcs_nontca() { printf("\r\n********** Starting test_funcs_nontca **********\r\n"); FunctionPointer1<void, MyArg> fp1(sa_ntc); Event e1, e2, e3; { // Test binding argument that gets out of scope at the end of this block MyArg arg("test", 10, 20); call_fp1("ptr to standalong func taking non-trivial arg", fp1, arg); e1 = e2 = e3 = fp1.bind(arg); } e1.call(); // This should work, since it has a copy of 'arg' above // Test functions taking non-trivial arguments inside classes ADerived d(10, 100); ABase *pDerived = &d; FunctionPointer1<void, MyArg> fp2(&d, &ADerived::print_virtual_arg); FunctionPointer1<void, MyArg> fp3(pDerived, &ABase::print_virtual_arg); call_fp1("ptr to virtual method taking non-tc argument", fp2, MyArg("notest", 5, 8)); call_fp1("ptr to virtual method taking non-tc argument (via base class pointer)", fp2, MyArg("notest", 5, 8)); } /****************************************************************************** * Create an array of events from different kinds of function pointers * Call each one in turn (unified interface) *****************************************************************************/ static void test_array_of_events() { 
printf("\r\n********** Starting test_array_of_events **********\r\n"); const char* testmsg1 = "Test message 1"; const char* testmsg2 = "Test message 2"; const int testint = 13; VDerived derived(20, 100); MyArg arg("array", 5, 10); FunctionPointer1<void, const char*> fp1((VBase*)&derived, &VBase::print_virtual_str); FunctionPointer0<void> fp2(sa_func_3); FunctionPointer1<void, int> fp3(sa_func_2); FunctionPointer0<void> fp4(&derived, &VDerived::print_virtual_noargs); FunctionPointer1<void, MyArg> fp5(sa_ntc); Event events[] = {fp1.bind(testmsg1), fp1.bind(testmsg2), fp2.bind(), fp3.bind(testint), fp4.bind(), fp5.bind(arg)}; for (unsigned i = 0; i < sizeof(events)/sizeof(events[0]); i ++) { events[i].call(); } } /****************************************************************************** * Test assignment between various kinds of events *****************************************************************************/ static void swap_events_using_eq(Event &e1, Event &e2) { Event temp; temp = e1; e1 = e2; e2 = temp; } static void swap_events_using_cc(Event &e1, Event &e2) { Event temp = e1; e1 = e2; e2 = temp; } static void test_event_assignment_and_swap() { printf("\r\n********** Starting test_event_assignment_and_swap **********\r\n"); ADerived aderived(10, 10); FunctionPointer1<void, const char*> fp_sa_tc1(sa_func_1); FunctionPointer1<void, int> fp_sa_tc2(sa_func_2); FunctionPointer0<void> fp_sa_noargs(sa_func_3); FunctionPointer1<void, MyArg> fp_sa_ntc(sa_ntc); FunctionPointer1<void, MyArg> fp_class_ntc(&aderived, &ADerived::print_virtual_arg); MyArg arg("test_event_assignment"); Event e_sa_tc1(fp_sa_tc1.bind("test_event_assignment")); Event e_sa_tc2(fp_sa_tc2.bind(17)); Event e_sa_noargs(fp_sa_noargs.bind()); Event e_sa_ntc(fp_sa_ntc.bind(arg)); Event e_class_ntc(fp_class_ntc.bind(arg)); Event e1 = e_sa_tc1, e2 = e_class_ntc, e3; e3 = e_sa_noargs; // Swap them around like crazy. I'm going to regret this in the morning. 
swap_events_using_eq(e1, e_sa_noargs); // e1: fp_sa_noargs, e_sa_noargs: fp_sa_tcl swap_events_using_cc(e2, e_class_ntc); // Intentional NOOP, e2 = e_class_ntc: fp_class_ntc swap_events_using_eq(e_sa_ntc, e_sa_tc2); // e_sa_ntc: fp_sa_tc2, e_sa_tc2: fp_sa_ntc swap_events_using_cc(e3, e_sa_tc1); //e3: fp_sa_tc1, e_sa_tc1: fp_sa_noargs swap_events_using_eq(e_sa_noargs, e2); // e_sa_noargs: fp_class_ntc, e2: fp_sa_tc1 swap_events_using_cc(e_sa_tc2, e_class_ntc); // e_sa_tc2: fp_class_ntc, e_class_ntc: fp_sa_ntc // Final assignments: // e_sa_tc1: fp_sa_noargs // e_sa_tc2: fp_class_ntc // e_sa_noargs: fp_class_ntc // e_sa_ntc: fp_sa_tc2 // e_class_ntc: fp_sa_ntc // e1: fp_sa_noargs // e2: fp_sa_tc1 // e3: fp_sa_tc1 // Now call all of them and prepare for a headache call_event("e_sa_tc1", e_sa_tc1); call_event("e_sa_tc2", e_sa_tc2); call_event("e_sa_noargs", e_sa_noargs); call_event("e_sa_ntc", e_sa_ntc); call_event("e_class_ntc", e_class_ntc); call_event("e1", e1); call_event("e2", e2); call_event("e3", e3); } /****************************************************************************** * Entry point *****************************************************************************/ void runTest(void) { MBED_HOSTTEST_TIMEOUT(10); MBED_HOSTTEST_SELECT(default_auto); MBED_HOSTTEST_DESCRIPTION(EventHandler test); MBED_HOSTTEST_START("EvenHandler_1"); printf("========== Starting event handler test ==========\r\n"); test_standalone_funcs(); test_class_funcs_tca(); test_funcs_nontca(); test_array_of_events(); test_event_assignment_and_swap(); printf ("Final MyArg instance count (should be 0): %d\r\n", MyArg::instcount); printf ("\r\nTest Complete\r\n"); MBED_HOSTTEST_RESULT(MyArg::instcount == 0); } void app_start(int, char* []) { minar::Scheduler::postCallback(&runTest); }
lbk003/mbed-cortexm
yotta_modules/core-util/test/EventHandler/main.cpp
C++
apache-2.0
14,103
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ #include "tensorflow/lite/delegates/gpu/common/tasks/transpose.h" #include <string> #include "absl/strings/str_cat.h" #include "tensorflow/lite/delegates/gpu/common/task/work_group_picking.h" namespace tflite { namespace gpu { namespace { std::string GetTransposeCode(const OperationDef& op_def, const TransposeAttributes& attr) { const std::string batch_id = op_def.dst_tensors[0].HasAxis(Axis::BATCH) ? 
"B" : "0"; std::string c; c += "MAIN_FUNCTION($0) {\n"; if (op_def.dst_tensors[0].HasAxis(Axis::BATCH)) { c += " int linear_id = GLOBAL_ID_0;\n"; c += " int X = linear_id / args.dst_tensor.Batch();\n"; c += " int B = linear_id % args.dst_tensor.Batch();\n"; c += " args.dst_tensor.SetBatchRef(B);\n"; } else { c += " int X = GLOBAL_ID_0;\n"; } c += " int Y = GLOBAL_ID_1;\n"; c += " int S = GLOBAL_ID_2;\n"; c += " if (X >= args.dst_tensor.Width() || Y >= args.dst_tensor.Height() || " "S >= args.dst_tensor.Slices()) { \n"; c += " return; \n"; c += " } \n"; c += " FLT temps[4];\n"; c += " temps[0] = INIT_FLT(0.0f);\n"; c += " temps[1] = INIT_FLT(0.0f);\n"; c += " temps[2] = INIT_FLT(0.0f);\n"; c += " temps[3] = INIT_FLT(0.0f);\n"; int remap[4]; remap[attr.perm.b] = 0; remap[attr.perm.h] = 1; remap[attr.perm.w] = 2; remap[attr.perm.c] = 3; if (attr.perm.c == 3) { // optimized reading when no channels permutation const std::string bhw[] = {batch_id, "Y", "X"}; if (op_def.src_tensors[0].HasAxis(Axis::BATCH)) { c += " args.src_tensor.SetBatchRef(" + bhw[remap[0]] + ");\n"; } c += " int s_y = " + bhw[remap[1]] + ";\n"; c += " int s_x = " + bhw[remap[2]] + ";\n"; c += " FLT4 t = args.src_tensor.Read(s_x, s_y, S);\n"; c += " temps[0] = t.x;\n"; c += " temps[1] = t.y;\n"; c += " temps[2] = t.z;\n"; c += " temps[3] = t.w;\n"; } else { c += " for (int i = 0; i < 4; ++i) {\n"; c += " int dst_channel = S * 4 + i;\n"; c += " if (dst_channel < args.dst_tensor.Channels()) {\n"; const std::string bhwc[] = {batch_id, "Y", "X", "dst_channel"}; if (op_def.src_tensors[0].HasAxis(Axis::BATCH)) { c += " args.src_tensor.SetBatchRef(" + bhwc[remap[0]] + ");\n"; } c += " int s_y = " + bhwc[remap[1]] + ";\n"; c += " int s_x = " + bhwc[remap[2]] + ";\n"; c += " int s_c = " + bhwc[remap[3]] + ";\n"; c += " int s_z = s_c / 4;\n"; c += " int src_sub_ch = s_c % 4;\n"; c += " FLT4 t = args.src_tensor.Read(s_x, s_y, s_z);\n"; c += " temps[i] = SELECT_BY_INDEX_FROM_FLT4(t, src_sub_ch);\n"; c += " }\n"; 
c += " }\n"; } c += " FLT4 result;\n"; c += " result.x = temps[0];\n"; c += " result.y = temps[1];\n"; c += " result.z = temps[2];\n"; c += " result.w = temps[3];\n"; c += " args.dst_tensor.Write(result, X, Y, S);\n"; c += "}\n"; return c; } } // namespace GPUOperation CreateTranspose(const OperationDef& definition, const TransposeAttributes& attr) { GPUOperation op(definition); op.AddSrcTensor("src_tensor", definition.src_tensors[0]); op.AddDstTensor("dst_tensor", definition.dst_tensors[0]); op.code_ = GetTransposeCode(definition, attr); op.tensor_to_grid_ = TensorToGrid::kWBToX_HDToY_SToZ; return op; } } // namespace gpu } // namespace tflite
paolodedios/tensorflow
tensorflow/lite/delegates/gpu/common/tasks/transpose.cc
C++
apache-2.0
4,098
using System; using System.Collections.Generic; using System.IO; namespace Lucene.Net.Search.Spans { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /// <summary> /// Expert: an enumeration of span matches. Used to implement span searching. /// Each span represents a range of term positions within a document. Matches /// are enumerated in order, by increasing document number, within that by /// increasing start position and finally by increasing end position. /// </summary> public abstract class Spans { /// <summary> /// Move to the next match, returning true if any such exists. </summary> public abstract bool MoveNext(); /// <summary> /// Move to the next match, returning true if any such exists. </summary> [Obsolete("Use MoveNext() instead. This method will be removed in 4.8.0 release candidate."), System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)] public virtual bool Next() => MoveNext(); /// <summary> /// Skips to the first match beyond the current, whose document number is /// greater than or equal to <i>target</i>. /// <para/>The behavior of this method is <b>undefined</b> when called with /// <c> target &lt;= current</c>, or after the iterator has exhausted. 
/// Both cases may result in unpredicted behavior. /// <para/>Returns <c>true</c> if there is such /// a match. /// <para/>Behaves as if written: /// <code> /// bool SkipTo(int target) /// { /// do /// { /// if (!Next()) /// return false; /// } while (target > Doc); /// return true; /// } /// </code> /// Most implementations are considerably more efficient than that. /// </summary> public abstract bool SkipTo(int target); /// <summary> /// Returns the document number of the current match. Initially invalid. </summary> public abstract int Doc { get; } /// <summary> /// Returns the start position of the current match. Initially invalid. </summary> public abstract int Start { get; } /// <summary> /// Returns the end position of the current match. Initially invalid. </summary> public abstract int End { get; } /// <summary> /// Returns the payload data for the current span. /// this is invalid until <see cref="MoveNext()"/> is called for /// the first time. /// This method must not be called more than once after each call /// of <see cref="MoveNext()"/>. However, most payloads are loaded lazily, /// so if the payload data for the current position is not needed, /// this method may not be called at all for performance reasons. An ordered /// SpanQuery does not lazy load, so if you have payloads in your index and /// you do not want ordered SpanNearQuerys to collect payloads, you can /// disable collection with a constructor option. /// <para/> /// Note that the return type is a collection, thus the ordering should not be relied upon. 
/// <para/> /// @lucene.experimental /// </summary> /// <returns> A <see cref="T:ICollection{byte[]}"/> of byte arrays containing the data of this payload, /// otherwise <c>null</c> if <see cref="IsPayloadAvailable"/> is <c>false</c> </returns> /// <exception cref="IOException"> if there is a low-level I/O error </exception> // TODO: Remove warning after API has been finalized public abstract ICollection<byte[]> GetPayload(); /// <summary> /// Checks if a payload can be loaded at this position. /// <para/> /// Payloads can only be loaded once per call to /// <see cref="MoveNext()"/>. /// </summary> /// <returns> <c>true</c> if there is a payload available at this position that can be loaded </returns> public abstract bool IsPayloadAvailable { get; } /// <summary> /// Returns the estimated cost of this spans. /// <para/> /// This is generally an upper bound of the number of documents this iterator /// might match, but may be a rough heuristic, hardcoded value, or otherwise /// completely inaccurate. /// </summary> public abstract long GetCost(); } }
apache/lucenenet
src/Lucene.Net/Search/Spans/Spans.cs
C#
apache-2.0
5,395
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.prestosql.sql.planner.iterative.rule; import com.google.common.base.Predicates; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.prestosql.sql.planner.Symbol; import io.prestosql.sql.planner.iterative.rule.test.BaseRuleTest; import io.prestosql.sql.planner.iterative.rule.test.PlanBuilder; import io.prestosql.sql.planner.plan.Assignments; import io.prestosql.sql.planner.plan.ProjectNode; import org.testng.annotations.Test; import java.util.function.Predicate; import static com.google.common.collect.ImmutableSet.toImmutableSet; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.expression; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.sort; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.strictProject; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.topN; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.values; import static io.prestosql.sql.tree.SortItem.NullOrdering.FIRST; import static io.prestosql.sql.tree.SortItem.Ordering.ASCENDING; public class TestPruneTopNColumns extends BaseRuleTest { private static final long COUNT = 10; @Test public void testNotAllInputsReferenced() { tester().assertThat(new PruneTopNColumns()) .on(p -> buildProjectedTopN(p, symbol -> symbol.getName().equals("b"))) .matches( strictProject( ImmutableMap.of("b", expression("b")), topN( COUNT, 
ImmutableList.of(sort("b", ASCENDING, FIRST)), strictProject( ImmutableMap.of("b", expression("b")), values("a", "b"))))); } @Test public void testAllInputsReferenced() { tester().assertThat(new PruneTopNColumns()) .on(p -> buildProjectedTopN(p, symbol -> symbol.getName().equals("a"))) .doesNotFire(); } @Test public void testAllOutputsReferenced() { tester().assertThat(new PruneTopNColumns()) .on(p -> buildProjectedTopN(p, Predicates.alwaysTrue())) .doesNotFire(); } private ProjectNode buildProjectedTopN(PlanBuilder planBuilder, Predicate<Symbol> projectionTopN) { Symbol a = planBuilder.symbol("a"); Symbol b = planBuilder.symbol("b"); return planBuilder.project( Assignments.identity(ImmutableList.of(a, b).stream().filter(projectionTopN).collect(toImmutableSet())), planBuilder.topN( COUNT, ImmutableList.of(b), planBuilder.values(a, b))); } }
treasure-data/presto
presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneTopNColumns.java
Java
apache-2.0
3,495
// // Copyright 2014 RTMFPew // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // package session
zhwzhang/rtmfpew
protocol/session/rendezvous.go
GO
apache-2.0
609
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.manifmerger; import static com.android.common.SdkConstants.ANDROID_URI; import static com.android.common.SdkConstants.ATTR_NAME; import static com.android.manifmerger.AttributeModel.Hexadecimal32BitsWithMinimumValue; import static com.android.manifmerger.AttributeModel.MultiValueValidator; import com.android.common.SdkConstants; import com.android.annotations.NonNull; import com.android.annotations.Nullable; import com.android.annotations.concurrency.Immutable; import com.android.common.utils.SdkUtils; import com.android.common.xml.AndroidManifest; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import org.w3c.dom.Attr; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import java.util.ArrayList; import java.util.Collections; import java.util.List; /** * Model for the manifest file merging activities. * <p> * * This model will describe each element that is eligible for merging and associated merging * policies. It is not reusable as most of its interfaces are private but a future enhancement * could easily make this more generic/reusable if we need to merge more than manifest files. 
* */ @Immutable class ManifestModel { /** * Interface responsible for providing a key extraction capability from a xml element. * Some elements store their keys as an attribute, some as a sub-element attribute, some don't * have any key. */ @Immutable interface NodeKeyResolver { /** * Returns the key associated with this xml element. * @param xmlElement the xml element to get the key from * @return the key as a string to uniquely identify xmlElement from similarly typed elements * in the xml document or null if there is no key. */ @Nullable String getKey(Element xmlElement); /** * Returns the attribute(s) used to store the xml element key. * @return the key attribute(s) name(s) or null of this element does not have a key. */ @NonNull ImmutableList<String> getKeyAttributesNames(); } /** * Implementation of {@link com.android.manifmerger.ManifestModel.NodeKeyResolver} that do not * provide any key (the element has to be unique in the xml document). */ private static class NoKeyNodeResolver implements NodeKeyResolver { @Override @Nullable public String getKey(Element xmlElement) { return null; } @NonNull @Override public ImmutableList<String> getKeyAttributesNames() { return ImmutableList.of(); } } /** * Implementation of {@link com.android.manifmerger.ManifestModel.NodeKeyResolver} that uses an * attribute to resolve the key value. */ private static class AttributeBasedNodeKeyResolver implements NodeKeyResolver { @Nullable private final String mNamespaceUri; private final String mAttributeName; /** * Build a new instance capable of resolving an xml element key from the passed attribute * namespace and local name. * @param namespaceUri optional namespace for the attribute name. 
* @param attributeName attribute name */ private AttributeBasedNodeKeyResolver(@Nullable String namespaceUri, @NonNull String attributeName) { this.mNamespaceUri = namespaceUri; this.mAttributeName = Preconditions.checkNotNull(attributeName); } @Override @Nullable public String getKey(@NonNull Element xmlElement) { String key = mNamespaceUri == null ? xmlElement.getAttribute(mAttributeName) : xmlElement.getAttributeNS(mNamespaceUri, mAttributeName); if (Strings.isNullOrEmpty(key)) return null; return key; } @NonNull @Override public ImmutableList<String> getKeyAttributesNames() { return ImmutableList.of(mAttributeName); } } /** * Subclass of {@link com.android.manifmerger.ManifestModel.AttributeBasedNodeKeyResolver} that * uses "android:name" as the attribute. */ private static final NodeKeyResolver DEFAULT_NAME_ATTRIBUTE_RESOLVER = new AttributeBasedNodeKeyResolver(ANDROID_URI, SdkConstants.ATTR_NAME); private static final NoKeyNodeResolver DEFAULT_NO_KEY_NODE_RESOLVER = new NoKeyNodeResolver(); /** * A {@link com.android.manifmerger.ManifestModel.NodeKeyResolver} capable of extracting the * element key first in an "android:name" attribute and if not value found there, in the * "android:glEsVersion" attribute. */ @Nullable private static final NodeKeyResolver NAME_AND_GLESVERSION_KEY_RESOLVER = new NodeKeyResolver() { private final NodeKeyResolver nameAttrResolver = DEFAULT_NAME_ATTRIBUTE_RESOLVER; private final NodeKeyResolver glEsVersionResolver = new AttributeBasedNodeKeyResolver(ANDROID_URI, AndroidManifest.ATTRIBUTE_GLESVERSION); @Nullable @Override public String getKey(Element xmlElement) { @Nullable String key = nameAttrResolver.getKey(xmlElement); return Strings.isNullOrEmpty(key) ? 
glEsVersionResolver.getKey(xmlElement) : key; } @NonNull @Override public ImmutableList<String> getKeyAttributesNames() { return ImmutableList.of(SdkConstants.ATTR_NAME, AndroidManifest.ATTRIBUTE_GLESVERSION); } }; /** * Specific {@link com.android.manifmerger.ManifestModel.NodeKeyResolver} for intent-filter * elements. * Intent filters do not have a proper key, therefore their identity is really carried by * the presence of the action and category sub-elements. * We concatenate such elements sub-keys (after sorting them to work around declaration order) * and use that for the intent-filter unique key. */ @Nullable private static final NodeKeyResolver INTENT_FILTER_KEY_RESOLVER = new NodeKeyResolver() { @Nullable @Override public String getKey(@NonNull Element element) { @NonNull OrphanXmlElement xmlElement = new OrphanXmlElement(element); assert(xmlElement.getType() == NodeTypes.INTENT_FILTER); // concatenate all actions and categories attribute names. @NonNull List<String> allSubElementKeys = new ArrayList<String>(); NodeList childNodes = element.getChildNodes(); for (int i = 0; i < childNodes.getLength(); i++) { Node child = childNodes.item(i); if (child.getNodeType() != Node.ELEMENT_NODE) continue; @NonNull OrphanXmlElement subElement = new OrphanXmlElement((Element) child); if (subElement.getType() == NodeTypes.ACTION || subElement.getType() == NodeTypes.CATEGORY) { Attr nameAttribute = subElement.getXml() .getAttributeNodeNS(ANDROID_URI, ATTR_NAME); if (nameAttribute != null) { allSubElementKeys.add(nameAttribute.getValue()); } } } Collections.sort(allSubElementKeys); return Joiner.on('+').join(allSubElementKeys); } @NonNull @Override public ImmutableList<String> getKeyAttributesNames() { return ImmutableList.of("action#name", "category#name"); } }; /** * Implementation of {@link com.android.manifmerger.ManifestModel.NodeKeyResolver} that * combined two attributes values to create the key value. 
*/ private static final class TwoAttributesBasedKeyResolver implements NodeKeyResolver { private final NodeKeyResolver firstAttributeKeyResolver; private final NodeKeyResolver secondAttributeKeyResolver; private TwoAttributesBasedKeyResolver(NodeKeyResolver firstAttributeKeyResolver, NodeKeyResolver secondAttributeKeyResolver) { this.firstAttributeKeyResolver = firstAttributeKeyResolver; this.secondAttributeKeyResolver = secondAttributeKeyResolver; } @Nullable @Override public String getKey(Element xmlElement) { @Nullable String firstKey = firstAttributeKeyResolver.getKey(xmlElement); @Nullable String secondKey = secondAttributeKeyResolver.getKey(xmlElement); return Strings.isNullOrEmpty(firstKey) ? secondKey : Strings.isNullOrEmpty(secondKey) ? firstKey : firstKey + "+" + secondKey; } @NonNull @Override public ImmutableList<String> getKeyAttributesNames() { return ImmutableList.of(firstAttributeKeyResolver.getKeyAttributesNames().get(0), secondAttributeKeyResolver.getKeyAttributesNames().get(0)); } } private static final AttributeModel.BooleanValidator BOOLEAN_VALIDATOR = new AttributeModel.BooleanValidator(); private static final boolean MULTIPLE_DECLARATION_FOR_SAME_KEY_ALLOWED = true; /** * Definitions of the support node types in the Android Manifest file. * {@link <a href=http://developer.android.com/guide/topics/manifest/manifest-intro.html/>} * for more details about the xml format. * * There is no DTD or schema associated with the file type so this is best effort in providing * some metadata on the elements of the Android's xml file. * * Each xml element is defined as an enum value and for each node, extra metadata is added * <ul> * <li>{@link com.android.manifmerger.MergeType} to identify how the merging engine * should process this element.</li> * <li>{@link com.android.manifmerger.ManifestModel.NodeKeyResolver} to resolve the * element's key. 
Elements can have an attribute like "android:name", others can use * a sub-element, and finally some do not have a key and are meant to be unique.</li> * <li>List of attributes models with special behaviors : * <ul> * <li>Smart substitution of class names to fully qualified class names using the * document's package declaration. The list's size can be 0..n</li> * <li>Implicit default value when no defined on the xml element.</li> * <li>{@link AttributeModel.Validator} to validate attribute value against.</li> * </ul> * </ul> * * It is of the outermost importance to keep this model correct as it is used by the merging * engine to make all its decisions. There should not be special casing in the engine, all * decisions must be represented here. * * If you find yourself needing to extend the model to support future requirements, do it here * and modify the engine to make proper decision based on the added metadata. */ enum NodeTypes { /** * Action (contained in intent-filter) * <br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/action-element.html> * Action Xml documentation</a>} */ ACTION(MergeType.MERGE, DEFAULT_NAME_ATTRIBUTE_RESOLVER), /** * Activity (contained in application) * <br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/activity-element.html> * Activity Xml documentation</a>} */ ACTIVITY(MergeType.MERGE, DEFAULT_NAME_ATTRIBUTE_RESOLVER, AttributeModel.newModel("parentActivityName").setIsPackageDependent(), AttributeModel.newModel(SdkConstants.ATTR_NAME).setIsPackageDependent()), /** * Activity-alias (contained in application) * <br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/activity-alias-element.html> * Activity-alias Xml documentation</a>} */ ACTIVITY_ALIAS(MergeType.MERGE, DEFAULT_NAME_ATTRIBUTE_RESOLVER, AttributeModel.newModel("targetActivity").setIsPackageDependent(), 
AttributeModel.newModel(SdkConstants.ATTR_NAME).setIsPackageDependent()), /** * Application (contained in manifest) * <br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/application-element.html> * Application Xml documentation</a>} */ APPLICATION(MergeType.MERGE, DEFAULT_NO_KEY_NODE_RESOLVER, AttributeModel.newModel("backupAgent").setIsPackageDependent(), AttributeModel.newModel(SdkConstants.ATTR_NAME).setIsPackageDependent()), /** * Category (contained in intent-filter) * <br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/category-element.html> * Category Xml documentation</a>} */ CATEGORY(MergeType.MERGE, DEFAULT_NAME_ATTRIBUTE_RESOLVER), /** * Compatible-screens (contained in manifest) * <br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/compatible-screens-element.html> * Category Xml documentation</a>} */ COMPATIBLE_SCREENS(MergeType.MERGE, DEFAULT_NO_KEY_NODE_RESOLVER), /** * Data (contained in intent-filter) * <br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/data-element.html> * Category Xml documentation</a>} */ DATA(MergeType.MERGE, DEFAULT_NO_KEY_NODE_RESOLVER), /** * Grant-uri-permission (contained in intent-filter) * <br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/grant-uri-permission-element.html> * Category Xml documentation</a>} */ GRANT_URI_PERMISSION(MergeType.MERGE, DEFAULT_NO_KEY_NODE_RESOLVER), /** * Instrumentation (contained in intent-filter) * <br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/instrumentation-element.html> * Instrunentation Xml documentation</a>} */ INSTRUMENTATION( MergeType.MERGE, DEFAULT_NO_KEY_NODE_RESOLVER, AttributeModel.newModel("name").setMergingPolicy(AttributeModel.NO_MERGING_POLICY), AttributeModel.newModel("targetPackage") 
.setMergingPolicy(AttributeModel.NO_MERGING_POLICY), AttributeModel.newModel("functionalTest") .setMergingPolicy(AttributeModel.NO_MERGING_POLICY), AttributeModel.newModel("handleProfiling") .setMergingPolicy(AttributeModel.NO_MERGING_POLICY), AttributeModel.newModel("label").setMergingPolicy(AttributeModel.NO_MERGING_POLICY) ), /** * Intent-filter (contained in activity, activity-alias, service, receiver) * <br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/intent-filter-element.html> * Intent-filter Xml documentation</a>} */ INTENT_FILTER(MergeType.ALWAYS, INTENT_FILTER_KEY_RESOLVER, MULTIPLE_DECLARATION_FOR_SAME_KEY_ALLOWED), /** * Manifest (top level node) * <br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/manifest-element.html> * Manifest Xml documentation</a>} */ MANIFEST(MergeType.MERGE_CHILDREN_ONLY, DEFAULT_NO_KEY_NODE_RESOLVER), /** * Meta-data (contained in activity, activity-alias, application, provider, receiver) * <br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/meta-data-element.html> * Meta-data Xml documentation</a>} */ META_DATA(MergeType.MERGE, DEFAULT_NAME_ATTRIBUTE_RESOLVER), /** * Path-permission (contained in provider) * <br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/path-permission-element.html> * Meta-data Xml documentation</a>} */ PATH_PERMISSION(MergeType.MERGE, DEFAULT_NO_KEY_NODE_RESOLVER), /** * Permission-group (contained in manifest). * <br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/permission-group-element.html> * Permission-group Xml documentation</a>} * */ PERMISSION_GROUP(MergeType.MERGE, DEFAULT_NAME_ATTRIBUTE_RESOLVER, AttributeModel.newModel(SdkConstants.ATTR_NAME)), /** * Permission (contained in manifest). 
* <br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/permission-element.html> * Permission Xml documentation</a>} * */ PERMISSION(MergeType.MERGE, DEFAULT_NAME_ATTRIBUTE_RESOLVER, AttributeModel.newModel(SdkConstants.ATTR_NAME), AttributeModel.newModel("protectionLevel") .setDefaultValue("normal") // TODO : this will need to be populated from // sdk/platforms/android-19/data/res/values.attrs_manifest.xml .setOnReadValidator(new MultiValueValidator( "normal", "dangerous", "signature", "signatureOrSystem"))), /** * Permission-tree (contained in manifest). * <br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/permission-tree-element.html> * Permission-tree Xml documentation</a>} * */ PERMISSION_TREE(MergeType.MERGE, DEFAULT_NAME_ATTRIBUTE_RESOLVER, AttributeModel.newModel(SdkConstants.ATTR_NAME)), /** * Provider (contained in application) * <br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/provider-element.html> * Provider Xml documentation</a>} */ PROVIDER(MergeType.MERGE, DEFAULT_NAME_ATTRIBUTE_RESOLVER, AttributeModel.newModel(SdkConstants.ATTR_NAME) .setIsPackageDependent()), /** * Receiver (contained in application) * <br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/receiver-element.html> * Receiver Xml documentation</a>} */ RECEIVER(MergeType.MERGE, DEFAULT_NAME_ATTRIBUTE_RESOLVER, AttributeModel.newModel(SdkConstants.ATTR_NAME).setIsPackageDependent()), /** * Screen (contained in compatible-screens) * <br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/compatible-screens-element.html> * Receiver Xml documentation</a>} */ SCREEN(MergeType.MERGE, new TwoAttributesBasedKeyResolver( new AttributeBasedNodeKeyResolver(ANDROID_URI, "screenSize"), new AttributeBasedNodeKeyResolver(ANDROID_URI, "screenDensity"))), /** * Service (contained in application) * 
<br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/application-element.html> * Service Xml documentation</a>} */ SERVICE(MergeType.MERGE, DEFAULT_NAME_ATTRIBUTE_RESOLVER, AttributeModel.newModel(SdkConstants.ATTR_NAME).setIsPackageDependent()), /** * Supports-gl-texture (contained in manifest) * <br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/supports-gl-texture-element.html> * Support-screens Xml documentation</a>} */ SUPPORTS_GL_TEXTURE(MergeType.MERGE, DEFAULT_NAME_ATTRIBUTE_RESOLVER), /** * Support-screens (contained in manifest) * <br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/supports-screens-element.html> * Support-screens Xml documentation</a>} */ SUPPORTS_SCREENS(MergeType.MERGE, DEFAULT_NO_KEY_NODE_RESOLVER), /** * Uses-configuration (contained in manifest) * <br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/uses-configuration-element.html> * Support-screens Xml documentation</a>} */ USES_CONFIGURATION(MergeType.MERGE, DEFAULT_NO_KEY_NODE_RESOLVER), /** * Uses-feature (contained in manifest) * <br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/uses-feature-element.html> * Uses-feature Xml documentation</a>} */ USES_FEATURE(MergeType.MERGE, NAME_AND_GLESVERSION_KEY_RESOLVER, AttributeModel.newModel(AndroidManifest.ATTRIBUTE_REQUIRED) .setDefaultValue(SdkConstants.VALUE_TRUE) .setOnReadValidator(BOOLEAN_VALIDATOR) .setMergingPolicy(AttributeModel.OR_MERGING_POLICY), AttributeModel.newModel(AndroidManifest.ATTRIBUTE_GLESVERSION) .setDefaultValue("0x00010000") .setOnReadValidator(new Hexadecimal32BitsWithMinimumValue(0x00010000))), /** * Use-library (contained in application) * <br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/uses-library-element.html> * Use-library Xml documentation</a>} */ 
USES_LIBRARY(MergeType.MERGE, DEFAULT_NAME_ATTRIBUTE_RESOLVER, AttributeModel.newModel(AndroidManifest.ATTRIBUTE_REQUIRED) .setDefaultValue(SdkConstants.VALUE_TRUE) .setOnReadValidator(BOOLEAN_VALIDATOR) .setMergingPolicy(AttributeModel.OR_MERGING_POLICY)), /** * Uses-permission (contained in application) * <br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/uses-permission-element.html> * Uses-permission Xml documentation</a>} */ USES_PERMISSION(MergeType.MERGE, DEFAULT_NAME_ATTRIBUTE_RESOLVER), /** * Uses-sdk (contained in manifest) * <br> * <b>See also : </b> * {@link <a href=http://developer.android.com/guide/topics/manifest/uses-sdk-element.html> * Uses-sdk Xml documentation</a>} */ USES_SDK(MergeType.MERGE, DEFAULT_NO_KEY_NODE_RESOLVER, AttributeModel.newModel("minSdkVersion") .setDefaultValue(SdkConstants.VALUE_1) .setMergingPolicy(AttributeModel.NO_MERGING_POLICY), AttributeModel.newModel("maxSdkVersion") .setMergingPolicy(AttributeModel.NO_MERGING_POLICY), // TODO : model target's default value is minSdkVersion value. AttributeModel.newModel("targetSdkVersion") .setMergingPolicy(AttributeModel.NO_MERGING_POLICY) ), /** * Custom tag for any application specific element */ CUSTOM(MergeType.MERGE, DEFAULT_NO_KEY_NODE_RESOLVER); private final MergeType mMergeType; private final NodeKeyResolver mNodeKeyResolver; private final ImmutableList<AttributeModel> mAttributeModels; private final boolean mMultipleDeclarationAllowed; NodeTypes( @NonNull MergeType mergeType, @NonNull NodeKeyResolver nodeKeyResolver, @Nullable AttributeModel.Builder... attributeModelBuilders) { this(mergeType, nodeKeyResolver, false, attributeModelBuilders); } NodeTypes( @NonNull MergeType mergeType, @NonNull NodeKeyResolver nodeKeyResolver, boolean mutipleDeclarationAllowed, @Nullable AttributeModel.Builder... 
attributeModelBuilders) { this.mMergeType = Preconditions.checkNotNull(mergeType); this.mNodeKeyResolver = Preconditions.checkNotNull(nodeKeyResolver); @NonNull ImmutableList.Builder<AttributeModel> attributeModels = new ImmutableList.Builder<AttributeModel>(); if (attributeModelBuilders != null) { for (AttributeModel.Builder attributeModelBuilder : attributeModelBuilders) { attributeModels.add(attributeModelBuilder.build()); } } this.mAttributeModels = attributeModels.build(); this.mMultipleDeclarationAllowed = mutipleDeclarationAllowed; } @NonNull NodeKeyResolver getNodeKeyResolver() { return mNodeKeyResolver; } ImmutableList<AttributeModel> getAttributeModels() { return mAttributeModels.asList(); } @Nullable AttributeModel getAttributeModel(XmlNode.NodeName attributeName) { // mAttributeModels could be replaced with a Map if the number of models grows. for (AttributeModel attributeModel : mAttributeModels) { if (attributeModel.getName().equals(attributeName)) { return attributeModel; } } return null; } /** * Returns the Xml name for this node type */ String toXmlName() { return SdkUtils.constantNameToXmlName(this.name()); } /** * Returns the {@link NodeTypes} instance from an xml element name (without namespace * decoration). For instance, an xml element * <pre> * {@code * <activity android:name="foo"> * ... * </activity>} * </pre> * has a xml simple name of "activity" which will resolve to {@link NodeTypes#ACTIVITY} value. * * Note : a runtime exception will be generated if no mapping from the simple name to a * {@link com.android.manifmerger.ManifestModel.NodeTypes} exists. * * @param xmlSimpleName the xml (lower-hyphen separated words) simple name. * @return the {@link NodeTypes} associated with that element name. 
*/ static NodeTypes fromXmlSimpleName(String xmlSimpleName) { String constantName = SdkUtils.xmlNameToConstantName(xmlSimpleName); try { return NodeTypes.valueOf(constantName); } catch (IllegalArgumentException e) { // if this element name is not a known tag, we categorize it as 'custom' which will // be simply merged. It will prevent us from catching simple spelling mistakes but // extensibility is a must have feature. return NodeTypes.CUSTOM; } } MergeType getMergeType() { return mMergeType; } /** * Returns true if multiple declaration for the same type and key are allowed or false if * there must be only one declaration of this element for a particular key value. */ boolean areMultipleDeclarationAllowed() { return mMultipleDeclarationAllowed; } } }
daedric/buck
third-party/java/aosp/src/com/android/manifmerger/ManifestModel.java
Java
apache-2.0
29,298
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.runners.dataflow; import com.google.auto.service.AutoService; import com.google.common.collect.ImmutableList; import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions; import org.apache.beam.sdk.PipelineRunner; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.options.PipelineOptionsRegistrar; import org.apache.beam.sdk.runners.PipelineRunnerRegistrar; /** * Contains the {@link PipelineOptionsRegistrar} and {@link PipelineRunnerRegistrar} for the {@link * DataflowRunner}. */ public class DataflowPipelineRegistrar { private DataflowPipelineRegistrar() {} /** Register the {@link DataflowPipelineOptions}. */ @AutoService(PipelineOptionsRegistrar.class) public static class Options implements PipelineOptionsRegistrar { @Override public Iterable<Class<? extends PipelineOptions>> getPipelineOptions() { return ImmutableList.of(DataflowPipelineOptions.class); } } /** Register the {@link DataflowRunner}. */ @AutoService(PipelineRunnerRegistrar.class) public static class Runner implements PipelineRunnerRegistrar { @Override public Iterable<Class<? 
extends PipelineRunner<?>>> getPipelineRunners() { return ImmutableList.of(DataflowRunner.class, TestDataflowRunner.class); } } }
mxm/incubator-beam
runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowPipelineRegistrar.java
Java
apache-2.0
2,118
using Newtonsoft.Json; using System; namespace Box.V2.Models { /// <summary> /// Box representation of a permission /// </summary> public class BoxPermission { /// <summary> /// Whether the item can be downloaded or not /// </summary> [JsonProperty("can_download")] public bool CanDownload { get; set; } /// <summary> /// Whether the item can be previewed or not /// </summary> [Obsolete("CanPreview is now deprecated in the API and will always return true.")] [JsonProperty("can_preview")] public bool CanPreview { get; set; } } }
SudhersonV/DotNetRoot
Modules/Box/Box.V2/Models/BoxPermission.cs
C#
apache-2.0
649
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.ra; import javax.naming.NamingException; import javax.naming.Reference; import javax.resource.ResourceException; import org.apache.geode.internal.ra.spi.JCAManagedConnection; import org.apache.geode.ra.GFConnection; public class GFConnectionImpl implements GFConnection { private JCAManagedConnection mc; private Reference ref; public GFConnectionImpl(JCAManagedConnection mc) { this.mc = mc; } public void resetManagedConnection(JCAManagedConnection mc) { this.mc = mc; } @Override public void close() throws ResourceException { // Check if the connection is associated with a JTA. If yes, then // we should throw an exception on close being invoked. if (this.mc != null) { this.mc.onClose(this); } } public void invalidate() { this.mc = null; } @Override public void setReference(Reference ref) { this.ref = ref; } @Override public Reference getReference() throws NamingException { return this.ref; } }
smgoller/geode
geode-core/src/jca/java/org/apache/geode/internal/ra/GFConnectionImpl.java
Java
apache-2.0
1,817
/* * Copyright (c) 2014, STMicroelectronics International N.V. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ #include "mpa.h" #define USE_PRIME_TABLE #if defined(USE_PRIME_TABLE) #include "mpa_primetable.h" #endif #define DEF_COMPOSITE 0 #define DEF_PRIME 1 #define PROB_PRIME -1 /* Product of all primes < 1000 */ static const mpa_num_base const_small_prime_factors = { 44, 44, {0x2ED42696, 0x2BBFA177, 0x4820594F, 0xF73F4841, 0xBFAC313A, 0xCAC3EB81, 0xF6F26BF8, 0x7FAB5061, 0x59746FB7, 0xF71377F6, 0x3B19855B, 0xCBD03132, 0xBB92EF1B, 0x3AC3152C, 0xE87C8273, 0xC0AE0E69, 0x74A9E295, 0x448CCE86, 0x63CA1907, 0x8A0BF944, 0xF8CC3BE0, 0xC26F0AF5, 0xC501C02F, 0x6579441A, 0xD1099CDA, 0x6BC76A00, 0xC81A3228, 0xBFB1AB25, 0x70FA3841, 0x51B3D076, 0xCC2359ED, 0xD9EE0769, 0x75E47AF0, 0xD45FF31E, 0x52CCE4F6, 0x04DBC891, 0x96658ED2, 0x1753EFE5, 0x3AE4A5A6, 0x8FD4A97F, 0x8B15E7EB, 0x0243C3E1, 0xE0F0C31D, 0x0000000B} }; /* * If n is less than this number (341550071728321 decimal) the Miller-Rabin * test (using specific bases) constitutes a primality proof. */ static const mpa_num_base const_miller_rabin_proof_limit = { 2, 2, {0x52B2C8C1, 0x000136A3} }; static const mpa_num_base const_two = { 1, 1, {0x00000002} }; /* foward declarations */ static int is_small_prime(mpanum n); static int has_small_factors(mpanum n, mpa_scratch_mem pool); static int primality_test_miller_rabin(mpanum n, int conf_level, mpa_scratch_mem pool); /*------------------------------------------------------------ * * mpa_is_prob_prime * * Returns: * 0 if n is definitely composite * 1 if n is definitely prime * -1 if n is composite with a probability less than 2^(-conf_level) * */ int mpa_is_prob_prime(mpanum n, int conf_level, mpa_scratch_mem pool) { int result = 0; /* Check if it's a small prime */ result = is_small_prime(n); if (result != PROB_PRIME) goto cleanup; /* Test if n is divisible by any prime < 1000 */ if (has_small_factors(n, pool)) { result = DEF_COMPOSITE; goto cleanup; } /* Check with Miller Rabin */ result = primality_test_miller_rabin(n, conf_level, pool); cleanup: return result; } #if 
defined(USE_PRIME_TABLE) /*------------------------------------------------------------ * * check_table * */ static uint32_t check_table(uint32_t v) { return (PRIME_TABLE[v >> 5] >> (v & 0x1f)) & 1; } #endif /*------------------------------------------------------------ * * is_small_prime * * Returns 1 if n is prime, Returns 0 if n is composite * Returns -1 if we cannot decide * */ static int is_small_prime(mpanum n) { mpa_word_t v; /* If n is larger than a mpa_word_t, we can only decide if */ /* n is even. If it's odd we cannot tell. */ if (__mpanum_size(n) > 1) return ((mpa_parity(n) == MPA_EVEN_PARITY) ? 0 : -1); v = mpa_get_word(n); /* will convert negative n:s to positive v:s. */ if ((v | 1) == 1) /* 0 and 1 are not prime */ return DEF_COMPOSITE; if (v == 2) /* 2 is prime */ return DEF_PRIME; if ((v & 1) == 0) return DEF_COMPOSITE; /* but no other even number */ #if defined(USE_PRIME_TABLE) if (mpa_cmp_short(n, MAX_TABULATED_PRIME) > 0) return -1; v = (v - 3) >> 1; return check_table(v); #else return -1; #endif } /*------------------------------------------------------------ * * has_small_factors * * returns 1 if n has small factors * returns 0 if not. */ static int has_small_factors(mpanum n, mpa_scratch_mem pool) { const mpa_num_base *factors = &const_small_prime_factors; int result; mpanum res; mpa_alloc_static_temp_var(&res, pool); mpa_gcd(res, n, (const mpanum)factors, pool); result = (mpa_cmp_short(res, 1) == 0) ? 
0 : 1; mpa_free_static_temp_var(&res, pool); return result; } /*------------------------------------------------------------ * * primality_test_miller_rabin * */ static int primality_test_miller_rabin(mpanum n, int conf_level, mpa_scratch_mem pool) { int result; bool proof_version; static const int32_t proof_a[7] = { 2, 3, 5, 7, 11, 13, 17 }; int cnt; int idx; int t; int e = 0; int cmp_one; mpanum a; mpanum q; mpanum n_minus_1; mpanum b; mpanum r_modn; mpanum r2_modn; mpa_word_t n_inv; mpa_alloc_static_temp_var(&r_modn, pool); mpa_alloc_static_temp_var(&r2_modn, pool); if (mpa_compute_fmm_context(n, r_modn, r2_modn, &n_inv, pool) == -1) { result = DEF_COMPOSITE; goto cleanup_short; } mpa_alloc_static_temp_var(&a, pool); mpa_alloc_static_temp_var(&q, pool); mpa_alloc_static_temp_var(&n_minus_1, pool); mpa_alloc_static_temp_var(&b, pool); proof_version = (mpa_cmp(n, (mpanum) &const_miller_rabin_proof_limit) < 0); if (proof_version) cnt = 7; else /* MR has 1/4 chance in failing a composite */ cnt = (conf_level + 1) / 2; mpa_sub_word(n_minus_1, n, 1, pool); mpa_set(q, n_minus_1); t = 0; /* calculate q such that n - 1 = 2^t * q where q is odd */ while (mpa_is_even(q)) { mpa_shift_right(q, q, 1); t++; } result = PROB_PRIME; for (idx = 0; idx < cnt && result == PROB_PRIME; idx++) { if (proof_version) { mpa_set_S32(a, proof_a[idx]); if (mpa_cmp(n, a) == 0) { result = DEF_PRIME; continue; } } else { /* * Get random a, 1 < a < N by * asking for a random in range 0 <= x < N - 2 * and then add 2 to it. */ mpa_sub_word(n_minus_1, n_minus_1, 1, pool); /* n_minus_1 is now N - 2 ! 
*/ mpa_get_random(a, n_minus_1); mpa_add_word(n_minus_1, n_minus_1, 1, pool); /* and a is now 2 <= a < N */ mpa_add_word(a, a, 2, pool); } mpa_exp_mod(b, a, q, n, r_modn, r2_modn, n_inv, pool); e = 0; inner_loop: cmp_one = mpa_cmp_short(b, 1); if ((cmp_one == 0) && (e > 0)) { result = DEF_COMPOSITE; continue; } if ((mpa_cmp(b, n_minus_1) == 0) || ((cmp_one == 0) && (e == 0))) { /* probably prime, try another a */ continue; } e++; if (e < t) { mpa_exp_mod(b, b, (mpanum) &const_two, n, r_modn, r2_modn, n_inv, pool); goto inner_loop; } result = DEF_COMPOSITE; } if (result == PROB_PRIME && proof_version) result = DEF_PRIME; mpa_free_static_temp_var(&a, pool); mpa_free_static_temp_var(&q, pool); mpa_free_static_temp_var(&n_minus_1, pool); mpa_free_static_temp_var(&b, pool); cleanup_short: mpa_free_static_temp_var(&r_modn, pool); mpa_free_static_temp_var(&r2_modn, pool); return result; }
matt2048/optee_os
lib/libmpa/mpa_primetest.c
C
bsd-2-clause
7,676
from __future__ import print_function from pysb.simulator import ScipyOdeSimulator from tutorial_a import model t = [0, 10, 20, 30, 40, 50, 60] simulator = ScipyOdeSimulator(model, tspan=t) simresult = simulator.run() print(simresult.species)
LoLab-VU/pysb
pysb/examples/run_tutorial_a.py
Python
bsd-2-clause
244
#!/usr/bin/python # # Copyright (C) 2012 Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS # IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED # TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
"""Script for testing ganeti.tools.node_daemon_setup""" import unittest from ganeti import errors from ganeti import constants from ganeti.tools import node_daemon_setup import testutils _SetupError = node_daemon_setup.SetupError class TestVerifySsconf(unittest.TestCase): def testNoSsconf(self): self.assertRaises(_SetupError, node_daemon_setup.VerifySsconf, {}, NotImplemented, _verify_fn=NotImplemented) for items in [None, {}]: self.assertRaises(_SetupError, node_daemon_setup.VerifySsconf, { constants.NDS_SSCONF: items, }, NotImplemented, _verify_fn=NotImplemented) def _Check(self, names): self.assertEqual(frozenset(names), frozenset([ constants.SS_CLUSTER_NAME, constants.SS_INSTANCE_LIST, ])) def testSuccess(self): ssdata = { constants.SS_CLUSTER_NAME: "cluster.example.com", constants.SS_INSTANCE_LIST: [], } result = node_daemon_setup.VerifySsconf({ constants.NDS_SSCONF: ssdata, }, "cluster.example.com", _verify_fn=self._Check) self.assertEqual(result, ssdata) self.assertRaises(_SetupError, node_daemon_setup.VerifySsconf, { constants.NDS_SSCONF: ssdata, }, "wrong.example.com", _verify_fn=self._Check) def testInvalidKey(self): self.assertRaises(errors.GenericError, node_daemon_setup.VerifySsconf, { constants.NDS_SSCONF: { "no-valid-ssconf-key": "value", }, }, NotImplemented) if __name__ == "__main__": testutils.GanetiTestProgram()
bitemyapp/ganeti
test/py/ganeti.tools.node_daemon_setup_unittest.py
Python
bsd-2-clause
2,901
/* * Copyright (c) 2000-2002, 2004, 2005, 2008 Apple Inc. All rights reserved. * * @APPLE_LICENSE_HEADER_START@ * * This file contains Original Code and/or Modifications of Original Code * as defined in and that are subject to the Apple Public Source License * Version 2.0 (the 'License'). You may not use this file except in * compliance with the License. Please obtain a copy of the License at * http://www.opensource.apple.com/apsl/ and read it before using this * file. * * The Original Code and all software distributed under the License are * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES, * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT. * Please see the License for the specific language governing rights and * limitations under the License. * * @APPLE_LICENSE_HEADER_END@ */ #ifndef _SCDYNAMICSTOREKEY_H #ifdef USE_SYSTEMCONFIGURATION_PRIVATE_HEADERS #include <SystemConfiguration/_SCDynamicStoreKey.h> #else /* USE_SYSTEMCONFIGURATION_PRIVATE_HEADERS */ #define _SCDYNAMICSTOREKEY_H #include <Availability.h> #include <sys/cdefs.h> #include <CoreFoundation/CoreFoundation.h> /*! @header SCDynamicStoreKey @discussion The SCDynamicStoreKey API provides convenience functions that an application can use to create a correctly formatted dynamic store key for accessing specific items in the dynamic store. An application can then use the resulting string in any function that requires a dynamic store key. */ __BEGIN_DECLS /* * SCDynamicStoreKeyCreate * - convenience routines that create a CFString key for an item in the store */ /*! @function SCDynamicStoreKeyCreate @discussion Creates a dynamic store key using the given format. @param allocator The CFAllocator that should be used to allocate memory for this key. This parameter may be NULL in which case the current default CFAllocator is used. 
If this reference is not a valid CFAllocator, the behavior is undefined. @param fmt A CFStringRef describing the format for this key. @result Returns a string containing the formatted key. */ CFStringRef SCDynamicStoreKeyCreate ( CFAllocatorRef allocator, CFStringRef fmt, ... ) __OSX_AVAILABLE_STARTING(__MAC_10_1,__IPHONE_NA); /*! @function SCDynamicStoreKeyCreateNetworkGlobalEntity @discussion Creates a dynamic store key that can be used to access a specific global (as opposed to a per-service or per-interface) network configuration entity. @param allocator The CFAllocator that should be used to allocate memory for this key. This parameter may be NULL in which case the current default CFAllocator is used. If this reference is not a valid CFAllocator, the behavior is undefined. @param domain A string specifying the desired domain, such as the requested configuration (kSCDynamicStoreDomainSetup) or the actual state (kSCDynamicStoreDomainState). @param entity A string containing the specific global entity, such as IPv4 (kSCEntNetIPv4) or DNS (kSCEntNetDNS). @result Returns a string containing the formatted key. */ CFStringRef SCDynamicStoreKeyCreateNetworkGlobalEntity ( CFAllocatorRef allocator, CFStringRef domain, CFStringRef entity ) __OSX_AVAILABLE_STARTING(__MAC_10_1,__IPHONE_NA); /*! @function SCDynamicStoreKeyCreateNetworkInterface @discussion Creates a dynamic store key that can be used to access the network interface configuration information stored in the dynamic store. @param allocator The CFAllocator that should be used to allocate memory for this key. This parameter may be NULL in which case the current default CFAllocator is used. If this reference is not a valid CFAllocator, the behavior is undefined. @param domain A string specifying the desired domain, such as the requested configuration (kSCDynamicStoreDomainSetup) or the actual state (kSCDynamicStoreDomainState). @result Returns a string containing the formatted key. 
*/ CFStringRef SCDynamicStoreKeyCreateNetworkInterface ( CFAllocatorRef allocator, CFStringRef domain ) __OSX_AVAILABLE_STARTING(__MAC_10_1,__IPHONE_NA); /*! @function SCDynamicStoreKeyCreateNetworkInterfaceEntity @discussion Creates a dynamic store key that can be used to access the per-interface network configuration information stored in the dynamic store. @param allocator The CFAllocator that should be used to allocate memory for this key. This parameter may be NULL in which case the current default CFAllocator is used. If this reference is not a valid CFAllocator, the behavior is undefined. @param domain A string specifying the desired domain, such as the requested configuration (kSCDynamicStoreDomainSetup) or the actual state (kSCDynamicStoreDomainState). @param ifname A string containing the interface name or a regular expression pattern. @param entity A string containing the specific global entity, such as IPv4 (kSCEntNetIPv4) or DNS (kSCEntNetDNS). @result Returns a string containing the formatted key. */ CFStringRef SCDynamicStoreKeyCreateNetworkInterfaceEntity ( CFAllocatorRef allocator, CFStringRef domain, CFStringRef ifname, CFStringRef entity ) __OSX_AVAILABLE_STARTING(__MAC_10_1,__IPHONE_NA); /*! @function SCDynamicStoreKeyCreateNetworkServiceEntity @discussion Creates a dynamic store key that can be used to access the per-service network configuration information stored in the dynamic store. @param allocator The CFAllocator that should be used to allocate memory for this key. This parameter may be NULL in which case the current default CFAllocator is used. If this reference is not a valid CFAllocator, the behavior is undefined. @param domain A string specifying the desired domain, such as the requested configuration (kSCDynamicStoreDomainSetup) or the actual state (kSCDynamicStoreDomainState). @param serviceID A string containing the service ID or a regular expression pattern. 
@param entity A string containing the specific global entity, such as IPv4 (kSCEntNetIPv4) or DNS (kSCEntNetDNS). @result Returns a string containing the formatted key. */ CFStringRef SCDynamicStoreKeyCreateNetworkServiceEntity ( CFAllocatorRef allocator, CFStringRef domain, CFStringRef serviceID, CFStringRef entity ) __OSX_AVAILABLE_STARTING(__MAC_10_1,__IPHONE_NA); /*! @function SCDynamicStoreKeyCreateComputerName @discussion Creates a key that can be used in conjuntion with SCDynamicStoreSetNotificationKeys function to receive notifications when the current computer name changes. @param allocator The CFAllocator that should be used to allocate memory for this key. This parameter may be NULL in which case the current default CFAllocator is used. If this reference is not a valid CFAllocator, the behavior is undefined. @result Returns a notification string for the current computer or host name. */ CFStringRef SCDynamicStoreKeyCreateComputerName ( CFAllocatorRef allocator ) __OSX_AVAILABLE_STARTING(__MAC_10_1,__IPHONE_NA); /*! @function SCDynamicStoreKeyCreateConsoleUser @discussion Creates a key that can be used in conjunction with SCDynamicStoreSetNotificationKeys function to receive notifications when the current console user changes. @param allocator The CFAllocator that should be used to allocate memory for this key. This parameter may be NULL in which case the current default CFAllocator is used. If this reference is not a valid CFAllocator, the behavior is undefined. @result Returns a notification string for the current console user. */ CFStringRef SCDynamicStoreKeyCreateConsoleUser ( CFAllocatorRef allocator ) __OSX_AVAILABLE_STARTING(__MAC_10_1,__IPHONE_NA); /*! @function SCDynamicStoreKeyCreateHostNames @discussion Creates a key that can be used in conjunction with the SCDynamicStoreSetNotificationKeys function to receive notifications when the HostNames entity changes. The HostNames entity includes the local host name. 
@param allocator The CFAllocator that should be used to allocate memory for this key. This parameter may be NULL in which case the current default CFAllocator is used. If this reference is not a valid CFAllocator, the behavior is undefined. @result Returns a notification string for the HostNames entity. */ CFStringRef SCDynamicStoreKeyCreateHostNames ( CFAllocatorRef allocator ) __OSX_AVAILABLE_STARTING(__MAC_10_2,__IPHONE_NA); /*! @function SCDynamicStoreKeyCreateLocation @discussion Creates a key that can be used in conjunction with the SCDynamicStoreSetNotificationKeys function to receive notifications when the location identifier changes. @param allocator The CFAllocator that should be used to allocate memory for this key. This parameter may be NULL in which case the current default CFAllocator is used. If this reference is not a valid CFAllocator, the behavior is undefined. @result Returns a notification string for the current location identifier. */ CFStringRef SCDynamicStoreKeyCreateLocation ( CFAllocatorRef allocator ) __OSX_AVAILABLE_STARTING(__MAC_10_2,__IPHONE_NA); /*! @function SCDynamicStoreKeyCreateProxies @discussion Creates a key that can be used in conjunction with the SCDynamicStoreSetNotificationKeys function to receive notifications when the current network proxy settings (such as HTTP or FTP) are changed. @param allocator The CFAllocator that should be used to allocate memory for this key. This parameter may be NULL in which case the current default CFAllocator is used. If this reference is not a valid CFAllocator, the behavior is undefined. @result Returns a notification string for the current proxy settings. */ CFStringRef SCDynamicStoreKeyCreateProxies ( CFAllocatorRef allocator ) __OSX_AVAILABLE_STARTING(__MAC_10_1,__IPHONE_NA); __END_DECLS #endif /* USE_SYSTEMCONFIGURATION_PRIVATE_HEADERS */ #endif /* _SCDYNAMICSTOREKEY_H */
wangchunling/react-native-for-APICloud
project/AwesomeProject/SystemConfiguration.framework/Headers/SCDynamicStoreKey.h
C
bsd-2-clause
10,213
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CONTENT_RENDERER_DOM_AUTOMATION_CONTROLLER_H_ #define CONTENT_RENDERER_DOM_AUTOMATION_CONTROLLER_H_ #include <stdint.h> #include "base/macros.h" #include "content/public/renderer/render_frame_observer.h" #include "gin/wrappable.h" /* DomAutomationController class: Bound to Javascript window.domAutomationController object. At the very basic, this object makes any native value (string, numbers, boolean) from javascript available to the automation host in Cpp. Any renderer implementation that is built with this binding will allow the above facility. The intended use of this object is to expose the DOM Objects and their attributes to the automation host. A typical usage would be like following (JS code): var object = document.getElementById('some_id'); window.domAutomationController.send(object.nodeName); // get the tag name For the exact mode of usage, refer AutomationProxyTest.*DomAutomationController tests. The class provides a single send method that can send variety of native javascript values. (NPString, Number(double), Boolean) The actual communication occurs in the following manner: TEST MASTER RENDERER (1) (3) |AProxy| ----->|AProvider|----->|RenderView|------| /\ | | | | | | | |(6) |(2) |(0) |(4) | | \/ | | |-------->|DAController|<----| | | | |(5) |-------|WebContentsImpl|<--------| Legends: - AProxy = AutomationProxy - AProvider = AutomationProvider - DAController = DomAutomationController (0) Initialization step where DAController is bound to the renderer and the view_id of the renderer is supplied to the DAController for routing message in (5). (1) A 'javascript:' url is sent from the test process to master as an IPC message. 
A unique routing id is generated at this stage (automation_id_) (2) The automation_id_ of step (1) is supplied to DAController by calling the bound method setAutomationId(). This is required for routing message in (6). (3) The 'javascript:' url is sent for execution by calling into Browser::LoadURL() (4) A callback is generated as a result of domAutomationController.send() into Cpp. The supplied value is received as a result of this callback. (5) The value received in (4) is sent to the master along with the stored automation_id_ as an IPC message. The frame_'s RenderFrameImpl is used to route the message. (IPC messages, ViewHostMsg_*DomAutomation* ) (6) The value and the automation_id_ is extracted out of the message received in (5). This value is relayed to AProxy using another IPC message. automation_id_ is used to route the message. (IPC messages, AutomationMsg_Dom*Response) */ namespace blink { class WebFrame; } namespace gin { class Arguments; } namespace content { class RenderFrame; class DomAutomationController : public gin::Wrappable<DomAutomationController>, public RenderFrameObserver { public: static gin::WrapperInfo kWrapperInfo; static void Install(RenderFrame* render_frame, blink::WebFrame* frame); // Makes the renderer send a javascript value to the app. // The value to be sent can be either of type String, // Number (double casted to int32_t) or Boolean. Any other type or no // argument at all is ignored. bool SendMsg(const gin::Arguments& args); // Makes the renderer send a javascript value to the app. // The value should be properly formed JSON. bool SendJSON(const std::string& json); // Sends a string with a provided Automation Id. 
bool SendWithId(int automation_id, const std::string& str); bool SetAutomationId(int automation_id); private: explicit DomAutomationController(RenderFrame* render_view); ~DomAutomationController() override; // gin::WrappableBase gin::ObjectTemplateBuilder GetObjectTemplateBuilder( v8::Isolate* isolate) override; // RenderFrameObserver void OnDestruct() override; void DidCreateScriptContext(v8::Local<v8::Context> context, int extension_group, int world_id) override; int automation_id_; // routing id to be used by the next channel. DISALLOW_COPY_AND_ASSIGN(DomAutomationController); }; } // namespace content #endif // CONTENT_RENDERER_DOM_AUTOMATION_CONTROLLER_H_
ssaroha/node-webrtc
third_party/webrtc/include/chromium/src/content/renderer/dom_automation_controller.h
C
bsd-2-clause
4,863
#define DLONG #include <../Partition/cholmod_nesdis.c>
jokereactive/ORB_Android
suitesparse/SuiteSparse/CHOLMOD/SourceWrappers/cholmod_l_nesdis.o.c
C
bsd-2-clause
55
/* @flow */ import {join, relative, resolve} from 'path'; import {format} from 'util'; import {exec, exists, mkdirp, readFile, unlink, writeFile} from '../async'; import {getTestsDir, defaultFlowConfigName} from '../constants'; import type {Args} from './new-testCommand'; async function newTest(bin: string, suiteName: string): Promise<void> { function log(...args: any) { console.log("[%s]\t\t%s", suiteName, format(...args)); } const dest = join(getTestsDir(), suiteName); const alreadyExists = await exists(dest); if (alreadyExists) { log("There is already a test with that name. Skipping..."); return; } await mkdirp(dest); const testFile = join(dest, 'test.js'); const testerLoc = relative(dest, resolve(__dirname, "../test/Tester")); await writeFile( join(dest, 'test.js'), `/* @flow */ import {suite, test} from '${testerLoc}'; export default suite(({addFile, addFiles, addCode}) => [ test('TestName', [ ]), ]); ` ); await exec(format('%s init --options "all=true" %s', bin, dest)); // Rename .flowconfig to _flowconfig const config = await readFile(join(dest, ".flowconfig")); await Promise.all([ writeFile(join(dest, defaultFlowConfigName), config.toString()), unlink(join(dest, ".flowconfig")), ]); log( "Created test! To start editing open %s", relative(process.cwd(), resolve(__dirname, testFile)), ); } export default async function(args: Args): Promise<void> { await Promise.all( Array.from(args.names).map(newTest.bind(null, args.bin)) ); }
AprilArcus/flow
tsrc/new-test/new-testRunner.js
JavaScript
bsd-3-clause
1,555
using System; namespace Org.BouncyCastle.Asn1.Cms { public class AuthEnvelopedData : Asn1Encodable { private DerInteger version; private OriginatorInfo originatorInfo; private Asn1Set recipientInfos; private EncryptedContentInfo authEncryptedContentInfo; private Asn1Set authAttrs; private Asn1OctetString mac; private Asn1Set unauthAttrs; public AuthEnvelopedData( OriginatorInfo originatorInfo, Asn1Set recipientInfos, EncryptedContentInfo authEncryptedContentInfo, Asn1Set authAttrs, Asn1OctetString mac, Asn1Set unauthAttrs) { // "It MUST be set to 0." this.version = new DerInteger(0); this.originatorInfo = originatorInfo; // TODO // "There MUST be at least one element in the collection." this.recipientInfos = recipientInfos; this.authEncryptedContentInfo = authEncryptedContentInfo; // TODO // "The authAttrs MUST be present if the content type carried in // EncryptedContentInfo is not id-data." this.authAttrs = authAttrs; this.mac = mac; this.unauthAttrs = unauthAttrs; } private AuthEnvelopedData( Asn1Sequence seq) { int index = 0; // TODO // "It MUST be set to 0." Asn1Object tmp = seq[index++].ToAsn1Object(); version = (DerInteger)tmp; tmp = seq[index++].ToAsn1Object(); if (tmp is Asn1TaggedObject) { originatorInfo = OriginatorInfo.GetInstance((Asn1TaggedObject)tmp, false); tmp = seq[index++].ToAsn1Object(); } // TODO // "There MUST be at least one element in the collection." recipientInfos = Asn1Set.GetInstance(tmp); tmp = seq[index++].ToAsn1Object(); authEncryptedContentInfo = EncryptedContentInfo.GetInstance(tmp); tmp = seq[index++].ToAsn1Object(); if (tmp is Asn1TaggedObject) { authAttrs = Asn1Set.GetInstance((Asn1TaggedObject)tmp, false); tmp = seq[index++].ToAsn1Object(); } else { // TODO // "The authAttrs MUST be present if the content type carried in // EncryptedContentInfo is not id-data." 
} mac = Asn1OctetString.GetInstance(tmp); if (seq.Count > index) { tmp = seq[index++].ToAsn1Object(); unauthAttrs = Asn1Set.GetInstance((Asn1TaggedObject)tmp, false); } } /** * return an AuthEnvelopedData object from a tagged object. * * @param obj the tagged object holding the object we want. * @param isExplicit true if the object is meant to be explicitly * tagged false otherwise. * @throws ArgumentException if the object held by the * tagged object cannot be converted. */ public static AuthEnvelopedData GetInstance( Asn1TaggedObject obj, bool isExplicit) { return GetInstance(Asn1Sequence.GetInstance(obj, isExplicit)); } /** * return an AuthEnvelopedData object from the given object. * * @param obj the object we want converted. * @throws ArgumentException if the object cannot be converted. */ public static AuthEnvelopedData GetInstance( object obj) { if (obj == null || obj is AuthEnvelopedData) return (AuthEnvelopedData)obj; if (obj is Asn1Sequence) return new AuthEnvelopedData((Asn1Sequence)obj); throw new ArgumentException("Invalid AuthEnvelopedData: " + obj.GetType().Name); } public DerInteger Version { get { return version; } } public OriginatorInfo OriginatorInfo { get { return originatorInfo; } } public Asn1Set RecipientInfos { get { return recipientInfos; } } public EncryptedContentInfo AuthEncryptedContentInfo { get { return authEncryptedContentInfo; } } public Asn1Set AuthAttrs { get { return authAttrs; } } public Asn1OctetString Mac { get { return mac; } } public Asn1Set UnauthAttrs { get { return unauthAttrs; } } /** * Produce an object suitable for an Asn1OutputStream. 
* <pre> * AuthEnvelopedData ::= SEQUENCE { * version CMSVersion, * originatorInfo [0] IMPLICIT OriginatorInfo OPTIONAL, * recipientInfos RecipientInfos, * authEncryptedContentInfo EncryptedContentInfo, * authAttrs [1] IMPLICIT AuthAttributes OPTIONAL, * mac MessageAuthenticationCode, * unauthAttrs [2] IMPLICIT UnauthAttributes OPTIONAL } * </pre> */ public override Asn1Object ToAsn1Object() { Asn1EncodableVector v = new Asn1EncodableVector(version); if (originatorInfo != null) { v.Add(new DerTaggedObject(false, 0, originatorInfo)); } v.Add(recipientInfos, authEncryptedContentInfo); // "authAttrs optionally contains the authenticated attributes." if (authAttrs != null) { // "AuthAttributes MUST be DER encoded, even if the rest of the // AuthEnvelopedData structure is BER encoded." v.Add(new DerTaggedObject(false, 1, authAttrs)); } v.Add(mac); // "unauthAttrs optionally contains the unauthenticated attributes." if (unauthAttrs != null) { v.Add(new DerTaggedObject(false, 2, unauthAttrs)); } return new BerSequence(v); } } }
GaloisInc/hacrypto
src/C#/BouncyCastle/BouncyCastle-1.7/crypto/src/asn1/cms/AuthEnvelopedData.cs
C#
bsd-3-clause
5,282
#include <boost/make_shared.hpp> #include "aslamcv_helper.hpp" namespace aslam { template<typename DISTORTION_T, typename MASK_T> PinholeUndistorter<DISTORTION_T, MASK_T>::PinholeUndistorter() { _idealGeometry.reset(); } template<typename DISTORTION_T, typename MASK_T> PinholeUndistorter<DISTORTION_T, MASK_T>::PinholeUndistorter( const sm::PropertyTree& undistorterConfig, const sm::PropertyTree& cameraConfig) { boost::shared_ptr<distorted_geometry_t> distortedGeometry( new distorted_geometry_t(cameraConfig)); int interpolation = undistorterConfig.getInt("interpolationType", cv::INTER_NEAREST); double alpha = undistorterConfig.getDouble("alpha"); double scale = undistorterConfig.getDouble("scale"); init(distortedGeometry, interpolation, alpha, scale); } template<typename DISTORTION_T, typename MASK_T> PinholeUndistorter<DISTORTION_T, MASK_T>::PinholeUndistorter( const sm::PropertyTree& cameraConfig, int interpolation, double alpha, double scale) { boost::shared_ptr<distorted_geometry_t> distortedGeometry( new distorted_geometry_t(cameraConfig)); init(distortedGeometry, interpolation, alpha, scale); } template<typename DISTORTION_T, typename MASK_T> PinholeUndistorter<DISTORTION_T, MASK_T>::PinholeUndistorter( boost::shared_ptr<distorted_geometry_t> distortedGeometry, int interpolation, double alpha, double scale) { init(distortedGeometry, interpolation, alpha, scale); } template<typename DISTORTION_T, typename MASK_T> PinholeUndistorter<DISTORTION_T, MASK_T>::PinholeUndistorter( boost::shared_ptr<distorted_geometry_t> distortedGeometry, const sm::PropertyTree & config) { int interpolation = config.getInt("interpolationType", cv::INTER_NEAREST); double alpha = config.getDouble("alpha"); double scale = config.getDouble("scale"); init(distortedGeometry, interpolation, alpha, scale); } template<typename DISTORTION_T, typename MASK_T> PinholeUndistorter<DISTORTION_T, MASK_T>::~PinholeUndistorter() { } template<typename DISTORTION_T, typename MASK_T> void 
PinholeUndistorter<DISTORTION_T, MASK_T>::init( boost::shared_ptr<distorted_geometry_t> distortedGeometry, int interpolation, double alpha, double scale) { _interpolation = interpolation; setIdealGeometry(distortedGeometry, alpha, scale); } template<typename DISTORTION_T, typename MASK_T> void PinholeUndistorter<DISTORTION_T, MASK_T>::constructUndistortedFrame( const cv::Mat & image, frame_t & outFrame) const { SM_ASSERT_TRUE(std::runtime_error, _idealGeometry, "Camera has not yet been set.") cv::Mat undistImage; undistortImage(image, undistImage); outFrame.setImage(undistImage); outFrame.setGeometry(_idealGeometry); } template<typename DISTORTION_T, typename MASK_T> void PinholeUndistorter<DISTORTION_T, MASK_T>::undistortImage(const cv::Mat & inImage, cv::Mat & outImage) const { // see: http://opencv.willowgarage.com/documentation/cpp/imgproc_geometric_image_transformations.html#remap cv::remap(inImage, outImage, mapX, mapY, _interpolation); } template<typename DISTORTION_T, typename MASK_T> boost::shared_ptr<typename PinholeUndistorter<DISTORTION_T, MASK_T>::ideal_geometry_t> PinholeUndistorter< DISTORTION_T, MASK_T>::idealGeometry() const { return _idealGeometry; } template<typename DISTORTION_T, typename MASK_T> boost::shared_ptr<typename PinholeUndistorter<DISTORTION_T, MASK_T>::distorted_geometry_t> PinholeUndistorter< DISTORTION_T, MASK_T>::distortedGeometry() const { return _distortedGeometry; } template<typename DISTORTION_T, typename MASK_T> void PinholeUndistorter<DISTORTION_T, MASK_T>::setIdealGeometry( boost::shared_ptr<distorted_geometry_t> distortedGeometry, double alpha, double scale) { //set distorted geometry _distortedGeometry = distortedGeometry; int width = distortedGeometry->projection().ru(); int height = distortedGeometry->projection().rv(); int newWidth = (int) width * scale; int newHeight = (int) height * scale; // compute the optimal new camera matrix based on the free scaling parameter // see: 
http://opencv.willowgarage.com/documentation/cpp/calib3d_camera_calibration_and_3d_reconstruction.html#getOptimalNewCameraMatrix Eigen::Matrix3d idealCameraMatrix = Eigen::Matrix3d::Zero(); idealCameraMatrix = aslamcv_helper::getOptimalNewCameraMatrix<distorted_geometry_t>( _distortedGeometry, cv::Size(width, height), alpha, cv::Size(newWidth, newHeight)); // create idealProjection typename ideal_geometry_t::projection_t idealProjection( idealCameraMatrix(0, 0), idealCameraMatrix(1, 1), idealCameraMatrix(0, 2), idealCameraMatrix(1, 2), newWidth, newHeight, typename ideal_geometry_t::projection_t::distortion_t() ); // set new idealGeometry _idealGeometry = boost::make_shared<ideal_geometry_t>(idealProjection, _distortedGeometry->shutter(), _distortedGeometry->mask() ); // compute the undistortion and rectification transformation map // see: http://opencv.willowgarage.com/documentation/cpp/calib3d_camera_calibration_and_3d_reconstruction.html#initUndistortRectifyMap aslamcv_helper::initUndistortRectifyMap<distorted_geometry_t>(_distortedGeometry, Eigen::Matrix3d::Identity(), idealCameraMatrix, cv::Size(newWidth, newHeight), CV_16SC2, mapX, mapY); } template<typename DISTORTION_T, typename MASK_T> boost::shared_ptr<FrameBase> PinholeUndistorter<DISTORTION_T, MASK_T>::buildFrame( cv::Mat & inImage) { boost::shared_ptr < frame_t > frame(new frame_t()); constructUndistortedFrame(inImage, *frame); return frame; } } // namespace aslam
udrg/kalibr
aslam_cv/aslam_imgproc/include/aslam/implementation/PinholeUndistorter.hpp
C++
bsd-3-clause
6,157
<?php /** * This file is part of PHP Mess Detector. * * Copyright (c) 2008-2012, Manuel Pichler <mapi@phpmd.org>. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * * Neither the name of Manuel Pichler nor the names of his * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. * * @author Manuel Pichler <mapi@phpmd.org> * @copyright 2008-2014 Manuel Pichler. All rights reserved. * @license http://www.opensource.org/licenses/bsd-license.php BSD License */ namespace PHPMD\Stubs; use PHPMD\AbstractWriter; /** * Simple test implementation of PHPMD's writer. * * @author Manuel Pichler <mapi@phpmd.org> * @copyright 2008-2014 Manuel Pichler. All rights reserved. 
* @license http://www.opensource.org/licenses/bsd-license.php BSD License */ class WriterStub extends AbstractWriter { /** * The written data chunks. * * @var array */ public $chunks = array(); /** * Writes a data string to the concrete output. * * @param string $data The data to write. * * @return void */ public function write($data) { $this->chunks[] = $data; } /** * Returns a concated string of all data chunks. * * @return string */ public function getData() { return join('', $this->chunks); } /** * Returns the written data chunks. * * @return array */ public function getChunks() { return $this->chunks; } }
BernardoSilva/phpmd
src/test/php/PHPMD/Stubs/WriterStub.php
PHP
bsd-3-clause
2,873
// (C) Copyright John Maddock 2001 - 2003. // (C) Copyright Jens Maurer 2001. // (C) Copyright Peter Dimov 2001. // (C) Copyright David Abrahams 2002. // (C) Copyright Guillaume Melquiond 2003. // Use, modification and distribution are subject to the // Boost Software License, Version 1.0. (See accompanying file // LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) // See http://www.boost.org for most recent version. // Dinkumware standard library config: #if !defined(_YVALS) && !defined(_CPPLIB_VER) #include <boost/config/no_tr1/utility.hpp> #if !defined(_YVALS) && !defined(_CPPLIB_VER) #error This is not the Dinkumware lib! #endif #endif #if defined(_CPPLIB_VER) && (_CPPLIB_VER >= 306) // full dinkumware 3.06 and above // fully conforming provided the compiler supports it: # if !(defined(_GLOBAL_USING) && (_GLOBAL_USING+0 > 0)) && !defined(__BORLANDC__) && !defined(_STD) && !(defined(__ICC) && (__ICC >= 700)) // can be defined in yvals.h # define BOOST_NO_STDC_NAMESPACE # endif # if !(defined(_HAS_MEMBER_TEMPLATES_REBIND) && (_HAS_MEMBER_TEMPLATES_REBIND+0 > 0)) && !(defined(_MSC_VER) && (_MSC_VER > 1300)) && defined(BOOST_MSVC) # define BOOST_NO_STD_ALLOCATOR # endif # define BOOST_HAS_PARTIAL_STD_ALLOCATOR # if defined(BOOST_MSVC) && (BOOST_MSVC < 1300) // if this lib version is set up for vc6 then there is no std::use_facet: # define BOOST_NO_STD_USE_FACET # define BOOST_HAS_TWO_ARG_USE_FACET // C lib functions aren't in namespace std either: # define BOOST_NO_STDC_NAMESPACE // and nor is <exception> # define BOOST_NO_EXCEPTION_STD_NAMESPACE # endif // There's no numeric_limits<long long> support unless _LONGLONG is defined: # if !defined(_LONGLONG) && (_CPPLIB_VER <= 310) # define BOOST_NO_MS_INT64_NUMERIC_LIMITS # endif // 3.06 appears to have (non-sgi versions of) <hash_set> & <hash_map>, // and no <slist> at all #else # define BOOST_MSVC_STD_ITERATOR 1 # define BOOST_NO_STD_ITERATOR # define BOOST_NO_TEMPLATED_ITERATOR_CONSTRUCTORS # define 
BOOST_NO_STD_ALLOCATOR # define BOOST_NO_STDC_NAMESPACE # define BOOST_NO_STD_USE_FACET # define BOOST_NO_STD_OUTPUT_ITERATOR_ASSIGN # define BOOST_HAS_MACRO_USE_FACET # ifndef _CPPLIB_VER // Updated Dinkum library defines this, and provides // its own min and max definitions, as does MTA version. # ifndef __MTA__ # define BOOST_NO_STD_MIN_MAX # endif # define BOOST_NO_MS_INT64_NUMERIC_LIMITS # endif #endif // // std extension namespace is stdext for vc7.1 and later, // the same applies to other compilers that sit on top // of vc7.1 (Intel and Comeau): // #if defined(_MSC_VER) && (_MSC_VER >= 1310) && !defined(__BORLANDC__) # define BOOST_STD_EXTENSION_NAMESPACE stdext #endif #if (defined(_MSC_VER) && (_MSC_VER <= 1300) && !defined(__BORLANDC__)) || !defined(_CPPLIB_VER) || (_CPPLIB_VER < 306) // if we're using a dinkum lib that's // been configured for VC6/7 then there is // no iterator traits (true even for icl) # define BOOST_NO_STD_ITERATOR_TRAITS #endif #if defined(__ICL) && (__ICL < 800) && defined(_CPPLIB_VER) && (_CPPLIB_VER <= 310) // Intel C++ chokes over any non-trivial use of <locale> // this may be an overly restrictive define, but regex fails without it: # define BOOST_NO_STD_LOCALE #endif // Fix for VC++ 8.0 on up ( I do not have a previous version to test ) // or clang-cl. If exceptions are off you must manually include the // <exception> header before including the <typeinfo> header. Admittedly // trying to use Boost libraries or the standard C++ libraries without // exception support is not suggested but currently clang-cl ( v 3.4 ) // does not support exceptions and must be compiled with exceptions off. 
#if !_HAS_EXCEPTIONS && ((defined(BOOST_MSVC) && BOOST_MSVC >= 1400) || (defined(__clang__) && defined(_MSC_VER))) #include <exception> #endif #include <typeinfo> #if ( (!_HAS_EXCEPTIONS && !defined(__ghs__)) || (defined(__ghs__) && !_HAS_NAMESPACE) ) && !defined(__TI_COMPILER_VERSION__) && !defined(__VISUALDSPVERSION__) \ && !defined(__VXWORKS__) # define BOOST_NO_STD_TYPEINFO #endif // C++0x headers implemented in 520 (as shipped by Microsoft) // #if !defined(_CPPLIB_VER) || _CPPLIB_VER < 520 # define BOOST_NO_CXX11_HDR_ARRAY # define BOOST_NO_CXX11_HDR_CODECVT # define BOOST_NO_CXX11_HDR_FORWARD_LIST # define BOOST_NO_CXX11_HDR_INITIALIZER_LIST # define BOOST_NO_CXX11_HDR_RANDOM # define BOOST_NO_CXX11_HDR_REGEX # define BOOST_NO_CXX11_HDR_SYSTEM_ERROR # define BOOST_NO_CXX11_HDR_UNORDERED_MAP # define BOOST_NO_CXX11_HDR_UNORDERED_SET # define BOOST_NO_CXX11_HDR_TUPLE # define BOOST_NO_CXX11_HDR_TYPEINDEX # define BOOST_NO_CXX11_HDR_FUNCTIONAL # define BOOST_NO_CXX11_NUMERIC_LIMITS # define BOOST_NO_CXX11_SMART_PTR #endif #if ((!defined(_HAS_TR1_IMPORTS) || (_HAS_TR1_IMPORTS+0 == 0)) && !defined(BOOST_NO_CXX11_HDR_TUPLE)) \ && (!defined(_CPPLIB_VER) || _CPPLIB_VER < 610) # define BOOST_NO_CXX11_HDR_TUPLE #endif // C++0x headers implemented in 540 (as shipped by Microsoft) // #if !defined(_CPPLIB_VER) || _CPPLIB_VER < 540 # define BOOST_NO_CXX11_HDR_TYPE_TRAITS # define BOOST_NO_CXX11_HDR_CHRONO # define BOOST_NO_CXX11_HDR_CONDITION_VARIABLE # define BOOST_NO_CXX11_HDR_FUTURE # define BOOST_NO_CXX11_HDR_MUTEX # define BOOST_NO_CXX11_HDR_RATIO # define BOOST_NO_CXX11_HDR_THREAD # define BOOST_NO_CXX11_ATOMIC_SMART_PTR # define BOOST_NO_CXX11_HDR_EXCEPTION #endif // C++0x headers implemented in 610 (as shipped by Microsoft) // #if !defined(_CPPLIB_VER) || _CPPLIB_VER < 610 # define BOOST_NO_CXX11_HDR_INITIALIZER_LIST # define BOOST_NO_CXX11_HDR_ATOMIC # define BOOST_NO_CXX11_ALLOCATOR // 540 has std::align but it is not a conforming implementation # define 
BOOST_NO_CXX11_STD_ALIGN #endif // Before 650 std::pointer_traits has a broken rebind template #if !defined(_CPPLIB_VER) || _CPPLIB_VER < 650 # define BOOST_NO_CXX11_POINTER_TRAITS #elif defined(BOOST_MSVC) && BOOST_MSVC < 1910 # define BOOST_NO_CXX11_POINTER_TRAITS #endif #if defined(__has_include) #if !__has_include(<shared_mutex>) # define BOOST_NO_CXX14_HDR_SHARED_MUTEX #elif (__cplusplus < 201402) && !defined(_MSC_VER) # define BOOST_NO_CXX14_HDR_SHARED_MUTEX #endif #elif !defined(_CPPLIB_VER) || (_CPPLIB_VER < 650) # define BOOST_NO_CXX14_HDR_SHARED_MUTEX #endif // C++14 features #if !defined(_CPPLIB_VER) || (_CPPLIB_VER < 650) # define BOOST_NO_CXX14_STD_EXCHANGE #endif // C++17 features #if !defined(_CPPLIB_VER) || (_CPPLIB_VER < 650) || !defined(BOOST_MSVC) || (BOOST_MSVC < 1910) || !defined(_HAS_CXX17) || (_HAS_CXX17 == 0) # define BOOST_NO_CXX17_STD_APPLY # define BOOST_NO_CXX17_ITERATOR_TRAITS # define BOOST_NO_CXX17_HDR_STRING_VIEW # define BOOST_NO_CXX17_HDR_OPTIONAL # define BOOST_NO_CXX17_HDR_VARIANT #endif #if !defined(_CPPLIB_VER) || (_CPPLIB_VER < 650) || !defined(_HAS_CXX17) || (_HAS_CXX17 == 0) || !defined(_MSVC_STL_UPDATE) || (_MSVC_STL_UPDATE < 201709) # define BOOST_NO_CXX17_STD_INVOKE #endif #if !(!defined(_CPPLIB_VER) || (_CPPLIB_VER < 650) || !defined(BOOST_MSVC) || (BOOST_MSVC < 1912) || !defined(_HAS_CXX17) || (_HAS_CXX17 == 0)) // Deprecated std::iterator: # define BOOST_NO_STD_ITERATOR #endif #if defined(BOOST_INTEL) && (BOOST_INTEL <= 1400) // Intel's compiler can't handle this header yet: # define BOOST_NO_CXX11_HDR_ATOMIC #endif // 520..610 have std::addressof, but it doesn't support functions // #if !defined(_CPPLIB_VER) || _CPPLIB_VER < 650 # define BOOST_NO_CXX11_ADDRESSOF #endif // Bug specific to VC14, // See https://connect.microsoft.com/VisualStudio/feedback/details/1348277/link-error-when-using-std-codecvt-utf8-utf16-char16-t // and discussion here: 
http://blogs.msdn.com/b/vcblog/archive/2014/11/12/visual-studio-2015-preview-now-available.aspx?PageIndex=2 #if defined(_CPPLIB_VER) && (_CPPLIB_VER == 650) # define BOOST_NO_CXX11_HDR_CODECVT #endif #if defined(_CPPLIB_VER) && (_CPPLIB_VER >= 650) // If _HAS_AUTO_PTR_ETC is defined to 0, std::auto_ptr and std::random_shuffle are not available. // See https://www.visualstudio.com/en-us/news/vs2015-vs.aspx#C++ // and http://blogs.msdn.com/b/vcblog/archive/2015/06/19/c-11-14-17-features-in-vs-2015-rtm.aspx # if defined(_HAS_AUTO_PTR_ETC) && (_HAS_AUTO_PTR_ETC == 0) # define BOOST_NO_AUTO_PTR # define BOOST_NO_CXX98_RANDOM_SHUFFLE # define BOOST_NO_CXX98_FUNCTION_BASE # define BOOST_NO_CXX98_BINDERS # endif #endif // // Things not supported by the CLR: #ifdef _M_CEE #ifndef BOOST_NO_CXX11_HDR_MUTEX # define BOOST_NO_CXX11_HDR_MUTEX #endif #ifndef BOOST_NO_CXX11_HDR_ATOMIC # define BOOST_NO_CXX11_HDR_ATOMIC #endif #ifndef BOOST_NO_CXX11_HDR_FUTURE # define BOOST_NO_CXX11_HDR_FUTURE #endif #ifndef BOOST_NO_CXX11_HDR_CONDITION_VARIABLE # define BOOST_NO_CXX11_HDR_CONDITION_VARIABLE #endif #ifndef BOOST_NO_CXX11_HDR_THREAD # define BOOST_NO_CXX11_HDR_THREAD #endif #ifndef BOOST_NO_CXX14_HDR_SHARED_MUTEX # define BOOST_NO_CXX14_HDR_SHARED_MUTEX #endif #ifndef BOOST_NO_CXX14_STD_EXCHANGE # define BOOST_NO_CXX14_STD_EXCHANGE #endif #ifndef BOOST_NO_FENV_H # define BOOST_NO_FENV_H #endif #endif #ifdef _CPPLIB_VER # define BOOST_DINKUMWARE_STDLIB _CPPLIB_VER #else # define BOOST_DINKUMWARE_STDLIB 1 #endif #ifdef _CPPLIB_VER # define BOOST_STDLIB "Dinkumware standard library version " BOOST_STRINGIZE(_CPPLIB_VER) #else # define BOOST_STDLIB "Dinkumware standard library version 1.x" #endif
pombredanne/pythran
third_party/boost/config/stdlib/dinkumware.hpp
C++
bsd-3-clause
9,511
package org.hisp.dhis.analytics.table; /* * Copyright (c) 2004-2017, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ import com.google.common.collect.Lists; import com.google.common.collect.Sets; import org.hisp.dhis.analytics.AnalyticsTable; import org.hisp.dhis.analytics.AnalyticsTableColumn; import org.hisp.dhis.dataelement.CategoryOptionGroupSet; import org.hisp.dhis.dataelement.DataElementCategory; import org.hisp.dhis.organisationunit.OrganisationUnitGroupSet; import org.hisp.dhis.organisationunit.OrganisationUnitLevel; import org.hisp.dhis.period.PeriodType; import org.hisp.dhis.system.util.DateUtils; import org.springframework.scheduling.annotation.Async; import org.springframework.transaction.annotation.Transactional; import java.util.*; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.Future; /** * @author Lars Helge Overland */ public class JdbcCompletenessTableManager extends AbstractJdbcTableManager { @Override public AnalyticsTableType getAnalyticsTableType() { return AnalyticsTableType.COMPLETENESS; } @Override @Transactional public List<AnalyticsTable> getTables( Date earliest ) { log.info( "Get tables using earliest: " + earliest ); return getTables( getDataYears( earliest ) ); } @Override public Set<String> getExistingDatabaseTables() { return Sets.newHashSet( getTableName() ); } @Override public String validState() { boolean hasData = jdbcTemplate.queryForRowSet( "select datasetid from completedatasetregistration limit 1" ).next(); if ( !hasData ) { return "No complete registrations exist, not updating completeness analytics tables"; } return null; } @Override public void createTable( AnalyticsTable table ) { final String tableName = table.getTempTableName(); final String sqlDrop = "drop table " + tableName; executeSilently( sqlDrop ); String sqlCreate = "create table " + tableName + " ("; List<AnalyticsTableColumn> columns = getDimensionColumns( table ); validateDimensionColumns( columns ); for ( AnalyticsTableColumn col : columns ) { sqlCreate += col.getName() + " " + col.getDataType() + ","; } sqlCreate += "value date)"; 
log.info( "Creating table: " + tableName + ", columns: " + columns.size() ); log.debug( "Create SQL: " + sqlCreate ); jdbcTemplate.execute( sqlCreate ); } @Override protected void populateTable( AnalyticsTable table ) { final String start = DateUtils.getMediumDateString( table.getPeriod().getStartDate() ); final String end = DateUtils.getMediumDateString( table.getPeriod().getEndDate() ); final String tableName = table.getTempTableName(); String insert = "insert into " + table.getTempTableName() + " ("; List<AnalyticsTableColumn> columns = getDimensionColumns( table ); validateDimensionColumns( columns ); for ( AnalyticsTableColumn col : columns ) { insert += col.getName() + ","; } insert += "value) "; String select = "select "; for ( AnalyticsTableColumn col : columns ) { select += col.getAlias() + ","; } select = select.replace( "organisationunitid", "sourceid" ); // Legacy fix select += "cdr.date as value " + "from completedatasetregistration cdr " + "inner join dataset ds on cdr.datasetid=ds.datasetid " + "inner join _organisationunitgroupsetstructure ougs on cdr.sourceid=ougs.organisationunitid " + "left join _orgunitstructure ous on cdr.sourceid=ous.organisationunitid " + "inner join _categorystructure acs on cdr.attributeoptioncomboid=acs.categoryoptioncomboid " + "inner join period pe on cdr.periodid=pe.periodid " + "inner join _periodstructure ps on cdr.periodid=ps.periodid " + "where pe.startdate >= '" + start + "' " + "and pe.startdate <= '" + end + "' " + "and cdr.date is not null"; final String sql = insert + select; populateAndLog( sql, tableName ); } @Override public List<AnalyticsTableColumn> getDimensionColumns( AnalyticsTable table ) { List<AnalyticsTableColumn> columns = new ArrayList<>(); List<OrganisationUnitGroupSet> orgUnitGroupSets = idObjectManager.getDataDimensionsNoAcl( OrganisationUnitGroupSet.class ); List<OrganisationUnitLevel> levels = organisationUnitService.getFilledOrganisationUnitLevels(); List<CategoryOptionGroupSet> 
attributeCategoryOptionGroupSets = categoryService.getAttributeCategoryOptionGroupSetsNoAcl(); List<DataElementCategory> attributeCategories = categoryService.getAttributeDataDimensionCategoriesNoAcl(); for ( OrganisationUnitGroupSet groupSet : orgUnitGroupSets ) { columns.add( new AnalyticsTableColumn( quote( groupSet.getUid() ), "character(11)", "ougs." + quote( groupSet.getUid() ), groupSet.getCreated() ) ); } for ( OrganisationUnitLevel level : levels ) { String column = quote( PREFIX_ORGUNITLEVEL + level.getLevel() ); columns.add( new AnalyticsTableColumn( column, "character(11)", "ous." + column, level.getCreated() ) ); } for ( CategoryOptionGroupSet groupSet : attributeCategoryOptionGroupSets ) { columns.add( new AnalyticsTableColumn( quote( groupSet.getUid() ), "character(11)", "acs." + quote( groupSet.getUid() ), groupSet.getCreated() ) ); } for ( DataElementCategory category : attributeCategories ) { columns.add( new AnalyticsTableColumn( quote( category.getUid() ), "character(11)", "acs." + quote( category.getUid() ), category.getCreated() ) ); } for ( PeriodType periodType : PeriodType.getAvailablePeriodTypes() ) { String column = quote( periodType.getName().toLowerCase() ); columns.add( new AnalyticsTableColumn( column, "character varying(15)", "ps." 
+ column ) ); } String timelyDateDiff = statementBuilder.getDaysBetweenDates( "pe.enddate", statementBuilder.getCastToDate( "cdr.date" ) ); String timelyAlias = "(select (" + timelyDateDiff + ") <= ds.timelydays) as timely"; AnalyticsTableColumn tm = new AnalyticsTableColumn( quote( "timely" ), "boolean", timelyAlias ); AnalyticsTableColumn ds = new AnalyticsTableColumn( quote( "dx" ), "character(11) not null", "ds.uid" ); columns.addAll( Lists.newArrayList( ds, tm ) ); return filterDimensionColumns( columns ); } private List<Integer> getDataYears( Date earliest ) { String sql = "select distinct(extract(year from pe.startdate)) " + "from completedatasetregistration cdr " + "inner join period pe on cdr.periodid=pe.periodid " + "where pe.startdate is not null "; if ( earliest != null ) { sql += "and pe.startdate >= '" + DateUtils.getMediumDateString( earliest ) + "'"; } return jdbcTemplate.queryForList( sql, Integer.class ); } @Override @Async public Future<?> applyAggregationLevels( ConcurrentLinkedQueue<AnalyticsTable> tables, Collection<String> dataElements, int aggregationLevel ) { return null; // Not relevant } @Override @Async public Future<?> vacuumTablesAsync( ConcurrentLinkedQueue<AnalyticsTable> tables ) { return null; // Not needed } }
troyel/dhis2-core
dhis-2/dhis-services/dhis-service-analytics/src/main/java/org/hisp/dhis/analytics/table/JdbcCompletenessTableManager.java
Java
bsd-3-clause
9,545
### # Copyright (c) 2015, Upnext Technologies Sp. z o.o. # All rights reserved. # # This source code is licensed under the BSD 3-Clause License found in the # LICENSE.txt file in the root directory of this source tree. ### module BeaconControl module PresenceExtension class ZoneLocationStorage # # Returns list of all users being currently in given zones. # # ==== Parameters # # * +zone_ids+ - Array, list of Zone IDs for which users should be returned. # def self.users_in_zones(zone_ids) StorageFinder.new(zone_ids).find_for(ZonePresence, :zone_id, :client_id) end def initialize(event) self.zone_id = event.zone_id self.client_id = event.client_id self.timestamp = Time.at(event.timestamp).utc end # # Creates/updates zone presence record in database for given user with enter information # and enter occurrence timestamp. # def enter BeaconPresence.transaction do ZonePresence.present.for_user(client_id).update_all(present: false) zp = presence.first_or_create(present: false) if zp.valid_timestamp_for_enter?(timestamp) zp.update_attributes(timestamp: timestamp, present: true) end end end # # Updates zone presence record in database with leave information and leave occurrence timestamp. # def leave BeaconPresence.transaction do zp = presence.first_or_create(present: false) if zp.valid_timestamp_for_leave?(timestamp) zp.update_attributes(timestamp: timestamp, present: false) end end end private attr_accessor :zone_id, :client_id, :timestamp def presence ZonePresence.for_user_and_zone(client_id, zone_id) end end end end
ElanII/BeaconControl-master
vendor/gems/beacon_control-presence_extension/app/storages/beacon_control/presence_extension/zone_location_storage.rb
Ruby
bsd-3-clause
1,881
/** * @file kmeans.hpp * @author Parikshit Ram (pram@cc.gatech.edu) * * K-Means clustering. */ #ifndef MLPACK_METHODS_KMEANS_KMEANS_HPP #define MLPACK_METHODS_KMEANS_KMEANS_HPP #include <mlpack/core.hpp> #include <mlpack/core/metrics/lmetric.hpp> #include "sample_initialization.hpp" #include "max_variance_new_cluster.hpp" #include "naive_kmeans.hpp" #include <mlpack/core/tree/binary_space_tree.hpp> namespace mlpack { namespace kmeans /** K-Means clustering. */ { /** * This class implements K-Means clustering, using a variety of possible * implementations of Lloyd's algorithm. * * Four template parameters can (optionally) be supplied: the distance metric to * use, the policy for how to find the initial partition of the data, the * actions to be taken when an empty cluster is encountered, and the * implementation of a single Lloyd step to use. * * A simple example of how to run K-Means clustering is shown below. * * @code * extern arma::mat data; // Dataset we want to run K-Means on. * arma::Row<size_t> assignments; // Cluster assignments. * arma::mat centroids; // Cluster centroids. * * KMeans<> k; // Default options. * k.Cluster(data, 3, assignments, centroids); // 3 clusters. * * // Cluster using the Manhattan distance, 100 iterations maximum, saving only * // the centroids. * KMeans<metric::ManhattanDistance> k(100); * k.Cluster(data, 6, centroids); // 6 clusters. * @endcode * * @tparam MetricType The distance metric to use for this KMeans; see * metric::LMetric for an example. * @tparam InitialPartitionPolicy Initial partitioning policy; must implement a * default constructor and either 'void Cluster(const arma::mat&, const * size_t, arma::Row<size_t>&)' or 'void Cluster(const arma::mat&, const * size_t, arma::mat&)'. 
 * @tparam EmptyClusterPolicy Policy for what to do on an empty cluster; must
 *     implement a default constructor and 'void EmptyCluster(const arma::mat&
 *     data, const size_t emptyCluster, const arma::mat& oldCentroids,
 *     arma::mat& newCentroids, arma::Col<size_t>& counts, MetricType& metric,
 *     const size_t iteration)'.
 * @tparam LloydStepType Implementation of single Lloyd step to use.
 *
 * @see RandomPartition, SampleInitialization, RefinedStart, AllowEmptyClusters,
 *     MaxVarianceNewCluster, NaiveKMeans, ElkanKMeans
 */
template<typename MetricType = metric::EuclideanDistance,
         typename InitialPartitionPolicy = SampleInitialization,
         typename EmptyClusterPolicy = MaxVarianceNewCluster,
         template<class, class> class LloydStepType = NaiveKMeans,
         typename MatType = arma::mat>
class KMeans
{
 public:
  /**
   * Create a K-Means object and (optionally) set the parameters which K-Means
   * will be run with.
   *
   * Note: the policy objects are taken by value here, so each is copied into
   * the KMeans object; use the Metric()/Partitioner()/EmptyClusterAction()
   * accessors below to modify the stored instances afterwards.
   *
   * @param maxIterations Maximum number of iterations allowed before giving up
   *     (0 is valid, but the algorithm may never terminate).
   * @param metric Optional MetricType object; for when the metric has state
   *     it needs to store.
   * @param partitioner Optional InitialPartitionPolicy object; for when a
   *     specially initialized partitioning policy is required.
   * @param emptyClusterAction Optional EmptyClusterPolicy object; for when a
   *     specially initialized empty cluster policy is required.
   */
  KMeans(const size_t maxIterations = 1000,
         const MetricType metric = MetricType(),
         const InitialPartitionPolicy partitioner = InitialPartitionPolicy(),
         const EmptyClusterPolicy emptyClusterAction = EmptyClusterPolicy());

  /**
   * Perform k-means clustering on the data, returning a list of cluster
   * assignments.  Optionally, the vector of assignments can be set to an
   * initial guess of the cluster assignments; to do this, set initialGuess to
   * true.
   *
   * @tparam MatType Type of matrix (arma::mat or arma::sp_mat).
   * @param data Dataset to cluster.
   * @param clusters Number of clusters to compute.
   * @param assignments Vector to store cluster assignments in.
   * @param initialGuess If true, then it is assumed that assignments has a list
   *     of initial cluster assignments.
   */
  void Cluster(const MatType& data,
               const size_t clusters,
               arma::Row<size_t>& assignments,
               const bool initialGuess = false);

  /**
   * Perform k-means clustering on the data, returning the centroids of each
   * cluster in the centroids matrix.  Optionally, the initial centroids can be
   * specified by filling the centroids matrix with the initial centroids and
   * specifying initialGuess = true.
   *
   * @tparam MatType Type of matrix (arma::mat or arma::sp_mat).
   * @param data Dataset to cluster.
   * @param clusters Number of clusters to compute.
   * @param centroids Matrix in which centroids are stored.
   * @param initialGuess If true, then it is assumed that centroids contains the
   *     initial cluster centroids.
   */
  void Cluster(const MatType& data,
               const size_t clusters,
               arma::mat& centroids,
               const bool initialGuess = false);

  /**
   * Perform k-means clustering on the data, returning a list of cluster
   * assignments and also the centroids of each cluster.  Optionally, the vector
   * of assignments can be set to an initial guess of the cluster assignments;
   * to do this, set initialAssignmentGuess to true.  Another way to set initial
   * cluster guesses is to fill the centroids matrix with the centroid guesses,
   * and then set initialCentroidGuess to true.  initialAssignmentGuess
   * supersedes initialCentroidGuess, so if both are set to true, the
   * assignments vector is used.
   *
   * @tparam MatType Type of matrix (arma::mat or arma::sp_mat).
   * @param data Dataset to cluster.
   * @param clusters Number of clusters to compute.
   * @param assignments Vector to store cluster assignments in.
   * @param centroids Matrix in which centroids are stored.
   * @param initialAssignmentGuess If true, then it is assumed that assignments
   *     has a list of initial cluster assignments.
   * @param initialCentroidGuess If true, then it is assumed that centroids
   *     contains the initial centroids of each cluster.
   */
  void Cluster(const MatType& data,
               const size_t clusters,
               arma::Row<size_t>& assignments,
               arma::mat& centroids,
               const bool initialAssignmentGuess = false,
               const bool initialCentroidGuess = false);

  //! Get the maximum number of iterations.
  size_t MaxIterations() const { return maxIterations; }
  //! Set the maximum number of iterations.
  size_t& MaxIterations() { return maxIterations; }

  //! Get the distance metric.
  const MetricType& Metric() const { return metric; }
  //! Modify the distance metric.
  MetricType& Metric() { return metric; }

  //! Get the initial partitioning policy.
  const InitialPartitionPolicy& Partitioner() const { return partitioner; }
  //! Modify the initial partitioning policy.
  InitialPartitionPolicy& Partitioner() { return partitioner; }

  //! Get the empty cluster policy.
  const EmptyClusterPolicy& EmptyClusterAction() const
  { return emptyClusterAction; }
  //! Modify the empty cluster policy.
  EmptyClusterPolicy& EmptyClusterAction() { return emptyClusterAction; }

  //! Serialize the k-means object.  Implementation is in kmeans_impl.hpp.
  template<typename Archive>
  void Serialize(Archive& ar, const unsigned int version);

 private:
  //! Maximum number of iterations before giving up.
  size_t maxIterations;
  //! Instantiated distance metric.
  MetricType metric;
  //! Instantiated initial partitioning policy.
  InitialPartitionPolicy partitioner;
  //! Instantiated empty cluster policy.
  EmptyClusterPolicy emptyClusterAction;
};

} // namespace kmeans
} // namespace mlpack

// Include implementation.
#include "kmeans_impl.hpp"

#endif // MLPACK_METHODS_KMEANS_KMEANS_HPP
darcyliu/mlpack
src/mlpack/methods/kmeans/kmeans.hpp
C++
bsd-3-clause
7,939
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!--NewPage--> <HTML> <HEAD> <!-- Generated by javadoc (build 1.5.0_22) on Tue Sep 18 20:44:16 GMT+01:00 2012 --> <META http-equiv="Content-Type" content="text/html; charset=UTF-8"> <TITLE> Uses of Class org.apache.http.ContentTooLongException (HttpComponents Core 4.2.2 API) </TITLE> <LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../stylesheet.css" TITLE="Style"> <SCRIPT type="text/javascript"> function windowTitle() { parent.document.title="Uses of Class org.apache.http.ContentTooLongException (HttpComponents Core 4.2.2 API)"; } </SCRIPT> <NOSCRIPT> </NOSCRIPT> </HEAD> <BODY BGCOLOR="white" onload="windowTitle();"> <!-- ========= START OF TOP NAVBAR ======= --> <A NAME="navbar_top"><!-- --></A> <A HREF="#skip-navbar_top" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_top_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../org/apache/http/ContentTooLongException.html" title="class in org.apache.http"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../deprecated-list.html"><FONT 
CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;PREV&nbsp; &nbsp;NEXT</FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../index.html?org/apache/http/class-use/ContentTooLongException.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="ContentTooLongException.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_top"></A> <!-- ========= END OF TOP NAVBAR ========= --> <HR> <CENTER> <H2> <B>Uses of Class<br>org.apache.http.ContentTooLongException</B></H2> </CENTER> No usage of org.apache.http.ContentTooLongException <P> <HR> <!-- ======= START OF BOTTOM NAVBAR ====== --> <A NAME="navbar_bottom"><!-- --></A> <A HREF="#skip-navbar_bottom" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_bottom_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT 
CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../org/apache/http/ContentTooLongException.html" title="class in org.apache.http"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;PREV&nbsp; &nbsp;NEXT</FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../index.html?org/apache/http/class-use/ContentTooLongException.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="ContentTooLongException.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_bottom"></A> <!-- ======== END OF BOTTOM NAVBAR ======= --> <HR> Copyright &#169; 2005-2012 <a href="http://www.apache.org/">The Apache Software Foundation</a>. All Rights Reserved. </BODY> </HTML>
espadrine/opera
chromium/src/third_party/httpcomponents-core/binary-distribution/javadoc/org/apache/http/class-use/ContentTooLongException.html
HTML
bsd-3-clause
6,003
package org.hisp.dhis.schema.descriptors;

/*
 * Copyright (c) 2004-2016, University of Oslo
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 * Neither the name of the HISP project nor the names of its contributors may
 * be used to endorse or promote products derived from this software without
 * specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

import org.hisp.dhis.schema.Schema;
import org.hisp.dhis.schema.SchemaDescriptor;
import org.hisp.dhis.user.UserCredentials;

/**
 * Schema descriptor for {@link UserCredentials}: declares the API naming
 * (singular/plural are identical here) and builds the {@link Schema} instance
 * that the schema service registers for this type.
 *
 * @author Morten Olav Hansen <mortenoh@gmail.com>
 */
public class UserCredentialsSchemaDescriptor implements SchemaDescriptor
{
    public static final String SINGULAR = "userCredentials";

    public static final String PLURAL = "userCredentials";

    public static final String API_ENDPOINT = "/" + PLURAL;

    /**
     * Builds the schema for {@link UserCredentials}.
     *
     * @return a {@link Schema} with order 2000, excluded from metadata export.
     */
    @Override
    public Schema getSchema()
    {
        final Schema userCredentialsSchema = new Schema( UserCredentials.class, SINGULAR, PLURAL );

        // Ordering relative to the other descriptors; credentials are not
        // exposed as standalone metadata.
        userCredentialsSchema.setOrder( 2000 );
        userCredentialsSchema.setMetadata( false );

        return userCredentialsSchema;
    }
}
minagri-rwanda/DHIS2-Agriculture
dhis-services/dhis-service-core/src/main/java/org/hisp/dhis/schema/descriptors/UserCredentialsSchemaDescriptor.java
Java
bsd-3-clause
2,275
// Copyright 2014 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.chrome.browser.tabmodel.document; import android.annotation.TargetApi; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.os.AsyncTask; import android.os.Build; import android.os.StrictMode; import android.text.TextUtils; import android.util.Log; import android.util.Pair; import android.util.SparseArray; import com.google.protobuf.nano.MessageNano; import org.chromium.base.ApplicationStatus; import org.chromium.base.ObserverList; import org.chromium.base.ThreadUtils; import org.chromium.base.VisibleForTesting; import org.chromium.chrome.browser.Tab; import org.chromium.chrome.browser.TabState; import org.chromium.chrome.browser.document.DocumentActivity; import org.chromium.chrome.browser.document.DocumentMetricIds; import org.chromium.chrome.browser.document.IncognitoNotificationManager; import org.chromium.chrome.browser.tabmodel.TabCreatorManager; import org.chromium.chrome.browser.tabmodel.TabList; import org.chromium.chrome.browser.tabmodel.TabModel; import org.chromium.chrome.browser.tabmodel.TabModelJniBridge; import org.chromium.chrome.browser.tabmodel.TabModelObserver; import org.chromium.chrome.browser.tabmodel.TabModelUtils; import org.chromium.chrome.browser.tabmodel.document.DocumentTabModelInfo.DocumentEntry; import org.chromium.chrome.browser.tabmodel.document.DocumentTabModelInfo.DocumentList; import org.chromium.chrome.browser.util.MathUtils; import org.chromium.content_public.browser.LoadUrlParams; import org.chromium.content_public.browser.WebContents; import java.io.File; import java.io.IOException; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; /** * Maintains a list of Tabs 
displayed when Chrome is running in document-mode. */ @TargetApi(Build.VERSION_CODES.LOLLIPOP) public class DocumentTabModelImpl extends TabModelJniBridge implements DocumentTabModel { private static final String TAG = "DocumentTabModel"; @VisibleForTesting public static final String PREF_PACKAGE = "com.google.android.apps.chrome.document"; @VisibleForTesting public static final String PREF_LAST_SHOWN_TAB_ID_REGULAR = "last_shown_tab_id.regular"; public static final String PREF_LAST_SHOWN_TAB_ID_INCOGNITO = "last_shown_tab_id.incognito"; /** TabModel is uninitialized. */ public static final int STATE_UNINITIALIZED = 0; /** Begin parsing the tasks from Recents and loading persisted state. */ public static final int STATE_READ_RECENT_TASKS_START = 1; /** Done parsing the tasks from Recents and loading persisted state. */ public static final int STATE_READ_RECENT_TASKS_END = 2; /** Begin loading the current/prioritized tab state synchronously. */ public static final int STATE_LOAD_CURRENT_TAB_STATE_START = 3; /** Finish loading the current/prioritized tab state synchronously. */ public static final int STATE_LOAD_CURRENT_TAB_STATE_END = 4; /** Begin reading TabStates from storage for background tabs. */ public static final int STATE_LOAD_TAB_STATE_BG_START = 5; /** Done reading TabStates from storage for background tabs. */ public static final int STATE_LOAD_TAB_STATE_BG_END = 6; /** Begin deserializing the TabState. Requires the native library. */ public static final int STATE_DESERIALIZE_START = 7; /** Done deserializing the TabState. */ public static final int STATE_DESERIALIZE_END = 8; /** Begin parsing the historical tabs. */ public static final int STATE_DETERMINE_HISTORICAL_TABS_START = 9; /** Done parsing the historical tabs. */ public static final int STATE_DETERMINE_HISTORICAL_TABS_END = 10; /** Clean out old TabState files. */ public static final int STATE_CLEAN_UP_OBSOLETE_TABS = 11; /** TabModel is fully ready to use. 
*/ public static final int STATE_FULLY_LOADED = 12; /** List of known tabs. */ private final ArrayList<Integer> mTabIdList; /** Stores an entry for each DocumentActivity that is alive. Keys are document IDs. */ private final SparseArray<Entry> mEntryMap; /** * Stores tabIds which have been removed from the ActivityManager while Chrome was not alive. * It is cleared after restoration has been finished. */ private final List<Integer> mHistoricalTabs; /** Delegate for working with the ActivityManager. */ private final ActivityDelegate mActivityDelegate; /** Delegate for working with the filesystem. */ private final StorageDelegate mStorageDelegate; /** Delegate that provides Tabs to the DocumentTabModel. */ private final TabCreatorManager mTabCreatorManager; /** ID of a Tab whose state should be loaded immediately, if it belongs to this TabList. */ private final int mPrioritizedTabId; /** List of observers watching for a particular loading state. */ private final ObserverList<InitializationObserver> mInitializationObservers; /** List of observers watching the TabModel. */ private final ObserverList<TabModelObserver> mObservers; /** Context to use. */ private final Context mContext; /** Current loading status. */ private int mCurrentState; /** ID of the last tab that was shown to the user. */ private int mLastShownTabId = Tab.INVALID_TAB_ID; /** * Construct a DocumentTabModel. * @param activityDelegate Delegate to use for accessing the ActivityManager. * @param storageDelegate Delegate to use for accessing persistent storage. * @param tabDelegate Used to create/get Tabs. * @param isIncognito Whether or not the TabList is managing incognito tabs. * @param prioritizedTabId ID of the tab to prioritize when loading. * @param context Context to use for accessing SharedPreferences. 
*/ public DocumentTabModelImpl(ActivityDelegate activityDelegate, StorageDelegate storageDelegate, TabCreatorManager tabCreatorManager, boolean isIncognito, int prioritizedTabId, Context context) { super(isIncognito); mActivityDelegate = activityDelegate; mStorageDelegate = storageDelegate; mTabCreatorManager = tabCreatorManager; mPrioritizedTabId = prioritizedTabId; mContext = context; mCurrentState = STATE_UNINITIALIZED; mTabIdList = new ArrayList<Integer>(); mEntryMap = new SparseArray<Entry>(); mHistoricalTabs = new ArrayList<Integer>(); mInitializationObservers = new ObserverList<InitializationObserver>(); mObservers = new ObserverList<TabModelObserver>(); SharedPreferences prefs = mContext.getSharedPreferences(PREF_PACKAGE, Context.MODE_PRIVATE); mLastShownTabId = prefs.getInt( isIncognito() ? PREF_LAST_SHOWN_TAB_ID_INCOGNITO : PREF_LAST_SHOWN_TAB_ID_REGULAR, Tab.INVALID_TAB_ID); initializeTabList(); } @Override public void initializeNative() { if (!isNativeInitialized()) super.initializeNative(); deserializeTabStatesAsync(); } public StorageDelegate getStorageDelegate() { return mStorageDelegate; } /** * Finds the index of the given Tab ID. * @param tabId ID of the Tab to find. * @return Index of the tab, or -1 if it couldn't be found. */ private int indexOf(int tabId) { return mTabIdList.indexOf(tabId); } @Override public int index() { if (getCount() == 0) return TabList.INVALID_TAB_INDEX; int indexOfLastId = indexOf(mLastShownTabId); if (indexOfLastId != -1) return indexOfLastId; // The previous Tab is gone; select a Tab based on MRU ordering. 
List<Entry> tasks = mActivityDelegate.getTasksFromRecents(isIncognito()); if (tasks.size() == 0) return TabList.INVALID_TAB_INDEX; for (int i = 0; i < tasks.size(); i++) { int lastKnownId = tasks.get(i).tabId; int indexOfMostRecentlyUsedId = indexOf(lastKnownId); if (indexOfMostRecentlyUsedId != -1) return indexOfMostRecentlyUsedId; } return TabList.INVALID_TAB_INDEX; } @Override public boolean setLastShownId(int id) { if (mLastShownTabId == id) return false; int previousTabId = mLastShownTabId; mLastShownTabId = id; String prefName = isIncognito() ? PREF_LAST_SHOWN_TAB_ID_INCOGNITO : PREF_LAST_SHOWN_TAB_ID_REGULAR; SharedPreferences prefs = mContext.getSharedPreferences(PREF_PACKAGE, Context.MODE_PRIVATE); SharedPreferences.Editor editor = prefs.edit(); editor.putInt(prefName, id); editor.apply(); // TODO(dfalcantara): Figure out how to fire the correct type of TabSelectionType, which is // quite hard to do in Document-mode from where we call this. for (TabModelObserver obs : mObservers) { obs.didSelectTab( TabModelUtils.getCurrentTab(this), TabSelectionType.FROM_USER, previousTabId); } return true; } @Override public int indexOf(Tab tab) { if (tab == null) return Tab.INVALID_TAB_ID; return indexOf(tab.getId()); } @Override public int getCount() { return mTabIdList.size(); } @Override public boolean isClosurePending(int tabId) { return false; } @Override public Tab getTabAt(int index) { if (index < 0 || index >= getCount()) return null; // Return a live tab if the corresponding DocumentActivity is currently alive. int tabId = mTabIdList.get(index); List<WeakReference<Activity>> activities = ApplicationStatus.getRunningActivities(); for (WeakReference<Activity> activityRef : activities) { Activity activity = activityRef.get(); if (!(activity instanceof DocumentActivity) || !mActivityDelegate.isValidActivity(isIncognito(), activity.getIntent())) { continue; } Tab tab = ((DocumentActivity) activity).getActivityTab(); int documentId = tab == null ? 
Tab.INVALID_TAB_ID : tab.getId(); if (documentId == tabId) return tab; } // Try to create a Tab that will hold the Tab's info. Entry entry = mEntryMap.get(tabId); assert entry != null; // If a tab has already been initialized, use that. if (entry.placeholderTab != null && entry.placeholderTab.isInitialized()) { return entry.placeholderTab; } // Create a frozen Tab if we are capable, or if the previous Tab is just a placeholder. if (entry.getTabState() != null && isNativeInitialized() && (entry.placeholderTab == null || !entry.placeholderTab.isInitialized())) { entry.placeholderTab = getTabDelegate(isIncognito()).createFrozenTab( entry.getTabState(), entry.tabId, TabModel.INVALID_TAB_INDEX); entry.placeholderTab.initializeNative(); } // Create a placeholder Tab that just has the ID. if (entry.placeholderTab == null) { entry.placeholderTab = new Tab(tabId, isIncognito(), null, null); } return entry.placeholderTab; } @Override public void setIndex(int index, TabSelectionType type) { if (index < 0 || index >= getCount()) return; int tabId = mTabIdList.get(index); mActivityDelegate.moveTaskToFront(isIncognito(), tabId); setLastShownId(tabId); } @Override public boolean closeTabAt(int index) { ThreadUtils.assertOnUiThread(); if (index < 0 || index >= getCount()) return false; Tab tab = getTabAt(index); for (TabModelObserver obs : mObservers) obs.willCloseTab(tab, false); int tabId = tab.getId(); Entry entry = mEntryMap.get(tabId); if (!isIncognito() && entry != null && entry.getTabState() != null) { entry.getTabState().contentsState.createHistoricalTab(); } mActivityDelegate.finishAndRemoveTask(isIncognito(), tabId); mTabIdList.remove(index); mEntryMap.remove(tabId); for (TabModelObserver obs : mObservers) obs.didCloseTab(tab); return true; } @Override public boolean closeTab(Tab tab) { return closeTab(tab, false, false, false); } @Override public boolean closeTab(Tab tabToClose, boolean animate, boolean uponExit, boolean canUndo) { // The tab should be destroyed by the 
DocumentActivity that owns it. return closeTabAt(indexOf(tabToClose.getId())); } @Override protected Tab createTabWithWebContents( boolean isIncognito, WebContents webContents, int parentTabId) { // Tabs created along this pathway are currently only created via JNI, which includes // session restore tabs. Differs from TabModelImpl because we explicitly open tabs in the // foreground -- opening tabs in affiliated mode is disallowed by ChromeLauncherActivity // when a WebContents has already been created. getTabDelegate(isIncognito).createTabWithWebContents( webContents, parentTabId, TabLaunchType.FROM_LONGPRESS_FOREGROUND, webContents.getUrl(), DocumentMetricIds.STARTED_BY_CHROME_HOME_RECENT_TABS); return null; } @Override protected Tab createNewTabForDevTools(String url) { // TODO(dfalcantara): Move upwards once we delete ChromeShellTabModel. return getTabDelegate(false).createNewTab(new LoadUrlParams(url), TabModel.TabLaunchType.FROM_MENU_OR_OVERVIEW, null); } @Override protected boolean isSessionRestoreInProgress() { return mCurrentState < STATE_FULLY_LOADED; } /** * Add the tab ID to the end of the list. * @param tabId ID to add. */ private void addTabId(int tabId) { addTabId(mTabIdList.size(), tabId); } /** * Adds the Tab ID at the given index. * @param index Where to add the ID. * @param tabId ID to add. */ private void addTabId(int index, int tabId) { assert tabId != Tab.INVALID_TAB_ID; if (mTabIdList.contains(tabId)) return; mTabIdList.add(index, tabId); } @Override public String getInitialUrlForDocument(int tabId) { Entry entry = mEntryMap.get(tabId); return entry == null ? null : entry.initialUrl; } @Override public String getCurrentUrlForDocument(int tabId) { Entry entry = mEntryMap.get(tabId); return entry == null ? null : entry.currentUrl; } @Override public boolean isTabStateReady(int tabId) { Entry entry = mEntryMap.get(tabId); return entry == null ? 
true : entry.isTabStateReady; } @Override public TabState getTabStateForDocument(int tabId) { Entry entry = mEntryMap.get(tabId); return entry == null ? null : entry.getTabState(); } @Override public boolean hasEntryForTabId(int tabId) { return mEntryMap.get(tabId) != null; } @Override public boolean isRetargetable(int tabId) { Entry entry = mEntryMap.get(tabId); return entry == null ? false : !entry.canGoBack; } @Override public boolean isCoveredByChildActivity(int tabId) { Entry entry = mEntryMap.get(tabId); return entry == null ? false : entry.isCoveredByChildActivity; } @Override public void addInitializationObserver(InitializationObserver observer) { ThreadUtils.assertOnUiThread(); mInitializationObservers.addObserver(observer); } @Override public void updateRecentlyClosed() { ThreadUtils.assertOnUiThread(); List<Entry> current = mActivityDelegate.getTasksFromRecents(isIncognito()); Set<Integer> removed = new HashSet<Integer>(); for (int i = 0; i < mEntryMap.size(); i++) { int tabId = mEntryMap.keyAt(i); if (isTabIdInEntryList(current, tabId) || mActivityDelegate.isTabAssociatedWithNonDestroyedActivity( isIncognito(), tabId)) { continue; } removed.add(tabId); } for (Integer tabId : removed) { closeTabAt(indexOf(tabId)); } } @Override public void updateEntry(Intent intent, Tab tab) { assert mActivityDelegate.isValidActivity(isIncognito(), intent); int id = ActivityDelegate.getTabIdFromIntent(intent); if (id == Tab.INVALID_TAB_ID) return; Entry currentEntry = mEntryMap.get(id); String currentUrl = tab.getUrl(); boolean canGoBack = tab.canGoBack(); boolean isCoveredByChildActivity = tab.isCoveredByChildActivity(); TabState state = tab.getState(); if (currentEntry != null && currentEntry.tabId == id && TextUtils.equals(currentEntry.currentUrl, currentUrl) && currentEntry.canGoBack == canGoBack && currentEntry.isCoveredByChildActivity == isCoveredByChildActivity && currentEntry.getTabState() == state && !tab.isTabStateDirty()) { return; } if (currentEntry == null) 
{ currentEntry = new Entry(id, ActivityDelegate.getInitialUrlForDocument(intent)); mEntryMap.put(id, currentEntry); } currentEntry.isDirty = true; currentEntry.currentUrl = currentUrl; currentEntry.canGoBack = canGoBack; currentEntry.isCoveredByChildActivity = isCoveredByChildActivity; currentEntry.setTabState(state); // TODO(dfalcantara): This is different from how the normal Tab determines when to save its // state, but this can't be fixed because we cann't hold onto Tabs in this class. tab.setIsTabStateDirty(false); if (currentEntry.placeholderTab != null) { if (currentEntry.placeholderTab.isInitialized()) currentEntry.placeholderTab.destroy(); currentEntry.placeholderTab = null; } writeGeneralDataToStorageAsync(); writeTabStatesToStorageAsync(); } @Override public int getCurrentInitializationStage() { return mCurrentState; } /** * Add an entry to the entry map for migration purposes. * @param entry The entry to be added. * * TODO(dfalcantara): Reduce visibility once DocumentMigrationHelper is upstreamed. */ public void addEntryForMigration(Entry entry) { addTabId(getCount(), entry.tabId); if (mEntryMap.indexOfKey(entry.tabId) >= 0) return; mEntryMap.put(entry.tabId, entry); } private void initializeTabList() { // Temporarily allowing disk access. TODO: Fix. See http://crbug.com/496348 StrictMode.ThreadPolicy oldPolicy = StrictMode.allowThreadDiskReads(); try { setCurrentState(STATE_READ_RECENT_TASKS_START); // Run through Recents to see what tasks exist. Prevent them from being retargeted until // we have had the opportunity to load more information about them. List<Entry> entries = mActivityDelegate.getTasksFromRecents(isIncognito()); for (Entry entry : entries) { entry.canGoBack = true; mEntryMap.put(entry.tabId, entry); } // Read the file, which saved out the task IDs in regular order. 
byte[] tabFileBytes = mStorageDelegate.readTaskFileBytes(isIncognito()); if (tabFileBytes != null) { try { DocumentList list = MessageNano.mergeFrom(new DocumentList(), tabFileBytes); for (int i = 0; i < list.entries.length; i++) { DocumentEntry savedEntry = list.entries[i]; int tabId = savedEntry.tabId; if (mEntryMap.indexOfKey(tabId) < 0) { mHistoricalTabs.add(tabId); continue; } addTabId(getCount(), tabId); mEntryMap.get(tabId).canGoBack = savedEntry.canGoBack; // For backward compatibility, isCoveredByChildActivity may not be // available. mEntryMap.get(tabId).isCoveredByChildActivity = (savedEntry.isCoveredByChildActivity == null) ? false : savedEntry.isCoveredByChildActivity; } } catch (IOException e) { Log.e(TAG, "I/O exception", e); } } // Add any missing tasks to the list. for (int i = 0; i < mEntryMap.size(); i++) { int id = mEntryMap.keyAt(i); if (mTabIdList.contains(id)) continue; addTabId(id); } setCurrentState(STATE_READ_RECENT_TASKS_END); } finally { StrictMode.setThreadPolicy(oldPolicy); } } // TODO(mariakhomenko): we no longer need prioritized tab id in constructor, shift it here. @Override public void startTabStateLoad() { if (mCurrentState != STATE_READ_RECENT_TASKS_END) return; setCurrentState(STATE_LOAD_CURRENT_TAB_STATE_START); // Immediately try loading the requested tab. if (mPrioritizedTabId != Tab.INVALID_TAB_ID) { Entry entry = mEntryMap.get(mPrioritizedTabId); if (entry != null) { entry.setTabState( mStorageDelegate.restoreTabState(mPrioritizedTabId, isIncognito())); entry.isTabStateReady = true; } } setCurrentState(STATE_LOAD_CURRENT_TAB_STATE_END); loadTabStatesAsync(); } private void loadTabStatesAsync() { new AsyncTask<Void, Void, Void>() { private final List<Entry> mEntries = new ArrayList<Entry>(getCount()); @Override public void onPreExecute() { setCurrentState(STATE_LOAD_TAB_STATE_BG_START); for (int i = 0; i < getCount(); i++) { mEntries.add(new Entry(getTabAt(i).getId())); } } @Override public Void doInBackground(Void... 
params) { for (Entry entry : mEntries) { if (mPrioritizedTabId == entry.tabId) continue; entry.setTabState( mStorageDelegate.restoreTabState(entry.tabId, isIncognito())); entry.isTabStateReady = true; } return null; } @Override public void onPostExecute(Void result) { for (Entry pair : mEntries) { Entry entry = mEntryMap.get(pair.tabId); if (entry == null) continue; if (entry.getTabState() == null) entry.setTabState(pair.getTabState()); entry.isTabStateReady = true; } setCurrentState(STATE_LOAD_TAB_STATE_BG_END); deserializeTabStatesAsync(); } }.executeOnExecutor(AsyncTask.SERIAL_EXECUTOR); } private void deserializeTabStatesAsync() { if (!shouldStartDeserialization(mCurrentState)) return; new AsyncTask<Void, Void, Void>() { private final List<Entry> mCachedEntries = new ArrayList<Entry>(mEntryMap.size()); @Override public void onPreExecute() { setCurrentState(STATE_DESERIALIZE_START); for (int i = 0; i < mEntryMap.size(); i++) { Entry entry = mEntryMap.valueAt(i); if (entry.getTabState() == null) continue; mCachedEntries.add(new Entry(entry.tabId, entry.getTabState())); } } @Override public Void doInBackground(Void... params) { for (Entry entry : mCachedEntries) { TabState tabState = entry.getTabState(); updateEntryInfoFromTabState(entry, tabState); } return null; } @Override public void onPostExecute(Void result) { for (Entry pair : mCachedEntries) { Entry realEntry = mEntryMap.get(pair.tabId); if (realEntry == null || realEntry.currentUrl != null) continue; realEntry.currentUrl = pair.currentUrl; } setCurrentState(STATE_DESERIALIZE_END); if (isNativeInitialized()) { broadcastSessionRestoreComplete(); loadHistoricalTabsAsync(); } } }.executeOnExecutor(AsyncTask.SERIAL_EXECUTOR); } /** * Call for extending classes to override for getting additional information for an entry from * the tab state when it is deserialized. * @param entry The {@link Entry} currently being processed * @param tabState The {@link TabState} that has been deserialized for the entry. 
*/ protected void updateEntryInfoFromTabState(Entry entry, TabState tabState) { entry.currentUrl = tabState.getVirtualUrlFromState(); } /** * Checks whether initialization should move to the deserialization step. * @param currentState Current initialization stage. * @return Whether to proceed or not. */ protected boolean shouldStartDeserialization(int currentState) { return isNativeInitialized() && currentState == STATE_LOAD_TAB_STATE_BG_END; } private void loadHistoricalTabsAsync() { new AsyncTask<Void, Void, Void>() { private Set<Integer> mHistoricalTabsForBackgroundThread; private List<Entry> mEntries; @Override public void onPreExecute() { setCurrentState(STATE_DETERMINE_HISTORICAL_TABS_START); mHistoricalTabsForBackgroundThread = new HashSet<Integer>(mHistoricalTabs.size()); mHistoricalTabsForBackgroundThread.addAll(mHistoricalTabs); mEntries = new ArrayList<Entry>(mHistoricalTabsForBackgroundThread.size()); } @Override public Void doInBackground(Void... params) { for (Integer tabId : mHistoricalTabsForBackgroundThread) { // Read the saved state, then delete the file. TabState state = mStorageDelegate.restoreTabState(tabId, isIncognito()); mEntries.add(new Entry(tabId, state)); mStorageDelegate.deleteTabState(tabId, isIncognito()); } return null; } @Override public void onPostExecute(Void result) { for (Entry entry : mEntries) { if (entry.getTabState() == null) continue; entry.getTabState().contentsState.createHistoricalTab(); } mHistoricalTabs.clear(); setCurrentState(STATE_DETERMINE_HISTORICAL_TABS_END); cleanUpObsoleteTabStatesAsync(); } }.executeOnExecutor(AsyncTask.SERIAL_EXECUTOR); } /** * Clears the folder of TabStates that correspond to missing tasks. 
*/ private void cleanUpObsoleteTabStatesAsync() { new AsyncTask<Void, Void, Void>() { private List<Entry> mCurrentTabs; @Override protected void onPreExecute() { setCurrentState(STATE_CLEAN_UP_OBSOLETE_TABS); mCurrentTabs = mActivityDelegate.getTasksFromRecents(isIncognito()); } @Override protected Void doInBackground(Void... voids) { File stateDirectory = mStorageDelegate.getStateDirectory(); String[] files = stateDirectory.list(); for (final String fileName : files) { Pair<Integer, Boolean> tabInfo = TabState.parseInfoFromFilename(fileName); if (tabInfo == null) continue; int tabId = tabInfo.first; boolean incognito = tabInfo.second; if (incognito != isIncognito() || isTabIdInEntryList(mCurrentTabs, tabId)) { continue; } boolean success = new File(stateDirectory, fileName).delete(); if (!success) Log.w(TAG, "Failed to delete: " + fileName); } return null; } @Override protected void onPostExecute(Void result) { setCurrentState(STATE_FULLY_LOADED); } }.executeOnExecutor(AsyncTask.SERIAL_EXECUTOR); } /** * Save out a tiny file with minimal information required for retargeting. */ private void writeGeneralDataToStorageAsync() { if (isIncognito()) return; new AsyncTask<Void, Void, Void>() { private DocumentList mList; @Override protected void onPreExecute() { List<DocumentEntry> entriesList = new ArrayList<DocumentEntry>(); for (int i = 0; i < getCount(); i++) { Entry entry = mEntryMap.get(getTabAt(i).getId()); if (entry == null) continue; DocumentEntry docEntry = new DocumentEntry(); docEntry.tabId = entry.tabId; docEntry.canGoBack = entry.canGoBack; docEntry.isCoveredByChildActivity = entry.isCoveredByChildActivity; entriesList.add(docEntry); } mList = new DocumentList(); mList.entries = entriesList.toArray(new DocumentEntry[entriesList.size()]); } @Override protected Void doInBackground(Void... 
params) { mStorageDelegate.writeTaskFileBytes(isIncognito(), MessageNano.toByteArray(mList)); return null; } }.executeOnExecutor(AsyncTask.SERIAL_EXECUTOR); } /** * Write out all of the TabStates. */ private void writeTabStatesToStorageAsync() { new AsyncTask<Void, Void, Void>() { private final SparseArray<TabState> mStatesToWrite = new SparseArray<TabState>(); @Override protected void onPreExecute() { for (int i = 0; i < mEntryMap.size(); i++) { Entry entry = mEntryMap.valueAt(i); if (!entry.isDirty || entry.getTabState() == null) continue; mStatesToWrite.put(entry.tabId, entry.getTabState()); } } @Override protected Void doInBackground(Void... voids) { for (int i = 0; i < mStatesToWrite.size(); i++) { int tabId = mStatesToWrite.keyAt(i); mStorageDelegate.saveTabState(tabId, isIncognito(), mStatesToWrite.valueAt(i)); } return null; } @Override protected void onPostExecute(Void v) { for (int i = 0; i < mStatesToWrite.size(); i++) { int tabId = mStatesToWrite.keyAt(i); Entry entry = mEntryMap.get(tabId); if (entry == null) continue; entry.isDirty = false; } } }.executeOnExecutor(AsyncTask.SERIAL_EXECUTOR); } private void setCurrentState(int newState) { ThreadUtils.assertOnUiThread(); assert mCurrentState == newState - 1; mCurrentState = newState; for (InitializationObserver observer : mInitializationObservers) { if (observer.isCanceled()) { Log.w(TAG, "Observer alerted after canceled: " + observer); mInitializationObservers.removeObserver(observer); } else if (observer.isSatisfied(mCurrentState)) { observer.runWhenReady(); mInitializationObservers.removeObserver(observer); } } } @Override public Tab getNextTabIfClosed(int id) { // Tab may not necessarily exist. 
return null; } @Override public void closeAllTabs() { closeAllTabs(true, false); } @Override public void closeAllTabs(boolean allowDelegation, boolean uponExit) { for (int i = getCount() - 1; i >= 0; i--) closeTabAt(i); if (isIncognito()) IncognitoNotificationManager.dismissIncognitoNotification(); } @Override public void moveTab(int id, int newIndex) { newIndex = MathUtils.clamp(newIndex, 0, getCount()); int curIndex = TabModelUtils.getTabIndexById(this, id); if (curIndex == INVALID_TAB_INDEX || curIndex == newIndex || curIndex + 1 == newIndex) { return; } mTabIdList.remove(curIndex); addTabId(newIndex, id); Tab tab = getTabAt(curIndex); if (tab == null) return; for (TabModelObserver obs : mObservers) obs.didMoveTab(tab, newIndex, curIndex); } @Override public void destroy() { super.destroy(); mInitializationObservers.clear(); mObservers.clear(); } @Override public void addTab(Intent intent, Tab tab) { if (tab.getId() == Tab.INVALID_TAB_ID || ActivityDelegate.getTabIdFromIntent(intent) != tab.getId()) { return; } int parentIndex = indexOf(tab.getParentId()); int index = parentIndex == -1 ? getCount() : parentIndex + 1; addTab(tab, index, tab.getLaunchType()); updateEntry(intent, tab); } @Override public void addTab(Tab tab, int index, TabLaunchType type) { // TODO(dfalcantara): Prevent this method from being called directly instead of going // through addTab(Intent intent, Tab tab). 
if (tab.getId() == Tab.INVALID_TAB_ID) return; for (TabModelObserver obs : mObservers) obs.willAddTab(tab, type); if (index == TabModel.INVALID_TAB_INDEX) { addTabId(getCount(), tab.getId()); } else { addTabId(index, tab.getId()); } tabAddedToModel(tab); for (TabModelObserver obs : mObservers) obs.didAddTab(tab, type); } @Override public boolean supportsPendingClosures() { return false; } @Override public void commitAllTabClosures() { } @Override public void commitTabClosure(int tabId) { } @Override public void cancelTabClosure(int tabId) { } @Override public TabList getComprehensiveModel() { return this; } @Override public void addObserver(TabModelObserver observer) { mObservers.addObserver(observer); } @Override public void removeObserver(TabModelObserver observer) { mObservers.removeObserver(observer); } private static boolean isTabIdInEntryList(List<Entry> entries, int tabId) { for (int i = 0; i < entries.size(); i++) { if (entries.get(i).tabId == tabId) return true; } return false; } private TabDelegate getTabDelegate(boolean incognito) { return (TabDelegate) mTabCreatorManager.getTabCreator(incognito); } }
SaschaMester/delicium
chrome/android/java/src/org/chromium/chrome/browser/tabmodel/document/DocumentTabModelImpl.java
Java
bsd-3-clause
36,099
using System;

namespace Org.BouncyCastle.Math.EC.Custom.Sec
{
    /// <summary>
    /// Point arithmetic for the SEC binary curve sect409r1.
    /// Points are held in lambda projective coordinates: RawYCoord actually
    /// stores Lambda = X + Y/X (not the affine Y), with RawZCoords[0] as the
    /// projective Z. The formulas below rely on this representation and on the
    /// curve coefficient a = 1 (hence the AddOne() calls where the commented
    /// generic code used curve.A).
    /// </summary>
    internal class SecT409R1Point
        : AbstractF2mPoint
    {
        /**
         * @deprecated Use ECCurve.createPoint to construct points
         */
        public SecT409R1Point(ECCurve curve, ECFieldElement x, ECFieldElement y)
            : this(curve, x, y, false)
        {
        }

        /**
         * @deprecated per-point compression property will be removed, refer {@link #getEncoded(bool)}
         */
        public SecT409R1Point(ECCurve curve, ECFieldElement x, ECFieldElement y, bool withCompression)
            : base(curve, x, y, withCompression)
        {
            // Either both coordinates are present (finite point) or both are
            // null (infinity); a mixed state is invalid.
            if ((x == null) != (y == null))
                throw new ArgumentException("Exactly one of the field elements is null");
        }

        internal SecT409R1Point(ECCurve curve, ECFieldElement x, ECFieldElement y, ECFieldElement[] zs, bool withCompression)
            : base(curve, x, y, zs, withCompression)
        {
        }

        // Returns a curve-less copy built from the affine coordinates.
        protected override ECPoint Detach()
        {
            return new SecT409R1Point(null, AffineXCoord, AffineYCoord);
        }

        public override ECFieldElement YCoord
        {
            get
            {
                ECFieldElement X = RawXCoord, L = RawYCoord;

                if (this.IsInfinity || X.IsZero)
                    return L;

                // Y is actually Lambda (X + Y/X) here; convert to affine value on the fly
                ECFieldElement Y = L.Add(X).Multiply(X);

                ECFieldElement Z = RawZCoords[0];
                if (!Z.IsOne)
                {
                    Y = Y.Divide(Z);
                }

                return Y;
            }
        }

        protected internal override bool CompressionYTilde
        {
            get
            {
                ECFieldElement X = this.RawXCoord;
                if (X.IsZero)
                    return false;

                ECFieldElement Y = this.RawYCoord;

                // Y is actually Lambda (X + Y/X) here
                return Y.TestBitZero() != X.TestBitZero();
            }
        }

        // Mixed/projective point addition in lambda coordinates.
        public override ECPoint Add(ECPoint b)
        {
            if (this.IsInfinity)
                return b;
            if (b.IsInfinity)
                return this;

            ECCurve curve = this.Curve;

            ECFieldElement X1 = this.RawXCoord;
            ECFieldElement X2 = b.RawXCoord;

            if (X1.IsZero)
            {
                if (X2.IsZero)
                    return curve.Infinity;

                // Delegate to the branch below that handles an X == 0 operand.
                return b.Add(this);
            }

            ECFieldElement L1 = this.RawYCoord, Z1 = this.RawZCoords[0];
            ECFieldElement L2 = b.RawYCoord, Z2 = b.RawZCoords[0];

            bool Z1IsOne = Z1.IsOne;
            ECFieldElement U2 = X2, S2 = L2;
            if (!Z1IsOne)
            {
                U2 = U2.Multiply(Z1);
                S2 = S2.Multiply(Z1);
            }

            bool Z2IsOne = Z2.IsOne;
            ECFieldElement U1 = X1, S1 = L1;
            if (!Z2IsOne)
            {
                U1 = U1.Multiply(Z2);
                S1 = S1.Multiply(Z2);
            }

            ECFieldElement A = S1.Add(S2);
            ECFieldElement B = U1.Add(U2);

            if (B.IsZero)
            {
                // Same X: either the same point (double) or opposites (infinity).
                if (A.IsZero)
                    return Twice();

                return curve.Infinity;
            }

            ECFieldElement X3, L3, Z3;
            if (X2.IsZero)
            {
                // TODO This can probably be optimized quite a bit
                ECPoint p = this.Normalize();
                X1 = p.XCoord;
                ECFieldElement Y1 = p.YCoord;

                ECFieldElement Y2 = L2;
                ECFieldElement L = Y1.Add(Y2).Divide(X1);

                //X3 = L.Square().Add(L).Add(X1).Add(curve.A);
                X3 = L.Square().Add(L).Add(X1).AddOne();
                if (X3.IsZero)
                {
                    // Result has X == 0: its lambda is fixed at sqrt(b).
                    return new SecT409R1Point(curve, X3, curve.B.Sqrt(), IsCompressed);
                }

                ECFieldElement Y3 = L.Multiply(X1.Add(X3)).Add(X3).Add(Y1);
                L3 = Y3.Divide(X3).Add(X3);
                Z3 = curve.FromBigInteger(BigInteger.One);
            }
            else
            {
                B = B.Square();

                ECFieldElement AU1 = A.Multiply(U1);
                ECFieldElement AU2 = A.Multiply(U2);

                X3 = AU1.Multiply(AU2);
                if (X3.IsZero)
                {
                    // Result has X == 0: its lambda is fixed at sqrt(b).
                    return new SecT409R1Point(curve, X3, curve.B.Sqrt(), IsCompressed);
                }

                ECFieldElement ABZ2 = A.Multiply(B);
                if (!Z2IsOne)
                {
                    ABZ2 = ABZ2.Multiply(Z2);
                }

                L3 = AU2.Add(B).SquarePlusProduct(ABZ2, L1.Add(Z1));

                Z3 = ABZ2;
                if (!Z1IsOne)
                {
                    Z3 = Z3.Multiply(Z1);
                }
            }

            return new SecT409R1Point(curve, X3, L3, new ECFieldElement[] { Z3 }, IsCompressed);
        }

        // Point doubling in lambda projective coordinates.
        public override ECPoint Twice()
        {
            if (this.IsInfinity)
                return this;

            ECCurve curve = this.Curve;

            ECFieldElement X1 = this.RawXCoord;
            if (X1.IsZero)
            {
                // A point with X == 0 is it's own Additive inverse
                return curve.Infinity;
            }

            ECFieldElement L1 = this.RawYCoord, Z1 = this.RawZCoords[0];

            bool Z1IsOne = Z1.IsOne;
            ECFieldElement L1Z1 = Z1IsOne ? L1 : L1.Multiply(Z1);
            ECFieldElement Z1Sq = Z1IsOne ? Z1 : Z1.Square();
            // T = L^2 + L*Z + a*Z^2 with a = 1 for this curve.
            ECFieldElement T = L1.Square().Add(L1Z1).Add(Z1Sq);
            if (T.IsZero)
            {
                // Doubling produced a point with X == 0; lambda is sqrt(b).
                return new SecT409R1Point(curve, T, curve.B.Sqrt(), IsCompressed);
            }

            ECFieldElement X3 = T.Square();
            ECFieldElement Z3 = Z1IsOne ? T : T.Multiply(Z1Sq);

            ECFieldElement X1Z1 = Z1IsOne ? X1 : X1.Multiply(Z1);
            ECFieldElement L3 = X1Z1.SquarePlusProduct(T, L1Z1).Add(X3).Add(Z3);

            return new SecT409R1Point(curve, X3, L3, new ECFieldElement[] { Z3 }, IsCompressed);
        }

        // Computes 2*this + b with fewer field operations than Twice().Add(b),
        // provided b is affine (Z == 1) and neither operand degenerates.
        public override ECPoint TwicePlus(ECPoint b)
        {
            if (this.IsInfinity)
                return b;
            if (b.IsInfinity)
                return Twice();

            ECCurve curve = this.Curve;

            ECFieldElement X1 = this.RawXCoord;
            if (X1.IsZero)
            {
                // A point with X == 0 is it's own Additive inverse
                return b;
            }

            ECFieldElement X2 = b.RawXCoord, Z2 = b.RawZCoords[0];
            if (X2.IsZero || !Z2.IsOne)
            {
                // Fused formula requires affine b with nonzero X; fall back.
                return Twice().Add(b);
            }

            ECFieldElement L1 = this.RawYCoord, Z1 = this.RawZCoords[0];
            ECFieldElement L2 = b.RawYCoord;

            ECFieldElement X1Sq = X1.Square();
            ECFieldElement L1Sq = L1.Square();
            ECFieldElement Z1Sq = Z1.Square();
            ECFieldElement L1Z1 = L1.Multiply(Z1);

            //ECFieldElement T = curve.A.Multiply(Z1Sq).Add(L1Sq).Add(L1Z1);
            ECFieldElement T = Z1Sq.Add(L1Sq).Add(L1Z1);
            ECFieldElement L2plus1 = L2.AddOne();
            //ECFieldElement A = curve.A.Add(L2plus1).Multiply(Z1Sq).Add(L1Sq).MultiplyPlusProduct(T, X1Sq, Z1Sq);
            ECFieldElement A = L2.Multiply(Z1Sq).Add(L1Sq).MultiplyPlusProduct(T, X1Sq, Z1Sq);
            ECFieldElement X2Z1Sq = X2.Multiply(Z1Sq);
            ECFieldElement B = X2Z1Sq.Add(T).Square();

            if (B.IsZero)
            {
                if (A.IsZero)
                    return b.Twice();

                return curve.Infinity;
            }

            if (A.IsZero)
            {
                // Result has X == 0: its lambda is fixed at sqrt(b).
                return new SecT409R1Point(curve, A, curve.B.Sqrt(), IsCompressed);
            }

            ECFieldElement X3 = A.Square().Multiply(X2Z1Sq);
            ECFieldElement Z3 = A.Multiply(B).Multiply(Z1Sq);
            ECFieldElement L3 = A.Add(B).Square().MultiplyPlusProduct(T, L2plus1, Z3);

            return new SecT409R1Point(curve, X3, L3, new ECFieldElement[] { Z3 }, IsCompressed);
        }

        // Negation in lambda coordinates: -(X, L, Z) = (X, L + Z, Z).
        public override ECPoint Negate()
        {
            if (this.IsInfinity)
                return this;

            ECFieldElement X = this.RawXCoord;
            if (X.IsZero)
                return this;

            // L is actually Lambda (X + Y/X) here
            ECFieldElement L = this.RawYCoord, Z = this.RawZCoords[0];
            return new SecT409R1Point(Curve, X, L.Add(Z), new ECFieldElement[] { Z }, IsCompressed);
        }
    }
}
tobiasge/OutlookPrivacyPlugin
3rdParty/bccrypto-net-05282015/crypto/src/math/ec/custom/sec/SecT409R1Point.cs
C#
bsd-3-clause
8,783
/*
 * Copyright (C) yajin 2008 <yajinzhou@gmail.com >
 *
 * This file is part of the virtualmips distribution.
 * See LICENSE file for terms of the license.
 */

/* JZ4740 UART Emulation.
 *
 * JZ4740 UART is compatible with 16c550.
 *
 * Linux uses UART interrupts to receive and send data.
 *
 * For the simulator it is a bad idea for the OS to use interrupts to send
 * data: the simulator is always READY for sending, so interrupt-driven
 * output is slower than polling.
 *
 * receive:
 *   1. set IER
 *   2. wait for interrupt (read IIR and LSR)
 *   3. if IIR reports an interrupt and LSR says data ready, read RBR.
 *
 * send:
 *   1. set IER to enable transmit-request interrupt
 *   2. if the UART can send data, raise an interrupt and set IIR/LSR
 *   3. Linux receives the interrupt, reads IIR and LSR
 *   4. send data.
 */

#define _GNU_SOURCE
#include <stdlib.h>
#include <stdio.h>
#include <assert.h>
#include<sys/types.h>
#include<sys/stat.h>
#include<string.h>

#include "device.h"
#include "mips_memory.h"
#include "jz4740.h"
#include "cpu.h"
#include "vp_timer.h"

/* Interrupt Identification Register */
#define IIR_NPENDING 0x01       /* 0: irq pending, 1: no irq pending */
#define IIR_TXRDY    0x02
#define IIR_RXRDY    0x04

/* Per-device state for one emulated JZ4740 UART. */
struct jz4740_uart_data {
    struct vdevice *dev;        /* generic device record */
    u_int irq, duart_irq_seq;   /* irq line; duart_irq_seq unused here */
    u_int output;               /* nonzero while a TX interrupt is pending */
    vtty_t *vtty;               /* host-side virtual tty backing this UART */
    vm_instance_t *vm;          /* owning VM, used to raise/clear the irq */

    m_uint32_t ier;             /* Interrupt Enable, offset 0x04 */
    m_uint32_t iir;             /* Interrupt Ident (read), offset 0x08 */
    m_uint32_t fcr;             /* FIFO Control (write), offset 0x08 */
    m_uint32_t lcr;             /* Line Control, offset 0x0c */
    m_uint32_t mcr;             /* Modem Control, offset 0x10 */
    m_uint32_t lsr;             /* Line Status, offset 0x14 */
    m_uint32_t msr;             /* Modem Status, offset 0x18 */
    m_uint32_t spr;             /* Scratchpad, offset 0x1c */
    m_uint32_t isr;             /* offset 0x20 */
    m_uint32_t umr;             /* offset 0x24 */
    m_uint32_t uacr;            /* offset 0x28 */
    m_uint32_t jz4740_uart_size;    /* length of the register window */
    vp_timer_t *uart_timer;     /* periodic poll timer (see dev_jz4740_uart_cb) */
};

/* vtty read notifier: a character arrived from the host side.
 * Marks RX data ready and, if RX interrupts are enabled, raises the irq. */
static void jz4740_tty_con_input (vtty_t * vtty)
{
    struct jz4740_uart_data *d = vtty->priv_data;
    if (d->ier & UART_IER_RDRIE) {
        d->vm->set_irq (d->vm, d->irq);
    }
    d->lsr |= UART_LSR_DRY;
}

/* MMIO access handler for the UART register window.
 *
 * Returns a pointer into the device state for simple pass-through
 * registers, or NULL after handling the access itself (in which case
 * *has_set_value and *data carry the result). Offsets beyond the window
 * read as zero. */
void *dev_jz4740_uart_access (cpu_mips_t * cpu, struct vdevice *dev,
    m_uint32_t offset, u_int op_size, u_int op_type, m_reg_t * data,
    m_uint8_t * has_set_value)
{
    struct jz4740_uart_data *d = dev->priv_data;
    u_char odata;

    if (offset >= d->jz4740_uart_size) {
        *data = 0;
        return NULL;
    }

    switch (offset) {
    case UART_RBR:             /* 0x0: RBR on read, THR on write */
        if (op_type == MTS_READ) {
            *data = vtty_get_char (d->vtty);
            /* Refresh data-ready according to what is still buffered. */
            if (vtty_is_char_avail (d->vtty))
                d->lsr |= UART_LSR_DRY;
            else
                d->lsr &= ~UART_LSR_DRY;
        } else {
            vtty_put_char (d->vtty, (char) (*data));
            if ((d->ier & UART_IER_TDRIE) && (d->output == 0)
                && (d->fcr & 0x10)) {
                /* yajin: raise the TX irq immediately instead of waiting
                 * for the poll timer, so the guest can push the next byte
                 * sooner. Side effect: Linux may print "serial8250: too
                 * much work for irq9" when writing lots of output (patch
                 * the kernel to silence it). See also
                 * http://lkml.org/lkml/2008/1/12/135. With a JIT this
                 * immediate raise would be unnecessary. */
                d->output = TRUE;
                d->vm->set_irq (d->vm, d->irq);
            }
        }
        *has_set_value = TRUE;
        break;
    case UART_IER:             /* 0x04 */
        if (op_type == MTS_READ) {
            *data = d->ier;
        } else {
            d->ier = *data & 0xFF;
        }
        *has_set_value = TRUE;
        break;
    case UART_IIR:             /* 0x08: IIR on read, FCR on write */
        /* Any access acknowledges the pending interrupt. */
        d->vm->clear_irq (d->vm, d->irq);
        if (op_type == MTS_READ) {
            odata = IIR_NPENDING;
            /* RX-ready takes priority over TX-ready. */
            if (vtty_is_char_avail (d->vtty)) {
                odata = IIR_RXRDY;
            } else {
                if (d->output) {
                    odata = IIR_TXRDY;
                    d->output = 0;
                }
            }
            *data = odata;
        } else {
            d->fcr = *data;
            /* FCR bit 5: flushing clears data-ready. */
            if (d->fcr & 0x20)
                d->lsr &= ~UART_LSR_DRY;
        }
        *has_set_value = TRUE;
        break;
    case UART_LSR:             /* 0x14: read-only line status */
        if (op_type == MTS_READ) {
            /* The simulated transmitter is always empty/ready. */
            d->lsr |= UART_LSR_TDRQ | UART_LSR_TEMP;
            if (vtty_is_char_avail (d->vtty))
                d->lsr |= UART_LSR_DRY;
            return &(d->lsr);
        } else
            ASSERT (0, "WRITE TO LSR\n");
        *has_set_value = TRUE;
        break;
    /* Plain storage registers: expose backing field directly. */
    case UART_LCR:
        return &(d->lcr);
    case UART_MCR:
        return &(d->mcr);
    case UART_MSR:
        return &(d->msr);
    case UART_SPR:
        return &(d->spr);
    case UART_ISR:
        return &(d->isr);
    case UART_UMR:
        return &(d->umr);
    case UART_UACR:
        return &(d->uacr);
    default:
        ASSERT (0, "invalid uart offset %x\n", offset);
    }

    return NULL;
}

/* Reset handler: restore registers to their power-on values.
 * IER is deliberately left alone; LSR keeps TX-empty bits set. */
void dev_jz4740_uart_reset (cpu_mips_t * cpu, struct vdevice *dev)
{
    struct jz4740_uart_data *d = dev->priv_data;
    d->fcr = 0x0;
    d->lcr = 0x0;
    d->mcr = 0x0;
    d->lsr |= UART_LSR_TDRQ | UART_LSR_TEMP;
    d->msr = 0x0;
    d->spr = 0x0;
    d->isr = 0x0;
    d->umr = 0x0;
    d->uacr = 0x0;
}

extern cpu_mips_t *current_cpu;

/* Poll period of the UART timer, in rt_clock ticks. */
#define UART_TIME_OUT 25

/* Periodic timer callback: polls for pending RX data and TX-interrupt
 * requests, raising the irq when enabled, then re-arms itself. */
void dev_jz4740_uart_cb (void *opaque)
{
    struct jz4740_uart_data *d = (struct jz4740_uart_data *) opaque;

    d->output = 0;
    if (vtty_is_char_avail (d->vtty)) {
        d->lsr |= UART_LSR_DRY;
        if (d->ier & UART_IER_RDRIE) {
            d->vm->set_irq (d->vm, d->irq);
            vp_mod_timer (d->uart_timer, vp_get_clock (rt_clock) + UART_TIME_OUT);
            return;
        }
    }
    if ((d->ier & UART_IER_TDRIE) && (d->output == 0) && (d->fcr & 0x10)) {
        d->output = TRUE;
        d->vm->set_irq (d->vm, d->irq);
        vp_mod_timer (d->uart_timer, vp_get_clock (rt_clock) + UART_TIME_OUT);
        return;
    }
    // d->uart_timer->set_time=vp_get_clock(rt_clock);
    vp_mod_timer (d->uart_timer, vp_get_clock (rt_clock) + UART_TIME_OUT);
}

/* Create and register a JZ4740 UART device.
 *
 * vm    - owning VM instance
 * name  - device name
 * paddr - physical base address of the register window
 * len   - window length in bytes
 * irq   - irq line to raise for RX/TX events
 * vtty  - host virtual tty backing this UART
 *
 * Returns 0 on success, -1 on allocation/creation failure. */
int dev_jz4740_uart_init (vm_instance_t * vm, char *name, m_pa_t paddr,
    m_uint32_t len, u_int irq, vtty_t * vtty)
{
    struct jz4740_uart_data *d;

    /* allocate the private data structure */
    if (!(d = malloc (sizeof (*d)))) {
        fprintf (stderr, "JZ4740 UART: unable to create device.\n");
        return (-1);
    }
    memset (d, 0, sizeof (*d));
    if (!(d->dev = dev_create (name)))
        goto err_dev_create;

    d->dev->priv_data = d;
    d->dev->phys_addr = paddr;
    d->dev->phys_len = len;
    d->dev->flags = VDEVICE_FLAG_NO_MTS_MMAP;
    d->vm = vm;
    (*d).vtty = vtty;
    d->irq = irq;
    /* Link back so jz4740_tty_con_input can find this device. */
    vtty->priv_data = d;
    d->jz4740_uart_size = len;

    d->dev->handler = dev_jz4740_uart_access;
    d->dev->reset_handler = dev_jz4740_uart_reset;
    (*d).vtty->read_notifier = jz4740_tty_con_input;

    /* Start the periodic poll timer. */
    d->uart_timer = vp_new_timer (rt_clock, dev_jz4740_uart_cb, d);
    //d->uart_timer->set_time=vp_get_clock(rt_clock);
    vp_mod_timer (d->uart_timer, vp_get_clock (rt_clock) + UART_TIME_OUT);

    vm_bind_device (vm, d->dev);

    return (0);

  err_dev_create:
    free (d);
    return (-1);
}
RetroBSD/retrobsd
tools/virtualmips/jz4740/jz4740_dev_uart.c
C
bsd-3-clause
7,904
// Copyright 2021 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "ash/webui/file_manager/resource_loader.h" #include "content/public/test/browser_task_environment.h" #include "content/public/test/test_web_ui_data_source.h" #include "testing/gtest/include/gtest/gtest.h" #include "ui/base/webui/resource_path.h" namespace ash { namespace file_manager { class ResourceLoaderTest : public testing::Test { public: ResourceLoaderTest() = default; content::TestWebUIDataSource* source() { return source_.get(); } private: void SetUp() override { source_ = content::TestWebUIDataSource::Create("test-file-manager-host"); } content::BrowserTaskEnvironment task_environment_; std::unique_ptr<content::TestWebUIDataSource> source_; }; TEST_F(ResourceLoaderTest, AddFilesAppResources) { const webui::ResourcePath kTestResources[] = { {"file_manager/images/icon192.png", 8}, {"file_manager_fakes.js", 9}, {"file_manager/untrusted_resources_files_img_content.css", 10}, {"file_manager/untrusted_resources/files_img_content.css", 11}, }; const size_t kTestResourcesSize = std::size(kTestResources); AddFilesAppResources(source()->GetWebUIDataSource(), kTestResources, kTestResourcesSize); EXPECT_EQ(8, source()->PathToIdrOrDefault("images/icon192.png")); EXPECT_EQ(-1, source()->PathToIdrOrDefault("file_manager_fakes.js")); EXPECT_EQ(10, source()->PathToIdrOrDefault( "untrusted_resources_files_img_content.css")); EXPECT_EQ(-1, source()->PathToIdrOrDefault( "untrusted_resources/files_img_content.css")); } } // namespace file_manager } // namespace ash
chromium/chromium
ash/webui/file_manager/resource_loader_unittest.cc
C++
bsd-3-clause
1,786
<!DOCTYPE html>
<!--
Copyright (c) 2013 The Chromium Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
-->

<link rel="import" href="/tracing/extras/importer/linux_perf/parser.html">
<link rel="import" href="/tracing/model/counter_series.html">

<script>
'use strict';

/**
 * @fileoverview Parses memory_bus_usage trace_marker events that were
 * inserted in the trace by userland and turns them into per-bus read and
 * write bandwidth counters.
 */
tr.exportTo('tr.e.importer.linux_perf', function() {
  var Parser = tr.e.importer.linux_perf.Parser;

  // Matches e.g. "bus=emc rw_bytes=100 r_bytes=60 w_bytes=40 cycles=5 ns=1000".
  // Compiled once instead of per event.
  var BUS_USAGE_RE = new RegExp('bus=(\\S+) rw_bytes=(\\d+) r_bytes=(\\d+) ' +
      'w_bytes=(\\d+) cycles=(\\d+) ns=(\\d+)');

  /**
   * Parses linux trace mark events that were inserted in the trace by
   * userland.
   * @constructor
   */
  function BusParser(importer) {
    Parser.call(this, importer);

    importer.registerEventHandler('memory_bus_usage',
        BusParser.prototype.traceMarkWriteBusEvent.bind(this));

    this.model_ = importer.model_;
    this.ppids_ = {};
  }

  BusParser.prototype = {
    __proto__: Parser.prototype,

    /**
     * Adds one bandwidth sample to the named kernel counter, creating the
     * counter and its single 'value' series on first use.
     * @param {string} name Counter name, e.g. 'bus emc read'.
     * @param {number} ts Timestamp of the sample.
     * @param {number} bandwidth Bandwidth value in MiB/s.
     */
    addBandwidthSample_: function(name, ts, bandwidth) {
      var ctr = this.model_.kernel.getOrCreateCounter(null, name);
      if (ctr.numSeries === 0) {
        ctr.addSeries(new tr.model.CounterSeries('value',
            tr.ui.b.getColorIdForGeneralPurposeString(
                ctr.name + '.' + 'value')));
      }
      ctr.series.forEach(function(series) {
        series.addCounterSample(ts, bandwidth);
      });
    },

    /**
     * Handles one memory_bus_usage event, emitting read and write bandwidth
     * samples for the named bus.
     * @return {boolean} Whether the event was successfully parsed.
     */
    traceMarkWriteBusEvent: function(eventName, cpuNumber, pid, ts,
                                     eventBase, threadName) {
      var event = BUS_USAGE_RE.exec(eventBase.details);
      // Guard against malformed payloads: exec() returns null on no match,
      // and the original code would have thrown dereferencing it.
      if (!event)
        return false;

      var name = event[1];
      var r_bytes = parseInt(event[2 + 1], 10);
      var w_bytes = parseInt(event[4], 10);
      var ns = parseInt(event[6], 10);

      // Bandwidth in MiB/s: bytes per ns -> bytes per second -> MiB/s.
      var r_bw = r_bytes * 1000000000 / ns / (1024 * 1024);
      var w_bw = w_bytes * 1000000000 / ns / (1024 * 1024);

      this.addBandwidthSample_('bus ' + name + ' read', ts, r_bw);
      // Bug fix: the write counter previously sampled r_bw; w_bw was
      // computed but never used.
      this.addBandwidthSample_('bus ' + name + ' write', ts, w_bw);

      return true;
    }
  };

  Parser.register(BusParser);

  return {
    BusParser: BusParser
  };
});
</script>
zeptonaut/catapult
tracing/tracing/extras/importer/linux_perf/bus_parser.html
HTML
bsd-3-clause
2,583
#region Copyright notice and license // Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // https://developers.google.com/protocol-buffers/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #endregion using Google.Protobuf.Collections; using System; using System.Collections.Generic; using System.IO; namespace Google.Protobuf { /// <summary> /// Reads and decodes protocol message fields. 
/// </summary> /// <remarks> /// <para> /// This class is generally used by generated code to read appropriate /// primitives from the stream. It effectively encapsulates the lowest /// levels of protocol buffer format. /// </para> /// <para> /// Repeated fields and map fields are not handled by this class; use <see cref="RepeatedField{T}"/> /// and <see cref="MapField{TKey, TValue}"/> to serialize such fields. /// </para> /// </remarks> public sealed class CodedInputStream { /// <summary> /// Buffer of data read from the stream or provided at construction time. /// </summary> private readonly byte[] buffer; /// <summary> /// The index of the buffer at which we need to refill from the stream (if there is one). /// </summary> private int bufferSize; private int bufferSizeAfterLimit = 0; /// <summary> /// The position within the current buffer (i.e. the next byte to read) /// </summary> private int bufferPos = 0; /// <summary> /// The stream to read further input from, or null if the byte array buffer was provided /// directly on construction, with no further data available. /// </summary> private readonly Stream input; /// <summary> /// The last tag we read. 0 indicates we've read to the end of the stream /// (or haven't read anything yet). /// </summary> private uint lastTag = 0; /// <summary> /// The next tag, used to store the value read by PeekTag. /// </summary> private uint nextTag = 0; private bool hasNextTag = false; internal const int DefaultRecursionLimit = 64; internal const int DefaultSizeLimit = 64 << 20; // 64MB internal const int BufferSize = 4096; /// <summary> /// The total number of bytes read before the current buffer. The /// total bytes read up to the current position can be computed as /// totalBytesRetired + bufferPos. /// </summary> private int totalBytesRetired = 0; /// <summary> /// The absolute position of the end of the current message. 
/// </summary> private int currentLimit = int.MaxValue; private int recursionDepth = 0; private readonly int recursionLimit; private readonly int sizeLimit; #region Construction // Note that the checks are performed such that we don't end up checking obviously-valid things // like non-null references for arrays we've just created. /// <summary> /// Creates a new CodedInputStream reading data from the given byte array. /// </summary> public CodedInputStream(byte[] buffer) : this(null, Preconditions.CheckNotNull(buffer, "buffer"), 0, buffer.Length) { } /// <summary> /// Creates a new CodedInputStream that reads from the given byte array slice. /// </summary> public CodedInputStream(byte[] buffer, int offset, int length) : this(null, Preconditions.CheckNotNull(buffer, "buffer"), offset, offset + length) { if (offset < 0 || offset > buffer.Length) { throw new ArgumentOutOfRangeException("offset", "Offset must be within the buffer"); } if (length < 0 || offset + length > buffer.Length) { throw new ArgumentOutOfRangeException("length", "Length must be non-negative and within the buffer"); } } /// <summary> /// Creates a new CodedInputStream reading data from the given stream. /// </summary> public CodedInputStream(Stream input) : this(input, new byte[BufferSize], 0, 0) { Preconditions.CheckNotNull(input, "input"); } /// <summary> /// Creates a new CodedInputStream reading data from the given /// stream and buffer, using the default limits. /// </summary> internal CodedInputStream(Stream input, byte[] buffer, int bufferPos, int bufferSize) { this.input = input; this.buffer = buffer; this.bufferPos = bufferPos; this.bufferSize = bufferSize; this.sizeLimit = DefaultSizeLimit; this.recursionLimit = DefaultRecursionLimit; } /// <summary> /// Creates a new CodedInputStream reading data from the given /// stream and buffer, using the specified limits. 
/// </summary> /// <remarks> /// This chains to the version with the default limits instead of vice versa to avoid /// having to check that the default values are valid every time. /// </remarks> internal CodedInputStream(Stream input, byte[] buffer, int bufferPos, int bufferSize, int sizeLimit, int recursionLimit) : this(input, buffer, bufferPos, bufferSize) { if (sizeLimit <= 0) { throw new ArgumentOutOfRangeException("sizeLimit", "Size limit must be positive"); } if (recursionLimit <= 0) { throw new ArgumentOutOfRangeException("recursionLimit!", "Recursion limit must be positive"); } this.sizeLimit = sizeLimit; this.recursionLimit = recursionLimit; } #endregion /// <summary> /// Creates a <see cref="CodedInputStream"/> with the specified size and recursion limits, reading /// from an input stream. /// </summary> /// <remarks> /// This method exists separately from the constructor to reduce the number of constructor overloads. /// It is likely to be used considerably less frequently than the constructors, as the default limits /// are suitable for most use cases. 
/// </remarks> /// <param name="input">The input stream to read from</param> /// <param name="sizeLimit">The total limit of data to read from the stream.</param> /// <param name="recursionLimit">The maximum recursion depth to allow while reading.</param> /// <returns>A <c>CodedInputStream</c> reading from <paramref name="input"/> with the specified size /// and recursion limits.</returns> public static CodedInputStream CreateWithLimits(Stream input, int sizeLimit, int recursionLimit) { return new CodedInputStream(input, new byte[BufferSize], 0, 0, sizeLimit, recursionLimit); } /// <summary> /// Returns the current position in the input stream, or the position in the input buffer /// </summary> public long Position { get { if (input != null) { return input.Position - ((bufferSize + bufferSizeAfterLimit) - bufferPos); } return bufferPos; } } /// <summary> /// Returns the last tag read, or 0 if no tags have been read or we've read beyond /// the end of the stream. /// </summary> internal uint LastTag { get { return lastTag; } } /// <summary> /// Returns the size limit for this stream. /// </summary> /// <remarks> /// This limit is applied when reading from the underlying stream, as a sanity check. It is /// not applied when reading from a byte array data source without an underlying stream. /// The default value is 64MB. /// </remarks> /// <value> /// The size limit. /// </value> public int SizeLimit { get { return sizeLimit; } } /// <summary> /// Returns the recursion limit for this stream. This limit is applied whilst reading messages, /// to avoid maliciously-recursive data. /// </summary> /// <remarks> /// The default limit is 64. /// </remarks> /// <value> /// The recursion limit for this stream. /// </value> public int RecursionLimit { get { return recursionLimit; } } #region Validation /// <summary> /// Verifies that the last call to ReadTag() returned tag 0 - in other words, /// we've reached the end of the stream when we expected to. 
/// </summary>
/// <exception cref="InvalidProtocolBufferException">More data was available in the
/// stream, i.e. the last tag read was not 0 (end of stream).</exception>
internal void CheckReadEndOfStreamTag()
{
    if (lastTag != 0)
    {
        throw InvalidProtocolBufferException.MoreDataAvailable();
    }
}
#endregion

#region Reading of tags etc

/// <summary>
/// Peeks at the next field tag. This is like calling <see cref="ReadTag"/>, but the
/// tag is not consumed. (So a subsequent call to <see cref="ReadTag"/> will return the
/// same value.)
/// </summary>
public uint PeekTag()
{
    if (hasNextTag)
    {
        return nextTag;
    }

    uint savedLast = lastTag;
    nextTag = ReadTag();
    hasNextTag = true;
    lastTag = savedLast; // Undo the side effect of ReadTag
    return nextTag;
}

/// <summary>
/// Reads a field tag, returning the tag of 0 for "end of stream".
/// </summary>
/// <remarks>
/// If this method returns 0, it doesn't necessarily mean the end of all
/// the data in this CodedInputStream; it may be the end of the logical stream
/// for an embedded message, for example.
/// </remarks>
/// <returns>The next field tag, or 0 for end of stream. (0 is never a valid tag.)</returns>
public uint ReadTag()
{
    if (hasNextTag)
    {
        lastTag = nextTag;
        hasNextTag = false;
        return lastTag;
    }

    // Optimize for the incredibly common case of having at least two bytes left in the buffer,
    // and those two bytes being enough to get the tag. This will be true for fields up to 4095.
    if (bufferPos + 2 <= bufferSize)
    {
        int tmp = buffer[bufferPos++];
        if (tmp < 128)
        {
            lastTag = (uint)tmp;
        }
        else
        {
            int result = tmp & 0x7f;
            if ((tmp = buffer[bufferPos++]) < 128)
            {
                result |= tmp << 7;
                lastTag = (uint) result;
            }
            else
            {
                // Nope, rewind and go the potentially slow route.
                bufferPos -= 2;
                lastTag = ReadRawVarint32();
            }
        }
    }
    else
    {
        if (IsAtEnd)
        {
            lastTag = 0;
            return 0; // This is the only case in which we return 0.
        }

        lastTag = ReadRawVarint32();
    }
    if (lastTag == 0)
    {
        // If we actually read zero, that's not a valid tag.
throw InvalidProtocolBufferException.InvalidTag(); } return lastTag; } /// <summary> /// Skips the data for the field with the tag we've just read. /// This should be called directly after <see cref="ReadTag"/>, when /// the caller wishes to skip an unknown field. /// </summary> public void SkipLastField() { if (lastTag == 0) { throw new InvalidOperationException("SkipLastField cannot be called at the end of a stream"); } switch (WireFormat.GetTagWireType(lastTag)) { case WireFormat.WireType.StartGroup: SkipGroup(); break; case WireFormat.WireType.EndGroup: // Just ignore; there's no data following the tag. break; case WireFormat.WireType.Fixed32: ReadFixed32(); break; case WireFormat.WireType.Fixed64: ReadFixed64(); break; case WireFormat.WireType.LengthDelimited: var length = ReadLength(); SkipRawBytes(length); break; case WireFormat.WireType.Varint: ReadRawVarint32(); break; } } private void SkipGroup() { // Note: Currently we expect this to be the way that groups are read. We could put the recursion // depth changes into the ReadTag method instead, potentially... recursionDepth++; if (recursionDepth >= recursionLimit) { throw InvalidProtocolBufferException.RecursionLimitExceeded(); } uint tag; do { tag = ReadTag(); if (tag == 0) { throw InvalidProtocolBufferException.TruncatedMessage(); } // This recursion will allow us to handle nested groups. SkipLastField(); } while (WireFormat.GetTagWireType(tag) != WireFormat.WireType.EndGroup); recursionDepth--; } /// <summary> /// Reads a double field from the stream. /// </summary> public double ReadDouble() { return BitConverter.Int64BitsToDouble((long) ReadRawLittleEndian64()); } /// <summary> /// Reads a float field from the stream. 
/// </summary> public float ReadFloat() { if (BitConverter.IsLittleEndian && 4 <= bufferSize - bufferPos) { float ret = BitConverter.ToSingle(buffer, bufferPos); bufferPos += 4; return ret; } else { byte[] rawBytes = ReadRawBytes(4); if (!BitConverter.IsLittleEndian) { ByteArray.Reverse(rawBytes); } return BitConverter.ToSingle(rawBytes, 0); } } /// <summary> /// Reads a uint64 field from the stream. /// </summary> public ulong ReadUInt64() { return ReadRawVarint64(); } /// <summary> /// Reads an int64 field from the stream. /// </summary> public long ReadInt64() { return (long) ReadRawVarint64(); } /// <summary> /// Reads an int32 field from the stream. /// </summary> public int ReadInt32() { return (int) ReadRawVarint32(); } /// <summary> /// Reads a fixed64 field from the stream. /// </summary> public ulong ReadFixed64() { return ReadRawLittleEndian64(); } /// <summary> /// Reads a fixed32 field from the stream. /// </summary> public uint ReadFixed32() { return ReadRawLittleEndian32(); } /// <summary> /// Reads a bool field from the stream. /// </summary> public bool ReadBool() { return ReadRawVarint32() != 0; } /// <summary> /// Reads a string field from the stream. /// </summary> public string ReadString() { int length = ReadLength(); // No need to read any data for an empty string. if (length == 0) { return ""; } if (length <= bufferSize - bufferPos) { // Fast path: We already have the bytes in a contiguous buffer, so // just copy directly from it. String result = CodedOutputStream.Utf8Encoding.GetString(buffer, bufferPos, length); bufferPos += length; return result; } // Slow path: Build a byte array first then copy it. return CodedOutputStream.Utf8Encoding.GetString(ReadRawBytes(length), 0, length); } /// <summary> /// Reads an embedded message field value from the stream. 
/// </summary> public void ReadMessage(IMessage builder) { int length = ReadLength(); if (recursionDepth >= recursionLimit) { throw InvalidProtocolBufferException.RecursionLimitExceeded(); } int oldLimit = PushLimit(length); ++recursionDepth; builder.MergeFrom(this); CheckReadEndOfStreamTag(); // Check that we've read exactly as much data as expected. if (!ReachedLimit) { throw InvalidProtocolBufferException.TruncatedMessage(); } --recursionDepth; PopLimit(oldLimit); } /// <summary> /// Reads a bytes field value from the stream. /// </summary> public ByteString ReadBytes() { int length = ReadLength(); if (length <= bufferSize - bufferPos && length > 0) { // Fast path: We already have the bytes in a contiguous buffer, so // just copy directly from it. ByteString result = ByteString.CopyFrom(buffer, bufferPos, length); bufferPos += length; return result; } else { // Slow path: Build a byte array and attach it to a new ByteString. return ByteString.AttachBytes(ReadRawBytes(length)); } } /// <summary> /// Reads a uint32 field value from the stream. /// </summary> public uint ReadUInt32() { return ReadRawVarint32(); } /// <summary> /// Reads an enum field value from the stream. If the enum is valid for type T, /// then the ref value is set and it returns true. Otherwise the unknown output /// value is set and this method returns false. /// </summary> public int ReadEnum() { // Currently just a pass-through, but it's nice to separate it logically from WriteInt32. return (int) ReadRawVarint32(); } /// <summary> /// Reads an sfixed32 field value from the stream. /// </summary> public int ReadSFixed32() { return (int) ReadRawLittleEndian32(); } /// <summary> /// Reads an sfixed64 field value from the stream. /// </summary> public long ReadSFixed64() { return (long) ReadRawLittleEndian64(); } /// <summary> /// Reads an sint32 field value from the stream. 
/// </summary> public int ReadSInt32() { return DecodeZigZag32(ReadRawVarint32()); } /// <summary> /// Reads an sint64 field value from the stream. /// </summary> public long ReadSInt64() { return DecodeZigZag64(ReadRawVarint64()); } /// <summary> /// Reads a length for length-delimited data. /// </summary> /// <remarks> /// This is internally just reading a varint, but this method exists /// to make the calling code clearer. /// </remarks> public int ReadLength() { return (int) ReadRawVarint32(); } /// <summary> /// Peeks at the next tag in the stream. If it matches <paramref name="tag"/>, /// the tag is consumed and the method returns <c>true</c>; otherwise, the /// stream is left in the original position and the method returns <c>false</c>. /// </summary> public bool MaybeConsumeTag(uint tag) { if (PeekTag() == tag) { hasNextTag = false; return true; } return false; } #endregion #region Underlying reading primitives /// <summary> /// Same code as ReadRawVarint32, but read each byte individually, checking for /// buffer overflow. /// </summary> private uint SlowReadRawVarint32() { int tmp = ReadRawByte(); if (tmp < 128) { return (uint) tmp; } int result = tmp & 0x7f; if ((tmp = ReadRawByte()) < 128) { result |= tmp << 7; } else { result |= (tmp & 0x7f) << 7; if ((tmp = ReadRawByte()) < 128) { result |= tmp << 14; } else { result |= (tmp & 0x7f) << 14; if ((tmp = ReadRawByte()) < 128) { result |= tmp << 21; } else { result |= (tmp & 0x7f) << 21; result |= (tmp = ReadRawByte()) << 28; if (tmp >= 128) { // Discard upper 32 bits. for (int i = 0; i < 5; i++) { if (ReadRawByte() < 128) { return (uint) result; } } throw InvalidProtocolBufferException.MalformedVarint(); } } } } return (uint) result; } /// <summary> /// Reads a raw Varint from the stream. If larger than 32 bits, discard the upper bits. /// This method is optimised for the case where we've got lots of data in the buffer. 
/// That means we can check the size just once, then just read directly from the buffer /// without constant rechecking of the buffer length. /// </summary> internal uint ReadRawVarint32() { if (bufferPos + 5 > bufferSize) { return SlowReadRawVarint32(); } int tmp = buffer[bufferPos++]; if (tmp < 128) { return (uint) tmp; } int result = tmp & 0x7f; if ((tmp = buffer[bufferPos++]) < 128) { result |= tmp << 7; } else { result |= (tmp & 0x7f) << 7; if ((tmp = buffer[bufferPos++]) < 128) { result |= tmp << 14; } else { result |= (tmp & 0x7f) << 14; if ((tmp = buffer[bufferPos++]) < 128) { result |= tmp << 21; } else { result |= (tmp & 0x7f) << 21; result |= (tmp = buffer[bufferPos++]) << 28; if (tmp >= 128) { // Discard upper 32 bits. // Note that this has to use ReadRawByte() as we only ensure we've // got at least 5 bytes at the start of the method. This lets us // use the fast path in more cases, and we rarely hit this section of code. for (int i = 0; i < 5; i++) { if (ReadRawByte() < 128) { return (uint) result; } } throw InvalidProtocolBufferException.MalformedVarint(); } } } } return (uint) result; } /// <summary> /// Reads a varint from the input one byte at a time, so that it does not /// read any bytes after the end of the varint. If you simply wrapped the /// stream in a CodedInputStream and used ReadRawVarint32(Stream) /// then you would probably end up reading past the end of the varint since /// CodedInputStream buffers its input. /// </summary> /// <param name="input"></param> /// <returns></returns> internal static uint ReadRawVarint32(Stream input) { int result = 0; int offset = 0; for (; offset < 32; offset += 7) { int b = input.ReadByte(); if (b == -1) { throw InvalidProtocolBufferException.TruncatedMessage(); } result |= (b & 0x7f) << offset; if ((b & 0x80) == 0) { return (uint) result; } } // Keep reading up to 64 bits. 
// Continuation loop of the static ReadRawVarint32(Stream): consume (and discard the
// upper bits of) varints up to 64 bits long, so over-long but well-formed input is accepted.
for (; offset < 64; offset += 7)
{
    int b = input.ReadByte();
    if (b == -1)
    {
        throw InvalidProtocolBufferException.TruncatedMessage();
    }
    if ((b & 0x80) == 0)
    {
        return (uint) result;
    }
}
throw InvalidProtocolBufferException.MalformedVarint();
}

/// <summary>
/// Reads a raw varint from the stream.
/// </summary>
internal ulong ReadRawVarint64()
{
    int shift = 0;
    ulong result = 0;
    while (shift < 64)
    {
        byte b = ReadRawByte();
        result |= (ulong) (b & 0x7F) << shift;
        if ((b & 0x80) == 0)
        {
            return result;
        }
        shift += 7;
    }
    throw InvalidProtocolBufferException.MalformedVarint();
}

/// <summary>
/// Reads a 32-bit little-endian integer from the stream.
/// </summary>
internal uint ReadRawLittleEndian32()
{
    uint b1 = ReadRawByte();
    uint b2 = ReadRawByte();
    uint b3 = ReadRawByte();
    uint b4 = ReadRawByte();
    return b1 | (b2 << 8) | (b3 << 16) | (b4 << 24);
}

/// <summary>
/// Reads a 64-bit little-endian integer from the stream.
/// </summary>
internal ulong ReadRawLittleEndian64()
{
    ulong b1 = ReadRawByte();
    ulong b2 = ReadRawByte();
    ulong b3 = ReadRawByte();
    ulong b4 = ReadRawByte();
    ulong b5 = ReadRawByte();
    ulong b6 = ReadRawByte();
    ulong b7 = ReadRawByte();
    ulong b8 = ReadRawByte();
    return b1 | (b2 << 8) | (b3 << 16) | (b4 << 24)
           | (b5 << 32) | (b6 << 40) | (b7 << 48) | (b8 << 56);
}

/// <summary>
/// Decode a 32-bit value with ZigZag encoding.
/// </summary>
/// <remarks>
/// ZigZag encodes signed integers into values that can be efficiently
/// encoded with varint. (Otherwise, negative values must be
/// sign-extended to 64 bits to be varint encoded, thus always taking
/// 10 bytes on the wire.)
/// </remarks>
internal static int DecodeZigZag32(uint n)
{
    return (int)(n >> 1) ^ -(int)(n & 1);
}

/// <summary>
/// Decode a 64-bit value with ZigZag encoding.
/// </summary>
/// <remarks>
/// ZigZag encodes signed integers into values that can be efficiently
/// encoded with varint. (Otherwise, negative values must be
/// sign-extended to 64 bits to be varint encoded, thus always taking
/// 10 bytes on the wire.)
/// </remarks>
internal static long DecodeZigZag64(ulong n)
{
    return (long)(n >> 1) ^ -(long)(n & 1);
}
#endregion

#region Internal reading and buffer management
/// <summary>
/// Sets currentLimit to (current position) + byteLimit. This is called
/// when descending into a length-delimited embedded message. The previous
/// limit is returned.
/// </summary>
/// <returns>The old limit.</returns>
internal int PushLimit(int byteLimit)
{
    if (byteLimit < 0)
    {
        throw InvalidProtocolBufferException.NegativeSize();
    }
    byteLimit += totalBytesRetired + bufferPos;
    int oldLimit = currentLimit;
    if (byteLimit > oldLimit)
    {
        throw InvalidProtocolBufferException.TruncatedMessage();
    }
    currentLimit = byteLimit;
    RecomputeBufferSizeAfterLimit();
    return oldLimit;
}

private void RecomputeBufferSizeAfterLimit()
{
    bufferSize += bufferSizeAfterLimit;
    int bufferEnd = totalBytesRetired + bufferSize;
    if (bufferEnd > currentLimit)
    {
        // Limit is in current buffer.
        bufferSizeAfterLimit = bufferEnd - currentLimit;
        bufferSize -= bufferSizeAfterLimit;
    }
    else
    {
        bufferSizeAfterLimit = 0;
    }
}

/// <summary>
/// Discards the current limit, returning the previous limit.
/// </summary>
internal void PopLimit(int oldLimit)
{
    currentLimit = oldLimit;
    RecomputeBufferSizeAfterLimit();
}

/// <summary>
/// Returns whether or not all the data before the limit has been read.
/// </summary>
/// <returns></returns>
internal bool ReachedLimit
{
    get
    {
        if (currentLimit == int.MaxValue)
        {
            return false;
        }
        int currentAbsolutePosition = totalBytesRetired + bufferPos;
        return currentAbsolutePosition >= currentLimit;
    }
}

/// <summary>
/// Returns true if the stream has reached the end of the input. This is the
/// case if either the end of the underlying input source has been reached or
/// the stream has reached a limit created using PushLimit.
/// </summary>
public bool IsAtEnd
{
    get { return bufferPos == bufferSize && !RefillBuffer(false); }
}

/// <summary>
/// Called when buffer is empty to read more bytes from the
/// input. If <paramref name="mustSucceed"/> is true, RefillBuffer() guarantees that
/// either there will be at least one byte in the buffer when it returns
/// or it will throw an exception. If <paramref name="mustSucceed"/> is false,
/// RefillBuffer() returns false if no more bytes were available.
/// </summary>
/// <param name="mustSucceed">Whether an empty refill should throw rather than return false.</param>
/// <returns>true if at least one byte is now available in the buffer; false otherwise
/// (only possible when <paramref name="mustSucceed"/> is false).</returns>
private bool RefillBuffer(bool mustSucceed)
{
    if (bufferPos < bufferSize)
    {
        throw new InvalidOperationException("RefillBuffer() called when buffer wasn't empty.");
    }

    if (totalBytesRetired + bufferSize == currentLimit)
    {
        // Oops, we hit a limit.
        if (mustSucceed)
        {
            throw InvalidProtocolBufferException.TruncatedMessage();
        }
        else
        {
            return false;
        }
    }

    totalBytesRetired += bufferSize;

    bufferPos = 0;
    bufferSize = (input == null) ? 0 : input.Read(buffer, 0, buffer.Length);
    if (bufferSize < 0)
    {
        throw new InvalidOperationException("Stream.Read returned a negative count");
    }
    if (bufferSize == 0)
    {
        if (mustSucceed)
        {
            throw InvalidProtocolBufferException.TruncatedMessage();
        }
        else
        {
            return false;
        }
    }
    else
    {
        RecomputeBufferSizeAfterLimit();
        int totalBytesRead = totalBytesRetired + bufferSize + bufferSizeAfterLimit;
        if (totalBytesRead > sizeLimit || totalBytesRead < 0)
        {
            throw InvalidProtocolBufferException.SizeLimitExceeded();
        }
        return true;
    }
}

/// <summary>
/// Read one byte from the input.
/// </summary>
/// <exception cref="InvalidProtocolBufferException">
/// the end of the stream or the current limit was reached
/// </exception>
internal byte ReadRawByte()
{
    if (bufferPos == bufferSize)
    {
        RefillBuffer(true);
    }
    return buffer[bufferPos++];
}

/// <summary>
/// Reads a fixed size of bytes from the input.
/// </summary> /// <exception cref="InvalidProtocolBufferException"> /// the end of the stream or the current limit was reached /// </exception> internal byte[] ReadRawBytes(int size) { if (size < 0) { throw InvalidProtocolBufferException.NegativeSize(); } if (totalBytesRetired + bufferPos + size > currentLimit) { // Read to the end of the stream (up to the current limit) anyway. SkipRawBytes(currentLimit - totalBytesRetired - bufferPos); // Then fail. throw InvalidProtocolBufferException.TruncatedMessage(); } if (size <= bufferSize - bufferPos) { // We have all the bytes we need already. byte[] bytes = new byte[size]; ByteArray.Copy(buffer, bufferPos, bytes, 0, size); bufferPos += size; return bytes; } else if (size < buffer.Length) { // Reading more bytes than are in the buffer, but not an excessive number // of bytes. We can safely allocate the resulting array ahead of time. // First copy what we have. byte[] bytes = new byte[size]; int pos = bufferSize - bufferPos; ByteArray.Copy(buffer, bufferPos, bytes, 0, pos); bufferPos = bufferSize; // We want to use RefillBuffer() and then copy from the buffer into our // byte array rather than reading directly into our byte array because // the input may be unbuffered. RefillBuffer(true); while (size - pos > bufferSize) { Buffer.BlockCopy(buffer, 0, bytes, pos, bufferSize); pos += bufferSize; bufferPos = bufferSize; RefillBuffer(true); } ByteArray.Copy(buffer, 0, bytes, pos, size - pos); bufferPos = size - pos; return bytes; } else { // The size is very large. For security reasons, we can't allocate the // entire byte array yet. The size comes directly from the input, so a // maliciously-crafted message could provide a bogus very large size in // order to trick the app into allocating a lot of memory. We avoid this // by allocating and reading only a small chunk at a time, so that the // malicious message must actually *be* extremely large to cause // problems. Meanwhile, we limit the allowed size of a message elsewhere. 
// Remember the buffer markers since we'll have to copy the bytes out of // it later. int originalBufferPos = bufferPos; int originalBufferSize = bufferSize; // Mark the current buffer consumed. totalBytesRetired += bufferSize; bufferPos = 0; bufferSize = 0; // Read all the rest of the bytes we need. int sizeLeft = size - (originalBufferSize - originalBufferPos); List<byte[]> chunks = new List<byte[]>(); while (sizeLeft > 0) { byte[] chunk = new byte[Math.Min(sizeLeft, buffer.Length)]; int pos = 0; while (pos < chunk.Length) { int n = (input == null) ? -1 : input.Read(chunk, pos, chunk.Length - pos); if (n <= 0) { throw InvalidProtocolBufferException.TruncatedMessage(); } totalBytesRetired += n; pos += n; } sizeLeft -= chunk.Length; chunks.Add(chunk); } // OK, got everything. Now concatenate it all into one buffer. byte[] bytes = new byte[size]; // Start by copying the leftover bytes from this.buffer. int newPos = originalBufferSize - originalBufferPos; ByteArray.Copy(buffer, originalBufferPos, bytes, 0, newPos); // And now all the chunks. foreach (byte[] chunk in chunks) { Buffer.BlockCopy(chunk, 0, bytes, newPos, chunk.Length); newPos += chunk.Length; } // Done. return bytes; } } /// <summary> /// Reads and discards <paramref name="size"/> bytes. /// </summary> /// <exception cref="InvalidProtocolBufferException">the end of the stream /// or the current limit was reached</exception> private void SkipRawBytes(int size) { if (size < 0) { throw InvalidProtocolBufferException.NegativeSize(); } if (totalBytesRetired + bufferPos + size > currentLimit) { // Read to the end of the stream anyway. SkipRawBytes(currentLimit - totalBytesRetired - bufferPos); // Then fail. throw InvalidProtocolBufferException.TruncatedMessage(); } if (size <= bufferSize - bufferPos) { // We have all the bytes we need already. bufferPos += size; } else { // Skipping more bytes than are in the buffer. First skip what we have. 
int pos = bufferSize - bufferPos; // ROK 5/7/2013 Issue #54: should retire all bytes in buffer (bufferSize) // totalBytesRetired += pos; totalBytesRetired += bufferSize; bufferPos = 0; bufferSize = 0; // Then skip directly from the InputStream for the rest. if (pos < size) { if (input == null) { throw InvalidProtocolBufferException.TruncatedMessage(); } SkipImpl(size - pos); totalBytesRetired += size - pos; } } } /// <summary> /// Abstraction of skipping to cope with streams which can't really skip. /// </summary> private void SkipImpl(int amountToSkip) { if (input.CanSeek) { long previousPosition = input.Position; input.Position += amountToSkip; if (input.Position != previousPosition + amountToSkip) { throw InvalidProtocolBufferException.TruncatedMessage(); } } else { byte[] skipBuffer = new byte[Math.Min(1024, amountToSkip)]; while (amountToSkip > 0) { int bytesRead = input.Read(skipBuffer, 0, Math.Min(skipBuffer.Length, amountToSkip)); if (bytesRead <= 0) { throw InvalidProtocolBufferException.TruncatedMessage(); } amountToSkip -= bytesRead; } } } #endregion } }
axinging/chromium-crosswalk
third_party/protobuf/csharp/src/Google.Protobuf/CodedInputStream.cs
C#
bsd-3-clause
44,033
// Copyright 2018 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef ASH_SERVICES_SECURE_CHANNEL_PUBLIC_CPP_CLIENT_FAKE_CLIENT_CHANNEL_OBSERVER_H_ #define ASH_SERVICES_SECURE_CHANNEL_PUBLIC_CPP_CLIENT_FAKE_CLIENT_CHANNEL_OBSERVER_H_ #include "ash/services/secure_channel/public/cpp/client/client_channel.h" namespace ash::secure_channel { // Test double implementation of ClientChannel::Observer. class FakeClientChannelObserver : public ClientChannel::Observer { public: FakeClientChannelObserver(); FakeClientChannelObserver(const FakeClientChannelObserver&) = delete; FakeClientChannelObserver& operator=(const FakeClientChannelObserver&) = delete; ~FakeClientChannelObserver() override; // ClientChannel::Observer: void OnDisconnected() override; void OnMessageReceived(const std::string& payload) override; bool is_disconnected() const { return is_disconnected_; } const std::vector<std::string>& received_messages() const { return received_messages_; } private: bool is_disconnected_ = false; std::vector<std::string> received_messages_; }; } // namespace ash::secure_channel #endif // ASH_SERVICES_SECURE_CHANNEL_PUBLIC_CPP_CLIENT_FAKE_CLIENT_CHANNEL_OBSERVER_H_
chromium/chromium
ash/services/secure_channel/public/cpp/client/fake_client_channel_observer.h
C
bsd-3-clause
1,325
#-*- coding: ISO-8859-1 -*- # pysqlite2/test/factory.py: tests for the various factories in pysqlite # # Copyright (C) 2005-2007 Gerhard Häring <gh@ghaering.de> # # This file is part of pysqlite. # # This software is provided 'as-is', without any express or implied # warranty. In no event will the authors be held liable for any damages # arising from the use of this software. # # Permission is granted to anyone to use this software for any purpose, # including commercial applications, and to alter it and redistribute it # freely, subject to the following restrictions: # # 1. The origin of this software must not be misrepresented; you must not # claim that you wrote the original software. If you use this software # in a product, an acknowledgment in the product documentation would be # appreciated but is not required. # 2. Altered source versions must be plainly marked as such, and must not be # misrepresented as being the original software. # 3. This notice may not be removed or altered from any source distribution. 
import unittest import sqlite3 as sqlite class MyConnection(sqlite.Connection): def __init__(self, *args, **kwargs): sqlite.Connection.__init__(self, *args, **kwargs) def dict_factory(cursor, row): d = {} for idx, col in enumerate(cursor.description): d[col[0]] = row[idx] return d class MyCursor(sqlite.Cursor): def __init__(self, *args, **kwargs): sqlite.Cursor.__init__(self, *args, **kwargs) self.row_factory = dict_factory class ConnectionFactoryTests(unittest.TestCase): def setUp(self): self.con = sqlite.connect(":memory:", factory=MyConnection) def tearDown(self): self.con.close() def CheckIsInstance(self): self.assertTrue(isinstance(self.con, MyConnection), "connection is not instance of MyConnection") class CursorFactoryTests(unittest.TestCase): def setUp(self): self.con = sqlite.connect(":memory:") def tearDown(self): self.con.close() def CheckIsInstance(self): cur = self.con.cursor(factory=MyCursor) self.assertTrue(isinstance(cur, MyCursor), "cursor is not instance of MyCursor") class RowFactoryTestsBackwardsCompat(unittest.TestCase): def setUp(self): self.con = sqlite.connect(":memory:") def CheckIsProducedByFactory(self): cur = self.con.cursor(factory=MyCursor) cur.execute("select 4+5 as foo") row = cur.fetchone() self.assertTrue(isinstance(row, dict), "row is not instance of dict") cur.close() def tearDown(self): self.con.close() class RowFactoryTests(unittest.TestCase): def setUp(self): self.con = sqlite.connect(":memory:") def CheckCustomFactory(self): self.con.row_factory = lambda cur, row: list(row) row = self.con.execute("select 1, 2").fetchone() self.assertTrue(isinstance(row, list), "row is not instance of list") def CheckSqliteRowIndex(self): self.con.row_factory = sqlite.Row row = self.con.execute("select 1 as a, 2 as b").fetchone() self.assertTrue(isinstance(row, sqlite.Row), "row is not instance of sqlite.Row") col1, col2 = row["a"], row["b"] self.assertTrue(col1 == 1, "by name: wrong result for column 'a'") self.assertTrue(col2 == 2, "by name: 
wrong result for column 'a'") col1, col2 = row["A"], row["B"] self.assertTrue(col1 == 1, "by name: wrong result for column 'A'") self.assertTrue(col2 == 2, "by name: wrong result for column 'B'") col1, col2 = row[0], row[1] self.assertTrue(col1 == 1, "by index: wrong result for column 0") self.assertTrue(col2 == 2, "by index: wrong result for column 1") def CheckSqliteRowIter(self): """Checks if the row object is iterable""" self.con.row_factory = sqlite.Row row = self.con.execute("select 1 as a, 2 as b").fetchone() for col in row: pass def CheckSqliteRowAsTuple(self): """Checks if the row object can be converted to a tuple""" self.con.row_factory = sqlite.Row row = self.con.execute("select 1 as a, 2 as b").fetchone() t = tuple(row) def CheckSqliteRowAsDict(self): """Checks if the row object can be correctly converted to a dictionary""" self.con.row_factory = sqlite.Row row = self.con.execute("select 1 as a, 2 as b").fetchone() d = dict(row) self.assertEqual(d["a"], row["a"]) self.assertEqual(d["b"], row["b"]) def CheckSqliteRowHashCmp(self): """Checks if the row object compares and hashes correctly""" self.con.row_factory = sqlite.Row row_1 = self.con.execute("select 1 as a, 2 as b").fetchone() row_2 = self.con.execute("select 1 as a, 2 as b").fetchone() row_3 = self.con.execute("select 1 as a, 3 as b").fetchone() self.assertTrue(row_1 == row_1) self.assertTrue(row_1 == row_2) self.assertTrue(row_2 != row_3) self.assertFalse(row_1 != row_1) self.assertFalse(row_1 != row_2) self.assertFalse(row_2 == row_3) self.assertEqual(row_1, row_2) self.assertEqual(hash(row_1), hash(row_2)) self.assertNotEqual(row_1, row_3) self.assertNotEqual(hash(row_1), hash(row_3)) def tearDown(self): self.con.close() class TextFactoryTests(unittest.TestCase): def setUp(self): self.con = sqlite.connect(":memory:") def CheckUnicode(self): austria = "Österreich" row = self.con.execute("select ?", (austria,)).fetchone() self.assertTrue(type(row[0]) == str, "type of row[0] must be unicode") 
def CheckString(self): self.con.text_factory = bytes austria = "Österreich" row = self.con.execute("select ?", (austria,)).fetchone() self.assertTrue(type(row[0]) == bytes, "type of row[0] must be bytes") self.assertTrue(row[0] == austria.encode("utf-8"), "column must equal original data in UTF-8") def CheckCustom(self): self.con.text_factory = lambda x: str(x, "utf-8", "ignore") austria = "Österreich" row = self.con.execute("select ?", (austria,)).fetchone() self.assertTrue(type(row[0]) == str, "type of row[0] must be unicode") self.assertTrue(row[0].endswith("reich"), "column must contain original data") def CheckOptimizedUnicode(self): self.con.text_factory = sqlite.OptimizedUnicode austria = "Österreich" germany = "Deutchland" a_row = self.con.execute("select ?", (austria,)).fetchone() d_row = self.con.execute("select ?", (germany,)).fetchone() self.assertTrue(type(a_row[0]) == str, "type of non-ASCII row must be str") self.assertTrue(type(d_row[0]) == str, "type of ASCII-only row must be str") def tearDown(self): self.con.close() class TextFactoryTestsWithEmbeddedZeroBytes(unittest.TestCase): def setUp(self): self.con = sqlite.connect(":memory:") self.con.execute("create table test (value text)") self.con.execute("insert into test (value) values (?)", ("a\x00b",)) def CheckString(self): # text_factory defaults to str row = self.con.execute("select value from test").fetchone() self.assertIs(type(row[0]), str) self.assertEqual(row[0], "a\x00b") def CheckBytes(self): self.con.text_factory = bytes row = self.con.execute("select value from test").fetchone() self.assertIs(type(row[0]), bytes) self.assertEqual(row[0], b"a\x00b") def CheckBytearray(self): self.con.text_factory = bytearray row = self.con.execute("select value from test").fetchone() self.assertIs(type(row[0]), bytearray) self.assertEqual(row[0], b"a\x00b") def CheckCustom(self): # A custom factory should receive a bytes argument self.con.text_factory = lambda x: x row = self.con.execute("select value 
from test").fetchone() self.assertIs(type(row[0]), bytes) self.assertEqual(row[0], b"a\x00b") def tearDown(self): self.con.close() def suite(): connection_suite = unittest.makeSuite(ConnectionFactoryTests, "Check") cursor_suite = unittest.makeSuite(CursorFactoryTests, "Check") row_suite_compat = unittest.makeSuite(RowFactoryTestsBackwardsCompat, "Check") row_suite = unittest.makeSuite(RowFactoryTests, "Check") text_suite = unittest.makeSuite(TextFactoryTests, "Check") text_zero_bytes_suite = unittest.makeSuite(TextFactoryTestsWithEmbeddedZeroBytes, "Check") return unittest.TestSuite((connection_suite, cursor_suite, row_suite_compat, row_suite, text_suite, text_zero_bytes_suite)) def test(): runner = unittest.TextTestRunner() runner.run(suite()) if __name__ == "__main__": test()
wdv4758h/ZipPy
lib-python/3/sqlite3/test/factory.py
Python
bsd-3-clause
9,209
// // Copyright (C) 2020-2021 Brian P Kelley, Joann Prescott-Roy and other RDKit // contributors // // @@ All Rights Reserved @@ // This file is part of the RDKit. // The contents are covered by the terms of the BSD license // which is included in the file license.txt, found at the root // of the RDKit source tree. // #ifndef RDK_DEPROTECT_LIBRARY #define RDK_DEPROTECT_LIBRARY #include <RDGeneral/export.h> #include <GraphMol/RDKitBase.h> #include <GraphMol/ChemReactions/Reaction.h> #include <string> #include <memory> namespace RDKit { namespace Deprotect { /*! Data for Deprotecting molecules Deprotects are described as reactions that remove the protecting group and leave behind the group being protected. Each DeprotectData has the following attributes: - <b>deprotection_class</b> functional group being protected (i.e. amine, alcohol, ...) - <b>reaction_smarts</b> the reaction smarts pattern for removing the protecting group - <b>abbreviation</b> common abbreviation for the protecting group (Boc, Fmoc) - <b>full_name</b> full name for the protecting group - <b> rxn </b> the reaction itself. */ struct RDKIT_DEPROTECT_EXPORT DeprotectData { std::string deprotection_class; std::string reaction_smarts; std::string abbreviation; std::string full_name; std::string example; std::shared_ptr<ChemicalReaction> rxn; // so much easier than unique_ptr, sigh... DeprotectData(std::string deprotection_class, const std::string &reaction_smarts, std::string abbreviation, std::string full_name, std::string example = ""); bool operator==(const DeprotectData &other) const { return (deprotection_class == other.deprotection_class && full_name == other.full_name && abbreviation == other.abbreviation && reaction_smarts == other.reaction_smarts && isValid() == other.isValid()); } bool operator!=(const DeprotectData &other) const { return !(*this == other); } //! 
Returns true if the deprotection is valid bool isValid() const { return rxn.get() != nullptr && rxn->getNumProductTemplates() == 1; } }; //! Retrieves the built in list of common deprotections RDKIT_DEPROTECT_EXPORT const std::vector<DeprotectData> &getDeprotections(); //! Deprotect a molecule /*! The resulting molecule is annotated with the deprotections used (property DEPROTECTIONS) and the number of deprotections applied (property DEPROTECTIION_COUNT) \param mol the molecule to deprotect \param deprotections - a vector of deprotections to use, defaults to the built in deprotections. \return The deprotected form of the input molecule */ RDKIT_DEPROTECT_EXPORT std::unique_ptr<ROMol> deprotect( const ROMol &mol, const std::vector<DeprotectData> &deprotections = getDeprotections()); //! Deprotect a molecule in place /*! The molecule is annotated with the deprotections used (property DEPROTECTIONS) and the number of deprotections applied (property DEPROTECTIION_COUNT) \param mol the molecule to deprotect \param deprotections - a vector of deprotections to use, defaults to the built in deprotections. \return whether or not the molecule was changed */ RDKIT_DEPROTECT_EXPORT bool deprotectInPlace( RWMol &mol, const std::vector<DeprotectData> &deprotections = getDeprotections()); } // namespace Deprotect } // namespace RDKit #endif
bp-kelley/rdkit
Code/GraphMol/Deprotect/Deprotect.h
C
bsd-3-clause
3,471
<style> #topicSourceTitle { color: var(--cros-text-color-primary); font: var(--personalization-app-label-font); margin-bottom: 16px; margin-top: 34px; } topic-source-item { align-items: center; height: 64px; } iron-list > *:not(:first-of-type) { border-top: var(--cr-separator-line); } iron-list > :focus { background-color: var(--cr-focused-item-color); } </style> <h3 id="topicSourceTitle" aria-hidden="true"> $i18n{ambientModeTopicSourceTitle} </h3> <iron-list id="topicSourceList" items="[[topicSources]]"> <template> <topic-source-item topic-source="[[item]]" disabled$="[[disabled]]" tabindex$="[[computeTabIndex_(tabIndex, disabled)]]" has-google-photos-albums="[[hasGooglePhotosAlbums]]" checked="[[isSelected_(item, selectedTopicSource)]]"> </topic-source-item> </template> </iron-list>
chromium/chromium
ash/webui/personalization_app/resources/trusted/ambient/topic_source_list_element.html
HTML
bsd-3-clause
895
// Copyright 2018 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "ash/services/secure_channel/fake_connection_attempt_delegate.h" #include "ash/services/secure_channel/authenticated_channel.h" #include "base/check.h" namespace ash::secure_channel { FakeConnectionAttemptDelegate::FakeConnectionAttemptDelegate() = default; FakeConnectionAttemptDelegate::~FakeConnectionAttemptDelegate() = default; void FakeConnectionAttemptDelegate::OnConnectionAttemptSucceeded( const ConnectionDetails& connection_details, std::unique_ptr<AuthenticatedChannel> authenticated_channel) { DCHECK(!connection_details_); connection_details_ = connection_details; authenticated_channel_ = std::move(authenticated_channel); } void FakeConnectionAttemptDelegate:: OnConnectionAttemptFinishedWithoutConnection( const ConnectionAttemptDetails& connection_attempt_details) { DCHECK(!connection_attempt_details_); connection_attempt_details_ = connection_attempt_details; } } // namespace ash::secure_channel
chromium/chromium
ash/services/secure_channel/fake_connection_attempt_delegate.cc
C++
bsd-3-clause
1,131
//===- subzero/src/IceBuildDefs.h - Translator build defines ----*- C++ -*-===// // // The Subzero Code Generator // // This file is distributed under the University of Illinois Open Source // License. See LICENSE.TXT for details. // //===----------------------------------------------------------------------===// /// /// \file /// \brief Define the Ice::BuildDefs namespace //===----------------------------------------------------------------------===// #ifndef SUBZERO_SRC_ICEBUILDDEFS_H #define SUBZERO_SRC_ICEBUILDDEFS_H namespace Ice { /// \brief Defines constexpr functions that express various Subzero build /// system defined values. /// /// These resulting constexpr functions allow code to in effect be /// conditionally compiled without having to do this using the older C++ /// preprocessor solution. /** \verbatim For example whenever the value of FEATURE_SUPPORTED is needed, instead of (except in these constexpr functions): #if FEATURE_SUPPORTED ... ... #endif We can have: namespace Ice { namespace BuildDefs { // Use this form when FEATURE_SUPPORTED is guaranteed to be defined on the // C++ compiler command line as 0 or 1. constexpr bool hasFeature() { return FEATURE_SUPPORTED; } or // Use this form when FEATURE_SUPPORTED may not necessarily be defined on // the C++ compiler command line. constexpr bool hasFeature() { #if FEATURE_SUPPORTED return true; #else // !FEATURE_SUPPORTED return false; #endif // !FEATURE_SUPPORTED } ...} // end of namespace BuildDefs } // end of namespace Ice And later in the code: if (Ice::BuildDefs::hasFeature() { ... } \endverbatim Since hasFeature() returns a constexpr, an optimizing compiler will know to keep or discard the above fragment. In addition, the code will always be looked at by the compiler which eliminates the problem with defines in that if you don't build that variant, you don't even know if the code would compile unless you build with that variant. **/ namespace BuildDefs { // The ALLOW_* etc. 
symbols must be #defined to zero or non-zero. /// Return true if ALLOW_DUMP is defined as a non-zero value constexpr bool dump() { return ALLOW_DUMP; } /// Return true if ALLOW_TIMERS is defined as a non-zero value constexpr bool timers() { return ALLOW_TIMERS; } /// Return true if ALLOW_LLVM_CL is defined as a non-zero value // TODO(stichnot): this ALLOW_LLVM_CL is a TBD option which will // allow for replacement of llvm:cl command line processor with a // smaller footprint version for Subzero. constexpr bool llvmCl() { return ALLOW_LLVM_CL; } /// Return true if ALLOW_LLVM_IR is defined as a non-zero value constexpr bool llvmIr() { return ALLOW_LLVM_IR; } /// Return true if ALLOW_LLVM_IR_AS_INPUT is defined as a non-zero value constexpr bool llvmIrAsInput() { return ALLOW_LLVM_IR_AS_INPUT; } /// Return true if ALLOW_MINIMAL_BUILD is defined as a non-zero value constexpr bool minimal() { return ALLOW_MINIMAL_BUILD; } /// Return true if ALLOW_WASM is defined as a non-zero value constexpr bool wasm() { return ALLOW_WASM; } /// Return true if NDEBUG is defined constexpr bool asserts() { #ifdef NDEBUG return false; #else // !NDEBUG return true; #endif // !NDEBUG } /// Return true if PNACL_BROWSER_TRANSLATOR is defined constexpr bool browser() { #if PNACL_BROWSER_TRANSLATOR return true; #else // !PNACL_BROWSER_TRANSLATOR return false; #endif // !PNACL_BROWSER_TRANSLATOR } /// Return true if ALLOW_EXTRA_VALIDATION is defined constexpr bool extraValidation() { #if ALLOW_EXTRA_VALIDATION return true; #else // !ALLOW_EXTRA_VALIDATION return false; #endif // !ALLOW_EXTRA_VALIDATION } } // end of namespace BuildDefs } // end of namespace Ice #endif // SUBZERO_SRC_ICEBUILDDEFS_H
endlessm/chromium-browser
third_party/swiftshader/third_party/subzero/src/IceBuildDefs.h
C
bsd-3-clause
3,776
Travailler avec des bases de données ====================== Cette section décrit comment créer une nouvelle page qui affiche des données pays récupérées dans une table de base de données nommée `country`. Pour ce faire, vous allez configurer une connexion à une base de données, créer une classe [Active Record](db-active-record.md), définir une [action](structure-controllers.md), et créer une [vue](structure-views.md). Au long de ce tutoriel, vous apprendrez comment : * Configurer une connexion à une base de données * Définir une classe Active Record * Demander des données en utilisant la classe Active Record (enregistrement actif) * Afficher des données dans une vue paginée Notez que, pour finir cette section, vous aurez besoin d'avoir une connaissance basique des bases de données. En particulier, vous devez savoir créer une base de données et exécuter des déclarations SQL en utilisant un client de gestion de bases de données. Préparer la Base de Données <span id="preparing-database"></span> -------------------- Pour commencer, créez une base de données appelée `yii2basic`, depuis laquelle vous irez chercher les données dans votre application. Vous pouvez créer une base de données SQLite, MySQL, PostgreSQL, MSSQL ou Oracle, car Yii gère nativement de nombreuses applications de base de données. Pour simplifier, nous supposerons que vous utilisez MySQL dans les descriptions qui suivent. >Note : bien que MariaDB a été un remplacement direct de MySQL, cela n'est plus complètement vrai. Dans le cas où vous auriez besoin de fonctionnalités avancées telles que la prise en charge de `JSON`, jetez un coup d'œil à la liste des extensions de MariaDB ci-dessous. Créez maintenant une table nommée `country` dans la base de données et insérez-y quelques données exemples. 
Vous pouvez exécuter l'instruction SQL suivante pour le faire : ```sql CREATE TABLE `country` ( `code` CHAR(2) NOT NULL PRIMARY KEY, `name` CHAR(52) NOT NULL, `population` INT(11) NOT NULL DEFAULT '0' ) ENGINE=InnoDB DEFAULT CHARSET=utf8; INSERT INTO `country` VALUES ('AU','Australia',24016400); INSERT INTO `country` VALUES ('BR','Brazil',205722000); INSERT INTO `country` VALUES ('CA','Canada',35985751); INSERT INTO `country` VALUES ('CN','China',1375210000); INSERT INTO `country` VALUES ('DE','Germany',81459000); INSERT INTO `country` VALUES ('FR','France',64513242); INSERT INTO `country` VALUES ('GB','United Kingdom',65097000); INSERT INTO `country` VALUES ('IN','India',1285400000); INSERT INTO `country` VALUES ('RU','Russia',146519759); INSERT INTO `country` VALUES ('US','United States',322976000); ``` Vous avez désormais une base de données appelée `yii2basic` comprenant une table `country` comportant trois colonnes et contenant dix lignes de données. Configurer une Connexion à la BDD <span id="configuring-db-connection"></span> --------------------------- Avant de continuer, vérifiez que vous avez installé à la fois l'extension PHP [PDO](https://www.php.net/manual/fr/book.pdo.php) et le pilote PDO pour la base de données que vous utilisez (c'est à dire `pdo_mysql` pour MySQL). C'est une exigence de base si votre application utilise une base de données relationnelle. Une fois ces éléments installés, ouvrez le fichier `config/db.php` et modifiez les paramètres pour qu'ils correspondent à votre base de données. Par défaut, le fichier contient ce qui suit : ```php <?php return [ 'class' => 'yii\db\Connection', 'dsn' => 'mysql:host=localhost;dbname=yii2basic', 'username' => 'root', 'password' => '', 'charset' => 'utf8', ]; ``` Le fichier `config/db.php` est un exemple type d'outil de [configuration](concept-configurations.md) basé sur un fichier. 
Ce fichier de configuration en particulier spécifie les paramètres nécessaires à la création et l'initialisation d'une instance de [[yii\db\Connection]] grâce à laquelle vous pouvez effectuer des requêtes SQL dans la base de données sous-jacente. On peut accéder à connexion à la BDD configurée ci-dessus depuis le code de l'application via l'expression `Yii::$app->db`. > Info: le fichier `config/db.php` sera inclus par la configuration principale de l'application `config/web.php`, qui spécifie comment l'instance d'[application](structure-applications.md) doit être initialisée. Pour plus d'informations, reportez-vous à la section [Configurations](concept-configurations.md). Si vous avez besoin de fonctionnalités de base de données dont la prise en charge n'est pas comprise dans Yii, examinez les extensions suivantes: - [Informix](https://github.com/edgardmessias/yii2-informix) - [IBM DB2](https://github.com/edgardmessias/yii2-ibm-db2) - [Firebird](https://github.com/edgardmessias/yii2-firebird) - [MariaDB](https://github.com/sam-it/yii2-mariadb) Créer un Active Record <span id="creating-active-record"></span> ------------------------- Pour représenter et aller chercher des données dans la table `country`, créez une classe dérivée d'[Active Record](db-active-record.md) appelée `Country`, et enregistrez-la dans le fichier `models/Country.php`. ```php <?php namespace app\models; use yii\db\ActiveRecord; class Country extends ActiveRecord { } ``` La classe `Country` étend [[yii\db\ActiveRecord]]. Vous n'avez pas besoin d'y écrire le moindre code ! Simplement, avec le code ci-dessus, Yii devine le nom de la table associée au nom de la classe. > Info: si aucune correspondance directe ne peut être faite à partir du nom de la classe, vous pouvez outrepasser la méthode [[yii\db\ActiveRecord::tableName()]] pour spécifier explicitement un nom de table. 
A l'aide de la classe `Country`, vous pouvez facilement manipuler les données de la table `country`, comme dans les bribes suivantes : ```php use app\models\Country; // chercher toutes les lignes de la table pays et les trier par "name" $countries = Country::find()->orderBy('name')->all(); // chercher la ligne dont la clef primaire est "US" $country = Country::findOne('US'); // afficher "United States" echo $country->name; // remplace le nom du pays par "U.S.A." et le sauvegarde dans la base de données $country->name = 'U.S.A.'; $country->save(); ``` > Info: Active Record (enregistrement actif) est un moyen puissant pour accéder et manipuler des données d'une base de manière orientée objet. Vous pouvez trouver plus d'informations dans la section [Active Record](db-active-record.md). Sinon, vous pouvez également interagir avec une base de données en utilisant une méthode de plus bas niveau d'accès aux données appelée [Database Access Objects](db-dao.md). Créer une Action <span id="creating-action"></span> ------------------ Pour exposer les données pays aux utilisateurs, vous devez créer une action. Plutôt que de placer la nouvelle action dans le contrôleur `site`, comme vous l'avez fait dans les sections précédentes, il est plus cohérent de créer un nouveau contrôleur spécifique à toutes les actions liées aux données pays. Nommez ce contrôleur `CountryController`, et créez-y une action `index`, comme suit. 
```php <?php namespace app\controllers; use yii\web\Controller; use yii\data\Pagination; use app\models\Country; class CountryController extends Controller { public function actionIndex() { $query = Country::find(); $pagination = new Pagination([ 'defaultPageSize' => 5, 'totalCount' => $query->count(), ]); $countries = $query->orderBy('name') ->offset($pagination->offset) ->limit($pagination->limit) ->all(); return $this->render('index', [ 'countries' => $countries, 'pagination' => $pagination, ]); } } ``` Enregistrez le code ci-dessus dans le fichier `controllers/CountryController.php`. L'action `index` appelle `Country::find()`. Cette méthode Active Record construit une requête de BDD et récupère toutes les données de la table `country`. Pour limiter le nombre de pays retournés par chaque requête, la requête est paginée à l'aide d'un objet [[yii\data\Pagination]]. L'objet `Pagination` dessert deux buts : * Il ajuste les clauses `offset` et `limit` de la déclaration SQL représentée par la requête afin qu'elle en retourne qu'une page de données à la fois (au plus 5 colonnes par page). * Il est utilisé dans la vue pour afficher un sélecteur de pages qui consiste en une liste de boutons de page, comme nous l'expliquerons dans la prochaine sous-section. A la fin du code, l'action `index` effectue le rendu d'une vue nommée `index`, et lui transmet les données pays ainsi que les informations de pagination. Créer une Vue <span id="creating-view"></span> --------------- Dans le dossier `views`, commencez par créer un sous-dossier nommé `country`. Ce dossier sera utilisé pour contenir toutes les vues rendues par le contrôleur `country`. 
Dans le dossier `views/country`, créez un fichier nommé `index.php` contenant ce qui suit : ```php <?php use yii\helpers\Html; use yii\widgets\LinkPager; ?> <h1>Countries</h1> <ul> <?php foreach ($countries as $country): ?> <li> <?= Html::encode("{$country->code} ({$country->name})") ?>: <?= $country->population ?> </li> <?php endforeach; ?> </ul> <?= LinkPager::widget(['pagination' => $pagination]) ?> ``` La vue comprend deux sections relatives à l'affichage des données pays. Dans la première partie, les données pays fournies sont parcourues et rendues sous forme de liste non ordonnée HTML. Dans la deuxième partie, un objet graphique [[yii\widgets\LinkPager]] est rendu en utilisant les informations de pagination transmises par l'action. L'objet graphique `LinkPager` affiche une liste de boutons de page. Le fait de cliquer sur l'un deux rafraichit les données pays dans la page correspondante. Essayer <span id="trying-it-out"></span> ------------- Pour voir comment tout le code ci-dessus fonctionne, pointez votre navigateur sur l'URL suivante : ``` http://hostname/index.php?r=country/index ``` ![Liste de Pays](images/start-country-list.png) Au début, vous verrez une page affichant cinq pays. En dessous des pays, vous verrez un sélecteur de pages avec quatre boutons. Si vous cliquez sur le bouton "2", vous verrez la page afficher cinq autres pays de la base de données : la deuxième page d'enregistrements. Observez plus attentivement et vous noterez que l'URL dans le navigateur devient ``` http://hostname/index.php?r=country/index&page=2 ``` En coulisse, [[yii\data\Pagination|Pagination]] fournit toutes les fonctionnalités permettant de paginer un ensemble de données : * Au départ, [[yii\data\Pagination|Pagination]] représente la première page, qui reflète la requête SELECT de country avec la clause `LIMIT 5 OFFSET 0`. Il en résulte que les cinq premiers pays seront trouvés et affichés. 
* L'objet graphique [[yii\widgets\LinkPager|LinkPager]] effectue le rendu des boutons de pages en utilisant les URL créées par [[yii\data\Pagination::createUrl()|Pagination]]. Les URL contiendront le paramètre de requête `page`, qui représente les différents numéros de pages. * Si vous cliquez sur le bouton de page "2", une nouvelle requête pour la route `country/index` sera déclenchée et traitée. [[yii\data\Pagination|Pagination]] lit le paramètre de requête `page` dans l'URL et met le numéro de page à 2. La nouvelle requête de pays aura donc la clause `LIMIT 5 OFFSET 5` et retournera les cinq pays suivants pour être affichés. Résumé <span id="summary"></span> ------- Dans cette section, vous avez appris comment travailler avec une base de données. Vous avez également appris comment chercher et afficher des données dans des pages avec l'aide de [[yii\data\Pagination]] et de [[yii\widgets\LinkPager]]. Dans la prochaine section, vous apprendrez comment utiliser le puissant outil de génération de code, appelé [Gii](https://www.yiiframework.com/extension/yiisoft/yii2-gii/doc/guide), pour vous aider à rapidement mettre en œuvre des fonctionnalités communément requises, telles que les opérations Créer, Lire, Mettre à Jour et Supprimer (CRUD : Create-Read-Update-Delete) pour travailler avec les données dans une table de base de données. En fait, le code que vous venez d'écrire peut être généré automatiquement dans Yii en utilisant l'outil Gii.
mikehaertl/yii2
docs/guide-fr/start-databases.md
Markdown
bsd-3-clause
12,527
<?php /** * File containing the ezcWorkflowTestExecution class. * * @package Workflow * @version 1.4.1 * @copyright Copyright (C) 2005-2010 eZ Systems AS. All rights reserved. * @license http://ez.no/licenses/new_bsd New BSD License */ /** * Workflow execution engine for testing workflows. * * @package Workflow * @subpackage Tests * @version 1.4.1 */ class ezcWorkflowTestExecution extends ezcWorkflowExecutionNonInteractive { /** * Execution ID. * * @var integer */ protected $id = 0; /** * @var array */ protected $inputVariables = array(); /** * @var array */ protected $inputVariablesForSubWorkflow = array(); /** * Property write access. * * @param string $propertyName Name of the property. * @param mixed $val The value for the property. * * @throws ezcBaseValueException * If a the value for the property definitionStorage is not an * instance of ezcWorkflowDefinitionStorage. * @throws ezcBaseValueException * If a the value for the property workflow is not an instance of * ezcWorkflow. * @ignore */ public function __set( $propertyName, $val ) { if ( $propertyName == 'workflow' ) { if ( !( $val instanceof ezcWorkflow ) ) { throw new ezcBaseValueException( $propertyName, $val, 'ezcWorkflow' ); } $this->properties['workflow'] = $val; return; } else { return parent::__set( $propertyName, $val ); } } /** * Sets an input variable. * * @param string $name * @param mixed $value */ public function setInputVariable( $name, $value ) { $this->inputVariables[$name] = $value; } /** * Sets an input variable for a sub workflow. * * @param string $name * @param mixed $value */ public function setInputVariableForSubWorkflow( $name, $value ) { $this->inputVariablesForSubWorkflow[$name] = $value; } /** * Suspend workflow execution. 
*/ public function suspend() { parent::suspend(); PHPUnit_Framework_Assert::assertFalse( $this->hasEnded() ); PHPUnit_Framework_Assert::assertFalse( $this->isResumed() ); PHPUnit_Framework_Assert::assertTrue( $this->isSuspended() ); $inputData = array(); $waitingFor = $this->getWaitingFor(); foreach ( $this->inputVariables as $name => $value ) { if ( isset( $waitingFor[$name] ) ) { $inputData[$name] = $value; } } if ( empty( $inputData ) ) { throw new ezcWorkflowExecutionException( 'Workflow is waiting for input data that has not been mocked.' ); } $this->resume( $inputData ); } /** * Returns a new execution object for a sub workflow. * * @param int $id * @return ezcWorkflowExecution */ protected function doGetSubExecution( $id = NULL ) { parent::doGetSubExecution( $id ); $execution = new ezcWorkflowTestExecution( $id ); foreach ( $this->inputVariablesForSubWorkflow as $name => $value ) { $execution->setInputVariable( $name, $value ); } if ( $id !== NULL ) { $execution->resume(); } return $execution; } } ?>
evp/ezcomponents
Workflow/tests/execution.php
PHP
bsd-3-clause
3,572
// Copyright (C) 2011 Davis E. King (davis@dlib.net) // License: Boost Software License See LICENSE.txt for the full license. #ifndef DLIB_CROSS_VALIDATE_OBJECT_DETECTION_TRaINER_H__ #define DLIB_CROSS_VALIDATE_OBJECT_DETECTION_TRaINER_H__ #include "cross_validate_object_detection_trainer_abstract.h" #include <vector> #include "../matrix.h" #include "svm.h" #include "../geometry.h" #include "../image_processing/full_object_detection.h" #include "../image_processing/box_overlap_testing.h" #include "../statistics.h" namespace dlib { // ---------------------------------------------------------------------------------------- namespace impl { inline unsigned long number_of_truth_hits ( const std::vector<full_object_detection>& truth_boxes, const std::vector<rectangle>& ignore, const std::vector<std::pair<double,rectangle> >& boxes, const test_box_overlap& overlap_tester, std::vector<std::pair<double,bool> >& all_dets, unsigned long& missing_detections ) /*! ensures - returns the number of elements in truth_boxes which are overlapped by an element of boxes. In this context, two boxes, A and B, overlap if and only if overlap_tester(A,B) == true. - No element of boxes is allowed to account for more than one element of truth_boxes. - The returned number is in the range [0,truth_boxes.size()] - Adds the score for each box from boxes into all_dets and labels each with a bool indicating if it hit a truth box. Note that we skip boxes that don't hit any truth boxes and match an ignore box. - Adds the number of truth boxes which didn't have any hits into missing_detections. 
!*/ { if (boxes.size() == 0) { missing_detections += truth_boxes.size(); return 0; } unsigned long count = 0; std::vector<bool> used(boxes.size(),false); for (unsigned long i = 0; i < truth_boxes.size(); ++i) { bool found_match = false; // Find the first box that hits truth_boxes[i] for (unsigned long j = 0; j < boxes.size(); ++j) { if (used[j]) continue; if (overlap_tester(truth_boxes[i].get_rect(), boxes[j].second)) { used[j] = true; ++count; found_match = true; break; } } if (!found_match) ++missing_detections; } for (unsigned long i = 0; i < boxes.size(); ++i) { // only out put boxes if they match a truth box or are not ignored. if (used[i] || !overlaps_any_box(overlap_tester, ignore, boxes[i].second)) { all_dets.push_back(std::make_pair(boxes[i].first, used[i])); } } return count; } // ------------------------------------------------------------------------------------ } // ---------------------------------------------------------------------------------------- template < typename object_detector_type, typename image_array_type > const matrix<double,1,3> test_object_detection_function ( object_detector_type& detector, const image_array_type& images, const std::vector<std::vector<full_object_detection> >& truth_dets, const std::vector<std::vector<rectangle> >& ignore, const test_box_overlap& overlap_tester = test_box_overlap(), const double adjust_threshold = 0 ) { // make sure requires clause is not broken DLIB_CASSERT( is_learning_problem(images,truth_dets) == true && ignore.size() == images.size(), "\t matrix test_object_detection_function()" << "\n\t invalid inputs were given to this function" << "\n\t is_learning_problem(images,truth_dets): " << is_learning_problem(images,truth_dets) << "\n\t ignore.size(): " << ignore.size() << "\n\t images.size(): " << images.size() ); double correct_hits = 0; double total_true_targets = 0; std::vector<std::pair<double,bool> > all_dets; unsigned long missing_detections = 0; for (unsigned long i = 0; i < images.size(); 
++i) { std::vector<std::pair<double,rectangle> > hits; detector(images[i], hits, adjust_threshold); correct_hits += impl::number_of_truth_hits(truth_dets[i], ignore[i], hits, overlap_tester, all_dets, missing_detections); total_true_targets += truth_dets[i].size(); } std::sort(all_dets.rbegin(), all_dets.rend()); double precision, recall; double total_hits = all_dets.size(); if (total_hits == 0) precision = 1; else precision = correct_hits / total_hits; if (total_true_targets == 0) recall = 1; else recall = correct_hits / total_true_targets; matrix<double, 1, 3> res; res = precision, recall, average_precision(all_dets, missing_detections); return res; } template < typename object_detector_type, typename image_array_type > const matrix<double,1,3> test_object_detection_function ( object_detector_type& detector, const image_array_type& images, const std::vector<std::vector<rectangle> >& truth_dets, const std::vector<std::vector<rectangle> >& ignore, const test_box_overlap& overlap_tester = test_box_overlap(), const double adjust_threshold = 0 ) { // convert into a list of regular rectangles. 
std::vector<std::vector<full_object_detection> > rects(truth_dets.size()); for (unsigned long i = 0; i < truth_dets.size(); ++i) { for (unsigned long j = 0; j < truth_dets[i].size(); ++j) { rects[i].push_back(full_object_detection(truth_dets[i][j])); } } return test_object_detection_function(detector, images, rects, ignore, overlap_tester, adjust_threshold); } template < typename object_detector_type, typename image_array_type > const matrix<double,1,3> test_object_detection_function ( object_detector_type& detector, const image_array_type& images, const std::vector<std::vector<rectangle> >& truth_dets, const test_box_overlap& overlap_tester = test_box_overlap(), const double adjust_threshold = 0 ) { std::vector<std::vector<rectangle> > ignore(images.size()); return test_object_detection_function(detector,images,truth_dets,ignore, overlap_tester, adjust_threshold); } template < typename object_detector_type, typename image_array_type > const matrix<double,1,3> test_object_detection_function ( object_detector_type& detector, const image_array_type& images, const std::vector<std::vector<full_object_detection> >& truth_dets, const test_box_overlap& overlap_tester = test_box_overlap(), const double adjust_threshold = 0 ) { std::vector<std::vector<rectangle> > ignore(images.size()); return test_object_detection_function(detector,images,truth_dets,ignore, overlap_tester, adjust_threshold); } // ---------------------------------------------------------------------------------------- // ---------------------------------------------------------------------------------------- // ---------------------------------------------------------------------------------------- // ---------------------------------------------------------------------------------------- namespace impl { template < typename array_type > struct array_subset_helper { typedef typename array_type::mem_manager_type mem_manager_type; array_subset_helper ( const array_type& array_, const std::vector<unsigned 
long>& idx_set_ ) : array(array_), idx_set(idx_set_) { } unsigned long size() const { return idx_set.size(); } typedef typename array_type::type type; const type& operator[] ( unsigned long idx ) const { return array[idx_set[idx]]; } private: const array_type& array; const std::vector<unsigned long>& idx_set; }; template < typename T > const matrix_op<op_array_to_mat<array_subset_helper<T> > > mat ( const array_subset_helper<T>& m ) { typedef op_array_to_mat<array_subset_helper<T> > op; return matrix_op<op>(op(m)); } } // ---------------------------------------------------------------------------------------- template < typename trainer_type, typename image_array_type > const matrix<double,1,3> cross_validate_object_detection_trainer ( const trainer_type& trainer, const image_array_type& images, const std::vector<std::vector<full_object_detection> >& truth_dets, const std::vector<std::vector<rectangle> >& ignore, const long folds, const test_box_overlap& overlap_tester = test_box_overlap(), const double adjust_threshold = 0 ) { // make sure requires clause is not broken DLIB_CASSERT( is_learning_problem(images,truth_dets) == true && ignore.size() == images.size() && 1 < folds && folds <= static_cast<long>(images.size()), "\t matrix cross_validate_object_detection_trainer()" << "\n\t invalid inputs were given to this function" << "\n\t is_learning_problem(images,truth_dets): " << is_learning_problem(images,truth_dets) << "\n\t folds: "<< folds << "\n\t ignore.size(): " << ignore.size() << "\n\t images.size(): " << images.size() ); double correct_hits = 0; double total_true_targets = 0; const long test_size = images.size()/folds; std::vector<std::pair<double,bool> > all_dets; unsigned long missing_detections = 0; unsigned long test_idx = 0; for (long iter = 0; iter < folds; ++iter) { std::vector<unsigned long> train_idx_set; std::vector<unsigned long> test_idx_set; for (long i = 0; i < test_size; ++i) test_idx_set.push_back(test_idx++); unsigned long train_idx = 
test_idx%images.size(); std::vector<std::vector<full_object_detection> > training_rects; std::vector<std::vector<rectangle> > training_ignores; for (unsigned long i = 0; i < images.size()-test_size; ++i) { training_rects.push_back(truth_dets[train_idx]); training_ignores.push_back(ignore[train_idx]); train_idx_set.push_back(train_idx); train_idx = (train_idx+1)%images.size(); } impl::array_subset_helper<image_array_type> array_subset(images, train_idx_set); typename trainer_type::trained_function_type detector = trainer.train(array_subset, training_rects, training_ignores, overlap_tester); for (unsigned long i = 0; i < test_idx_set.size(); ++i) { std::vector<std::pair<double,rectangle> > hits; detector(images[test_idx_set[i]], hits, adjust_threshold); correct_hits += impl::number_of_truth_hits(truth_dets[test_idx_set[i]], ignore[i], hits, overlap_tester, all_dets, missing_detections); total_true_targets += truth_dets[test_idx_set[i]].size(); } } std::sort(all_dets.rbegin(), all_dets.rend()); double precision, recall; double total_hits = all_dets.size(); if (total_hits == 0) precision = 1; else precision = correct_hits / total_hits; if (total_true_targets == 0) recall = 1; else recall = correct_hits / total_true_targets; matrix<double, 1, 3> res; res = precision, recall, average_precision(all_dets, missing_detections); return res; } template < typename trainer_type, typename image_array_type > const matrix<double,1,3> cross_validate_object_detection_trainer ( const trainer_type& trainer, const image_array_type& images, const std::vector<std::vector<rectangle> >& truth_dets, const std::vector<std::vector<rectangle> >& ignore, const long folds, const test_box_overlap& overlap_tester = test_box_overlap(), const double adjust_threshold = 0 ) { // convert into a list of regular rectangles. 
std::vector<std::vector<full_object_detection> > dets(truth_dets.size()); for (unsigned long i = 0; i < truth_dets.size(); ++i) { for (unsigned long j = 0; j < truth_dets[i].size(); ++j) { dets[i].push_back(full_object_detection(truth_dets[i][j])); } } return cross_validate_object_detection_trainer(trainer, images, dets, ignore, folds, overlap_tester, adjust_threshold); } template < typename trainer_type, typename image_array_type > const matrix<double,1,3> cross_validate_object_detection_trainer ( const trainer_type& trainer, const image_array_type& images, const std::vector<std::vector<rectangle> >& truth_dets, const long folds, const test_box_overlap& overlap_tester = test_box_overlap(), const double adjust_threshold = 0 ) { const std::vector<std::vector<rectangle> > ignore(images.size()); return cross_validate_object_detection_trainer(trainer,images,truth_dets,ignore,folds,overlap_tester,adjust_threshold); } template < typename trainer_type, typename image_array_type > const matrix<double,1,3> cross_validate_object_detection_trainer ( const trainer_type& trainer, const image_array_type& images, const std::vector<std::vector<full_object_detection> >& truth_dets, const long folds, const test_box_overlap& overlap_tester = test_box_overlap(), const double adjust_threshold = 0 ) { const std::vector<std::vector<rectangle> > ignore(images.size()); return cross_validate_object_detection_trainer(trainer,images,truth_dets,ignore,folds,overlap_tester,adjust_threshold); } // ---------------------------------------------------------------------------------------- } #endif // DLIB_CROSS_VALIDATE_OBJECT_DETECTION_TRaINER_H__
Chpark/itomp
itomp_cio_planner/dlib/dlib/svm/cross_validate_object_detection_trainer.h
C
bsd-3-clause
15,607
/* Copyright 1994, LongView Technologies L.L.C. $Revision: 1.13 $ */ /* Copyright (c) 2006, Sun Microsystems, Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Sun Microsystems nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE */ #ifdef DELTA_COMPILER // The code table is used to find nmethods in the zone. // It is a hash table, where each bucket contains a list of nmethods. 
//%note // Should implement free list like symbolTable (Lars 2/10-95) const int codeTableSize = 2048; const int debugTableSize = 256; struct codeTableEntry; struct codeTableLink : public CHeapObj { // instance variable nmethod* nm; codeTableLink* next; // memory operations bool verify(int i); }; struct codeTableEntry : ValueObj { // methods are tagged, links are not. void* nmethod_or_link; bool is_empty() { return nmethod_or_link == NULL; } bool is_nmethod() { return (int) nmethod_or_link & 1; } void clear() { nmethod_or_link = NULL; } nmethod* get_nmethod() { return (nmethod*) ((int) nmethod_or_link - 1); } void set_nmethod(nmethod* nm) { assert_oop_aligned(nm); nmethod_or_link = (void*) ((int) nm + 1); } codeTableLink* get_link() { return (codeTableLink*) nmethod_or_link; } void set_link(codeTableLink* l) { assert_oop_aligned(l); nmethod_or_link = (void*) l; } // memory operations void deallocate(); int length(); // returns the number of nmethod in this bucket. bool verify(int i); }; class codeTable : public PrintableCHeapObj{ protected: int tableSize; codeTableEntry* buckets; codeTableEntry* at(int index) { return &buckets[index]; } codeTableEntry* bucketFor(int hash) { return at(hash & (tableSize - 1)); } codeTableLink* new_link(nmethod* nm, codeTableLink* n = NULL); public: codeTable(int size); void clear(); nmethod* lookup(LookupKey* L); bool verify(); void print(); void print_stats(); // Tells whether a nmethod is present bool is_present(nmethod* nm); // Removes a nmethod from the table void remove(nmethod* nm); protected: // should always add through zone->addToCodeTable() void add(nmethod* nm); void addIfAbsent(nmethod* nm); // add only if not there yet friend class zone; }; #endif
szKarlen/strongtalk
vm/code/codeTable.hpp
C++
bsd-3-clause
3,618
require("@fatso83/mini-mocha").install(); const sinon = require("sinon"); const referee = require("@sinonjs/referee"); const assert = referee.assert; const fs = require("fs"); describe("FakeTest", function () { it("should create a fake that 'yields asynchronously'", function () { const fake = sinon.fake.yieldsAsync(null, "file content"); const anotherFake = sinon.fake(); sinon.replace(fs, "readFile", fake); fs.readFile("somefile", (err, data) => { // called with fake values given to yields as arguments assert.isNull(err); assert.equals(data, "file content"); // since yields is asynchronous, anotherFake is called first assert.isTrue(anotherFake.called); }); anotherFake(); }); });
cjohansen/Sinon.JS
docs/release-source/release/examples/fakes-8-yields-async.test.js
JavaScript
bsd-3-clause
807
// Copyright 2014 Gary Burd // // Licensed under the Apache License, Version 2.0 (the "License"): you may // not use this file except in compliance with the License. You may obtain // a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, WITHOUT // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the // License for the specific language governing permissions and limitations // under the License. package internal // import "github.com/garyburd/redigo/internal" import ( "strings" ) const ( WatchState = 1 << iota MultiState SubscribeState MonitorState ) type CommandInfo struct { Set, Clear int } var commandInfos = map[string]CommandInfo{ "WATCH": {Set: WatchState}, "UNWATCH": {Clear: WatchState}, "MULTI": {Set: MultiState}, "EXEC": {Clear: WatchState | MultiState}, "DISCARD": {Clear: WatchState | MultiState}, "PSUBSCRIBE": {Set: SubscribeState}, "SUBSCRIBE": {Set: SubscribeState}, "MONITOR": {Set: MonitorState}, } func init() { for n, ci := range commandInfos { commandInfos[strings.ToLower(n)] = ci } } func LookupCommandInfo(commandName string) CommandInfo { if ci, ok := commandInfos[commandName]; ok { return ci } return commandInfos[strings.ToUpper(commandName)] }
bjwschaap/copper-caple
vendor/src/github.com/kataras/go-sessions/vendor/github.com/garyburd/redigo/internal/commandinfo.go
GO
mit
1,424
// +build linux package overlay import ( "fmt" "io" "os" "path/filepath" "syscall" "github.com/docker/docker/pkg/system" ) type copyFlags int const ( copyHardlink copyFlags = 1 << iota ) func copyRegular(srcPath, dstPath string, mode os.FileMode) error { srcFile, err := os.Open(srcPath) if err != nil { return err } defer srcFile.Close() dstFile, err := os.OpenFile(dstPath, os.O_WRONLY|os.O_CREATE, mode) if err != nil { return err } defer dstFile.Close() _, err = io.Copy(dstFile, srcFile) return err } func copyXattr(srcPath, dstPath, attr string) error { data, err := system.Lgetxattr(srcPath, attr) if err != nil { return err } if data != nil { if err := system.Lsetxattr(dstPath, attr, data, 0); err != nil { return err } } return nil } func copyDir(srcDir, dstDir string, flags copyFlags) error { err := filepath.Walk(srcDir, func(srcPath string, f os.FileInfo, err error) error { if err != nil { return err } // Rebase path relPath, err := filepath.Rel(srcDir, srcPath) if err != nil { return err } dstPath := filepath.Join(dstDir, relPath) if err != nil { return err } stat, ok := f.Sys().(*syscall.Stat_t) if !ok { return fmt.Errorf("Unable to get raw syscall.Stat_t data for %s", srcPath) } isHardlink := false switch f.Mode() & os.ModeType { case 0: // Regular file if flags&copyHardlink != 0 { isHardlink = true if err := os.Link(srcPath, dstPath); err != nil { return err } } else { if err := copyRegular(srcPath, dstPath, f.Mode()); err != nil { return err } } case os.ModeDir: if err := os.Mkdir(dstPath, f.Mode()); err != nil && !os.IsExist(err) { return err } case os.ModeSymlink: link, err := os.Readlink(srcPath) if err != nil { return err } if err := os.Symlink(link, dstPath); err != nil { return err } case os.ModeNamedPipe: fallthrough case os.ModeSocket: if err := syscall.Mkfifo(dstPath, stat.Mode); err != nil { return err } case os.ModeDevice: if err := syscall.Mknod(dstPath, stat.Mode, int(stat.Rdev)); err != nil { return err } default: return fmt.Errorf("Unknown 
file type for %s\n", srcPath) } // Everything below is copying metadata from src to dst. All this metadata // already shares an inode for hardlinks. if isHardlink { return nil } if err := os.Lchown(dstPath, int(stat.Uid), int(stat.Gid)); err != nil { return err } if err := copyXattr(srcPath, dstPath, "security.capability"); err != nil { return err } // We need to copy this attribute if it appears in an overlay upper layer, as // this function is used to copy those. It is set by overlay if a directory // is removed and then re-created and should not inherit anything from the // same dir in the lower dir. if err := copyXattr(srcPath, dstPath, "trusted.overlay.opaque"); err != nil { return err } isSymlink := f.Mode()&os.ModeSymlink != 0 // There is no LChmod, so ignore mode for symlink. Also, this // must happen after chown, as that can modify the file mode if !isSymlink { if err := os.Chmod(dstPath, f.Mode()); err != nil { return err } } ts := []syscall.Timespec{stat.Atim, stat.Mtim} // syscall.UtimesNano doesn't support a NOFOLLOW flag atm, and if !isSymlink { if err := system.UtimesNano(dstPath, ts); err != nil { return err } } else { if err := system.LUtimesNano(dstPath, ts); err != nil { return err } } return nil }) return err }
jwhonce/graphc
graphdriver/overlay/copy.go
GO
apache-2.0
3,561
/* SPDX-License-Identifier: GPL-2.0 */ /* * This header provides constants for the MVEBU ICU driver. */ #ifndef _DT_BINDINGS_INTERRUPT_CONTROLLER_MVEBU_ICU_H #define _DT_BINDINGS_INTERRUPT_CONTROLLER_MVEBU_ICU_H /* interrupt specifier cell 0 */ #define ICU_GRP_NSR 0x0 #define ICU_GRP_SR 0x1 #define ICU_GRP_SEI 0x4 #define ICU_GRP_REI 0x5 #endif
CSE3320/kernel-code
linux-5.8/include/dt-bindings/interrupt-controller/mvebu-icu.h
C
gpl-2.0
357
/* * arch/blackfin/include/asm/mem_init.h - reprogram clocks / memory * * Copyright 2004-2008 Analog Devices Inc. * * Licensed under the GPL-2 or later. */ #ifndef __MEM_INIT_H__ #define __MEM_INIT_H__ #if defined(EBIU_SDGCTL) #if defined(CONFIG_MEM_MT48LC16M16A2TG_75) || \ defined(CONFIG_MEM_MT48LC64M4A2FB_7E) || \ defined(CONFIG_MEM_MT48LC16M8A2TG_75) || \ defined(CONFIG_MEM_MT48LC32M8A2_75) || \ defined(CONFIG_MEM_MT48LC8M32B2B5_7) || \ defined(CONFIG_MEM_MT48LC32M16A2TG_75) || \ defined(CONFIG_MEM_MT48LC32M8A2_75) #if (CONFIG_SCLK_HZ > 119402985) #define SDRAM_tRP TRP_2 #define SDRAM_tRP_num 2 #define SDRAM_tRAS TRAS_7 #define SDRAM_tRAS_num 7 #define SDRAM_tRCD TRCD_2 #define SDRAM_tWR TWR_2 #endif #if (CONFIG_SCLK_HZ > 104477612) && (CONFIG_SCLK_HZ <= 119402985) #define SDRAM_tRP TRP_2 #define SDRAM_tRP_num 2 #define SDRAM_tRAS TRAS_6 #define SDRAM_tRAS_num 6 #define SDRAM_tRCD TRCD_2 #define SDRAM_tWR TWR_2 #endif #if (CONFIG_SCLK_HZ > 89552239) && (CONFIG_SCLK_HZ <= 104477612) #define SDRAM_tRP TRP_2 #define SDRAM_tRP_num 2 #define SDRAM_tRAS TRAS_5 #define SDRAM_tRAS_num 5 #define SDRAM_tRCD TRCD_2 #define SDRAM_tWR TWR_2 #endif #if (CONFIG_SCLK_HZ > 74626866) && (CONFIG_SCLK_HZ <= 89552239) #define SDRAM_tRP TRP_2 #define SDRAM_tRP_num 2 #define SDRAM_tRAS TRAS_4 #define SDRAM_tRAS_num 4 #define SDRAM_tRCD TRCD_2 #define SDRAM_tWR TWR_2 #endif #if (CONFIG_SCLK_HZ > 66666667) && (CONFIG_SCLK_HZ <= 74626866) #define SDRAM_tRP TRP_2 #define SDRAM_tRP_num 2 #define SDRAM_tRAS TRAS_3 #define SDRAM_tRAS_num 3 #define SDRAM_tRCD TRCD_2 #define SDRAM_tWR TWR_2 #endif #if (CONFIG_SCLK_HZ > 59701493) && (CONFIG_SCLK_HZ <= 66666667) #define SDRAM_tRP TRP_1 #define SDRAM_tRP_num 1 #define SDRAM_tRAS TRAS_4 #define SDRAM_tRAS_num 4 #define SDRAM_tRCD TRCD_1 #define SDRAM_tWR TWR_2 #endif #if (CONFIG_SCLK_HZ > 44776119) && (CONFIG_SCLK_HZ <= 59701493) #define SDRAM_tRP TRP_1 #define SDRAM_tRP_num 1 #define SDRAM_tRAS TRAS_3 #define SDRAM_tRAS_num 3 #define SDRAM_tRCD 
TRCD_1 #define SDRAM_tWR TWR_2 #endif #if (CONFIG_SCLK_HZ > 29850746) && (CONFIG_SCLK_HZ <= 44776119) #define SDRAM_tRP TRP_1 #define SDRAM_tRP_num 1 #define SDRAM_tRAS TRAS_2 #define SDRAM_tRAS_num 2 #define SDRAM_tRCD TRCD_1 #define SDRAM_tWR TWR_2 #endif #if (CONFIG_SCLK_HZ <= 29850746) #define SDRAM_tRP TRP_1 #define SDRAM_tRP_num 1 #define SDRAM_tRAS TRAS_1 #define SDRAM_tRAS_num 1 #define SDRAM_tRCD TRCD_1 #define SDRAM_tWR TWR_2 #endif #endif /* * The BF526-EZ-Board changed SDRAM chips between revisions, * so we use below timings to accommodate both. */ #if defined(CONFIG_MEM_MT48H32M16LFCJ_75) #if (CONFIG_SCLK_HZ > 119402985) #define SDRAM_tRP TRP_2 #define SDRAM_tRP_num 2 #define SDRAM_tRAS TRAS_8 #define SDRAM_tRAS_num 8 #define SDRAM_tRCD TRCD_2 #define SDRAM_tWR TWR_2 #endif #if (CONFIG_SCLK_HZ > 104477612) && (CONFIG_SCLK_HZ <= 119402985) #define SDRAM_tRP TRP_2 #define SDRAM_tRP_num 2 #define SDRAM_tRAS TRAS_7 #define SDRAM_tRAS_num 7 #define SDRAM_tRCD TRCD_2 #define SDRAM_tWR TWR_2 #endif #if (CONFIG_SCLK_HZ > 89552239) && (CONFIG_SCLK_HZ <= 104477612) #define SDRAM_tRP TRP_2 #define SDRAM_tRP_num 2 #define SDRAM_tRAS TRAS_6 #define SDRAM_tRAS_num 6 #define SDRAM_tRCD TRCD_2 #define SDRAM_tWR TWR_2 #endif #if (CONFIG_SCLK_HZ > 74626866) && (CONFIG_SCLK_HZ <= 89552239) #define SDRAM_tRP TRP_2 #define SDRAM_tRP_num 2 #define SDRAM_tRAS TRAS_5 #define SDRAM_tRAS_num 5 #define SDRAM_tRCD TRCD_2 #define SDRAM_tWR TWR_2 #endif #if (CONFIG_SCLK_HZ > 66666667) && (CONFIG_SCLK_HZ <= 74626866) #define SDRAM_tRP TRP_2 #define SDRAM_tRP_num 2 #define SDRAM_tRAS TRAS_4 #define SDRAM_tRAS_num 4 #define SDRAM_tRCD TRCD_2 #define SDRAM_tWR TWR_2 #endif #if (CONFIG_SCLK_HZ > 59701493) && (CONFIG_SCLK_HZ <= 66666667) #define SDRAM_tRP TRP_2 #define SDRAM_tRP_num 2 #define SDRAM_tRAS TRAS_4 #define SDRAM_tRAS_num 4 #define SDRAM_tRCD TRCD_1 #define SDRAM_tWR TWR_2 #endif #if (CONFIG_SCLK_HZ > 44776119) && (CONFIG_SCLK_HZ <= 59701493) #define SDRAM_tRP TRP_2 #define 
SDRAM_tRP_num 2 #define SDRAM_tRAS TRAS_3 #define SDRAM_tRAS_num 3 #define SDRAM_tRCD TRCD_1 #define SDRAM_tWR TWR_2 #endif #if (CONFIG_SCLK_HZ > 29850746) && (CONFIG_SCLK_HZ <= 44776119) #define SDRAM_tRP TRP_1 #define SDRAM_tRP_num 1 #define SDRAM_tRAS TRAS_3 #define SDRAM_tRAS_num 3 #define SDRAM_tRCD TRCD_1 #define SDRAM_tWR TWR_2 #endif #if (CONFIG_SCLK_HZ <= 29850746) #define SDRAM_tRP TRP_1 #define SDRAM_tRP_num 1 #define SDRAM_tRAS TRAS_2 #define SDRAM_tRAS_num 2 #define SDRAM_tRCD TRCD_1 #define SDRAM_tWR TWR_2 #endif #endif #if defined(CONFIG_MEM_MT48LC16M8A2TG_75) || \ defined(CONFIG_MEM_MT48LC8M32B2B5_7) /*SDRAM INFORMATION: */ #define SDRAM_Tref 64 /* Refresh period in milliseconds */ #define SDRAM_NRA 4096 /* Number of row addresses in SDRAM */ #define SDRAM_CL CL_3 #endif #if defined(CONFIG_MEM_MT48LC32M8A2_75) || \ defined(CONFIG_MEM_MT48LC64M4A2FB_7E) || \ defined(CONFIG_MEM_MT48LC32M16A2TG_75) || \ defined(CONFIG_MEM_MT48LC16M16A2TG_75) || \ defined(CONFIG_MEM_MT48LC32M8A2_75) /*SDRAM INFORMATION: */ #define SDRAM_Tref 64 /* Refresh period in milliseconds */ #define SDRAM_NRA 8192 /* Number of row addresses in SDRAM */ #define SDRAM_CL CL_3 #endif #if defined(CONFIG_MEM_MT48H32M16LFCJ_75) /*SDRAM INFORMATION: */ #define SDRAM_Tref 64 /* Refresh period in milliseconds */ #define SDRAM_NRA 8192 /* Number of row addresses in SDRAM */ #define SDRAM_CL CL_2 #endif #ifdef CONFIG_BFIN_KERNEL_CLOCK_MEMINIT_CALC /* Equation from section 17 (p17-46) of BF533 HRM */ #define mem_SDRRC (((CONFIG_SCLK_HZ / 1000) * SDRAM_Tref) / SDRAM_NRA) - (SDRAM_tRAS_num + SDRAM_tRP_num) /* Enable SCLK Out */ #define mem_SDGCTL (SCTLE | SDRAM_CL | SDRAM_tRAS | SDRAM_tRP | SDRAM_tRCD | SDRAM_tWR | PSS) #else #define mem_SDRRC CONFIG_MEM_SDRRC #define mem_SDGCTL CONFIG_MEM_SDGCTL #endif #endif #if defined(EBIU_DDRCTL0) #define MIN_DDR_SCLK(x) (x*(CONFIG_SCLK_HZ/1000/1000)/1000 + 1) #define MAX_DDR_SCLK(x) (x*(CONFIG_SCLK_HZ/1000/1000)/1000) #define DDR_CLK_HZ(x) 
(1000*1000*1000/x) #if defined(CONFIG_MEM_MT46V32M16_6T) #define DDR_SIZE DEVSZ_512 #define DDR_WIDTH DEVWD_16 #define DDR_MAX_tCK 13 #define DDR_tRC DDR_TRC(MIN_DDR_SCLK(60)) #define DDR_tRAS DDR_TRAS(MIN_DDR_SCLK(42)) #define DDR_tRP DDR_TRP(MIN_DDR_SCLK(15)) #define DDR_tRFC DDR_TRFC(MIN_DDR_SCLK(72)) #define DDR_tREFI DDR_TREFI(MAX_DDR_SCLK(7800)) #define DDR_tRCD DDR_TRCD(MIN_DDR_SCLK(15)) #define DDR_tWTR DDR_TWTR(1) #define DDR_tMRD DDR_TMRD(MIN_DDR_SCLK(12)) #define DDR_tWR DDR_TWR(MIN_DDR_SCLK(15)) #endif #if defined(CONFIG_MEM_MT46V32M16_5B) #define DDR_SIZE DEVSZ_512 #define DDR_WIDTH DEVWD_16 #define DDR_MAX_tCK 13 #define DDR_tRC DDR_TRC(MIN_DDR_SCLK(55)) #define DDR_tRAS DDR_TRAS(MIN_DDR_SCLK(40)) #define DDR_tRP DDR_TRP(MIN_DDR_SCLK(15)) #define DDR_tRFC DDR_TRFC(MIN_DDR_SCLK(70)) #define DDR_tREFI DDR_TREFI(MAX_DDR_SCLK(7800)) #define DDR_tRCD DDR_TRCD(MIN_DDR_SCLK(15)) #define DDR_tWTR DDR_TWTR(2) #define DDR_tMRD DDR_TMRD(MIN_DDR_SCLK(10)) #define DDR_tWR DDR_TWR(MIN_DDR_SCLK(15)) #endif #if (CONFIG_SCLK_HZ < DDR_CLK_HZ(DDR_MAX_tCK)) # error "CONFIG_SCLK_HZ is too small (<DDR_CLK_HZ(DDR_MAX_tCK) Hz)." #elif(CONFIG_SCLK_HZ <= 133333333) # define DDR_CL CL_2 #else # error "CONFIG_SCLK_HZ is too large (>133333333 Hz)." 
#endif #ifdef CONFIG_BFIN_KERNEL_CLOCK_MEMINIT_CALC #define mem_DDRCTL0 (DDR_tRP | DDR_tRAS | DDR_tRC | DDR_tRFC | DDR_tREFI) #define mem_DDRCTL1 (DDR_DATWIDTH | EXTBANK_1 | DDR_SIZE | DDR_WIDTH | DDR_tWTR \ | DDR_tMRD | DDR_tWR | DDR_tRCD) #define mem_DDRCTL2 DDR_CL #else #define mem_DDRCTL0 CONFIG_MEM_DDRCTL0 #define mem_DDRCTL1 CONFIG_MEM_DDRCTL1 #define mem_DDRCTL2 CONFIG_MEM_DDRCTL2 #endif #endif #if defined CONFIG_CLKIN_HALF #define CLKIN_HALF 1 #else #define CLKIN_HALF 0 #endif #if defined CONFIG_PLL_BYPASS #define PLL_BYPASS 1 #else #define PLL_BYPASS 0 #endif #ifdef CONFIG_BF60x /* DMC status bits */ #define IDLE 0x1 #define MEMINITDONE 0x4 #define SRACK 0x8 #define PDACK 0x10 #define DPDACK 0x20 #define DLLCALDONE 0x2000 #define PENDREF 0xF0000 #define PHYRDPHASE 0xF00000 #define PHYRDPHASE_OFFSET 20 /* DMC control bits */ #define LPDDR 0x2 #define INIT 0x4 #define SRREQ 0x8 #define PDREQ 0x10 #define DPDREQ 0x20 #define PREC 0x40 #define ADDRMODE 0x100 #define RDTOWR 0xE00 #define PPREF 0x1000 #define DLLCAL 0x2000 /* DMC DLL control bits */ #define DLLCALRDCNT 0xFF #define DATACYC 0xF00 #define DATACYC_OFFSET 8 /* CGU Divisor bits */ #define CSEL_OFFSET 0 #define S0SEL_OFFSET 5 #define SYSSEL_OFFSET 8 #define S1SEL_OFFSET 13 #define DSEL_OFFSET 16 #define OSEL_OFFSET 22 #define ALGN 0x20000000 #define UPDT 0x40000000 #define LOCK 0x80000000 /* CGU Status bits */ #define PLLEN 0x1 #define PLLBP 0x2 #define PLOCK 0x4 #define CLKSALGN 0x8 /* CGU Control bits */ #define MSEL_MASK 0x7F00 #define DF_MASK 0x1 struct ddr_config { u32 ddr_clk; u32 dmc_ddrctl; u32 dmc_effctl; u32 dmc_ddrcfg; u32 dmc_ddrtr0; u32 dmc_ddrtr1; u32 dmc_ddrtr2; u32 dmc_ddrmr; u32 dmc_ddrmr1; }; #if defined(CONFIG_MEM_MT47H64M16) static struct ddr_config ddr_config_table[] __attribute__((section(".data_l1"))) = { [0] = { .ddr_clk = 125, .dmc_ddrctl = 0x00000904, .dmc_effctl = 0x004400C0, .dmc_ddrcfg = 0x00000422, .dmc_ddrtr0 = 0x20705212, .dmc_ddrtr1 = 0x201003CF, .dmc_ddrtr2 = 
0x00320107, .dmc_ddrmr = 0x00000422, .dmc_ddrmr1 = 0x4, }, [1] = { .ddr_clk = 133, .dmc_ddrctl = 0x00000904, .dmc_effctl = 0x004400C0, .dmc_ddrcfg = 0x00000422, .dmc_ddrtr0 = 0x20806313, .dmc_ddrtr1 = 0x2013040D, .dmc_ddrtr2 = 0x00320108, .dmc_ddrmr = 0x00000632, .dmc_ddrmr1 = 0x4, }, [2] = { .ddr_clk = 150, .dmc_ddrctl = 0x00000904, .dmc_effctl = 0x004400C0, .dmc_ddrcfg = 0x00000422, .dmc_ddrtr0 = 0x20A07323, .dmc_ddrtr1 = 0x20160492, .dmc_ddrtr2 = 0x00320209, .dmc_ddrmr = 0x00000632, .dmc_ddrmr1 = 0x4, }, [3] = { .ddr_clk = 166, .dmc_ddrctl = 0x00000904, .dmc_effctl = 0x004400C0, .dmc_ddrcfg = 0x00000422, .dmc_ddrtr0 = 0x20A07323, .dmc_ddrtr1 = 0x2016050E, .dmc_ddrtr2 = 0x00320209, .dmc_ddrmr = 0x00000632, .dmc_ddrmr1 = 0x4, }, [4] = { .ddr_clk = 200, .dmc_ddrctl = 0x00000904, .dmc_effctl = 0x004400C0, .dmc_ddrcfg = 0x00000422, .dmc_ddrtr0 = 0x20a07323, .dmc_ddrtr1 = 0x2016050f, .dmc_ddrtr2 = 0x00320509, .dmc_ddrmr = 0x00000632, .dmc_ddrmr1 = 0x4, }, [5] = { .ddr_clk = 225, .dmc_ddrctl = 0x00000904, .dmc_effctl = 0x004400C0, .dmc_ddrcfg = 0x00000422, .dmc_ddrtr0 = 0x20E0A424, .dmc_ddrtr1 = 0x302006DB, .dmc_ddrtr2 = 0x0032020D, .dmc_ddrmr = 0x00000842, .dmc_ddrmr1 = 0x4, }, [6] = { .ddr_clk = 250, .dmc_ddrctl = 0x00000904, .dmc_effctl = 0x004400C0, .dmc_ddrcfg = 0x00000422, .dmc_ddrtr0 = 0x20E0A424, .dmc_ddrtr1 = 0x3020079E, .dmc_ddrtr2 = 0x0032050D, .dmc_ddrmr = 0x00000842, .dmc_ddrmr1 = 0x4, }, }; #endif static inline void dmc_enter_self_refresh(void) { if (bfin_read_DMC0_STAT() & MEMINITDONE) { bfin_write_DMC0_CTL(bfin_read_DMC0_CTL() | SRREQ); while (!(bfin_read_DMC0_STAT() & SRACK)) continue; } } static inline void dmc_exit_self_refresh(void) { if (bfin_read_DMC0_STAT() & MEMINITDONE) { bfin_write_DMC0_CTL(bfin_read_DMC0_CTL() & ~SRREQ); while (bfin_read_DMC0_STAT() & SRACK) continue; } } static inline void init_cgu(u32 cgu_div, u32 cgu_ctl) { dmc_enter_self_refresh(); /* Don't set the same value of MSEL and DF to CGU_CTL */ if ((bfin_read32(CGU0_CTL) & 
(MSEL_MASK | DF_MASK)) != cgu_ctl) { bfin_write32(CGU0_DIV, cgu_div); bfin_write32(CGU0_CTL, cgu_ctl); while ((bfin_read32(CGU0_STAT) & (CLKSALGN | PLLBP)) || !(bfin_read32(CGU0_STAT) & PLOCK)) continue; } bfin_write32(CGU0_DIV, cgu_div | UPDT); while (bfin_read32(CGU0_STAT) & CLKSALGN) continue; dmc_exit_self_refresh(); } static inline void init_dmc(u32 dmc_clk) { int i, dlldatacycle, dll_ctl; for (i = 0; i < 7; i++) { if (ddr_config_table[i].ddr_clk == dmc_clk) { bfin_write_DMC0_CFG(ddr_config_table[i].dmc_ddrcfg); bfin_write_DMC0_TR0(ddr_config_table[i].dmc_ddrtr0); bfin_write_DMC0_TR1(ddr_config_table[i].dmc_ddrtr1); bfin_write_DMC0_TR2(ddr_config_table[i].dmc_ddrtr2); bfin_write_DMC0_MR(ddr_config_table[i].dmc_ddrmr); bfin_write_DMC0_EMR1(ddr_config_table[i].dmc_ddrmr1); bfin_write_DMC0_EFFCTL(ddr_config_table[i].dmc_effctl); bfin_write_DMC0_CTL(ddr_config_table[i].dmc_ddrctl); break; } } while (!(bfin_read_DMC0_STAT() & MEMINITDONE)) continue; dlldatacycle = (bfin_read_DMC0_STAT() & PHYRDPHASE) >> PHYRDPHASE_OFFSET; dll_ctl = bfin_read_DMC0_DLLCTL(); dll_ctl &= ~DATACYC; bfin_write_DMC0_DLLCTL(dll_ctl | (dlldatacycle << DATACYC_OFFSET)); while (!(bfin_read_DMC0_STAT() & DLLCALDONE)) continue; } #endif #endif /*__MEM_INIT_H__*/
AiJiaZone/linux-4.0
virt/arch/blackfin/include/asm/mem_init.h
C
gpl-2.0
13,417
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // <auto-generated/> #nullable disable using System.Text.Json; using Azure.Core; namespace Azure.ResourceManager.Communication.Models { public partial class LinkNotificationHubParameters : IUtf8JsonSerializable { void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) { writer.WriteStartObject(); writer.WritePropertyName("resourceId"); writer.WriteStringValue(ResourceId); writer.WritePropertyName("connectionString"); writer.WriteStringValue(ConnectionString); writer.WriteEndObject(); } } }
ayeletshpigelman/azure-sdk-for-net
sdk/communication/Azure.ResourceManager.Communication/src/Generated/Models/LinkNotificationHubParameters.Serialization.cs
C#
mit
703
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <title>Vue.js Modal Example</title> <!-- Delete ".min" for console warnings in development --> <script src="../../dist/vue.min.js"></script> <link rel="stylesheet" href="style.css"> </head> <body> <!-- template for the modal component --> <script type="text/x-template" id="modal-template"> <transition name="modal"> <div class="modal-mask"> <div class="modal-wrapper"> <div class="modal-container"> <div class="modal-header"> <slot name="header"> default header </slot> </div> <div class="modal-body"> <slot name="body"> default body </slot> </div> <div class="modal-footer"> <slot name="footer"> default footer <button class="modal-default-button" @click="$emit('close')"> OK </button> </slot> </div> </div> </div> </div> </transition> </script> <!-- app --> <div id="app"> <button id="show-modal" @click="showModal = true">Show Modal</button> <!-- use the modal component, pass in the prop --> <modal v-if="showModal" @close="showModal = false"> <!-- you can use custom content here to overwrite default content --> <h3 slot="header">custom header</h3> </modal> </div> <script> // register modal component Vue.component('modal', { template: '#modal-template' }) // start app new Vue({ el: '#app', data: { showModal: false } }) </script> </body> </html>
sreesirisha/GITMidtermexam
examples/modal/index.html
HTML
mit
1,893
// Copyright 2009 the Sputnik authors. All rights reserved. // This code is governed by the BSD license found in the LICENSE file. /** * @name: S15.10.1_A1_T4; * @section: 15.10.1; * @assertion: RegExp syntax errors must be caught when matcher(s) compiles; * @description: Tested RegExp is "a+++"; */ //CHECK#1 try { $ERROR('#1.1: new RegExp("a+++") throw SyntaxError. Actual: ' + (new RegExp("a+++"))); } catch (e) { if ((e instanceof SyntaxError) !== true) { $ERROR('#1.2: new RegExp("a+++") throw SyntaxError. Actual: ' + (e)); } }
seraum/nectarjs
tests/ES3/Conformance/15_Native_ECMA_Script_Objects/15.10_RegExp_Objects/15.10.1_Patterns/S15.10.1_A1_T4.js
JavaScript
mit
544
<html> <head> <meta http-equiv="Content-Type" content="text/html; charset=US-ASCII"> <title>Index modifiers</title> <link rel="stylesheet" href="../../../../../../doc/src/boostbook.css" type="text/css"> <meta name="generator" content="DocBook XSL Stylesheets V1.79.1"> <link rel="home" href="../../index.html" title="Chapter&#160;1.&#160;The Variadic Macro Data Library 1.9"> <link rel="up" href="../vmd_modifiers.html" title="Macros with modifiers"> <link rel="prev" href="vmd_modifiers_splitting.html" title="Splitting modifiers"> <link rel="next" href="vmd_modifiers_single.html" title="Modifiers and the single-element sequence"> </head> <body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF"> <table cellpadding="2" width="100%"><tr> <td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../../../boost.png"></td> <td align="center"><a href="../../../../../../index.html">Home</a></td> <td align="center"><a href="../../../../../../libs/libraries.htm">Libraries</a></td> <td align="center"><a href="http://www.boost.org/users/people.html">People</a></td> <td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td> <td align="center"><a href="../../../../../../more/index.htm">More</a></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="vmd_modifiers_splitting.html"><img src="../../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../vmd_modifiers.html"><img src="../../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../index.html"><img src="../../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="vmd_modifiers_single.html"><img src="../../../../../../doc/src/images/next.png" alt="Next"></a> </div> <div class="section"> <div class="titlepage"><div><div><h3 class="title"> <a name="variadic_macro_data.vmd_modifiers.vmd_modifiers_index"></a><a class="link" href="vmd_modifiers_index.html" title="Index 
modifiers">Index modifiers</a> </h3></div></div></div> <p> Index modifiers can be used with the BOOST_VMD_ELEM macro when identifier modifiers are being used. Index modifiers take two values: </p> <div class="itemizedlist"><ul class="itemizedlist" style="list-style-type: disc; "> <li class="listitem"> BOOST_VMD_RETURN_INDEX, return an index as a number, starting with 0, of the particular identifier modifier which matched, as part of the output of the BOOST_VMD_ELEM macro. If no particular identifier modifier matches, return emptiness as part of the output. The index number is determined purely by the order in which identifier modifiers are specified as optional parameters to BOOST_VMD_ELEM, whether singly as individual optional parameters or as a tuple of identifier modifiers. </li> <li class="listitem"> BOOST_VMD_RETURN_NO_INDEX, do not return an index as part of the output. This is the default value and need only be used to override the BOOST_VMD_RETURN_INDEX value if it is specified. </li> </ul></div> <p> The BOOST_VMD_RETURN_INDEX tells the programmer which one of the identifier modifiers matched the element's data as an index. Some macro programmers find this more useful for the purposes of macro branching logic than branching using the actual name of the identifier itself. </p> <p> When the index modifier BOOST_VMD_RETURN_INDEX is specified, and identifier modifiers are specified along with the BOOST_VMD_TYPE_IDENTIFIER filter modifier, the output of BOOST_VMD_ELEM becomes a tuple of two elements. The first tuple element is the element matched and the last tuple element is the index, starting with 0, of the identifier modifier which matched. If an element is not matched both tuple elements are empty. </p> <p> If the splitting modifier BOOST_VMD_RETURN_AFTER is also specified then the output is a tuple of three elements. 
The first tuple element is the element matched, the second tuple element is the rest of the sequence after the matching element, and the last tuple element is the numeric index. If an element is not matched then all three tuple elements are empty. </p> <p> If identifier modifiers and the BOOST_VMD_TYPE_IDENTIFIER filter modifier are not specified as optional parameters, then if BOOST_VMD_RETURN_INDEX is specified it is ignored. If the splitting modifier BOOST_VMD_RETURN_ONLY_AFTER is specified, if BOOST_VMD_RETURN_INDEX is also specified it is ignored. </p> <p> Let's see how this works: </p> <pre class="programlisting"><span class="preprocessor">#include</span> <span class="special">&lt;</span><span class="identifier">boost</span><span class="special">/</span><span class="identifier">vmd</span><span class="special">/</span><span class="identifier">elem</span><span class="special">.</span><span class="identifier">hpp</span><span class="special">&gt;</span> <span class="preprocessor">#define</span> <span class="identifier">BOOST_VMD_REGISTER_ANAME</span> <span class="special">(</span><span class="identifier">ANAME</span><span class="special">)</span> <span class="preprocessor">#define</span> <span class="identifier">BOOST_VMD_REGISTER_APLACE</span> <span class="special">(</span><span class="identifier">APLACE</span><span class="special">)</span> <span class="preprocessor">#define</span> <span class="identifier">BOOST_VMD_REGISTER_ACOUNTRY</span> <span class="special">(</span><span class="identifier">ACOUNTRY</span><span class="special">)</span> <span class="preprocessor">#define</span> <span class="identifier">BOOST_VMD_DETECT_ANAME_ANAME</span> <span class="preprocessor">#define</span> <span class="identifier">BOOST_VMD_DETECT_APLACE_APLACE</span> <span class="preprocessor">#define</span> <span class="identifier">A_SEQUENCE</span> <span class="special">(</span><span class="number">1</span><span class="special">,</span><span class="number">2</span><span 
class="special">,</span><span class="number">3</span><span class="special">)</span> <span class="identifier">ANAME</span> <span class="special">(</span><span class="number">1</span><span class="special">)(</span><span class="number">2</span><span class="special">)</span> <span class="number">46</span> <span class="identifier">BOOST_VMD_ELEM</span><span class="special">(</span><span class="number">1</span><span class="special">,</span><span class="identifier">A_SEQUENCE</span><span class="special">,</span><span class="identifier">BOOST_VMD_TYPE_IDENTIFIER</span><span class="special">)</span> <span class="identifier">will</span> <span class="keyword">return</span> <span class="char">'ANAME'</span> <span class="identifier">BOOST_VMD_ELEM</span><span class="special">(</span><span class="number">1</span><span class="special">,</span><span class="identifier">A_SEQUENCE</span><span class="special">,</span><span class="identifier">BOOST_VMD_TYPE_IDENTIFIER</span><span class="special">,</span><span class="identifier">APLACE</span><span class="special">,</span><span class="identifier">ACOUNTRY</span><span class="special">)</span> <span class="identifier">will</span> <span class="keyword">return</span> <span class="identifier">emptiness</span> <span class="identifier">BOOST_VMD_ELEM</span><span class="special">(</span><span class="number">1</span><span class="special">,</span><span class="identifier">A_SEQUENCE</span><span class="special">,</span><span class="identifier">BOOST_VMD_TYPE_IDENTIFIER</span><span class="special">,</span><span class="identifier">BOOST_VMD_RETURN_INDEX</span><span class="special">,</span><span class="identifier">APLACE</span><span class="special">,</span><span class="identifier">ACOUNTRY</span><span class="special">)</span> <span class="identifier">will</span> <span class="keyword">return</span> <span class="special">(,)</span> <span class="identifier">BOOST_VMD_ELEM</span><span class="special">(</span><span class="number">1</span><span 
class="special">,</span><span class="identifier">A_SEQUENCE</span><span class="special">,</span><span class="identifier">BOOST_VMD_TYPE_IDENTIFIER</span><span class="special">,</span><span class="identifier">BOOST_VMD_RETURN_INDEX</span><span class="special">,</span><span class="identifier">ANAME</span><span class="special">,</span><span class="identifier">APLACE</span><span class="special">,</span><span class="identifier">ACOUNTRY</span><span class="special">)</span> <span class="identifier">will</span> <span class="keyword">return</span> <span class="char">'(ANAME,0)'</span> <span class="identifier">BOOST_VMD_ELEM</span><span class="special">(</span><span class="number">1</span><span class="special">,</span><span class="identifier">A_SEQUENCE</span><span class="special">,</span><span class="identifier">BOOST_VMD_TYPE_IDENTIFIER</span><span class="special">,</span><span class="identifier">BOOST_VMD_RETURN_INDEX</span><span class="special">,(</span><span class="identifier">APLACE</span><span class="special">,</span><span class="identifier">ACOUNTRY</span><span class="special">,</span><span class="identifier">ANAME</span><span class="special">))</span> <span class="identifier">will</span> <span class="keyword">return</span> <span class="char">'(ANAME,2)'</span> </pre> <p> Used with splitting modifiers: </p> <pre class="programlisting"><span class="preprocessor">#include</span> <span class="special">&lt;</span><span class="identifier">boost</span><span class="special">/</span><span class="identifier">vmd</span><span class="special">/</span><span class="identifier">elem</span><span class="special">.</span><span class="identifier">hpp</span><span class="special">&gt;</span> <span class="identifier">BOOST_VMD_ELEM</span><span class="special">(</span><span class="number">1</span><span class="special">,</span><span class="identifier">A_SEQUENCE</span><span class="special">,</span><span class="identifier">BOOST_VMD_TYPE_IDENTIFIER</span><span class="special">,</span><span 
class="identifier">BOOST_VMD_RETURN_INDEX</span><span class="special">,</span><span class="identifier">APLACE</span><span class="special">,</span><span class="identifier">ACOUNTRY</span><span class="special">,</span><span class="identifier">BOOST_VMD_RETURN_AFTER</span><span class="special">)</span> <span class="identifier">will</span> <span class="keyword">return</span> <span class="special">(,,)</span> <span class="identifier">BOOST_VMD_ELEM</span><span class="special">(</span><span class="number">1</span><span class="special">,</span><span class="identifier">A_SEQUENCE</span><span class="special">,</span><span class="identifier">BOOST_VMD_TYPE_IDENTIFIER</span><span class="special">,</span><span class="identifier">BOOST_VMD_RETURN_INDEX</span><span class="special">,</span><span class="identifier">ANAME</span><span class="special">,</span><span class="identifier">APLACE</span><span class="special">,</span><span class="identifier">ACOUNTRY</span><span class="special">,</span><span class="identifier">BOOST_VMD_RETURN_AFTER</span><span class="special">)</span> <span class="identifier">will</span> <span class="keyword">return</span> <span class="char">'(ANAME,(1)(2) 46,0)'</span> <span class="identifier">BOOST_VMD_ELEM</span><span class="special">(</span><span class="number">1</span><span class="special">,</span><span class="identifier">A_SEQUENCE</span><span class="special">,</span><span class="identifier">BOOST_VMD_TYPE_IDENTIFIER</span><span class="special">,</span><span class="identifier">BOOST_VMD_RETURN_INDEX</span><span class="special">,(</span><span class="identifier">APLACE</span><span class="special">,</span><span class="identifier">ACOUNTRY</span><span class="special">,</span><span class="identifier">ANAME</span><span class="special">),</span><span class="identifier">BOOST_VMD_RETURN_AFTER</span><span class="special">)</span> <span class="identifier">will</span> <span class="keyword">return</span> <span class="char">'(ANAME,(1)(2) 46,2)'</span> <span 
class="identifier">BOOST_VMD_ELEM</span><span class="special">(</span><span class="number">1</span><span class="special">,</span><span class="identifier">A_SEQUENCE</span><span class="special">,</span><span class="identifier">BOOST_VMD_TYPE_IDENTIFIER</span><span class="special">,</span><span class="identifier">BOOST_VMD_RETURN_INDEX</span><span class="special">,(</span><span class="identifier">APLACE</span><span class="special">,</span><span class="identifier">ACOUNTRY</span><span class="special">,</span><span class="identifier">ANAME</span><span class="special">),</span><span class="identifier">BOOST_VMD_RETURN_ONLY_AFTER</span><span class="special">)</span> <span class="identifier">will</span> <span class="keyword">return</span> <span class="char">'(1)(2) 46'</span> </pre> </div> <table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr> <td align="left"></td> <td align="right"><div class="copyright-footer">Copyright &#169; 2010-2017 Tropic Software East Inc</div></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="vmd_modifiers_splitting.html"><img src="../../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../vmd_modifiers.html"><img src="../../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../index.html"><img src="../../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="vmd_modifiers_single.html"><img src="../../../../../../doc/src/images/next.png" alt="Next"></a> </div> </body> </html>
vslavik/poedit
deps/boost/libs/vmd/doc/html/variadic_macro_data/vmd_modifiers/vmd_modifiers_index.html
HTML
mit
13,740
version https://git-lfs.github.com/spec/v1 oid sha256:7bb2ff95b5b5b8b9381c3164c00724bbcf60a5e3a249bffc29cd3c7030ebda2a size 5862
yogeshsaroya/new-cdnjs
ajax/libs/jQuery.mmenu/4.3.6/js/addons/jquery.mmenu.dragopen.js
JavaScript
mit
129
/** * @license * Copyright Google LLC All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import {AsyncTestCompleter, beforeEach, describe, expect, inject, it} from '@angular/core/testing/src/testing_internal'; import {filter} from 'rxjs/operators'; import {EventEmitter} from '../src/event_emitter'; { describe('EventEmitter', () => { let emitter: EventEmitter<any>; beforeEach(() => { emitter = new EventEmitter(); }); it('should call the next callback', inject([AsyncTestCompleter], (async: AsyncTestCompleter) => { emitter.subscribe({ next: (value: any) => { expect(value).toEqual(99); async.done(); } }); emitter.emit(99); })); it('should call the throw callback', inject([AsyncTestCompleter], (async: AsyncTestCompleter) => { emitter.subscribe({ next: () => {}, error: (error: any) => { expect(error).toEqual('Boom'); async.done(); } }); emitter.error('Boom'); })); it('should work when no throw callback is provided', inject([AsyncTestCompleter], (async: AsyncTestCompleter) => { emitter.subscribe({ next: () => {}, error: (_: any) => { async.done(); } }); emitter.error('Boom'); })); it('should call the return callback', inject([AsyncTestCompleter], (async: AsyncTestCompleter) => { emitter.subscribe({ next: () => {}, error: (_: any) => {}, complete: () => { async.done(); } }); emitter.complete(); })); it('should subscribe to the wrapper synchronously', () => { let called = false; emitter.subscribe({ next: (value: any) => { called = true; } }); emitter.emit(99); expect(called).toBe(true); }); it('delivers next and error events synchronously', inject([AsyncTestCompleter], (async: AsyncTestCompleter) => { const log: any[] /** TODO #9100 */ = []; emitter.subscribe({ next: (x: any) => { log.push(x); expect(log).toEqual([1, 2]); }, error: (err: any) => { log.push(err); expect(log).toEqual([1, 2, 3, 4]); async.done(); } }); log.push(1); emitter.emit(2); log.push(3); 
emitter.error(4); log.push(5); })); it('delivers next and complete events synchronously', () => { const log: any[] /** TODO #9100 */ = []; emitter.subscribe({ next: (x: any) => { log.push(x); expect(log).toEqual([1, 2]); }, error: null, complete: () => { log.push(4); expect(log).toEqual([1, 2, 3, 4]); } }); log.push(1); emitter.emit(2); log.push(3); emitter.complete(); log.push(5); expect(log).toEqual([1, 2, 3, 4, 5]); }); it('delivers events asynchronously when forced to async mode', inject([AsyncTestCompleter], (async: AsyncTestCompleter) => { const e = new EventEmitter(true); const log: any[] /** TODO #9100 */ = []; e.subscribe((x: any) => { log.push(x); expect(log).toEqual([1, 3, 2]); async.done(); }); log.push(1); e.emit(2); log.push(3); })); it('reports whether it has subscribers', () => { const e = new EventEmitter(false); expect(e.observers.length > 0).toBe(false); e.subscribe({next: () => {}}); expect(e.observers.length > 0).toBe(true); }); it('remove a subscriber subscribed directly to EventEmitter', () => { const sub = emitter.subscribe(); expect(emitter.observers.length).toBe(1); sub.unsubscribe(); expect(emitter.observers.length).toBe(0); }); it('remove a subscriber subscribed after applying operators with pipe()', () => { const sub = emitter.pipe(filter(() => true)).subscribe(); expect(emitter.observers.length).toBe(1); sub.unsubscribe(); expect(emitter.observers.length).toBe(0); }); it('unsubscribing a subscriber invokes the dispose method', () => { inject([AsyncTestCompleter], (async: AsyncTestCompleter) => { const sub = emitter.subscribe(); sub.add(() => async.done()); sub.unsubscribe(); }); }); it('unsubscribing a subscriber after applying operators with pipe() invokes the dispose method', () => { inject([AsyncTestCompleter], (async: AsyncTestCompleter) => { const sub = emitter.pipe(filter(() => true)).subscribe(); sub.add(() => async.done()); sub.unsubscribe(); }); }); it('error thrown inside an Rx chain propagates to the error handler and 
disposes the chain', () => { let errorPropagated = false; emitter .pipe( filter(() => { throw new Error(); }), ) .subscribe( () => {}, err => errorPropagated = true, ); emitter.next(1); expect(errorPropagated).toBe(true); expect(emitter.observers.length).toBe(0); }); it('error sent by EventEmitter should dispose the Rx chain and remove subscribers', () => { let errorPropagated = false; emitter.pipe(filter(() => true)) .subscribe( () => {}, err => errorPropagated = true, ); emitter.error(1); expect(errorPropagated).toBe(true); expect(emitter.observers.length).toBe(0); }); // TODO: vsavkin: add tests cases // should call dispose on the subscription if generator returns {done:true} // should call dispose on the subscription on throw // should call dispose on the subscription on return }); }
wKoza/angular
packages/core/test/event_emitter_spec.ts
TypeScript
mit
6,262