code (string, 3 to 1.01M) | repo_name (string, 5 to 116) | path (string, 3 to 311) | language (30 classes) | license (15 classes) | size (int64, 3 to 1.01M)
---|---|---|---|---|---|
/*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.rsocket.core;
import io.rsocket.ConnectionSetupPayload;
import io.rsocket.RSocket;
import io.rsocket.SocketAcceptor;
import reactor.core.publisher.Mono;
import org.springframework.security.core.context.ReactiveSecurityContextHolder;
import org.springframework.security.core.context.SecurityContext;
/**
* A {@link SocketAcceptor} that captures the {@link SecurityContext} and then continues
* with the {@link RSocket}
*
* @author Rob Winch
*/
class CaptureSecurityContextSocketAcceptor implements SocketAcceptor {
private final RSocket accept;
private SecurityContext securityContext;
CaptureSecurityContextSocketAcceptor(RSocket accept) {
this.accept = accept;
}
@Override
public Mono<RSocket> accept(ConnectionSetupPayload setup, RSocket sendingSocket) {
return ReactiveSecurityContextHolder.getContext()
.doOnNext((securityContext) -> this.securityContext = securityContext).thenReturn(this.accept);
}
SecurityContext getSecurityContext() {
return this.securityContext;
}
}
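// Illustrative usage sketch: a test can wrap a terminal RSocket in this acceptor,
// run accept(...) with the Reactor context populated, and then read the captured
// SecurityContext. The rSocket, setupPayload, sendingSocket and authentication
// variables are hypothetical stand-ins, and contextWrite assumes Reactor 3.4+.
//
//   CaptureSecurityContextSocketAcceptor capture =
//       new CaptureSecurityContextSocketAcceptor(rSocket);
//   capture.accept(setupPayload, sendingSocket)
//       .contextWrite(ReactiveSecurityContextHolder.withAuthentication(authentication))
//       .block();
//   SecurityContext captured = capture.getSecurityContext();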
| jgrandja/spring-security | rsocket/src/test/java/org/springframework/security/rsocket/core/CaptureSecurityContextSocketAcceptor.java | Java | apache-2.0 | 1,665 |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.events.jpa;
import org.keycloak.events.admin.AdminEvent;
import org.keycloak.events.admin.AdminEventQuery;
import org.keycloak.events.admin.OperationType;
import org.keycloak.events.admin.ResourceType;
import javax.persistence.EntityManager;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Expression;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import java.util.ArrayList;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
/**
* @author <a href="mailto:giriraj.sharma27@gmail.com">Giriraj Sharma</a>
*/
public class JpaAdminEventQuery implements AdminEventQuery {
private final EntityManager em;
private final CriteriaBuilder cb;
private final CriteriaQuery<AdminEventEntity> cq;
private final Root<AdminEventEntity> root;
private final ArrayList<Predicate> predicates;
private Integer firstResult;
private Integer maxResults;
public JpaAdminEventQuery(EntityManager em) {
this.em = em;
cb = em.getCriteriaBuilder();
cq = cb.createQuery(AdminEventEntity.class);
root = cq.from(AdminEventEntity.class);
predicates = new ArrayList<Predicate>();
}
@Override
public AdminEventQuery realm(String realmId) {
predicates.add(cb.equal(root.get("realmId"), realmId));
return this;
}
@Override
public AdminEventQuery operation(OperationType... operations) {
List<String> operationStrings = new LinkedList<String>();
for (OperationType e : operations) {
operationStrings.add(e.toString());
}
predicates.add(root.get("operationType").in(operationStrings));
return this;
}
@Override
public AdminEventQuery resourceType(ResourceType... resourceTypes) {
List<String> resourceTypeStrings = new LinkedList<String>();
for (ResourceType e : resourceTypes) {
resourceTypeStrings.add(e.toString());
}
predicates.add(root.get("resourceType").in(resourceTypeStrings));
return this;
}
@Override
public AdminEventQuery authRealm(String authRealmId) {
predicates.add(cb.equal(root.get("authRealmId"), authRealmId));
return this;
}
@Override
public AdminEventQuery authClient(String authClientId) {
predicates.add(cb.equal(root.get("authClientId"), authClientId));
return this;
}
@Override
public AdminEventQuery authUser(String authUserId) {
predicates.add(cb.equal(root.get("authUserId"), authUserId));
return this;
}
@Override
public AdminEventQuery authIpAddress(String ipAddress) {
predicates.add(cb.equal(root.get("authIpAddress"), ipAddress));
return this;
}
@Override
public AdminEventQuery resourcePath(String resourcePath) {
Expression<String> rPath = root.get("resourcePath");
predicates.add(cb.like(rPath, "%"+resourcePath+"%"));
return this;
}
@Override
public AdminEventQuery fromTime(Date fromTime) {
predicates.add(cb.greaterThanOrEqualTo(root.<Long>get("time"), fromTime.getTime()));
return this;
}
@Override
public AdminEventQuery toTime(Date toTime) {
predicates.add(cb.lessThanOrEqualTo(root.<Long>get("time"), toTime.getTime()));
return this;
}
@Override
public AdminEventQuery firstResult(int firstResult) {
this.firstResult = firstResult;
return this;
}
@Override
public AdminEventQuery maxResults(int maxResults) {
this.maxResults = maxResults;
return this;
}
@Override
public List<AdminEvent> getResultList() {
if (!predicates.isEmpty()) {
cq.where(cb.and(predicates.toArray(new Predicate[predicates.size()])));
}
cq.orderBy(cb.desc(root.get("time")));
TypedQuery<AdminEventEntity> query = em.createQuery(cq);
if (firstResult != null) {
query.setFirstResult(firstResult);
}
if (maxResults != null) {
query.setMaxResults(maxResults);
}
List<AdminEvent> events = new LinkedList<AdminEvent>();
for (AdminEventEntity e : query.getResultList()) {
events.add(JpaEventStoreProvider.convertAdminEvent(e));
}
return events;
}
}
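// Illustrative usage sketch: the query is a fluent builder, so filters are chained
// and the criteria query only executes in getResultList(). The entityManager,
// realm id and date values below are hypothetical.
//
//   List<AdminEvent> events = new JpaAdminEventQuery(entityManager)
//       .realm("master")
//       .operation(OperationType.CREATE, OperationType.UPDATE)
//       .fromTime(yesterday)
//       .firstResult(0)
//       .maxResults(50)
//       .getResultList();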
| iperdomo/keycloak | model/jpa/src/main/java/org/keycloak/events/jpa/JpaAdminEventQuery.java | Java | apache-2.0 | 5,197 |
/*
* Copyright (c) 1999, 2010, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.math;
/**
* A class used to represent multiprecision integers that makes efficient
* use of allocated space by allowing a number to occupy only part of
* an array so that the arrays do not have to be reallocated as often.
* When performing an operation with many iterations the array used to
* hold a number is only reallocated when necessary and does not have to
* be the same size as the number it represents. A mutable number allows
* calculations to occur on the same number without having to create
* a new number for every step of the calculation as occurs with
* BigIntegers.
*
* @see BigInteger
* @author Michael McCloskey
* @since 1.3
*/
import java.util.Arrays;
import static java.math.BigInteger.LONG_MASK;
import static java.math.BigDecimal.INFLATED;
class MutableBigInteger {
/**
* Holds the magnitude of this MutableBigInteger in big endian order.
* The magnitude may start at an offset into the value array, and it may
* end before the length of the value array.
*/
int[] value;
/**
* The number of ints of the value array that are currently used
* to hold the magnitude of this MutableBigInteger. The magnitude starts
* at an offset and offset + intLen may be less than value.length.
*/
int intLen;
/**
* The offset into the value array where the magnitude of this
* MutableBigInteger begins.
*/
int offset = 0;
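// Representation example (illustrative): the value 2^32 + 5 can be held as
// value == {1, 5}, intLen == 2, offset == 0; after operations that shrink the
// magnitude, the same backing array may be reused with a non-zero offset and a
// smaller intLen, avoiding a reallocation.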
// Constants
/**
* MutableBigInteger with one element value array with the value 1. Used by
* BigDecimal divideAndRound to increment the quotient. Use this constant
* only when the method is not going to modify this object.
*/
static final MutableBigInteger ONE = new MutableBigInteger(1);
// Constructors
/**
* The default constructor. An empty MutableBigInteger is created with
* a one word capacity.
*/
MutableBigInteger() {
value = new int[1];
intLen = 0;
}
/**
* Construct a new MutableBigInteger with a magnitude specified by
* the int val.
*/
MutableBigInteger(int val) {
value = new int[1];
intLen = 1;
value[0] = val;
}
/**
* Construct a new MutableBigInteger with the specified value array
* up to the length of the array supplied.
*/
MutableBigInteger(int[] val) {
value = val;
intLen = val.length;
}
/**
* Construct a new MutableBigInteger with a magnitude equal to the
* specified BigInteger.
*/
MutableBigInteger(BigInteger b) {
intLen = b.mag.length;
value = Arrays.copyOf(b.mag, intLen);
}
/**
* Construct a new MutableBigInteger with a magnitude equal to the
* specified MutableBigInteger.
*/
MutableBigInteger(MutableBigInteger val) {
intLen = val.intLen;
value = Arrays.copyOfRange(val.value, val.offset, val.offset + intLen);
}
/**
* Internal helper method to return the magnitude array. The caller is not
* supposed to modify the returned array.
*/
private int[] getMagnitudeArray() {
if (offset > 0 || value.length != intLen)
return Arrays.copyOfRange(value, offset, offset + intLen);
return value;
}
/**
* Convert this MutableBigInteger to a long value. The caller has to make
* sure this MutableBigInteger can be fit into long.
*/
private long toLong() {
assert (intLen <= 2) : "this MutableBigInteger exceeds the range of long";
if (intLen == 0)
return 0;
long d = value[offset] & LONG_MASK;
return (intLen == 2) ? d << 32 | (value[offset + 1] & LONG_MASK) : d;
}
/**
* Convert this MutableBigInteger to a BigInteger object.
*/
BigInteger toBigInteger(int sign) {
if (intLen == 0 || sign == 0)
return BigInteger.ZERO;
return new BigInteger(getMagnitudeArray(), sign);
}
/**
* Convert this MutableBigInteger to BigDecimal object with the specified sign
* and scale.
*/
BigDecimal toBigDecimal(int sign, int scale) {
if (intLen == 0 || sign == 0)
return BigDecimal.valueOf(0, scale);
int[] mag = getMagnitudeArray();
int len = mag.length;
int d = mag[0];
// If this MutableBigInteger can't be fit into long, we need to
// make a BigInteger object for the resultant BigDecimal object.
if (len > 2 || (d < 0 && len == 2))
return new BigDecimal(new BigInteger(mag, sign), INFLATED, scale, 0);
long v = (len == 2) ?
((mag[1] & LONG_MASK) | (d & LONG_MASK) << 32) :
d & LONG_MASK;
return new BigDecimal(null, sign == -1 ? -v : v, scale, 0);
}
/**
* Clear out a MutableBigInteger for reuse.
*/
void clear() {
offset = intLen = 0;
for (int index=0, n=value.length; index < n; index++)
value[index] = 0;
}
/**
* Set a MutableBigInteger to zero, removing its offset.
*/
void reset() {
offset = intLen = 0;
}
/**
* Compare the magnitude of two MutableBigIntegers. Returns -1, 0 or 1
* as this MutableBigInteger is numerically less than, equal to, or
* greater than <tt>b</tt>.
*/
final int compare(MutableBigInteger b) {
int blen = b.intLen;
if (intLen < blen)
return -1;
if (intLen > blen)
return 1;
// Add Integer.MIN_VALUE to make the comparison act as unsigned integer
// comparison.
int[] bval = b.value;
for (int i = offset, j = b.offset; i < intLen + offset; i++, j++) {
int b1 = value[i] + 0x80000000;
int b2 = bval[j] + 0x80000000;
if (b1 < b2)
return -1;
if (b1 > b2)
return 1;
}
return 0;
}
/**
* Compare this against half of a MutableBigInteger object (Needed for
* remainder tests).
* Assumes no leading unnecessary zeros, which holds for results
* from divide().
*/
final int compareHalf(MutableBigInteger b) {
int blen = b.intLen;
int len = intLen;
if (len <= 0)
return blen <=0 ? 0 : -1;
if (len > blen)
return 1;
if (len < blen - 1)
return -1;
int[] bval = b.value;
int bstart = 0;
int carry = 0;
// Only 2 cases left: len == blen or len == blen - 1
if (len != blen) { // len == blen - 1
if (bval[bstart] == 1) {
++bstart;
carry = 0x80000000;
} else
return -1;
}
// compare values with right-shifted values of b,
// carrying shifted-out bits across words
int[] val = value;
for (int i = offset, j = bstart; i < len + offset;) {
int bv = bval[j++];
long hb = ((bv >>> 1) + carry) & LONG_MASK;
long v = val[i++] & LONG_MASK;
if (v != hb)
return v < hb ? -1 : 1;
carry = (bv & 1) << 31; // carry will be either 0x80000000 or 0
}
return carry == 0? 0 : -1;
}
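// Worked example, assuming the contract above: with this == 3 and b == 7, half of
// b is 3.5; the loop sees v == hb == 3 and the shifted-out low bit of b leaves a
// non-zero carry, so the method returns -1, i.e. this is less than b/2.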
/**
* Return the index of the lowest set bit in this MutableBigInteger. If the
* magnitude of this MutableBigInteger is zero, -1 is returned.
*/
private final int getLowestSetBit() {
if (intLen == 0)
return -1;
int j, b;
for (j=intLen-1; (j>0) && (value[j+offset]==0); j--)
;
b = value[j+offset];
if (b==0)
return -1;
return ((intLen-1-j)<<5) + Integer.numberOfTrailingZeros(b);
}
/**
* Return the int in use in this MutableBigInteger at the specified
* index. This method is not used because it is not inlined on all
* platforms.
*/
private final int getInt(int index) {
return value[offset+index];
}
/**
* Return a long which is equal to the unsigned value of the int in
* use in this MutableBigInteger at the specified index. This method is
* not used because it is not inlined on all platforms.
*/
private final long getLong(int index) {
return value[offset+index] & LONG_MASK;
}
/**
* Ensure that the MutableBigInteger is in normal form, specifically
* making sure that there are no leading zeros, and that if the
* magnitude is zero, then intLen is zero.
*/
final void normalize() {
if (intLen == 0) {
offset = 0;
return;
}
int index = offset;
if (value[index] != 0)
return;
int indexBound = index+intLen;
do {
index++;
} while(index < indexBound && value[index]==0);
int numZeros = index - offset;
intLen -= numZeros;
offset = (intLen==0 ? 0 : offset+numZeros);
}
/**
* If this MutableBigInteger cannot hold len words, increase the size
* of the value array to len words.
*/
private final void ensureCapacity(int len) {
if (value.length < len) {
value = new int[len];
offset = 0;
intLen = len;
}
}
/**
* Convert this MutableBigInteger into an int array with no leading
* zeros, of a length that is equal to this MutableBigInteger's intLen.
*/
int[] toIntArray() {
int[] result = new int[intLen];
for(int i=0; i<intLen; i++)
result[i] = value[offset+i];
return result;
}
/**
* Sets the int at index+offset in this MutableBigInteger to val.
* This does not get inlined on all platforms so it is not used
* as often as originally intended.
*/
void setInt(int index, int val) {
value[offset + index] = val;
}
/**
* Sets this MutableBigInteger's value array to the specified array.
* The intLen is set to the specified length.
*/
void setValue(int[] val, int length) {
value = val;
intLen = length;
offset = 0;
}
/**
* Sets this MutableBigInteger's value array to a copy of the specified
* array. The intLen is set to the length of the new array.
*/
void copyValue(MutableBigInteger src) {
int len = src.intLen;
if (value.length < len)
value = new int[len];
System.arraycopy(src.value, src.offset, value, 0, len);
intLen = len;
offset = 0;
}
/**
* Sets this MutableBigInteger's value array to a copy of the specified
* array. The intLen is set to the length of the specified array.
*/
void copyValue(int[] val) {
int len = val.length;
if (value.length < len)
value = new int[len];
System.arraycopy(val, 0, value, 0, len);
intLen = len;
offset = 0;
}
/**
* Returns true iff this MutableBigInteger has a value of one.
*/
boolean isOne() {
return (intLen == 1) && (value[offset] == 1);
}
/**
* Returns true iff this MutableBigInteger has a value of zero.
*/
boolean isZero() {
return (intLen == 0);
}
/**
* Returns true iff this MutableBigInteger is even.
*/
boolean isEven() {
return (intLen == 0) || ((value[offset + intLen - 1] & 1) == 0);
}
/**
* Returns true iff this MutableBigInteger is odd.
*/
boolean isOdd() {
return isZero() ? false : ((value[offset + intLen - 1] & 1) == 1);
}
/**
* Returns true iff this MutableBigInteger is in normal form. A
* MutableBigInteger is in normal form if it has no leading zeros
* after the offset, and intLen + offset <= value.length.
*/
boolean isNormal() {
if (intLen + offset > value.length)
return false;
if (intLen ==0)
return true;
return (value[offset] != 0);
}
/**
* Returns a String representation of this MutableBigInteger in radix 10.
*/
public String toString() {
BigInteger b = toBigInteger(1);
return b.toString();
}
/**
* Right shift this MutableBigInteger n bits. The MutableBigInteger is left
* in normal form.
*/
void rightShift(int n) {
if (intLen == 0)
return;
int nInts = n >>> 5;
int nBits = n & 0x1F;
this.intLen -= nInts;
if (nBits == 0)
return;
int bitsInHighWord = BigInteger.bitLengthForInt(value[offset]);
if (nBits >= bitsInHighWord) {
this.primitiveLeftShift(32 - nBits);
this.intLen--;
} else {
primitiveRightShift(nBits);
}
}
/**
* Left shift this MutableBigInteger n bits.
*/
void leftShift(int n) {
/*
* If there is enough storage space in this MutableBigInteger already
* the available space will be used. Space to the right of the used
* ints in the value array is faster to utilize, so the extra space
* will be taken from the right if possible.
*/
if (intLen == 0)
return;
int nInts = n >>> 5;
int nBits = n&0x1F;
int bitsInHighWord = BigInteger.bitLengthForInt(value[offset]);
// If shift can be done without moving words, do so
if (n <= (32-bitsInHighWord)) {
primitiveLeftShift(nBits);
return;
}
int newLen = intLen + nInts +1;
if (nBits <= (32-bitsInHighWord))
newLen--;
if (value.length < newLen) {
// The array must grow
int[] result = new int[newLen];
for (int i=0; i<intLen; i++)
result[i] = value[offset+i];
setValue(result, newLen);
} else if (value.length - offset >= newLen) {
// Use space on right
for(int i=0; i<newLen - intLen; i++)
value[offset+intLen+i] = 0;
} else {
// Must use space on left
for (int i=0; i<intLen; i++)
value[i] = value[offset+i];
for (int i=intLen; i<newLen; i++)
value[i] = 0;
offset = 0;
}
intLen = newLen;
if (nBits == 0)
return;
if (nBits <= (32-bitsInHighWord))
primitiveLeftShift(nBits);
else
primitiveRightShift(32 -nBits);
}
/**
* A primitive used for division. This method adds in one multiple of the
* divisor a back to the dividend result at a specified offset. It is used
* when qhat was estimated too large, and must be adjusted.
*/
private int divadd(int[] a, int[] result, int offset) {
long carry = 0;
for (int j=a.length-1; j >= 0; j--) {
long sum = (a[j] & LONG_MASK) +
(result[j+offset] & LONG_MASK) + carry;
result[j+offset] = (int)sum;
carry = sum >>> 32;
}
return (int)carry;
}
/**
* This method is used for division. It multiplies an n word input a by one
* word input x, and subtracts the n word product from q. This is needed
* when subtracting qhat*divisor from dividend.
*/
private int mulsub(int[] q, int[] a, int x, int len, int offset) {
long xLong = x & LONG_MASK;
long carry = 0;
offset += len;
for (int j=len-1; j >= 0; j--) {
long product = (a[j] & LONG_MASK) * xLong + carry;
long difference = q[offset] - product;
q[offset--] = (int)difference;
carry = (product >>> 32)
+ (((difference & LONG_MASK) >
(((~(int)product) & LONG_MASK))) ? 1:0);
}
return (int)carry;
}
/**
* Right shift this MutableBigInteger n bits, where n is
* less than 32.
* Assumes that intLen > 0, n > 0 for speed
*/
private final void primitiveRightShift(int n) {
int[] val = value;
int n2 = 32 - n;
for (int i=offset+intLen-1, c=val[i]; i>offset; i--) {
int b = c;
c = val[i-1];
val[i] = (c << n2) | (b >>> n);
}
val[offset] >>>= n;
}
/**
* Left shift this MutableBigInteger n bits, where n is
* less than 32.
* Assumes that intLen > 0, n > 0 for speed
*/
private final void primitiveLeftShift(int n) {
int[] val = value;
int n2 = 32 - n;
for (int i=offset, c=val[i], m=i+intLen-1; i<m; i++) {
int b = c;
c = val[i+1];
val[i] = (b << n) | (c >>> n2);
}
val[offset+intLen-1] <<= n;
}
/**
* Adds the contents of two MutableBigInteger objects. The result
* is placed within this MutableBigInteger.
* The contents of the addend are not changed.
*/
void add(MutableBigInteger addend) {
int x = intLen;
int y = addend.intLen;
int resultLen = (intLen > addend.intLen ? intLen : addend.intLen);
int[] result = (value.length < resultLen ? new int[resultLen] : value);
int rstart = result.length-1;
long sum;
long carry = 0;
// Add common parts of both numbers
while(x>0 && y>0) {
x--; y--;
sum = (value[x+offset] & LONG_MASK) +
(addend.value[y+addend.offset] & LONG_MASK) + carry;
result[rstart--] = (int)sum;
carry = sum >>> 32;
}
// Add remainder of the longer number
while(x>0) {
x--;
if (carry == 0 && result == value && rstart == (x + offset))
return;
sum = (value[x+offset] & LONG_MASK) + carry;
result[rstart--] = (int)sum;
carry = sum >>> 32;
}
while(y>0) {
y--;
sum = (addend.value[y+addend.offset] & LONG_MASK) + carry;
result[rstart--] = (int)sum;
carry = sum >>> 32;
}
if (carry > 0) { // Result must grow in length
resultLen++;
if (result.length < resultLen) {
int temp[] = new int[resultLen];
// Result one word longer from carry-out; copy low-order
// bits into new result.
System.arraycopy(result, 0, temp, 1, result.length);
temp[0] = 1;
result = temp;
} else {
result[rstart--] = 1;
}
}
value = result;
intLen = resultLen;
offset = result.length - resultLen;
}
/**
* Subtracts the smaller of this and b from the larger and places the
* result into this MutableBigInteger.
*/
int subtract(MutableBigInteger b) {
MutableBigInteger a = this;
int[] result = value;
int sign = a.compare(b);
if (sign == 0) {
reset();
return 0;
}
if (sign < 0) {
MutableBigInteger tmp = a;
a = b;
b = tmp;
}
int resultLen = a.intLen;
if (result.length < resultLen)
result = new int[resultLen];
long diff = 0;
int x = a.intLen;
int y = b.intLen;
int rstart = result.length - 1;
// Subtract common parts of both numbers
while (y>0) {
x--; y--;
diff = (a.value[x+a.offset] & LONG_MASK) -
(b.value[y+b.offset] & LONG_MASK) - ((int)-(diff>>32));
result[rstart--] = (int)diff;
}
// Subtract remainder of longer number
while (x>0) {
x--;
diff = (a.value[x+a.offset] & LONG_MASK) - ((int)-(diff>>32));
result[rstart--] = (int)diff;
}
value = result;
intLen = resultLen;
offset = value.length - resultLen;
normalize();
return sign;
}
/**
* Subtracts the smaller of a and b from the larger and places the result
* into the larger. Returns 1 if the answer is in a, -1 if in b, 0 if no
* operation was performed.
*/
private int difference(MutableBigInteger b) {
MutableBigInteger a = this;
int sign = a.compare(b);
if (sign ==0)
return 0;
if (sign < 0) {
MutableBigInteger tmp = a;
a = b;
b = tmp;
}
long diff = 0;
int x = a.intLen;
int y = b.intLen;
// Subtract common parts of both numbers
while (y>0) {
x--; y--;
diff = (a.value[a.offset+ x] & LONG_MASK) -
(b.value[b.offset+ y] & LONG_MASK) - ((int)-(diff>>32));
a.value[a.offset+x] = (int)diff;
}
// Subtract remainder of longer number
while (x>0) {
x--;
diff = (a.value[a.offset+ x] & LONG_MASK) - ((int)-(diff>>32));
a.value[a.offset+x] = (int)diff;
}
a.normalize();
return sign;
}
/**
* Multiply the contents of two MutableBigInteger objects. The result is
* placed into MutableBigInteger z. The contents of y are not changed.
*/
void multiply(MutableBigInteger y, MutableBigInteger z) {
int xLen = intLen;
int yLen = y.intLen;
int newLen = xLen + yLen;
// Put z into an appropriate state to receive product
if (z.value.length < newLen)
z.value = new int[newLen];
z.offset = 0;
z.intLen = newLen;
// The first iteration is hoisted out of the loop to avoid extra add
long carry = 0;
for (int j=yLen-1, k=yLen+xLen-1; j >= 0; j--, k--) {
long product = (y.value[j+y.offset] & LONG_MASK) *
(value[xLen-1+offset] & LONG_MASK) + carry;
z.value[k] = (int)product;
carry = product >>> 32;
}
z.value[xLen-1] = (int)carry;
// Perform the multiplication word by word
for (int i = xLen-2; i >= 0; i--) {
carry = 0;
for (int j=yLen-1, k=yLen+i; j >= 0; j--, k--) {
long product = (y.value[j+y.offset] & LONG_MASK) *
(value[i+offset] & LONG_MASK) +
(z.value[k] & LONG_MASK) + carry;
z.value[k] = (int)product;
carry = product >>> 32;
}
z.value[i] = (int)carry;
}
// Remove leading zeros from product
z.normalize();
}
/**
* Multiply the contents of this MutableBigInteger by the word y. The
* result is placed into z.
*/
void mul(int y, MutableBigInteger z) {
if (y == 1) {
z.copyValue(this);
return;
}
if (y == 0) {
z.clear();
return;
}
// Perform the multiplication word by word
long ylong = y & LONG_MASK;
int[] zval = (z.value.length<intLen+1 ? new int[intLen + 1]
: z.value);
long carry = 0;
for (int i = intLen-1; i >= 0; i--) {
long product = ylong * (value[i+offset] & LONG_MASK) + carry;
zval[i+1] = (int)product;
carry = product >>> 32;
}
if (carry == 0) {
z.offset = 1;
z.intLen = intLen;
} else {
z.offset = 0;
z.intLen = intLen + 1;
zval[0] = (int)carry;
}
z.value = zval;
}
/**
* This method is used for division of an n word dividend by a one word
* divisor. The quotient is placed into quotient. The one word divisor is
* specified by divisor.
*
* @return the remainder of the division.
*
*/
int divideOneWord(int divisor, MutableBigInteger quotient) {
long divisorLong = divisor & LONG_MASK;
// Special case of one word dividend
if (intLen == 1) {
long dividendValue = value[offset] & LONG_MASK;
int q = (int) (dividendValue / divisorLong);
int r = (int) (dividendValue - q * divisorLong);
quotient.value[0] = q;
quotient.intLen = (q == 0) ? 0 : 1;
quotient.offset = 0;
return r;
}
if (quotient.value.length < intLen)
quotient.value = new int[intLen];
quotient.offset = 0;
quotient.intLen = intLen;
// Normalize the divisor
int shift = Integer.numberOfLeadingZeros(divisor);
int rem = value[offset];
long remLong = rem & LONG_MASK;
if (remLong < divisorLong) {
quotient.value[0] = 0;
} else {
quotient.value[0] = (int)(remLong / divisorLong);
rem = (int) (remLong - (quotient.value[0] * divisorLong));
remLong = rem & LONG_MASK;
}
int xlen = intLen;
int[] qWord = new int[2];
while (--xlen > 0) {
long dividendEstimate = (remLong<<32) |
(value[offset + intLen - xlen] & LONG_MASK);
if (dividendEstimate >= 0) {
qWord[0] = (int) (dividendEstimate / divisorLong);
qWord[1] = (int) (dividendEstimate - qWord[0] * divisorLong);
} else {
divWord(qWord, dividendEstimate, divisor);
}
quotient.value[intLen - xlen] = qWord[0];
rem = qWord[1];
remLong = rem & LONG_MASK;
}
quotient.normalize();
// Unnormalize
if (shift > 0)
return rem % divisor;
else
return rem;
}
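// Worked example, assuming the contract above: dividing the two-word magnitude
// {1, 0} (the value 2^32) by the one-word divisor 3 leaves the quotient holding
// 0x55555555 and returns remainder 1, since 3 * 0x55555555 + 1 == 2^32.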
/**
* Calculates the quotient of this div b and places the quotient in the
* provided MutableBigInteger object; the remainder object is returned.
*
* Uses Algorithm D in Knuth section 4.3.1.
* Many optimizations to that algorithm have been adapted from the Colin
* Plumb C library.
* It special cases one word divisors for speed. The content of b is not
* changed.
*
*/
MutableBigInteger divide(MutableBigInteger b, MutableBigInteger quotient) {
if (b.intLen == 0)
throw new ArithmeticException("BigInteger divide by zero");
// Dividend is zero
if (intLen == 0) {
quotient.intLen = quotient.offset = 0;
return new MutableBigInteger();
}
int cmp = compare(b);
// Dividend less than divisor
if (cmp < 0) {
quotient.intLen = quotient.offset = 0;
return new MutableBigInteger(this);
}
// Dividend equal to divisor
if (cmp == 0) {
quotient.value[0] = quotient.intLen = 1;
quotient.offset = 0;
return new MutableBigInteger();
}
quotient.clear();
// Special case one word divisor
if (b.intLen == 1) {
int r = divideOneWord(b.value[b.offset], quotient);
if (r == 0)
return new MutableBigInteger();
return new MutableBigInteger(r);
}
// Copy divisor value to protect divisor
int[] div = Arrays.copyOfRange(b.value, b.offset, b.offset + b.intLen);
return divideMagnitude(div, quotient);
}
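// Illustrative call pattern (hypothetical variable names): the caller supplies a
// quotient holder and receives the remainder as the return value.
//
//   MutableBigInteger q = new MutableBigInteger();
//   MutableBigInteger r = dividend.divide(divisor, q); // r is the remainder
//   BigInteger quotient  = q.toBigInteger(1);
//   BigInteger remainder = r.toBigInteger(1);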
/**
* Internally used to calculate the quotient of this div v; the quotient is
* placed in the provided MutableBigInteger object and the remainder is
* returned.
*
* @return the remainder of the division.
*/
long divide(long v, MutableBigInteger quotient) {
if (v == 0)
throw new ArithmeticException("BigInteger divide by zero");
// Dividend is zero
if (intLen == 0) {
quotient.intLen = quotient.offset = 0;
return 0;
}
if (v < 0)
v = -v;
int d = (int)(v >>> 32);
quotient.clear();
// Special case one word divisor
if (d == 0)
return divideOneWord((int)v, quotient) & LONG_MASK;
else {
int[] div = new int[]{ d, (int)(v & LONG_MASK) };
return divideMagnitude(div, quotient).toLong();
}
}
/**
* Divide this MutableBigInteger by the divisor represented by its magnitude
* array. The quotient will be placed into the provided quotient object and
* the remainder object is returned.
*/
private MutableBigInteger divideMagnitude(int[] divisor,
MutableBigInteger quotient) {
// Remainder starts as dividend with space for a leading zero
MutableBigInteger rem = new MutableBigInteger(new int[intLen + 1]);
System.arraycopy(value, offset, rem.value, 1, intLen);
rem.intLen = intLen;
rem.offset = 1;
int nlen = rem.intLen;
// Set the quotient size
int dlen = divisor.length;
int limit = nlen - dlen + 1;
if (quotient.value.length < limit) {
quotient.value = new int[limit];
quotient.offset = 0;
}
quotient.intLen = limit;
int[] q = quotient.value;
// D1 normalize the divisor
int shift = Integer.numberOfLeadingZeros(divisor[0]);
if (shift > 0) {
// First shift will not grow array
BigInteger.primitiveLeftShift(divisor, dlen, shift);
// But this one might
rem.leftShift(shift);
}
// Must insert leading 0 in rem if its length did not change
if (rem.intLen == nlen) {
rem.offset = 0;
rem.value[0] = 0;
rem.intLen++;
}
int dh = divisor[0];
long dhLong = dh & LONG_MASK;
int dl = divisor[1];
int[] qWord = new int[2];
// D2 Initialize j
for(int j=0; j<limit; j++) {
// D3 Calculate qhat
// estimate qhat
int qhat = 0;
int qrem = 0;
boolean skipCorrection = false;
int nh = rem.value[j+rem.offset];
int nh2 = nh + 0x80000000;
int nm = rem.value[j+1+rem.offset];
if (nh == dh) {
qhat = ~0;
qrem = nh + nm;
skipCorrection = qrem + 0x80000000 < nh2;
} else {
long nChunk = (((long)nh) << 32) | (nm & LONG_MASK);
if (nChunk >= 0) {
qhat = (int) (nChunk / dhLong);
qrem = (int) (nChunk - (qhat * dhLong));
} else {
divWord(qWord, nChunk, dh);
qhat = qWord[0];
qrem = qWord[1];
}
}
if (qhat == 0)
continue;
if (!skipCorrection) { // Correct qhat
long nl = rem.value[j+2+rem.offset] & LONG_MASK;
long rs = ((qrem & LONG_MASK) << 32) | nl;
long estProduct = (dl & LONG_MASK) * (qhat & LONG_MASK);
if (unsignedLongCompare(estProduct, rs)) {
qhat--;
qrem = (int)((qrem & LONG_MASK) + dhLong);
if ((qrem & LONG_MASK) >= dhLong) {
estProduct -= (dl & LONG_MASK);
rs = ((qrem & LONG_MASK) << 32) | nl;
if (unsignedLongCompare(estProduct, rs))
qhat--;
}
}
}
// D4 Multiply and subtract
rem.value[j+rem.offset] = 0;
int borrow = mulsub(rem.value, divisor, qhat, dlen, j+rem.offset);
// D5 Test remainder
if (borrow + 0x80000000 > nh2) {
// D6 Add back
divadd(divisor, rem.value, j+1+rem.offset);
qhat--;
}
// Store the quotient digit
q[j] = qhat;
} // D7 loop on j
// D8 Unnormalize
if (shift > 0)
rem.rightShift(shift);
quotient.normalize();
rem.normalize();
return rem;
}
/**
* Compare two longs as if they were unsigned.
* Returns true iff one is bigger than two.
*/
private boolean unsignedLongCompare(long one, long two) {
return (one+Long.MIN_VALUE) > (two+Long.MIN_VALUE);
}
/**
* This method divides a long quantity by an int to estimate
* qhat for two multi precision numbers. It is used when
* the signed value of n is less than zero.
*/
private void divWord(int[] result, long n, int d) {
long dLong = d & LONG_MASK;
if (dLong == 1) {
result[0] = (int)n;
result[1] = 0;
return;
}
// Approximate the quotient and remainder
long q = (n >>> 1) / (dLong >>> 1);
long r = n - q*dLong;
// Correct the approximation
while (r < 0) {
r += dLong;
q--;
}
while (r >= dLong) {
r -= dLong;
q++;
}
// n - q*dLong == r && 0 <= r < dLong, hence we're done.
result[0] = (int)q;
result[1] = (int)r;
}
/**
* Calculate GCD of this and b. This and b are changed by the computation.
*/
MutableBigInteger hybridGCD(MutableBigInteger b) {
// Use Euclid's algorithm until the numbers are approximately the
// same length, then use the binary GCD algorithm to find the GCD.
MutableBigInteger a = this;
MutableBigInteger q = new MutableBigInteger();
while (b.intLen != 0) {
if (Math.abs(a.intLen - b.intLen) < 2)
return a.binaryGCD(b);
MutableBigInteger r = a.divide(b, q);
a = b;
b = r;
}
return a;
}
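// Worked example, assuming the contract above: for this == 462 and b == 1071 both
// operands are one word long, so the length gap is below 2, binaryGCD is entered
// immediately, and the result is 21.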
/**
* Calculate GCD of this and v.
* Assumes that this and v are not zero.
*/
private MutableBigInteger binaryGCD(MutableBigInteger v) {
// Algorithm B from Knuth section 4.5.2
MutableBigInteger u = this;
MutableBigInteger r = new MutableBigInteger();
// step B1
int s1 = u.getLowestSetBit();
int s2 = v.getLowestSetBit();
int k = (s1 < s2) ? s1 : s2;
if (k != 0) {
u.rightShift(k);
v.rightShift(k);
}
// step B2
boolean uOdd = (k==s1);
MutableBigInteger t = uOdd ? v: u;
int tsign = uOdd ? -1 : 1;
int lb;
while ((lb = t.getLowestSetBit()) >= 0) {
// steps B3 and B4
t.rightShift(lb);
// step B5
if (tsign > 0)
u = t;
else
v = t;
// Special case one word numbers
if (u.intLen < 2 && v.intLen < 2) {
int x = u.value[u.offset];
int y = v.value[v.offset];
x = binaryGcd(x, y);
r.value[0] = x;
r.intLen = 1;
r.offset = 0;
if (k > 0)
r.leftShift(k);
return r;
}
// step B6
if ((tsign = u.difference(v)) == 0)
break;
t = (tsign >= 0) ? u : v;
}
if (k > 0)
u.leftShift(k);
return u;
}
/**
* Calculate GCD of a and b interpreted as unsigned integers.
*/
static int binaryGcd(int a, int b) {
if (b==0)
return a;
if (a==0)
return b;
// Right shift a and b until their lowest bits equal 1.
int aZeros = Integer.numberOfTrailingZeros(a);
int bZeros = Integer.numberOfTrailingZeros(b);
a >>>= aZeros;
b >>>= bZeros;
int t = (aZeros < bZeros ? aZeros : bZeros);
while (a != b) {
if ((a+0x80000000) > (b+0x80000000)) { // a > b as unsigned
a -= b;
a >>>= Integer.numberOfTrailingZeros(a);
} else {
b -= a;
b >>>= Integer.numberOfTrailingZeros(b);
}
}
return a<<t;
}
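// Worked example: binaryGcd(12, 18) strips one shared factor of two (t == 1),
// reduces the odd parts 3 and 9 by subtract-and-shift steps until both equal 3,
// and returns 3 << 1 == 6.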
/**
* Returns the modInverse of this mod p. This and p are not affected by
* the operation.
*/
MutableBigInteger mutableModInverse(MutableBigInteger p) {
// Modulus is odd, use Schroeppel's algorithm
if (p.isOdd())
return modInverse(p);
// Base and modulus are even, throw exception
if (isEven())
throw new ArithmeticException("BigInteger not invertible.");
// Get even part of modulus expressed as a power of 2
int powersOf2 = p.getLowestSetBit();
// Construct odd part of modulus
MutableBigInteger oddMod = new MutableBigInteger(p);
oddMod.rightShift(powersOf2);
if (oddMod.isOne())
return modInverseMP2(powersOf2);
// Calculate 1/a mod oddMod
MutableBigInteger oddPart = modInverse(oddMod);
// Calculate 1/a mod evenMod
MutableBigInteger evenPart = modInverseMP2(powersOf2);
// Combine the results using Chinese Remainder Theorem
MutableBigInteger y1 = modInverseBP2(oddMod, powersOf2);
MutableBigInteger y2 = oddMod.modInverseMP2(powersOf2);
MutableBigInteger temp1 = new MutableBigInteger();
MutableBigInteger temp2 = new MutableBigInteger();
MutableBigInteger result = new MutableBigInteger();
oddPart.leftShift(powersOf2);
oddPart.multiply(y1, result);
evenPart.multiply(oddMod, temp1);
temp1.multiply(y2, temp2);
result.add(temp2);
return result.divide(p, temp1);
}
/*
* Calculate the multiplicative inverse of this mod 2^k.
*/
MutableBigInteger modInverseMP2(int k) {
if (isEven())
throw new ArithmeticException("Non-invertible. (GCD != 1)");
if (k > 64)
return euclidModInverse(k);
int t = inverseMod32(value[offset+intLen-1]);
if (k < 33) {
t = (k == 32 ? t : t & ((1 << k) - 1));
return new MutableBigInteger(t);
}
long pLong = (value[offset+intLen-1] & LONG_MASK);
if (intLen > 1)
pLong |= ((long)value[offset+intLen-2] << 32);
long tLong = t & LONG_MASK;
tLong = tLong * (2 - pLong * tLong); // 1 more Newton iter step
tLong = (k == 64 ? tLong : tLong & ((1L << k) - 1));
MutableBigInteger result = new MutableBigInteger(new int[2]);
result.value[0] = (int)(tLong >>> 32);
result.value[1] = (int)tLong;
result.intLen = 2;
result.normalize();
return result;
}
/*
* Returns the multiplicative inverse of val mod 2^32. Assumes val is odd.
*/
static int inverseMod32(int val) {
// Newton's iteration!
int t = val;
t *= 2 - val*t;
t *= 2 - val*t;
t *= 2 - val*t;
t *= 2 - val*t;
return t;
}
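// Illustrative check: each Newton step doubles the number of correct low-order
// bits, so four steps cover 32 bits. For val == 3 the result is 0xAAAAAAAB,
// and 3 * 0xAAAAAAABL == 0x200000001L, whose low 32 bits are 1.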
/*
* Calculate the multiplicative inverse of 2^k mod mod, where mod is odd.
*/
static MutableBigInteger modInverseBP2(MutableBigInteger mod, int k) {
// Copy the mod to protect original
return fixup(new MutableBigInteger(1), new MutableBigInteger(mod), k);
}
/**
* Calculate the multiplicative inverse of this mod mod, where mod is odd.
* This and mod are not changed by the calculation.
*
* This method implements an algorithm due to Richard Schroeppel, that uses
* the same intermediate representation as Montgomery Reduction
* ("Montgomery Form"). The algorithm is described in an unpublished
* manuscript entitled "Fast Modular Reciprocals."
*/
private MutableBigInteger modInverse(MutableBigInteger mod) {
MutableBigInteger p = new MutableBigInteger(mod);
MutableBigInteger f = new MutableBigInteger(this);
MutableBigInteger g = new MutableBigInteger(p);
SignedMutableBigInteger c = new SignedMutableBigInteger(1);
SignedMutableBigInteger d = new SignedMutableBigInteger();
MutableBigInteger temp = null;
SignedMutableBigInteger sTemp = null;
int k = 0;
// Right shift f k times until odd, left shift d k times
if (f.isEven()) {
int trailingZeros = f.getLowestSetBit();
f.rightShift(trailingZeros);
d.leftShift(trailingZeros);
k = trailingZeros;
}
// The Almost Inverse Algorithm
while(!f.isOne()) {
// If gcd(f, g) != 1, number is not invertible modulo mod
if (f.isZero())
throw new ArithmeticException("BigInteger not invertible.");
// If f < g exchange f, g and c, d
if (f.compare(g) < 0) {
temp = f; f = g; g = temp;
sTemp = d; d = c; c = sTemp;
}
// If f == g (mod 4)
if (((f.value[f.offset + f.intLen - 1] ^
g.value[g.offset + g.intLen - 1]) & 3) == 0) {
f.subtract(g);
c.signedSubtract(d);
} else { // If f != g (mod 4)
f.add(g);
c.signedAdd(d);
}
// Right shift f k times until odd, left shift d k times
int trailingZeros = f.getLowestSetBit();
f.rightShift(trailingZeros);
d.leftShift(trailingZeros);
k += trailingZeros;
}
while (c.sign < 0)
c.signedAdd(p);
return fixup(c, p, k);
}
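// Small worked example, assuming the contract above: the inverse of 3 modulo 7
// is 5, since 3 * 5 == 15 == 1 (mod 7); the almost-inverse loop plus fixup
// produces that canonical representative in the range [0, 7).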
/*
* The Fixup Algorithm
* Calculates X such that X = C * 2^(-k) (mod P)
* Assumes C<P and P is odd.
*/
static MutableBigInteger fixup(MutableBigInteger c, MutableBigInteger p,
int k) {
MutableBigInteger temp = new MutableBigInteger();
// Set r to the multiplicative inverse of p mod 2^32
int r = -inverseMod32(p.value[p.offset+p.intLen-1]);
for(int i=0, numWords = k >> 5; i<numWords; i++) {
// V = R * c (mod 2^j)
int v = r * c.value[c.offset + c.intLen-1];
// c = c + (v * p)
p.mul(v, temp);
c.add(temp);
// c = c / 2^j
c.intLen--;
}
int numBits = k & 0x1f;
if (numBits != 0) {
// V = R * c (mod 2^j)
int v = r * c.value[c.offset + c.intLen-1];
v &= ((1<<numBits) - 1);
// c = c + (v * p)
p.mul(v, temp);
c.add(temp);
// c = c / 2^j
c.rightShift(numBits);
}
// In theory, c may be greater than p at this point (Very rare!)
while (c.compare(p) >= 0)
c.subtract(p);
return c;
}
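// Small worked example, assuming the contract above: with c == 3, p == 7 and
// k == 2, fixup returns 6, because 6 * 2^2 == 24 == 3 (mod 7).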
/**
* Uses the extended Euclidean algorithm to compute the modInverse of base
* mod a modulus that is a power of 2. The modulus is 2^k.
*/
MutableBigInteger euclidModInverse(int k) {
MutableBigInteger b = new MutableBigInteger(1);
b.leftShift(k);
MutableBigInteger mod = new MutableBigInteger(b);
MutableBigInteger a = new MutableBigInteger(this);
MutableBigInteger q = new MutableBigInteger();
MutableBigInteger r = b.divide(a, q);
MutableBigInteger swapper = b;
// swap b & r
b = r;
r = swapper;
MutableBigInteger t1 = new MutableBigInteger(q);
MutableBigInteger t0 = new MutableBigInteger(1);
MutableBigInteger temp = new MutableBigInteger();
while (!b.isOne()) {
r = a.divide(b, q);
if (r.intLen == 0)
throw new ArithmeticException("BigInteger not invertible.");
swapper = r;
a = swapper;
if (q.intLen == 1)
t1.mul(q.value[q.offset], temp);
else
q.multiply(t1, temp);
swapper = q;
q = temp;
temp = swapper;
t0.add(q);
if (a.isOne())
return t0;
r = b.divide(a, q);
if (r.intLen == 0)
throw new ArithmeticException("BigInteger not invertible.");
swapper = b;
b = r;
if (q.intLen == 1)
t0.mul(q.value[q.offset], temp);
else
q.multiply(t0, temp);
swapper = q; q = temp; temp = swapper;
t1.add(q);
}
mod.subtract(t1);
return mod;
}
}
| andreagenso/java2scala | test/J2s/java/openjdk-6-src-b27/jdk/src/share/classes/java/math/MutableBigInteger.java | Java | apache-2.0 | 46,139 |
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=US-ASCII">
<title>Class template basic_managed_shared_memory</title>
<link rel="stylesheet" href="../../../../doc/src/boostbook.css" type="text/css">
<meta name="generator" content="DocBook XSL Stylesheets V1.78.1">
<link rel="home" href="../../index.html" title="The Boost C++ Libraries BoostBook Documentation Subset">
<link rel="up" href="../../interprocess/indexes_reference.html#header.boost.interprocess.managed_shared_memory_hpp" title="Header <boost/interprocess/managed_shared_memory.hpp>">
<link rel="prev" href="wmanaged_mapped_file.html" title="Type definition wmanaged_mapped_file">
<link rel="next" href="managed_shared_memory.html" title="Type definition managed_shared_memory">
</head>
<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
<table cellpadding="2" width="100%"><tr>
<td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../boost.png"></td>
<td align="center"><a href="../../../../index.html">Home</a></td>
<td align="center"><a href="../../../../libs/libraries.htm">Libraries</a></td>
<td align="center"><a href="http://www.boost.org/users/people.html">People</a></td>
<td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td>
<td align="center"><a href="../../../../more/index.htm">More</a></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="wmanaged_mapped_file.html"><img src="../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../../interprocess/indexes_reference.html#header.boost.interprocess.managed_shared_memory_hpp"><img src="../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../index.html"><img src="../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="managed_shared_memory.html"><img src="../../../../doc/src/images/next.png" alt="Next"></a>
</div>
<div class="refentry">
<a name="boost.interprocess.basic_managed__idp65355360"></a><div class="titlepage"></div>
<div class="refnamediv">
<h2><span class="refentrytitle">Class template basic_managed_shared_memory</span></h2>
<p>boost::interprocess::basic_managed_shared_memory</p>
</div>
<h2 xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" class="refsynopsisdiv-title">Synopsis</h2>
<div xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" class="refsynopsisdiv"><pre class="synopsis"><span class="comment">// In header: <<a class="link" href="../../interprocess/indexes_reference.html#header.boost.interprocess.managed_shared_memory_hpp" title="Header <boost/interprocess/managed_shared_memory.hpp>">boost/interprocess/managed_shared_memory.hpp</a>>
</span><span class="keyword">template</span><span class="special"><</span><span class="keyword">typename</span> CharType<span class="special">,</span> <span class="keyword">typename</span> AllocationAlgorithm<span class="special">,</span>
<span class="keyword">template</span><span class="special"><</span> <span class="keyword">class</span> <span class="identifier">IndexConfig</span> <span class="special">></span> <span class="keyword">class</span> IndexType<span class="special">></span>
<span class="keyword">class</span> <a class="link" href="basic_managed__idp65355360.html" title="Class template basic_managed_shared_memory">basic_managed_shared_memory</a> <span class="special">{</span>
<span class="keyword">public</span><span class="special">:</span>
<span class="comment">// <a class="link" href="basic_managed__idp65355360.html#boost.interprocess.basic_managed__idp65355360construct-copy-destruct">construct/copy/destruct</a></span>
<a class="link" href="basic_managed__idp65355360.html#idp65362736-bb"><span class="identifier">basic_managed_shared_memory</span></a><span class="special">(</span><span class="special">)</span><span class="special">;</span>
<a class="link" href="basic_managed__idp65355360.html#idp65363504-bb"><span class="identifier">basic_managed_shared_memory</span></a><span class="special">(</span><a class="link" href="create_only_t.html" title="Struct create_only_t">create_only_t</a><span class="special">,</span> <span class="keyword">const</span> <span class="keyword">char</span> <span class="special">*</span><span class="special">,</span> <span class="identifier">size_type</span><span class="special">,</span>
<span class="keyword">const</span> <span class="keyword">void</span> <span class="special">*</span> <span class="special">=</span> <span class="number">0</span><span class="special">,</span>
<span class="keyword">const</span> <a class="link" href="permissions.html" title="Class permissions">permissions</a> <span class="special">&</span> <span class="special">=</span> <a class="link" href="permissions.html" title="Class permissions">permissions</a><span class="special">(</span><span class="special">)</span><span class="special">)</span><span class="special">;</span>
<a class="link" href="basic_managed__idp65355360.html#idp65369440-bb"><span class="identifier">basic_managed_shared_memory</span></a><span class="special">(</span><a class="link" href="open_or_create_t.html" title="Struct open_or_create_t">open_or_create_t</a><span class="special">,</span> <span class="keyword">const</span> <span class="keyword">char</span> <span class="special">*</span><span class="special">,</span> <span class="identifier">size_type</span><span class="special">,</span>
<span class="keyword">const</span> <span class="keyword">void</span> <span class="special">*</span> <span class="special">=</span> <span class="number">0</span><span class="special">,</span>
<span class="keyword">const</span> <a class="link" href="permissions.html" title="Class permissions">permissions</a> <span class="special">&</span> <span class="special">=</span> <a class="link" href="permissions.html" title="Class permissions">permissions</a><span class="special">(</span><span class="special">)</span><span class="special">)</span><span class="special">;</span>
<a class="link" href="basic_managed__idp65355360.html#idp65375456-bb"><span class="identifier">basic_managed_shared_memory</span></a><span class="special">(</span><a class="link" href="open_copy_on_write_t.html" title="Struct open_copy_on_write_t">open_copy_on_write_t</a><span class="special">,</span> <span class="keyword">const</span> <span class="keyword">char</span> <span class="special">*</span><span class="special">,</span>
<span class="keyword">const</span> <span class="keyword">void</span> <span class="special">*</span> <span class="special">=</span> <span class="number">0</span><span class="special">)</span><span class="special">;</span>
<a class="link" href="basic_managed__idp65355360.html#idp65378720-bb"><span class="identifier">basic_managed_shared_memory</span></a><span class="special">(</span><a class="link" href="open_read_only_t.html" title="Struct open_read_only_t">open_read_only_t</a><span class="special">,</span> <span class="keyword">const</span> <span class="keyword">char</span> <span class="special">*</span><span class="special">,</span>
<span class="keyword">const</span> <span class="keyword">void</span> <span class="special">*</span> <span class="special">=</span> <span class="number">0</span><span class="special">)</span><span class="special">;</span>
<a class="link" href="basic_managed__idp65355360.html#idp65381984-bb"><span class="identifier">basic_managed_shared_memory</span></a><span class="special">(</span><a class="link" href="open_only_t.html" title="Struct open_only_t">open_only_t</a><span class="special">,</span> <span class="keyword">const</span> <span class="keyword">char</span> <span class="special">*</span><span class="special">,</span> <span class="keyword">const</span> <span class="keyword">void</span> <span class="special">*</span> <span class="special">=</span> <span class="number">0</span><span class="special">)</span><span class="special">;</span>
<a class="link" href="basic_managed__idp65355360.html#idp65385232-bb"><span class="identifier">basic_managed_shared_memory</span></a><span class="special">(</span><a class="link" href="basic_managed__idp65355360.html" title="Class template basic_managed_shared_memory">basic_managed_shared_memory</a> <span class="special">&&</span><span class="special">)</span><span class="special">;</span>
<a class="link" href="basic_managed__idp65355360.html" title="Class template basic_managed_shared_memory">basic_managed_shared_memory</a> <span class="special">&</span> <a class="link" href="basic_managed__idp65355360.html#idp65386992-bb"><span class="keyword">operator</span><span class="special">=</span></a><span class="special">(</span><a class="link" href="basic_managed__idp65355360.html" title="Class template basic_managed_shared_memory">basic_managed_shared_memory</a> <span class="special">&&</span><span class="special">)</span><span class="special">;</span>
<a class="link" href="basic_managed__idp65355360.html#idp65361696-bb"><span class="special">~</span><span class="identifier">basic_managed_shared_memory</span></a><span class="special">(</span><span class="special">)</span><span class="special">;</span>
<span class="comment">// <a class="link" href="basic_managed__idp65355360.html#idp65358640-bb">public member functions</a></span>
<span class="keyword">void</span> <a class="link" href="basic_managed__idp65355360.html#idp65359200-bb"><span class="identifier">swap</span></a><span class="special">(</span><a class="link" href="basic_managed__idp65355360.html" title="Class template basic_managed_shared_memory">basic_managed_shared_memory</a> <span class="special">&</span><span class="special">)</span><span class="special">;</span>
<span class="comment">// <a class="link" href="basic_managed__idp65355360.html#idp65389344-bb">public static functions</a></span>
<span class="keyword">static</span> <span class="keyword">bool</span> <a class="link" href="basic_managed__idp65355360.html#idp65389904-bb"><span class="identifier">grow</span></a><span class="special">(</span><span class="keyword">const</span> <span class="keyword">char</span> <span class="special">*</span><span class="special">,</span> <span class="identifier">size_type</span><span class="special">)</span><span class="special">;</span>
<span class="keyword">static</span> <span class="keyword">bool</span> <a class="link" href="basic_managed__idp65355360.html#idp65393248-bb"><span class="identifier">shrink_to_fit</span></a><span class="special">(</span><span class="keyword">const</span> <span class="keyword">char</span> <span class="special">*</span><span class="special">)</span><span class="special">;</span>
<span class="special">}</span><span class="special">;</span></pre></div>
<div class="refsect1">
<a name="idp240845536"></a><h2>Description</h2>
<p>A basic shared memory named object creation class. Initializes the shared memory segment. Inherits all basic functionality from basic_managed_memory_impl<CharType, AllocationAlgorithm, IndexType> </p>
<div class="refsect2">
<a name="idp240846496"></a><h3>
<a name="boost.interprocess.basic_managed__idp65355360construct-copy-destruct"></a><code class="computeroutput">basic_managed_shared_memory</code>
public
construct/copy/destruct</h3>
<div class="orderedlist"><ol class="orderedlist" type="1">
<li class="listitem">
<pre class="literallayout"><a name="idp65362736-bb"></a><span class="identifier">basic_managed_shared_memory</span><span class="special">(</span><span class="special">)</span><span class="special">;</span></pre>
<p>Default constructor. Does nothing. Useful in combination with move semantics </p>
</li>
<li class="listitem">
<pre class="literallayout"><a name="idp65363504-bb"></a><span class="identifier">basic_managed_shared_memory</span><span class="special">(</span><a class="link" href="create_only_t.html" title="Struct create_only_t">create_only_t</a><span class="special">,</span> <span class="keyword">const</span> <span class="keyword">char</span> <span class="special">*</span> name<span class="special">,</span> <span class="identifier">size_type</span> size<span class="special">,</span>
<span class="keyword">const</span> <span class="keyword">void</span> <span class="special">*</span> addr <span class="special">=</span> <span class="number">0</span><span class="special">,</span>
<span class="keyword">const</span> <a class="link" href="permissions.html" title="Class permissions">permissions</a> <span class="special">&</span> perm <span class="special">=</span> <a class="link" href="permissions.html" title="Class permissions">permissions</a><span class="special">(</span><span class="special">)</span><span class="special">)</span><span class="special">;</span></pre>
<p>Creates shared memory and creates and places the segment manager. This can throw. </p>
</li>
<li class="listitem">
<pre class="literallayout"><a name="idp65369440-bb"></a><span class="identifier">basic_managed_shared_memory</span><span class="special">(</span><a class="link" href="open_or_create_t.html" title="Struct open_or_create_t">open_or_create_t</a><span class="special">,</span> <span class="keyword">const</span> <span class="keyword">char</span> <span class="special">*</span> name<span class="special">,</span>
<span class="identifier">size_type</span> size<span class="special">,</span> <span class="keyword">const</span> <span class="keyword">void</span> <span class="special">*</span> addr <span class="special">=</span> <span class="number">0</span><span class="special">,</span>
<span class="keyword">const</span> <a class="link" href="permissions.html" title="Class permissions">permissions</a> <span class="special">&</span> perm <span class="special">=</span> <a class="link" href="permissions.html" title="Class permissions">permissions</a><span class="special">(</span><span class="special">)</span><span class="special">)</span><span class="special">;</span></pre>
<p>Creates shared memory and creates and places the segment manager if segment was not created. If segment was created it connects to the segment. This can throw. </p>
</li>
<li class="listitem">
<pre class="literallayout"><a name="idp65375456-bb"></a><span class="identifier">basic_managed_shared_memory</span><span class="special">(</span><a class="link" href="open_copy_on_write_t.html" title="Struct open_copy_on_write_t">open_copy_on_write_t</a><span class="special">,</span> <span class="keyword">const</span> <span class="keyword">char</span> <span class="special">*</span> name<span class="special">,</span>
<span class="keyword">const</span> <span class="keyword">void</span> <span class="special">*</span> addr <span class="special">=</span> <span class="number">0</span><span class="special">)</span><span class="special">;</span></pre>
<p>Connects to a created shared memory and its segment manager. in copy_on_write mode. This can throw. </p>
</li>
<li class="listitem">
<pre class="literallayout"><a name="idp65378720-bb"></a><span class="identifier">basic_managed_shared_memory</span><span class="special">(</span><a class="link" href="open_read_only_t.html" title="Struct open_read_only_t">open_read_only_t</a><span class="special">,</span> <span class="keyword">const</span> <span class="keyword">char</span> <span class="special">*</span> name<span class="special">,</span>
<span class="keyword">const</span> <span class="keyword">void</span> <span class="special">*</span> addr <span class="special">=</span> <span class="number">0</span><span class="special">)</span><span class="special">;</span></pre>
<p>Connects to a created shared memory and its segment manager in read-only mode. This can throw. </p>
</li>
<li class="listitem">
<pre class="literallayout"><a name="idp65381984-bb"></a><span class="identifier">basic_managed_shared_memory</span><span class="special">(</span><a class="link" href="open_only_t.html" title="Struct open_only_t">open_only_t</a><span class="special">,</span> <span class="keyword">const</span> <span class="keyword">char</span> <span class="special">*</span> name<span class="special">,</span>
<span class="keyword">const</span> <span class="keyword">void</span> <span class="special">*</span> addr <span class="special">=</span> <span class="number">0</span><span class="special">)</span><span class="special">;</span></pre>
<p>Connects to a created shared memory and its segment manager. This can throw. </p>
</li>
<li class="listitem">
<pre class="literallayout"><a name="idp65385232-bb"></a><span class="identifier">basic_managed_shared_memory</span><span class="special">(</span><a class="link" href="basic_managed__idp65355360.html" title="Class template basic_managed_shared_memory">basic_managed_shared_memory</a> <span class="special">&&</span> moved<span class="special">)</span><span class="special">;</span></pre>
<p>Moves the ownership of "moved"'s managed memory to *this. Does not throw </p>
</li>
<li class="listitem">
<pre class="literallayout"><a class="link" href="basic_managed__idp65355360.html" title="Class template basic_managed_shared_memory">basic_managed_shared_memory</a> <span class="special">&</span> <a name="idp65386992-bb"></a><span class="keyword">operator</span><span class="special">=</span><span class="special">(</span><a class="link" href="basic_managed__idp65355360.html" title="Class template basic_managed_shared_memory">basic_managed_shared_memory</a> <span class="special">&&</span> moved<span class="special">)</span><span class="special">;</span></pre>
<p>Moves the ownership of "moved"'s managed memory to *this. Does not throw </p>
</li>
<li class="listitem">
<pre class="literallayout"><a name="idp65361696-bb"></a><span class="special">~</span><span class="identifier">basic_managed_shared_memory</span><span class="special">(</span><span class="special">)</span><span class="special">;</span></pre>
<p>Destroys *this and indicates that the calling process is finished using the resource. The destructor function will deallocate any system resources allocated by the system for use by this process for this resource. The resource can still be opened again calling the open constructor overload. To erase the resource from the system use remove(). </p>
</li>
</ol></div>
</div>
<div class="refsect2">
<a name="idp240930496"></a><h3>
<a name="idp65358640-bb"></a><code class="computeroutput">basic_managed_shared_memory</code> public member functions</h3>
<div class="orderedlist"><ol class="orderedlist" type="1"><li class="listitem">
<pre class="literallayout"><span class="keyword">void</span> <a name="idp65359200-bb"></a><span class="identifier">swap</span><span class="special">(</span><a class="link" href="basic_managed__idp65355360.html" title="Class template basic_managed_shared_memory">basic_managed_shared_memory</a> <span class="special">&</span> other<span class="special">)</span><span class="special">;</span></pre>
<p>Swaps the ownership of the managed shared memories managed by *this and other. Never throws. </p>
</li></ol></div>
</div>
<div class="refsect2">
<a name="idp240937808"></a><h3>
<a name="idp65389344-bb"></a><code class="computeroutput">basic_managed_shared_memory</code> public static functions</h3>
<div class="orderedlist"><ol class="orderedlist" type="1">
<li class="listitem">
<pre class="literallayout"><span class="keyword">static</span> <span class="keyword">bool</span> <a name="idp65389904-bb"></a><span class="identifier">grow</span><span class="special">(</span><span class="keyword">const</span> <span class="keyword">char</span> <span class="special">*</span> shmname<span class="special">,</span> <span class="identifier">size_type</span> extra_bytes<span class="special">)</span><span class="special">;</span></pre>
<p>Tries to resize the managed shared memory object so that we have room for more objects.</p>
<p>This function is not synchronized so no other thread or process should be reading or writing the file </p>
</li>
<li class="listitem">
<pre class="literallayout"><span class="keyword">static</span> <span class="keyword">bool</span> <a name="idp65393248-bb"></a><span class="identifier">shrink_to_fit</span><span class="special">(</span><span class="keyword">const</span> <span class="keyword">char</span> <span class="special">*</span> shmname<span class="special">)</span><span class="special">;</span></pre>
<p>Tries to resize the managed shared memory to minimize the size of the file.</p>
<p>This function is not synchronized so no other thread or process should be reading or writing the file </p>
</li>
</ol></div>
</div>
</div>
</div>
<table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr>
<td align="left"></td>
<td align="right"><div class="copyright-footer">Copyright © 2005-2012 Ion Gaztanaga<p>
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>)
</p>
</div></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="wmanaged_mapped_file.html"><img src="../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../../interprocess/indexes_reference.html#header.boost.interprocess.managed_shared_memory_hpp"><img src="../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../index.html"><img src="../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="managed_shared_memory.html"><img src="../../../../doc/src/images/next.png" alt="Next"></a>
</div>
</body>
</html>
| biospi/seamass-windeps | src/boost_1_57_0/doc/html/boost/interprocess/basic_managed__idp65355360.html | HTML | apache-2.0 | 22,075 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.spi.blob;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.lang.ref.WeakReference;
import java.security.InvalidKeyException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.WeakHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import javax.annotation.Nonnull;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import com.google.common.base.Charsets;
import com.google.common.io.BaseEncoding;
import org.apache.commons.io.FileUtils;
import org.apache.jackrabbit.oak.commons.cache.Cache;
import org.apache.jackrabbit.oak.commons.IOUtils;
import org.apache.jackrabbit.oak.commons.StringUtils;
import org.apache.jackrabbit.oak.spi.blob.stats.BlobStatsCollector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* An abstract data store that splits the binaries in relatively small blocks,
* so that each block fits in memory.
* <p>
* Each data store id is a list of zero or more entries. Each entry is either
* <ul>
* <li>data (a number of bytes), or</li>
* <li>the hash code of the content of a number of bytes, or</li>
* <li>the hash code of the content of a data store id (indirect hash)</li>
* </ul>
* Thanks to the indirection, blocks can be kept relatively small, so that
* caching is simpler, and so that the storage backend doesn't need to support
* arbitrary size blobs (some storage backends buffer blobs in memory) and fast
* seeks (some storage backends re-read the whole blob when seeking).
* <p>
* The format of a 'data' entry is: type (one byte; 0 for data), length
* (variable size int), data (bytes).
* <p>
* The format of a 'hash of content' entry is: type (one byte; 1 for hash),
* level (variable size int, 0 meaning not nested), size (variable size long),
* hash code length (variable size int), hash code.
* <p>
* The format of a 'hash of data store id' entry is: type (one byte; 1 for
* hash), level (variable size int, nesting level), total size (variable size
* long), size of data store id (variable size long), hash code length (variable
* size int), hash code.
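 * <p>
 * Illustrative sketch (hypothetical values, not normative): a 10-byte binary
 * below the inline threshold becomes a single 'data' entry, i.e. the byte 0x00
 * (type), the varint 0x0a (length 10) and the 10 raw bytes; a single block
 * stored by hash at level 0 becomes the byte 0x01 (type), varint 0x00 (level),
 * the block size as a variable size long, varint 0x20 (hash code length 32)
 * and the 32-byte SHA-256 hash code.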
*/
public abstract class AbstractBlobStore implements GarbageCollectableBlobStore,
Cache.Backend<AbstractBlobStore.BlockId, AbstractBlobStore.Data> {
protected static final String HASH_ALGORITHM = "SHA-256";
protected static final int TYPE_DATA = 0;
protected static final int TYPE_HASH = 1;
/**
* The minimum block size. Blocks must be larger than that so that the
* content hash is always shorter than the data itself.
*/
protected static final int BLOCK_SIZE_LIMIT = 48;
/**
* The blob ids that are still floating around in memory. The blob store
* assumes such binaries must not be deleted, because those binaries are not
* referenced yet in a way the garbage collection algorithm can detect (not
* referenced at all, or only referenced in memory).
*/
protected Map<String, WeakReference<String>> inUse =
Collections.synchronizedMap(new WeakHashMap<String, WeakReference<String>>());
/**
* The minimum size of a block. Smaller blocks are inlined (the data store id
* is the data itself).
*/
private int blockSizeMin = 4096;
/**
* The size of a block. 128 KB has been found to be as fast as larger
* values, and faster than smaller values. 2 MB results in less files.
*/
private int blockSize = 2 * 1024 * 1024;
/**
* The byte array is re-used if possible, to avoid having to create a new,
* large byte array each time a (potentially very small) binary is stored.
*/
private AtomicReference<byte[]> blockBuffer = new AtomicReference<byte[]>();
/**
* Encryption algorithm used to encrypt blobId as references
*/
private static final String ALGORITHM = "HmacSHA1";
/**
* Encryption key for creating secure references from blobId
*/
private byte[] referenceKey;
private final Logger log = LoggerFactory.getLogger(getClass());
private BlobStatsCollector statsCollector = BlobStatsCollector.NOOP;
public void setBlockSizeMin(int x) {
validateBlockSize(x);
this.blockSizeMin = x;
}
@Override
public long getBlockSizeMin() {
return blockSizeMin;
}
@Override
public void setBlockSize(int x) {
validateBlockSize(x);
this.blockSize = x;
}
public void setStatsCollector(BlobStatsCollector stats) {
this.statsCollector = stats;
}
protected BlobStatsCollector getStatsCollector() {
return statsCollector;
}
private static void validateBlockSize(int x) {
if (x < BLOCK_SIZE_LIMIT) {
throw new IllegalArgumentException(
"The minimum size must be bigger " +
"than a content hash itself; limit = " + BLOCK_SIZE_LIMIT);
}
}
public int getBlockSize() {
return blockSize;
}
@Override
public String writeBlob(String tempFilePath) throws IOException {
File file = new File(tempFilePath);
InputStream in = null;
try {
in = new FileInputStream(file);
return writeBlob(in);
} finally {
org.apache.commons.io.IOUtils.closeQuietly(in);
FileUtils.forceDelete(file);
}
}
@Override
public String writeBlob(InputStream in) throws IOException {
try {
ByteArrayOutputStream idStream = new ByteArrayOutputStream();
convertBlobToId(in, idStream, 0, 0);
byte[] id = idStream.toByteArray();
// System.out.println(" write blob " + StringUtils.convertBytesToHex(id));
String blobId = StringUtils.convertBytesToHex(id);
usesBlobId(blobId);
statsCollector.uploadCompleted(blobId);
return blobId;
} finally {
try {
in.close();
} catch (IOException e) {
// ignore
}
}
}
@Override
public InputStream getInputStream(String blobId) throws IOException {
        // Marking would be handled by the next call to store.readBlob
return new BlobStoreInputStream(this, blobId, 0);
}
//--------------------------------------------< Blob Reference >
@Override
public String getReference(@Nonnull String blobId) {
checkNotNull(blobId, "BlobId must be specified");
try {
Mac mac = Mac.getInstance(ALGORITHM);
mac.init(new SecretKeySpec(getReferenceKey(), ALGORITHM));
byte[] hash = mac.doFinal(blobId.getBytes("UTF-8"));
return blobId + ':' + BaseEncoding.base32Hex().encode(hash);
} catch (NoSuchAlgorithmException e) {
throw new IllegalStateException(e);
} catch (InvalidKeyException e) {
throw new IllegalStateException(e);
} catch (UnsupportedEncodingException e) {
throw new IllegalStateException(e);
}
}
@Override
public String getBlobId(@Nonnull String reference) {
checkNotNull(reference, "BlobId must be specified");
int colon = reference.indexOf(':');
if (colon != -1) {
String blobId = reference.substring(0, colon);
if (reference.equals(getReference(blobId))) {
return blobId;
}
log.debug("Possibly invalid reference as blobId does not match {}", reference);
}
return null;
}
/**
* Returns the reference key of this blob store. If one does not already
* exist, it is automatically created in an implementation-specific way.
* The default implementation simply creates a temporary random key that's
* valid only until the data store gets restarted. Subclasses can override
* and/or decorate this method to support a more persistent reference key.
* <p>
* This method is called only once during the lifetime of a data store
* instance and the return value is cached in memory, so it's no problem
* if the implementation is slow.
*
* @return reference key
*/
protected byte[] getOrCreateReferenceKey() {
byte[] referenceKeyValue = new byte[256];
new SecureRandom().nextBytes(referenceKeyValue);
log.info("Reference key is not specified for the BlobStore in use. " +
"Generating a random key. For stable " +
"reference ensure that reference key is specified");
return referenceKeyValue;
}
/**
* Returns the reference key of this data store. Synchronized to
* control concurrent access to the cached {@link #referenceKey} value.
*
* @return reference key
*/
private synchronized byte[] getReferenceKey() {
if (referenceKey == null) {
referenceKey = getOrCreateReferenceKey();
}
return referenceKey;
}
public void setReferenceKey(byte[] referenceKey) {
checkArgument(referenceKey != null, "Reference key already initialized by default means. " +
"To explicitly set it, setReferenceKey must be invoked before its first use");
this.referenceKey = referenceKey;
}
/**
* Set the referenceKey from Base64 encoded byte array
* @param encodedKey base64 encoded key
*/
public void setReferenceKeyEncoded(String encodedKey) {
setReferenceKey(BaseEncoding.base64().decode(encodedKey));
}
/**
* Set the referenceKey from plain text. Key content would be UTF-8 encoding
* of the string.
*
* <p>
* This is useful when setting key via generic bean property manipulation
* from string properties. User can specify the key in plain text and that
* would be passed on this object via
* {@link org.apache.jackrabbit.oak.commons.PropertiesUtil#populate(Object, java.util.Map, boolean)}
*
* @param textKey base64 encoded key
* @see org.apache.jackrabbit.oak.commons.PropertiesUtil#populate(Object,
* java.util.Map, boolean)
*/
public void setReferenceKeyPlainText(String textKey) {
setReferenceKey(textKey.getBytes(Charsets.UTF_8));
}
protected void usesBlobId(String blobId) {
inUse.put(blobId, new WeakReference<String>(blobId));
}
@Override
public void clearInUse() {
inUse.clear();
}
private void convertBlobToId(InputStream in,
ByteArrayOutputStream idStream, int level, long totalLength)
throws IOException {
int count = 0;
// try to re-use the block (but not concurrently)
byte[] block = blockBuffer.getAndSet(null);
if (block == null || block.length != blockSize) {
// not yet initialized yet, already in use, or wrong size:
// create a new one
block = new byte[blockSize];
}
while (true) {
int blockLen = IOUtils.readFully(in, block, 0, block.length);
count++;
if (blockLen == 0) {
break;
} else if (blockLen < blockSizeMin) {
idStream.write(TYPE_DATA);
IOUtils.writeVarInt(idStream, blockLen);
idStream.write(block, 0, blockLen);
totalLength += blockLen;
} else {
MessageDigest messageDigest;
try {
messageDigest = MessageDigest.getInstance(HASH_ALGORITHM);
} catch (NoSuchAlgorithmException e) {
throw new IOException(e);
}
messageDigest.update(block, 0, blockLen);
byte[] digest = messageDigest.digest();
idStream.write(TYPE_HASH);
IOUtils.writeVarInt(idStream, level);
if (level > 0) {
// level > 0: total size (size of all sub-blocks)
// (see class level javadoc for details)
IOUtils.writeVarLong(idStream, totalLength);
}
// level = 0: size (size of this block)
// level > 0: size of the indirection block
// (see class level javadoc for details)
IOUtils.writeVarLong(idStream, blockLen);
totalLength += blockLen;
IOUtils.writeVarInt(idStream, digest.length);
idStream.write(digest);
long start = System.nanoTime();
storeBlock(digest, level, Arrays.copyOf(block, blockLen));
statsCollector.uploaded(System.nanoTime() - start, TimeUnit.NANOSECONDS, blockLen);
}
if (idStream.size() > blockSize / 2) {
// convert large ids to a block, but ensure it can be stored as
// one block (otherwise the indirection no longer works)
byte[] idBlock = idStream.toByteArray();
idStream.reset();
convertBlobToId(new ByteArrayInputStream(idBlock), idStream, level + 1, totalLength);
count = 1;
}
}
// re-use the block
blockBuffer.set(block);
if (count > 0 && idStream.size() > blockSizeMin) {
// at the very end, convert large ids to a block,
// because large block ids are not handy
            // (especially if they are used to read data in small chunks)
byte[] idBlock = idStream.toByteArray();
idStream.reset();
convertBlobToId(new ByteArrayInputStream(idBlock), idStream, level + 1, totalLength);
}
in.close();
}
/**
* Store a block of data.
*
* @param digest the content hash (32 bytes)
* @param level the indirection level (0 is for user data, 1 is a list of
* digests that point to user data, 2 is a list of digests that
* point to digests, and so on). This parameter is for
* informational use only, and it is not required to store it
* unless that's easy to achieve
* @param data the data to be stored (the number of bytes is at most the block size)
*/
protected abstract void storeBlock(byte[] digest, int level, byte[] data) throws IOException;
@Override
public abstract void startMark() throws IOException;
@Override
public abstract int sweep() throws IOException;
protected abstract boolean isMarkEnabled();
protected abstract void mark(BlockId id) throws Exception;
protected void markInUse() throws IOException {
for (String id : new ArrayList<String>(inUse.keySet())) {
mark(id);
}
}
@Override
public int readBlob(String blobId, long pos, byte[] buff, int off,
int length) throws IOException {
if (isMarkEnabled()) {
mark(blobId);
}
byte[] id = StringUtils.convertHexToBytes(blobId);
ByteArrayInputStream idStream = new ByteArrayInputStream(id);
while (true) {
int type = idStream.read();
if (type == -1) {
statsCollector.downloadCompleted(blobId);
return -1;
} else if (type == TYPE_DATA) {
int len = IOUtils.readVarInt(idStream);
if (pos < len) {
IOUtils.skipFully(idStream, (int) pos);
len -= pos;
if (length < len) {
len = length;
}
IOUtils.readFully(idStream, buff, off, len);
return len;
}
IOUtils.skipFully(idStream, len);
pos -= len;
} else if (type == TYPE_HASH) {
int level = IOUtils.readVarInt(idStream);
// level = 0: size (size of this block)
// level > 0: total size (size of all sub-blocks)
// (see class level javadoc for details)
long totalLength = IOUtils.readVarLong(idStream);
if (level > 0) {
// block length (ignored)
IOUtils.readVarLong(idStream);
}
byte[] digest = new byte[IOUtils.readVarInt(idStream)];
IOUtils.readFully(idStream, digest, 0, digest.length);
if (pos >= totalLength) {
pos -= totalLength;
} else {
if (level > 0) {
byte[] block = readBlock(digest, 0);
idStream = new ByteArrayInputStream(block);
} else {
long readPos = pos - pos % blockSize;
byte[] block = readBlock(digest, readPos);
ByteArrayInputStream in = new ByteArrayInputStream(block);
IOUtils.skipFully(in, pos - readPos);
return IOUtils.readFully(in, buff, off, length);
}
}
} else {
throw new IOException("Unknown blobs id type " + type + " for blob " + blobId);
}
}
}
byte[] readBlock(byte[] digest, long pos) {
BlockId id = new BlockId(digest, pos);
return load(id).data;
}
@Override
public Data load(BlockId id) {
byte[] data;
try {
data = readBlockFromBackend(id);
} catch (Exception e) {
throw new RuntimeException("failed to read block from backend, id " + id, e);
}
if (data == null) {
throw new IllegalArgumentException("The block with id " + id + " was not found");
}
return new Data(data);
}
/**
* Load the block from the storage backend. Returns null if the block was
* not found.
*
* @param id the block id
* @return the block data, or null
*/
protected abstract byte[] readBlockFromBackend(BlockId id) throws Exception;
@Override
public long getBlobLength(String blobId) throws IOException {
if (isMarkEnabled()) {
mark(blobId);
}
byte[] id = StringUtils.convertHexToBytes(blobId);
ByteArrayInputStream idStream = new ByteArrayInputStream(id);
long totalLength = 0;
while (true) {
int type = idStream.read();
if (type == -1) {
break;
}
if (type == TYPE_DATA) {
int len = IOUtils.readVarInt(idStream);
IOUtils.skipFully(idStream, len);
totalLength += len;
} else if (type == TYPE_HASH) {
int level = IOUtils.readVarInt(idStream);
// level = 0: size (size of this block)
// level > 0: total size (size of all sub-blocks)
// (see class level javadoc for details)
totalLength += IOUtils.readVarLong(idStream);
if (level > 0) {
// block length (ignored)
IOUtils.readVarLong(idStream);
}
int digestLength = IOUtils.readVarInt(idStream);
IOUtils.skipFully(idStream, digestLength);
} else {
throw new IOException("Datastore id type " + type + " for blob " + blobId);
}
}
return totalLength;
}
protected void mark(String blobId) throws IOException {
try {
byte[] id = StringUtils.convertHexToBytes(blobId);
ByteArrayInputStream idStream = new ByteArrayInputStream(id);
mark(idStream);
} catch (Exception e) {
throw new IOException("Mark failed for blob " + blobId, e);
}
}
private void mark(ByteArrayInputStream idStream) throws Exception {
while (true) {
int type = idStream.read();
if (type == -1) {
return;
} else if (type == TYPE_DATA) {
int len = IOUtils.readVarInt(idStream);
IOUtils.skipFully(idStream, len);
} else if (type == TYPE_HASH) {
int level = IOUtils.readVarInt(idStream);
// level = 0: size (size of this block)
// level > 0: total size (size of all sub-blocks)
// (see class level javadoc for details)
IOUtils.readVarLong(idStream);
if (level > 0) {
// block length (ignored)
IOUtils.readVarLong(idStream);
}
byte[] digest = new byte[IOUtils.readVarInt(idStream)];
IOUtils.readFully(idStream, digest, 0, digest.length);
BlockId id = new BlockId(digest, 0);
mark(id);
if (level > 0) {
byte[] block = readBlock(digest, 0);
idStream = new ByteArrayInputStream(block);
mark(idStream);
}
} else {
throw new IOException("Unknown blobs id type " + type);
}
}
}
@Override
public Iterator<String> resolveChunks(String blobId) throws IOException {
return new ChunkIterator(blobId);
}
@Override
public boolean deleteChunks(List<String> chunkIds, long maxLastModifiedTime) throws Exception {
return (chunkIds.size() == countDeleteChunks(chunkIds, maxLastModifiedTime));
}
/**
* A block id. Blocks are small enough to fit in memory, so they can be
* cached.
*/
public static class BlockId {
/**
* The digest (32 bytes).
*/
final byte[] digest;
final long pos;
BlockId(byte[] digest, long pos) {
this.digest = digest;
this.pos = pos;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || !(other instanceof BlockId)) {
return false;
}
BlockId o = (BlockId) other;
return Arrays.equals(digest, o.digest) &&
pos == o.pos;
}
@Override
public int hashCode() {
return Arrays.hashCode(digest) ^
(int) (pos >> 32) ^ (int) pos;
}
@Override
public String toString() {
return StringUtils.convertBytesToHex(digest) + "@" + pos;
}
public byte[] getDigest() {
return digest;
}
public long getPos() {
return pos;
}
}
/**
* The data for a block.
*/
public static class Data implements Cache.Value {
final byte[] data;
Data(byte[] data) {
this.data = data;
}
@Override
public String toString() {
String s = StringUtils.convertBytesToHex(data);
return s.length() > 100 ? s.substring(0, 100) + ".. (len=" + data.length + ")" : s;
}
@Override
public int getMemory() {
return data.length;
}
}
class ChunkIterator implements Iterator<String> {
        private static final int BATCH = 2048;
private final ArrayDeque<String> queue;
private final ArrayDeque<ByteArrayInputStream> streamsStack;
public ChunkIterator(String blobId) {
byte[] id = StringUtils.convertHexToBytes(blobId);
ByteArrayInputStream idStream = new ByteArrayInputStream(id);
queue = new ArrayDeque<String>(BATCH);
streamsStack = new ArrayDeque<ByteArrayInputStream>();
streamsStack.push(idStream);
}
@Override
public boolean hasNext() {
if (!queue.isEmpty()) {
return true;
}
try {
while ((queue.size() < BATCH)
&& (streamsStack.peekFirst() != null)) {
ByteArrayInputStream idStream = streamsStack.peekFirst();
int type = idStream.read();
if (type == -1) {
streamsStack.pop();
} else if (type == TYPE_DATA) {
int len = IOUtils.readVarInt(idStream);
IOUtils.skipFully(idStream, len);
} else if (type == TYPE_HASH) {
int level = IOUtils.readVarInt(idStream);
// level = 0: size (size of this block)
// level > 0: total size (size of all sub-blocks)
// (see class level javadoc for details)
IOUtils.readVarLong(idStream);
if (level > 0) {
// block length (ignored)
IOUtils.readVarLong(idStream);
}
byte[] digest = new byte[IOUtils.readVarInt(idStream)];
IOUtils.readFully(idStream, digest, 0, digest.length);
if (level > 0) {
queue.add(StringUtils.convertBytesToHex(digest));
byte[] block = readBlock(digest, 0);
idStream = new ByteArrayInputStream(block);
streamsStack.push(idStream);
} else {
queue.add(StringUtils.convertBytesToHex(digest));
}
} else {
break;
}
}
} catch (Exception e) {
throw new RuntimeException(e);
}
// Check now if ids are available
if (!queue.isEmpty()) {
return true;
}
return false;
}
@Override
public String next() {
if (!hasNext()) {
throw new NoSuchElementException("No data");
}
return queue.remove();
}
@Override
public void remove() {
throw new UnsupportedOperationException("Remove not supported");
}
}
}
| meggermo/jackrabbit-oak | oak-blob/src/main/java/org/apache/jackrabbit/oak/spi/blob/AbstractBlobStore.java | Java | apache-2.0 | 27,876 |
/*
* Copyright (c) 2020 Intel Corporation
*
* SPDX-License-Identifier: Apache-2.0
*/
#include <kernel.h>
#include <ksched.h>
#include <wait_q.h>
#include <init.h>
#include <linker/linker-defs.h>
void k_heap_init(struct k_heap *h, void *mem, size_t bytes)
{
z_waitq_init(&h->wait_q);
sys_heap_init(&h->heap, mem, bytes);
SYS_PORT_TRACING_OBJ_INIT(k_heap, h);
}
static int statics_init(const struct device *unused)
{
ARG_UNUSED(unused);
STRUCT_SECTION_FOREACH(k_heap, h) {
#if defined(CONFIG_DEMAND_PAGING) && !defined(CONFIG_LINKER_GENERIC_SECTIONS_PRESENT_AT_BOOT)
		/* Some heaps may not be present at boot, so we need to wait for
* paging mechanism to be initialized before we can initialize
* each heap.
*/
extern bool z_sys_post_kernel;
bool do_clear = z_sys_post_kernel;
/* During pre-kernel init, z_sys_post_kernel == false,
* initialize if within pinned region. Otherwise skip.
* In post-kernel init, z_sys_post_kernel == true, skip those in
* pinned region as they have already been initialized and
* possibly already in use. Otherwise initialize.
*/
if (lnkr_is_pinned((uint8_t *)h) &&
lnkr_is_pinned((uint8_t *)&h->wait_q) &&
lnkr_is_region_pinned((uint8_t *)h->heap.init_mem,
h->heap.init_bytes)) {
do_clear = !do_clear;
}
if (do_clear)
#endif /* CONFIG_DEMAND_PAGING && !CONFIG_LINKER_GENERIC_SECTIONS_PRESENT_AT_BOOT */
{
k_heap_init(h, h->heap.init_mem, h->heap.init_bytes);
}
}
return 0;
}
SYS_INIT(statics_init, PRE_KERNEL_1, CONFIG_KERNEL_INIT_PRIORITY_OBJECTS);
#if defined(CONFIG_DEMAND_PAGING) && !defined(CONFIG_LINKER_GENERIC_SECTIONS_PRESENT_AT_BOOT)
/* Need to wait for paging mechanism to be initialized before
* heaps that are not in pinned sections can be initialized.
*/
SYS_INIT(statics_init, POST_KERNEL, 0);
#endif /* CONFIG_DEMAND_PAGING && !CONFIG_LINKER_GENERIC_SECTIONS_PRESENT_AT_BOOT */
void *k_heap_aligned_alloc(struct k_heap *h, size_t align, size_t bytes,
k_timeout_t timeout)
{
int64_t now, end = sys_clock_timeout_end_calc(timeout);
void *ret = NULL;
k_spinlock_key_t key = k_spin_lock(&h->lock);
SYS_PORT_TRACING_OBJ_FUNC_ENTER(k_heap, aligned_alloc, h, timeout);
__ASSERT(!arch_is_in_isr() || K_TIMEOUT_EQ(timeout, K_NO_WAIT), "");
bool blocked_alloc = false;
while (ret == NULL) {
ret = sys_heap_aligned_alloc(&h->heap, align, bytes);
now = sys_clock_tick_get();
if (!IS_ENABLED(CONFIG_MULTITHREADING) ||
(ret != NULL) || ((end - now) <= 0)) {
break;
}
if (!blocked_alloc) {
blocked_alloc = true;
SYS_PORT_TRACING_OBJ_FUNC_BLOCKING(k_heap, aligned_alloc, h, timeout);
} else {
/**
* @todo Trace attempt to avoid empty trace segments
*/
}
(void) z_pend_curr(&h->lock, key, &h->wait_q,
K_TICKS(end - now));
key = k_spin_lock(&h->lock);
}
SYS_PORT_TRACING_OBJ_FUNC_EXIT(k_heap, aligned_alloc, h, timeout, ret);
k_spin_unlock(&h->lock, key);
return ret;
}
void *k_heap_alloc(struct k_heap *h, size_t bytes, k_timeout_t timeout)
{
SYS_PORT_TRACING_OBJ_FUNC_ENTER(k_heap, alloc, h, timeout);
void *ret = k_heap_aligned_alloc(h, sizeof(void *), bytes, timeout);
SYS_PORT_TRACING_OBJ_FUNC_EXIT(k_heap, alloc, h, timeout, ret);
return ret;
}
void k_heap_free(struct k_heap *h, void *mem)
{
k_spinlock_key_t key = k_spin_lock(&h->lock);
sys_heap_free(&h->heap, mem);
SYS_PORT_TRACING_OBJ_FUNC(k_heap, free, h);
if (IS_ENABLED(CONFIG_MULTITHREADING) && z_unpend_all(&h->wait_q) != 0) {
z_reschedule(&h->lock, key);
} else {
k_spin_unlock(&h->lock, key);
}
}
| zephyrproject-rtos/zephyr | kernel/kheap.c | C | apache-2.0 | 3,573 |
---
layout: page
title: Run on YARN
---
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
- [Introduction](#introduction)
- [Running on YARN: Quickstart](#starting-your-application-on-yarn)
- [Setting up a single node YARN cluster](#setting-up-a-single-node-yarn-cluster-optional)
- [Submitting the application to YARN](#submitting-the-application-to-yarn)
- [Application Master UI](#application-master-ui)
- [Configuration](#configuration)
- [Configuring parallelism](#configuring-parallelism)
- [Configuring resources](#configuring-resources)
- [Memory](#memory)
- [CPU](#cpu)
- [Configuring retries](#configuring-retries)
- [Configuring RM high-availability and NM work-preserving recovery](#configuring-rm-high-availability-and-nm-work-preserving-recovery)
- [Resource Manager high-availability](#resource-manager-high-availability)
- [NodeManager work-preserving recovery](#nodemanager-work-preserving-recovery)
- [Configuring host-affinity](#configuring-host-affinity)
- [Configuring security](#configuring-security)
- [Delegation token management strategy](#delegation-token-management-strategy)
- [Security Components](#security-components)
- [SecurityManager](#securitymanager)
- [Security Configuration](#security-configuration)
- [Job](#job)
- [YARN](#yarn)
- [Coordinator Internals](#coordinator-internals)
### Introduction
Apache YARN is part of the Hadoop project and provides the ability to run distributed applications on a cluster. A YARN cluster minimally consists of a Resource Manager (RM) and multiple Node Managers (NM). The RM is responsible for managing the resources in the cluster and allocating them to applications. Every node in the cluster runs an NM, which is responsible for managing containers on that node - starting them, monitoring their resource usage and reporting it to the RM.
Applications are run on the cluster by implementing a coordinator called an ApplicationMaster (AM). The AM is responsible for requesting resources - including CPU and memory - from the RM on behalf of the application. Samza provides its own implementation of the AM for each job.
### Running on YARN: Quickstart
We will demonstrate running a Samza application on YARN by using the `hello-samza` example. Let's first check out the repository.
```bash
git clone https://github.com/apache/samza-hello-samza.git
cd samza-hello-samza
git checkout latest
```
#### Set up a single node YARN cluster
You can use the `grid` script included as part of the [hello-samza](https://github.com/apache/samza-hello-samza/) repository to set up a single-node cluster. The script also starts Zookeeper and Kafka locally.
```
./bin/grid bootstrap
```
### Submitting the application to YARN
Now that we have a YARN cluster ready, let's build our application. The command below does a Maven build and generates an archive in the `./target` folder.
```bash
./bin/build-package.sh
```
You can inspect the structure of the generated archive. To run on YARN, Samza jobs should be packaged with the following structure.
```bash
samza-job-name-folder
├── bin
│ ├── run-app.sh
│ ├── run-class.sh
│ └── ...
├── config
│ └── application.properties
└── lib
├── samza-api-0.14.0.jar
├── samza-core_2.11-0.14.0.jar
├── samza-kafka_2.11-0.14.0.jar
├── samza-yarn_2.11-0.14.0.jar
└── ...
```
Once the archive is built, the `run-app.sh` script can be used to submit the application to YARN's Resource Manager. The script takes two CLI parameters - the config factory and the config file for the application. As an example, let's run our [FilterExample](https://github.com/apache/samza-hello-samza/blob/latest/src/main/java/samza/examples/cookbook/FilterExample.java) on YARN as follows:
```bash
$ ./deploy/samza/bin/run-app.sh --config-path=./deploy/samza/config/filter-example.properties
```
Congratulations, you've successfully submitted your first job to YARN! You can use the YARN Web UI to check its status.
### Application Master UI
The YARN RM provides a Web UI to view the status of applications in the cluster, their containers and logs. By default, it can be accessed from `localhost:8088` on the RM host.

In addition to YARN's UI, Samza also offers a REST end-point and a web interface for its ApplicationMaster. To access it, simply click on the Tracking UI link corresponding to your application.
Samza's Application Master UI provides you the ability to view:
- Job-level runtime metadata - eg: JMX endpoints, running JVM version

- Information about individual containers eg: their uptime, status and logs

- Task Groups eg: Information on individual tasks, where they run and which partitions are consumed from what host

- Runtime configs for your application

### Configuration
In this section, we'll look at configuring your jobs when running on YARN.
#### Configuring parallelism
[Recall](/learn/documentation/{{site.version}}/architecture/architecture-overview.html#container) that Samza scales your applications by breaking them into multiple tasks. On YARN, these tasks are executed on one or more containers, each of which is a Java process. You can control the number of containers allocated to your application by configuring `cluster-manager.container.count`. For example, if we are consuming from an input topic with 5 partitions, Samza will create 5 tasks, each of which processes one partition. Tasks are distributed equally among the available containers. The number of containers can be at most the number of tasks, since we cannot have idle containers without any tasks assigned to them.
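For example, a hypothetical job reading the 5-partition topic above could run one task per container with the snippet below (the value is illustrative, not a recommendation):
```properties
# Number of containers to allocate; cannot usefully exceed the number of tasks
cluster-manager.container.count=5
```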
#### Configuring resources
Samza jobs on YARN run on a multi-tenant cluster and should be isolated from each other. YARN implements isolation by enforcing limits on memory and CPU each application can use.
##### Memory
You can configure the memory-limit per-container using `cluster-manager.container.memory.mb` and memory-limit for the AM using `yarn.am.container.memory.mb`. If your container process exceeds its configured memory-limits, it is automatically killed by YARN.
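A minimal sketch, assuming containers that need roughly 4 GB each and a small AM footprint (both values are examples to adapt to your job):
```properties
# Per-container memory limit in MB; a container exceeding this is killed by YARN
cluster-manager.container.memory.mb=4096
# Memory limit in MB for the Samza ApplicationMaster container
yarn.am.container.memory.mb=1024
```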
##### CPU
Similar to configuring memory-limits, you can configure the maximum number of vCores (virtual cores) each container can use by setting `cluster-manager.container.cpu.cores`. A _vCore_ is YARN's abstraction over a physical core on a NodeManager which allows for over-provisioning. YARN supports [isolation](http://riccomini.name/posts/hadoop/2013-06-14-yarn-with-cgroups/) of CPU cores using Linux CGroups.
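For instance, to request two vCores per container (an illustrative value; the right number depends on your workload):
```properties
# Maximum number of vCores each container may use
cluster-manager.container.cpu.cores=2
```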
#### Configuring retries
Failures are common when running any distributed system and should be handled gracefully. The Samza AM automatically restarts containers during a failure. The following properties govern this behavior.
`cluster-manager.container.retry.count`: This property determines the maximum number of times Samza will attempt to restart a failed container within a time window. If this property is set to 0, any failed container immediately causes the whole job to fail. If it is set to a negative number, there is no limit on the number of retries.
`cluster-manager.container.retry.window.ms`: This property determines how frequently a container is allowed to fail before we give up and fail the job. If the same container has failed more than `cluster-manager.container.retry.count` times and the time between failures is less than this property, then Samza terminates the job. There is no limit to the number of times we restart a container if the time between failures is greater than `cluster-manager.container.retry.window.ms`.
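As a sketch, the configuration below (illustrative values) fails the job once the same container has failed more than 3 times with less than 5 minutes between consecutive failures:
```properties
# Give up after 3 failures of the same container within the retry window
cluster-manager.container.retry.count=3
# Retry window of 5 minutes, in milliseconds
cluster-manager.container.retry.window.ms=300000
```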
## YARN - Operations Best practices
Although this section is not Samza specific, it describes some best practices for running a YARN cluster in production.
### Resource Manager high-availability
The Resource Manager (RM) provides services like scheduling, heartbeats, liveness monitoring to all applications running in the YARN cluster. Losing the host running the RM would kill every application running on the cluster - making it a single point of failure. The High Availability feature introduced in Hadoop 2.4 adds redundancy by allowing multiple stand-by RMs.
To configure YARN's ResourceManager to be highly available, set the following configs in your `yarn-site.xml` file:
```xml
<property>
<name>yarn.resourcemanager.ha.enabled</name>
<value>true</value>
</property>
<property>
<name>yarn.resourcemanager.cluster-id</name>
<value>cluster1</value>
</property>
<property>
<name>yarn.resourcemanager.ha.rm-ids</name>
<value>rm1,rm2</value>
</property>
<property>
<name>yarn.resourcemanager.hostname.rm1</name>
<value>master1</value>
</property>
<property>
<name>yarn.resourcemanager.hostname.rm2</name>
<value>master2</value>
</property>
<property>
<name>yarn.resourcemanager.webapp.address.rm1</name>
<value>master1:8088</value>
</property>
<property>
<name>yarn.resourcemanager.webapp.address.rm2</name>
<value>master2:8088</value>
</property>
<property>
<name>yarn.resourcemanager.zk-address</name>
<value>zk1:2181,zk2:2181,zk3:2181</value>
</property>
```
### Reserving memory for other services
Often, other services including monitoring daemons like Samza-REST run on the same nodes in the YARN cluster. You can configure `yarn.nodemanager.resource.system-reserved-memory-mb` to control the amount of physical memory reserved for non-YARN processes.
Another behaviour to keep in mind is that the Resource Manager allocates memory and CPU on the cluster in increments of `yarn.scheduler.minimum-allocation-mb` and `yarn.scheduler.minimum-allocation-vcores`. Hence, requesting allocations that are not multiples of the above configs will cause internal fragmentation.
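As a sketch, the relevant `yarn-site.xml` entries could look like this (the values are placeholders to adapt to your hardware, and depending on your Hadoop version the reservation may only take effect when automatic hardware detection is enabled):
```xml
<property>
  <name>yarn.nodemanager.resource.system-reserved-memory-mb</name>
  <value>2048</value>
</property>
<property>
  <name>yarn.scheduler.minimum-allocation-mb</name>
  <value>1024</value>
</property>
<property>
  <name>yarn.scheduler.minimum-allocation-vcores</name>
  <value>1</value>
</property>
```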
### NodeManager work-preserving recovery
Often, NMs have to be bounced in the cluster for upgrades or maintenance reasons. By default, bouncing a Node Manager kills all containers running on its host. Work-preserving NM Restart enables NodeManagers to be restarted without losing active containers running on the node. You can turn on this feature by setting `yarn.nodemanager.recovery.enabled` to `true` in `yarn-site.xml`. You should also set `yarn.nodemanager.recovery.dir` to a directory where the NM should store its state needed for recovery.
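A minimal `yarn-site.xml` sketch (the recovery directory is an example; pick a durable local path on each NodeManager):
```xml
<property>
  <name>yarn.nodemanager.recovery.enabled</name>
  <value>true</value>
</property>
<property>
  <name>yarn.nodemanager.recovery.dir</name>
  <value>/var/yarn/nm-recovery</value>
</property>
```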
### Configuring state-store directories
When a stateful Samza job is deployed in YARN, the state stores for the tasks are located in the current working directory of YARN’s attempt. YARN's DeletionService cleans up the working directories after an application exits. To ensure durability of Samza's state, its stores need to be persisted outside the scope of YARN's DeletionService. You can set this location by configuring an environment variable named `LOGGED_STORE_BASE_DIR` across the cluster.
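For example (the path is illustrative, and how you propagate the variable to container environments depends on your cluster tooling), each NodeManager host could export:
```bash
export LOGGED_STORE_BASE_DIR=/data/samza/state
```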
To manage disk space and clean-up state stores that are no longer necessary, Samza-REST supports periodic, long-running tasks named [monitors](/learn/documentation/{{site.version}}/rest/monitors.html).
### Configuring security
You can run Samza jobs on a secure YARN cluster. YARN uses Kerberos as its authentication and authorization mechanism. See [this article](https://www.cloudera.com/documentation/enterprise/5-7-x/topics/cdh_sg_yarn_security.html) for details on operating Hadoop in secure mode.
#### Management of Kerberos tokens
One challenge for long-running applications on YARN is how they periodically renew their Kerberos tokens. Samza handles this by having the AM periodically create tokens and refresh them in a staging directory on HDFS. This directory is accessible only by the containers of your job. You can set your Kerberos principal and kerberos keytab file as follows:
```properties
# Use the SamzaYarnSecurityManagerFactory, which fetches and renews the Kerberos delegation tokens when the job is running in a secure environment.
job.security.manager.factory=org.apache.samza.job.yarn.SamzaYarnSecurityManagerFactory
# Kerberos principal
yarn.kerberos.principal=your-principal-name
# Path of the keytab file (local path)
yarn.kerberos.keytab=/tmp/keytab
```
By default, Kerberos tokens on YARN have a maximum life-time of 7 days, beyond which they auto-expire. Often streaming applications are long-running and don't terminate within this life-time. To get around this, you can configure YARN's Resource Manager to automatically re-create tokens on your behalf by setting these configs in your `yarn-site.xml` file.
```xml
<property>
<name>hadoop.proxyuser.yarn.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.yarn.groups</name>
<value>*</value>
</property>
```
# Samza Coordinator Internals
In this section, we will discuss some of implementation internals of the Samza ApplicationMaster (AM).
The Samza AM is the control-hub for a Samza application running on a YARN cluster. It is responsible for coordinating work assignment across individual containers. It includes the following components:
- YARNClusterResourceManager, which handles interactions with YARN and provides APIs for requesting resources and starting containers.
- ContainerProcessManager, which uses the above APIs to manage Samza containers - restarting them on failure and ensuring they stay in a healthy state.

Here's a life-cycle of a Samza job submitted to YARN:
- The `run-app.sh` script is started providing the location of your application's binaries and its config file. The script instantiates an ApplicationRunner, which is the main entry-point responsible for running your application.
- The ApplicationRunner parses your configs and writes them to a special Kafka topic - the coordinator stream - used to distribute them. It proceeds to submit a request to YARN to launch your application.
- The first step in launching any YARN application is starting its Application Master (AM).
- The ResourceManager allocates an available host and starts the Samza AM.
- The Samza AM is then responsible for managing the overall application. It reads configs from the Coordinator Stream and computes work-assignments for individual containers.
- It also determines the hosts each container should run on, taking data-locality into account. It proceeds to request resources on those nodes using the `YARNClusterResourceManager` APIs.
- Once resources have been allocated, it proceeds to start the containers on the allocated hosts.
- When it is started, each container first queries the Samza AM to determine its work-assignments and configs. It then proceeds to execute its assigned tasks.
- The Samza AM periodically monitors each container using heartbeats and ensure they stay alive. | prateekm/samza | docs/learn/documentation/versioned/deployment/yarn.md | Markdown | apache-2.0 | 16,062 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_31) on Thu Jan 31 02:05:00 UTC 2013 -->
<TITLE>
Uses of Class org.apache.hadoop.fs.GlobPattern (Hadoop 1.1.2 API)
</TITLE>
<META NAME="date" CONTENT="2013-01-31">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.apache.hadoop.fs.GlobPattern (Hadoop 1.1.2 API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../org/apache/hadoop/fs/GlobPattern.html" title="class in org.apache.hadoop.fs"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html?org/apache/hadoop/fs//class-useGlobPattern.html" target="_top"><B>FRAMES</B></A>
<A HREF="GlobPattern.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Uses of Class<br>org.apache.hadoop.fs.GlobPattern</B></H2>
</CENTER>
No usage of org.apache.hadoop.fs.GlobPattern
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../org/apache/hadoop/fs/GlobPattern.html" title="class in org.apache.hadoop.fs"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html?org/apache/hadoop/fs//class-useGlobPattern.html" target="_top"><B>FRAMES</B></A>
<A HREF="GlobPattern.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
Copyright © 2009 The Apache Software Foundation
</BODY>
</HTML>
| markkerzner/nn_kove | hadoop/docs/api/org/apache/hadoop/fs/class-use/GlobPattern.html | HTML | apache-2.0 | 5,934 |
module DockerSupport
require 'cheffish/rspec/chef_run_support'
def self.extended(other)
other.extend Cheffish::RSpec::ChefRunSupport
end
require 'chef/provisioning/docker_driver'
def with_docker(description, *tags, &block)
context_block = proc do
docker_driver = Chef::Provisioning.driver_for_url("docker")
@@driver = docker_driver
def self.driver
@@driver
end
module_eval(&block)
end
when_the_repository "exists and #{description}", *tags, &context_block
end
end
module DockerConfig
def chef_config
@chef_config ||= {
driver: Chef::Provisioning.driver_for_url("docker"),
}
end
end
| marc-/chef-provisioning-docker | spec/docker_support.rb | Ruby | apache-2.0 | 678 |
/*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.integrationtest.deployment.cfg;
import org.camunda.bpm.application.ProcessApplication;
import org.camunda.bpm.application.impl.ServletProcessApplication;
/**
* @author Daniel Meyer
*
*/
@ProcessApplication(
deploymentDescriptors = {"my/alternate/location/processes.xml"}
)
public class CustomProcessApplication extends ServletProcessApplication {
}
| camunda/camunda-bpm-platform | qa/integration-tests-engine/src/test/java/org/camunda/bpm/integrationtest/deployment/cfg/CustomProcessApplication.java | Java | apache-2.0 | 1,194 |
set(SWIFT_HOST_VARIANT_SDK LINUX CACHE STRING "")
set(SWIFT_HOST_VARIANT_ARCH x86_64 CACHE STRING "")
# NOTE(compnerd) disable the tools, we are trying to build just the standard
# library.
set(SWIFT_INCLUDE_TOOLS NO CACHE BOOL "")
# NOTE(compnerd) cannot build tests since the tests require the toolchain
set(SWIFT_INCLUDE_TESTS NO CACHE BOOL "")
# NOTE(compnerd) cannot build docs since that requires perl
set(SWIFT_INCLUDE_DOCS NO CACHE BOOL "")
# NOTE(compnerd) these are part of the toolchain, not the runtime.
set(SWIFT_BUILD_SYNTAXPARSERLIB NO CACHE BOOL "")
set(SWIFT_BUILD_SOURCEKIT NO CACHE BOOL "")
# NOTE(compnerd) build with the compiler specified, not a just built compiler.
set(SWIFT_BUILD_RUNTIME_WITH_HOST_COMPILER YES CACHE BOOL "")
| rudkx/swift | cmake/caches/Runtime-Linux-x86_64.cmake | CMake | apache-2.0 | 757 |
# Copyright 2012 OpenStack Foundation
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import re
from oslo_log import log as logging
import testtools
from tempest.common.utils import data_utils
from tempest.common import waiters
from tempest import config
from tempest import exceptions
from tempest.scenario import manager
from tempest.services.network import resources as net_resources
from tempest import test
CONF = config.CONF
LOG = logging.getLogger(__name__)
Floating_IP_tuple = collections.namedtuple('Floating_IP_tuple',
['floating_ip', 'server'])
class TestNetworkBasicOps(manager.NetworkScenarioTest):
"""
This smoke test suite assumes that Nova has been configured to
    boot VMs with Neutron-managed networking, and attempts to
verify network connectivity as follows:
There are presumed to be two types of networks: tenant and
public. A tenant network may or may not be reachable from the
Tempest host. A public network is assumed to be reachable from
the Tempest host, and it should be possible to associate a public
('floating') IP address with a tenant ('fixed') IP address to
facilitate external connectivity to a potentially unroutable
tenant IP address.
This test suite can be configured to test network connectivity to
a VM via a tenant network, a public network, or both. If both
networking types are to be evaluated, tests that need to be
executed remotely on the VM (via ssh) will only be run against
one of the networks (to minimize test execution time).
Determine which types of networks to test as follows:
* Configure tenant network checks (via the
'tenant_networks_reachable' key) if the Tempest host should
have direct connectivity to tenant networks. This is likely to
be the case if Tempest is running on the same host as a
single-node devstack installation with IP namespaces disabled.
* Configure checks for a public network if a public network has
been configured prior to the test suite being run and if the
Tempest host should have connectivity to that public network.
Checking connectivity for a public network requires that a
value be provided for 'public_network_id'. A value can
optionally be provided for 'public_router_id' if tenants will
use a shared router to access a public network (as is likely to
be the case when IP namespaces are not enabled). If a value is
not provided for 'public_router_id', a router will be created
for each tenant and use the network identified by
'public_network_id' as its gateway.
"""
@classmethod
def skip_checks(cls):
super(TestNetworkBasicOps, cls).skip_checks()
if not (CONF.network.tenant_networks_reachable
or CONF.network.public_network_id):
msg = ('Either tenant_networks_reachable must be "true", or '
'public_network_id must be defined.')
raise cls.skipException(msg)
for ext in ['router', 'security-group']:
if not test.is_extension_enabled(ext, 'network'):
msg = "%s extension not enabled." % ext
raise cls.skipException(msg)
@classmethod
def setup_credentials(cls):
# Create no network resources for these tests.
cls.set_network_resources()
super(TestNetworkBasicOps, cls).setup_credentials()
def setUp(self):
super(TestNetworkBasicOps, self).setUp()
self.keypairs = {}
self.servers = []
def _setup_network_and_servers(self, **kwargs):
boot_with_port = kwargs.pop('boot_with_port', False)
self.security_group = \
self._create_security_group(tenant_id=self.tenant_id)
self.network, self.subnet, self.router = self.create_networks(**kwargs)
self.check_networks()
self.ports = []
self.port_id = None
if boot_with_port:
# create a port on the network and boot with that
self.port_id = self._create_port(self.network['id']).id
self.ports.append({'port': self.port_id})
name = data_utils.rand_name('server-smoke')
server = self._create_server(name, self.network, self.port_id)
self._check_tenant_network_connectivity()
floating_ip = self.create_floating_ip(server)
self.floating_ip_tuple = Floating_IP_tuple(floating_ip, server)
def check_networks(self):
"""
        Checks that we see the newly created network/subnet/router by
        checking the result of list_[networks,routers,subnets]
"""
seen_nets = self._list_networks()
seen_names = [n['name'] for n in seen_nets]
seen_ids = [n['id'] for n in seen_nets]
self.assertIn(self.network.name, seen_names)
self.assertIn(self.network.id, seen_ids)
if self.subnet:
seen_subnets = self._list_subnets()
seen_net_ids = [n['network_id'] for n in seen_subnets]
seen_subnet_ids = [n['id'] for n in seen_subnets]
self.assertIn(self.network.id, seen_net_ids)
self.assertIn(self.subnet.id, seen_subnet_ids)
if self.router:
seen_routers = self._list_routers()
seen_router_ids = [n['id'] for n in seen_routers]
seen_router_names = [n['name'] for n in seen_routers]
self.assertIn(self.router.name,
seen_router_names)
self.assertIn(self.router.id,
seen_router_ids)
def _create_server(self, name, network, port_id=None):
keypair = self.create_keypair()
self.keypairs[keypair['name']] = keypair
security_groups = [{'name': self.security_group['name']}]
create_kwargs = {
'networks': [
{'uuid': network.id},
],
'key_name': keypair['name'],
'security_groups': security_groups,
}
if port_id is not None:
create_kwargs['networks'][0]['port'] = port_id
server = self.create_server(name=name, create_kwargs=create_kwargs)
self.servers.append(server)
return server
def _get_server_key(self, server):
return self.keypairs[server['key_name']]['private_key']
def _check_tenant_network_connectivity(self):
ssh_login = CONF.compute.image_ssh_user
for server in self.servers:
# call the common method in the parent class
super(TestNetworkBasicOps, self).\
_check_tenant_network_connectivity(
server, ssh_login, self._get_server_key(server),
servers_for_debug=self.servers)
def check_public_network_connectivity(
self, should_connect=True, msg=None,
should_check_floating_ip_status=True):
"""Verifies connectivty to a VM via public network and floating IP,
and verifies floating IP has resource status is correct.
:param should_connect: bool. determines if connectivity check is
negative or positive.
:param msg: Failure message to add to Error message. Should describe
the place in the test scenario where the method was called,
to indicate the context of the failure
:param should_check_floating_ip_status: bool. should status of
floating_ip be checked or not
"""
ssh_login = CONF.compute.image_ssh_user
floating_ip, server = self.floating_ip_tuple
ip_address = floating_ip.floating_ip_address
private_key = None
floatingip_status = 'DOWN'
if should_connect:
private_key = self._get_server_key(server)
floatingip_status = 'ACTIVE'
# Check FloatingIP Status before initiating a connection
if should_check_floating_ip_status:
self.check_floating_ip_status(floating_ip, floatingip_status)
# call the common method in the parent class
super(TestNetworkBasicOps, self).check_public_network_connectivity(
ip_address, ssh_login, private_key, should_connect, msg,
self.servers)
def _disassociate_floating_ips(self):
floating_ip, server = self.floating_ip_tuple
self._disassociate_floating_ip(floating_ip)
self.floating_ip_tuple = Floating_IP_tuple(
floating_ip, None)
def _reassociate_floating_ips(self):
floating_ip, server = self.floating_ip_tuple
name = data_utils.rand_name('new_server-smoke')
# create a new server for the floating ip
server = self._create_server(name, self.network)
self._associate_floating_ip(floating_ip, server)
self.floating_ip_tuple = Floating_IP_tuple(
floating_ip, server)
def _create_new_network(self, create_gateway=False):
self.new_net = self._create_network(tenant_id=self.tenant_id)
if create_gateway:
self.new_subnet = self._create_subnet(
network=self.new_net)
else:
self.new_subnet = self._create_subnet(
network=self.new_net,
gateway_ip=None)
def _hotplug_server(self):
old_floating_ip, server = self.floating_ip_tuple
ip_address = old_floating_ip.floating_ip_address
private_key = self._get_server_key(server)
ssh_client = self.get_remote_client(ip_address,
private_key=private_key)
old_nic_list = self._get_server_nics(ssh_client)
# get a port from a list of one item
port_list = self._list_ports(device_id=server['id'])
self.assertEqual(1, len(port_list))
old_port = port_list[0]
interface = self.interface_client.create_interface(
server_id=server['id'],
net_id=self.new_net.id)['interfaceAttachment']
self.addCleanup(self.network_client.wait_for_resource_deletion,
'port',
interface['port_id'])
self.addCleanup(self.delete_wrapper,
self.interface_client.delete_interface,
server['id'], interface['port_id'])
def check_ports():
self.new_port_list = [port for port in
self._list_ports(device_id=server['id'])
if port['id'] != old_port['id']]
return len(self.new_port_list) == 1
if not test.call_until_true(check_ports, CONF.network.build_timeout,
CONF.network.build_interval):
raise exceptions.TimeoutException(
"No new port attached to the server in time (%s sec)! "
"Old port: %s. Number of new ports: %d" % (
CONF.network.build_timeout, old_port,
len(self.new_port_list)))
new_port = net_resources.DeletablePort(client=self.network_client,
**self.new_port_list[0])
def check_new_nic():
new_nic_list = self._get_server_nics(ssh_client)
self.diff_list = [n for n in new_nic_list if n not in old_nic_list]
return len(self.diff_list) == 1
if not test.call_until_true(check_new_nic, CONF.network.build_timeout,
CONF.network.build_interval):
raise exceptions.TimeoutException("Interface not visible on the "
"guest after %s sec"
% CONF.network.build_timeout)
num, new_nic = self.diff_list[0]
ssh_client.assign_static_ip(nic=new_nic,
addr=new_port.fixed_ips[0]['ip_address'])
ssh_client.turn_nic_on(nic=new_nic)
def _get_server_nics(self, ssh_client):
reg = re.compile(r'(?P<num>\d+): (?P<nic_name>\w+):')
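        # Matches lines of `ip address` output such as
        # "2: eth0: <BROADCAST,MULTICAST,UP>", capturing ('2', 'eth0').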
ipatxt = ssh_client.get_ip_list()
return reg.findall(ipatxt)
def _check_network_internal_connectivity(self, network,
should_connect=True):
"""
        Check VM internal connectivity via ssh:
        - ping the internal gateway and the DHCP port, implying in-tenant
          connectivity (pinging both because the L3 and DHCP agents might be
          on different nodes)
"""
floating_ip, server = self.floating_ip_tuple
# get internal ports' ips:
# get all network ports in the new network
internal_ips = (p['fixed_ips'][0]['ip_address'] for p in
self._list_ports(tenant_id=server['tenant_id'],
network_id=network.id)
if p['device_owner'].startswith('network'))
self._check_server_connectivity(floating_ip,
internal_ips,
should_connect)
def _check_network_external_connectivity(self):
"""
ping public network default gateway to imply external connectivity
"""
if not CONF.network.public_network_id:
msg = 'public network not defined.'
LOG.info(msg)
return
# We ping the external IP from the instance using its floating IP
# which is always IPv4, so we must only test connectivity to
# external IPv4 IPs if the external network is dualstack.
v4_subnets = [s for s in self._list_subnets(
network_id=CONF.network.public_network_id) if s['ip_version'] == 4]
self.assertEqual(1, len(v4_subnets),
"Found %d IPv4 subnets" % len(v4_subnets))
external_ips = [v4_subnets[0]['gateway_ip']]
self._check_server_connectivity(self.floating_ip_tuple.floating_ip,
external_ips)
def _check_server_connectivity(self, floating_ip, address_list,
should_connect=True):
ip_address = floating_ip.floating_ip_address
private_key = self._get_server_key(self.floating_ip_tuple.server)
ssh_source = self._ssh_to_server(ip_address, private_key)
for remote_ip in address_list:
if should_connect:
msg = ("Timed out waiting for %s to become "
"reachable") % remote_ip
else:
msg = "ip address %s is reachable" % remote_ip
try:
self.assertTrue(self._check_remote_connectivity
(ssh_source, remote_ip, should_connect),
msg)
except Exception:
LOG.exception("Unable to access {dest} via ssh to "
"floating-ip {src}".format(dest=remote_ip,
src=floating_ip))
raise
@test.attr(type='smoke')
@test.idempotent_id('f323b3ba-82f8-4db7-8ea6-6a895869ec49')
@test.services('compute', 'network')
def test_network_basic_ops(self):
"""
For a freshly-booted VM with an IP address ("port") on a given
network:
- the Tempest host can ping the IP address. This implies, but
does not guarantee (see the ssh check that follows), that the
VM has been assigned the correct IP address and has
connectivity to the Tempest host.
- the Tempest host can perform key-based authentication to an
ssh server hosted at the IP address. This check guarantees
that the IP address is associated with the target VM.
- the Tempest host can ssh into the VM via the IP address and
successfully execute the following:
- ping an external IP address, implying external connectivity.
- ping an external hostname, implying that dns is correctly
configured.
- ping an internal IP address, implying connectivity to another
VM on the same network.
- detach the floating-ip from the VM and verify that it becomes
unreachable
- associate detached floating ip to a new VM and verify connectivity.
        VMs are created with a unique keypair, so connectivity also asserts
        that the floating IP is associated with the new VM instead of the old
        one
Verifies that floating IP status is updated correctly after each change
"""
self._setup_network_and_servers()
self.check_public_network_connectivity(should_connect=True)
self._check_network_internal_connectivity(network=self.network)
self._check_network_external_connectivity()
self._disassociate_floating_ips()
self.check_public_network_connectivity(should_connect=False,
msg="after disassociate "
"floating ip")
self._reassociate_floating_ips()
self.check_public_network_connectivity(should_connect=True,
msg="after re-associate "
"floating ip")
@test.idempotent_id('1546850e-fbaa-42f5-8b5f-03d8a6a95f15')
@testtools.skipIf(CONF.baremetal.driver_enabled,
'Baremetal relies on a shared physical network.')
@test.services('compute', 'network')
def test_connectivity_between_vms_on_different_networks(self):
"""
For a freshly-booted VM with an IP address ("port") on a given
network:
- the Tempest host can ping the IP address.
- the Tempest host can ssh into the VM via the IP address and
successfully execute the following:
- ping an external IP address, implying external connectivity.
- ping an external hostname, implying that dns is correctly
configured.
- ping an internal IP address, implying connectivity to another
VM on the same network.
        - Create another network with a subnet on the same tenant, and create
          a VM on the new network.
        - Ping the new VM from the previous VM; this should fail since the new
          network is not attached to the router yet.
        - Attach the new network to the router; pinging the new VM from the
          previous VM should now succeed.
"""
self._setup_network_and_servers()
self.check_public_network_connectivity(should_connect=True)
self._check_network_internal_connectivity(network=self.network)
self._check_network_external_connectivity()
self._create_new_network(create_gateway=True)
name = data_utils.rand_name('server-smoke')
self._create_server(name, self.new_net)
self._check_network_internal_connectivity(network=self.new_net,
should_connect=False)
self.new_subnet.add_to_router(self.router.id)
self._check_network_internal_connectivity(network=self.new_net,
should_connect=True)
@test.idempotent_id('c5adff73-e961-41f1-b4a9-343614f18cfa')
@testtools.skipUnless(CONF.compute_feature_enabled.interface_attach,
'NIC hotplug not available')
@testtools.skipIf(CONF.network.port_vnic_type in ['direct', 'macvtap'],
'NIC hotplug not supported for '
'vnic_type direct or macvtap')
@test.services('compute', 'network')
def test_hotplug_nic(self):
"""
1. create a new network, with no gateway (to prevent overwriting VM's
gateway)
2. connect VM to new network
3. set static ip and bring new nic up
4. check VM can ping new network dhcp port
"""
self._setup_network_and_servers()
self.check_public_network_connectivity(should_connect=True)
self._create_new_network()
self._hotplug_server()
self._check_network_internal_connectivity(network=self.new_net)
@test.idempotent_id('04b9fe4e-85e8-4aea-b937-ea93885ac59f')
@testtools.skipIf(CONF.baremetal.driver_enabled,
'Router state cannot be altered on a shared baremetal '
'network')
@test.services('compute', 'network')
def test_update_router_admin_state(self):
"""
1. Check public connectivity before updating
admin_state_up attribute of router to False
2. Check public connectivity after updating
admin_state_up attribute of router to False
3. Check public connectivity after updating
admin_state_up attribute of router to True
"""
self._setup_network_and_servers()
self.check_public_network_connectivity(
should_connect=True, msg="before updating "
"admin_state_up of router to False")
self._update_router_admin_state(self.router, False)
# TODO(alokmaurya): Remove should_check_floating_ip_status=False check
# once bug 1396310 is fixed
self.check_public_network_connectivity(
should_connect=False, msg="after updating "
"admin_state_up of router to False",
should_check_floating_ip_status=False)
self._update_router_admin_state(self.router, True)
self.check_public_network_connectivity(
should_connect=True, msg="after updating "
"admin_state_up of router to True")
@test.idempotent_id('d8bb918e-e2df-48b2-97cd-b73c95450980')
@testtools.skipIf(CONF.baremetal.driver_enabled,
'network isolation not available for baremetal nodes')
@testtools.skipUnless(CONF.scenario.dhcp_client,
"DHCP client is not available.")
@test.services('compute', 'network')
def test_subnet_details(self):
"""Tests that subnet's extra configuration details are affecting
the VMs. This test relies on non-shared, isolated tenant networks.
NOTE: Neutron subnets push data to servers via dhcp-agent, so any
update in subnet requires server to actively renew its DHCP lease.
1. Configure subnet with dns nameserver
2. retrieve the VM's configured dns and verify it matches the one
configured for the subnet.
3. update subnet's dns
4. retrieve the VM's configured dns and verify it matches the new one
configured for the subnet.
TODO(yfried): add host_routes
any resolution check would be testing either:
* l3 forwarding (tested in test_network_basic_ops)
* Name resolution of an external DNS nameserver - out of scope for
Tempest
"""
        # this test checks only updates (no actual resolution), so arbitrary
        # IP addresses are used as nameservers instead of parsing CONF
initial_dns_server = '1.2.3.4'
alt_dns_server = '9.8.7.6'
# renewal should be immediate.
# Timeouts are suggested by salvatore-orlando in
# https://bugs.launchpad.net/neutron/+bug/1412325/comments/3
renew_delay = CONF.network.build_interval
renew_timeout = CONF.network.build_timeout
self._setup_network_and_servers(dns_nameservers=[initial_dns_server])
self.check_public_network_connectivity(should_connect=True)
floating_ip, server = self.floating_ip_tuple
ip_address = floating_ip.floating_ip_address
private_key = self._get_server_key(server)
ssh_client = self._ssh_to_server(ip_address, private_key)
dns_servers = [initial_dns_server]
servers = ssh_client.get_dns_servers()
self.assertEqual(set(dns_servers), set(servers),
'Looking for servers: {trgt_serv}. '
'Retrieved DNS nameservers: {act_serv} '
'From host: {host}.'
.format(host=ssh_client.ssh_client.host,
act_serv=servers,
trgt_serv=dns_servers))
self.subnet.update(dns_nameservers=[alt_dns_server])
# asserts that Neutron DB has updated the nameservers
self.assertEqual([alt_dns_server], self.subnet.dns_nameservers,
"Failed to update subnet's nameservers")
def check_new_dns_server():
"""Server needs to renew its dhcp lease in order to get the new dns
definitions from subnet
NOTE(amuller): we are renewing the lease as part of the retry
because Neutron updates dnsmasq asynchronously after the
subnet-update API call returns.
"""
ssh_client.renew_lease(fixed_ip=floating_ip['fixed_ip_address'])
if ssh_client.get_dns_servers() != [alt_dns_server]:
LOG.debug("Failed to update DNS nameservers")
return False
return True
self.assertTrue(test.call_until_true(check_new_dns_server,
renew_timeout,
renew_delay),
msg="DHCP renewal failed to fetch "
"new DNS nameservers")
@test.idempotent_id('f5dfcc22-45fd-409f-954c-5bd500d7890b')
@testtools.skipIf(CONF.baremetal.driver_enabled,
'admin_state of instance ports cannot be altered '
'for baremetal nodes')
@testtools.skipUnless(CONF.network_feature_enabled.port_admin_state_change,
"Changing a port's admin state is not supported "
"by the test environment")
@test.services('compute', 'network')
def test_update_instance_port_admin_state(self):
"""
1. Check public connectivity before updating
admin_state_up attribute of instance port to False
2. Check public connectivity after updating
admin_state_up attribute of instance port to False
3. Check public connectivity after updating
admin_state_up attribute of instance port to True
"""
self._setup_network_and_servers()
floating_ip, server = self.floating_ip_tuple
server_id = server['id']
port_id = self._list_ports(device_id=server_id)[0]['id']
self.check_public_network_connectivity(
should_connect=True, msg="before updating "
"admin_state_up of instance port to False")
self.network_client.update_port(port_id, admin_state_up=False)
self.check_public_network_connectivity(
should_connect=False, msg="after updating "
"admin_state_up of instance port to False",
should_check_floating_ip_status=False)
self.network_client.update_port(port_id, admin_state_up=True)
self.check_public_network_connectivity(
should_connect=True, msg="after updating "
"admin_state_up of instance port to True")
@test.idempotent_id('759462e1-8535-46b0-ab3a-33aa45c55aaa')
@testtools.skipUnless(CONF.compute_feature_enabled.preserve_ports,
'Preserving ports on instance delete may not be '
'supported in the version of Nova being tested.')
@test.services('compute', 'network')
def test_preserve_preexisting_port(self):
"""Tests that a pre-existing port provided on server boot is not
deleted if the server is deleted.
Nova should unbind the port from the instance on delete if the port was
not created by Nova as part of the boot request.
"""
# Setup the network, create a port and boot the server from that port.
self._setup_network_and_servers(boot_with_port=True)
_, server = self.floating_ip_tuple
self.assertEqual(1, len(self.ports),
'There should only be one port created for '
'server %s.' % server['id'])
port_id = self.ports[0]['port']
self.assertIsNotNone(port_id,
'Server should have been created from a '
'pre-existing port.')
# Assert the port is bound to the server.
port_list = self._list_ports(device_id=server['id'],
network_id=self.network['id'])
self.assertEqual(1, len(port_list),
'There should only be one port created for '
'server %s.' % server['id'])
self.assertEqual(port_id, port_list[0]['id'])
# Delete the server.
self.servers_client.delete_server(server['id'])
waiters.wait_for_server_termination(self.servers_client, server['id'])
# Assert the port still exists on the network but is unbound from
# the deleted server.
port = self.network_client.show_port(port_id)['port']
self.assertEqual(self.network['id'], port['network_id'])
self.assertEqual('', port['device_id'])
self.assertEqual('', port['device_owner'])
@test.idempotent_id('2e788c46-fb3f-4ac9-8f82-0561555bea73')
@test.services('compute', 'network')
def test_router_rescheduling(self):
"""Tests that router can be removed from agent and add to a new agent.
1. Verify connectivity
2. Remove router from all l3-agents
3. Verify connectivity is down
4. Assign router to new l3-agent (or old one if no new agent is
available)
5. Verify connectivity
"""
# TODO(yfried): refactor this test to be used for other agents (dhcp)
# as well
list_hosts = (self.admin_manager.network_client.
list_l3_agents_hosting_router)
schedule_router = (self.admin_manager.network_client.
add_router_to_l3_agent)
unschedule_router = (self.admin_manager.network_client.
remove_router_from_l3_agent)
agent_list = set(a["id"] for a in
self._list_agents(agent_type="L3 agent"))
self._setup_network_and_servers()
# NOTE(kevinbenton): we have to use the admin credentials to check
# for the distributed flag because self.router only has a tenant view.
admin = self.admin_manager.network_client.show_router(self.router.id)
if admin['router'].get('distributed', False):
msg = "Rescheduling test does not apply to distributed routers."
raise self.skipException(msg)
self.check_public_network_connectivity(should_connect=True)
# remove resource from agents
hosting_agents = set(a["id"] for a in
list_hosts(self.router.id)['agents'])
no_migration = agent_list == hosting_agents
LOG.info("Router will be assigned to {mig} hosting agent".
format(mig="the same" if no_migration else "a new"))
for hosting_agent in hosting_agents:
unschedule_router(hosting_agent, self.router.id)
self.assertNotIn(hosting_agent,
[a["id"] for a in
list_hosts(self.router.id)['agents']],
'unscheduling router failed')
# verify resource is un-functional
self.check_public_network_connectivity(
should_connect=False,
msg='after router unscheduling',
should_check_floating_ip_status=False
)
# schedule resource to new agent
target_agent = list(hosting_agents if no_migration else
agent_list - hosting_agents)[0]
schedule_router(target_agent,
self.router['id'])
self.assertEqual(
target_agent,
list_hosts(self.router.id)['agents'][0]['id'],
"Router failed to reschedule. Hosting agent doesn't match "
"target agent")
# verify resource is functional
self.check_public_network_connectivity(
should_connect=True,
msg='After router rescheduling')
| flyingfish007/tempest | tempest/scenario/test_network_basic_ops.py | Python | apache-2.0 | 32,963 |
'use strict';
goog.provide('grrUi.core.module');
goog.require('grrUi.core.aff4DownloadLinkDirective.Aff4DownloadLinkDirective');
goog.require('grrUi.core.aff4ItemsProviderDirective.Aff4ItemsProviderDirective');
goog.require('grrUi.core.aff4Service.Aff4Service');
goog.require('grrUi.core.apiItemsProviderDirective.ApiItemsProviderDirective');
goog.require('grrUi.core.apiService.ApiService');
goog.require('grrUi.core.basenameFilter.BasenameFilter');
goog.require('grrUi.core.clockDirective.ClockDirective');
goog.require('grrUi.core.downloadCollectionFilesDirective.DownloadCollectionFilesDirective');
goog.require('grrUi.core.encodeUriComponentFilter.EncodeUriComponentFilter');
goog.require('grrUi.core.forceRefreshDirective.ForceRefreshDirective');
goog.require('grrUi.core.infiniteTableDirective.InfiniteTableDirective');
goog.require('grrUi.core.legacyRendererDirective.LegacyRendererDirective');
goog.require('grrUi.core.memoryItemsProviderDirective.MemoryItemsProviderDirective');
goog.require('grrUi.core.pagedFilteredTableDirective.PagedFilteredTableDirective');
goog.require('grrUi.core.pagedFilteredTableDirective.TableBottomDirective');
goog.require('grrUi.core.pagedFilteredTableDirective.TableTopDirective');
goog.require('grrUi.core.reflectionService.ReflectionService');
goog.require('grrUi.core.resultsCollectionDirective.ResultsCollectionDirective');
goog.require('grrUi.core.splitterDirective.SplitterDirective');
goog.require('grrUi.core.splitterDirective.SplitterPaneDirective');
goog.require('grrUi.core.timeService.TimeService');
goog.require('grrUi.core.timeSinceFilter.TimeSinceFilter');
/**
* Angular module for core GRR UI components.
*/
grrUi.core.module = angular.module('grrUi.core', ['ngCookies',
'ui.bootstrap']);
grrUi.core.module.directive(
grrUi.core.aff4DownloadLinkDirective.Aff4DownloadLinkDirective
.directive_name,
grrUi.core.aff4DownloadLinkDirective.Aff4DownloadLinkDirective);
grrUi.core.module.directive(
grrUi.core.apiItemsProviderDirective.
ApiItemsProviderDirective.directive_name,
grrUi.core.apiItemsProviderDirective.ApiItemsProviderDirective);
grrUi.core.module.directive(
grrUi.core.aff4ItemsProviderDirective.
Aff4ItemsProviderDirective.directive_name,
grrUi.core.aff4ItemsProviderDirective.Aff4ItemsProviderDirective);
grrUi.core.module.directive(
grrUi.core.forceRefreshDirective.ForceRefreshDirective.directive_name,
grrUi.core.forceRefreshDirective.ForceRefreshDirective);
grrUi.core.module.directive(
grrUi.core.legacyRendererDirective.LegacyRendererDirective.directive_name,
grrUi.core.legacyRendererDirective.LegacyRendererDirective);
grrUi.core.module.directive(
grrUi.core.memoryItemsProviderDirective.
MemoryItemsProviderDirective.directive_name,
grrUi.core.memoryItemsProviderDirective.MemoryItemsProviderDirective);
grrUi.core.module.directive(
grrUi.core.pagedFilteredTableDirective.
PagedFilteredTableDirective.directive_name,
grrUi.core.pagedFilteredTableDirective.PagedFilteredTableDirective);
grrUi.core.module.directive(
grrUi.core.pagedFilteredTableDirective.TableTopDirective.directive_name,
grrUi.core.pagedFilteredTableDirective.TableTopDirective);
grrUi.core.module.directive(
grrUi.core.pagedFilteredTableDirective.TableBottomDirective.directive_name,
grrUi.core.pagedFilteredTableDirective.TableBottomDirective);
grrUi.core.module.directive(
grrUi.core.infiniteTableDirective.InfiniteTableDirective.directive_name,
grrUi.core.infiniteTableDirective.InfiniteTableDirective);
grrUi.core.module.service(
grrUi.core.aff4Service.Aff4Service.service_name,
grrUi.core.aff4Service.Aff4Service);
grrUi.core.module.service(
grrUi.core.apiService.ApiService.service_name,
grrUi.core.apiService.ApiService);
grrUi.core.module.directive(
grrUi.core.resultsCollectionDirective.ResultsCollectionDirective
.directive_name,
grrUi.core.resultsCollectionDirective.ResultsCollectionDirective);
grrUi.core.module.directive(
grrUi.core.splitterDirective.SplitterDirective.directive_name,
grrUi.core.splitterDirective.SplitterDirective);
grrUi.core.module.directive(
grrUi.core.splitterDirective.SplitterPaneDirective.directive_name,
grrUi.core.splitterDirective.SplitterPaneDirective);
grrUi.core.module.directive(
grrUi.core.clockDirective.ClockDirective.directive_name,
grrUi.core.clockDirective.ClockDirective);
grrUi.core.module.directive(
grrUi.core.downloadCollectionFilesDirective
.DownloadCollectionFilesDirective.directive_name,
grrUi.core.downloadCollectionFilesDirective
.DownloadCollectionFilesDirective);
grrUi.core.module.service(
grrUi.core.reflectionService.ReflectionService.service_name,
grrUi.core.reflectionService.ReflectionService);
grrUi.core.module.service(
grrUi.core.timeService.TimeService.service_name,
grrUi.core.timeService.TimeService);
grrUi.core.module.filter(grrUi.core.basenameFilter.BasenameFilter.filter_name,
grrUi.core.basenameFilter.BasenameFilter);
grrUi.core.module.filter(
grrUi.core.encodeUriComponentFilter.EncodeUriComponentFilter.filter_name,
grrUi.core.encodeUriComponentFilter.EncodeUriComponentFilter);
grrUi.core.module.filter(grrUi.core.timeSinceFilter.TimeSinceFilter.filter_name,
grrUi.core.timeSinceFilter.TimeSinceFilter);
| ahojjati/grr | gui/static/angular-components/core/core.js | JavaScript | apache-2.0 | 5,469 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.query.aggregation.datasketches.theta;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.PostAggregator;
import org.apache.druid.query.aggregation.post.PostAggregatorIds;
import org.apache.druid.query.cache.CacheKeyBuilder;
import org.apache.druid.segment.ColumnInspector;
import org.apache.druid.segment.column.ColumnType;
import java.util.Comparator;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
/**
* Returns a human-readable summary of a given Theta sketch.
 * This is the string returned by the toString() method of the sketch.
* This can be useful for debugging.
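 *
 * For illustration only, a post-aggregation entry in a query could look like
 * the following sketch (the JSON type name "thetaSketchToString" is assumed
 * from the extension's module registration, not defined in this class):
 * <pre>
 * {
 *   "type": "thetaSketchToString",
 *   "name": "sketch_summary",
 *   "field": { "type": "fieldAccess", "fieldName": "theta_sketch" }
 * }
 * </pre>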
*/
public class SketchToStringPostAggregator implements PostAggregator
{
private final String name;
private final PostAggregator field;
@JsonCreator
public SketchToStringPostAggregator(
@JsonProperty("name") final String name,
@JsonProperty("field") final PostAggregator field
)
{
this.name = name;
this.field = field;
}
@Override
public Set<String> getDependentFields()
{
return field.getDependentFields();
}
@Override
public Comparator<String> getComparator()
{
return Comparator.nullsFirst(Comparator.naturalOrder());
}
@Override
public Object compute(final Map<String, Object> combinedAggregators)
{
final SketchHolder holder = (SketchHolder) field.compute(combinedAggregators);
return holder.getSketch().toString();
}
@Override
@JsonProperty
public String getName()
{
return name;
}
@Override
public ColumnType getType(ColumnInspector signature)
{
return ColumnType.STRING;
}
@Override
public PostAggregator decorate(final Map<String, AggregatorFactory> aggregators)
{
return this;
}
@JsonProperty
public PostAggregator getField()
{
return field;
}
@Override
public byte[] getCacheKey()
{
return new CacheKeyBuilder(PostAggregatorIds.THETA_SKETCH_TO_STRING)
.appendString(name)
.appendCacheable(field)
.build();
}
@Override
public String toString()
{
return getClass().getSimpleName() + "{" +
"name='" + name + '\'' +
", field=" + field +
"}";
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
SketchToStringPostAggregator that = (SketchToStringPostAggregator) o;
return name.equals(that.name) &&
field.equals(that.field);
}
@Override
public int hashCode()
{
return Objects.hash(name, field);
}
}
| druid-io/druid | extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/theta/SketchToStringPostAggregator.java | Java | apache-2.0 | 3,556 |
/*
* This file is open source software, licensed to you under the terms
* of the Apache License, Version 2.0 (the "License"). See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. You may not use this file except in compliance with the License.
*
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Copyright 2015 Cloudius Systems
*/
#pragma once
#include <seastar/json/json_elements.hh>
#include <seastar/json/formatter.hh>
#include <seastar/http/routes.hh>
#include <seastar/http/transformers.hh>
#include <string>
#include <seastar/util/noncopyable_function.hh>
namespace seastar {
namespace httpd {
struct api_doc : public json::json_base {
json::json_element<std::string> path;
json::json_element<std::string> description;
void register_params() {
add(&path, "path");
add(&description, "description");
}
api_doc() {
register_params();
}
api_doc(const api_doc & e)
: json::json_base()
{
register_params();
path = e.path;
description = e.description;
}
template<class T>
api_doc& operator=(const T& e) {
path = e.path;
description = e.description;
return *this;
}
api_doc& operator=(const api_doc& e) {
path = e.path;
description = e.description;
return *this;
}
};
struct api_docs : public json::json_base {
json::json_element<std::string> apiVersion;
json::json_element<std::string> swaggerVersion;
json::json_list<api_doc> apis;
void register_params() {
add(&apiVersion, "apiVersion");
add(&swaggerVersion, "swaggerVersion");
add(&apis, "apis");
}
api_docs() {
apiVersion = "0.0.1";
swaggerVersion = "1.2";
register_params();
}
api_docs(const api_docs & e)
: json::json_base()
{
apiVersion = "0.0.1";
swaggerVersion = "1.2";
register_params();
}
template<class T>
api_docs& operator=(const T& e) {
apis = e.apis;
return *this;
}
api_docs& operator=(const api_docs& e) {
apis = e.apis;
return *this;
}
};
class api_registry_base : public handler_base {
protected:
sstring _base_path;
sstring _file_directory;
routes& _routes;
public:
api_registry_base(routes& routes, const sstring& file_directory,
const sstring& base_path)
: _base_path(base_path), _file_directory(file_directory), _routes(
routes) {
}
void set_route(handler_base* h) {
_routes.put(GET, _base_path, h);
}
virtual ~api_registry_base() = default;
};
class api_registry : public api_registry_base {
api_docs _docs;
public:
api_registry(routes& routes, const sstring& file_directory,
const sstring& base_path)
: api_registry_base(routes, file_directory, base_path) {
set_route(this);
}
future<std::unique_ptr<reply>> handle(const sstring& path,
std::unique_ptr<request> req, std::unique_ptr<reply> rep) override {
rep->_content = json::formatter::to_json(_docs);
rep->done("json");
return make_ready_future<std::unique_ptr<reply>>(std::move(rep));
}
void reg(const sstring& api, const sstring& description,
const sstring& alternative_path = "") {
api_doc doc;
doc.description = description;
doc.path = "/" + api;
_docs.apis.push(doc);
sstring path =
(alternative_path == "") ?
_file_directory + api + ".json" : alternative_path;
file_handler* index = new file_handler(path,
new content_replace("json"));
_routes.put(GET, _base_path + "/" + api, index);
}
};
class api_registry_builder_base {
protected:
sstring _file_directory;
sstring _base_path;
static const sstring DEFAULT_DIR;
static const sstring DEFAULT_PATH;
public:
api_registry_builder_base(const sstring& file_directory = DEFAULT_DIR,
const sstring& base_path = DEFAULT_PATH)
: _file_directory(file_directory), _base_path(base_path) {
}
};
class api_registry_builder : public api_registry_builder_base {
public:
api_registry_builder(const sstring& file_directory = DEFAULT_DIR,
const sstring& base_path = DEFAULT_PATH)
: api_registry_builder_base(file_directory, base_path) {
}
void set_api_doc(routes& r) {
new api_registry(r, _file_directory, _base_path);
}
void register_function(routes& r, const sstring& api,
const sstring& description, const sstring& alternative_path = "") {
auto h = r.get_exact_match(GET, _base_path);
if (h) {
            // if a handler is found, it was added there by the api_registry_builder
            // with the set_api_doc method, so we know its concrete type
static_cast<api_registry*>(h)->reg(api, description, alternative_path);
};
}
};
using doc_entry = noncopyable_function<future<>(output_stream<char>&)>;
/*!
* \brief a helper function that creates a reader from a file
*/
doc_entry get_file_reader(sstring file_name);
/*!
 * \brief An api doc that supports swagger version 2.0
*
* The result is a unified JSON file with the swagger definitions.
*
 * The file content is a concatenation of the doc_entries in the order in
 * which they were registered.
 *
 * Definitions will be added under the definitions section
*
* typical usage:
*
* First entry:
*
{
"swagger": "2.0",
"host": "localhost:10000",
"basePath": "/v2",
"paths": {
* entry:
"/config/{id}": {
"get": {
"description": "Return a config value",
"operationId": "findConfigId",
"produces": [
"application/json"
],
}
}
*
* Closing the entries:
},
"definitions": {
.....
.....
}
}
*
*/
class api_docs_20 {
std::vector<doc_entry> _apis;
content_replace _transform;
std::vector<doc_entry> _definitions;
public:
future<> write(output_stream<char>&&, std::unique_ptr<request> req);
void add_api(doc_entry&& f) {
_apis.emplace_back(std::move(f));
}
void add_definition(doc_entry&& f) {
_definitions.emplace_back(std::move(f));
}
};
class api_registry_20 : public api_registry_base {
api_docs_20 _docs;
public:
api_registry_20(routes& routes, const sstring& file_directory,
const sstring& base_path)
: api_registry_base(routes, file_directory, base_path) {
set_route(this);
}
future<std::unique_ptr<reply>> handle(const sstring& path,
std::unique_ptr<request> req, std::unique_ptr<reply> rep) override {
rep->write_body("json", [this, req = std::move(req)] (output_stream<char>&& os) mutable {
return _docs.write(std::move(os), std::move(req));
});
return make_ready_future<std::unique_ptr<reply>>(std::move(rep));
}
virtual void reg(doc_entry&& f) {
_docs.add_api(std::move(f));
}
virtual void add_definition(doc_entry&& f) {
_docs.add_definition(std::move(f));
}
};
class api_registry_builder20 : public api_registry_builder_base {
api_registry_20* get_register_base(routes& r) {
auto h = r.get_exact_match(GET, _base_path);
if (h) {
            // if a handler is found, it was added there by the api_registry_builder
            // with the set_api_doc method, so we know its concrete type
return static_cast<api_registry_20*>(h);
}
return nullptr;
}
public:
api_registry_builder20(const sstring& file_directory = DEFAULT_DIR,
const sstring& base_path = DEFAULT_PATH)
: api_registry_builder_base(file_directory, base_path) {
}
void set_api_doc(routes& r) {
new api_registry_20(r, _file_directory, _base_path);
}
/*!
* \brief register a doc_entry
     * This doc_entry can be used either to take the definitions from a file
     * or to generate them dynamically.
*/
void register_function(routes& r, doc_entry&& f) {
auto h = get_register_base(r);
if (h) {
h->reg(std::move(f));
}
}
/*!
* \brief register an API
*/
void register_api_file(routes& r, const sstring& api) {
register_function(r, get_file_reader(_file_directory + "/" + api + ".json"));
}
/*!
     * Add a footer doc_entry (appended to the definitions section)
*/
void add_definition(routes& r, doc_entry&& f) {
auto h = get_register_base(r);
if (h) {
h->add_definition(std::move(f));
}
}
/*!
* Add a definition file
*/
void add_definitions_file(routes& r, const sstring& file) {
add_definition(r, get_file_reader(_file_directory + file + ".def.json" ));
}
};
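// A minimal usage sketch for the swagger 2.0 registry above (names and paths
// are illustrative assumptions, not part of this header). Given an existing
// routes object `r`:
//
//   httpd::api_registry_builder20 api_doc("apps/my_app/api/");
//   api_doc.set_api_doc(r);
//   api_doc.register_api_file(r, "storage");
//   api_doc.add_definitions_file(r, "storage");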
}
}
| scylladb/seastar | include/seastar/http/api_docs.hh | C++ | apache-2.0 | 9,305 |
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// This file has been auto-generated by code_generator_v8.py. DO NOT MODIFY!
#include "config.h"
#include "bindings/modules/v8/UnionTypesModules.h"
#include "bindings/core/v8/Dictionary.h"
#include "bindings/core/v8/UnionTypesCore.h"
#include "bindings/core/v8/V8ArrayBuffer.h"
#include "bindings/core/v8/V8ArrayBufferView.h"
#include "bindings/core/v8/V8Blob.h"
#include "bindings/core/v8/V8DOMStringList.h"
#include "bindings/core/v8/V8FormData.h"
#include "bindings/core/v8/V8HTMLCanvasElement.h"
#include "bindings/core/v8/V8HTMLImageElement.h"
#include "bindings/core/v8/V8HTMLVideoElement.h"
#include "bindings/core/v8/V8ImageBitmap.h"
#include "bindings/core/v8/V8MessagePort.h"
#include "bindings/modules/v8/UnionTypesModules.h"
#include "bindings/modules/v8/V8CanvasGradient.h"
#include "bindings/modules/v8/V8CanvasPattern.h"
#include "bindings/modules/v8/V8Request.h"
#include "bindings/modules/v8/V8ServiceWorker.h"
#include "core/workers/AbstractWorker.h"
namespace blink {
ArrayBufferOrArrayBufferViewOrDictionary::ArrayBufferOrArrayBufferViewOrDictionary()
: m_type(SpecificTypeNone)
{
}
PassRefPtr<DOMArrayBuffer> ArrayBufferOrArrayBufferViewOrDictionary::getAsArrayBuffer() const
{
ASSERT(isArrayBuffer());
return m_arrayBuffer;
}
void ArrayBufferOrArrayBufferViewOrDictionary::setArrayBuffer(PassRefPtr<DOMArrayBuffer> value)
{
ASSERT(isNull());
m_arrayBuffer = value;
m_type = SpecificTypeArrayBuffer;
}
ArrayBufferOrArrayBufferViewOrDictionary ArrayBufferOrArrayBufferViewOrDictionary::fromArrayBuffer(PassRefPtr<DOMArrayBuffer> value)
{
ArrayBufferOrArrayBufferViewOrDictionary container;
container.setArrayBuffer(value);
return container;
}
PassRefPtr<DOMArrayBufferView> ArrayBufferOrArrayBufferViewOrDictionary::getAsArrayBufferView() const
{
ASSERT(isArrayBufferView());
return m_arrayBufferView;
}
void ArrayBufferOrArrayBufferViewOrDictionary::setArrayBufferView(PassRefPtr<DOMArrayBufferView> value)
{
ASSERT(isNull());
m_arrayBufferView = value;
m_type = SpecificTypeArrayBufferView;
}
ArrayBufferOrArrayBufferViewOrDictionary ArrayBufferOrArrayBufferViewOrDictionary::fromArrayBufferView(PassRefPtr<DOMArrayBufferView> value)
{
ArrayBufferOrArrayBufferViewOrDictionary container;
container.setArrayBufferView(value);
return container;
}
Dictionary ArrayBufferOrArrayBufferViewOrDictionary::getAsDictionary() const
{
ASSERT(isDictionary());
return m_dictionary;
}
void ArrayBufferOrArrayBufferViewOrDictionary::setDictionary(Dictionary value)
{
ASSERT(isNull());
m_dictionary = value;
m_type = SpecificTypeDictionary;
}
ArrayBufferOrArrayBufferViewOrDictionary ArrayBufferOrArrayBufferViewOrDictionary::fromDictionary(Dictionary value)
{
ArrayBufferOrArrayBufferViewOrDictionary container;
container.setDictionary(value);
return container;
}
ArrayBufferOrArrayBufferViewOrDictionary::ArrayBufferOrArrayBufferViewOrDictionary(const ArrayBufferOrArrayBufferViewOrDictionary&) = default;
ArrayBufferOrArrayBufferViewOrDictionary::~ArrayBufferOrArrayBufferViewOrDictionary() = default;
ArrayBufferOrArrayBufferViewOrDictionary& ArrayBufferOrArrayBufferViewOrDictionary::operator=(const ArrayBufferOrArrayBufferViewOrDictionary&) = default;
DEFINE_TRACE(ArrayBufferOrArrayBufferViewOrDictionary)
{
}
void V8ArrayBufferOrArrayBufferViewOrDictionary::toImpl(v8::Isolate* isolate, v8::Local<v8::Value> v8Value, ArrayBufferOrArrayBufferViewOrDictionary& impl, ExceptionState& exceptionState)
{
if (v8Value.IsEmpty())
return;
if (V8ArrayBuffer::hasInstance(v8Value, isolate)) {
RefPtr<DOMArrayBuffer> cppValue = V8ArrayBuffer::toImpl(v8::Local<v8::Object>::Cast(v8Value));
impl.setArrayBuffer(cppValue);
return;
}
if (V8ArrayBufferView::hasInstance(v8Value, isolate)) {
RefPtr<DOMArrayBufferView> cppValue = V8ArrayBufferView::toImpl(v8::Local<v8::Object>::Cast(v8Value));
impl.setArrayBufferView(cppValue);
return;
}
if (isUndefinedOrNull(v8Value) || v8Value->IsObject()) {
Dictionary cppValue = Dictionary(v8Value, isolate, exceptionState);
if (exceptionState.hadException())
return;
impl.setDictionary(cppValue);
return;
}
exceptionState.throwTypeError("The provided value is not of type '(ArrayBuffer or ArrayBufferView or Dictionary)'");
}
v8::Local<v8::Value> toV8(const ArrayBufferOrArrayBufferViewOrDictionary& impl, v8::Local<v8::Object> creationContext, v8::Isolate* isolate)
{
switch (impl.m_type) {
case ArrayBufferOrArrayBufferViewOrDictionary::SpecificTypeNone:
return v8::Null(isolate);
case ArrayBufferOrArrayBufferViewOrDictionary::SpecificTypeArrayBuffer:
return toV8(impl.getAsArrayBuffer(), creationContext, isolate);
case ArrayBufferOrArrayBufferViewOrDictionary::SpecificTypeArrayBufferView:
return toV8(impl.getAsArrayBufferView(), creationContext, isolate);
case ArrayBufferOrArrayBufferViewOrDictionary::SpecificTypeDictionary:
return impl.getAsDictionary().v8Value();
default:
ASSERT_NOT_REACHED();
}
return v8::Local<v8::Value>();
}
ArrayBufferOrArrayBufferViewOrDictionary NativeValueTraits<ArrayBufferOrArrayBufferViewOrDictionary>::nativeValue(v8::Isolate* isolate, v8::Local<v8::Value> value, ExceptionState& exceptionState)
{
ArrayBufferOrArrayBufferViewOrDictionary impl;
V8ArrayBufferOrArrayBufferViewOrDictionary::toImpl(isolate, value, impl, exceptionState);
return impl;
}
ArrayBufferViewOrBlobOrStringOrFormData::ArrayBufferViewOrBlobOrStringOrFormData()
: m_type(SpecificTypeNone)
{
}
PassRefPtr<DOMArrayBufferView> ArrayBufferViewOrBlobOrStringOrFormData::getAsArrayBufferView() const
{
ASSERT(isArrayBufferView());
return m_arrayBufferView;
}
void ArrayBufferViewOrBlobOrStringOrFormData::setArrayBufferView(PassRefPtr<DOMArrayBufferView> value)
{
ASSERT(isNull());
m_arrayBufferView = value;
m_type = SpecificTypeArrayBufferView;
}
ArrayBufferViewOrBlobOrStringOrFormData ArrayBufferViewOrBlobOrStringOrFormData::fromArrayBufferView(PassRefPtr<DOMArrayBufferView> value)
{
ArrayBufferViewOrBlobOrStringOrFormData container;
container.setArrayBufferView(value);
return container;
}
Blob* ArrayBufferViewOrBlobOrStringOrFormData::getAsBlob() const
{
ASSERT(isBlob());
return m_blob;
}
void ArrayBufferViewOrBlobOrStringOrFormData::setBlob(Blob* value)
{
ASSERT(isNull());
m_blob = value;
m_type = SpecificTypeBlob;
}
ArrayBufferViewOrBlobOrStringOrFormData ArrayBufferViewOrBlobOrStringOrFormData::fromBlob(Blob* value)
{
ArrayBufferViewOrBlobOrStringOrFormData container;
container.setBlob(value);
return container;
}
String ArrayBufferViewOrBlobOrStringOrFormData::getAsString() const
{
ASSERT(isString());
return m_string;
}
void ArrayBufferViewOrBlobOrStringOrFormData::setString(String value)
{
ASSERT(isNull());
m_string = value;
m_type = SpecificTypeString;
}
ArrayBufferViewOrBlobOrStringOrFormData ArrayBufferViewOrBlobOrStringOrFormData::fromString(String value)
{
ArrayBufferViewOrBlobOrStringOrFormData container;
container.setString(value);
return container;
}
DOMFormData* ArrayBufferViewOrBlobOrStringOrFormData::getAsFormData() const
{
ASSERT(isFormData());
return m_formData;
}
void ArrayBufferViewOrBlobOrStringOrFormData::setFormData(DOMFormData* value)
{
ASSERT(isNull());
m_formData = value;
m_type = SpecificTypeFormData;
}
ArrayBufferViewOrBlobOrStringOrFormData ArrayBufferViewOrBlobOrStringOrFormData::fromFormData(DOMFormData* value)
{
ArrayBufferViewOrBlobOrStringOrFormData container;
container.setFormData(value);
return container;
}
ArrayBufferViewOrBlobOrStringOrFormData::ArrayBufferViewOrBlobOrStringOrFormData(const ArrayBufferViewOrBlobOrStringOrFormData&) = default;
ArrayBufferViewOrBlobOrStringOrFormData::~ArrayBufferViewOrBlobOrStringOrFormData() = default;
ArrayBufferViewOrBlobOrStringOrFormData& ArrayBufferViewOrBlobOrStringOrFormData::operator=(const ArrayBufferViewOrBlobOrStringOrFormData&) = default;
DEFINE_TRACE(ArrayBufferViewOrBlobOrStringOrFormData)
{
visitor->trace(m_blob);
visitor->trace(m_formData);
}
void V8ArrayBufferViewOrBlobOrStringOrFormData::toImpl(v8::Isolate* isolate, v8::Local<v8::Value> v8Value, ArrayBufferViewOrBlobOrStringOrFormData& impl, ExceptionState& exceptionState)
{
if (v8Value.IsEmpty())
return;
if (V8Blob::hasInstance(v8Value, isolate)) {
RawPtr<Blob> cppValue = V8Blob::toImpl(v8::Local<v8::Object>::Cast(v8Value));
impl.setBlob(cppValue);
return;
}
if (V8FormData::hasInstance(v8Value, isolate)) {
RawPtr<DOMFormData> cppValue = V8FormData::toImpl(v8::Local<v8::Object>::Cast(v8Value));
impl.setFormData(cppValue);
return;
}
if (V8ArrayBufferView::hasInstance(v8Value, isolate)) {
RefPtr<DOMArrayBufferView> cppValue = V8ArrayBufferView::toImpl(v8::Local<v8::Object>::Cast(v8Value));
impl.setArrayBufferView(cppValue);
return;
}
{
V8StringResource<> cppValue = v8Value;
if (!cppValue.prepare(exceptionState))
return;
impl.setString(cppValue);
return;
}
}
v8::Local<v8::Value> toV8(const ArrayBufferViewOrBlobOrStringOrFormData& impl, v8::Local<v8::Object> creationContext, v8::Isolate* isolate)
{
switch (impl.m_type) {
case ArrayBufferViewOrBlobOrStringOrFormData::SpecificTypeNone:
return v8::Null(isolate);
case ArrayBufferViewOrBlobOrStringOrFormData::SpecificTypeArrayBufferView:
return toV8(impl.getAsArrayBufferView(), creationContext, isolate);
case ArrayBufferViewOrBlobOrStringOrFormData::SpecificTypeBlob:
return toV8(impl.getAsBlob(), creationContext, isolate);
case ArrayBufferViewOrBlobOrStringOrFormData::SpecificTypeString:
return v8String(isolate, impl.getAsString());
case ArrayBufferViewOrBlobOrStringOrFormData::SpecificTypeFormData:
return toV8(impl.getAsFormData(), creationContext, isolate);
default:
ASSERT_NOT_REACHED();
}
return v8::Local<v8::Value>();
}
ArrayBufferViewOrBlobOrStringOrFormData NativeValueTraits<ArrayBufferViewOrBlobOrStringOrFormData>::nativeValue(v8::Isolate* isolate, v8::Local<v8::Value> value, ExceptionState& exceptionState)
{
ArrayBufferViewOrBlobOrStringOrFormData impl;
V8ArrayBufferViewOrBlobOrStringOrFormData::toImpl(isolate, value, impl, exceptionState);
return impl;
}
BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString()
: m_type(SpecificTypeNone)
{
}
Blob* BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::getAsBlob() const
{
ASSERT(isBlob());
return m_blob;
}
void BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::setBlob(Blob* value)
{
ASSERT(isNull());
m_blob = value;
m_type = SpecificTypeBlob;
}
BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::fromBlob(Blob* value)
{
BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString container;
container.setBlob(value);
return container;
}
PassRefPtr<DOMArrayBuffer> BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::getAsArrayBuffer() const
{
ASSERT(isArrayBuffer());
return m_arrayBuffer;
}
void BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::setArrayBuffer(PassRefPtr<DOMArrayBuffer> value)
{
ASSERT(isNull());
m_arrayBuffer = value;
m_type = SpecificTypeArrayBuffer;
}
BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::fromArrayBuffer(PassRefPtr<DOMArrayBuffer> value)
{
BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString container;
container.setArrayBuffer(value);
return container;
}
PassRefPtr<DOMArrayBufferView> BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::getAsArrayBufferView() const
{
ASSERT(isArrayBufferView());
return m_arrayBufferView;
}
void BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::setArrayBufferView(PassRefPtr<DOMArrayBufferView> value)
{
ASSERT(isNull());
m_arrayBufferView = value;
m_type = SpecificTypeArrayBufferView;
}
BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::fromArrayBufferView(PassRefPtr<DOMArrayBufferView> value)
{
BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString container;
container.setArrayBufferView(value);
return container;
}
DOMFormData* BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::getAsFormData() const
{
ASSERT(isFormData());
return m_formData;
}
void BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::setFormData(DOMFormData* value)
{
ASSERT(isNull());
m_formData = value;
m_type = SpecificTypeFormData;
}
BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::fromFormData(DOMFormData* value)
{
BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString container;
container.setFormData(value);
return container;
}
String BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::getAsUSVString() const
{
ASSERT(isUSVString());
return m_uSVString;
}
void BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::setUSVString(String value)
{
ASSERT(isNull());
m_uSVString = value;
m_type = SpecificTypeUSVString;
}
BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::fromUSVString(String value)
{
BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString container;
container.setUSVString(value);
return container;
}
BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString(const BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString&) = default;
BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::~BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString() = default;
BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString& BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::operator=(const BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString&) = default;
DEFINE_TRACE(BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString)
{
visitor->trace(m_blob);
visitor->trace(m_formData);
}
void V8BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::toImpl(v8::Isolate* isolate, v8::Local<v8::Value> v8Value, BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString& impl, ExceptionState& exceptionState)
{
if (v8Value.IsEmpty())
return;
if (V8Blob::hasInstance(v8Value, isolate)) {
RawPtr<Blob> cppValue = V8Blob::toImpl(v8::Local<v8::Object>::Cast(v8Value));
impl.setBlob(cppValue);
return;
}
if (V8FormData::hasInstance(v8Value, isolate)) {
RawPtr<DOMFormData> cppValue = V8FormData::toImpl(v8::Local<v8::Object>::Cast(v8Value));
impl.setFormData(cppValue);
return;
}
if (V8ArrayBuffer::hasInstance(v8Value, isolate)) {
RefPtr<DOMArrayBuffer> cppValue = V8ArrayBuffer::toImpl(v8::Local<v8::Object>::Cast(v8Value));
impl.setArrayBuffer(cppValue);
return;
}
if (V8ArrayBufferView::hasInstance(v8Value, isolate)) {
RefPtr<DOMArrayBufferView> cppValue = V8ArrayBufferView::toImpl(v8::Local<v8::Object>::Cast(v8Value));
impl.setArrayBufferView(cppValue);
return;
}
{
V8StringResource<> cppValue = toUSVString(isolate, v8Value, exceptionState);
if (exceptionState.hadException())
return;
impl.setUSVString(cppValue);
return;
}
}
v8::Local<v8::Value> toV8(const BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString& impl, v8::Local<v8::Object> creationContext, v8::Isolate* isolate)
{
switch (impl.m_type) {
case BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::SpecificTypeNone:
return v8::Null(isolate);
case BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::SpecificTypeBlob:
return toV8(impl.getAsBlob(), creationContext, isolate);
case BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::SpecificTypeArrayBuffer:
return toV8(impl.getAsArrayBuffer(), creationContext, isolate);
case BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::SpecificTypeArrayBufferView:
return toV8(impl.getAsArrayBufferView(), creationContext, isolate);
case BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::SpecificTypeFormData:
return toV8(impl.getAsFormData(), creationContext, isolate);
case BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::SpecificTypeUSVString:
return v8String(isolate, impl.getAsUSVString());
default:
ASSERT_NOT_REACHED();
}
return v8::Local<v8::Value>();
}
BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString NativeValueTraits<BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString>::nativeValue(v8::Isolate* isolate, v8::Local<v8::Value> value, ExceptionState& exceptionState)
{
BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString impl;
V8BlobOrArrayBufferOrArrayBufferViewOrFormDataOrUSVString::toImpl(isolate, value, impl, exceptionState);
return impl;
}
DictionaryOrString::DictionaryOrString()
: m_type(SpecificTypeNone)
{
}
Dictionary DictionaryOrString::getAsDictionary() const
{
ASSERT(isDictionary());
return m_dictionary;
}
void DictionaryOrString::setDictionary(Dictionary value)
{
ASSERT(isNull());
m_dictionary = value;
m_type = SpecificTypeDictionary;
}
DictionaryOrString DictionaryOrString::fromDictionary(Dictionary value)
{
DictionaryOrString container;
container.setDictionary(value);
return container;
}
String DictionaryOrString::getAsString() const
{
ASSERT(isString());
return m_string;
}
void DictionaryOrString::setString(String value)
{
ASSERT(isNull());
m_string = value;
m_type = SpecificTypeString;
}
DictionaryOrString DictionaryOrString::fromString(String value)
{
DictionaryOrString container;
container.setString(value);
return container;
}
DictionaryOrString::DictionaryOrString(const DictionaryOrString&) = default;
DictionaryOrString::~DictionaryOrString() = default;
DictionaryOrString& DictionaryOrString::operator=(const DictionaryOrString&) = default;
DEFINE_TRACE(DictionaryOrString)
{
}
void V8DictionaryOrString::toImpl(v8::Isolate* isolate, v8::Local<v8::Value> v8Value, DictionaryOrString& impl, ExceptionState& exceptionState)
{
if (v8Value.IsEmpty())
return;
if (isUndefinedOrNull(v8Value) || v8Value->IsObject()) {
Dictionary cppValue = Dictionary(v8Value, isolate, exceptionState);
if (exceptionState.hadException())
return;
impl.setDictionary(cppValue);
return;
}
{
V8StringResource<> cppValue = v8Value;
if (!cppValue.prepare(exceptionState))
return;
impl.setString(cppValue);
return;
}
}
v8::Local<v8::Value> toV8(const DictionaryOrString& impl, v8::Local<v8::Object> creationContext, v8::Isolate* isolate)
{
switch (impl.m_type) {
case DictionaryOrString::SpecificTypeNone:
return v8::Null(isolate);
case DictionaryOrString::SpecificTypeDictionary:
return impl.getAsDictionary().v8Value();
case DictionaryOrString::SpecificTypeString:
return v8String(isolate, impl.getAsString());
default:
ASSERT_NOT_REACHED();
}
return v8::Local<v8::Value>();
}
DictionaryOrString NativeValueTraits<DictionaryOrString>::nativeValue(v8::Isolate* isolate, v8::Local<v8::Value> value, ExceptionState& exceptionState)
{
DictionaryOrString impl;
V8DictionaryOrString::toImpl(isolate, value, impl, exceptionState);
return impl;
}
HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap::HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap()
: m_type(SpecificTypeNone)
{
}
PassRefPtrWillBeRawPtr<HTMLImageElement> HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap::getAsHTMLImageElement() const
{
ASSERT(isHTMLImageElement());
return m_htmlImageElement;
}
void HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap::setHTMLImageElement(PassRefPtrWillBeRawPtr<HTMLImageElement> value)
{
ASSERT(isNull());
m_htmlImageElement = value;
m_type = SpecificTypeHTMLImageElement;
}
HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap::fromHTMLImageElement(PassRefPtrWillBeRawPtr<HTMLImageElement> value)
{
HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap container;
container.setHTMLImageElement(value);
return container;
}
PassRefPtrWillBeRawPtr<HTMLVideoElement> HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap::getAsHTMLVideoElement() const
{
ASSERT(isHTMLVideoElement());
return m_htmlVideoElement;
}
void HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap::setHTMLVideoElement(PassRefPtrWillBeRawPtr<HTMLVideoElement> value)
{
ASSERT(isNull());
m_htmlVideoElement = value;
m_type = SpecificTypeHTMLVideoElement;
}
HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap::fromHTMLVideoElement(PassRefPtrWillBeRawPtr<HTMLVideoElement> value)
{
HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap container;
container.setHTMLVideoElement(value);
return container;
}
PassRefPtrWillBeRawPtr<HTMLCanvasElement> HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap::getAsHTMLCanvasElement() const
{
ASSERT(isHTMLCanvasElement());
return m_htmlCanvasElement;
}
void HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap::setHTMLCanvasElement(PassRefPtrWillBeRawPtr<HTMLCanvasElement> value)
{
ASSERT(isNull());
m_htmlCanvasElement = value;
m_type = SpecificTypeHTMLCanvasElement;
}
HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap::fromHTMLCanvasElement(PassRefPtrWillBeRawPtr<HTMLCanvasElement> value)
{
HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap container;
container.setHTMLCanvasElement(value);
return container;
}
PassRefPtrWillBeRawPtr<ImageBitmap> HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap::getAsImageBitmap() const
{
ASSERT(isImageBitmap());
return m_imageBitmap;
}
void HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap::setImageBitmap(PassRefPtrWillBeRawPtr<ImageBitmap> value)
{
ASSERT(isNull());
m_imageBitmap = value;
m_type = SpecificTypeImageBitmap;
}
HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap::fromImageBitmap(PassRefPtrWillBeRawPtr<ImageBitmap> value)
{
HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap container;
container.setImageBitmap(value);
return container;
}
HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap::HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap(const HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap&) = default;
HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap::~HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap() = default;
HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap& HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap::operator=(const HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap&) = default;
DEFINE_TRACE(HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap)
{
visitor->trace(m_htmlImageElement);
visitor->trace(m_htmlVideoElement);
visitor->trace(m_htmlCanvasElement);
visitor->trace(m_imageBitmap);
}
void V8HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap::toImpl(v8::Isolate* isolate, v8::Local<v8::Value> v8Value, HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap& impl, ExceptionState& exceptionState)
{
if (v8Value.IsEmpty())
return;
if (V8HTMLImageElement::hasInstance(v8Value, isolate)) {
RefPtrWillBeRawPtr<HTMLImageElement> cppValue = V8HTMLImageElement::toImpl(v8::Local<v8::Object>::Cast(v8Value));
impl.setHTMLImageElement(cppValue);
return;
}
if (V8HTMLVideoElement::hasInstance(v8Value, isolate)) {
RefPtrWillBeRawPtr<HTMLVideoElement> cppValue = V8HTMLVideoElement::toImpl(v8::Local<v8::Object>::Cast(v8Value));
impl.setHTMLVideoElement(cppValue);
return;
}
if (V8HTMLCanvasElement::hasInstance(v8Value, isolate)) {
RefPtrWillBeRawPtr<HTMLCanvasElement> cppValue = V8HTMLCanvasElement::toImpl(v8::Local<v8::Object>::Cast(v8Value));
impl.setHTMLCanvasElement(cppValue);
return;
}
if (V8ImageBitmap::hasInstance(v8Value, isolate)) {
RefPtrWillBeRawPtr<ImageBitmap> cppValue = V8ImageBitmap::toImpl(v8::Local<v8::Object>::Cast(v8Value));
impl.setImageBitmap(cppValue);
return;
}
exceptionState.throwTypeError("The provided value is not of type '(HTMLImageElement or HTMLVideoElement or HTMLCanvasElement or ImageBitmap)'");
}
v8::Local<v8::Value> toV8(const HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap& impl, v8::Local<v8::Object> creationContext, v8::Isolate* isolate)
{
switch (impl.m_type) {
case HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap::SpecificTypeNone:
return v8::Null(isolate);
case HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap::SpecificTypeHTMLImageElement:
return toV8(impl.getAsHTMLImageElement(), creationContext, isolate);
case HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap::SpecificTypeHTMLVideoElement:
return toV8(impl.getAsHTMLVideoElement(), creationContext, isolate);
case HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap::SpecificTypeHTMLCanvasElement:
return toV8(impl.getAsHTMLCanvasElement(), creationContext, isolate);
case HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap::SpecificTypeImageBitmap:
return toV8(impl.getAsImageBitmap(), creationContext, isolate);
default:
ASSERT_NOT_REACHED();
}
return v8::Local<v8::Value>();
}
HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap NativeValueTraits<HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap>::nativeValue(v8::Isolate* isolate, v8::Local<v8::Value> value, ExceptionState& exceptionState)
{
HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap impl;
V8HTMLImageElementOrHTMLVideoElementOrHTMLCanvasElementOrImageBitmap::toImpl(isolate, value, impl, exceptionState);
return impl;
}
RequestOrUSVString::RequestOrUSVString()
: m_type(SpecificTypeNone)
{
}
Request* RequestOrUSVString::getAsRequest() const
{
ASSERT(isRequest());
return m_request;
}
void RequestOrUSVString::setRequest(Request* value)
{
ASSERT(isNull());
m_request = value;
m_type = SpecificTypeRequest;
}
RequestOrUSVString RequestOrUSVString::fromRequest(Request* value)
{
RequestOrUSVString container;
container.setRequest(value);
return container;
}
String RequestOrUSVString::getAsUSVString() const
{
ASSERT(isUSVString());
return m_uSVString;
}
void RequestOrUSVString::setUSVString(String value)
{
ASSERT(isNull());
m_uSVString = value;
m_type = SpecificTypeUSVString;
}
RequestOrUSVString RequestOrUSVString::fromUSVString(String value)
{
RequestOrUSVString container;
container.setUSVString(value);
return container;
}
RequestOrUSVString::RequestOrUSVString(const RequestOrUSVString&) = default;
RequestOrUSVString::~RequestOrUSVString() = default;
RequestOrUSVString& RequestOrUSVString::operator=(const RequestOrUSVString&) = default;
DEFINE_TRACE(RequestOrUSVString)
{
visitor->trace(m_request);
}
void V8RequestOrUSVString::toImpl(v8::Isolate* isolate, v8::Local<v8::Value> v8Value, RequestOrUSVString& impl, ExceptionState& exceptionState)
{
if (v8Value.IsEmpty())
return;
if (V8Request::hasInstance(v8Value, isolate)) {
RawPtr<Request> cppValue = V8Request::toImpl(v8::Local<v8::Object>::Cast(v8Value));
impl.setRequest(cppValue);
return;
}
{
V8StringResource<> cppValue = toUSVString(isolate, v8Value, exceptionState);
if (exceptionState.hadException())
return;
impl.setUSVString(cppValue);
return;
}
}
v8::Local<v8::Value> toV8(const RequestOrUSVString& impl, v8::Local<v8::Object> creationContext, v8::Isolate* isolate)
{
switch (impl.m_type) {
case RequestOrUSVString::SpecificTypeNone:
return v8::Null(isolate);
case RequestOrUSVString::SpecificTypeRequest:
return toV8(impl.getAsRequest(), creationContext, isolate);
case RequestOrUSVString::SpecificTypeUSVString:
return v8String(isolate, impl.getAsUSVString());
default:
ASSERT_NOT_REACHED();
}
return v8::Local<v8::Value>();
}
RequestOrUSVString NativeValueTraits<RequestOrUSVString>::nativeValue(v8::Isolate* isolate, v8::Local<v8::Value> value, ExceptionState& exceptionState)
{
RequestOrUSVString impl;
V8RequestOrUSVString::toImpl(isolate, value, impl, exceptionState);
return impl;
}
ServiceWorkerOrMessagePort::ServiceWorkerOrMessagePort()
: m_type(SpecificTypeNone)
{
}
PassRefPtrWillBeRawPtr<ServiceWorker> ServiceWorkerOrMessagePort::getAsServiceWorker() const
{
ASSERT(isServiceWorker());
return m_serviceWorker;
}
void ServiceWorkerOrMessagePort::setServiceWorker(PassRefPtrWillBeRawPtr<ServiceWorker> value)
{
ASSERT(isNull());
m_serviceWorker = value;
m_type = SpecificTypeServiceWorker;
}
ServiceWorkerOrMessagePort ServiceWorkerOrMessagePort::fromServiceWorker(PassRefPtrWillBeRawPtr<ServiceWorker> value)
{
ServiceWorkerOrMessagePort container;
container.setServiceWorker(value);
return container;
}
MessagePort* ServiceWorkerOrMessagePort::getAsMessagePort() const
{
ASSERT(isMessagePort());
return m_messagePort;
}
void ServiceWorkerOrMessagePort::setMessagePort(MessagePort* value)
{
ASSERT(isNull());
m_messagePort = value;
m_type = SpecificTypeMessagePort;
}
ServiceWorkerOrMessagePort ServiceWorkerOrMessagePort::fromMessagePort(MessagePort* value)
{
ServiceWorkerOrMessagePort container;
container.setMessagePort(value);
return container;
}
ServiceWorkerOrMessagePort::ServiceWorkerOrMessagePort(const ServiceWorkerOrMessagePort&) = default;
ServiceWorkerOrMessagePort::~ServiceWorkerOrMessagePort() = default;
ServiceWorkerOrMessagePort& ServiceWorkerOrMessagePort::operator=(const ServiceWorkerOrMessagePort&) = default;
DEFINE_TRACE(ServiceWorkerOrMessagePort)
{
visitor->trace(m_serviceWorker);
visitor->trace(m_messagePort);
}
void V8ServiceWorkerOrMessagePort::toImpl(v8::Isolate* isolate, v8::Local<v8::Value> v8Value, ServiceWorkerOrMessagePort& impl, ExceptionState& exceptionState)
{
if (v8Value.IsEmpty())
return;
if (V8ServiceWorker::hasInstance(v8Value, isolate)) {
RefPtrWillBeRawPtr<ServiceWorker> cppValue = V8ServiceWorker::toImpl(v8::Local<v8::Object>::Cast(v8Value));
impl.setServiceWorker(cppValue);
return;
}
if (V8MessagePort::hasInstance(v8Value, isolate)) {
RawPtr<MessagePort> cppValue = V8MessagePort::toImpl(v8::Local<v8::Object>::Cast(v8Value));
impl.setMessagePort(cppValue);
return;
}
exceptionState.throwTypeError("The provided value is not of type '(ServiceWorker or MessagePort)'");
}
v8::Local<v8::Value> toV8(const ServiceWorkerOrMessagePort& impl, v8::Local<v8::Object> creationContext, v8::Isolate* isolate)
{
switch (impl.m_type) {
case ServiceWorkerOrMessagePort::SpecificTypeNone:
return v8::Null(isolate);
case ServiceWorkerOrMessagePort::SpecificTypeServiceWorker:
return toV8(impl.getAsServiceWorker(), creationContext, isolate);
case ServiceWorkerOrMessagePort::SpecificTypeMessagePort:
return toV8(impl.getAsMessagePort(), creationContext, isolate);
default:
ASSERT_NOT_REACHED();
}
return v8::Local<v8::Value>();
}
ServiceWorkerOrMessagePort NativeValueTraits<ServiceWorkerOrMessagePort>::nativeValue(v8::Isolate* isolate, v8::Local<v8::Value> value, ExceptionState& exceptionState)
{
ServiceWorkerOrMessagePort impl;
V8ServiceWorkerOrMessagePort::toImpl(isolate, value, impl, exceptionState);
return impl;
}
StringOrCanvasGradientOrCanvasPattern::StringOrCanvasGradientOrCanvasPattern()
: m_type(SpecificTypeNone)
{
}
String StringOrCanvasGradientOrCanvasPattern::getAsString() const
{
ASSERT(isString());
return m_string;
}
void StringOrCanvasGradientOrCanvasPattern::setString(String value)
{
ASSERT(isNull());
m_string = value;
m_type = SpecificTypeString;
}
StringOrCanvasGradientOrCanvasPattern StringOrCanvasGradientOrCanvasPattern::fromString(String value)
{
StringOrCanvasGradientOrCanvasPattern container;
container.setString(value);
return container;
}
CanvasGradient* StringOrCanvasGradientOrCanvasPattern::getAsCanvasGradient() const
{
ASSERT(isCanvasGradient());
return m_canvasGradient;
}
void StringOrCanvasGradientOrCanvasPattern::setCanvasGradient(CanvasGradient* value)
{
ASSERT(isNull());
m_canvasGradient = value;
m_type = SpecificTypeCanvasGradient;
}
StringOrCanvasGradientOrCanvasPattern StringOrCanvasGradientOrCanvasPattern::fromCanvasGradient(CanvasGradient* value)
{
StringOrCanvasGradientOrCanvasPattern container;
container.setCanvasGradient(value);
return container;
}
CanvasPattern* StringOrCanvasGradientOrCanvasPattern::getAsCanvasPattern() const
{
ASSERT(isCanvasPattern());
return m_canvasPattern;
}
void StringOrCanvasGradientOrCanvasPattern::setCanvasPattern(CanvasPattern* value)
{
ASSERT(isNull());
m_canvasPattern = value;
m_type = SpecificTypeCanvasPattern;
}
StringOrCanvasGradientOrCanvasPattern StringOrCanvasGradientOrCanvasPattern::fromCanvasPattern(CanvasPattern* value)
{
StringOrCanvasGradientOrCanvasPattern container;
container.setCanvasPattern(value);
return container;
}
StringOrCanvasGradientOrCanvasPattern::StringOrCanvasGradientOrCanvasPattern(const StringOrCanvasGradientOrCanvasPattern&) = default;
StringOrCanvasGradientOrCanvasPattern::~StringOrCanvasGradientOrCanvasPattern() = default;
StringOrCanvasGradientOrCanvasPattern& StringOrCanvasGradientOrCanvasPattern::operator=(const StringOrCanvasGradientOrCanvasPattern&) = default;
DEFINE_TRACE(StringOrCanvasGradientOrCanvasPattern)
{
visitor->trace(m_canvasGradient);
visitor->trace(m_canvasPattern);
}
void V8StringOrCanvasGradientOrCanvasPattern::toImpl(v8::Isolate* isolate, v8::Local<v8::Value> v8Value, StringOrCanvasGradientOrCanvasPattern& impl, ExceptionState& exceptionState)
{
if (v8Value.IsEmpty())
return;
if (V8CanvasGradient::hasInstance(v8Value, isolate)) {
RawPtr<CanvasGradient> cppValue = V8CanvasGradient::toImpl(v8::Local<v8::Object>::Cast(v8Value));
impl.setCanvasGradient(cppValue);
return;
}
if (V8CanvasPattern::hasInstance(v8Value, isolate)) {
RawPtr<CanvasPattern> cppValue = V8CanvasPattern::toImpl(v8::Local<v8::Object>::Cast(v8Value));
impl.setCanvasPattern(cppValue);
return;
}
{
V8StringResource<> cppValue = v8Value;
if (!cppValue.prepare(exceptionState))
return;
impl.setString(cppValue);
return;
}
}
v8::Local<v8::Value> toV8(const StringOrCanvasGradientOrCanvasPattern& impl, v8::Local<v8::Object> creationContext, v8::Isolate* isolate)
{
switch (impl.m_type) {
case StringOrCanvasGradientOrCanvasPattern::SpecificTypeNone:
return v8::Null(isolate);
case StringOrCanvasGradientOrCanvasPattern::SpecificTypeString:
return v8String(isolate, impl.getAsString());
case StringOrCanvasGradientOrCanvasPattern::SpecificTypeCanvasGradient:
return toV8(impl.getAsCanvasGradient(), creationContext, isolate);
case StringOrCanvasGradientOrCanvasPattern::SpecificTypeCanvasPattern:
return toV8(impl.getAsCanvasPattern(), creationContext, isolate);
default:
ASSERT_NOT_REACHED();
}
return v8::Local<v8::Value>();
}
StringOrCanvasGradientOrCanvasPattern NativeValueTraits<StringOrCanvasGradientOrCanvasPattern>::nativeValue(v8::Isolate* isolate, v8::Local<v8::Value> value, ExceptionState& exceptionState)
{
StringOrCanvasGradientOrCanvasPattern impl;
V8StringOrCanvasGradientOrCanvasPattern::toImpl(isolate, value, impl, exceptionState);
return impl;
}
StringOrStringSequence::StringOrStringSequence()
: m_type(SpecificTypeNone)
{
}
String StringOrStringSequence::getAsString() const
{
ASSERT(isString());
return m_string;
}
void StringOrStringSequence::setString(String value)
{
ASSERT(isNull());
m_string = value;
m_type = SpecificTypeString;
}
StringOrStringSequence StringOrStringSequence::fromString(String value)
{
StringOrStringSequence container;
container.setString(value);
return container;
}
const Vector<String>& StringOrStringSequence::getAsStringSequence() const
{
ASSERT(isStringSequence());
return m_stringSequence;
}
void StringOrStringSequence::setStringSequence(const Vector<String>& value)
{
ASSERT(isNull());
m_stringSequence = value;
m_type = SpecificTypeStringSequence;
}
StringOrStringSequence StringOrStringSequence::fromStringSequence(const Vector<String>& value)
{
StringOrStringSequence container;
container.setStringSequence(value);
return container;
}
StringOrStringSequence::StringOrStringSequence(const StringOrStringSequence&) = default;
StringOrStringSequence::~StringOrStringSequence() = default;
StringOrStringSequence& StringOrStringSequence::operator=(const StringOrStringSequence&) = default;
DEFINE_TRACE(StringOrStringSequence)
{
}
void V8StringOrStringSequence::toImpl(v8::Isolate* isolate, v8::Local<v8::Value> v8Value, StringOrStringSequence& impl, ExceptionState& exceptionState)
{
if (v8Value.IsEmpty())
return;
if (v8Value->IsArray()) {
Vector<String> cppValue = toImplArray<Vector<String>>(v8Value, 0, isolate, exceptionState);
if (exceptionState.hadException())
return;
impl.setStringSequence(cppValue);
return;
}
{
V8StringResource<> cppValue = v8Value;
if (!cppValue.prepare(exceptionState))
return;
impl.setString(cppValue);
return;
}
}
v8::Local<v8::Value> toV8(const StringOrStringSequence& impl, v8::Local<v8::Object> creationContext, v8::Isolate* isolate)
{
switch (impl.m_type) {
case StringOrStringSequence::SpecificTypeNone:
return v8::Null(isolate);
case StringOrStringSequence::SpecificTypeString:
return v8String(isolate, impl.getAsString());
case StringOrStringSequence::SpecificTypeStringSequence:
return toV8(impl.getAsStringSequence(), creationContext, isolate);
default:
ASSERT_NOT_REACHED();
}
return v8::Local<v8::Value>();
}
StringOrStringSequence NativeValueTraits<StringOrStringSequence>::nativeValue(v8::Isolate* isolate, v8::Local<v8::Value> value, ExceptionState& exceptionState)
{
StringOrStringSequence impl;
V8StringOrStringSequence::toImpl(isolate, value, impl, exceptionState);
return impl;
}
StringOrStringSequenceOrDOMStringList::StringOrStringSequenceOrDOMStringList()
: m_type(SpecificTypeNone)
{
}
String StringOrStringSequenceOrDOMStringList::getAsString() const
{
ASSERT(isString());
return m_string;
}
void StringOrStringSequenceOrDOMStringList::setString(String value)
{
ASSERT(isNull());
m_string = value;
m_type = SpecificTypeString;
}
StringOrStringSequenceOrDOMStringList StringOrStringSequenceOrDOMStringList::fromString(String value)
{
StringOrStringSequenceOrDOMStringList container;
container.setString(value);
return container;
}
const Vector<String>& StringOrStringSequenceOrDOMStringList::getAsStringSequence() const
{
ASSERT(isStringSequence());
return m_stringSequence;
}
void StringOrStringSequenceOrDOMStringList::setStringSequence(const Vector<String>& value)
{
ASSERT(isNull());
m_stringSequence = value;
m_type = SpecificTypeStringSequence;
}
StringOrStringSequenceOrDOMStringList StringOrStringSequenceOrDOMStringList::fromStringSequence(const Vector<String>& value)
{
StringOrStringSequenceOrDOMStringList container;
container.setStringSequence(value);
return container;
}
PassRefPtrWillBeRawPtr<DOMStringList> StringOrStringSequenceOrDOMStringList::getAsDOMStringList() const
{
ASSERT(isDOMStringList());
return m_dOMStringList;
}
void StringOrStringSequenceOrDOMStringList::setDOMStringList(PassRefPtrWillBeRawPtr<DOMStringList> value)
{
ASSERT(isNull());
m_dOMStringList = value;
m_type = SpecificTypeDOMStringList;
}
StringOrStringSequenceOrDOMStringList StringOrStringSequenceOrDOMStringList::fromDOMStringList(PassRefPtrWillBeRawPtr<DOMStringList> value)
{
StringOrStringSequenceOrDOMStringList container;
container.setDOMStringList(value);
return container;
}
StringOrStringSequenceOrDOMStringList::StringOrStringSequenceOrDOMStringList(const StringOrStringSequenceOrDOMStringList&) = default;
StringOrStringSequenceOrDOMStringList::~StringOrStringSequenceOrDOMStringList() = default;
StringOrStringSequenceOrDOMStringList& StringOrStringSequenceOrDOMStringList::operator=(const StringOrStringSequenceOrDOMStringList&) = default;
DEFINE_TRACE(StringOrStringSequenceOrDOMStringList)
{
visitor->trace(m_dOMStringList);
}
void V8StringOrStringSequenceOrDOMStringList::toImpl(v8::Isolate* isolate, v8::Local<v8::Value> v8Value, StringOrStringSequenceOrDOMStringList& impl, ExceptionState& exceptionState)
{
if (v8Value.IsEmpty())
return;
if (V8DOMStringList::hasInstance(v8Value, isolate)) {
RefPtrWillBeRawPtr<DOMStringList> cppValue = V8DOMStringList::toImpl(v8::Local<v8::Object>::Cast(v8Value));
impl.setDOMStringList(cppValue);
return;
}
if (v8Value->IsArray()) {
Vector<String> cppValue = toImplArray<Vector<String>>(v8Value, 0, isolate, exceptionState);
if (exceptionState.hadException())
return;
impl.setStringSequence(cppValue);
return;
}
{
V8StringResource<> cppValue = v8Value;
if (!cppValue.prepare(exceptionState))
return;
impl.setString(cppValue);
return;
}
}
v8::Local<v8::Value> toV8(const StringOrStringSequenceOrDOMStringList& impl, v8::Local<v8::Object> creationContext, v8::Isolate* isolate)
{
switch (impl.m_type) {
case StringOrStringSequenceOrDOMStringList::SpecificTypeNone:
return v8::Null(isolate);
case StringOrStringSequenceOrDOMStringList::SpecificTypeString:
return v8String(isolate, impl.getAsString());
case StringOrStringSequenceOrDOMStringList::SpecificTypeStringSequence:
return toV8(impl.getAsStringSequence(), creationContext, isolate);
case StringOrStringSequenceOrDOMStringList::SpecificTypeDOMStringList:
return toV8(impl.getAsDOMStringList(), creationContext, isolate);
default:
ASSERT_NOT_REACHED();
}
return v8::Local<v8::Value>();
}
StringOrStringSequenceOrDOMStringList NativeValueTraits<StringOrStringSequenceOrDOMStringList>::nativeValue(v8::Isolate* isolate, v8::Local<v8::Value> value, ExceptionState& exceptionState)
{
StringOrStringSequenceOrDOMStringList impl;
V8StringOrStringSequenceOrDOMStringList::toImpl(isolate, value, impl, exceptionState);
return impl;
}
StringOrUnsignedLong::StringOrUnsignedLong()
: m_type(SpecificTypeNone)
{
}
String StringOrUnsignedLong::getAsString() const
{
ASSERT(isString());
return m_string;
}
void StringOrUnsignedLong::setString(String value)
{
ASSERT(isNull());
m_string = value;
m_type = SpecificTypeString;
}
StringOrUnsignedLong StringOrUnsignedLong::fromString(String value)
{
StringOrUnsignedLong container;
container.setString(value);
return container;
}
unsigned StringOrUnsignedLong::getAsUnsignedLong() const
{
ASSERT(isUnsignedLong());
return m_unsignedLong;
}
void StringOrUnsignedLong::setUnsignedLong(unsigned value)
{
ASSERT(isNull());
m_unsignedLong = value;
m_type = SpecificTypeUnsignedLong;
}
StringOrUnsignedLong StringOrUnsignedLong::fromUnsignedLong(unsigned value)
{
StringOrUnsignedLong container;
container.setUnsignedLong(value);
return container;
}
StringOrUnsignedLong::StringOrUnsignedLong(const StringOrUnsignedLong&) = default;
StringOrUnsignedLong::~StringOrUnsignedLong() = default;
StringOrUnsignedLong& StringOrUnsignedLong::operator=(const StringOrUnsignedLong&) = default;
DEFINE_TRACE(StringOrUnsignedLong)
{
}
void V8StringOrUnsignedLong::toImpl(v8::Isolate* isolate, v8::Local<v8::Value> v8Value, StringOrUnsignedLong& impl, ExceptionState& exceptionState)
{
if (v8Value.IsEmpty())
return;
if (v8Value->IsNumber()) {
unsigned cppValue = toUInt32(isolate, v8Value, NormalConversion, exceptionState);
if (exceptionState.hadException())
return;
impl.setUnsignedLong(cppValue);
return;
}
{
V8StringResource<> cppValue = v8Value;
if (!cppValue.prepare(exceptionState))
return;
impl.setString(cppValue);
return;
}
}
v8::Local<v8::Value> toV8(const StringOrUnsignedLong& impl, v8::Local<v8::Object> creationContext, v8::Isolate* isolate)
{
switch (impl.m_type) {
case StringOrUnsignedLong::SpecificTypeNone:
return v8::Null(isolate);
case StringOrUnsignedLong::SpecificTypeString:
return v8String(isolate, impl.getAsString());
case StringOrUnsignedLong::SpecificTypeUnsignedLong:
return v8::Integer::NewFromUnsigned(isolate, impl.getAsUnsignedLong());
default:
ASSERT_NOT_REACHED();
}
return v8::Local<v8::Value>();
}
StringOrUnsignedLong NativeValueTraits<StringOrUnsignedLong>::nativeValue(v8::Isolate* isolate, v8::Local<v8::Value> value, ExceptionState& exceptionState)
{
StringOrUnsignedLong impl;
V8StringOrUnsignedLong::toImpl(isolate, value, impl, exceptionState);
return impl;
}
UnsignedLongLongOrString::UnsignedLongLongOrString()
: m_type(SpecificTypeNone)
{
}
unsigned long long UnsignedLongLongOrString::getAsUnsignedLongLong() const
{
ASSERT(isUnsignedLongLong());
return m_unsignedLongLong;
}
void UnsignedLongLongOrString::setUnsignedLongLong(unsigned long long value)
{
ASSERT(isNull());
m_unsignedLongLong = value;
m_type = SpecificTypeUnsignedLongLong;
}
UnsignedLongLongOrString UnsignedLongLongOrString::fromUnsignedLongLong(unsigned long long value)
{
UnsignedLongLongOrString container;
container.setUnsignedLongLong(value);
return container;
}
String UnsignedLongLongOrString::getAsString() const
{
ASSERT(isString());
return m_string;
}
void UnsignedLongLongOrString::setString(String value)
{
ASSERT(isNull());
m_string = value;
m_type = SpecificTypeString;
}
UnsignedLongLongOrString UnsignedLongLongOrString::fromString(String value)
{
UnsignedLongLongOrString container;
container.setString(value);
return container;
}
UnsignedLongLongOrString::UnsignedLongLongOrString(const UnsignedLongLongOrString&) = default;
UnsignedLongLongOrString::~UnsignedLongLongOrString() = default;
UnsignedLongLongOrString& UnsignedLongLongOrString::operator=(const UnsignedLongLongOrString&) = default;
DEFINE_TRACE(UnsignedLongLongOrString)
{
}
void V8UnsignedLongLongOrString::toImpl(v8::Isolate* isolate, v8::Local<v8::Value> v8Value, UnsignedLongLongOrString& impl, ExceptionState& exceptionState)
{
if (v8Value.IsEmpty())
return;
if (v8Value->IsNumber()) {
unsigned long long cppValue = toUInt64(isolate, v8Value, NormalConversion, exceptionState);
if (exceptionState.hadException())
return;
impl.setUnsignedLongLong(cppValue);
return;
}
{
V8StringResource<> cppValue = v8Value;
if (!cppValue.prepare(exceptionState))
return;
impl.setString(cppValue);
return;
}
}
v8::Local<v8::Value> toV8(const UnsignedLongLongOrString& impl, v8::Local<v8::Object> creationContext, v8::Isolate* isolate)
{
switch (impl.m_type) {
case UnsignedLongLongOrString::SpecificTypeNone:
return v8::Null(isolate);
case UnsignedLongLongOrString::SpecificTypeUnsignedLongLong:
return v8::Number::New(isolate, static_cast<double>(impl.getAsUnsignedLongLong()));
case UnsignedLongLongOrString::SpecificTypeString:
return v8String(isolate, impl.getAsString());
default:
ASSERT_NOT_REACHED();
}
return v8::Local<v8::Value>();
}
UnsignedLongLongOrString NativeValueTraits<UnsignedLongLongOrString>::nativeValue(v8::Isolate* isolate, v8::Local<v8::Value> value, ExceptionState& exceptionState)
{
UnsignedLongLongOrString impl;
V8UnsignedLongLongOrString::toImpl(isolate, value, impl, exceptionState);
return impl;
}
UnsignedLongOrUnsignedLongSequence::UnsignedLongOrUnsignedLongSequence()
: m_type(SpecificTypeNone)
{
}
unsigned UnsignedLongOrUnsignedLongSequence::getAsUnsignedLong() const
{
ASSERT(isUnsignedLong());
return m_unsignedLong;
}
void UnsignedLongOrUnsignedLongSequence::setUnsignedLong(unsigned value)
{
ASSERT(isNull());
m_unsignedLong = value;
m_type = SpecificTypeUnsignedLong;
}
UnsignedLongOrUnsignedLongSequence UnsignedLongOrUnsignedLongSequence::fromUnsignedLong(unsigned value)
{
UnsignedLongOrUnsignedLongSequence container;
container.setUnsignedLong(value);
return container;
}
const Vector<unsigned>& UnsignedLongOrUnsignedLongSequence::getAsUnsignedLongSequence() const
{
ASSERT(isUnsignedLongSequence());
return m_unsignedLongSequence;
}
void UnsignedLongOrUnsignedLongSequence::setUnsignedLongSequence(const Vector<unsigned>& value)
{
ASSERT(isNull());
m_unsignedLongSequence = value;
m_type = SpecificTypeUnsignedLongSequence;
}
UnsignedLongOrUnsignedLongSequence UnsignedLongOrUnsignedLongSequence::fromUnsignedLongSequence(const Vector<unsigned>& value)
{
UnsignedLongOrUnsignedLongSequence container;
container.setUnsignedLongSequence(value);
return container;
}
UnsignedLongOrUnsignedLongSequence::UnsignedLongOrUnsignedLongSequence(const UnsignedLongOrUnsignedLongSequence&) = default;
UnsignedLongOrUnsignedLongSequence::~UnsignedLongOrUnsignedLongSequence() = default;
UnsignedLongOrUnsignedLongSequence& UnsignedLongOrUnsignedLongSequence::operator=(const UnsignedLongOrUnsignedLongSequence&) = default;
DEFINE_TRACE(UnsignedLongOrUnsignedLongSequence)
{
}
void V8UnsignedLongOrUnsignedLongSequence::toImpl(v8::Isolate* isolate, v8::Local<v8::Value> v8Value, UnsignedLongOrUnsignedLongSequence& impl, ExceptionState& exceptionState)
{
if (v8Value.IsEmpty())
return;
if (v8Value->IsArray()) {
Vector<unsigned> cppValue = toImplArray<Vector<unsigned>>(v8Value, 0, isolate, exceptionState);
if (exceptionState.hadException())
return;
impl.setUnsignedLongSequence(cppValue);
return;
}
if (v8Value->IsNumber()) {
unsigned cppValue = toUInt32(isolate, v8Value, NormalConversion, exceptionState);
if (exceptionState.hadException())
return;
impl.setUnsignedLong(cppValue);
return;
}
{
unsigned cppValue = toUInt32(isolate, v8Value, NormalConversion, exceptionState);
if (exceptionState.hadException())
return;
impl.setUnsignedLong(cppValue);
return;
}
}
v8::Local<v8::Value> toV8(const UnsignedLongOrUnsignedLongSequence& impl, v8::Local<v8::Object> creationContext, v8::Isolate* isolate)
{
switch (impl.m_type) {
case UnsignedLongOrUnsignedLongSequence::SpecificTypeNone:
return v8::Null(isolate);
case UnsignedLongOrUnsignedLongSequence::SpecificTypeUnsignedLong:
return v8::Integer::NewFromUnsigned(isolate, impl.getAsUnsignedLong());
case UnsignedLongOrUnsignedLongSequence::SpecificTypeUnsignedLongSequence:
return toV8(impl.getAsUnsignedLongSequence(), creationContext, isolate);
default:
ASSERT_NOT_REACHED();
}
return v8::Local<v8::Value>();
}
UnsignedLongOrUnsignedLongSequence NativeValueTraits<UnsignedLongOrUnsignedLongSequence>::nativeValue(v8::Isolate* isolate, v8::Local<v8::Value> value, ExceptionState& exceptionState)
{
UnsignedLongOrUnsignedLongSequence impl;
V8UnsignedLongOrUnsignedLongSequence::toImpl(isolate, value, impl, exceptionState);
return impl;
}
} // namespace blink
| weolar/miniblink49 | gen/blink/bindings/modules/v8/UnionTypesModules.cpp | C++ | apache-2.0 | 55,403 |
// Copyright 2015 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package command
import (
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"os"
"os/signal"
"time"
"github.com/codegangsta/cli"
"github.com/coreos/etcd/client"
"golang.org/x/net/context"
)
func NewClusterHealthCommand() cli.Command {
return cli.Command{
Name: "cluster-health",
Usage: "check the health of the etcd cluster",
ArgsUsage: " ",
Flags: []cli.Flag{
			cli.BoolFlag{Name: "forever, f", Usage: "continuously check the health every 10 seconds until CTRL+C"},
},
Action: handleClusterHealth,
}
}
func handleClusterHealth(c *cli.Context) {
forever := c.Bool("forever")
if forever {
sigch := make(chan os.Signal, 1)
signal.Notify(sigch, os.Interrupt)
go func() {
<-sigch
os.Exit(0)
}()
}
tr, err := getTransport(c)
if err != nil {
handleError(ExitServerError, err)
}
hc := http.Client{
Transport: tr,
}
cln := mustNewClientNoSync(c)
mi := client.NewMembersAPI(cln)
ms, err := mi.List(context.TODO())
if err != nil {
fmt.Println("cluster may be unhealthy: failed to list members")
handleError(ExitServerError, err)
}
for {
health := false
for _, m := range ms {
if len(m.ClientURLs) == 0 {
fmt.Printf("member %s is unreachable: no available published client urls\n", m.ID)
continue
}
checked := false
for _, url := range m.ClientURLs {
resp, err := hc.Get(url + "/health")
if err != nil {
fmt.Printf("failed to check the health of member %s on %s: %v\n", m.ID, url, err)
continue
}
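				// Added note (not part of the original source): the health flag is decoded
				// twice, presumably because servers may report it either as a JSON string
				// ("true") or as a boolean; both shapes are accepted below.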
result := struct{ Health string }{}
nresult := struct{ Health bool }{}
bytes, err := ioutil.ReadAll(resp.Body)
if err != nil {
fmt.Printf("failed to check the health of member %s on %s: %v\n", m.ID, url, err)
continue
}
resp.Body.Close()
err = json.Unmarshal(bytes, &result)
if err != nil {
err = json.Unmarshal(bytes, &nresult)
}
if err != nil {
fmt.Printf("failed to check the health of member %s on %s: %v\n", m.ID, url, err)
continue
}
checked = true
if result.Health == "true" || nresult.Health {
health = true
fmt.Printf("member %s is healthy: got healthy result from %s\n", m.ID, url)
} else {
fmt.Printf("member %s is unhealthy: got unhealthy result from %s\n", m.ID, url)
}
break
}
if !checked {
fmt.Printf("member %s is unreachable: %v are all unreachable\n", m.ID, m.ClientURLs)
}
}
if health {
fmt.Println("cluster is healthy")
} else {
fmt.Println("cluster is unhealthy")
}
if !forever {
if health {
os.Exit(ExitSuccess)
} else {
os.Exit(ExitClusterNotHealthy)
}
}
fmt.Printf("\nnext check after 10 second...\n\n")
time.Sleep(10 * time.Second)
}
}
| dnaeon/etcd | etcdctl/ctlv2/command/cluster_health.go | GO | apache-2.0 | 3,317 |
#set( $symbol_pound = '#' )
#set( $symbol_dollar = '$' )
#set( $symbol_escape = '\' )
package ${package}.dao.account;
import org.springframework.stereotype.Component;
import ${package}.entity.account.Authority;
import org.springside.modules.orm.hibernate.HibernateDao;
/**
 * Generic Hibernate DAO for Authority entities.
*
* @author calvin
*/
@Component
public class AuthorityDao extends HibernateDao<Authority, Long> {
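	// Added note (not in the original template): this subclass only pins the generic
	// parameters; the CRUD operations themselves are presumed to be inherited from
	// HibernateDao<Authority, Long>.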
}
| rakesh4u/springside-sub | tools/maven/archetype/src/main/resources/archetype-resources/src/main/java/dao/account/AuthorityDao.java | Java | apache-2.0 | 423 |
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Controllers for the moderator page."""
from core.controllers import base
from core.domain import acl_decorators
from core.domain import activity_domain
from core.domain import activity_services
from core.domain import email_manager
from core.domain import summary_services
import feconf
class ModeratorPage(base.BaseHandler):
"""The moderator page."""
@acl_decorators.can_access_moderator_page
def get(self):
"""Handles GET requests."""
self.render_template('pages/moderator/moderator.html')
class FeaturedActivitiesHandler(base.BaseHandler):
"""The moderator page handler for featured activities."""
GET_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_JSON
@acl_decorators.can_access_moderator_page
def get(self):
"""Handles GET requests."""
self.render_json({
'featured_activity_references': [
activity_reference.to_dict() for activity_reference in
activity_services.get_featured_activity_references()
],
})
@acl_decorators.can_access_moderator_page
def post(self):
"""Handles POST requests."""
featured_activity_reference_dicts = self.payload.get(
'featured_activity_reference_dicts')
featured_activity_references = [
activity_domain.ActivityReference(
reference_dict['type'], reference_dict['id'])
for reference_dict in featured_activity_reference_dicts]
try:
summary_services.require_activities_to_be_public(
featured_activity_references)
except Exception as e:
raise self.InvalidInputException(e)
activity_services.update_featured_activity_references(
featured_activity_references)
self.render_json({})
class EmailDraftHandler(base.BaseHandler):
"""Provide default email templates for moderator emails."""
GET_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_JSON
@acl_decorators.can_send_moderator_emails
def get(self, action):
"""Handles GET requests."""
self.render_json({
'draft_email_body': (
email_manager.get_draft_moderator_action_email(action)),
})
| himanshu-dixit/oppia | core/controllers/moderator.py | Python | apache-2.0 | 2,847 |
/*
* ====================================================================
*
* The Apache Software License, Version 1.1
*
* Copyright (c) 1999 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution, if
* any, must include the following acknowlegement:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowlegement may appear in the software itself,
* if and wherever such third-party acknowlegements normally appear.
*
* 4. The names "The Jakarta Project", "Tomcat", and "Apache Software
* Foundation" must not be used to endorse or promote products derived
* from this software without prior written permission. For written
* permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache"
* nor may "Apache" appear in their names without prior written
* permission of the Apache Group.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
* [Additional notices, if required by prior licensing conditions]
*
*/
package checkbox;
public class CheckTest {
    String[] b = new String[] { "1", "2", "3", "4" };
public String[] getFruit() {
return b;
}
    public void setFruit(String[] b) {
this.b = b;
}
}
| devjin24/howtomcatworks | bookrefer/jakarta-tomcat-5.0.18-src/jakarta-servletapi-5/jsr152/examples/WEB-INF/classes/checkbox/CheckTest.java | Java | apache-2.0 | 2,940 |
package com.netflix.governator.autobind.scopes;
import com.google.inject.Injector;
import com.google.inject.Stage;
import com.netflix.governator.annotations.AutoBindSingleton;
import com.netflix.governator.guice.LifecycleInjector;
import com.netflix.governator.guice.LifecycleInjectorMode;
import com.netflix.governator.guice.lazy.LazySingleton;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.util.concurrent.atomic.AtomicInteger;
public class TestAutoBindSingletonScopes {
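    // Added note (not in the original source): the nested classes below exercise the
    // eager, explicitly non-eager, and @LazySingleton flavours of @AutoBindSingleton;
    // each static counter records how many times the corresponding constructor ran.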
@AutoBindSingleton
public static class AutoBindEagerSingleton {
public static AtomicInteger counter = new AtomicInteger();
public AutoBindEagerSingleton() {
counter.incrementAndGet();
}
}
@AutoBindSingleton(eager=false)
public static class AutoBindNotEagerSingleton {
public static AtomicInteger counter = new AtomicInteger();
public AutoBindNotEagerSingleton() {
counter.incrementAndGet();
}
}
@AutoBindSingleton
@LazySingleton
public static class AutoBindLazySingleton {
public static AtomicInteger counter = new AtomicInteger();
public AutoBindLazySingleton() {
counter.incrementAndGet();
}
}
@Before
public void before() {
AutoBindEagerSingleton.counter.set(0);
AutoBindLazySingleton.counter.set(0);
AutoBindNotEagerSingleton.counter.set(0);
}
@Test
public void scopesAreHonoredInDevMode() {
Injector injector = LifecycleInjector.builder()
.inStage(Stage.DEVELOPMENT)
.usingBasePackages("com.netflix.governator.autobind.scopes")
.build()
.createInjector();
injector.getInstance(AutoBindEagerSingleton.class);
injector.getInstance(AutoBindEagerSingleton.class);
Assert.assertEquals(1, AutoBindEagerSingleton.counter.get());
Assert.assertEquals(0, AutoBindNotEagerSingleton.counter.get());
Assert.assertEquals(0, AutoBindLazySingleton.counter.get());
}
@Test
public void scopesAreHonoredInProd() {
Injector injector = LifecycleInjector.builder()
.inStage(Stage.PRODUCTION)
.usingBasePackages("com.netflix.governator.autobind.scopes")
.build()
.createInjector();
injector.getInstance(AutoBindEagerSingleton.class);
injector.getInstance(AutoBindEagerSingleton.class);
Assert.assertEquals(1, AutoBindEagerSingleton.counter.get());
Assert.assertEquals(0, AutoBindNotEagerSingleton.counter.get());
Assert.assertEquals(0, AutoBindLazySingleton.counter.get());
}
@Test
public void scopesAreHonoredInDevModeNoChild() {
Injector injector = LifecycleInjector.builder()
.inStage(Stage.DEVELOPMENT)
.withMode(LifecycleInjectorMode.SIMULATED_CHILD_INJECTORS)
.usingBasePackages("com.netflix.governator.autobind.scopes")
.build()
.createInjector();
injector.getInstance(AutoBindEagerSingleton.class);
injector.getInstance(AutoBindEagerSingleton.class);
Assert.assertEquals(1, AutoBindEagerSingleton.counter.get());
Assert.assertEquals(0, AutoBindNotEagerSingleton.counter.get());
Assert.assertEquals(0, AutoBindLazySingleton.counter.get());
}
@Test
public void scopesAreHonoredInProdNoChild() {
Injector injector = LifecycleInjector.builder()
.inStage(Stage.PRODUCTION)
.withMode(LifecycleInjectorMode.SIMULATED_CHILD_INJECTORS)
.usingBasePackages("com.netflix.governator.autobind.scopes")
.build()
.createInjector();
injector.getInstance(AutoBindEagerSingleton.class);
injector.getInstance(AutoBindEagerSingleton.class);
Assert.assertEquals(1, AutoBindEagerSingleton.counter.get());
Assert.assertEquals(0, AutoBindNotEagerSingleton.counter.get());
Assert.assertEquals(0, AutoBindLazySingleton.counter.get());
}
}
| drtechniko/governator | governator-legacy/src/test/java/com/netflix/governator/autobind/scopes/TestAutoBindSingletonScopes.java | Java | apache-2.0 | 4,112 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.parse;
import java.sql.SQLException;
import java.util.Collections;
import java.util.List;
/**
 * Node representing the IS NULL and IS NOT NULL expressions in SQL.
 *
 * @since 0.1
 */
public class IsNullParseNode extends UnaryParseNode {
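    // Illustrative note added by the editor (not in the original source): a parser
    // typically builds this node for "expr IS NULL" (negate == false) or
    // "expr IS NOT NULL" (negate == true); the exact construction site is an assumption.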
private final boolean negate;
IsNullParseNode(ParseNode expr, boolean negate) {
super(expr);
this.negate = negate;
}
public boolean isNegate() {
return negate;
}
@Override
public <T> T accept(ParseNodeVisitor<T> visitor) throws SQLException {
List<T> l = Collections.emptyList();
if (visitor.visitEnter(this)) {
l = acceptChildren(visitor);
}
return visitor.visitLeave(this, l);
}
}
| jffnothing/phoenix-4.0.0-incubating | phoenix-core/src/main/java/org/apache/phoenix/parse/IsNullParseNode.java | Java | apache-2.0 | 1,572 |
package org.batfish.common;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertThat;
import com.google.common.collect.ImmutableSet;
import com.google.common.testing.EqualsTester;
import org.apache.commons.lang3.SerializationUtils;
import org.batfish.common.autocomplete.LocationCompletionMetadata;
import org.batfish.datamodel.Ip;
import org.batfish.datamodel.collections.NodeInterfacePair;
import org.batfish.specifier.InterfaceLocation;
import org.junit.Test;
public class CompletionMetadataTest {
@Test
public void testJavaSerialization() {
CompletionMetadata completionMetadata =
CompletionMetadata.builder()
.setFilterNames(ImmutableSet.of("filter"))
.setInterfaces(ImmutableSet.of(NodeInterfacePair.of("node", "interface")))
.setIps(ImmutableSet.of(Ip.parse("1.1.1.1")))
.setLocations(
ImmutableSet.of(
new LocationCompletionMetadata(
new InterfaceLocation("node", "interface"), true)))
.setMlagIds(ImmutableSet.of("mlag"))
.setNodes(ImmutableSet.of("node"))
.setPrefixes(ImmutableSet.of("prefix"))
.setStructureNames(ImmutableSet.of("structure"))
.setVrfs(ImmutableSet.of("vrf"))
.setZones(ImmutableSet.of("zone"))
.build();
assertThat(SerializationUtils.clone(completionMetadata), equalTo(completionMetadata));
}
@Test
public void testEquals() {
CompletionMetadata.Builder builder = CompletionMetadata.builder();
CompletionMetadata initial = builder.build();
new EqualsTester()
.addEqualityGroup(new Object())
.addEqualityGroup(initial, initial, builder.build(), CompletionMetadata.EMPTY)
.addEqualityGroup(builder.setFilterNames(ImmutableSet.of("filter")).build())
.addEqualityGroup(
builder
.setInterfaces(ImmutableSet.of(NodeInterfacePair.of("node", "interface")))
.build())
.addEqualityGroup(builder.setIps(ImmutableSet.of(Ip.parse("1.1.1.1"))).build())
.addEqualityGroup(
builder
.setLocations(
ImmutableSet.of(
new LocationCompletionMetadata(
new InterfaceLocation("node", "interface"), true)))
.build())
.addEqualityGroup(builder.setMlagIds(ImmutableSet.of("mlag")).build())
.addEqualityGroup(builder.setNodes(ImmutableSet.of("node")).build())
.addEqualityGroup(builder.setPrefixes(ImmutableSet.of("prefix")).build())
.addEqualityGroup(builder.setStructureNames(ImmutableSet.of("structure")).build())
.addEqualityGroup(builder.setVrfs(ImmutableSet.of("vrf")).build())
.addEqualityGroup(builder.setZones(ImmutableSet.of("zone")).build())
.testEquals();
}
}
| arifogel/batfish | projects/batfish-common-protocol/src/test/java/org/batfish/common/CompletionMetadataTest.java | Java | apache-2.0 | 2,905 |
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.emoji.text;
import android.app.Activity;
import android.os.Bundle;
import androidx.emoji.test.R;
public class TestActivity extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_default);
}
}
| AndroidX/androidx | emoji/emoji/src/androidTest/java/androidx/emoji/text/TestActivity.java | Java | apache-2.0 | 953 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.connectors.jdbc;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import org.junit.Before;
import org.junit.Test;
import org.apache.geode.cache.LoaderHelper;
import org.apache.geode.connectors.jdbc.internal.SqlHandler;
import org.apache.geode.internal.cache.InternalCache;
import org.apache.geode.internal.cache.InternalRegion;
import org.apache.geode.test.fake.Fakes;
public class JdbcLoaderTest {
private SqlHandler sqlHandler;
private LoaderHelper loaderHelper;
private JdbcLoader<Object, Object> loader;
private InternalCache cache;
@Before
public void setUp() throws Exception {
cache = Fakes.cache();
sqlHandler = mock(SqlHandler.class);
loaderHelper = mock(LoaderHelper.class);
when(loaderHelper.getRegion()).thenReturn(mock(InternalRegion.class));
loader = new JdbcLoader<>(sqlHandler, cache);
}
@Test
public void loadReadsFromSqlHandler() throws Exception {
loader.load(loaderHelper);
verify(sqlHandler, times(1)).read(any(), any());
}
}
| deepakddixit/incubator-geode | geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/JdbcLoaderTest.java | Java | apache-2.0 | 1,984 |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.ipc;
import org.apache.hadoop.hbase.CompatibilityFactory;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.test.MetricsAssertHelper;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.*;
@Category(SmallTests.class)
public class TestRpcMetrics {
public MetricsAssertHelper HELPER = CompatibilityFactory.getInstance(MetricsAssertHelper.class);
@Test
public void testFactory() {
MetricsHBaseServer masterMetrics = new MetricsHBaseServer("HMaster", new MetricsHBaseServerWrapperStub());
MetricsHBaseServerSource masterSource = masterMetrics.getMetricsSource();
MetricsHBaseServer rsMetrics = new MetricsHBaseServer("HRegionServer", new MetricsHBaseServerWrapperStub());
MetricsHBaseServerSource rsSource = rsMetrics.getMetricsSource();
assertEquals("master", masterSource.getMetricsContext());
assertEquals("regionserver", rsSource.getMetricsContext());
assertEquals("Master,sub=IPC", masterSource.getMetricsJmxContext());
assertEquals("RegionServer,sub=IPC", rsSource.getMetricsJmxContext());
assertEquals("Master", masterSource.getMetricsName());
assertEquals("RegionServer", rsSource.getMetricsName());
}
/**
* This test makes sure that the numbers from a MetricsHBaseServerWrapper are correctly exported
* to hadoop metrics 2 system.
*/
@Test
public void testWrapperSource() {
MetricsHBaseServer mrpc = new MetricsHBaseServer("HMaster", new MetricsHBaseServerWrapperStub());
MetricsHBaseServerSource serverSource = mrpc.getMetricsSource();
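    // Added note (not in the original test): the expected values 101-106 are presumed
    // to be the fixed numbers reported by MetricsHBaseServerWrapperStub.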
HELPER.assertGauge("queueSize", 101, serverSource);
HELPER.assertGauge("numCallsInGeneralQueue", 102, serverSource);
HELPER.assertGauge("numCallsInReplicationQueue", 103, serverSource);
HELPER.assertGauge("numCallsInPriorityQueue", 104, serverSource);
HELPER.assertGauge("numOpenConnections", 105, serverSource);
HELPER.assertGauge("numActiveHandler", 106, serverSource);
}
/**
* Test to make sure that all the actively called method on MetricsHBaseServer work.
*/
@Test
public void testSourceMethods() {
MetricsHBaseServer mrpc = new MetricsHBaseServer("HMaster", new MetricsHBaseServerWrapperStub());
MetricsHBaseServerSource serverSource = mrpc.getMetricsSource();
for (int i=0; i < 12; i++) {
mrpc.authenticationFailure();
}
for (int i=0; i < 13; i++) {
mrpc.authenticationSuccess();
}
HELPER.assertCounter("authenticationFailures", 12, serverSource);
HELPER.assertCounter("authenticationSuccesses", 13, serverSource);
for (int i=0; i < 14; i++) {
mrpc.authorizationSuccess();
}
for (int i=0; i < 15; i++) {
mrpc.authorizationFailure();
}
HELPER.assertCounter("authorizationSuccesses", 14, serverSource);
HELPER.assertCounter("authorizationFailures", 15, serverSource);
mrpc.dequeuedCall(100);
mrpc.processedCall(101);
HELPER.assertCounter("queueCallTime_NumOps", 1, serverSource);
HELPER.assertCounter("processCallTime_NumOps", 1, serverSource);
mrpc.sentBytes(103);
mrpc.sentBytes(103);
mrpc.sentBytes(103);
mrpc.receivedBytes(104);
mrpc.receivedBytes(104);
HELPER.assertCounter("sentBytes", 309, serverSource);
HELPER.assertCounter("receivedBytes", 208, serverSource);
}
}
| grokcoder/pbase | hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcMetrics.java | Java | apache-2.0 | 4,214 |
/*
Copyright 2014 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package resource
import (
"fmt"
"io"
"net/url"
"os"
"strings"
"github.com/GoogleCloudPlatform/kubernetes/pkg/api/meta"
"github.com/GoogleCloudPlatform/kubernetes/pkg/labels"
"github.com/GoogleCloudPlatform/kubernetes/pkg/runtime"
"github.com/GoogleCloudPlatform/kubernetes/pkg/util"
"github.com/GoogleCloudPlatform/kubernetes/pkg/util/errors"
)
// Builder provides convenience functions for taking arguments and parameters
// from the command line and converting them to a list of resources to iterate
// over using the Visitor interface.
type Builder struct {
mapper *Mapper
errs []error
paths []Visitor
stream bool
dir bool
selector labels.Selector
resources []string
namespace string
name string
defaultNamespace bool
requireNamespace bool
flatten bool
latest bool
singleResourceType bool
continueOnError bool
}
// NewBuilder creates a builder that operates on generic objects.
func NewBuilder(mapper meta.RESTMapper, typer runtime.ObjectTyper, clientMapper ClientMapper) *Builder {
return &Builder{
mapper: &Mapper{typer, mapper, clientMapper},
}
}
// FilenameParam accepts parameters passed via filename arguments, which may be URLs, the "-" argument
// indicating STDIN, or paths to files or directories. If ContinueOnError() is set prior to this method
// being called, objects on the path that are unrecognized will be ignored (but logged at V(2)).
func (b *Builder) FilenameParam(paths ...string) *Builder {
for _, s := range paths {
switch {
case s == "-":
b.Stdin()
case strings.Index(s, "http://") == 0 || strings.Index(s, "https://") == 0:
url, err := url.Parse(s)
if err != nil {
b.errs = append(b.errs, fmt.Errorf("the URL passed to filename %q is not valid: %v", s, err))
continue
}
b.URL(url)
default:
b.Path(s)
}
}
return b
}
// URL accepts a number of URLs directly.
func (b *Builder) URL(urls ...*url.URL) *Builder {
for _, u := range urls {
b.paths = append(b.paths, &URLVisitor{
Mapper: b.mapper,
URL: u,
})
}
return b
}
// Stdin will read objects from the standard input. If ContinueOnError() is set
// prior to this method being called, objects in the stream that are unrecognized
// will be ignored (but logged at V(2)).
func (b *Builder) Stdin() *Builder {
return b.Stream(os.Stdin, "STDIN")
}
// Stream will read objects from the provided reader, and if an error occurs will
// include the name string in the error message. If ContinueOnError() is set
// prior to this method being called, objects in the stream that are unrecognized
// will be ignored (but logged at V(2)).
func (b *Builder) Stream(r io.Reader, name string) *Builder {
b.stream = true
b.paths = append(b.paths, NewStreamVisitor(r, b.mapper, name, b.continueOnError))
return b
}
// Path is a set of filesystem paths that may be files containing one or more
// resources. If ContinueOnError() is set prior to this method being called,
// objects on the path that are unrecognized will be ignored (but logged at V(2)).
func (b *Builder) Path(paths ...string) *Builder {
for _, p := range paths {
i, err := os.Stat(p)
if os.IsNotExist(err) {
b.errs = append(b.errs, fmt.Errorf("the path %q does not exist", p))
continue
}
if err != nil {
b.errs = append(b.errs, fmt.Errorf("the path %q cannot be accessed: %v", p, err))
continue
}
var visitor Visitor
if i.IsDir() {
b.dir = true
visitor = &DirectoryVisitor{
Mapper: b.mapper,
Path: p,
Extensions: []string{".json", ".yaml"},
Recursive: false,
IgnoreErrors: b.continueOnError,
}
} else {
visitor = &PathVisitor{
Mapper: b.mapper,
Path: p,
IgnoreErrors: b.continueOnError,
}
}
b.paths = append(b.paths, visitor)
}
return b
}
// ResourceTypes is a list of types of resources to operate on, when listing objects on
// the server or retrieving objects that match a selector.
func (b *Builder) ResourceTypes(types ...string) *Builder {
b.resources = append(b.resources, types...)
return b
}
// SelectorParam defines a selector that should be applied to the object types to load.
// This will not affect files loaded from disk or URL. If the parameter is empty it is
// a no-op - to select all resources invoke `b.Selector(labels.Everything)`.
func (b *Builder) SelectorParam(s string) *Builder {
selector, err := labels.ParseSelector(s)
if err != nil {
b.errs = append(b.errs, fmt.Errorf("the provided selector %q is not valid: %v", s, err))
return b
}
if selector.Empty() {
return b
}
return b.Selector(selector)
}
// Selector accepts a selector directly, and if non nil will trigger a list action.
func (b *Builder) Selector(selector labels.Selector) *Builder {
b.selector = selector
return b
}
// NamespaceParam sets the namespace that these resources should be assumed to be under - used by
// DefaultNamespace() and RequireNamespace().
func (b *Builder) NamespaceParam(namespace string) *Builder {
b.namespace = namespace
return b
}
// DefaultNamespace instructs the builder to set the namespace value for any object found
// to NamespaceParam() if empty.
func (b *Builder) DefaultNamespace() *Builder {
b.defaultNamespace = true
return b
}
// RequireNamespace instructs the builder to set the namespace value for any object found
// to NamespaceParam() if empty, and if the value on the resource does not match
// NamespaceParam() an error will be returned.
func (b *Builder) RequireNamespace() *Builder {
b.requireNamespace = true
return b
}
// ResourceTypeOrNameArgs indicates that the builder should accept one or two arguments
// of the form `(<type1>[,<type2>,...]|<type> <name>)`. When one argument is received, the types
// provided will be retrieved from the server (and be comma delimited). When two arguments are
// received, they must be a single type and name. If more than two arguments are provided an
// error is set.
func (b *Builder) ResourceTypeOrNameArgs(args ...string) *Builder {
switch len(args) {
case 2:
b.name = args[1]
b.ResourceTypes(SplitResourceArgument(args[0])...)
case 1:
b.ResourceTypes(SplitResourceArgument(args[0])...)
if b.selector == nil {
b.selector = labels.Everything()
}
case 0:
default:
b.errs = append(b.errs, fmt.Errorf("when passing arguments, must be resource or resource and name"))
}
return b
}
// ResourceTypeAndNameArgs expects two arguments, a resource type, and a resource name. The resource
// matching that type and name will be retrieved from the server.
func (b *Builder) ResourceTypeAndNameArgs(args ...string) *Builder {
switch len(args) {
case 2:
b.name = args[1]
b.ResourceTypes(SplitResourceArgument(args[0])...)
case 0:
default:
b.errs = append(b.errs, fmt.Errorf("when passing arguments, must be resource and name"))
}
return b
}
// Flatten will convert any objects with a field named "Items" that is an array of runtime.Object
// compatible types into individual entries and give them their own items. The original object
// is not passed to any visitors.
func (b *Builder) Flatten() *Builder {
b.flatten = true
return b
}
// Latest will fetch the latest copy of any objects loaded from URLs or files from the server.
func (b *Builder) Latest() *Builder {
b.latest = true
return b
}
// ContinueOnError will attempt to load and visit as many objects as possible, even if some visits
// return errors or some objects cannot be loaded. The default behavior is to terminate after
// the first error is returned from a VisitorFunc.
func (b *Builder) ContinueOnError() *Builder {
b.continueOnError = true
return b
}
// SingleResourceType will cause the builder to error if the user specifies more than a single type
// of resource.
func (b *Builder) SingleResourceType() *Builder {
b.singleResourceType = true
return b
}
func (b *Builder) resourceMappings() ([]*meta.RESTMapping, error) {
if len(b.resources) > 1 && b.singleResourceType {
return nil, fmt.Errorf("you may only specify a single resource type")
}
mappings := []*meta.RESTMapping{}
for _, r := range b.resources {
version, kind, err := b.mapper.VersionAndKindForResource(r)
if err != nil {
return nil, err
}
mapping, err := b.mapper.RESTMapping(kind, version)
if err != nil {
return nil, err
}
mappings = append(mappings, mapping)
}
return mappings, nil
}
func (b *Builder) visitorResult() *Result {
if len(b.errs) > 0 {
return &Result{err: errors.NewAggregate(b.errs)}
}
// visit selectors
if b.selector != nil {
if len(b.name) != 0 {
return &Result{err: fmt.Errorf("name cannot be provided when a selector is specified")}
}
if len(b.resources) == 0 {
return &Result{err: fmt.Errorf("at least one resource must be specified to use a selector")}
}
// empty selector has different error message for paths being provided
if len(b.paths) != 0 {
if b.selector.Empty() {
return &Result{err: fmt.Errorf("when paths, URLs, or stdin is provided as input, you may not specify a resource by arguments as well")}
} else {
return &Result{err: fmt.Errorf("a selector may not be specified when path, URL, or stdin is provided as input")}
}
}
mappings, err := b.resourceMappings()
if err != nil {
return &Result{err: err}
}
visitors := []Visitor{}
for _, mapping := range mappings {
client, err := b.mapper.ClientForMapping(mapping)
if err != nil {
return &Result{err: err}
}
selectorNamespace := b.namespace
if mapping.Scope.Name() != meta.RESTScopeNameNamespace {
selectorNamespace = ""
}
visitors = append(visitors, NewSelector(client, mapping, selectorNamespace, b.selector))
}
if b.continueOnError {
return &Result{visitor: EagerVisitorList(visitors), sources: visitors}
}
return &Result{visitor: VisitorList(visitors), sources: visitors}
}
// visit single item specified by name
if len(b.name) != 0 {
if len(b.paths) != 0 {
return &Result{singular: true, err: fmt.Errorf("when paths, URLs, or stdin is provided as input, you may not specify a resource by arguments as well")}
}
if len(b.resources) == 0 {
return &Result{singular: true, err: fmt.Errorf("you must provide a resource and a resource name together")}
}
if len(b.resources) > 1 {
return &Result{singular: true, err: fmt.Errorf("you must specify only one resource")}
}
mappings, err := b.resourceMappings()
if err != nil {
return &Result{singular: true, err: err}
}
mapping := mappings[0]
if mapping.Scope.Name() != meta.RESTScopeNameNamespace {
b.namespace = ""
} else {
if len(b.namespace) == 0 {
return &Result{singular: true, err: fmt.Errorf("namespace may not be empty when retrieving a resource by name")}
}
}
client, err := b.mapper.ClientForMapping(mapping)
if err != nil {
return &Result{singular: true, err: err}
}
info := NewInfo(client, mappings[0], b.namespace, b.name)
if err := info.Get(); err != nil {
return &Result{singular: true, err: err}
}
return &Result{singular: true, visitor: info, sources: []Visitor{info}}
}
// visit items specified by paths
if len(b.paths) != 0 {
singular := !b.dir && !b.stream && len(b.paths) == 1
if len(b.resources) != 0 {
return &Result{singular: singular, err: fmt.Errorf("when paths, URLs, or stdin is provided as input, you may not specify resource arguments as well")}
}
var visitors Visitor
if b.continueOnError {
visitors = EagerVisitorList(b.paths)
} else {
visitors = VisitorList(b.paths)
}
// only items from disk can be refetched
if b.latest {
// must flatten lists prior to fetching
if b.flatten {
visitors = NewFlattenListVisitor(visitors, b.mapper)
}
visitors = NewDecoratedVisitor(visitors, RetrieveLatest)
}
return &Result{singular: singular, visitor: visitors, sources: b.paths}
}
return &Result{err: fmt.Errorf("you must provide one or more resources by argument or filename")}
}
// Do returns a Result object with a Visitor for the resources identified by the Builder.
// The visitor will respect the error behavior specified by ContinueOnError. Note that stream
// inputs are consumed by the first execution - use Infos() or Object() on the Result to capture a list
// for further iteration.
func (b *Builder) Do() *Result {
r := b.visitorResult()
if r.err != nil {
return r
}
if b.flatten {
r.visitor = NewFlattenListVisitor(r.visitor, b.mapper)
}
helpers := []VisitorFunc{}
if b.defaultNamespace {
helpers = append(helpers, SetNamespace(b.namespace))
}
if b.requireNamespace {
helpers = append(helpers, RequireNamespace(b.namespace))
}
helpers = append(helpers, FilterNamespace())
r.visitor = NewDecoratedVisitor(r.visitor, helpers...)
return r
}
func SplitResourceArgument(arg string) []string {
set := util.NewStringSet()
set.Insert(strings.Split(arg, ",")...)
return set.List()
}
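// A minimal usage sketch (hypothetical caller code, not part of this package):
// commands normally chain the option methods above and finish with Do(). The
// mapper, typer, clientMapper, cmdNamespace, selector and filenames values are
// assumed to come from the surrounding command.
//
//	r := NewBuilder(mapper, typer, clientMapper).
//		ContinueOnError().
//		NamespaceParam(cmdNamespace).DefaultNamespace().
//		FilenameParam(filenames...).
//		SelectorParam(selector).
//		Flatten().
//		Do()
//	infos, err := r.Infos()
//	if err != nil {
//		// handle the aggregated builder/visit errors
//	}
//	_ = infos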
| KyleAMathews/kubernetes | pkg/kubectl/resource/builder.go | GO | apache-2.0 | 13,413 |
/**
* Copyright (c) 2016-present, RxJava Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex;
import java.util.concurrent.TimeUnit;
import org.openjdk.jmh.annotations.*;
@BenchmarkMode(Mode.Throughput)
@Warmup(iterations = 5)
@Measurement(iterations = 5, time = 5, timeUnit = TimeUnit.SECONDS)
@OutputTimeUnit(TimeUnit.SECONDS)
@Fork(value = 1)
@State(Scope.Thread)
public class BlockingGetPerf {
Flowable<Integer> flowable;
Observable<Integer> observable;
Single<Integer> single;
Maybe<Integer> maybe;
Completable completable;
@Setup
public void setup() {
flowable = Flowable.just(1);
observable = Observable.just(1);
single = Single.just(1);
maybe = Maybe.just(1);
completable = Completable.complete();
}
@Benchmark
public Object flowableBlockingFirst() {
return flowable.blockingFirst();
}
@Benchmark
public Object flowableBlockingLast() {
return flowable.blockingLast();
}
@Benchmark
public Object observableBlockingLast() {
return observable.blockingLast();
}
@Benchmark
public Object observableBlockingFirst() {
return observable.blockingFirst();
}
@Benchmark
public Object single() {
return single.blockingGet();
}
@Benchmark
public Object maybe() {
return maybe.blockingGet();
}
@Benchmark
public Object completable() {
return completable.blockingGet();
}
}
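// Illustrative note (not part of the benchmark itself): each measured call is a
// plain blocking terminal operator, e.g.
//
//   int value = Single.just(1).blockingGet(); // returns 1, blocking the caller
//
// The class is normally run through the project's JMH harness; the exact
// invocation depends on the build setup.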
| AttwellBrian/RxJava | src/perf/java/io/reactivex/BlockingGetPerf.java | Java | apache-2.0 | 2,031 |
/*
Copyright IBM Corp. All Rights Reserved.
SPDX-License-Identifier: Apache-2.0
*/
package blkstoragetest
import (
"crypto/sha256"
"hash"
"io/ioutil"
"os"
"path/filepath"
"testing"
"github.com/hyperledger/fabric-protos-go/common"
"github.com/hyperledger/fabric/common/ledger/blkstorage"
"github.com/hyperledger/fabric/common/metrics/disabled"
"github.com/hyperledger/fabric/protoutil"
"github.com/stretchr/testify/require"
)
var (
testNewHashFunc = func() (hash.Hash, error) {
return sha256.New(), nil
}
attrsToIndex = []blkstorage.IndexableAttr{
blkstorage.IndexableAttrBlockHash,
blkstorage.IndexableAttrBlockNum,
blkstorage.IndexableAttrTxID,
blkstorage.IndexableAttrBlockNumTranNum,
}
)
// BootstrapBlockstoreFromSnapshot does the following:
// - create a block store using the provided blocks
// - generate a snapshot from the block store
// - bootstrap another block store from the snapshot
func BootstrapBlockstoreFromSnapshot(t *testing.T, ledgerName string, blocks []*common.Block) (*blkstorage.BlockStore, func()) {
require.NotEqual(t, 0, len(blocks))
testDir, err := ioutil.TempDir("", ledgerName)
require.NoError(t, err)
snapshotDir := filepath.Join(testDir, "snapshot")
require.NoError(t, os.Mkdir(snapshotDir, 0o755))
conf := blkstorage.NewConf(testDir, 0)
indexConfig := &blkstorage.IndexConfig{AttrsToIndex: attrsToIndex}
provider, err := blkstorage.NewProvider(conf, indexConfig, &disabled.Provider{})
require.NoError(t, err)
// create an original store from the provided blocks so that we can create a snapshot
originalBlkStore, err := provider.Open(ledgerName + "original")
require.NoError(t, err)
for _, block := range blocks {
require.NoError(t, originalBlkStore.AddBlock(block))
}
_, err = originalBlkStore.ExportTxIds(snapshotDir, testNewHashFunc)
require.NoError(t, err)
lastBlockInSnapshot := blocks[len(blocks)-1]
snapshotInfo := &blkstorage.SnapshotInfo{
LastBlockHash: protoutil.BlockHeaderHash(lastBlockInSnapshot.Header),
LastBlockNum: lastBlockInSnapshot.Header.Number,
PreviousBlockHash: lastBlockInSnapshot.Header.PreviousHash,
}
err = provider.ImportFromSnapshot(ledgerName, snapshotDir, snapshotInfo)
require.NoError(t, err)
blockStore, err := provider.Open(ledgerName)
require.NoError(t, err)
cleanup := func() {
provider.Close()
os.RemoveAll(testDir)
}
return blockStore, cleanup
}
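// A minimal usage sketch (hypothetical test code): build a few blocks, bootstrap
// a store from a snapshot of them, and query it. The block-construction helper
// shown here is assumed to exist in the caller's test utilities.
//
//	func TestQueryBootstrappedStore(t *testing.T) {
//		blocks := buildTestBlocks(t, 5) // assumed helper returning []*common.Block
//		store, cleanup := BootstrapBlockstoreFromSnapshot(t, "testledger", blocks)
//		defer cleanup()
//
//		info, err := store.GetBlockchainInfo()
//		require.NoError(t, err)
//		require.Equal(t, uint64(5), info.Height) // assumes blocks numbered 0..4
//	}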
| stemlending/fabric | common/ledger/blkstorage/blkstoragetest/blkstoragetest.go | GO | apache-2.0 | 2,406 |
//// [tests/cases/compiler/defaultDeclarationEmitDefaultImport.ts] ////
//// [root.ts]
export function getSomething(): Something { return null as any }
export default class Something {}
//// [main.ts]
import Thing, { getSomething } from "./root";
export const instance = getSomething();
//// [root.js]
"use strict";
exports.__esModule = true;
exports.getSomething = void 0;
function getSomething() { return null; }
exports.getSomething = getSomething;
var Something = /** @class */ (function () {
function Something() {
}
return Something;
}());
exports["default"] = Something;
//// [main.js]
"use strict";
exports.__esModule = true;
exports.instance = void 0;
var root_1 = require("./root");
exports.instance = root_1.getSomething();
//// [root.d.ts]
export declare function getSomething(): Something;
export default class Something {
}
//// [main.d.ts]
import Thing from "./root";
export declare const instance: Thing;
| nojvek/TypeScript | tests/baselines/reference/defaultDeclarationEmitDefaultImport.js | JavaScript | apache-2.0 | 972 |
/*
* $Id$
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tiles.template;
import java.io.IOException;
import java.io.Writer;
import java.util.Deque;
import org.apache.tiles.Attribute;
import org.apache.tiles.TilesContainer;
import org.apache.tiles.access.TilesAccess;
import org.apache.tiles.autotag.core.runtime.ModelBody;
import org.apache.tiles.autotag.core.runtime.annotation.Parameter;
import org.apache.tiles.request.Request;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* <p>
* <strong> Render the value of the specified template attribute to the current
* Writer</strong>
* </p>
*
* <p>
* Retrieve the value of the specified template attribute property, and render
 * it to the current Writer as a String. The usual toString() conversion is
 * applied to the found value.
* </p>
*
* @version $Rev$ $Date$
* @since 2.2.0
*/
public class GetAsStringModel {
/**
* The logging object.
*/
private Logger log = LoggerFactory.getLogger(getClass());
/**
* The attribute resolver to use.
*/
private AttributeResolver attributeResolver;
/**
* Constructor that uses the defaut attribute resolver.
*
* @since 3.0.0
*/
public GetAsStringModel() {
this(new DefaultAttributeResolver());
}
/**
* Constructor.
*
* @param attributeResolver The attribute resolver to use.
* @since 2.2.0
*/
public GetAsStringModel(AttributeResolver attributeResolver) {
this.attributeResolver = attributeResolver;
}
/**
* Executes the operation.
* @param ignore If <code>true</code>, if an exception happens during
 * rendering, or if the attribute is null, the problem will be ignored.
* @param preparer The preparer to invoke before rendering the attribute.
* @param role A comma-separated list of roles. If present, the attribute
* will be rendered only if the current user belongs to one of the roles.
* @param defaultValue The default value of the attribute. To use only if
* the attribute was not computed.
* @param defaultValueRole The default comma-separated list of roles. To use
* only if the attribute was not computed.
* @param defaultValueType The default type of the attribute. To use only if
* the attribute was not computed.
* @param name The name of the attribute.
* @param value The attribute to use immediately, if not null.
* @param request The request.
* @param modelBody The body.
* @throws IOException If an I/O error happens during rendering.
* @since 2.2.0
*/
public void execute(boolean ignore, String preparer, String role,
Object defaultValue, String defaultValueRole,
String defaultValueType, @Parameter(required = true) String name,
Attribute value, Request request, ModelBody modelBody)
throws IOException {
TilesContainer container = TilesAccess.getCurrentContainer(request);
Deque<Object> composeStack = ComposeStackUtil.getComposeStack(request);
Attribute attribute = resolveAttribute(container, ignore, preparer,
role, defaultValue, defaultValueRole, defaultValueType, name,
value, request);
if (attribute != null) {
composeStack.push(attribute);
}
modelBody.evaluateWithoutWriting();
container = TilesAccess.getCurrentContainer(request);
Writer writer = request.getWriter();
if (attribute != null) {
attribute = (Attribute) composeStack.pop();
}
renderAttribute(attribute, container, writer, ignore, request);
}
/**
* Resolves the attribute. and starts the context.
*
* @param container The Tiles container to use.
* @param ignore If <code>true</code>, if an exception happens during
 * rendering, or if the attribute is null, the problem will be ignored.
* @param preparer The preparer to invoke before rendering the attribute.
* @param role A comma-separated list of roles. If present, the attribute
* will be rendered only if the current user belongs to one of the roles.
* @param defaultValue The default value of the attribute. To use only if
* the attribute was not computed.
* @param defaultValueRole The default comma-separated list of roles. To use
* only if the attribute was not computed.
* @param defaultValueType The default type of the attribute. To use only if
* the attribute was not computed.
* @param name The name of the attribute.
* @param value The attribute to use immediately, if not null.
* @param request The request.
* @return The resolved attribute.
*/
private Attribute resolveAttribute(TilesContainer container,
boolean ignore, String preparer, String role, Object defaultValue,
String defaultValueRole, String defaultValueType, String name,
Attribute value, Request request) {
if (preparer != null) {
container.prepare(preparer, request);
}
Attribute attribute = attributeResolver.computeAttribute(container,
value, name, role, ignore, defaultValue, defaultValueRole,
defaultValueType, request);
container.startContext(request);
return attribute;
}
/**
* Renders the attribute as a string.
*
* @param attribute The attribute to use, previously resolved.
* @param container The Tiles container to use.
* @param writer The writer into which the attribute will be written.
* @param ignore If <code>true</code>, if an exception happens during
 * rendering, or if the attribute is null, the problem will be ignored.
* @param request The request.
* @throws IOException If an I/O error happens during rendering.
*/
private void renderAttribute(Attribute attribute, TilesContainer container,
Writer writer, boolean ignore, Request request)
throws IOException {
try {
if (attribute == null && ignore) {
return;
}
Object value = container.evaluate(attribute, request);
if(value != null) {
writer.write(value.toString());
}
} catch (IOException e) {
if (!ignore) {
throw e;
} else if (log.isDebugEnabled()) {
log.debug("Ignoring exception", e);
}
} catch (RuntimeException e) {
if (!ignore) {
throw e;
} else if (log.isDebugEnabled()) {
log.debug("Ignoring exception", e);
}
} finally {
container.endContext(request);
}
}
}
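/*
 * Illustrative usage only: this model backs the autotag-generated
 * <tiles:getAsString> tag. A typical (hypothetical) JSP fragment would be:
 *
 *   <tiles:getAsString name="title" ignore="true"/>
 *
 * which resolves the "title" attribute of the current definition and writes its
 * toString() value to the page writer.
 */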
| timgifford/tiles | tiles-template/src/main/java/org/apache/tiles/template/GetAsStringModel.java | Java | apache-2.0 | 7,572 |
DROP TABLE IF EXISTS test_2; | kenota/kommentator | vendor/github.com/mattes/migrate/database/clickhouse/examples/migrations/002_create_table.down.sql | SQL | apache-2.0 | 28 |
require_relative '../spec_helper'
describe HostNode do
before(:all) do
described_class.create_indexes
end
it { should be_timestamped_document }
it { should have_fields(:node_id, :name, :os, :driver, :public_ip).of_type(String) }
it { should have_fields(:labels).of_type(Array) }
it { should have_fields(:mem_total, :mem_limit).of_type(Integer) }
it { should belong_to(:grid) }
it { should have_many(:containers) }
it { should have_index_for(grid_id: 1) }
it { should have_index_for(grid_id: 1, node_number: 1).with_options(sparse: true, unique: true) }
it { should have_index_for(node_id: 1) }
describe '#connected?' do
it 'returns true when connected' do
subject.connected = true
expect(subject.connected?).to eq(true)
end
it 'returns false when not connected' do
expect(subject.connected?).to eq(false)
end
end
describe '#attributes_from_docker' do
it 'sets public_ip' do
expect {
subject.attributes_from_docker({'PublicIp' => '127.0.0.1'})
}.to change{ subject.public_ip }.to('127.0.0.1')
end
end
describe '#save!' do
let(:grid) { double(:grid, free_node_numbers: (1..254).to_a )}
    it 'reserves node number' do
allow(subject).to receive(:grid).and_return(grid)
subject.attributes = {node_id: 'bb', grid_id: 1}
subject.save!
expect(subject.node_number).to eq(1)
end
it 'reserves node number successfully after race condition error' do
node1 = HostNode.create!(node_id: 'aa', node_number: 1, grid_id: 1)
allow(subject).to receive(:grid).and_return(grid)
subject.attributes = {node_id: 'bb', grid_id: 1}
subject.save!
expect(subject.node_number).to eq(2)
end
end
end
| robxu9/kontena | server/spec/models/host_node_spec.rb | Ruby | apache-2.0 | 1,762 |
from zerver.lib.test_classes import WebhookTestCase
class PagerDutyHookTests(WebhookTestCase):
STREAM_NAME = 'pagerduty'
URL_TEMPLATE = "/api/v1/external/pagerduty?api_key={api_key}&stream={stream}"
FIXTURE_DIR_NAME = 'pagerduty'
def test_trigger(self) -> None:
expected_message = 'Incident [3](https://zulip-test.pagerduty.com/incidents/P140S4Y) triggered by [Test service](https://zulip-test.pagerduty.com/services/PIL5CUQ) (assigned to [armooo](https://zulip-test.pagerduty.com/users/POBCFRJ)):\n\n``` quote\nfoo\n```'
self.send_and_test_stream_message('trigger', "Incident 3", expected_message)
def test_trigger_v2(self) -> None:
expected_message = 'Incident [33](https://webdemo.pagerduty.com/incidents/PRORDTY) triggered by [Production XDB Cluster](https://webdemo.pagerduty.com/services/PN49J75) (assigned to [Laura Haley](https://webdemo.pagerduty.com/users/P553OPV)):\n\n``` quote\nMy new incident\n```'
self.send_and_test_stream_message('trigger_v2', 'Incident 33', expected_message)
def test_trigger_without_assignee_v2(self) -> None:
expected_message = 'Incident [33](https://webdemo.pagerduty.com/incidents/PRORDTY) triggered by [Production XDB Cluster](https://webdemo.pagerduty.com/services/PN49J75) (assigned to nobody):\n\n``` quote\nMy new incident\n```'
self.send_and_test_stream_message('trigger_without_assignee_v2', 'Incident 33', expected_message)
def test_unacknowledge(self) -> None:
expected_message = 'Incident [3](https://zulip-test.pagerduty.com/incidents/P140S4Y) unacknowledged by [Test service](https://zulip-test.pagerduty.com/services/PIL5CUQ) (assigned to [armooo](https://zulip-test.pagerduty.com/users/POBCFRJ)):\n\n``` quote\nfoo\n```'
self.send_and_test_stream_message('unacknowledge', "Incident 3", expected_message)
def test_resolved(self) -> None:
expected_message = 'Incident [1](https://zulip-test.pagerduty.com/incidents/PO1XIJ5) resolved by [armooo](https://zulip-test.pagerduty.com/users/POBCFRJ):\n\n``` quote\nIt is on fire\n```'
self.send_and_test_stream_message('resolved', "Incident 1", expected_message)
def test_resolved_v2(self) -> None:
expected_message = 'Incident [33](https://webdemo.pagerduty.com/incidents/PRORDTY) resolved by [Laura Haley](https://webdemo.pagerduty.com/users/P553OPV):\n\n``` quote\nMy new incident\n```'
self.send_and_test_stream_message('resolve_v2', 'Incident 33', expected_message)
def test_auto_resolved(self) -> None:
expected_message = 'Incident [2](https://zulip-test.pagerduty.com/incidents/PX7K9J2) resolved:\n\n``` quote\nnew\n```'
self.send_and_test_stream_message('auto_resolved', "Incident 2", expected_message)
def test_acknowledge(self) -> None:
expected_message = 'Incident [1](https://zulip-test.pagerduty.com/incidents/PO1XIJ5) acknowledged by [armooo](https://zulip-test.pagerduty.com/users/POBCFRJ):\n\n``` quote\nIt is on fire\n```'
self.send_and_test_stream_message('acknowledge', "Incident 1", expected_message)
def test_acknowledge_without_trigger_summary_data(self) -> None:
expected_message = 'Incident [1](https://zulip-test.pagerduty.com/incidents/PO1XIJ5) acknowledged by [armooo](https://zulip-test.pagerduty.com/users/POBCFRJ):\n\n``` quote\n\n```'
self.send_and_test_stream_message('acknowledge_without_trigger_summary_data',
"Incident 1", expected_message)
def test_acknowledge_v2(self) -> None:
expected_message = 'Incident [33](https://webdemo.pagerduty.com/incidents/PRORDTY) acknowledged by [Laura Haley](https://webdemo.pagerduty.com/users/P553OPV):\n\n``` quote\nMy new incident\n```'
self.send_and_test_stream_message('acknowledge_v2', 'Incident 33', expected_message)
def test_incident_assigned_v2(self) -> None:
expected_message = 'Incident [33](https://webdemo.pagerduty.com/incidents/PRORDTY) assigned to [Wiley Jacobson](https://webdemo.pagerduty.com/users/PFBSJ2Z):\n\n``` quote\nMy new incident\n```'
self.send_and_test_stream_message('assign_v2', 'Incident 33', expected_message)
def test_no_subject(self) -> None:
expected_message = 'Incident [48219](https://dropbox.pagerduty.com/incidents/PJKGZF9) resolved:\n\n``` quote\nmp_error_block_down_critical\u2119\u01b4\n```'
self.send_and_test_stream_message('mp_fail', "Incident 48219", expected_message)
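# Illustrative note (not part of the upstream tests): URL_TEMPLATE above expands to
# something like
#   /api/v1/external/pagerduty?api_key=abcd1234&stream=pagerduty
# once the test harness substitutes {api_key} and {stream}; the key shown here is a
# made-up placeholder.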
| timabbott/zulip | zerver/webhooks/pagerduty/tests.py | Python | apache-2.0 | 4,489 |
# Copyright 2015 Hewlett-Packard Development Company, L.P.
# Copyright 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.tests.api import base
from neutron.tests.tempest import config
from neutron.tests.tempest import test
from tempest_lib.common.utils import data_utils
CONF = config.CONF
class SharedNetworksTest(base.BaseAdminNetworkTest):
@classmethod
def resource_setup(cls):
super(SharedNetworksTest, cls).resource_setup()
cls.shared_network = cls.create_shared_network()
@test.idempotent_id('6661d219-b96d-4597-ad10-55766ce4abf7')
def test_create_update_shared_network(self):
shared_network = self.create_shared_network()
net_id = shared_network['id']
self.assertEqual('ACTIVE', shared_network['status'])
self.assertIsNotNone(shared_network['id'])
self.assertTrue(self.shared_network['shared'])
new_name = "New_shared_network"
body = self.admin_client.update_network(net_id, name=new_name,
admin_state_up=False,
shared=False)
updated_net = body['network']
self.assertEqual(new_name, updated_net['name'])
self.assertFalse(updated_net['shared'])
self.assertFalse(updated_net['admin_state_up'])
@test.idempotent_id('9c31fabb-0181-464f-9ace-95144fe9ca77')
def test_create_port_shared_network_as_non_admin_tenant(self):
# create a port as non admin
body = self.client.create_port(network_id=self.shared_network['id'])
port = body['port']
self.addCleanup(self.admin_client.delete_port, port['id'])
# verify the tenant id of admin network and non admin port
self.assertNotEqual(self.shared_network['tenant_id'],
port['tenant_id'])
@test.idempotent_id('3e39c4a6-9caf-4710-88f1-d20073c6dd76')
def test_create_bulk_shared_network(self):
# Creates 2 networks in one request
net_nm = [data_utils.rand_name('network'),
data_utils.rand_name('network')]
body = self.admin_client.create_bulk_network(net_nm, shared=True)
created_networks = body['networks']
for net in created_networks:
self.addCleanup(self.admin_client.delete_network, net['id'])
self.assertIsNotNone(net['id'])
self.assertTrue(net['shared'])
def _list_shared_networks(self, user):
body = user.list_networks(shared=True)
networks_list = [net['id'] for net in body['networks']]
self.assertIn(self.shared_network['id'], networks_list)
self.assertTrue(self.shared_network['shared'])
@test.idempotent_id('a064a9fd-e02f-474a-8159-f828cd636a28')
def test_list_shared_networks(self):
# List the shared networks and confirm that
# shared network extension attribute is returned for those networks
# that are created as shared
self._list_shared_networks(self.admin_client)
self._list_shared_networks(self.client)
def _show_shared_network(self, user):
body = user.show_network(self.shared_network['id'])
show_shared_net = body['network']
self.assertEqual(self.shared_network['name'], show_shared_net['name'])
self.assertEqual(self.shared_network['id'], show_shared_net['id'])
self.assertTrue(show_shared_net['shared'])
@test.idempotent_id('e03c92a2-638d-4bfa-b50a-b1f66f087e58')
def test_show_shared_networks_attribute(self):
# Show a shared network and confirm that
# shared network extension attribute is returned.
self._show_shared_network(self.admin_client)
self._show_shared_network(self.client)
| pnavarro/neutron | neutron/tests/api/admin/test_shared_network_extension.py | Python | apache-2.0 | 4,322 |
/* Copyright 2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
// Generic feature extractor for extracting features from objects. The feature
// extractor can be used for extracting features from any object. The feature
// extractor and feature function classes are template classes that have to
// be instantiated for extracting feature from a specific object type.
//
// A feature extractor consists of a hierarchy of feature functions. Each
// feature function extracts one or more feature type and value pairs from the
// object.
//
// The feature extractor has a modular design where new feature functions can be
// registered as components. The feature extractor is initialized from a
// descriptor represented by a protocol buffer. The feature extractor can also
// be initialized from a text-based source specification of the feature
// extractor. Feature specification parsers can be added as components. By
// default the feature extractor can be read from an ASCII protocol buffer or in
// a simple feature modeling language (fml).
// A feature function is invoked with a focus. Nested feature functions can be
// invoked with another focus determined by the parent feature function.
#ifndef SYNTAXNET_FEATURE_EXTRACTOR_H_
#define SYNTAXNET_FEATURE_EXTRACTOR_H_
#include <memory>
#include <string>
#include <vector>
#include "syntaxnet/feature_extractor.pb.h"
#include "syntaxnet/feature_types.h"
#include "syntaxnet/proto_io.h"
#include "syntaxnet/registry.h"
#include "syntaxnet/sentence.pb.h"
#include "syntaxnet/task_context.h"
#include "syntaxnet/utils.h"
#include "syntaxnet/workspace.h"
#include "tensorflow/core/lib/core/status.h"
#include "tensorflow/core/lib/core/stringpiece.h"
#include "tensorflow/core/lib/io/inputbuffer.h"
#include "tensorflow/core/lib/io/record_reader.h"
#include "tensorflow/core/lib/io/record_writer.h"
#include "tensorflow/core/lib/strings/strcat.h"
#include "tensorflow/core/platform/env.h"
namespace syntaxnet {
// Use the same type for feature values as is used for predicated.
typedef int64 Predicate;
typedef Predicate FeatureValue;
// Output feature model in FML format.
void ToFMLFunction(const FeatureFunctionDescriptor &function, string *output);
void ToFML(const FeatureFunctionDescriptor &function, string *output);
// A feature vector contains feature type and value pairs.
class FeatureVector {
public:
FeatureVector() {}
// Adds feature type and value pair to feature vector.
void add(FeatureType *type, FeatureValue value) {
features_.emplace_back(type, value);
}
// Removes all elements from the feature vector.
void clear() { features_.clear(); }
// Returns the number of elements in the feature vector.
int size() const { return features_.size(); }
// Reserves space in the underlying feature vector.
void reserve(int n) { features_.reserve(n); }
// Returns feature type for an element in the feature vector.
FeatureType *type(int index) const { return features_[index].type; }
// Returns feature value for an element in the feature vector.
FeatureValue value(int index) const { return features_[index].value; }
private:
// Structure for holding feature type and value pairs.
struct Element {
Element() : type(nullptr), value(-1) {}
Element(FeatureType *t, FeatureValue v) : type(t), value(v) {}
FeatureType *type;
FeatureValue value;
};
// Array for storing feature vector elements.
vector<Element> features_;
TF_DISALLOW_COPY_AND_ASSIGN(FeatureVector);
};
// The generic feature extractor is the type-independent part of a feature
// extractor. This holds the descriptor for the feature extractor and the
// collection of feature types used in the feature extractor. The feature
// types are not available until FeatureExtractor<>::Init() has been called.
class GenericFeatureExtractor {
public:
GenericFeatureExtractor();
virtual ~GenericFeatureExtractor();
// Initializes the feature extractor from a source representation of the
// feature extractor. The first line is used for determining the feature
// specification language. If the first line starts with #! followed by a name
// then this name is used for instantiating a feature specification parser
// with that name. If the language cannot be detected this way it falls back
// to using the default language supplied.
void Parse(const string &source);
// Returns the feature extractor descriptor.
const FeatureExtractorDescriptor &descriptor() const { return descriptor_; }
FeatureExtractorDescriptor *mutable_descriptor() { return &descriptor_; }
// Returns the number of feature types in the feature extractor. Invalid
// before Init() has been called.
int feature_types() const { return feature_types_.size(); }
// Returns all feature types names used by the extractor. The names are
// added to the types_names array. Invalid before Init() has been called.
void GetFeatureTypeNames(vector<string> *type_names) const;
// Returns a feature type used in the extractor. Invalid before Init() has
// been called.
const FeatureType *feature_type(int index) const {
return feature_types_[index];
}
// Returns the feature domain size of this feature extractor.
// NOTE: The way that domain size is calculated is, for some, unintuitive. It
// is the largest domain size of any feature type.
FeatureValue GetDomainSize() const;
protected:
// Initializes the feature types used by the extractor. Called from
// FeatureExtractor<>::Init().
void InitializeFeatureTypes();
private:
// Initializes the top-level feature functions.
virtual void InitializeFeatureFunctions() = 0;
// Returns all feature types used by the extractor. The feature types are
// added to the result array.
virtual void GetFeatureTypes(vector<FeatureType *> *types) const = 0;
// Descriptor for the feature extractor. This is a protocol buffer that
// contains all the information about the feature extractor. The feature
// functions are initialized from the information in the descriptor.
FeatureExtractorDescriptor descriptor_;
// All feature types used by the feature extractor. The collection of all the
// feature types describes the feature space of the feature set produced by
// the feature extractor. Not owned.
vector<FeatureType *> feature_types_;
};
// The generic feature function is the type-independent part of a feature
// function. Each feature function is associated with the descriptor that it is
// instantiated from. The feature types associated with this feature function
// will be established by the time FeatureExtractor<>::Init() completes.
class GenericFeatureFunction {
public:
// A feature value that represents the absence of a value.
static constexpr FeatureValue kNone = -1;
GenericFeatureFunction();
virtual ~GenericFeatureFunction();
// Sets up the feature function. NB: FeatureTypes of nested functions are not
// guaranteed to be available until Init().
virtual void Setup(TaskContext *context) {}
// Initializes the feature function. NB: The FeatureType of this function must
// be established when this method completes.
virtual void Init(TaskContext *context) {}
// Requests workspaces from a registry to obtain indices into a WorkspaceSet
// for any Workspace objects used by this feature function. NB: This will be
// called after Init(), so it can depend on resources and arguments.
virtual void RequestWorkspaces(WorkspaceRegistry *registry) {}
// Appends the feature types produced by the feature function to types. The
// default implementation appends feature_type(), if non-null. Invalid
// before Init() has been called.
virtual void GetFeatureTypes(vector<FeatureType *> *types) const;
// Returns the feature type for feature produced by this feature function. If
// the feature function produces features of different types this returns
// null. Invalid before Init() has been called.
virtual FeatureType *GetFeatureType() const;
// Returns the name of the registry used for creating the feature function.
// This can be used for checking if two feature functions are of the same
// kind.
virtual const char *RegistryName() const = 0;
  // Returns the value of a named parameter in the feature function's descriptor.
  // If the named parameter is not found, the global parameters are searched.
string GetParameter(const string &name) const;
int GetIntParameter(const string &name, int default_value) const;
// Returns the FML function description for the feature function, i.e. the
// name and parameters without the nested features.
string FunctionName() const {
string output;
ToFMLFunction(*descriptor_, &output);
return output;
}
// Returns the prefix for nested feature functions. This is the prefix of this
// feature function concatenated with the feature function name.
string SubPrefix() const {
return prefix_.empty() ? FunctionName() : prefix_ + "." + FunctionName();
}
// Returns/sets the feature extractor this function belongs to.
GenericFeatureExtractor *extractor() const { return extractor_; }
void set_extractor(GenericFeatureExtractor *extractor) {
extractor_ = extractor;
}
// Returns/sets the feature function descriptor.
FeatureFunctionDescriptor *descriptor() const { return descriptor_; }
void set_descriptor(FeatureFunctionDescriptor *descriptor) {
descriptor_ = descriptor;
}
// Returns a descriptive name for the feature function. The name is taken from
// the descriptor for the feature function. If the name is empty or the
// feature function is a variable the name is the FML representation of the
// feature, including the prefix.
string name() const {
string output;
if (descriptor_->name().empty()) {
if (!prefix_.empty()) {
output.append(prefix_);
output.append(".");
}
ToFML(*descriptor_, &output);
} else {
output = descriptor_->name();
}
tensorflow::StringPiece stripped(output);
utils::RemoveWhitespaceContext(&stripped);
return stripped.ToString();
}
// Returns the argument from the feature function descriptor. It defaults to
// 0 if the argument has not been specified.
int argument() const {
return descriptor_->has_argument() ? descriptor_->argument() : 0;
}
// Returns/sets/clears function name prefix.
const string &prefix() const { return prefix_; }
void set_prefix(const string &prefix) { prefix_ = prefix; }
protected:
// Returns the feature type for single-type feature functions.
FeatureType *feature_type() const { return feature_type_; }
// Sets the feature type for single-type feature functions. This takes
// ownership of feature_type. Can only be called once.
void set_feature_type(FeatureType *feature_type) {
CHECK(feature_type_ == nullptr);
feature_type_ = feature_type;
}
private:
// Feature extractor this feature function belongs to. Not owned.
GenericFeatureExtractor *extractor_ = nullptr;
// Descriptor for feature function. Not owned.
FeatureFunctionDescriptor *descriptor_ = nullptr;
// Feature type for features produced by this feature function. If the
// feature function produces features of multiple feature types this is null
// and the feature function must return it's feature types in
// GetFeatureTypes(). Owned.
FeatureType *feature_type_ = nullptr;
// Prefix used for sub-feature types of this function.
string prefix_;
};
// Feature function that can extract features from an object. Templated on
// two type arguments:
//
// OBJ: The "object" from which features are extracted; e.g., a sentence. This
// should be a plain type, rather than a reference or pointer.
//
// ARGS: A set of 0 or more types that are used to "index" into some part of the
// object that should be extracted, e.g. an int token index for a sentence
// object. This should not be a reference type.
template<class OBJ, class ...ARGS>
class FeatureFunction
: public GenericFeatureFunction,
public RegisterableClass< FeatureFunction<OBJ, ARGS...> > {
public:
using Self = FeatureFunction<OBJ, ARGS...>;
// Preprocesses the object. This will be called prior to calling Evaluate()
// or Compute() on that object.
virtual void Preprocess(WorkspaceSet *workspaces, OBJ *object) const {}
// Appends features computed from the object and focus to the result. The
// default implementation delegates to Compute(), adding a single value if
// available. Multi-valued feature functions must override this method.
virtual void Evaluate(const WorkspaceSet &workspaces, const OBJ &object,
ARGS... args, FeatureVector *result) const {
FeatureValue value = Compute(workspaces, object, args..., result);
if (value != kNone) result->add(feature_type(), value);
}
// Returns a feature value computed from the object and focus, or kNone if no
// value is computed. Single-valued feature functions only need to override
// this method.
virtual FeatureValue Compute(const WorkspaceSet &workspaces,
const OBJ &object,
ARGS... args,
const FeatureVector *fv) const {
return kNone;
}
// Instantiates a new feature function in a feature extractor from a feature
// descriptor.
static Self *Instantiate(GenericFeatureExtractor *extractor,
FeatureFunctionDescriptor *fd,
const string &prefix) {
Self *f = Self::Create(fd->type());
f->set_extractor(extractor);
f->set_descriptor(fd);
f->set_prefix(prefix);
return f;
}
// Returns the name of the registry for the feature function.
const char *RegistryName() const override {
return Self::registry()->name;
}
private:
// Special feature function class for resolving variable references. The type
// of the feature function is used for resolving the variable reference. When
// evaluated it will either get the feature value(s) from the variable portion
// of the feature vector, if present, or otherwise it will call the referenced
// feature extractor function directly to extract the feature(s).
class Reference;
};
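// A minimal sketch of a concrete feature function (hypothetical: the object
// type, its accessors and the NumericFeatureType size are assumed and not
// defined in this header). A real implementation would also be registered via
// REGISTER_FEATURE_FUNCTION against the appropriate base class.
//
//   class TokenLengthFeature : public FeatureFunction<Sentence, int> {
//    public:
//     void Init(TaskContext *context) override {
//       set_feature_type(new NumericFeatureType(name(), 64));
//     }
//     FeatureValue Compute(const WorkspaceSet &workspaces, const Sentence &object,
//                          int focus, const FeatureVector *fv) const override {
//       return object.token(focus).word().size();
//     }
//   };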
// Base class for features with nested feature functions. The nested functions
// are of type NES, which may be different from the type of the parent function.
// NB: NestedFeatureFunction will ensure that all initialization of nested
// functions takes place during Setup() and Init() -- after the nested features
// are initialized, the parent feature is initialized via SetupNested() and
// InitNested(). Alternatively, a derived classes that overrides Setup() and
// Init() directly should call Parent::Setup(), Parent::Init(), etc. first.
//
// Note: NestedFeatureFunction cannot know how to call Preprocess, Evaluate, or
// Compute, since the nested functions may be of a different type.
template<class NES, class OBJ, class ...ARGS>
class NestedFeatureFunction : public FeatureFunction<OBJ, ARGS...> {
public:
using Parent = NestedFeatureFunction<NES, OBJ, ARGS...>;
// Clean up nested functions.
~NestedFeatureFunction() override { utils::STLDeleteElements(&nested_); }
// By default, just appends the nested feature types.
void GetFeatureTypes(vector<FeatureType *> *types) const override {
CHECK(!this->nested().empty())
<< "Nested features require nested features to be defined.";
for (auto *function : nested_) function->GetFeatureTypes(types);
}
// Sets up the nested features.
void Setup(TaskContext *context) override {
CreateNested(this->extractor(), this->descriptor(), &nested_,
this->SubPrefix());
for (auto *function : nested_) function->Setup(context);
SetupNested(context);
}
// Sets up this NestedFeatureFunction specifically.
virtual void SetupNested(TaskContext *context) {}
// Initializes the nested features.
void Init(TaskContext *context) override {
for (auto *function : nested_) function->Init(context);
InitNested(context);
}
// Initializes this NestedFeatureFunction specifically.
virtual void InitNested(TaskContext *context) {}
// Gets all the workspaces needed for the nested functions.
void RequestWorkspaces(WorkspaceRegistry *registry) override {
for (auto *function : nested_) function->RequestWorkspaces(registry);
}
// Returns the list of nested feature functions.
const vector<NES *> &nested() const { return nested_; }
// Instantiates nested feature functions for a feature function. Creates and
// initializes one feature function for each sub-descriptor in the feature
// descriptor.
static void CreateNested(GenericFeatureExtractor *extractor,
FeatureFunctionDescriptor *fd,
vector<NES *> *functions,
const string &prefix) {
for (int i = 0; i < fd->feature_size(); ++i) {
FeatureFunctionDescriptor *sub = fd->mutable_feature(i);
NES *f = NES::Instantiate(extractor, sub, prefix);
functions->push_back(f);
}
}
protected:
// The nested feature functions, if any, in order of declaration in the
// feature descriptor. Owned.
vector<NES *> nested_;
};
// Base class for a nested feature function that takes nested features with the
// same signature as these features, i.e. a meta feature. For this class, we can
// provide preprocessing of the nested features.
template<class OBJ, class ...ARGS>
class MetaFeatureFunction : public NestedFeatureFunction<
FeatureFunction<OBJ, ARGS...>, OBJ, ARGS...> {
public:
// Preprocesses using the nested features.
void Preprocess(WorkspaceSet *workspaces, OBJ *object) const override {
for (auto *function : this->nested_) {
function->Preprocess(workspaces, object);
}
}
};
// Template for a special type of locator: The locator of type
// FeatureFunction<OBJ, ARGS...> calls nested functions of type
// FeatureFunction<OBJ, IDX, ARGS...>, where the derived class DER is
// responsible for translating by providing the following:
//
// // Gets the new additional focus.
// IDX GetFocus(const WorkspaceSet &workspaces, const OBJ &object);
//
// This is useful to e.g. add a token focus to a parser state based on some
// desired property of that state.
template<class DER, class OBJ, class IDX, class ...ARGS>
class FeatureAddFocusLocator : public NestedFeatureFunction<
FeatureFunction<OBJ, IDX, ARGS...>, OBJ, ARGS...> {
public:
void Preprocess(WorkspaceSet *workspaces, OBJ *object) const override {
for (auto *function : this->nested_) {
function->Preprocess(workspaces, object);
}
}
void Evaluate(const WorkspaceSet &workspaces, const OBJ &object,
ARGS... args, FeatureVector *result) const override {
IDX focus = static_cast<const DER *>(this)->GetFocus(
workspaces, object, args...);
for (auto *function : this->nested()) {
function->Evaluate(workspaces, object, focus, args..., result);
}
}
// Returns the first nested feature's computed value.
FeatureValue Compute(const WorkspaceSet &workspaces,
const OBJ &object,
ARGS... args,
const FeatureVector *result) const override {
IDX focus = static_cast<const DER *>(this)->GetFocus(
workspaces, object, args...);
return this->nested()[0]->Compute(
workspaces, object, focus, args..., result);
}
};
// CRTP feature locator class. This is a meta feature that modifies ARGS and
// then calls the nested feature functions with the modified ARGS. Note that in
// order for this template to work correctly, all of ARGS must be types for
// which the reference operator & can be interpreted as a pointer to the
// argument. The derived class DER must implement the UpdateFocus method which
// takes pointers to the ARGS arguments:
//
// // Updates the current arguments.
// void UpdateArgs(const OBJ &object, ARGS *...args) const;
template<class DER, class OBJ, class ...ARGS>
class FeatureLocator : public MetaFeatureFunction<OBJ, ARGS...> {
public:
// Feature locators have an additional check that there is no intrinsic type.
void GetFeatureTypes(vector<FeatureType *> *types) const override {
CHECK(this->feature_type() == nullptr)
<< "FeatureLocators should not have an intrinsic type.";
MetaFeatureFunction<OBJ, ARGS...>::GetFeatureTypes(types);
}
// Evaluates the locator.
void Evaluate(const WorkspaceSet &workspaces, const OBJ &object,
ARGS... args, FeatureVector *result) const override {
static_cast<const DER *>(this)->UpdateArgs(workspaces, object, &args...);
for (auto *function : this->nested()) {
function->Evaluate(workspaces, object, args..., result);
}
}
// Returns the first nested feature's computed value.
FeatureValue Compute(const WorkspaceSet &workspaces, const OBJ &object,
ARGS... args,
const FeatureVector *result) const override {
static_cast<const DER *>(this)->UpdateArgs(workspaces, object, &args...);
return this->nested()[0]->Compute(workspaces, object, args..., result);
}
};
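// A minimal sketch of a derived locator (hypothetical; the object type is
// assumed): it shifts the incoming focus by the feature's argument before
// delegating to the nested functions, so that e.g. "offset(1).something"
// evaluates "something" one position to the right.
//
//   class OffsetLocator : public FeatureLocator<OffsetLocator, Sentence, int> {
//    public:
//     void UpdateArgs(const WorkspaceSet &workspaces, const Sentence &object,
//                     int *focus) const {
//       *focus += argument();
//     }
//   };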
// Feature extractor for extracting features from objects of a certain class.
// Template type parameters are as defined for FeatureFunction.
template<class OBJ, class ...ARGS>
class FeatureExtractor : public GenericFeatureExtractor {
public:
// Feature function type for top-level functions in the feature extractor.
typedef FeatureFunction<OBJ, ARGS...> Function;
typedef FeatureExtractor<OBJ, ARGS...> Self;
// Feature locator type for the feature extractor.
template<class DER>
using Locator = FeatureLocator<DER, OBJ, ARGS...>;
// Initializes feature extractor.
FeatureExtractor() {}
~FeatureExtractor() override { utils::STLDeleteElements(&functions_); }
// Sets up the feature extractor. Note that only top-level functions exist
// until Setup() is called. This does not take ownership over the context,
// which must outlive this.
void Setup(TaskContext *context) {
for (Function *function : functions_) function->Setup(context);
}
// Initializes the feature extractor. Must be called after Setup(). This
// does not take ownership over the context, which must outlive this.
void Init(TaskContext *context) {
for (Function *function : functions_) function->Init(context);
this->InitializeFeatureTypes();
}
// Requests workspaces from the registry. Must be called after Init(), and
// before Preprocess(). Does not take ownership over registry. This should be
// the same registry used to initialize the WorkspaceSet used in Preprocess()
// and ExtractFeatures(). NB: This is a different ordering from that used in
// SentenceFeatureRepresentation style feature computation.
void RequestWorkspaces(WorkspaceRegistry *registry) {
for (auto *function : functions_) function->RequestWorkspaces(registry);
}
// Preprocesses the object using feature functions for the phase. Must be
// called before any calls to ExtractFeatures() on that object and phase.
void Preprocess(WorkspaceSet *workspaces, OBJ *object) const {
for (Function *function : functions_) {
function->Preprocess(workspaces, object);
}
}
// Extracts features from an object with a focus. This invokes all the
// top-level feature functions in the feature extractor. Only feature
// functions belonging to the specified phase are invoked.
void ExtractFeatures(const WorkspaceSet &workspaces, const OBJ &object,
ARGS... args, FeatureVector *result) const {
result->reserve(this->feature_types());
// Extract features.
for (int i = 0; i < functions_.size(); ++i) {
functions_[i]->Evaluate(workspaces, object, args..., result);
}
}
private:
// Creates and initializes all feature functions in the feature extractor.
void InitializeFeatureFunctions() override {
// Create all top-level feature functions.
for (int i = 0; i < descriptor().feature_size(); ++i) {
FeatureFunctionDescriptor *fd = mutable_descriptor()->mutable_feature(i);
Function *function = Function::Instantiate(this, fd, "");
functions_.push_back(function);
}
}
// Collect all feature types used in the feature extractor.
void GetFeatureTypes(vector<FeatureType *> *types) const override {
for (int i = 0; i < functions_.size(); ++i) {
functions_[i]->GetFeatureTypes(types);
}
}
// Top-level feature functions (and variables) in the feature extractor.
// Owned.
vector<Function *> functions_;
};
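// Usage sketch (illustrative, not from the original header). Once the feature
// descriptor has been parsed by the GenericFeatureExtractor base class, the
// expected call order is roughly the following; "Sentence" and the
// workspaces.Reset() call are assumptions made only for this example.
//
//   FeatureExtractor<Sentence, int> extractor;
//   extractor.Setup(&context);
//   extractor.Init(&context);
//   extractor.RequestWorkspaces(&registry);
//   workspaces.Reset(registry);
//   extractor.Preprocess(&workspaces, &sentence);
//   FeatureVector features;
//   extractor.ExtractFeatures(workspaces, sentence, focus, &features);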
#define REGISTER_FEATURE_FUNCTION(base, name, component) \
REGISTER_CLASS_COMPONENT(base, name, component)
} // namespace syntaxnet
#endif // SYNTAXNET_FEATURE_EXTRACTOR_H_
| clinc/models | syntaxnet/syntaxnet/feature_extractor.h | C | apache-2.0 | 25,535 |
/*!
* Bootstrap-select v1.13.9 (https://developer.snapappointments.com/bootstrap-select)
*
* Copyright 2012-2019 SnapAppointments, LLC
* Licensed under MIT (https://github.com/snapappointments/bootstrap-select/blob/master/LICENSE)
*/
(function (root, factory) {
if (root === undefined && window !== undefined) root = window;
if (typeof define === 'function' && define.amd) {
// AMD. Register as an anonymous module unless amdModuleId is set
define(["jquery"], function (a0) {
return (factory(a0));
});
} else if (typeof module === 'object' && module.exports) {
// Node. Does not work with strict CommonJS, but
// only CommonJS-like environments that support module.exports,
// like Node.
module.exports = factory(require("jquery"));
} else {
factory(root["jQuery"]);
}
}(this, function (jQuery) {
(function ($) {
$.fn.selectpicker.defaults = {
noneSelectedText: 'Hiçbiri seçilmedi',
noneResultsText: 'Hiçbir sonuç bulunamadı {0}',
countSelectedText: function (numSelected, numTotal) {
return (numSelected == 1) ? '{0} öğe seçildi' : '{0} öğe seçildi';
},
maxOptionsText: function (numAll, numGroup) {
return [
(numAll == 1) ? 'Limit aşıldı (maksimum {n} sayıda öğe )' : 'Limit aşıldı (maksimum {n} sayıda öğe)',
(numGroup == 1) ? 'Grup limiti aşıldı (maksimum {n} sayıda öğe)' : 'Grup limiti aşıldı (maksimum {n} sayıda öğe)'
];
},
selectAllText: 'Tümünü Seç',
deselectAllText: 'Seçiniz',
multipleSeparator: ', '
};
})(jQuery);
}));
//# sourceMappingURL=defaults-tr_TR.js.map | philippegui2/ScolarControl | web/bSelect/dist/js/i18n/defaults-tr_TR.js | JavaScript | apache-2.0 | 1,646 |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.FindUsages;
using Microsoft.CodeAnalysis.Host;
namespace Microsoft.CodeAnalysis.Editor.FindUsages
{
internal interface IFindUsagesService : ILanguageService
{
/// <summary>
/// Finds the references for the symbol at the specific position in the document,
/// pushing the results into the context instance.
/// </summary>
Task FindReferencesAsync(Document document, int position, IFindUsagesContext context);
/// <summary>
        /// Finds the implementations for the symbol at the specific position in the document,
/// pushing the results into the context instance.
/// </summary>
Task FindImplementationsAsync(Document document, int position, IFindUsagesContext context);
}
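    // Illustrative sketch (not part of the original file): the minimal shape a
    // language-specific implementation would take. A real service is exported
    // per language and streams its results through the IFindUsagesContext.
    internal sealed class NoOpFindUsagesService : IFindUsagesService
    {
        public Task FindReferencesAsync(Document document, int position, IFindUsagesContext context)
        {
            // A real implementation would report definitions and references here.
            return Task.CompletedTask;
        }
        public Task FindImplementationsAsync(Document document, int position, IFindUsagesContext context)
        {
            return Task.CompletedTask;
        }
    }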
}
| bbarry/roslyn | src/EditorFeatures/Core/FindUsages/IFindUsagesService.cs | C# | apache-2.0 | 990 |
package org.apereo.cas.trusted.web.support;
import org.apereo.cas.web.support.CookieRetrievingCookieGenerator;
import org.apereo.cas.web.support.CookieValueManager;
/**
* {@link CookieRetrievingCookieGenerator} for trusted device cookies.
*
* @author Daniel Frett
* @since 5.3.0
*/
public class TrustedDeviceCookieRetrievingCookieGenerator extends CookieRetrievingCookieGenerator {
private static final long serialVersionUID = 3555244208199798618L;
public TrustedDeviceCookieRetrievingCookieGenerator(final String name, final String path, final int maxAge,
final boolean secure, final String domain,
final boolean httpOnly,
final CookieValueManager cookieValueManager) {
super(name, path, maxAge, secure, domain, httpOnly, cookieValueManager);
}
}
| robertoschwald/cas | support/cas-server-support-trusted-mfa/src/main/java/org/apereo/cas/trusted/web/support/TrustedDeviceCookieRetrievingCookieGenerator.java | Java | apache-2.0 | 944 |
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.app;
import android.content.SharedPreferences;
import android.os.FileUtils;
import android.os.Looper;
import android.util.Log;
import com.google.android.collect.Maps;
import com.android.internal.util.XmlUtils;
import dalvik.system.BlockGuard;
import org.xmlpull.v1.XmlPullParserException;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.WeakHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import libcore.io.ErrnoException;
import libcore.io.IoUtils;
import libcore.io.Libcore;
import libcore.io.StructStat;
final class SharedPreferencesImpl implements SharedPreferences {
private static final String TAG = "SharedPreferencesImpl";
private static final boolean DEBUG = false;
// Lock ordering rules:
// - acquire SharedPreferencesImpl.this before EditorImpl.this
// - acquire mWritingToDiskLock before EditorImpl.this
private final File mFile;
private final File mBackupFile;
private final int mMode;
private Map<String, Object> mMap; // guarded by 'this'
private int mDiskWritesInFlight = 0; // guarded by 'this'
private boolean mLoaded = false; // guarded by 'this'
private long mStatTimestamp; // guarded by 'this'
private long mStatSize; // guarded by 'this'
private final Object mWritingToDiskLock = new Object();
private static final Object mContent = new Object();
private final WeakHashMap<OnSharedPreferenceChangeListener, Object> mListeners =
new WeakHashMap<OnSharedPreferenceChangeListener, Object>();
SharedPreferencesImpl(File file, int mode) {
mFile = file;
mBackupFile = makeBackupFile(file);
mMode = mode;
mLoaded = false;
mMap = null;
startLoadFromDisk();
}
private void startLoadFromDisk() {
synchronized (this) {
mLoaded = false;
}
new Thread("SharedPreferencesImpl-load") {
public void run() {
synchronized (SharedPreferencesImpl.this) {
loadFromDiskLocked();
}
}
}.start();
}
private void loadFromDiskLocked() {
if (mLoaded) {
return;
}
if (mBackupFile.exists()) {
mFile.delete();
mBackupFile.renameTo(mFile);
}
// Debugging
if (mFile.exists() && !mFile.canRead()) {
Log.w(TAG, "Attempt to read preferences file " + mFile + " without permission");
}
Map map = null;
StructStat stat = null;
try {
stat = Libcore.os.stat(mFile.getPath());
if (mFile.canRead()) {
BufferedInputStream str = null;
try {
str = new BufferedInputStream(
new FileInputStream(mFile), 16*1024);
map = XmlUtils.readMapXml(str);
} catch (XmlPullParserException e) {
Log.w(TAG, "getSharedPreferences", e);
} catch (FileNotFoundException e) {
Log.w(TAG, "getSharedPreferences", e);
} catch (IOException e) {
Log.w(TAG, "getSharedPreferences", e);
} finally {
IoUtils.closeQuietly(str);
}
}
} catch (ErrnoException e) {
}
mLoaded = true;
if (map != null) {
mMap = map;
mStatTimestamp = stat.st_mtime;
mStatSize = stat.st_size;
} else {
mMap = new HashMap<String, Object>();
}
notifyAll();
}
private static File makeBackupFile(File prefsFile) {
return new File(prefsFile.getPath() + ".bak");
}
void startReloadIfChangedUnexpectedly() {
synchronized (this) {
// TODO: wait for any pending writes to disk?
if (!hasFileChangedUnexpectedly()) {
return;
}
startLoadFromDisk();
}
}
// Has the file changed out from under us? i.e. writes that
// we didn't instigate.
private boolean hasFileChangedUnexpectedly() {
synchronized (this) {
if (mDiskWritesInFlight > 0) {
// If we know we caused it, it's not unexpected.
if (DEBUG) Log.d(TAG, "disk write in flight, not unexpected.");
return false;
}
}
final StructStat stat;
try {
/*
* Metadata operations don't usually count as a block guard
* violation, but we explicitly want this one.
*/
BlockGuard.getThreadPolicy().onReadFromDisk();
stat = Libcore.os.stat(mFile.getPath());
} catch (ErrnoException e) {
return true;
}
synchronized (this) {
return mStatTimestamp != stat.st_mtime || mStatSize != stat.st_size;
}
}
public void registerOnSharedPreferenceChangeListener(OnSharedPreferenceChangeListener listener) {
synchronized(this) {
mListeners.put(listener, mContent);
}
}
public void unregisterOnSharedPreferenceChangeListener(OnSharedPreferenceChangeListener listener) {
synchronized(this) {
mListeners.remove(listener);
}
}
private void awaitLoadedLocked() {
if (!mLoaded) {
// Raise an explicit StrictMode onReadFromDisk for this
// thread, since the real read will be in a different
// thread and otherwise ignored by StrictMode.
BlockGuard.getThreadPolicy().onReadFromDisk();
}
while (!mLoaded) {
try {
wait();
} catch (InterruptedException unused) {
}
}
}
public Map<String, ?> getAll() {
synchronized (this) {
awaitLoadedLocked();
//noinspection unchecked
return new HashMap<String, Object>(mMap);
}
}
public String getString(String key, String defValue) {
synchronized (this) {
awaitLoadedLocked();
String v = (String)mMap.get(key);
return v != null ? v : defValue;
}
}
public Set<String> getStringSet(String key, Set<String> defValues) {
synchronized (this) {
awaitLoadedLocked();
Set<String> v = (Set<String>) mMap.get(key);
return v != null ? v : defValues;
}
}
public int getInt(String key, int defValue) {
synchronized (this) {
awaitLoadedLocked();
Integer v = (Integer)mMap.get(key);
return v != null ? v : defValue;
}
}
public long getLong(String key, long defValue) {
synchronized (this) {
awaitLoadedLocked();
Long v = (Long)mMap.get(key);
return v != null ? v : defValue;
}
}
public float getFloat(String key, float defValue) {
synchronized (this) {
awaitLoadedLocked();
Float v = (Float)mMap.get(key);
return v != null ? v : defValue;
}
}
public boolean getBoolean(String key, boolean defValue) {
synchronized (this) {
awaitLoadedLocked();
Boolean v = (Boolean)mMap.get(key);
return v != null ? v : defValue;
}
}
public boolean contains(String key) {
synchronized (this) {
awaitLoadedLocked();
return mMap.containsKey(key);
}
}
public Editor edit() {
// TODO: remove the need to call awaitLoadedLocked() when
// requesting an editor. will require some work on the
// Editor, but then we should be able to do:
//
// context.getSharedPreferences(..).edit().putString(..).apply()
//
// ... all without blocking.
synchronized (this) {
awaitLoadedLocked();
}
return new EditorImpl();
}
// Return value from EditorImpl#commitToMemory()
private static class MemoryCommitResult {
public boolean changesMade; // any keys different?
public List<String> keysModified; // may be null
public Set<OnSharedPreferenceChangeListener> listeners; // may be null
public Map<?, ?> mapToWriteToDisk;
public final CountDownLatch writtenToDiskLatch = new CountDownLatch(1);
public volatile boolean writeToDiskResult = false;
public void setDiskWriteResult(boolean result) {
writeToDiskResult = result;
writtenToDiskLatch.countDown();
}
}
public final class EditorImpl implements Editor {
private final Map<String, Object> mModified = Maps.newHashMap();
private boolean mClear = false;
public Editor putString(String key, String value) {
synchronized (this) {
mModified.put(key, value);
return this;
}
}
public Editor putStringSet(String key, Set<String> values) {
synchronized (this) {
mModified.put(key,
(values == null) ? null : new HashSet<String>(values));
return this;
}
}
public Editor putInt(String key, int value) {
synchronized (this) {
mModified.put(key, value);
return this;
}
}
public Editor putLong(String key, long value) {
synchronized (this) {
mModified.put(key, value);
return this;
}
}
public Editor putFloat(String key, float value) {
synchronized (this) {
mModified.put(key, value);
return this;
}
}
public Editor putBoolean(String key, boolean value) {
synchronized (this) {
mModified.put(key, value);
return this;
}
}
public Editor remove(String key) {
synchronized (this) {
mModified.put(key, this);
return this;
}
}
public Editor clear() {
synchronized (this) {
mClear = true;
return this;
}
}
public void apply() {
final MemoryCommitResult mcr = commitToMemory();
final Runnable awaitCommit = new Runnable() {
public void run() {
try {
mcr.writtenToDiskLatch.await();
} catch (InterruptedException ignored) {
}
}
};
QueuedWork.add(awaitCommit);
Runnable postWriteRunnable = new Runnable() {
public void run() {
awaitCommit.run();
QueuedWork.remove(awaitCommit);
}
};
SharedPreferencesImpl.this.enqueueDiskWrite(mcr, postWriteRunnable);
// Okay to notify the listeners before it's hit disk
// because the listeners should always get the same
// SharedPreferences instance back, which has the
// changes reflected in memory.
notifyListeners(mcr);
}
        // Commits the pending edits into mMap and returns a MemoryCommitResult;
        // its changesMade flag records whether any changes were actually made.
private MemoryCommitResult commitToMemory() {
MemoryCommitResult mcr = new MemoryCommitResult();
synchronized (SharedPreferencesImpl.this) {
// We optimistically don't make a deep copy until
// a memory commit comes in when we're already
// writing to disk.
if (mDiskWritesInFlight > 0) {
// We can't modify our mMap as a currently
// in-flight write owns it. Clone it before
// modifying it.
// noinspection unchecked
mMap = new HashMap<String, Object>(mMap);
}
mcr.mapToWriteToDisk = mMap;
mDiskWritesInFlight++;
boolean hasListeners = mListeners.size() > 0;
if (hasListeners) {
mcr.keysModified = new ArrayList<String>();
mcr.listeners =
new HashSet<OnSharedPreferenceChangeListener>(mListeners.keySet());
}
synchronized (this) {
if (mClear) {
if (!mMap.isEmpty()) {
mcr.changesMade = true;
mMap.clear();
}
mClear = false;
}
for (Map.Entry<String, Object> e : mModified.entrySet()) {
String k = e.getKey();
Object v = e.getValue();
// "this" is the magic value for a removal mutation. In addition,
// setting a value to "null" for a given key is specified to be
// equivalent to calling remove on that key.
if (v == this || v == null) {
if (!mMap.containsKey(k)) {
continue;
}
mMap.remove(k);
} else {
if (mMap.containsKey(k)) {
Object existingValue = mMap.get(k);
if (existingValue != null && existingValue.equals(v)) {
continue;
}
}
mMap.put(k, v);
}
mcr.changesMade = true;
if (hasListeners) {
mcr.keysModified.add(k);
}
}
mModified.clear();
}
}
return mcr;
}
public boolean commit() {
MemoryCommitResult mcr = commitToMemory();
SharedPreferencesImpl.this.enqueueDiskWrite(
mcr, null /* sync write on this thread okay */);
try {
mcr.writtenToDiskLatch.await();
} catch (InterruptedException e) {
return false;
}
notifyListeners(mcr);
return mcr.writeToDiskResult;
}
private void notifyListeners(final MemoryCommitResult mcr) {
if (mcr.listeners == null || mcr.keysModified == null ||
mcr.keysModified.size() == 0) {
return;
}
if (Looper.myLooper() == Looper.getMainLooper()) {
for (int i = mcr.keysModified.size() - 1; i >= 0; i--) {
final String key = mcr.keysModified.get(i);
for (OnSharedPreferenceChangeListener listener : mcr.listeners) {
if (listener != null) {
listener.onSharedPreferenceChanged(SharedPreferencesImpl.this, key);
}
}
}
} else {
// Run this function on the main thread.
ActivityThread.sMainThreadHandler.post(new Runnable() {
public void run() {
notifyListeners(mcr);
}
});
}
}
}
/**
* Enqueue an already-committed-to-memory result to be written
* to disk.
*
* They will be written to disk one-at-a-time in the order
* that they're enqueued.
*
* @param postWriteRunnable if non-null, we're being called
* from apply() and this is the runnable to run after
* the write proceeds. if null (from a regular commit()),
* then we're allowed to do this disk write on the main
* thread (which in addition to reducing allocations and
* creating a background thread, this has the advantage that
* we catch them in userdebug StrictMode reports to convert
* them where possible to apply() ...)
*/
private void enqueueDiskWrite(final MemoryCommitResult mcr,
final Runnable postWriteRunnable) {
final Runnable writeToDiskRunnable = new Runnable() {
public void run() {
synchronized (mWritingToDiskLock) {
writeToFile(mcr);
}
synchronized (SharedPreferencesImpl.this) {
mDiskWritesInFlight--;
}
if (postWriteRunnable != null) {
postWriteRunnable.run();
}
}
};
final boolean isFromSyncCommit = (postWriteRunnable == null);
// Typical #commit() path with fewer allocations, doing a write on
// the current thread.
if (isFromSyncCommit) {
boolean wasEmpty = false;
synchronized (SharedPreferencesImpl.this) {
wasEmpty = mDiskWritesInFlight == 1;
}
if (wasEmpty) {
writeToDiskRunnable.run();
return;
}
}
QueuedWork.singleThreadExecutor().execute(writeToDiskRunnable);
}
private static FileOutputStream createFileOutputStream(File file) {
FileOutputStream str = null;
try {
str = new FileOutputStream(file);
} catch (FileNotFoundException e) {
File parent = file.getParentFile();
if (!parent.mkdir()) {
Log.e(TAG, "Couldn't create directory for SharedPreferences file " + file);
return null;
}
FileUtils.setPermissions(
parent.getPath(),
FileUtils.S_IRWXU|FileUtils.S_IRWXG|FileUtils.S_IXOTH,
-1, -1);
try {
str = new FileOutputStream(file);
} catch (FileNotFoundException e2) {
Log.e(TAG, "Couldn't create SharedPreferences file " + file, e2);
}
}
return str;
}
// Note: must hold mWritingToDiskLock
private void writeToFile(MemoryCommitResult mcr) {
// Rename the current file so it may be used as a backup during the next read
if (mFile.exists()) {
if (!mcr.changesMade) {
// If the file already exists, but no changes were
// made to the underlying map, it's wasteful to
// re-write the file. Return as if we wrote it
// out.
mcr.setDiskWriteResult(true);
return;
}
if (!mBackupFile.exists()) {
if (!mFile.renameTo(mBackupFile)) {
Log.e(TAG, "Couldn't rename file " + mFile
+ " to backup file " + mBackupFile);
mcr.setDiskWriteResult(false);
return;
}
} else {
mFile.delete();
}
}
// Attempt to write the file, delete the backup and return true as atomically as
// possible. If any exception occurs, delete the new file; next time we will restore
// from the backup.
try {
FileOutputStream str = createFileOutputStream(mFile);
if (str == null) {
mcr.setDiskWriteResult(false);
return;
}
XmlUtils.writeMapXml(mcr.mapToWriteToDisk, str);
FileUtils.sync(str);
str.close();
ContextImpl.setFilePermissionsFromMode(mFile.getPath(), mMode, 0);
try {
final StructStat stat = Libcore.os.stat(mFile.getPath());
synchronized (this) {
mStatTimestamp = stat.st_mtime;
mStatSize = stat.st_size;
}
} catch (ErrnoException e) {
// Do nothing
}
// Writing was successful, delete the backup file if there is one.
mBackupFile.delete();
mcr.setDiskWriteResult(true);
return;
} catch (XmlPullParserException e) {
Log.w(TAG, "writeToFile: Got exception:", e);
} catch (IOException e) {
Log.w(TAG, "writeToFile: Got exception:", e);
}
// Clean up an unsuccessfully written file
if (mFile.exists()) {
if (!mFile.delete()) {
Log.e(TAG, "Couldn't clean up partially-written file " + mFile);
}
}
mcr.setDiskWriteResult(false);
}
}
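// Usage sketch (illustrative, not part of the platform source). Applications
// reach this implementation indirectly through Context#getSharedPreferences:
//
//   SharedPreferences prefs =
//       context.getSharedPreferences("settings", Context.MODE_PRIVATE);
//   prefs.edit().putString("name", "value").apply();       // asynchronous disk write
//   boolean ok = prefs.edit().putInt("count", 1).commit();  // blocks for the write result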
| JSDemos/android-sdk-20 | src/android/app/SharedPreferencesImpl.java | Java | apache-2.0 | 22,013 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.jobmaster.slotpool;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.util.clock.Clock;
import javax.annotation.Nonnull;
/** Default slot pool factory. */
public class DefaultSlotPoolServiceFactory extends AbstractSlotPoolServiceFactory {
public DefaultSlotPoolServiceFactory(
@Nonnull Clock clock,
@Nonnull Time rpcTimeout,
@Nonnull Time slotIdleTimeout,
@Nonnull Time batchSlotTimeout) {
super(clock, rpcTimeout, slotIdleTimeout, batchSlotTimeout);
}
@Override
@Nonnull
public SlotPoolService createSlotPoolService(@Nonnull JobID jobId) {
return new SlotPoolImpl(jobId, clock, rpcTimeout, slotIdleTimeout, batchSlotTimeout);
}
}
| tillrohrmann/flink | flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/slotpool/DefaultSlotPoolServiceFactory.java | Java | apache-2.0 | 1,626 |
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=US-ASCII">
<title>Class bad_function_call</title>
<link rel="stylesheet" href="../../../../doc/src/boostbook.css" type="text/css">
<meta name="generator" content="DocBook XSL Stylesheets V1.78.1">
<link rel="home" href="../../index.html" title="The Boost C++ Libraries BoostBook Documentation Subset">
<link rel="up" href="../../boost_typeerasure/reference.html#header.boost.type_erasure.exception_hpp" title="Header <boost/type_erasure/exception.hpp>">
<link rel="prev" href="derived.html" title="Struct template derived">
<link rel="next" href="bad_any_cast.html" title="Class bad_any_cast">
</head>
<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
<table cellpadding="2" width="100%"><tr>
<td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../boost.png"></td>
<td align="center"><a href="../../../../index.html">Home</a></td>
<td align="center"><a href="../../../../libs/libraries.htm">Libraries</a></td>
<td align="center"><a href="http://www.boost.org/users/people.html">People</a></td>
<td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td>
<td align="center"><a href="../../../../more/index.htm">More</a></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="derived.html"><img src="../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../../boost_typeerasure/reference.html#header.boost.type_erasure.exception_hpp"><img src="../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../index.html"><img src="../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="bad_any_cast.html"><img src="../../../../doc/src/images/next.png" alt="Next"></a>
</div>
<div class="refentry">
<a name="boost.type_erasure.bad_function_call"></a><div class="titlepage"></div>
<div class="refnamediv">
<h2><span class="refentrytitle">Class bad_function_call</span></h2>
<p>boost::type_erasure::bad_function_call</p>
</div>
<h2 xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" class="refsynopsisdiv-title">Synopsis</h2>
<div xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" class="refsynopsisdiv"><pre class="synopsis"><span class="comment">// In header: <<a class="link" href="../../boost_typeerasure/reference.html#header.boost.type_erasure.exception_hpp" title="Header <boost/type_erasure/exception.hpp>">boost/type_erasure/exception.hpp</a>>
</span>
<span class="keyword">class</span> <a class="link" href="bad_function_call.html" title="Class bad_function_call">bad_function_call</a> <span class="special">:</span> <span class="keyword">public</span> <span class="identifier">invalid_argument</span> <span class="special">{</span>
<span class="keyword">public</span><span class="special">:</span>
<span class="comment">// <a class="link" href="bad_function_call.html#boost.type_erasure.bad_function_callconstruct-copy-destruct">construct/copy/destruct</a></span>
<a class="link" href="bad_function_call.html#idp308165568-bb"><span class="identifier">bad_function_call</span></a><span class="special">(</span><span class="special">)</span><span class="special">;</span>
<span class="special">}</span><span class="special">;</span></pre></div>
<div class="refsect1">
<a name="idp478916896"></a><h2>Description</h2>
<p>Exception thrown when the arguments to a primitive concept are incorrect.</p>
<p><span class="bold"><strong>See Also:</strong></span></p>
<p> <a class="link" href="call_idp150247888.html" title="Function call">call</a>, <a class="link" href="require_match.html" title="Function require_match">require_match</a> </p>
<p>
</p>
<div class="refsect2">
<a name="idp478920752"></a><h3>
<a name="boost.type_erasure.bad_function_callconstruct-copy-destruct"></a><code class="computeroutput">bad_function_call</code>
public
construct/copy/destruct</h3>
<div class="orderedlist"><ol class="orderedlist" type="1"><li class="listitem"><pre class="literallayout"><a name="idp308165568-bb"></a><span class="identifier">bad_function_call</span><span class="special">(</span><span class="special">)</span><span class="special">;</span></pre></li></ol></div>
</div>
</div>
</div>
<table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr>
<td align="left"></td>
<td align="right"><div class="copyright-footer">Copyright © 2011-2013 Steven Watanabe<p>
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>)
</p>
</div></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="derived.html"><img src="../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../../boost_typeerasure/reference.html#header.boost.type_erasure.exception_hpp"><img src="../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../index.html"><img src="../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="bad_any_cast.html"><img src="../../../../doc/src/images/next.png" alt="Next"></a>
</div>
</body>
</html>
| biospi/seamass-windeps | src/boost_1_57_0/doc/html/boost/type_erasure/bad_function_call.html | HTML | apache-2.0 | 5,282 |
/*
* Copyright (C) 2014 Michael Pardo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ollie.internal.codegen.validator;
import javax.lang.model.element.Element;
public class MigrationValidator implements Validator {
@Override
public boolean validate(Element enclosingElement, Element element) {
return true;
}
}
| pardom/Ollie | compiler/src/main/java/ollie/internal/codegen/validator/MigrationValidator.java | Java | apache-2.0 | 844 |
/*
* Copyright (c) 2007, 2016, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*/
/*
* Copyright 2001-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Id: AbsoluteLocationPath.java,v 1.2.4.1 2005/09/12 09:44:03 pvedula Exp $
*/
package com.sun.org.apache.xalan.internal.xsltc.compiler;
import com.sun.org.apache.bcel.internal.generic.ALOAD;
import com.sun.org.apache.bcel.internal.generic.ASTORE;
import com.sun.org.apache.bcel.internal.generic.ConstantPoolGen;
import com.sun.org.apache.bcel.internal.generic.INVOKEINTERFACE;
import com.sun.org.apache.bcel.internal.generic.INVOKESPECIAL;
import com.sun.org.apache.bcel.internal.generic.InstructionList;
import com.sun.org.apache.bcel.internal.generic.LocalVariableGen;
import com.sun.org.apache.bcel.internal.generic.NEW;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.ClassGenerator;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.MethodGenerator;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.NodeType;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.Type;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.TypeCheckError;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.Util;
/**
* @author Jacek Ambroziak
* @author Santiago Pericas-Geertsen
*/
final class AbsoluteLocationPath extends Expression {
private Expression _path; // may be null
public AbsoluteLocationPath() {
_path = null;
}
public AbsoluteLocationPath(Expression path) {
_path = path;
if (path != null) {
_path.setParent(this);
}
}
public void setParser(Parser parser) {
super.setParser(parser);
if (_path != null) {
_path.setParser(parser);
}
}
public Expression getPath() {
return(_path);
}
public String toString() {
return "AbsoluteLocationPath(" +
(_path != null ? _path.toString() : "null") + ')';
}
public Type typeCheck(SymbolTable stable) throws TypeCheckError {
if (_path != null) {
final Type ptype = _path.typeCheck(stable);
if (ptype instanceof NodeType) { // promote to node-set
_path = new CastExpr(_path, Type.NodeSet);
}
}
return _type = Type.NodeSet;
}
public void translate(ClassGenerator classGen, MethodGenerator methodGen) {
final ConstantPoolGen cpg = classGen.getConstantPool();
final InstructionList il = methodGen.getInstructionList();
if (_path != null) {
final int initAI = cpg.addMethodref(ABSOLUTE_ITERATOR,
"<init>",
"("
+ NODE_ITERATOR_SIG
+ ")V");
// Compile relative path iterator(s)
//
// Backwards branches are prohibited if an uninitialized object is
// on the stack by section 4.9.4 of the JVM Specification, 2nd Ed.
// We don't know whether this code might contain backwards branches,
// so we mustn't create the new object until after we've created
// this argument to its constructor. Instead we calculate the
// value of the argument to the constructor first, store it in
// a temporary variable, create the object and reload the argument
// from the temporary to avoid the problem.
_path.translate(classGen, methodGen);
LocalVariableGen relPathIterator
= methodGen.addLocalVariable("abs_location_path_tmp",
Util.getJCRefType(NODE_ITERATOR_SIG),
null, null);
relPathIterator.setStart(
il.append(new ASTORE(relPathIterator.getIndex())));
// Create new AbsoluteIterator
il.append(new NEW(cpg.addClass(ABSOLUTE_ITERATOR)));
il.append(DUP);
relPathIterator.setEnd(
il.append(new ALOAD(relPathIterator.getIndex())));
// Initialize AbsoluteIterator with iterator from the stack
il.append(new INVOKESPECIAL(initAI));
}
else {
final int gitr = cpg.addInterfaceMethodref(DOM_INTF,
"getIterator",
"()"+NODE_ITERATOR_SIG);
il.append(methodGen.loadDOM());
il.append(new INVOKEINTERFACE(gitr, 1));
}
}
}
| shun634501730/java_source_cn | src_en/com/sun/org/apache/xalan/internal/xsltc/compiler/AbsoluteLocationPath.java | Java | apache-2.0 | 5,303 |
package com.sage42.android.view_samples.ui;
import android.annotation.TargetApi;
import android.app.Activity;
import android.os.Build;
import android.os.Bundle;
import android.os.CountDownTimer;
import android.view.MenuItem;
import com.sage42.android.view.ui.SegmentedProgressBar;
import com.sage42.android.view_samples.R;
/**
* Copyright (C) 2013- Sage 42 App Sdn Bhd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @author Corey Scott (corey.scott@sage42.com)
*
*/
public class SegmentedProgressBarActivity extends Activity
{
private static final int TOTAL_TIME_IN_SEC = 20;
private static final int ONE_SECOND_IN_MS = 1000;
// view elements
private SegmentedProgressBar mProgressBar1;
// a countdown timer to provide a little action
private CountDownTimer mTimerCountUp;
@Override
protected void onCreate(final Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
// wire up the layout
this.setContentView(R.layout.segmented_progress_bar_activity);
// wire up the ui elements
this.mProgressBar1 = (SegmentedProgressBar) this.findViewById(R.id.segmented_bar1);
// enable the back btn on newer phones
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.HONEYCOMB)
{
this.enableUpButton();
}
}
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
private void enableUpButton()
{
this.getActionBar().setDisplayHomeAsUpEnabled(true);
}
@Override
public boolean onOptionsItemSelected(final MenuItem item)
{
switch (item.getItemId())
{
case android.R.id.home:
// Respond to the action bar's Up/Home button
this.finish();
return true;
default:
return super.onOptionsItemSelected(item);
}
}
@Override
@SuppressWarnings("synthetic-access")
protected void onResume()
{
super.onResume();
// start some timers so that things move
this.mTimerCountUp = new CountDownTimer(TOTAL_TIME_IN_SEC * ONE_SECOND_IN_MS, ONE_SECOND_IN_MS)
{
@Override
public void onTick(final long millisUntilFinished)
{
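                // Elapsed fraction expressed as a percentage; e.g. with 5s remaining
                // of the 20s total (assumed values): ((20 - 5) / 20) * 100 = 75.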
final double progress = ((TOTAL_TIME_IN_SEC - (millisUntilFinished / (double) ONE_SECOND_IN_MS)) / TOTAL_TIME_IN_SEC) * 100;
SegmentedProgressBarActivity.this.mProgressBar1.setProgress((int) progress);
}
@Override
public void onFinish()
{
SegmentedProgressBarActivity.this.mProgressBar1
.setProgress(SegmentedProgressBarActivity.this.mProgressBar1.getMax());
}
}.start();
}
@Override
protected void onPause()
{
super.onPause();
// stop any running timers
// there are needed to be clear and be sure that the timers don't cause exceptions when this activity is not in focus
if (this.mTimerCountUp != null)
{
this.mTimerCountUp.cancel();
}
}
}
| rameshvoltella/AndroidViewUtils | samples/src/main/java/com/sage42/android/view_samples/ui/SegmentedProgressBarActivity.java | Java | apache-2.0 | 3,681 |
// +build integration,perftest
package main
import (
"flag"
"fmt"
"net/http"
"os"
"strings"
"time"
"github.com/aws/aws-sdk-go/service/s3/s3manager"
)
type Config struct {
Bucket string
Filename string
Size int64
TempDir string
LogVerbose bool
SDK SDKConfig
Client ClientConfig
}
func (c *Config) SetupFlags(prefix string, flagset *flag.FlagSet) {
flagset.StringVar(&c.Bucket, "bucket", "",
"The S3 bucket `name` to upload the object to.")
flagset.StringVar(&c.Filename, "file", "",
"The `path` of the local file to upload.")
flagset.Int64Var(&c.Size, "size", 0,
"The S3 object size in bytes to upload")
flagset.StringVar(&c.TempDir, "temp", os.TempDir(), "location to create temporary files")
flagset.BoolVar(&c.LogVerbose, "verbose", false,
"The output log will include verbose request information")
c.SDK.SetupFlags(prefix, flagset)
c.Client.SetupFlags(prefix, flagset)
}
func (c *Config) Validate() error {
var errs Errors
if len(c.Bucket) == 0 || (c.Size <= 0 && c.Filename == "") {
errs = append(errs, fmt.Errorf("bucket and filename/size are required"))
}
if err := c.SDK.Validate(); err != nil {
errs = append(errs, err)
}
if err := c.Client.Validate(); err != nil {
errs = append(errs, err)
}
if len(errs) != 0 {
return errs
}
return nil
}
type SDKConfig struct {
PartSize int64
Concurrency int
WithUnsignedPayload bool
WithContentMD5 bool
ExpectContinue bool
BufferProvider s3manager.ReadSeekerWriteToProvider
}
func (c *SDKConfig) SetupFlags(prefix string, flagset *flag.FlagSet) {
prefix += "sdk."
flagset.Int64Var(&c.PartSize, prefix+"part-size", s3manager.DefaultUploadPartSize,
"Specifies the `size` of parts of the object to upload.")
flagset.IntVar(&c.Concurrency, prefix+"concurrency", s3manager.DefaultUploadConcurrency,
"Specifies the number of parts to upload `at once`.")
flagset.BoolVar(&c.WithUnsignedPayload, prefix+"unsigned", false,
"Specifies if the SDK will use UNSIGNED_PAYLOAD for part SHA256 in request signature.")
flagset.BoolVar(&c.WithContentMD5, prefix+"content-md5", true,
"Specifies if the SDK should compute the content md5 header for S3 uploads.")
flagset.BoolVar(&c.ExpectContinue, prefix+"100-continue", true,
"Specifies if the SDK requests will wait for the 100 continue response before sending request payload.")
}
func (c *SDKConfig) Validate() error {
return nil
}
type ClientConfig struct {
KeepAlive bool
Timeouts Timeouts
MaxIdleConns int
MaxIdleConnsPerHost int
}
func (c *ClientConfig) SetupFlags(prefix string, flagset *flag.FlagSet) {
prefix += "client."
flagset.BoolVar(&c.KeepAlive, prefix+"http-keep-alive", true,
"Specifies if HTTP keep alive is enabled.")
defTR := http.DefaultTransport.(*http.Transport)
flagset.IntVar(&c.MaxIdleConns, prefix+"idle-conns", defTR.MaxIdleConns,
"Specifies max idle connection pool size.")
flagset.IntVar(&c.MaxIdleConnsPerHost, prefix+"idle-conns-host", http.DefaultMaxIdleConnsPerHost,
"Specifies max idle connection pool per host, will be truncated by idle-conns.")
c.Timeouts.SetupFlags(prefix, flagset)
}
func (c *ClientConfig) Validate() error {
var errs Errors
if err := c.Timeouts.Validate(); err != nil {
errs = append(errs, err)
}
if len(errs) != 0 {
return errs
}
return nil
}
type Timeouts struct {
Connect time.Duration
TLSHandshake time.Duration
ExpectContinue time.Duration
ResponseHeader time.Duration
}
func (c *Timeouts) SetupFlags(prefix string, flagset *flag.FlagSet) {
prefix += "timeout."
flagset.DurationVar(&c.Connect, prefix+"connect", 30*time.Second,
"The `timeout` connecting to the remote host.")
defTR := http.DefaultTransport.(*http.Transport)
flagset.DurationVar(&c.TLSHandshake, prefix+"tls", defTR.TLSHandshakeTimeout,
"The `timeout` waiting for the TLS handshake to complete.")
	flagset.DurationVar(&c.ExpectContinue, prefix+"expect-continue", defTR.ExpectContinueTimeout,
		"The `timeout` waiting for the server's 100-continue response after the request headers are sent.")
	flagset.DurationVar(&c.ResponseHeader, prefix+"response-header", defTR.ResponseHeaderTimeout,
		"The `timeout` waiting for the response headers after the request has been written.")
}
func (c *Timeouts) Validate() error {
return nil
}
type Errors []error
func (es Errors) Error() string {
var buf strings.Builder
for _, e := range es {
buf.WriteString(e.Error())
}
return buf.String()
}
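// Illustrative sketch (not part of the original file): wiring Config into a
// flag.FlagSet and validating it. The flag-set name and arguments here are
// hypothetical.
func exampleParseConfig(args []string) (*Config, error) {
	var cfg Config
	fs := flag.NewFlagSet("s3UploadManager", flag.ContinueOnError)
	cfg.SetupFlags("", fs)
	if err := fs.Parse(args); err != nil {
		return nil, err
	}
	if err := cfg.Validate(); err != nil {
		return nil, err
	}
	return &cfg, nil
}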
| Miciah/origin | vendor/github.com/aws/aws-sdk-go/awstesting/integration/performance/s3UploadManager/config.go | GO | apache-2.0 | 4,444 |
package archive // import "github.com/ory/dockertest/v3/docker/pkg/archive"
import (
"syscall"
"time"
)
func timeToTimespec(time time.Time) (ts syscall.Timespec) {
if time.IsZero() {
// Return UTIME_OMIT special value
ts.Sec = 0
ts.Nsec = ((1 << 30) - 2)
return
}
return syscall.NsecToTimespec(time.UnixNano())
}
| drasko/mainflux | vendor/github.com/ory/dockertest/v3/docker/pkg/archive/time_linux.go | GO | apache-2.0 | 328 |
#include <stdlib.h>
#include <stdio.h>
#include "uv.h"
#include "connection_dispatcher.h"
#include "connection_consumer.h"
static struct sockaddr_in listen_addr;
void ipc_close_cb(uv_handle_t* handle)
{
struct ipc_peer_ctx* ctx;
ctx = container_of(handle, struct ipc_peer_ctx, peer_handle);
free(ctx);
}
void ipc_write_cb(uv_write_t* req, int status)
{
struct ipc_peer_ctx* ctx;
ctx = container_of(req, struct ipc_peer_ctx, write_req);
uv_close((uv_handle_t*) &ctx->peer_handle, ipc_close_cb);
}
void ipc_connection_cb(uv_stream_t* ipc_pipe, int status)
{
int rc;
struct ipc_server_ctx* sc;
struct ipc_peer_ctx* pc;
uv_loop_t* loop;
uv_buf_t buf;
loop = ipc_pipe->loop;
buf = uv_buf_init("PING", 4);
sc = container_of(ipc_pipe, struct ipc_server_ctx, ipc_pipe);
pc = calloc(1, sizeof(*pc));
//ASSERT(pc != NULL);
if (ipc_pipe->type == UV_TCP) {
rc = uv_tcp_init(loop, (uv_tcp_t*) &pc->peer_handle);
if (sc->tcp_nodelay) {
rc = uv_tcp_nodelay((uv_tcp_t*) &pc->peer_handle, 1);
}
}
else if (ipc_pipe->type == UV_NAMED_PIPE)
rc = uv_pipe_init(loop, (uv_pipe_t*) &pc->peer_handle, 1);
rc = uv_accept(ipc_pipe, (uv_stream_t*) &pc->peer_handle);
rc = uv_write2(&pc->write_req,
(uv_stream_t*) &pc->peer_handle,
&buf,
1,
(uv_stream_t*) &sc->server_handle,
ipc_write_cb);
if (--sc->num_connects == 0)
uv_close((uv_handle_t*) ipc_pipe, NULL);
}
extern void print_configuration();
/* Set up an IPC pipe server that hands out listen sockets to the worker
* threads. It's kind of cumbersome for such a simple operation, maybe we
* should revive uv_import() and uv_export().
*/
void start_connection_dispatching(uv_handle_type type, unsigned int num_servers, struct server_ctx* servers, char* listen_address, int listen_port, bool tcp_nodelay, int listen_backlog)
{
int rc;
struct ipc_server_ctx ctx;
uv_loop_t* loop;
unsigned int i;
loop = uv_default_loop();
ctx.num_connects = num_servers;
ctx.tcp_nodelay = tcp_nodelay;
if (type == UV_TCP)
{
uv_ip4_addr(listen_address, listen_port, &listen_addr);
rc = uv_tcp_init(loop, (uv_tcp_t*) &ctx.server_handle);
if (ctx.tcp_nodelay) {
rc = uv_tcp_nodelay((uv_tcp_t*) &ctx.server_handle, 1);
}
rc = uv_tcp_bind((uv_tcp_t*) &ctx.server_handle, (const struct sockaddr*)&listen_addr, 0);
print_configuration();
printf("Listening...\n");
}
rc = uv_pipe_init(loop, &ctx.ipc_pipe, 1);
rc = uv_pipe_bind(&ctx.ipc_pipe, "HAYWIRE_CONNECTION_DISPATCH_PIPE_NAME");
rc = uv_listen((uv_stream_t*) &ctx.ipc_pipe, listen_backlog, ipc_connection_cb);
for (i = 0; i < num_servers; i++)
uv_sem_post(&servers[i].semaphore);
rc = uv_run(loop, UV_RUN_DEFAULT);
uv_close((uv_handle_t*) &ctx.server_handle, NULL);
rc = uv_run(loop, UV_RUN_DEFAULT);
for (i = 0; i < num_servers; i++)
uv_sem_wait(&servers[i].semaphore);
}
| darcythomas/Haywire | src/haywire/connection_dispatcher.c | C | apache-2.0 | 3,231 |
#!/bin/bash
set -e
json_file="json_file/ert.json"
cat > networking_poe_cert_filter <<-'EOF'
.properties.properties.".properties.networking_point_of_entry.{{pcf_ert_networking_pointofentry}}.ssl_rsa_certificate".value = {
"cert_pem": $cert_pem,
"private_key_pem": $private_key_pem
}
EOF
jq \
--arg cert_pem "$pcf_ert_ssl_cert" \
--arg private_key_pem "$pcf_ert_ssl_key" \
--from-file networking_poe_cert_filter \
$json_file > config.json
mv config.json $json_file
# Remove .properties.networking_point_of_entry.external_ssl.ssl_rsa_certificate
# added by generic configure-json script
jq \
'del(.properties.properties.".properties.networking_point_of_entry.external_ssl.ssl_rsa_certificate")' \
$json_file > config.json
mv config.json $json_file
cat > elb_filter <<-'EOF'
.jobs.ha_proxy = {
"instance_type": {
"id": "automatic"
},
"instances": $haproxy_instance_count | tonumber,
"elb_names": [
$haproxy_elb_name
]
} |
.jobs.router = {
"instance_type": {
"id": "automatic"
},
"instances": $router_instance_count | tonumber,
"elb_names": [
$router_elb_name
]
}
EOF
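# For illustration only (assumed values): with haproxy_instance_count=1 and
# haproxy_elb_name="demo-web-lb", the filter above yields a fragment like
#   "ha_proxy": {
#     "instance_type": { "id": "automatic" },
#     "instances": 1,
#     "elb_names": ["demo-web-lb"]
#   }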
router_instance_count="$(jq '.jobs.router.instances' $json_file)"
if [[ "${pcf_ert_networking_pointofentry}" == "haproxy" ]]; then
jq \
--arg haproxy_instance_count 1 \
--arg haproxy_elb_name "${terraform_prefix}-web-lb" \
--arg router_instance_count ${router_instance_count} \
--arg router_elb_name "" \
--from-file elb_filter \
$json_file > config.json
mv config.json $json_file
jq \
'del(.jobs.router.elb_names)' \
$json_file > config.json
mv config.json $json_file
else
jq \
--arg haproxy_instance_count 0 \
--arg haproxy_elb_name "" \
--arg router_instance_count ${router_instance_count} \
--arg router_elb_name "${terraform_prefix}-web-lb" \
--from-file elb_filter \
$json_file > config.json
mv config.json $json_file
jq \
'del(.jobs.ha_proxy.elb_names)' \
$json_file > config.json
mv config.json $json_file
fi
sed -i \
-e "s%{{pcf_ert_networking_pointofentry}}%${pcf_ert_networking_pointofentry}%g" \
$json_file
if [[ "${azure_access_key}" != "" ]]; then
# Use prefix to strip down a Storage Account Prefix String
env_short_name=$(echo ${terraform_prefix} | tr -d "-" | tr -d "_" | tr -d "[0-9]")
env_short_name=$(echo ${env_short_name:0:10})
cat ${json_file} | jq \
--arg azure_access_key "${azure_access_key}" \
--arg azure_account_name "${env_short_name}${azure_account_name}" \
--arg azure_buildpacks_container "${azure_buildpacks_container}" \
--arg azure_droplets_container "${azure_droplets_container}" \
--arg azure_packages_container "${azure_packages_container}" \
--arg azure_resources_container "${azure_resources_container}" \
'
.properties.properties |= .+ {
".properties.system_blobstore.external_azure.access_key": {
"value": {
"secret": $azure_access_key
}
},
".properties.system_blobstore": {
"value": "external_azure"
},
".properties.system_blobstore.external_azure.account_name": {
"value": $azure_account_name
},
".properties.system_blobstore.external_azure.buildpacks_container": {
"value": $azure_buildpacks_container
},
".properties.system_blobstore.external_azure.droplets_container": {
"value": $azure_droplets_container
},
".properties.system_blobstore.external_azure.packages_container": {
"value": $azure_packages_container
},
".properties.system_blobstore.external_azure.resources_container": {
"value": $azure_resources_container
}
}
' > /tmp/ert.json
mv /tmp/ert.json ${json_file}
fi
| cah-josephgeorge/pcf-pipelines | tasks/install-ert/scripts/iaas-specific-config/azure/run.sh | Shell | apache-2.0 | 3,785 |
/*
* Copyright 2013 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.overlord.rtgov.ui.server.services;
import java.util.Set;
import javax.ws.rs.ApplicationPath;
import javax.ws.rs.core.Application;
import org.overlord.rtgov.common.util.BeanResolverUtil;
import org.overlord.rtgov.ui.client.shared.services.IServicesService;
import org.overlord.rtgov.ui.client.shared.services.ISituationsService;
/**
* Services application.
*/
@ApplicationPath("/rest")
public class RTGovApplication extends Application {
private java.util.Set<Object> _singletons=new java.util.HashSet<Object>();
public RTGovApplication() {
IServicesService services = BeanResolverUtil.getBean(IServicesService.class);
if (services != null) {
_singletons.add(services);
}
ISituationsService situations = BeanResolverUtil.getBean(ISituationsService.class);
if (situations != null) {
_singletons.add(situations);
}
}
public Set<Object> getSingletons() {
return (_singletons);
}
} | jorgemoralespou/rtgov | ui/overlord-rtgov-ui-base/src/main/java/org/overlord/rtgov/ui/server/services/RTGovApplication.java | Java | apache-2.0 | 1,604 |
/*
Copyright 2021 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package controller
import "net/http"
// Interface defines the base of a controller managed by a controller manager
type Interface interface {
// Name returns the canonical name of the controller.
Name() string
}
// Debuggable defines a controller that allows the controller manager
// to expose a debugging handler for the controller
//
// If a controller implements Debuggable, and the returned handler is
// not nil, the controller manager can mount the handler during startup.
type Debuggable interface {
// DebuggingHandler returns a Handler that expose debugging information
// for the controller, or nil if a debugging handler is not desired.
//
// The handler will be accessible at "/debug/controllers/{controllerName}/".
DebuggingHandler() http.Handler
}
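// Illustrative sketch (not part of the original file): a minimal controller
// satisfying both Interface and Debuggable. The type name and handler body are
// hypothetical.
type exampleController struct{}
func (c *exampleController) Name() string { return "example-controller" }
func (c *exampleController) DebuggingHandler() http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// Mounted by the controller manager at "/debug/controllers/example-controller/".
		_, _ = w.Write([]byte("ok\n"))
	})
}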
| x13n/kubernetes | staging/src/k8s.io/controller-manager/controller/interfaces.go | GO | apache-2.0 | 1,344 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.storage.am.common.ophelpers;
public class SlotOffTupleOff implements Comparable<SlotOffTupleOff> {
public int tupleIndex;
public int slotOff;
public int tupleOff;
public SlotOffTupleOff(int tupleIndex, int slotOff, int recOff) {
this.tupleIndex = tupleIndex;
this.slotOff = slotOff;
this.tupleOff = recOff;
}
@Override
public int compareTo(SlotOffTupleOff o) {
return tupleOff - o.tupleOff;
}
@Override
public String toString() {
return tupleIndex + " " + slotOff + " " + tupleOff;
}
}
| apache/incubator-asterixdb-hyracks | hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/ophelpers/SlotOffTupleOff.java | Java | apache-2.0 | 1,347 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.yarn;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.lang.reflect.Field;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
import org.apache.avro.Schema;
import org.apache.commons.io.FileUtils;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.test.TestingServer;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.MiniYARNCluster;
import org.apache.helix.HelixManager;
import org.apache.helix.HelixManagerFactory;
import org.apache.helix.InstanceType;
import org.apache.helix.model.Message;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableMap;
import com.google.common.eventbus.EventBus;
import com.google.common.io.Closer;
import com.google.common.util.concurrent.Service;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigValueFactory;
import org.apache.gobblin.cluster.GobblinClusterConfigurationKeys;
import org.apache.gobblin.cluster.GobblinHelixConstants;
import org.apache.gobblin.cluster.GobblinHelixMultiManager;
import org.apache.gobblin.cluster.HelixMessageTestBase;
import org.apache.gobblin.cluster.HelixUtils;
import org.apache.gobblin.cluster.TestHelper;
import org.apache.gobblin.cluster.TestShutdownMessageHandlerFactory;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.DynamicConfigGenerator;
import org.apache.gobblin.metrics.kafka.KafkaAvroSchemaRegistry;
import org.apache.gobblin.metrics.kafka.KafkaSchemaRegistry;
import org.apache.gobblin.runtime.app.ServiceBasedAppLauncher;
import org.apache.gobblin.testing.AssertWithBackoff;
import static org.mockito.Mockito.times;
/**
* Unit tests for {@link GobblinYarnAppLauncher}.
*
* <p>
* This class uses a {@link TestingServer} as an embedded ZooKeeper server for testing. The Curator
* framework is used to provide a ZooKeeper client. This class also uses the {@link HelixManager} to
* act as a testing Helix controller to receive the ApplicationMaster shutdown request message. It
* also starts a {@link MiniYARNCluster} so submission of a Gobblin Yarn application can be tested.
* A {@link YarnClient} is used to work with the {@link MiniYARNCluster}.
* </p>
*
* @author Yinan Li
*/
@Test(groups = { "gobblin.yarn" }, singleThreaded=true)
public class GobblinYarnAppLauncherTest implements HelixMessageTestBase {
  private static final String MANAGED_HELIX_CLUSTER_NAME = "GobblinYarnAppLauncherTestManagedHelix";
public static final String TEST_HELIX_INSTANCE_NAME_MANAGED = HelixUtils.getHelixInstanceName("TestInstance", 1);
public static final String DYNAMIC_CONF_PATH = "dynamic.conf";
public static final String YARN_SITE_XML_PATH = "yarn-site.xml";
final Logger LOG = LoggerFactory.getLogger(GobblinYarnAppLauncherTest.class);
private YarnClient yarnClient;
private CuratorFramework curatorFramework;
private Config config;
private Config configManagedHelix;
private HelixManager helixManager;
private HelixManager helixManagerManagedHelix;
private GobblinYarnAppLauncher gobblinYarnAppLauncher;
private GobblinYarnAppLauncher gobblinYarnAppLauncherManagedHelix;
private ApplicationId applicationId;
private final Closer closer = Closer.create();
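  /**
   * Reflectively writes an entry into the JVM's in-memory copy of the environment variables.
   * This relies on JDK internals (the map backing {@link System#getenv()}), so it is a
   * test-only convenience that may not work on every JVM implementation.
   */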
private static void setEnv(String key, String value) {
try {
Map<String, String> env = System.getenv();
Class<?> cl = env.getClass();
Field field = cl.getDeclaredField("m");
field.setAccessible(true);
Map<String, String> writableEnv = (Map<String, String>) field.get(env);
writableEnv.put(key, value);
} catch (Exception e) {
throw new IllegalStateException("Failed to set environment variable", e);
}
}
@BeforeClass
public void setUp() throws Exception {
// Set java home in environment since it isn't set on some systems
String javaHome = System.getProperty("java.home");
setEnv("JAVA_HOME", javaHome);
final YarnConfiguration clusterConf = new YarnConfiguration();
clusterConf.set("yarn.resourcemanager.connect.max-wait.ms", "10000");
MiniYARNCluster miniYARNCluster = this.closer.register(new MiniYARNCluster("TestCluster", 1, 1, 1));
miniYARNCluster.init(clusterConf);
miniYARNCluster.start();
// YARN client should not be started before the Resource Manager is up
AssertWithBackoff.create().logger(LOG).timeoutMs(10000)
.assertTrue(new Predicate<Void>() {
@Override public boolean apply(Void input) {
return !clusterConf.get(YarnConfiguration.RM_ADDRESS).contains(":0");
}
}, "Waiting for RM");
this.yarnClient = this.closer.register(YarnClient.createYarnClient());
this.yarnClient.init(clusterConf);
this.yarnClient.start();
// Use a random ZK port
TestingServer testingZKServer = this.closer.register(new TestingServer(-1));
LOG.info("Testing ZK Server listening on: " + testingZKServer.getConnectString());
// the zk port is dynamically configured
try (PrintWriter pw = new PrintWriter(DYNAMIC_CONF_PATH)) {
File dir = new File("target/dummydir");
// dummy directory specified in configuration
dir.mkdir();
pw.println("gobblin.cluster.zk.connection.string=\"" + testingZKServer.getConnectString() + "\"");
pw.println("jobconf.fullyQualifiedPath=\"" + dir.getAbsolutePath() + "\"");
}
// YARN config is dynamic and needs to be passed to other processes
try (OutputStream os = new FileOutputStream(new File(YARN_SITE_XML_PATH))) {
clusterConf.writeXml(os);
}
this.curatorFramework = TestHelper.createZkClient(testingZKServer, this.closer);
URL url = GobblinYarnAppLauncherTest.class.getClassLoader()
.getResource(GobblinYarnAppLauncherTest.class.getSimpleName() + ".conf");
    Assert.assertNotNull(url, "Could not find resource " + GobblinYarnAppLauncherTest.class.getSimpleName() + ".conf");
this.config = ConfigFactory.parseURL(url)
.withValue("gobblin.cluster.zk.connection.string",
ConfigValueFactory.fromAnyRef(testingZKServer.getConnectString()))
.resolve();
String zkConnectionString = this.config.getString(GobblinClusterConfigurationKeys.ZK_CONNECTION_STRING_KEY);
this.helixManager = HelixManagerFactory.getZKHelixManager(
this.config.getString(GobblinClusterConfigurationKeys.HELIX_CLUSTER_NAME_KEY), TestHelper.TEST_HELIX_INSTANCE_NAME,
InstanceType.CONTROLLER, zkConnectionString);
this.gobblinYarnAppLauncher = new GobblinYarnAppLauncher(this.config, clusterConf);
this.configManagedHelix = ConfigFactory.parseURL(url)
.withValue("gobblin.cluster.zk.connection.string",
ConfigValueFactory.fromAnyRef(testingZKServer.getConnectString()))
.withValue(GobblinClusterConfigurationKeys.HELIX_CLUSTER_NAME_KEY, ConfigValueFactory.fromAnyRef(MANAGED_HELIX_CLUSTER_NAME))
.withValue(GobblinClusterConfigurationKeys.HELIX_INSTANCE_NAME_KEY, ConfigValueFactory.fromAnyRef(TEST_HELIX_INSTANCE_NAME_MANAGED))
.withValue(GobblinClusterConfigurationKeys.IS_HELIX_CLUSTER_MANAGED, ConfigValueFactory.fromAnyRef("true"))
.resolve();
this.helixManagerManagedHelix = HelixManagerFactory.getZKHelixManager(
this.configManagedHelix.getString(GobblinClusterConfigurationKeys.HELIX_CLUSTER_NAME_KEY), TEST_HELIX_INSTANCE_NAME_MANAGED,
InstanceType.PARTICIPANT, zkConnectionString);
this.gobblinYarnAppLauncherManagedHelix = new GobblinYarnAppLauncher(this.configManagedHelix, clusterConf);
}
@Test
public void testBuildApplicationMasterCommand() {
String command = this.gobblinYarnAppLauncher.buildApplicationMasterCommand("application_1234_3456", 64);
    // 41 is from 64 * 0.8 - 10 = 41.2, truncated to an integer
Assert.assertTrue(command.contains("-Xmx41"));
}
@Test
public void testCreateHelixCluster() throws Exception {
// This is tested here instead of in HelixUtilsTest to avoid setting up yet another testing ZooKeeper server.
HelixUtils.createGobblinHelixCluster(
this.config.getString(GobblinClusterConfigurationKeys.ZK_CONNECTION_STRING_KEY),
this.config.getString(GobblinClusterConfigurationKeys.HELIX_CLUSTER_NAME_KEY));
Assert.assertEquals(this.curatorFramework.checkExists()
.forPath(String.format("/%s", GobblinYarnAppLauncherTest.class.getSimpleName())).getVersion(), 0);
Assert.assertEquals(
this.curatorFramework.checkExists()
.forPath(String.format("/%s/CONTROLLER", GobblinYarnAppLauncherTest.class.getSimpleName())).getVersion(),
0);
//Create managed Helix cluster and test it is created successfully
HelixUtils.createGobblinHelixCluster(
this.configManagedHelix.getString(GobblinClusterConfigurationKeys.ZK_CONNECTION_STRING_KEY),
this.configManagedHelix.getString(GobblinClusterConfigurationKeys.HELIX_CLUSTER_NAME_KEY));
    Assert.assertEquals(this.curatorFramework.checkExists()
        .forPath(String.format("/%s", MANAGED_HELIX_CLUSTER_NAME)).getVersion(), 0);
Assert.assertEquals(
this.curatorFramework.checkExists()
.forPath(String.format("/%s/CONTROLLER", MANAGED_HELIX_CLUSTER_NAME)).getVersion(),
0);
}
/**
* For some yet unknown reason, hostname resolution for the ResourceManager in {@link MiniYARNCluster}
 * has an issue that prevents the {@link YarnClient} from connecting and submitting the Gobblin Yarn
 * application successfully. This works fine on a local machine, though, so this test and the test
 * below that depends on it are disabled on Travis-CI.
*/
@Test(enabled=false, groups = { "disabledOnCI" }, dependsOnMethods = "testCreateHelixCluster")
public void testSetupAndSubmitApplication() throws Exception {
HelixUtils.createGobblinHelixCluster(
this.config.getString(GobblinClusterConfigurationKeys.ZK_CONNECTION_STRING_KEY),
this.config.getString(GobblinClusterConfigurationKeys.HELIX_CLUSTER_NAME_KEY));
this.gobblinYarnAppLauncher.startYarnClient();
this.applicationId = this.gobblinYarnAppLauncher.setupAndSubmitApplication();
int i;
// wait for application to come up
for (i = 0; i < 120; i++) {
if (yarnClient.getApplicationReport(applicationId).getYarnApplicationState() ==
YarnApplicationState.RUNNING) {
break;
}
Thread.sleep(1000);
}
Assert.assertTrue(i < 120, "timed out waiting for RUNNING state");
// wait another 10 seconds and check state again to make sure that the application stays up
Thread.sleep(10000);
Assert.assertEquals(yarnClient.getApplicationReport(applicationId).getYarnApplicationState(),
YarnApplicationState.RUNNING, "Application may have aborted");
}
@Test(enabled=false, groups = { "disabledOnCI" }, dependsOnMethods = "testSetupAndSubmitApplication")
public void testGetReconnectableApplicationId() throws Exception {
Assert.assertEquals(this.gobblinYarnAppLauncher.getReconnectableApplicationId().get(), this.applicationId);
this.yarnClient.killApplication(this.applicationId);
Assert.assertEquals(yarnClient.getApplicationReport(applicationId).getYarnApplicationState(),
YarnApplicationState.KILLED, "Application not killed");
// takes some time for kill to take effect and app master to go down
Thread.sleep(5000);
}
@Test(dependsOnMethods = "testCreateHelixCluster")
public void testSendShutdownRequest() throws Exception {
this.helixManager.connect();
this.helixManager.getMessagingService().registerMessageHandlerFactory(GobblinHelixConstants.SHUTDOWN_MESSAGE_TYPE,
new TestShutdownMessageHandlerFactory(this));
this.gobblinYarnAppLauncher.connectHelixManager();
this.gobblinYarnAppLauncher.sendShutdownRequest();
Assert.assertEquals(this.curatorFramework.checkExists()
.forPath(String.format("/%s/CONTROLLER/MESSAGES", GobblinYarnAppLauncherTest.class.getSimpleName()))
.getVersion(), 0);
YarnSecurityManagerTest.GetHelixMessageNumFunc getCtrlMessageNum =
new YarnSecurityManagerTest.GetHelixMessageNumFunc(GobblinYarnAppLauncherTest.class.getSimpleName(), InstanceType.CONTROLLER, "",
this.curatorFramework);
AssertWithBackoff assertWithBackoff =
AssertWithBackoff.create().logger(LoggerFactory.getLogger("testSendShutdownRequest")).timeoutMs(20000);
assertWithBackoff.assertEquals(getCtrlMessageNum, 1, "1 controller message queued");
    // Give Helix some time to handle the message
assertWithBackoff.assertEquals(getCtrlMessageNum, 0, "all controller messages processed");
this.helixManagerManagedHelix.connect();
this.helixManagerManagedHelix.getMessagingService().registerMessageHandlerFactory(GobblinHelixConstants.SHUTDOWN_MESSAGE_TYPE,
new TestShutdownMessageHandlerFactory(this));
this.gobblinYarnAppLauncherManagedHelix.connectHelixManager();
this.gobblinYarnAppLauncherManagedHelix.sendShutdownRequest();
Assert.assertEquals(this.curatorFramework.checkExists()
.forPath(String.format("/%s/INSTANCES/%s/MESSAGES", this.configManagedHelix.getString(GobblinClusterConfigurationKeys.HELIX_CLUSTER_NAME_KEY), TEST_HELIX_INSTANCE_NAME_MANAGED))
.getVersion(), 0);
YarnSecurityManagerTest.GetHelixMessageNumFunc getInstanceMessageNum =
new YarnSecurityManagerTest.GetHelixMessageNumFunc(this.configManagedHelix.getString(
GobblinClusterConfigurationKeys.HELIX_CLUSTER_NAME_KEY),
InstanceType.PARTICIPANT, TEST_HELIX_INSTANCE_NAME_MANAGED, this.curatorFramework);
assertWithBackoff =
AssertWithBackoff.create().logger(LoggerFactory.getLogger("testSendShutdownRequest")).timeoutMs(20000);
    assertWithBackoff.assertEquals(getInstanceMessageNum, 1, "1 participant message queued");
    // Give Helix some time to handle the message
    assertWithBackoff.assertEquals(getInstanceMessageNum, 0, "all participant messages processed");
}
@AfterClass
public void tearDown() throws IOException, TimeoutException {
try {
Files.deleteIfExists(Paths.get(DYNAMIC_CONF_PATH));
Files.deleteIfExists(Paths.get(YARN_SITE_XML_PATH));
this.gobblinYarnAppLauncher.stopYarnClient();
if (this.helixManager.isConnected()) {
this.helixManager.disconnect();
}
if (this.helixManagerManagedHelix.isConnected()) {
this.helixManagerManagedHelix.disconnect();
}
this.gobblinYarnAppLauncher.disconnectHelixManager();
if (applicationId != null) {
this.gobblinYarnAppLauncher.cleanUpAppWorkDirectory(applicationId);
}
} finally {
this.closer.close();
}
}
@Test(enabled = false)
@Override
public void assertMessageReception(Message message) {
Assert.assertEquals(message.getMsgType(), GobblinHelixConstants.SHUTDOWN_MESSAGE_TYPE);
Assert.assertEquals(message.getMsgSubType(), HelixMessageSubTypes.APPLICATION_MASTER_SHUTDOWN.toString());
}
/**
* Test that the dynamic config is added to the config specified when the {@link GobblinApplicationMaster}
* is instantiated.
*/
@Test
public void testDynamicConfig() throws Exception {
Config config = this.config.withFallback(
ConfigFactory.parseMap(
ImmutableMap.of(ConfigurationKeys.DYNAMIC_CONFIG_GENERATOR_CLASS_KEY,
TestDynamicConfigGenerator.class.getName())));
ContainerId containerId = ContainerId.newInstance(
ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 0), 0), 0);
TestApplicationMaster
appMaster = new TestApplicationMaster("testApp", containerId, config,
new YarnConfiguration());
Assert.assertEquals(appMaster.getConfig().getString("dynamicKey1"), "dynamicValue1");
Assert.assertEquals(appMaster.getConfig().getString(ConfigurationKeys.DYNAMIC_CONFIG_GENERATOR_CLASS_KEY),
TestDynamicConfigGenerator.class.getName());
ServiceBasedAppLauncher appLauncher = appMaster.getAppLauncher();
Field servicesField = ServiceBasedAppLauncher.class.getDeclaredField("services");
servicesField.setAccessible(true);
List<Service> services = (List<Service>) servicesField.get(appLauncher);
Optional<Service> yarnServiceOptional = services.stream().filter(e -> e instanceof YarnService).findFirst();
Assert.assertTrue(yarnServiceOptional.isPresent());
YarnService yarnService = (YarnService) yarnServiceOptional.get();
Field configField = YarnService.class.getDeclaredField("config");
configField.setAccessible(true);
Config yarnServiceConfig = (Config) configField.get(yarnService);
Assert.assertEquals(yarnServiceConfig.getString("dynamicKey1"), "dynamicValue1");
Assert.assertEquals(yarnServiceConfig.getString(ConfigurationKeys.DYNAMIC_CONFIG_GENERATOR_CLASS_KEY),
TestDynamicConfigGenerator.class.getName());
}
/**
   * Test that the job cleanup call is made when the application master starts
*/
@Test
public void testJobCleanup() throws Exception {
ContainerId containerId = ContainerId.newInstance(
ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 0), 0), 0);
TestApplicationMaster
appMaster = Mockito.spy(new TestApplicationMaster("testApp", containerId, config,
new YarnConfiguration()));
GobblinHelixMultiManager mockMultiManager = Mockito.mock(GobblinHelixMultiManager.class);
appMaster.setMultiManager(mockMultiManager);
appMaster.start();
Mockito.verify(mockMultiManager, times(1)).cleanUpJobs();
}
@Test
public void testOutputConfig() throws IOException {
File tmpTestDir = com.google.common.io.Files.createTempDir();
try {
Path outputPath = Paths.get(tmpTestDir.toString(), "application.conf");
Config config = ConfigFactory.empty()
.withValue(ConfigurationKeys.FS_URI_KEY, ConfigValueFactory.fromAnyRef("file:///"))
.withValue(GobblinYarnAppLauncher.GOBBLIN_YARN_CONFIG_OUTPUT_PATH,
ConfigValueFactory.fromAnyRef(outputPath.toString()));
GobblinYarnAppLauncher.outputConfigToFile(config);
String configString = com.google.common.io.Files.toString(outputPath.toFile(), Charsets.UTF_8);
Assert.assertTrue(configString.contains("fs"));
} finally {
FileUtils.deleteDirectory(tmpTestDir);
}
}
@Test
public void testAddMetricReportingDynamicConfig()
throws IOException {
KafkaAvroSchemaRegistry schemaRegistry = Mockito.mock(KafkaAvroSchemaRegistry.class);
Mockito.when(schemaRegistry.register(Mockito.any(Schema.class), Mockito.anyString())).thenAnswer(new Answer<String>() {
@Override
public String answer(InvocationOnMock invocation) {
return "testId";
}
});
Config config = ConfigFactory.empty().withValue(ConfigurationKeys.METRICS_KAFKA_TOPIC_EVENTS, ConfigValueFactory.fromAnyRef("topic"))
.withValue(ConfigurationKeys.METRICS_REPORTING_KAFKA_ENABLED_KEY, ConfigValueFactory.fromAnyRef(true))
.withValue(ConfigurationKeys.METRICS_REPORTING_KAFKA_USE_SCHEMA_REGISTRY, ConfigValueFactory.fromAnyRef(true))
.withValue(KafkaSchemaRegistry.KAFKA_SCHEMA_REGISTRY_URL, ConfigValueFactory.fromAnyRef("http://testSchemaReg:0000"));
config = GobblinYarnAppLauncher.addMetricReportingDynamicConfig(config, schemaRegistry);
Assert.assertEquals(config.getString(ConfigurationKeys.METRICS_REPORTING_EVENTS_KAFKA_AVRO_SCHEMA_ID), "testId");
Assert.assertFalse(config.hasPath(ConfigurationKeys.METRICS_REPORTING_METRICS_KAFKA_AVRO_SCHEMA_ID));
}
/**
* An application master for accessing protected fields in {@link GobblinApplicationMaster}
* for testing.
*/
private static class TestApplicationMaster extends GobblinApplicationMaster {
public TestApplicationMaster(String applicationName, ContainerId containerId, Config config,
YarnConfiguration yarnConfiguration)
throws Exception {
super(applicationName, containerId.getApplicationAttemptId().getApplicationId().toString(), containerId, config, yarnConfiguration);
}
@Override
protected YarnService buildYarnService(Config config, String applicationName, String applicationId,
YarnConfiguration yarnConfiguration, FileSystem fs) throws Exception {
YarnService testYarnService = new TestYarnService(config, applicationName, applicationId, yarnConfiguration, fs,
new EventBus("GobblinYarnAppLauncherTest"));
return testYarnService;
}
public Config getConfig() {
return this.config;
}
public ServiceBasedAppLauncher getAppLauncher() {
return this.applicationLauncher;
}
public void setMultiManager(GobblinHelixMultiManager multiManager) {
this.multiManager = multiManager;
}
}
/**
* Class for testing that dynamic config is injected
*/
@VisibleForTesting
public static class TestDynamicConfigGenerator implements DynamicConfigGenerator {
public TestDynamicConfigGenerator() {
}
@Override
public Config generateDynamicConfig(Config config) {
return ConfigFactory.parseMap(ImmutableMap.of("dynamicKey1", "dynamicValue1"));
}
}
/**
* Test class for mocking out YarnService. Need to use this instead of Mockito because of final methods.
*/
private static class TestYarnService extends YarnService {
public TestYarnService(Config config, String applicationName, String applicationId, YarnConfiguration yarnConfiguration,
FileSystem fs, EventBus eventBus) throws Exception {
super(config, applicationName, applicationId, yarnConfiguration, fs, eventBus, null);
}
@Override
protected void startUp()
throws Exception {
// do nothing
}
}
}
| arjun4084346/gobblin | gobblin-yarn/src/test/java/org/apache/gobblin/yarn/GobblinYarnAppLauncherTest.java | Java | apache-2.0 | 23,573 |
/**
* Generate the node required for user display length changing
* @param {object} oSettings dataTables settings object
* @returns {node} Display length feature node
* @memberof DataTable#oApi
*/
function _fnFeatureHtmlLength(oSettings) {
if (oSettings.oScroll.bInfinite) {
return null;
}
/* This can be overruled by not using the _MENU_ var/macro in the language variable */
var sName = 'name="' + oSettings.sTableId + '_length"';
var sStdMenu = '<select size="1" ' + sName + '>';
var i, iLen;
var aLengthMenu = oSettings.aLengthMenu;
if (aLengthMenu.length == 2 && typeof aLengthMenu[0] === 'object' &&
typeof aLengthMenu[1] === 'object') {
for (i = 0, iLen = aLengthMenu[0].length; i < iLen; i++) {
sStdMenu += '<option value="' + aLengthMenu[0][i] + '">' + aLengthMenu[1][i] + '</option>';
}
}
else {
for (i = 0, iLen = aLengthMenu.length; i < iLen; i++) {
sStdMenu += '<option value="' + aLengthMenu[i] + '">' + aLengthMenu[i] + '</option>';
}
}
sStdMenu += '</select>';
var nLength = document.createElement('div');
if (!oSettings.aanFeatures.l) {
nLength.id = oSettings.sTableId + '_length';
}
nLength.className = oSettings.oClasses.sLength;
nLength.innerHTML = '<label>' + oSettings.oLanguage.sLengthMenu.replace('_MENU_', sStdMenu) + '</label>';
/*
* Set the length to the current display length - thanks to Andrea Pavlovic for this fix,
* and Stefan Skopnik for fixing the fix!
*/
$('select option[value="' + oSettings._iDisplayLength + '"]', nLength).attr("selected", true);
$('select', nLength).bind('change.DT', function (e) {
var iVal = $(this).val();
/* Update all other length options for the new display */
var n = oSettings.aanFeatures.l;
for (i = 0, iLen = n.length; i < iLen; i++) {
if (n[i] != this.parentNode) {
$('select', n[i]).val(iVal);
}
}
/* Redraw the table */
oSettings._iDisplayLength = parseInt(iVal, 10);
_fnCalculateEnd(oSettings);
        /* If we have space to show extra rows (backing up from the end point) then do so */
if (oSettings.fnDisplayEnd() == oSettings.fnRecordsDisplay()) {
oSettings._iDisplayStart = oSettings.fnDisplayEnd() - oSettings._iDisplayLength;
if (oSettings._iDisplayStart < 0) {
oSettings._iDisplayStart = 0;
}
}
if (oSettings._iDisplayLength == -1) {
oSettings._iDisplayStart = 0;
}
_fnDraw(oSettings);
});
$('select', nLength).attr('aria-controls', oSettings.sTableId);
return nLength;
}
/**
* Recalculate the end point based on the start point
* @param {object} oSettings dataTables settings object
* @memberof DataTable#oApi
*/
function _fnCalculateEnd(oSettings) {
if (oSettings.oFeatures.bPaginate === false) {
oSettings._iDisplayEnd = oSettings.aiDisplay.length;
}
else {
/* Set the end point of the display - based on how many elements there are
* still to display
*/
if (oSettings._iDisplayStart + oSettings._iDisplayLength > oSettings.aiDisplay.length ||
oSettings._iDisplayLength == -1) {
oSettings._iDisplayEnd = oSettings.aiDisplay.length;
}
else {
oSettings._iDisplayEnd = oSettings._iDisplayStart + oSettings._iDisplayLength;
}
}
}
| tanvirshuvo/abtec | target/ABTeC/resources/assets/advanced-datatable/media/src/core/core.length.js | JavaScript | apache-2.0 | 3,552 |
package com.camunda.fox.process.stub;
import org.camunda.bpm.engine.impl.bpmn.behavior.BpmnActivityBehavior;
import org.camunda.bpm.engine.impl.pvm.delegate.ActivityBehavior;
import org.camunda.bpm.engine.impl.pvm.delegate.ActivityExecution;
/**
* This class is used to stub the activiti-camel component in test cases.
*
* Why is this needed?
* -------------------
* The activiti-camel component hooks into the process execution by extending the
* BpmnActivityBehavior class. At the end of the "execute" method, activiti-camel usually
* calls "performDefaultOutgoingBehavior(execution)", which continues the execution of the
* process instance. However, if we replaced activiti-camel's CamelBehaviour with a mock,
* this method call would not be executed and the process instance would be stuck at the
* first occurrence of a service task that contains a delegate-expression with the value
* "#{camel}".
*
* @author Nils Preusker - nilspreusker
*/
public class CamelBehaviourStub extends BpmnActivityBehavior implements ActivityBehavior {
@Override
public void execute(ActivityExecution execution) throws Exception {
performDefaultOutgoingBehavior(execution);
}
}
| tobiasschaefer/camunda-consulting | showcases/bank-account-opening-mule/src/test/java/com/camunda/fox/process/stub/CamelBehaviourStub.java | Java | apache-2.0 | 1,195 |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.dimensionlookup;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Date;
import org.junit.Before;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.LogLevel;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.StepMeta;
public class DimensionLookupIT {
private StepMeta mockStepMeta;
private DimensionLookupMeta mockDimensionLookupMeta;
private DimensionLookupData mockDimensionLookupData;
private DatabaseMeta mockDatabaseMeta;
private RowMetaInterface mockOutputRowMeta;
private TransMeta mockTransMeta;
private Trans mockTrans;
private Connection mockConnection;
private DimensionLookup dimensionLookup;
@Before
public void setup() {
mockStepMeta = mock( StepMeta.class );
mockDimensionLookupMeta = mock( DimensionLookupMeta.class );
mockDimensionLookupData = mock( DimensionLookupData.class );
mockDatabaseMeta = mock( DatabaseMeta.class );
mockOutputRowMeta = mock( RowMetaInterface.class );
mockTransMeta = mock( TransMeta.class );
mockTrans = mock( Trans.class );
mockConnection = mock( Connection.class );
mockDimensionLookupData.outputRowMeta = mockOutputRowMeta;
String stepName = "testName";
when( mockStepMeta.getName() ).thenReturn( stepName );
when( mockTransMeta.findStep( stepName ) ).thenReturn( mockStepMeta );
when( mockTrans.getLogLevel() ).thenReturn( LogLevel.ROWLEVEL );
dimensionLookup = new DimensionLookup( mockStepMeta, mockDimensionLookupData, 0, mockTransMeta, mockTrans );
dimensionLookup.init( mockDimensionLookupMeta, mockDimensionLookupData );
}
public void prepareMocksForInsertTest() {
mockDimensionLookupData.schemaTable = "testSchemaTable";
ValueMetaInterface mockKeyValueMeta = mock( ValueMetaInterface.class );
when( mockDimensionLookupMeta.getDatabaseMeta() ).thenReturn( mockDatabaseMeta );
when( mockDatabaseMeta.quoteField( anyString() ) ).thenAnswer( new Answer<String>() {
public String answer( InvocationOnMock invocation ) throws Throwable {
return "\"" + invocation.getArguments()[0] + "\"";
}
} );
String keyField = "testKeyField";
when( mockDimensionLookupMeta.getKeyField() ).thenReturn( keyField );
when( mockDimensionLookupMeta.getVersionField() ).thenReturn( "testVersionField" );
when( mockDimensionLookupMeta.getDateFrom() ).thenReturn( "1900-01-01" );
when( mockDimensionLookupMeta.getDateTo() ).thenReturn( "1901-01-01" );
when( mockDimensionLookupMeta.getKeyLookup() ).thenReturn( new String[] {} );
when( mockDimensionLookupMeta.getFieldLookup() ).thenReturn( new String[] {} );
when( mockDimensionLookupMeta.getFieldUpdate() ).thenReturn( new int[] {} );
mockDimensionLookupData.keynrs = new int[] {};
mockDimensionLookupData.fieldnrs = new int[] {};
Database mockDatabase = mock( Database.class );
when( mockDatabase.getConnection() ).thenReturn( mockConnection );
mockDimensionLookupData.db = mockDatabase;
when( mockKeyValueMeta.getName() ).thenReturn( "testKey" );
when( mockOutputRowMeta.getValueMeta( 0 ) ).thenReturn( mockKeyValueMeta );
}
@Test
public void testDimInsertPreparesStatementWithReturnKeysForNullTechnicalKey() throws KettleException,
SQLException {
RowMetaInterface mockMetaInterface = mock( RowMetaInterface.class );
Object[] row = new Object[0];
Long technicalKey = null;
boolean newEntry = false;
Long versionNr = 2L;
Date dateFrom = new Date();
Date dateTo = new Date();
prepareMocksForInsertTest();
dimensionLookup.dimInsert( mockMetaInterface, row, technicalKey, newEntry, versionNr, dateFrom, dateTo );
// Insert statement with keys
verify( mockConnection, times( 1 ) ).prepareStatement( anyString(), eq( Statement.RETURN_GENERATED_KEYS ) );
// Update statement without
verify( mockConnection, times( 1 ) ).prepareStatement( anyString() );
}
@Test
public void testDimInsertPreparesStatementWithReturnKeysForNotNullTechnicalKey() throws KettleException,
SQLException {
RowMetaInterface mockMetaInterface = mock( RowMetaInterface.class );
Object[] row = new Object[0];
Long technicalKey = 1L;
boolean newEntry = false;
Long versionNr = 2L;
Date dateFrom = new Date();
Date dateTo = new Date();
prepareMocksForInsertTest();
dimensionLookup.dimInsert( mockMetaInterface, row, technicalKey, newEntry, versionNr, dateFrom, dateTo );
// Neither insert nor update should have keys
verify( mockConnection, times( 2 ) ).prepareStatement( anyString() );
}
}
| TatsianaKasiankova/pentaho-kettle | integration/src/it/java/org/pentaho/di/trans/steps/dimensionlookup/DimensionLookupIT.java | Java | apache-2.0 | 6,190 |
# Getting Started with rkt
The following guide will show you how to build and run a self-contained Go app using rkt, the reference implementation of the [App Container Specification](https://github.com/appc/spec).
If you're not on Linux, you should do all of this inside [the rkt Vagrant](https://github.com/coreos/rkt/blob/master/Documentation/trying-out-rkt.md#rkt-using-vagrant).
## Create a hello go application
```go
package main
import (
"log"
"net/http"
)
func main() {
http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
log.Printf("request from %v\n", r.RemoteAddr)
w.Write([]byte("hello\n"))
})
log.Fatal(http.ListenAndServe(":5000", nil))
}
```
### Build a statically linked Go binary
Next we need to build our application.
We are going to statically link our app so we can ship an App Container Image with no external dependencies.
```
$ CGO_ENABLED=0 go build -ldflags '-extldflags "-static"'
```
Before proceeding, verify that the produced binary is statically linked:
```
$ file hello
hello: ELF 64-bit LSB executable, x86-64, version 1 (SYSV), statically linked, not stripped
$ ldd hello
not a dynamic executable
```
## Create the image
To create the image, we can use `acbuild`, which can be downloaded via one of the [releases in the acbuild repository](https://github.com/appc/acbuild/releases)
The following commands will create an ACI containing our application and important metadata.
```bash
acbuild begin
acbuild set-name example.com/hello
acbuild copy hello /bin/hello
acbuild set-exec /bin/hello
acbuild port add www tcp 5000
acbuild label add version 0.0.1
acbuild label add arch amd64
acbuild label add os linux
acbuild annotation add authors "Carly Container <carly@example.com>"
acbuild write hello-0.0.1-linux-amd64.aci
acbuild end
```
## Run
### Launch a local application image
```
# rkt --insecure-options=image run hello-0.0.1-linux-amd64.aci
```
Note that `--insecure-options=image` is required because, by default, rkt expects our images to be signed.
See the [Signing and Verification Guide](https://github.com/coreos/rkt/blob/master/Documentation/signing-and-verification-guide.md) for more details.
At this point our hello app is running and ready to handle HTTP requests.
You can also [run rkt as a daemon](https://github.com/coreos/rkt/blob/master/Documentation/subcommands/run.md#run-rkt-as-a-daemon).
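If you'd rather not write a unit file, a minimal sketch (assuming a systemd-based host and the ACI built above) is to wrap the same invocation in `systemd-run`:
```
# systemd-run --slice=machine rkt --insecure-options=image run hello-0.0.1-linux-amd64.aci
```
`systemd-run` starts the pod in its own transient unit under the `machine` slice, so you can locate it with `systemctl list-units | grep rkt` and stop it with `systemctl stop` on that unit.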
### Test with curl
By default, rkt will assign the running container an IP address. Use `rkt list` to discover what it is:
```
# rkt list
UUID APP IMAGE NAME STATE NETWORKS
885876b0 hello example.com/hello:0.0.1 running default:ip4=172.16.28.2
```
Then you can `curl` that IP on port 5000:
```
$ curl 172.16.28.2:5000
hello
```
| cloverstd/open-falcon-agent-in-docker | vendor/github.com/coreos/rkt/Documentation/getting-started-guide.md | Markdown | apache-2.0 | 2,726 |
package net.lr.tutorial.karaf.cxf.personservice.impl;
import java.io.InputStream;
import javax.ws.rs.core.Response;
import net.lr.tutorial.karaf.cxf.personrest.impl.PersonServiceImpl;
import net.lr.tutorial.karaf.cxf.personrest.model.Person;
import net.lr.tutorial.karaf.cxf.personrest.model.PersonService;
import org.apache.cxf.endpoint.Server;
import org.apache.cxf.jaxrs.JAXRSServerFactoryBean;
import org.apache.cxf.jaxrs.client.JAXRSClientFactory;
import org.apache.cxf.jaxrs.client.WebClient;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
public class PersonServiceRestTest {
private static final String PERSONSERVICE_TESTURL = "http://localhost:8282/person";
private static Server server;
@BeforeClass
public static void startServer() {
        PersonService personService = new PersonServiceImpl();
JAXRSServerFactoryBean factory = new JAXRSServerFactoryBean();
factory.setAddress(PERSONSERVICE_TESTURL);
factory.setServiceBean(personService);
server = factory.create();
server.start();
}
@Test
public void testInterface() {
PersonService personService = JAXRSClientFactory.create(PERSONSERVICE_TESTURL, PersonService.class);
Person person = new Person();
person.setId("1002");
person.setName("Christian Schneider");
personService.updatePerson("1002", person);
Person person2 = personService.getPerson("1002");
assertCorrectPerson(person2);
}
@Test
public void testWebClient() {
WebClient client = WebClient.create(PERSONSERVICE_TESTURL + "/1001");
putPerson(client);
Person person = client.get(Person.class);
assertCorrectPerson(person);
}
private void putPerson(WebClient client) {
InputStream is = this.getClass().getResourceAsStream("/person1.xml");
Response resp = client.put(is);
System.out.println(resp);
}
@AfterClass
public static void stopServer() {
server.stop();
}
private void assertCorrectPerson(Person person) {
Assert.assertNotNull(person);
Assert.assertEquals("Christian Schneider", person.getName());
}
}
| mhcxp/Karaf-Tutorial | cxf/personservice-rest/server/src/test/java/net/lr/tutorial/karaf/cxf/personservice/impl/PersonServiceRestTest.java | Java | apache-2.0 | 2,276 |
import asyncio
from unittest import mock
import pytest
from waterbutler.core import utils
class TestAsyncRetry:
@pytest.mark.asyncio
async def test_returns_success(self):
mock_func = mock.Mock(return_value='Foo')
retryable = utils.async_retry(5, 0, raven=None)(mock_func)
x = await retryable()
assert x == 'Foo'
assert mock_func.call_count == 1
@pytest.mark.asyncio
async def test_retries_until(self):
mock_func = mock.Mock(side_effect=[Exception(), 'Foo'])
retryable = utils.async_retry(5, 0, raven=None)(mock_func)
x = await retryable()
assert x == 'Foo'
assert mock_func.call_count == 2
@pytest.mark.asyncio
async def test_retries_then_raises(self):
mock_func = mock.Mock(side_effect=Exception('Foo'))
retryable = utils.async_retry(5, 0, raven=None)(mock_func)
with pytest.raises(Exception) as e:
            await retryable()
assert e.type == Exception
assert e.value.args == ('Foo',)
assert mock_func.call_count == 6
@pytest.mark.asyncio
async def test_retries_by_its_self(self):
mock_func = mock.Mock(side_effect=Exception())
retryable = utils.async_retry(8, 0, raven=None)(mock_func)
retryable()
await asyncio.sleep(.1)
assert mock_func.call_count == 9
    @pytest.mark.asyncio
    async def test_docstring_survives(self):
async def mytest():
'''This is a docstring'''
pass
retryable = utils.async_retry(8, 0, raven=None)(mytest)
assert retryable.__doc__ == '''This is a docstring'''
@pytest.mark.asyncio
async def test_kwargs_work(self):
async def mytest(mack, *args, **kwargs):
mack()
assert args == ('test', 'Foo')
assert kwargs == {'test': 'Foo', 'baz': 'bam'}
return True
retryable = utils.async_retry(8, 0, raven=None)(mytest)
merk = mock.Mock(side_effect=[Exception(''), 5])
fut = retryable(merk, 'test', 'Foo', test='Foo', baz='bam')
assert await fut
assert merk.call_count == 2
@pytest.mark.asyncio
async def test_all_retry(self):
mock_func = mock.Mock(side_effect=Exception())
retryable = utils.async_retry(8, 0, raven=None)(mock_func)
retryable()
retryable()
await asyncio.sleep(.1)
assert mock_func.call_count == 18
| TomBaxter/waterbutler | tests/core/test_utils.py | Python | apache-2.0 | 2,451 |
/**
*
* Copyright 2003-2007 Jive Software.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smackx.muc;
/**
* A listener that is fired anytime your participant's status in a room is changed, such as the
* user being kicked, banned, or granted admin permissions.
*
* @author Gaston Dombiak
*/
public interface UserStatusListener {
/**
* Called when a moderator kicked your user from the room. This means that you are no longer
     * participating in the room.
*
* @param actor the moderator that kicked your user from the room (e.g. user@host.org).
* @param reason the reason provided by the actor to kick you from the room.
*/
public abstract void kicked(String actor, String reason);
/**
* Called when a moderator grants voice to your user. This means that you were a visitor in
* the moderated room before and now you can participate in the room by sending messages to
* all occupants.
*
*/
public abstract void voiceGranted();
/**
* Called when a moderator revokes voice from your user. This means that you were a
* participant in the room able to speak and now you are a visitor that can't send
* messages to the room occupants.
*
*/
public abstract void voiceRevoked();
/**
* Called when an administrator or owner banned your user from the room. This means that you
* will no longer be able to join the room unless the ban has been removed.
*
* @param actor the administrator that banned your user (e.g. user@host.org).
* @param reason the reason provided by the administrator to banned you.
*/
public abstract void banned(String actor, String reason);
/**
* Called when an administrator grants your user membership to the room. This means that you
* will be able to join the members-only room.
*
*/
public abstract void membershipGranted();
/**
* Called when an administrator revokes your user membership to the room. This means that you
* will not be able to join the members-only room.
*
*/
public abstract void membershipRevoked();
/**
* Called when an administrator grants moderator privileges to your user. This means that you
* will be able to kick users, grant and revoke voice, invite other users, modify room's
     * subject, plus all the participants' privileges.
*
*/
public abstract void moderatorGranted();
/**
* Called when an administrator revokes moderator privileges from your user. This means that
* you will no longer be able to kick users, grant and revoke voice, invite other users,
     * modify room's subject, plus all the participants' privileges.
*
*/
public abstract void moderatorRevoked();
/**
     * Called when an owner grants your user ownership of the room. This means that you
* will be able to change defining room features as well as perform all administrative
* functions.
*
*/
public abstract void ownershipGranted();
/**
     * Called when an owner revokes your user's ownership of the room. This means that you
* will no longer be able to change defining room features as well as perform all
* administrative functions.
*
*/
public abstract void ownershipRevoked();
/**
* Called when an owner grants administrator privileges to your user. This means that you
* will be able to perform administrative functions such as banning users and edit moderator
* list.
*
*/
public abstract void adminGranted();
/**
* Called when an owner revokes administrator privileges from your user. This means that you
* will no longer be able to perform administrative functions such as banning users and edit
* moderator list.
*
*/
public abstract void adminRevoked();
}
| Soo000/SooChat | src/org/jivesoftware/smackx/muc/UserStatusListener.java | Java | apache-2.0 | 4,465 |
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.exec;
import com.google.common.collect.ImmutableMap;
import com.google.devtools.build.lib.rules.test.TestRunnerAction;
import com.google.devtools.build.lib.util.UserUtils;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.util.HashMap;
import java.util.Map;
/**
* A policy for running tests. It currently only encompasses the environment computation for the
* test.
*/
public class TestPolicy {
/**
* The user name of the user running Bazel; this may differ from ${USER} for tests that are run
* remotely.
*/
public static final String SYSTEM_USER_NAME = "${SYSTEM_USER_NAME}";
/** An absolute path to a writable directory that is reserved for the current test. */
public static final String TEST_TMP_DIR = "${TEST_TMP_DIR}";
/** The path of the runfiles directory. */
public static final String RUNFILES_DIR = "${RUNFILES_DIR}";
public static final String INHERITED = "${inherited}";
private final ImmutableMap<String, String> envVariables;
/**
* Creates a new instance. The map's keys are the names of the environment variables, while the
* values can be either fixed values, or one of the constants in this class, specifically {@link
* #SYSTEM_USER_NAME}, {@link #TEST_TMP_DIR}, {@link #RUNFILES_DIR}, or {@link #INHERITED}.
*/
public TestPolicy(ImmutableMap<String, String> envVariables) {
this.envVariables = envVariables;
}
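  // Illustrative example only (not the production defaults): a policy map mixing fixed
  // values, placeholders and inheritance could look like
  //   ImmutableMap.of(
  //       "USER", SYSTEM_USER_NAME,     // replaced with the invoking user's name
  //       "TEST_TMPDIR", TEST_TMP_DIR,  // replaced with the per-test temp directory
  //       "PATH", INHERITED)            // copied from the client environment, if present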
/**
* Returns a mutable map of the environment variables for a specific test. This is intended to be
* the final, complete environment - callers should avoid relying on the mutability of the return
* value, and instead change the policy itself.
*/
public Map<String, String> computeTestEnvironment(
TestRunnerAction testAction,
ImmutableMap<String, String> clientEnv,
int timeoutInSeconds,
PathFragment relativeRunfilesDir,
PathFragment tmpDir) {
Map<String, String> env = new HashMap<>();
// Add all env variables, allow some string replacements and inheritance.
String userProp = UserUtils.getUserName();
String tmpDirPath = tmpDir.getPathString();
String runfilesDirPath = relativeRunfilesDir.getPathString();
for (Map.Entry<String, String> entry : envVariables.entrySet()) {
String val = entry.getValue();
if (val.contains("${")) {
if (val.equals(INHERITED)) {
if (!clientEnv.containsKey(entry.getKey())) {
continue;
}
val = clientEnv.get(entry.getKey());
} else {
val = val.replace(SYSTEM_USER_NAME, userProp);
val = val.replace(TEST_TMP_DIR, tmpDirPath);
val = val.replace(RUNFILES_DIR, runfilesDirPath);
}
}
env.put(entry.getKey(), val);
}
// Overwrite with the environment common to all actions, see --action_env.
// TODO(ulfjack): This also includes env variables from the configuration fragments, and it does
// not include the env variables which are supposed to be inherited, i.e., for with --action_env
// does not specify an explicit value.
env.putAll(testAction.getConfiguration().getLocalShellEnvironment());
// Overwrite with the environment common to all tests, see --test_env.
// TODO(ulfjack): This is handled differently from --action_env such that changing the
// --test_env flag (or any of the inherited env variables) requires a full re-analysis of
// everything, instead of triggering just the subset of actions that rely on inherited
// variables. Needless to say, that is not optimal, and we should fix it to use the same
// approach as --action_env.
env.putAll(testAction.getTestEnv());
// Setup any test-specific env variables; note that this does not overwrite existing values for
// TEST_RANDOM_SEED or TEST_SIZE if they're already set.
testAction.setupEnvVariables(env, timeoutInSeconds);
return env;
}
}
| juhalindfors/bazel-patches | src/main/java/com/google/devtools/build/lib/exec/TestPolicy.java | Java | apache-2.0 | 4,559 |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.bookkeeper.client;
import java.util.Enumeration;
/**
* Implements objects to help with the synchronization of asynchronous calls.
*
*/
class SyncCounter {
int i;
int rc;
int total;
Enumeration<LedgerEntry> seq = null;
LedgerHandle lh = null;
synchronized void inc() {
i++;
total++;
}
synchronized void dec() {
i--;
notifyAll();
}
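    /**
     * Blocks the calling thread until the number of outstanding operations drops to
     * {@code limit} or below. If a wait returns without the count having changed, the
     * loop gives up instead of waiting indefinitely on a missed notification.
     */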
synchronized void block(int limit) throws InterruptedException {
while (i > limit) {
int prev = i;
wait();
if (i == prev) {
break;
}
}
}
synchronized int total() {
return total;
}
void setrc(int rc) {
this.rc = rc;
}
int getrc() {
return rc;
}
void setSequence(Enumeration<LedgerEntry> seq) {
this.seq = seq;
}
Enumeration<LedgerEntry> getSequence() {
return seq;
}
void setLh(LedgerHandle lh) {
this.lh = lh;
}
LedgerHandle getLh() {
return lh;
}
}
| sijie/bookkeeper | bookkeeper-server/src/main/java/org/apache/bookkeeper/client/SyncCounter.java | Java | apache-2.0 | 1,898 |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.ErrorReporting;
using Microsoft.CodeAnalysis.Internal.Log;
using Microsoft.CodeAnalysis.Options;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis
{
/// <summary>
/// Represents a source code document that is part of a project.
/// It provides access to the source text, parsed syntax tree and the corresponding semantic model.
/// </summary>
[DebuggerDisplay("{GetDebuggerDisplay(),nq}")]
public partial class Document : TextDocument
{
private WeakReference<SemanticModel> _model;
private Task<SyntaxTree> _syntaxTreeResultTask;
internal Document(Project project, DocumentState state) :
base(project, state)
{
}
private DocumentState DocumentState => (DocumentState)State;
/// <summary>
/// The kind of source code this document contains.
/// </summary>
public SourceCodeKind SourceCodeKind => DocumentState.SourceCodeKind;
/// <summary>
/// True if the info of the document change (name, folders, file path; not the content)
/// </summary>
internal bool HasInfoChanged(Document otherDocument)
{
return DocumentState.Info != otherDocument.DocumentState.Info
|| DocumentState.SourceCodeKind != otherDocument.SourceCodeKind;
}
/// <summary>
/// Gets a <see cref="DocumentInfo"/> for this document w/o the content.
/// </summary>
internal DocumentInfo GetDocumentInfoWithoutContent()
{
return DocumentState.Info.WithSourceCodeKind(DocumentState.SourceCodeKind);
}
/// <summary>
/// True if the document content has potentially changed.
/// Does not compare actual text.
/// </summary>
internal bool HasContentChanged(Document otherDocument)
{
return DocumentState.HasContentChanged(otherDocument.DocumentState);
}
/// <summary>
/// Get the current syntax tree for the document if the text is already loaded and the tree is already parsed.
/// In almost all cases, you should call <see cref="GetSyntaxTreeAsync"/> to fetch the tree, which will parse the tree
/// if it's not already parsed.
/// </summary>
public bool TryGetSyntaxTree(out SyntaxTree syntaxTree)
{
// if we already have cache, use it
if (_syntaxTreeResultTask != null)
{
syntaxTree = _syntaxTreeResultTask.Result;
return true;
}
if (!DocumentState.TryGetSyntaxTree(out syntaxTree))
{
return false;
}
// cache the result if it is not already cached
if (_syntaxTreeResultTask == null)
{
var result = Task.FromResult(syntaxTree);
Interlocked.CompareExchange(ref _syntaxTreeResultTask, result, null);
}
return true;
}
/// <summary>
/// Get the current syntax tree version for the document if the text is already loaded and the tree is already parsed.
/// In almost all cases, you should call <see cref="GetSyntaxVersionAsync"/> to fetch the version, which will load the tree
/// if it's not already available.
/// </summary>
public bool TryGetSyntaxVersion(out VersionStamp version)
{
version = default(VersionStamp);
if (!this.TryGetTextVersion(out var textVersion))
{
return false;
}
var projectVersion = this.Project.Version;
version = textVersion.GetNewerVersion(projectVersion);
return true;
}
/// <summary>
/// Gets the version of the document's top level signature if it is already loaded and available.
/// </summary>
internal bool TryGetTopLevelChangeTextVersion(out VersionStamp version)
{
return DocumentState.TryGetTopLevelChangeTextVersion(out version);
}
/// <summary>
/// Gets the version of the syntax tree. This is generally the newer of the text version and the project's version.
/// </summary>
public async Task<VersionStamp> GetSyntaxVersionAsync(CancellationToken cancellationToken = default(CancellationToken))
{
var textVersion = await this.GetTextVersionAsync(cancellationToken).ConfigureAwait(false);
var projectVersion = this.Project.Version;
return textVersion.GetNewerVersion(projectVersion);
}
/// <summary>
/// <code>true</code> if this Document supports providing data through the
/// <see cref="GetSyntaxTreeAsync"/> and <see cref="GetSyntaxRootAsync"/> methods.
///
/// If <code>false</code> then these methods will return <code>null</code> instead.
/// </summary>
public bool SupportsSyntaxTree => DocumentState.SupportsSyntaxTree;
/// <summary>
/// <code>true</code> if this Document supports providing data through the
/// <see cref="GetSemanticModelAsync"/> method.
///
/// If <code>false</code> then this method will return <code>null</code> instead.
/// </summary>
public bool SupportsSemanticModel
{
get
{
return this.SupportsSyntaxTree && this.Project.SupportsCompilation;
}
}
/// <summary>
/// Gets the <see cref="SyntaxTree" /> for this document asynchronously.
/// </summary>
public Task<SyntaxTree> GetSyntaxTreeAsync(CancellationToken cancellationToken = default(CancellationToken))
{
// If the language doesn't support getting syntax trees for a document, then bail out immediately.
if (!this.SupportsSyntaxTree)
{
return SpecializedTasks.Default<SyntaxTree>();
}
// if we have a cached result task use it
if (_syntaxTreeResultTask != null)
{
return _syntaxTreeResultTask;
}
// check to see if we already have the tree before actually going async
if (TryGetSyntaxTree(out var tree))
{
// stash a completed result task for this value for the next request (to reduce extraneous allocations of tasks)
// don't use the actual async task because it depends on a specific cancellation token
                // it's okay to cache the task and hold onto the SyntaxTree, because the DocumentState already keeps the SyntaxTree alive.
Interlocked.CompareExchange(ref _syntaxTreeResultTask, Task.FromResult(tree), null);
return _syntaxTreeResultTask;
}
// do it async for real.
return DocumentState.GetSyntaxTreeAsync(cancellationToken);
}
internal SyntaxTree GetSyntaxTreeSynchronously(CancellationToken cancellationToken)
{
if (!this.SupportsSyntaxTree)
{
return null;
}
return DocumentState.GetSyntaxTree(cancellationToken);
}
/// <summary>
/// Gets the root node of the current syntax tree if the syntax tree has already been parsed and the tree is still cached.
/// In almost all cases, you should call <see cref="GetSyntaxRootAsync"/> to fetch the root node, which will parse
/// the document if necessary.
/// </summary>
public bool TryGetSyntaxRoot(out SyntaxNode root)
{
root = null;
return this.TryGetSyntaxTree(out var tree) && tree.TryGetRoot(out root) && root != null;
}
/// <summary>
/// Gets the root node of the syntax tree asynchronously.
/// </summary>
public async Task<SyntaxNode> GetSyntaxRootAsync(CancellationToken cancellationToken = default(CancellationToken))
{
if (!this.SupportsSyntaxTree)
{
return null;
}
var tree = await this.GetSyntaxTreeAsync(cancellationToken).ConfigureAwait(false);
return await tree.GetRootAsync(cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Only for features that absolutely must run synchronously (probably because they're
/// on the UI thread). Right now, the only feature this is for is Outlining as VS will
/// block on that feature from the UI thread when a document is opened.
/// </summary>
internal SyntaxNode GetSyntaxRootSynchronously(CancellationToken cancellationToken)
{
if (!this.SupportsSyntaxTree)
{
return null;
}
var tree = this.GetSyntaxTreeSynchronously(cancellationToken);
return tree.GetRoot(cancellationToken);
}
/// <summary>
/// Gets the current semantic model for this document if the model is already computed and still cached.
/// In almost all cases, you should call <see cref="GetSemanticModelAsync"/>, which will compute the semantic model
/// if necessary.
/// </summary>
public bool TryGetSemanticModel(out SemanticModel semanticModel)
{
semanticModel = null;
return _model != null && _model.TryGetTarget(out semanticModel);
}
/// <summary>
/// Gets the semantic model for this document asynchronously.
/// </summary>
public async Task<SemanticModel> GetSemanticModelAsync(CancellationToken cancellationToken = default(CancellationToken))
{
try
{
if (!this.SupportsSemanticModel)
{
return null;
}
if (this.TryGetSemanticModel(out var semanticModel))
{
return semanticModel;
}
var syntaxTree = await this.GetSyntaxTreeAsync(cancellationToken).ConfigureAwait(false);
var compilation = await this.Project.GetCompilationAsync(cancellationToken).ConfigureAwait(false);
var result = compilation.GetSemanticModel(syntaxTree);
Contract.ThrowIfNull(result);
                // first, try to set the cache if it has not been set yet
                var original = Interlocked.CompareExchange(ref _model, new WeakReference<SemanticModel>(result), null);
                // okay, this is the first time.
if (original == null)
{
return result;
}
// it looks like someone has set it. try to reuse same semantic model
if (original.TryGetTarget(out semanticModel))
{
return semanticModel;
}
// it looks like cache is gone. reset the cache.
original.SetTarget(result);
return result;
}
catch (Exception e) when (FatalError.ReportUnlessCanceled(e))
{
throw ExceptionUtilities.Unreachable;
}
}
/// <summary>
/// Creates a new instance of this document updated to have the source code kind specified.
/// </summary>
public Document WithSourceCodeKind(SourceCodeKind kind)
{
return this.Project.Solution.WithDocumentSourceCodeKind(this.Id, kind).GetDocument(this.Id);
}
/// <summary>
/// Creates a new instance of this document updated to have the text specified.
/// </summary>
public Document WithText(SourceText text)
{
return this.Project.Solution.WithDocumentText(this.Id, text, PreservationMode.PreserveIdentity).GetDocument(this.Id);
}
/// <summary>
/// Creates a new instance of this document updated to have a syntax tree rooted by the specified syntax node.
/// </summary>
public Document WithSyntaxRoot(SyntaxNode root)
{
return this.Project.Solution.WithDocumentSyntaxRoot(this.Id, root, PreservationMode.PreserveIdentity).GetDocument(this.Id);
}
/// <summary>
/// Creates a new instance of this document updated to have the specified name.
/// </summary>
public Document WithName(string name)
{
return this.Project.Solution.WithDocumentName(this.Id, name).GetDocument(this.Id);
}
/// <summary>
/// Creates a new instance of this document updated to have the specified folders.
/// </summary>
public Document WithFolders(IEnumerable<string> folders)
{
return this.Project.Solution.WithDocumentFolders(this.Id, folders).GetDocument(this.Id);
}
/// <summary>
/// Creates a new instance of this document updated to have the specified file path.
/// </summary>
/// <param name="filePath"></param>
public Document WithFilePath(string filePath)
{
return this.Project.Solution.WithDocumentFilePath(this.Id, filePath).GetDocument(this.Id);
}
/// <summary>
/// Get the text changes between this document and a prior version of the same document.
/// The changes, when applied to the text of the old document, will produce the text of the current document.
/// </summary>
public async Task<IEnumerable<TextChange>> GetTextChangesAsync(Document oldDocument, CancellationToken cancellationToken = default(CancellationToken))
{
try
{
using (Logger.LogBlock(FunctionId.Workspace_Document_GetTextChanges, this.Name, cancellationToken))
{
if (oldDocument == this)
{
// no changes
return SpecializedCollections.EmptyEnumerable<TextChange>();
}
if (this.Id != oldDocument.Id)
{
throw new ArgumentException(WorkspacesResources.The_specified_document_is_not_a_version_of_this_document);
}
// first try to see if text already knows its changes
IList<TextChange> textChanges = null;
if (this.TryGetText(out var text) && oldDocument.TryGetText(out var oldText))
{
if (text == oldText)
{
return SpecializedCollections.EmptyEnumerable<TextChange>();
}
var container = text.Container;
if (container != null)
{
textChanges = text.GetTextChanges(oldText).ToList();
// if changes are significant (not the whole document being replaced) then use these changes
if (textChanges.Count > 1 || (textChanges.Count == 1 && textChanges[0].Span != new TextSpan(0, oldText.Length)))
{
return textChanges;
}
}
}
// get changes by diffing the trees
if (this.SupportsSyntaxTree)
{
var tree = await this.GetSyntaxTreeAsync(cancellationToken).ConfigureAwait(false);
var oldTree = await oldDocument.GetSyntaxTreeAsync(cancellationToken).ConfigureAwait(false);
return tree.GetChanges(oldTree);
}
text = await this.GetTextAsync(cancellationToken).ConfigureAwait(false);
oldText = await oldDocument.GetTextAsync(cancellationToken).ConfigureAwait(false);
return text.GetTextChanges(oldText).ToList();
}
}
catch (Exception e) when (FatalError.ReportUnlessCanceled(e))
{
throw ExceptionUtilities.Unreachable;
}
}
/// <summary>
/// Gets the list of <see cref="DocumentId"/>s that are linked to this
/// <see cref="Document" />. <see cref="Document"/>s are considered to be linked if they
/// share the same <see cref="TextDocument.FilePath" />. This <see cref="DocumentId"/> is excluded from the
/// result.
/// </summary>
public ImmutableArray<DocumentId> GetLinkedDocumentIds()
{
var documentIdsWithPath = this.Project.Solution.GetDocumentIdsWithFilePath(this.FilePath);
var filteredDocumentIds = this.Project.Solution.FilterDocumentIdsByLanguage(documentIdsWithPath, this.Project.Language).ToImmutableArray();
return filteredDocumentIds.Remove(this.Id);
}
/// <summary>
/// Creates a branched version of this document that has its semantic model frozen in whatever state it is available at the time,
/// assuming a background process is constructing the semantics asynchronously. Repeated calls to this method may return
/// documents with increasingly more complete semantics.
///
/// Use this method to gain access to potentially incomplete semantics quickly.
/// </summary>
internal async Task<Document> WithFrozenPartialSemanticsAsync(CancellationToken cancellationToken)
{
var solution = this.Project.Solution;
var workspace = solution.Workspace;
// only produce doc with frozen semantics if this document is part of the workspace's
// primary branch and there is actual background compilation going on, since w/o
// background compilation the semantics won't be moving toward completeness. Also,
// ensure that the project that this document is part of actually supports compilations,
// as partial semantics don't make sense otherwise.
if (solution.BranchId == workspace.PrimaryBranchId &&
workspace.PartialSemanticsEnabled &&
this.Project.SupportsCompilation)
{
var newSolution = await this.Project.Solution.WithFrozenPartialCompilationIncludingSpecificDocumentAsync(this.Id, cancellationToken).ConfigureAwait(false);
return newSolution.GetDocument(this.Id);
}
else
{
return this;
}
}
private string GetDebuggerDisplay()
{
return this.Name;
}
private AsyncLazy<DocumentOptionSet> _cachedOptions;
/// <summary>
/// Returns the options that should be applied to this document. This consists of global options from <see cref="Solution.Options"/>,
/// merged with any settings the user has specified at the document levels.
/// </summary>
/// <remarks>
/// This method is async because this may require reading other files. In files that are already open, this is expected to be cheap and complete synchronously.
/// </remarks>
public Task<DocumentOptionSet> GetOptionsAsync(CancellationToken cancellationToken = default(CancellationToken))
{
return GetOptionsAsync(Project.Solution.Options, cancellationToken);
}
internal Task<DocumentOptionSet> GetOptionsAsync(OptionSet solutionOptions, CancellationToken cancellationToken)
{
// TODO: we have this workaround because Solution.Options is not actually a snapshot but just returns Workspace.Options, which violates the snapshot model.
// This doesn't validate whether the same option set is given in order to invalidate the cache. That is not new; the existing implementation
// also didn't check whether Workspace.Options was the same as before. All of this weirdness comes from the root cause of Solution.Options violating the
// snapshot model. Once that is fixed, we can remove this workaround - https://github.com/dotnet/roslyn/issues/19284
if (_cachedOptions == null)
{
var newAsyncLazy = new AsyncLazy<DocumentOptionSet>(async c =>
{
var optionsService = Project.Solution.Workspace.Services.GetRequiredService<IOptionService>();
var documentOptionSet = await optionsService.GetUpdatedOptionSetForDocumentAsync(this, solutionOptions, c).ConfigureAwait(false);
return new DocumentOptionSet(documentOptionSet, Project.Language);
}, cacheResult: true);
Interlocked.CompareExchange(ref _cachedOptions, newAsyncLazy, comparand: null);
}
return _cachedOptions.GetValueAsync(cancellationToken);
}
}
}
| amcasey/roslyn | src/Workspaces/Core/Portable/Workspace/Solution/Document.cs | C# | apache-2.0 | 21,517 |
// Copyright (c) 2016 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <cstring>
#include <iostream>
#include <memory>
#include <sstream>
#include <vector>
#include "message.h"
#include "source/opt/build_module.h"
#include "source/opt/ir_loader.h"
#include "source/opt/pass_manager.h"
#include "source/opt/passes.h"
#include "tools/io.h"
using namespace spvtools;
void PrintUsage(const char* program) {
printf(
R"(%s - Optimize a SPIR-V binary file.
USAGE: %s [options] [<input>] -o <output>
The SPIR-V binary is read from <input>. If no file is specified,
or if <input> is "-", then the binary is read from standard input.
If <output> is "-", then the optimized output is written to
standard output.
NOTE: The optimizer is a work in progress.
Options:
--strip-debug
Remove all debug instructions.
--freeze-spec-const
Freeze the values of specialization constants to their default
values.
--eliminate-dead-const
Eliminate dead constants.
--fold-spec-const-op-composite
Fold the spec constants defined by OpSpecConstantOp or
OpSpecConstantComposite instructions to front-end constants
when possible.
--set-spec-const-default-value "<spec id>:<default value> ..."
Set the default values of the specialization constants with
<spec id>:<default value> pairs specified in a double-quoted
string. <spec id>:<default value> pairs must be separated by
blank spaces, and in each pair, spec id and default value must
be separated with colon ':' without any blank spaces in between.
e.g.: --set-spec-const-default-value "1:100 2:400"
--unify-const
Remove the duplicated constants.
-h, --help Print this help.
--version Display optimizer version information.
)",
program, program);
}
int main(int argc, char** argv) {
const char* in_file = nullptr;
const char* out_file = nullptr;
spv_target_env target_env = SPV_ENV_UNIVERSAL_1_1;
opt::PassManager pass_manager;
pass_manager.SetMessageConsumer(
[](spv_message_level_t level, const char* source,
const spv_position_t& position, const char* message) {
std::cerr << StringifyMessage(level, source, position, message)
<< std::endl;
});
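// Walk the command-line arguments, collecting the input/output file names and
// adding the corresponding optimization passes to the pass manager as each
// recognized flag is seen.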
for (int argi = 1; argi < argc; ++argi) {
const char* cur_arg = argv[argi];
if ('-' == cur_arg[0]) {
if (0 == strcmp(cur_arg, "--version")) {
printf("%s\n", spvSoftwareVersionDetailsString());
return 0;
} else if (0 == strcmp(cur_arg, "--help") || 0 == strcmp(cur_arg, "-h")) {
PrintUsage(argv[0]);
return 0;
} else if (0 == strcmp(cur_arg, "-o")) {
if (!out_file && argi + 1 < argc) {
out_file = argv[++argi];
} else {
PrintUsage(argv[0]);
return 1;
}
} else if (0 == strcmp(cur_arg, "--strip-debug")) {
pass_manager.AddPass<opt::StripDebugInfoPass>();
} else if (0 == strcmp(cur_arg, "--set-spec-const-default-value")) {
if (++argi < argc) {
auto spec_ids_vals =
opt::SetSpecConstantDefaultValuePass::ParseDefaultValuesString(
argv[argi]);
if (!spec_ids_vals) {
fprintf(stderr,
"error: Invalid argument for "
"--set-spec-const-default-value: %s\n",
argv[argi]);
return 1;
}
pass_manager.AddPass<opt::SetSpecConstantDefaultValuePass>(
std::move(*spec_ids_vals));
} else {
fprintf(
stderr,
"error: Expected a string of <spec id>:<default value> pairs.");
return 1;
}
} else if (0 == strcmp(cur_arg, "--freeze-spec-const")) {
pass_manager.AddPass<opt::FreezeSpecConstantValuePass>();
} else if (0 == strcmp(cur_arg, "--eliminate-dead-const")) {
pass_manager.AddPass<opt::EliminateDeadConstantPass>();
} else if (0 == strcmp(cur_arg, "--fold-spec-const-op-composite")) {
pass_manager.AddPass<opt::FoldSpecConstantOpAndCompositePass>();
} else if (0 == strcmp(cur_arg, "--unify-const")) {
pass_manager.AddPass<opt::UnifyConstantPass>();
} else if ('\0' == cur_arg[1]) {
// Setting a filename of "-" to indicate stdin.
if (!in_file) {
in_file = cur_arg;
} else {
fprintf(stderr, "error: More than one input file specified\n");
return 1;
}
} else {
PrintUsage(argv[0]);
return 1;
}
} else {
if (!in_file) {
in_file = cur_arg;
} else {
fprintf(stderr, "error: More than one input file specified\n");
return 1;
}
}
}
if (out_file == nullptr) {
fprintf(stderr, "error: -o required\n");
return 1;
}
std::vector<uint32_t> source;
if (!ReadFile<uint32_t>(in_file, "rb", &source)) return 1;
// Let's do validation first.
spv_context context = spvContextCreate(target_env);
spv_diagnostic diagnostic = nullptr;
spv_const_binary_t binary = {source.data(), source.size()};
spv_result_t error = spvValidate(context, &binary, &diagnostic);
if (error) {
spvDiagnosticPrint(diagnostic);
spvDiagnosticDestroy(diagnostic);
spvContextDestroy(context);
return error;
}
spvDiagnosticDestroy(diagnostic);
spvContextDestroy(context);
std::unique_ptr<ir::Module> module = BuildModule(
target_env, pass_manager.consumer(), source.data(), source.size());
pass_manager.Run(module.get());
std::vector<uint32_t> target;
module->ToBinary(&target, /* skip_nop = */ true);
if (!WriteFile<uint32_t>(out_file, "wb", target.data(), target.size())) {
return 1;
}
return 0;
}
| umar456/SPIRV-Tools | tools/opt/opt.cpp | C++ | apache-2.0 | 6,412 |
/*
Zenburn theme for Shout.
Based on the Morning Theme by Riku Rouvila
Installation instructions can be found here
http://shout-irc.com/docs/server/configuration.html#theme
Author: JP Smith
GitHub: https://github.com/japesinator
*/
/*
BACKGROUND #3f3f3f
INPUT BACKGROUND #434443
PRIMARY #dcdccc
SECONDARY #d2d39b
BORDERS #333333
QUIT #bc6c4c
*/
#windows .chan, #windows .window {
background: #3f3f3f;
}
#main #chat,
#main #form,
#form .input,
#chat,
#windows .header {
font-family: 'Open Sans', sans-serif !important;
font-size: 13px;
}
#settings, #sign-in, #connect {
color: #dcdccc;
}
#settings, #sign-in, #connect .title {
color: #88b090;
}
#settings, #sign-in, #connect .opt {
color: #dcdccc;
}
#sidebar {
background: #2b2b2b;
bottom: 48px;
}
#footer {
background: #33332f;
border-top: 1px solid #000;
}
#chat .sidebar {
background: #3f3f3f;
}
#chat .count {
background-color: #434443;
}
#chat .search {
color: #88b090;
padding: 15px 16px;
}
#chat .search::-webkit-input-placeholder {
color: #d2d39b;
opacity: 0.5;
}
/* Borders */
#chat .from, #windows .header,
#chat .user-mode:before,
#chat .sidebar {
border-color: #333333;
}
/* Attach chat to window borders */
#windows .window:before, #windows .chan:before {
display: none;
}
#footer {
left: 0;
bottom: 0;
width: 220px;
}
#main {
top: 0;
bottom: 0;
right: 0;
border-radius: 0;
}
#chat .chat, #chat .sidebar {
top: 48px;
}
/* User list */
#chat .user-mode {
color: #dcdccc;
}
/* Nicknames */
#chat.no-colors .from button,
#chat.no-colors .sidebar button {
color: #bc8cbc !important;
}
#chat.no-colors .from button:hover,
#chat.no-colors .sidebar button:hover {
color: #dcdccc !important;
}
#chat a {
color: #8c8cbc;
}
#chat button:hover {
opacity: 1;
}
/* Message form */
#form {
background: #333333;
border-color: #101010;
}
#form .input {
margin-right: 0;
}
#form #input {
background-color: #434443;
border-color: #101010;
color: #dcdccc;
padding-left: 1em !important;
}
#form #nick {
display: none;
}
#chat .header {
color: #d2d39b;
}
/* Hide unnecessary buttons */
#windows .header .button,
#form #submit {
display: none;
}
/* Setup text colors */
#chat .msg {
color: #ffcfaf;
}
#chat .message {
color: #dcdccc;
}
#chat .self .text {
color: #d2d39b;
}
#chat .error,
#chat .error .from,
#chat .highlight,
#chat .highlight .from {
color: #bc6c4c;
}
#chat .msg.quit .time,
#chat .msg.quit .from button,
#chat .msg.quit .type {
color: #bc6c9c !important;
}
#chat .msg.topic {
color: #dcdccc;
}
#chat .msg .type {
margin-right: 0.5em;
}
#chat .msg.join .time,
#chat .msg.join .from button,
#chat .msg.join .type {
color: #8cd0d3 !important;
}
/* Embeds */
#chat .toggle-content,
#chat .toggle-button {
background: #93b3a3;
color: #dcdccc;
}
#chat .toggle-content img {
float: left;
margin-right: 0.5em;
}
#chat .toggle-content .body {
color: #d2d39b;
}
| mat-mo/shout_ynh | sources/client/themes/zenburn.css | CSS | apache-2.0 | 2,958 |
/* This file is part of ArcCore.
*
* ArcCore is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* ArcCore is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with ArcCore; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
* Author: Stephen Lane-Walsh
*
*/
#ifndef ARC_CORE_VECTOR_3_H
#define ARC_CORE_VECTOR_3_H
#include <sstream>
#include "Vector2.h"
#include "Types.h"
#include "RandomFunctions.h"
#include "MathFunctions.h"
using std::stringstream;
namespace Arc
{
class Vector3
: public Vector2
{
public:
static Vector3 ZERO;
static Vector3 ONE;
static Vector3 NEGATIVE_ONE;
inline Vector3( void )
: Vector2(),
m_Z()
{ }
inline Vector3( const float& x, const float& y, const float& z )
: Vector2(x, y),
m_Z(z)
{ }
inline Vector3( const float& all )
: Vector2(all),
m_Z(all)
{ }
inline Vector3( const Vector3& rhs )
: Vector2(rhs.getX(), rhs.getY()),
m_Z(rhs.m_Z)
{ }
virtual inline ~Vector3( void ) { }
virtual inline string getClassName( void ) const { return "Arc Vector3"; }
virtual inline string toString( void ) const
{
stringstream ss;
ss << "[X: " << m_X << ", Y: " << m_Y << ", Z: " << m_Z << "]";
return ss.str();
}
/* Generate a random Vector3 between the min and max values
*
* @param minX: The minimum X value to be generated randomly
* @param minY: The minimum Y value to be generated randomly
* @param minZ: The minimum Z value to be generated randomly
* @param maxX: The maximum X value to be generated randomly
* @param maxY: The maximum Y value to be generated randomly
* @param maxZ: The maximum Z value to be generated randomly
* @returns: A random Vector3 between the min and max values
*/
inline static Vector3 Rand( float minX, float minY, float minZ, float maxX, float maxY, float maxZ )
{
return Vector3(Arc_RandFloat(minX, maxX), Arc_RandFloat(minY, maxY), Arc_RandFloat(minZ, maxZ));
}
/* Lerp between the start and end vectors by the fraction amount
*
* @param start: The starting values
* @param end: The ending values
* @param fraction: The amount to lerp between the values, should be between 0 and 1
* @returns: The lerped value between the start and end vectors by the fraction amount
*/
inline static Vector3 Lerp( const Vector3& start, const Vector3& end, float fraction )
{
return Vector3(Arc_Lerp(start.getX(), end.getX(), fraction), Arc_Lerp(start.getY(), end.getY(), fraction), Arc_Lerp(start.getZ(), end.getZ(), fraction));
}
virtual inline float getZ( void ) const { return m_Z; }
virtual inline void setZ( float z ) { m_Z = z; }
/*
* @returns: Half of the Z value
*/
inline float getHalfZ( void ) const { return (getZ() * 0.5f); }
/* Normalizes the vector and stores the new values in X, Y, and Z
*/
virtual void normalize( void );
/* Gets a normalized copy of this vector
*
* @returns: A normalized copy of this vector
*/
Vector3 getNormalized( void );
virtual inline float getLengthSquared( void )
{
return (m_X * m_X) + (m_Y * m_Y) + (m_Z * m_Z);
}
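/* Computes the dot product of this vector and another vector
*
* @param rhs: The vector to dot with this one
* @returns: The dot product of the two vectors
*/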
inline float getDot( const Vector3& rhs )
{
return (getX() * rhs.getX()) + (getY() * rhs.getY()) + (getZ() * rhs.getZ());
}
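/* Computes the cross product of this vector and another vector
*
* @param other: The vector to cross with this one
* @returns: A vector perpendicular to both input vectors
*/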
inline Vector3 getCross( Vector3& other )
{
Vector3 result;
result.setX( (getY() * other.getZ()) - (getZ() * other.getY()) );
result.setY( (getZ() * other.getX()) - (getX() * other.getZ()) );
result.setZ( (getX() * other.getY()) - (getY() * other.getX()) );
return result;
}
virtual inline Vector3 operator-( const Vector3& rhs )
{
Vector3 tmp;
tmp.m_X = m_X - rhs.m_X;
tmp.m_Y = m_Y - rhs.m_Y;
tmp.m_Z = m_Z - rhs.m_Z;
return tmp;
}
virtual inline Vector3 operator+( const Vector3& rhs )
{
Vector3 tmp;
tmp.m_X = m_X + rhs.m_X;
tmp.m_Y = m_Y + rhs.m_Y;
tmp.m_Z = m_Z + rhs.m_Z;
return tmp;
}
virtual inline Vector3 operator/( const Vector3& rhs )
{
Vector3 tmp;
tmp.m_X = m_X / rhs.m_X;
tmp.m_Y = m_Y / rhs.m_Y;
tmp.m_Z = m_Z / rhs.m_Z;
return tmp;
}
virtual inline Vector3 operator*( const Vector3& rhs )
{
Vector3 tmp;
tmp.m_X = m_X * rhs.m_X;
tmp.m_Y = m_Y * rhs.m_Y;
tmp.m_Z = m_Z * rhs.m_Z;
return tmp;
}
virtual inline void operator=( const Vector3& rhs )
{
Vector2::operator=(rhs);
m_Z = rhs.m_Z;
}
virtual inline Vector3& operator+=( const Vector3& rhs )
{
Vector2::operator+=(rhs);
m_Z += rhs.m_Z;
return *this;
}
virtual inline Vector3& operator-=( const Vector3& rhs )
{
Vector2::operator-=(rhs);
m_Z -= rhs.m_Z;
return *this;
}
virtual inline Vector3& operator*=( const Vector3& rhs )
{
Vector2::operator*=(rhs);
m_Z *= rhs.m_Z;
return *this;
}
virtual inline Vector3& operator/=( const Vector3& rhs )
{
Vector2::operator/=(rhs);
m_Z /= rhs.m_Z;
return *this;
}
virtual inline Vector3& operator*=( float value )
{
Vector2::operator*=(value);
m_Z *= value;
return *this;
}
virtual inline Vector3& operator/=( float value )
{
Vector2::operator/=(value);
m_Z /= value;
return *this;
}
virtual inline bool operator==( const Vector3& rhs ) const
{
return (Vector2::operator==(rhs) && m_Z == rhs.m_Z);
}
virtual inline bool operator!=( const Vector3& rhs ) const
{
return ! (*this == rhs);
}
virtual inline bool operator<( const Vector3& rhs ) const
{
return (Vector2::operator<(rhs) && m_Z < rhs.m_Z);
}
virtual inline bool operator>( const Vector3& rhs ) const
{
return (Vector2::operator>(rhs) && m_Z > rhs.m_Z);
}
virtual inline bool operator<=( const Vector3& rhs ) const
{
return (Vector2::operator<=(rhs) && m_Z <= rhs.m_Z);
}
virtual inline bool operator>=( const Vector3& rhs ) const
{
return (Vector2::operator>=(rhs) && m_Z >= rhs.m_Z);
}
protected:
float m_Z;
};
} // namespace Arc
#endif // ARC_CORE_VECTOR_3_H
| MorrigansWings/GamePhysics | RubeGoldberg/Libraries/Arc/include/Arc/Vector3.h | C | apache-2.0 | 6,326 |
#include "resource.h"
#include <windows.h>
#include <tchar.h>
#include <commctrl.h>
#include "uwinapp.h"
#include "ubasewindow.h"
#include "colors.h"
#include "ubitmap.h"
//#include "udibapi.h"
#include "udc.h"
#include "adt/ustring.h"
class UMyWindow : public UBaseWindow
{
typedef huys::ADT::UStringAnsi UString;
public:
UMyWindow()
: UBaseWindow(NULL, ::GetModuleHandle(NULL))
{
this->setTitle(_T("Mouse Test 0.0.1"));
this->setPos(0, 0, 600, 600);
}
BOOL onCreate()
{
this->setIconBig(IDI_PEN);
return UBaseWindow::onCreate();
}
/* virtual */ BOOL onMouseMove(WPARAM wParam, LPARAM lParam)
{
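// Extract the cursor position from lParam and draw it as "(x, y)" text at a
// fixed point in the client area using a private device context.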
int xPos = GET_X_LPARAM(lParam);
int yPos = GET_Y_LPARAM(lParam);
UString str;
str.format("(%d, %d)", xPos, yPos);
UPrivateDC dc(this->getHandle());
dc.textOutEx(100, 100, str);
return FALSE;
}
//
virtual void onDraw(HDC hdc)
{
RECT rc = {0};
this->getClientRect(&rc);
//huys::PaintDIB(hdc, &rc, _dib, &rc, NULL);
}
BOOL onLButtonDown(WPARAM wParam, LPARAM lParam)
{
//huys::HDIB hNewDIB = huys::RotateDIB(_dib);
//_dib = hNewDIB;
//invalidate(TRUE);
return FALSE;
}
virtual BOOL onChar(WPARAM wParam, LPARAM lParam)
{
switch (wParam)
{
case VK_ESCAPE:
return UBaseWindow::onClose();
default:
return UBaseWindow::onChar(wParam, lParam);
}
}
private:
//huys::HDIB _dib;
};
int WINAPI WinMain(HINSTANCE hInstance, HINSTANCE, LPSTR lpszCmdLine, int nCmdShow)
{
UWinApp app;
app.setMainWindow(new UMyWindow);
app.init(hInstance);
return app.run();
}
| baiyunping333/ulib-win | demo/mouse/test_mouse.cpp | C++ | apache-2.0 | 1,793 |
// Copyright 2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package opentsdb
import (
"fmt"
"net/url"
"testing"
"time"
opentsdb "github.com/bluebreezecf/opentsdb-goclient/client"
opentsdbcfg "github.com/bluebreezecf/opentsdb-goclient/config"
"github.com/stretchr/testify/assert"
"k8s.io/heapster/extpoints"
sink_api "k8s.io/heapster/sinks/api"
sink_util "k8s.io/heapster/sinks/util"
kube_api "k8s.io/kubernetes/pkg/api"
kube_api_unv "k8s.io/kubernetes/pkg/api/unversioned"
"k8s.io/kubernetes/pkg/types"
)
var (
fakeOpenTSDBHost = "192.168.1.8:823"
fakeNodeIp = "192.168.1.23"
fakePodName = "redis-test"
fakePodUid = "redis-test-uid"
fakeLabel = map[string]string{
"name": "redis",
"io.kubernetes.pod.name": "default/redis-test",
"pod_id": fakePodUid,
"pod_namespace": "default",
"pod_name": fakePodName,
"container_name": "redis",
"container_base_image": "kubernetes/redis:v1",
"namespace_id": "namespace-test-uid",
"host_id": fakeNodeIp,
}
errorPingFailed = fmt.Errorf("Failed to connect to the target opentsdb.")
errorPutFailed = fmt.Errorf("The target opentsdb got an error and failed to store the datapoints.")
)
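// fakeOpenTSDBClient is a test double for the OpenTSDB client: it records every
// data point it receives and can be configured to fail Ping or Put.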
type fakeOpenTSDBClient struct {
successfulPing bool
successfulPut bool
receivedDataPoints []opentsdb.DataPoint
}
func (client *fakeOpenTSDBClient) Ping() error {
if client.successfulPing {
return nil
}
return errorPingFailed
}
func (client *fakeOpenTSDBClient) Put(datapoints []opentsdb.DataPoint, queryParam string) (*opentsdb.PutResponse, error) {
if !client.successfulPut {
return nil, errorPutFailed
}
client.receivedDataPoints = append(client.receivedDataPoints, datapoints...)
putRes := opentsdb.PutResponse{
StatusCode: 200,
Failed: 0,
Success: int64(len(datapoints)),
}
return &putRes, nil
}
type fakeOpenTSDBSink struct {
sink_api.ExternalSink
fakeClient *fakeOpenTSDBClient
}
func NewFakeOpenTSDBSink(successfulPing, successfulPut bool) fakeOpenTSDBSink {
client := &fakeOpenTSDBClient{
successfulPing: successfulPing,
successfulPut: successfulPut,
}
cfg := opentsdbcfg.OpenTSDBConfig{OpentsdbHost: fakeOpenTSDBHost}
return fakeOpenTSDBSink{
&openTSDBSink{
client: client,
config: cfg,
ci: sink_util.NewClientInitializer("test", func() error { return nil }, func() error { return nil }, time.Millisecond),
},
client,
}
}
func TestStoreTimeseriesNilInput(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, true)
err := fakeSink.StoreTimeseries(nil)
assert.NoError(t, err)
assert.Equal(t, 0, len(fakeSink.fakeClient.receivedDataPoints))
}
func TestStoreTimeseriesEmptyInput(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, true)
err := fakeSink.StoreTimeseries([]sink_api.Timeseries{})
assert.NoError(t, err)
assert.Equal(t, 0, len(fakeSink.fakeClient.receivedDataPoints))
}
func TestStoreTimeseriesWithPingFailed(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(false, true)
seriesList := generateFakeTimeseriesList()
err := fakeSink.StoreTimeseries(seriesList)
assert.Equal(t, err, errorPingFailed)
assert.Equal(t, 0, len(fakeSink.fakeClient.receivedDataPoints))
}
func TestStoreTimeseriesWithPutFailed(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, false)
seriesList := generateFakeTimeseriesList()
err := fakeSink.StoreTimeseries(seriesList)
assert.Equal(t, err, errorPutFailed)
assert.Equal(t, 0, len(fakeSink.fakeClient.receivedDataPoints))
}
func TestStoreTimeseriesSingleTimeserieInput(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, true)
seriesName := "cpu/limit"
series := generateFakeTimeseries(seriesName, sink_api.MetricGauge, sink_api.UnitsCount, 1000)
//Without any labels
series.Point.Labels = map[string]string{}
seriesList := []sink_api.Timeseries{series}
err := fakeSink.StoreTimeseries(seriesList)
assert.NoError(t, err)
assert.Equal(t, 1, len(fakeSink.fakeClient.receivedDataPoints))
assert.Equal(t, "cpu_limit_gauge", fakeSink.fakeClient.receivedDataPoints[0].Metric)
//tsdbSink.secureTags() add a default tag key and value pair
assert.Equal(t, 1, len(fakeSink.fakeClient.receivedDataPoints[0].Tags))
assert.Equal(t, defaultTagValue, fakeSink.fakeClient.receivedDataPoints[0].Tags[defaultTagName])
}
func TestStoreTimeseriesMultipleTimeseriesInput(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, true)
seriesList := generateFakeTimeseriesList()
err := fakeSink.StoreTimeseries(seriesList)
assert.NoError(t, err)
assert.Equal(t, len(seriesList), len(fakeSink.fakeClient.receivedDataPoints))
}
func TestStoreEventsNilInput(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, true)
err := fakeSink.StoreEvents(nil)
assert.NoError(t, err)
assert.Equal(t, 0, len(fakeSink.fakeClient.receivedDataPoints))
}
func TestStoreEventsEmptyInput(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, true)
err := fakeSink.StoreEvents([]kube_api.Event{})
assert.NoError(t, err)
assert.Equal(t, 0, len(fakeSink.fakeClient.receivedDataPoints))
}
func TestStoreEventsWithPingFailed(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(false, true)
err := fakeSink.StoreEvents(generateFakeEvents())
assert.Equal(t, err, errorPingFailed)
assert.Equal(t, 0, len(fakeSink.fakeClient.receivedDataPoints))
}
func TestStoreEventsWithPutFailed(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, false)
err := fakeSink.StoreEvents(generateFakeEvents())
assert.Equal(t, err, errorPutFailed)
assert.Equal(t, 0, len(fakeSink.fakeClient.receivedDataPoints))
}
func TestStoreEventsSingleEventInput(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, true)
eventTime := kube_api_unv.Unix(12345, 0)
eventSourceHostname := fakeNodeIp
eventReason := "created"
involvedObject := kube_api.ObjectReference{
Kind: "Pod",
Name: fakePodName,
UID: types.UID(fakePodUid),
Namespace: "default",
}
events := []kube_api.Event{
{
Reason: eventReason,
LastTimestamp: eventTime,
Source: kube_api.EventSource{
Host: fakeNodeIp,
},
InvolvedObject: involvedObject,
},
}
err := fakeSink.StoreEvents(events)
assert.NoError(t, err)
assert.Equal(t, 1, len(fakeSink.fakeClient.receivedDataPoints))
assert.Equal(t, eventMetricName, fakeSink.fakeClient.receivedDataPoints[0].Metric)
assert.Equal(t, 4, len(fakeSink.fakeClient.receivedDataPoints[0].Tags))
assert.Equal(t, eventTime.Time.Unix(), fakeSink.fakeClient.receivedDataPoints[0].Timestamp)
assert.Equal(t, fakePodUid, fakeSink.fakeClient.receivedDataPoints[0].Tags["pod_id"])
assert.Equal(t, eventSourceHostname, fakeSink.fakeClient.receivedDataPoints[0].Tags[sink_api.LabelHostname.Key])
assert.Contains(t, fakeSink.fakeClient.receivedDataPoints[0].Value, eventReason)
}
func TestStoreEventsMultipleEventsInput(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, true)
event1Time := kube_api_unv.Unix(12345, 0)
event2Time := kube_api_unv.Unix(12366, 0)
event1SourceHostname := "event1HostName"
event2SourceHostname := "event2HostName"
event1Reason := "event1"
event2Reason := "event2"
events := []kube_api.Event{
{
Reason: event1Reason,
LastTimestamp: event1Time,
Source: kube_api.EventSource{
Host: event1SourceHostname,
},
},
{
Reason: event2Reason,
LastTimestamp: event2Time,
Source: kube_api.EventSource{
Host: event2SourceHostname,
},
},
}
err := fakeSink.StoreEvents(events)
assert.NoError(t, err)
assert.Equal(t, 2, len(fakeSink.fakeClient.receivedDataPoints))
assert.Equal(t, eventMetricName, fakeSink.fakeClient.receivedDataPoints[0].Metric)
assert.Equal(t, 2, len(fakeSink.fakeClient.receivedDataPoints[0].Tags))
assert.Equal(t, event1Time.Time.Unix(), fakeSink.fakeClient.receivedDataPoints[0].Timestamp)
assert.Equal(t, "", fakeSink.fakeClient.receivedDataPoints[0].Tags["pod_id"])
assert.Equal(t, event1SourceHostname, fakeSink.fakeClient.receivedDataPoints[0].Tags[sink_api.LabelHostname.Key])
assert.Contains(t, fakeSink.fakeClient.receivedDataPoints[0].Value, event1Reason)
assert.Equal(t, eventMetricName, fakeSink.fakeClient.receivedDataPoints[1].Metric)
assert.Equal(t, 2, len(fakeSink.fakeClient.receivedDataPoints[1].Tags))
assert.Equal(t, event2Time.Time.Unix(), fakeSink.fakeClient.receivedDataPoints[1].Timestamp)
assert.Equal(t, "", fakeSink.fakeClient.receivedDataPoints[1].Tags["pod_id"])
assert.Equal(t, event2SourceHostname, fakeSink.fakeClient.receivedDataPoints[1].Tags[sink_api.LabelHostname.Key])
assert.Contains(t, fakeSink.fakeClient.receivedDataPoints[1].Value, event2Reason)
}
func TestRegister(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, true)
err := fakeSink.Register([]sink_api.MetricDescriptor{})
assert.NoError(t, err)
assert.Nil(t, err)
}
func TestUnregister(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, true)
err := fakeSink.Unregister([]sink_api.MetricDescriptor{})
assert.NoError(t, err)
assert.Nil(t, err)
}
func TestName(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, true)
name := fakeSink.Name()
assert.Equal(t, name, opentsdbSinkName)
}
func TestDebugInfo(t *testing.T) {
fakeSink := NewFakeOpenTSDBSink(true, true)
debugInfo := fakeSink.DebugInfo()
assert.Contains(t, debugInfo, "Sink Type: OpenTSDB")
assert.Contains(t, debugInfo, "client: Host "+fakeOpenTSDBHost)
assert.Contains(t, debugInfo, "Number of write failures:")
}
func TestCreateOpenTSDBSinkWithEmptyInputs(t *testing.T) {
extSinks, err := CreateOpenTSDBSink(&url.URL{}, extpoints.HeapsterConf{})
assert.NoError(t, err)
assert.NotNil(t, extSinks)
assert.Equal(t, 1, len(extSinks))
tsdbSink, ok := extSinks[0].(*openTSDBSink)
assert.Equal(t, true, ok)
assert.Equal(t, defaultOpentsdbHost, tsdbSink.config.OpentsdbHost)
}
func TestCreateOpenTSDBSinkWithNoEmptyInputs(t *testing.T) {
fakeOpentsdbHost := "192.168.8.23:4242"
extSinks, err := CreateOpenTSDBSink(&url.URL{Host: fakeOpentsdbHost}, extpoints.HeapsterConf{})
assert.NoError(t, err)
assert.NotNil(t, extSinks)
assert.Equal(t, 1, len(extSinks))
tsdbSink, ok := extSinks[0].(*openTSDBSink)
assert.Equal(t, true, ok)
assert.Equal(t, fakeOpentsdbHost, tsdbSink.config.OpentsdbHost)
}
func generateFakeEvents() []kube_api.Event {
event1Time := kube_api_unv.Unix(12345, 0)
event2Time := kube_api_unv.Unix(12366, 0)
event1SourceHostname := "event1HostName"
event2SourceHostname := "event2HostName"
event1Reason := "event1"
event2Reason := "event2"
events := []kube_api.Event{
{
Reason: event1Reason,
LastTimestamp: event1Time,
Source: kube_api.EventSource{
Host: event1SourceHostname,
},
},
{
Reason: event2Reason,
LastTimestamp: event2Time,
Source: kube_api.EventSource{
Host: event2SourceHostname,
},
},
}
return events
}
func generateFakeTimeseriesList() []sink_api.Timeseries {
timeseriesList := make([]sink_api.Timeseries, 0)
series := generateFakeTimeseries("cpu/limit", sink_api.MetricGauge, sink_api.UnitsCount, 1000)
timeseriesList = append(timeseriesList, series)
series = generateFakeTimeseries("cpu/usage", sink_api.MetricCumulative, sink_api.UnitsNanoseconds, 43363664)
timeseriesList = append(timeseriesList, series)
series = generateFakeTimeseries("filesystem/limit", sink_api.MetricGauge, sink_api.UnitsBytes, 42241163264)
timeseriesList = append(timeseriesList, series)
series = generateFakeTimeseries("filesystem/usage", sink_api.MetricGauge, sink_api.UnitsBytes, 32768)
timeseriesList = append(timeseriesList, series)
series = generateFakeTimeseries("memory/limit", sink_api.MetricGauge, sink_api.UnitsBytes, -1)
timeseriesList = append(timeseriesList, series)
series = generateFakeTimeseries("memory/usage", sink_api.MetricGauge, sink_api.UnitsBytes, 487424)
timeseriesList = append(timeseriesList, series)
series = generateFakeTimeseries("memory/working_set", sink_api.MetricGauge, sink_api.UnitsBytes, 491520)
timeseriesList = append(timeseriesList, series)
series = generateFakeTimeseries("uptime", sink_api.MetricCumulative, sink_api.UnitsMilliseconds, 910823)
timeseriesList = append(timeseriesList, series)
return timeseriesList
}
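// generateFakeTimeseries builds a single Timeseries carrying the shared fake label
// set, using the given metric name, type, units and value.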
func generateFakeTimeseries(name string, metricType sink_api.MetricType, metricUnits sink_api.MetricUnitsType, value interface{}) sink_api.Timeseries {
end := time.Now()
start := end.Add(-10)
point := sink_api.Point{
Name: name,
Labels: fakeLabel,
Value: value,
Start: start,
End: end,
}
metricDesc := sink_api.MetricDescriptor{
Type: metricType,
Units: metricUnits,
}
series := sink_api.Timeseries{
Point: &point,
MetricDescriptor: &metricDesc,
}
return series
}
| rhuss/gofabric8 | vendor/k8s.io/heapster/sinks/opentsdb/driver_test.go | GO | apache-2.0 | 13,234 |
{% extends 'airflow/model_list.html' %}
{% block model_menu_bar %}
{% if not admin_view.is_secure() %}
<div class="alert alert-danger"><b>Warning:</b> Connection passwords are stored in plaintext until you install the Python "cryptography" library. You can find installation instructions here: <a href=https://cryptography.io/en/latest/installation/>https://cryptography.io/en/latest/installation/</a>. Once installed, instructions for creating an encryption key will be displayed the next time you import Airflow. </div>
{% endif %}
{% if admin_view.alert_fernet_key() %}
<div class="alert alert-danger"><b>Warning:</b>
Airflow is currently storing passwords in <b>plain text</b>.
To turn on password encryption for connections, you need to add a
"fernet_key" option to the "core" section of your airflow.cfg file.
To generate a key, you can call the function
<code>airflow.configuration.generate_fernet_key()</code>
</div>
{% endif %}
{{ super() }}
{% endblock %}
| dud225/incubator-airflow | airflow/www/templates/airflow/conn_list.html | HTML | apache-2.0 | 1,018 |
/*
* Copyright 2017 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.domain;
import com.google.gson.GsonBuilder;
import com.thoughtworks.go.domain.Plugin;
import com.thoughtworks.go.domain.config.ConfigurationKey;
import com.thoughtworks.go.domain.config.ConfigurationProperty;
import com.thoughtworks.go.domain.config.ConfigurationValue;
import com.thoughtworks.go.domain.config.EncryptedConfigurationValue;
import com.thoughtworks.go.plugin.access.common.settings.PluginSettingsConfiguration;
import com.thoughtworks.go.plugin.access.common.settings.PluginSettingsProperty;
import com.thoughtworks.go.plugin.domain.common.Metadata;
import com.thoughtworks.go.plugin.domain.common.PluggableInstanceSettings;
import com.thoughtworks.go.plugin.domain.common.PluginConfiguration;
import com.thoughtworks.go.plugin.domain.configrepo.ConfigRepoPluginInfo;
import com.thoughtworks.go.security.GoCipher;
import org.bouncycastle.crypto.InvalidCipherTextException;
import org.junit.Test;
import java.util.*;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
public class PluginSettingsTest {
public static final String PLUGIN_ID = "plugin-id";
@Test
public void shouldPopulateSettingsMapFromPluginFromDB() {
Map<String, String> configuration = new HashMap<>();
configuration.put("k1", "v1");
configuration.put("k2", "");
configuration.put("k3", null);
Plugin plugin = new Plugin(PLUGIN_ID, toJSON(configuration));
PluginSettings pluginSettings = new PluginSettings(PLUGIN_ID);
pluginSettings.populateSettingsMap(plugin);
assertThat(pluginSettings.getPluginSettingsKeys().size(), is(3));
assertThat(pluginSettings.getValueFor("k1"), is("v1"));
assertThat(pluginSettings.getValueFor("k2"), is(""));
assertThat(pluginSettings.getValueFor("k3"), is(nullValue()));
}
@Test
public void shouldPopulateSettingsMapFromPluginFromConfiguration() {
PluginSettingsConfiguration configuration = new PluginSettingsConfiguration();
configuration.add(new PluginSettingsProperty("k1", "v1"));
configuration.add(new PluginSettingsProperty("k2", ""));
configuration.add(new PluginSettingsProperty("k3", null));
PluginSettings pluginSettings = new PluginSettings(PLUGIN_ID);
pluginSettings.populateSettingsMap(configuration);
assertThat(pluginSettings.getValueFor("k1"), is(""));
assertThat(pluginSettings.getValueFor("k2"), is(""));
assertThat(pluginSettings.getValueFor("k3"), is(""));
}
@Test
public void shouldPopulateSettingsMapFromKeyValueMap() {
Map<String, String> parameterMap = new HashMap<>();
parameterMap.put("k1", "v1");
parameterMap.put("k2", "");
parameterMap.put("k3", null);
PluginSettings pluginSettings = new PluginSettings(PLUGIN_ID);
pluginSettings.populateSettingsMap(parameterMap);
assertThat(pluginSettings.getValueFor("k1"), is("v1"));
assertThat(pluginSettings.getValueFor("k2"), is(""));
assertThat(pluginSettings.getValueFor("k3"), is(nullValue()));
}
@Test
public void shouldGetSettingsMapAsKeyValueMap() {
Map<String, String> parameterMap = new HashMap<>();
parameterMap.put("k1", "v1");
parameterMap.put("k2", "");
parameterMap.put("k3", null);
PluginSettings pluginSettings = new PluginSettings(PLUGIN_ID);
pluginSettings.populateSettingsMap(parameterMap);
Map<String, String> settingsAsKeyValuePair = pluginSettings.getSettingsAsKeyValuePair();
assertThat(settingsAsKeyValuePair.size(), is(3));
assertThat(settingsAsKeyValuePair.get("k1"), is("v1"));
assertThat(settingsAsKeyValuePair.get("k2"), is(""));
assertThat(settingsAsKeyValuePair.get("k3"), is(nullValue()));
}
@Test
public void shouldPopulateSettingsMapWithErrorsCorrectly() {
PluginSettings pluginSettings = new PluginSettings(PLUGIN_ID);
Map<String, String> parameterMap = new HashMap<>();
parameterMap.put("k1", "v1");
pluginSettings.populateSettingsMap(parameterMap);
pluginSettings.populateErrorMessageFor("k1", "e1");
assertThat(pluginSettings.getErrorFor("k1"), is(Arrays.asList("e1")));
}
@Test
public void shouldProvidePluginSettingsAsAWeirdMapForView() {
PluginSettings pluginSettings = new PluginSettings(PLUGIN_ID);
Map<String, String> parameterMap = new HashMap<>();
parameterMap.put("k1", "v1");
pluginSettings.populateSettingsMap(parameterMap);
pluginSettings.populateErrorMessageFor("k1", "e1");
HashMap<String, Map<String, String>> expectedMap = new HashMap<>();
HashMap<String, String> valuesAndErrorsMap = new HashMap<>();
valuesAndErrorsMap.put("value", "v1");
valuesAndErrorsMap.put("errors", "[e1]");
expectedMap.put("k1", valuesAndErrorsMap);
Map<String, Map<String, String>> settingsMap = pluginSettings.getSettingsMap();
assertThat(settingsMap, is(expectedMap));
}
@Test
public void shouldPopulateHasErrorsCorrectly() {
PluginSettings pluginSettings = new PluginSettings(PLUGIN_ID);
assertThat(pluginSettings.hasErrors(), is(false));
pluginSettings.populateErrorMessageFor("k1", "e1");
assertThat(pluginSettings.hasErrors(), is(true));
}
@Test
public void shouldCreatePluginFromConfigurationCorrectly() {
Map<String, String> parameterMap = new HashMap<>();
parameterMap.put("k1", "v1");
parameterMap.put("k2", "");
parameterMap.put("k3", null);
PluginSettings pluginSettings = new PluginSettings(PLUGIN_ID);
pluginSettings.populateSettingsMap(parameterMap);
PluginSettingsConfiguration configuration = pluginSettings.toPluginSettingsConfiguration();
assertThat(configuration.size(), is(3));
assertThat(configuration.get("k1").getValue(), is("v1"));
assertThat(configuration.get("k2").getValue(), is(""));
assertThat(configuration.get("k3").getValue(), is(nullValue()));
}
@Test
public void shouldAddConfigurationsToSettingsMapCorrectly() throws InvalidCipherTextException {
ArrayList<PluginConfiguration> pluginConfigurations = new ArrayList<>();
pluginConfigurations.add(new PluginConfiguration("k1", new Metadata(true, false)));
pluginConfigurations.add(new PluginConfiguration("k2", new Metadata(true, true)));
ConfigRepoPluginInfo pluginInfo = new ConfigRepoPluginInfo(null, new PluggableInstanceSettings(pluginConfigurations));
ArrayList<ConfigurationProperty> configurationProperties = new ArrayList<>();
configurationProperties.add(new ConfigurationProperty(new ConfigurationKey("k1"), new ConfigurationValue("v1")));
configurationProperties.add(new ConfigurationProperty(new ConfigurationKey("k2"), new EncryptedConfigurationValue(new GoCipher().encrypt("v2"))));
PluginSettings pluginSettings = new PluginSettings(PLUGIN_ID);
pluginSettings.addConfigurations(pluginInfo, configurationProperties);
PluginSettingsConfiguration pluginSettingsProperties = pluginSettings.toPluginSettingsConfiguration();
assertThat(pluginSettingsProperties.size(), is(2));
assertThat(pluginSettingsProperties.get("k1").getValue(), is("v1"));
assertThat(pluginSettingsProperties.get("k2").getValue(), is("v2"));
}
@Test
public void shouldEncryptedValuesForSecureProperties() throws InvalidCipherTextException {
ArrayList<PluginConfiguration> pluginConfigurations = new ArrayList<>();
pluginConfigurations.add(new PluginConfiguration("k1", new Metadata(true, false)));
pluginConfigurations.add(new PluginConfiguration("k2", new Metadata(true, true)));
ConfigRepoPluginInfo pluginInfo = new ConfigRepoPluginInfo(null, new PluggableInstanceSettings(pluginConfigurations));
ConfigurationProperty configProperty1 = new ConfigurationProperty(new ConfigurationKey("k1"), new ConfigurationValue("v1"));
ConfigurationProperty configProperty2 = new ConfigurationProperty(new ConfigurationKey("k2"), new EncryptedConfigurationValue(new GoCipher().encrypt("v2")));
ArrayList<ConfigurationProperty> configurationProperties = new ArrayList<>();
configurationProperties.add(configProperty1);
configurationProperties.add(configProperty2);
PluginSettings pluginSettings = new PluginSettings(PLUGIN_ID);
pluginSettings.addConfigurations(pluginInfo, configurationProperties);
List<ConfigurationProperty> pluginSettingsProperties = pluginSettings.getSecurePluginSettingsProperties(pluginInfo);
assertThat(pluginSettingsProperties.size(), is(2));
assertThat(pluginSettingsProperties.get(0), is(configProperty1));
assertThat(pluginSettingsProperties.get(1), is(configProperty2));
}
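// Serializes the map to JSON, keeping null values, to mirror how plugin settings are persisted.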
private String toJSON(Map<String, String> map) {
return new GsonBuilder().serializeNulls().create().toJson(map);
}
}
| sghill/gocd | server/test/unit/com/thoughtworks/go/server/domain/PluginSettingsTest.java | Java | apache-2.0 | 9,794 |
<?php
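// Prepend the PAMI source directory (../src/mg) to the include path so the test
// suite can resolve and autoload the library classes.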
ini_set(
'include_path',
implode(
PATH_SEPARATOR,
array(
realpath(implode(
DIRECTORY_SEPARATOR, array(__DIR__, '..', 'src', 'mg')
)),
ini_get('include_path'),
)
)
);
error_reporting(E_ALL);
ini_set('display_errors', 1);
if (!defined('RESOURCES_DIR')) {
define ('RESOURCES_DIR', realpath(__DIR__) . DIRECTORY_SEPARATOR . 'resources');
}
require_once 'PAMI/Autoloader/Autoloader.php';
\PAMI\Autoloader\Autoloader::register();
| mbonneau/PAMI | test/bootstrap.php | PHP | apache-2.0 | 523 |
package com.twitter.finatra.http.integration.doeverything.main.domain
import com.twitter.finatra.request.QueryParam
case class RequestWithQueryParamSeqString(@QueryParam foo: Seq[String])
case class ResponseOfQueryParamSeqString(foo: Seq[String])
case class RequestWithQueryParamSeqLong(@QueryParam foo: Seq[Long])
case class ResponseOfQueryParamSeqLong(foo: Seq[Long])
| tom-chan/finatra | http/src/test/scala/com/twitter/finatra/http/integration/doeverything/main/domain/RequestWithQueryParamSeqString.scala | Scala | apache-2.0 | 375 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import {
createDurationFormatter,
getNumberFormatter,
getNumberFormatterRegistry,
NumberFormats,
getTimeFormatterRegistry,
smartDateFormatter,
smartDateVerboseFormatter,
} from '@superset-ui/core';
export default function setupFormatters() {
getNumberFormatterRegistry()
// Add shims for format strings that are deprecated or common typos.
// Temporary solution until performing a db migration to fix this.
.registerValue(',0', getNumberFormatter(',.4~f'))
.registerValue('null', getNumberFormatter(',.4~f'))
.registerValue('%', getNumberFormatter('.0%'))
.registerValue('.', getNumberFormatter('.4~f'))
.registerValue(',f', getNumberFormatter(',d'))
.registerValue(',r', getNumberFormatter(',.4f'))
.registerValue('0f', getNumberFormatter(',d'))
.registerValue(',#', getNumberFormatter(',.4~f'))
.registerValue('$,f', getNumberFormatter('$,d'))
.registerValue('0%', getNumberFormatter('.0%'))
.registerValue('f', getNumberFormatter(',d'))
.registerValue(',.', getNumberFormatter(',.4~f'))
.registerValue('.1%f', getNumberFormatter('.1%'))
.registerValue('1%', getNumberFormatter('.0%'))
.registerValue('3%', getNumberFormatter('.0%'))
.registerValue(',%', getNumberFormatter(',.0%'))
.registerValue('.r', getNumberFormatter('.4~f'))
.registerValue('$,.0', getNumberFormatter('$,d'))
.registerValue('$,.1', getNumberFormatter('$,.1~f'))
.registerValue(',0s', getNumberFormatter(',.4~f'))
.registerValue('%%%', getNumberFormatter('.0%'))
.registerValue(',0f', getNumberFormatter(',d'))
.registerValue('+,%', getNumberFormatter('+,.0%'))
.registerValue('$f', getNumberFormatter('$,d'))
.registerValue('+,', getNumberFormatter(NumberFormats.INTEGER_SIGNED))
.registerValue(',2f', getNumberFormatter(',.4~f'))
.registerValue(',g', getNumberFormatter(',.4~f'))
.registerValue('int', getNumberFormatter(NumberFormats.INTEGER))
.registerValue('.0%f', getNumberFormatter('.1%'))
.registerValue('$,0', getNumberFormatter('$,.4f'))
.registerValue('$,0f', getNumberFormatter('$,.4f'))
.registerValue('$,.f', getNumberFormatter('$,.4f'))
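// Human-readable duration formatters; the _SUB variant also renders sub-millisecond precision.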
.registerValue('DURATION', createDurationFormatter())
.registerValue(
'DURATION_SUB',
createDurationFormatter({ formatSubMilliseconds: true }),
);
getTimeFormatterRegistry()
.registerValue('smart_date', smartDateFormatter)
.registerValue('smart_date_verbose', smartDateVerboseFormatter)
.setDefaultKey('smart_date');
}
| mistercrunch/panoramix | superset-frontend/src/setup/setupFormatters.ts | TypeScript | apache-2.0 | 3,346 |
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace WebApplication3 {
public partial class SiteMaster {
/// <summary>
/// MainContent control.
/// </summary>
/// <remarks>
/// Auto-generated field.
/// To modify move field declaration from designer file to code-behind file.
/// </remarks>
protected global::System.Web.UI.WebControls.ContentPlaceHolder MainContent;
}
}
| GeekTrainer/BootstrapMVA | Module 4 - Visual Studio/Web Forms Scaffolding/WebApplication3/Site.Master.designer.cs | C# | apache-2.0 | 781 |
/**
* Copyright (c) 2009
* Sven Wagner-Boysen
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package de.hpi.bpmn2_0.model.activity.loop;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.bind.annotation.XmlType;
import de.hpi.bpmn2_0.model.BaseElement;
/**
* <p>Java class for tLoopCharacteristics complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="tLoopCharacteristics">
* <complexContent>
* <extension base="{http://www.omg.org/bpmn20}tBaseElement">
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "tLoopCharacteristics")
@XmlSeeAlso({
StandardLoopCharacteristics.class,
MultiInstanceLoopCharacteristics.class
})
public abstract class LoopCharacteristics
extends BaseElement
{
}
| grasscrm/gdesigner | editor/server/src/de/hpi/bpmn2_0/model/activity/loop/LoopCharacteristics.java | Java | apache-2.0 | 2,062 |
# -*- coding: utf-8 -*-
from __future__ import with_statement
import datetime
from cms.api import create_page, publish_page, add_plugin
from cms.exceptions import PluginAlreadyRegistered, PluginNotRegistered
from cms.models import Page, Placeholder
from cms.models.pluginmodel import CMSPlugin, PluginModelBase
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from cms.plugins.utils import get_plugins_for_page
from cms.plugins.file.models import File
from cms.plugins.inherit.models import InheritPagePlaceholder
from cms.plugins.link.forms import LinkForm
from cms.plugins.link.models import Link
from cms.plugins.picture.models import Picture
from cms.plugins.text.models import Text
from cms.plugins.text.utils import (plugin_tags_to_id_list, plugin_tags_to_admin_html)
from cms.plugins.twitter.models import TwitterRecentEntries
from cms.test_utils.project.pluginapp.models import Article, Section
from cms.test_utils.project.pluginapp.plugins.manytomany_rel.models import (
ArticlePluginModel)
from cms.test_utils.testcases import CMSTestCase, URL_CMS_PAGE, URL_CMS_PLUGIN_MOVE, \
URL_CMS_PAGE_ADD, URL_CMS_PLUGIN_ADD, URL_CMS_PLUGIN_EDIT, URL_CMS_PAGE_CHANGE, URL_CMS_PLUGIN_REMOVE, \
URL_CMS_PLUGIN_HISTORY_EDIT
from cms.sitemaps.cms_sitemap import CMSSitemap
from cms.test_utils.util.context_managers import SettingsOverride
from cms.utils.copy_plugins import copy_plugins_to
from django.utils import timezone
from django.conf import settings
from django.contrib import admin
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.management import call_command
from django.forms.widgets import Media
from django.test.testcases import TestCase
import os
class DumbFixturePlugin(CMSPluginBase):
model = CMSPlugin
name = "Dumb Test Plugin. It does nothing."
render_template = ""
admin_preview = False
allow_children = True
def render(self, context, instance, placeholder):
return context
class PluginsTestBaseCase(CMSTestCase):
def setUp(self):
self.super_user = User(username="test", is_staff=True, is_active=True, is_superuser=True)
self.super_user.set_password("test")
self.super_user.save()
self.slave = User(username="slave", is_staff=True, is_active=True, is_superuser=False)
self.slave.set_password("slave")
self.slave.save()
self.FIRST_LANG = settings.LANGUAGES[0][0]
self.SECOND_LANG = settings.LANGUAGES[1][0]
self._login_context = self.login_user_context(self.super_user)
self._login_context.__enter__()
def tearDown(self):
self._login_context.__exit__(None, None, None)
def approve_page(self, page):
response = self.client.get(URL_CMS_PAGE + "%d/approve/" % page.pk)
self.assertRedirects(response, URL_CMS_PAGE)
# reload page
return self.reload_page(page)
def get_request(self, *args, **kwargs):
request = super(PluginsTestBaseCase, self).get_request(*args, **kwargs)
request.placeholder_media = Media()
return request
class PluginsTestCase(PluginsTestBaseCase):
def _create_text_plugin_on_page(self, page):
plugin_data = {
'plugin_type': "TextPlugin",
'language': settings.LANGUAGES[0][0],
'placeholder': page.placeholders.get(slot="body").pk,
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEquals(response.status_code, 200)
created_plugin_id = int(response.content)
self.assertEquals(created_plugin_id, CMSPlugin.objects.all()[0].pk)
return created_plugin_id
def _edit_text_plugin(self, plugin_id, text):
edit_url = "%s%s/" % (URL_CMS_PLUGIN_EDIT, plugin_id)
response = self.client.get(edit_url)
self.assertEquals(response.status_code, 200)
data = {
"body": text
}
response = self.client.post(edit_url, data)
self.assertEquals(response.status_code, 200)
txt = Text.objects.get(pk=plugin_id)
return txt
def test_add_edit_plugin(self):
"""
Test that you can add a text plugin
"""
# add a new text plugin
page_data = self.get_new_page_data()
response = self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
created_plugin_id = self._create_text_plugin_on_page(page)
# now edit the plugin
txt = self._edit_text_plugin(created_plugin_id, "Hello World")
self.assertEquals("Hello World", txt.body)
# edit body, but click cancel button
data = {
"body": "Hello World!!",
"_cancel": True,
}
edit_url = '%s%d/' % (URL_CMS_PLUGIN_EDIT, created_plugin_id)
response = self.client.post(edit_url, data)
self.assertEquals(response.status_code, 200)
txt = Text.objects.all()[0]
self.assertEquals("Hello World", txt.body)
def test_plugin_history_view(self):
"""
Test plugin history view
"""
import reversion
page_data = self.get_new_page_data()
# two versions created by simply creating the page
response = self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
page_id = int(page.pk)
# page version 3
created_plugin_id = self._create_text_plugin_on_page(page)
# page version 4
txt = self._edit_text_plugin(created_plugin_id, "Hello Foo")
self.assertEquals("Hello Foo", txt.body)
# page version 5
txt = self._edit_text_plugin(created_plugin_id, "Hello Bar")
self.assertEquals("Hello Bar", txt.body)
versions = [v.pk for v in reversed(reversion.get_for_object(page))]
history_url = '%s%d/' % (
URL_CMS_PLUGIN_HISTORY_EDIT % (page_id, versions[-2]),
created_plugin_id)
response = self.client.get(history_url)
self.assertEquals(response.status_code, 200)
self.assertIn('Hello Foo', response.content)
def test_plugin_order(self):
"""
Test that plugin position is saved after creation
"""
page_en = create_page("PluginOrderPage", "col_two.html", "en",
slug="page1", published=True, in_navigation=True)
ph_en = page_en.placeholders.get(slot="col_left")
# We check created objects and objects from the DB to be sure the position value
# has been saved correctly
text_plugin_1 = add_plugin(ph_en, "TextPlugin", "en", body="I'm the first")
text_plugin_2 = add_plugin(ph_en, "TextPlugin", "en", body="I'm the second")
db_plugin_1 = CMSPlugin.objects.get(pk=text_plugin_1.pk)
db_plugin_2 = CMSPlugin.objects.get(pk=text_plugin_2.pk)
with SettingsOverride(CMS_PERMISSION=False):
self.assertEqual(text_plugin_1.position, 1)
self.assertEqual(db_plugin_1.position, 1)
self.assertEqual(text_plugin_2.position, 2)
self.assertEqual(db_plugin_2.position, 2)
## Finally we render the placeholder to test the actual content
rendered_placeholder = ph_en.render(self.get_context(page_en.get_absolute_url()), None)
self.assertEquals(rendered_placeholder, "I'm the firstI'm the second")
def test_add_cancel_plugin(self):
"""
Test that you can cancel a new plugin before editing and
that the plugin is removed.
"""
# add a new text plugin
page_data = self.get_new_page_data()
response = self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
plugin_data = {
'plugin_type': "TextPlugin",
'language': settings.LANGUAGES[0][0],
'placeholder': page.placeholders.get(slot="body").pk,
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEquals(response.status_code, 200)
self.assertEquals(int(response.content), CMSPlugin.objects.all()[0].pk)
# now click cancel instead of editing
edit_url = URL_CMS_PLUGIN_EDIT + response.content + "/"
response = self.client.get(edit_url)
self.assertEquals(response.status_code, 200)
data = {
"body": "Hello World",
"_cancel": True,
}
response = self.client.post(edit_url, data)
self.assertEquals(response.status_code, 200)
self.assertEquals(0, Text.objects.count())
def test_add_text_plugin_empty_tag(self):
"""
Test that you can add a text plugin
"""
# add a new text plugin
page_data = self.get_new_page_data()
response = self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
plugin_data = {
'plugin_type': "TextPlugin",
'language': settings.LANGUAGES[0][0],
'placeholder': page.placeholders.get(slot="body").pk,
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEquals(response.status_code, 200)
self.assertEquals(int(response.content), CMSPlugin.objects.all()[0].pk)
# now edit the plugin
edit_url = URL_CMS_PLUGIN_EDIT + response.content + "/"
response = self.client.get(edit_url)
self.assertEquals(response.status_code, 200)
data = {
"body": '<div class="someclass"></div><p>foo</p>'
}
response = self.client.post(edit_url, data)
self.assertEquals(response.status_code, 200)
txt = Text.objects.all()[0]
self.assertEquals('<div class="someclass"></div><p>foo</p>', txt.body)
def test_add_text_plugin_html_sanitizer(self):
"""
Test that you can add a text plugin
"""
# add a new text plugin
page_data = self.get_new_page_data()
response = self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
plugin_data = {
'plugin_type': "TextPlugin",
'language': settings.LANGUAGES[0][0],
'placeholder': page.placeholders.get(slot="body").pk,
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEquals(response.status_code, 200)
self.assertEquals(int(response.content), CMSPlugin.objects.all()[0].pk)
# now edit the plugin
edit_url = URL_CMS_PLUGIN_EDIT + response.content + "/"
response = self.client.get(edit_url)
self.assertEquals(response.status_code, 200)
data = {
"body": '<script>var bar="hacked"</script>'
}
response = self.client.post(edit_url, data)
self.assertEquals(response.status_code, 200)
txt = Text.objects.all()[0]
self.assertEquals('<script>var bar="hacked"</script>', txt.body)
def test_copy_plugins(self):
"""
Test that copying plugins works as expected.
"""
# create some objects
page_en = create_page("CopyPluginTestPage (EN)", "nav_playground.html", "en")
page_de = create_page("CopyPluginTestPage (DE)", "nav_playground.html", "de")
ph_en = page_en.placeholders.get(slot="body")
ph_de = page_de.placeholders.get(slot="body")
# add the text plugin
text_plugin_en = add_plugin(ph_en, "TextPlugin", "en", body="Hello World")
self.assertEquals(text_plugin_en.pk, CMSPlugin.objects.all()[0].pk)
# add a *nested* link plugin
link_plugin_en = add_plugin(ph_en, "LinkPlugin", "en", target=text_plugin_en,
name="A Link", url="https://www.django-cms.org")
# the call above to add a child makes a plugin reload required here.
text_plugin_en = self.reload(text_plugin_en)
# check the relations
self.assertEquals(text_plugin_en.get_children().count(), 1)
self.assertEqual(link_plugin_en.parent.pk, text_plugin_en.pk)
# just sanity check that so far everything went well
self.assertEqual(CMSPlugin.objects.count(), 2)
# copy the plugins to the german placeholder
copy_plugins_to(ph_en.get_plugins(), ph_de, 'de')
self.assertEqual(ph_de.cmsplugin_set.filter(parent=None).count(), 1)
text_plugin_de = ph_de.cmsplugin_set.get(parent=None).get_plugin_instance()[0]
self.assertEqual(text_plugin_de.get_children().count(), 1)
link_plugin_de = text_plugin_de.get_children().get().get_plugin_instance()[0]
# check we have twice as many plugins as before
self.assertEqual(CMSPlugin.objects.count(), 4)
# check language plugins
self.assertEqual(CMSPlugin.objects.filter(language='de').count(), 2)
self.assertEqual(CMSPlugin.objects.filter(language='en').count(), 2)
text_plugin_en = self.reload(text_plugin_en)
link_plugin_en = self.reload(link_plugin_en)
# check the relations in english didn't change
self.assertEquals(text_plugin_en.get_children().count(), 1)
self.assertEqual(link_plugin_en.parent.pk, text_plugin_en.pk)
self.assertEqual(link_plugin_de.name, link_plugin_en.name)
self.assertEqual(link_plugin_de.url, link_plugin_en.url)
self.assertEqual(text_plugin_de.body, text_plugin_en.body)
def test_remove_plugin_before_published(self):
"""
When removing a draft plugin we would expect the public copy of the plugin to also be removed
"""
# add a page
page_data = self.get_new_page_data()
response = self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
# add a plugin
plugin_data = {
'plugin_type': "TextPlugin",
'language': settings.LANGUAGES[0][0],
'placeholder': page.placeholders.get(slot="body").pk,
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEquals(response.status_code, 200)
self.assertEquals(int(response.content), CMSPlugin.objects.all()[0].pk)
# there should be only 1 plugin
self.assertEquals(CMSPlugin.objects.all().count(), 1)
# delete the plugin
plugin_data = {
'plugin_id': int(response.content)
}
remove_url = URL_CMS_PLUGIN_REMOVE
response = self.client.post(remove_url, plugin_data)
self.assertEquals(response.status_code, 200)
# there should be no plugins
self.assertEquals(0, CMSPlugin.objects.all().count())
def test_remove_plugin_after_published(self):
# add a page
page_data = self.get_new_page_data()
response = self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
# add a plugin
plugin_data = {
'plugin_type': "TextPlugin",
'language': settings.LANGUAGES[0][0],
'placeholder': page.placeholders.get(slot="body").pk,
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
plugin_id = int(response.content)
self.assertEquals(response.status_code, 200)
self.assertEquals(int(response.content), CMSPlugin.objects.all()[0].pk)
# there should be only 1 plugin
self.assertEquals(CMSPlugin.objects.all().count(), 1)
self.assertEquals(CMSPlugin.objects.filter(placeholder__page__publisher_is_draft=True).count(), 1)
# publish page
response = self.client.post(URL_CMS_PAGE + "%d/change-status/" % page.pk, {1: 1})
self.assertEqual(response.status_code, 200)
self.assertEquals(Page.objects.count(), 2)
# there should now be two plugins - 1 draft, 1 public
self.assertEquals(CMSPlugin.objects.all().count(), 2)
# delete the plugin
plugin_data = {
'plugin_id': plugin_id
}
remove_url = URL_CMS_PLUGIN_REMOVE
response = self.client.post(remove_url, plugin_data)
self.assertEquals(response.status_code, 200)
# there should be no plugins
self.assertEquals(CMSPlugin.objects.all().count(), 1)
self.assertEquals(CMSPlugin.objects.filter(placeholder__page__publisher_is_draft=False).count(), 1)
def test_remove_plugin_not_associated_to_page(self):
"""
Test case for PlaceholderField
"""
page_data = self.get_new_page_data()
response = self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
# add a plugin
plugin_data = {
'plugin_type': "TextPlugin",
'language': settings.LANGUAGES[0][0],
'placeholder': page.placeholders.get(slot="body").pk,
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEquals(response.status_code, 200)
self.assertEquals(int(response.content), CMSPlugin.objects.all()[0].pk)
# there should be only 1 plugin
self.assertEquals(CMSPlugin.objects.all().count(), 1)
ph = Placeholder(slot="subplugin")
ph.save()
plugin_data = {
'plugin_type': "TextPlugin",
'language': settings.LANGUAGES[0][0],
'placeholder': ph.pk,
'parent': int(response.content)
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
# no longer allowed for security reasons
self.assertEqual(response.status_code, 404)
def test_register_plugin_twice_should_raise(self):
number_of_plugins_before = len(plugin_pool.get_all_plugins())
        # The first time we register the plugin it should work
plugin_pool.register_plugin(DumbFixturePlugin)
        # Let's add it a second time. We should catch an exception
raised = False
try:
plugin_pool.register_plugin(DumbFixturePlugin)
except PluginAlreadyRegistered:
raised = True
self.assertTrue(raised)
# Let's also unregister the plugin now, and assert it's not in the
# pool anymore
plugin_pool.unregister_plugin(DumbFixturePlugin)
# Let's make sure we have the same number of plugins as before:
number_of_plugins_after = len(plugin_pool.get_all_plugins())
self.assertEqual(number_of_plugins_before, number_of_plugins_after)
def test_unregister_non_existing_plugin_should_raise(self):
number_of_plugins_before = len(plugin_pool.get_all_plugins())
raised = False
try:
            # There should not be such a plugin registered if the other tests
# don't leak plugins
plugin_pool.unregister_plugin(DumbFixturePlugin)
except PluginNotRegistered:
raised = True
self.assertTrue(raised)
# Let's count, to make sure we didn't remove a plugin accidentally.
number_of_plugins_after = len(plugin_pool.get_all_plugins())
self.assertEqual(number_of_plugins_before, number_of_plugins_after)
def test_inheritplugin_media(self):
"""
Test case for InheritPagePlaceholder
"""
inheritfrompage = create_page('page to inherit from',
'nav_playground.html',
'en')
body = inheritfrompage.placeholders.get(slot="body")
plugin = TwitterRecentEntries(
plugin_type='TwitterRecentEntriesPlugin',
placeholder=body,
position=1,
language=settings.LANGUAGE_CODE,
twitter_user='djangocms',
)
plugin.insert_at(None, position='last-child', save=True)
inheritfrompage.publish()
page = create_page('inherit from page',
'nav_playground.html',
'en',
published=True)
inherited_body = page.placeholders.get(slot="body")
inherit_plugin = InheritPagePlaceholder(
plugin_type='InheritPagePlaceholderPlugin',
placeholder=inherited_body,
position=1,
language=settings.LANGUAGE_CODE,
from_page=inheritfrompage,
from_language=settings.LANGUAGE_CODE)
inherit_plugin.insert_at(None, position='last-child', save=True)
page.publish()
self.client.logout()
response = self.client.get(page.get_absolute_url())
self.assertTrue('%scms/js/libs/jquery.tweet.js' % settings.STATIC_URL in response.content, response.content)
def test_inherit_plugin_with_empty_plugin(self):
inheritfrompage = create_page('page to inherit from',
'nav_playground.html',
'en', published=True)
body = inheritfrompage.placeholders.get(slot="body")
empty_plugin = CMSPlugin(
plugin_type='TextPlugin', # create an empty plugin
placeholder=body,
position=1,
language='en',
)
empty_plugin.insert_at(None, position='last-child', save=True)
other_page = create_page('other page', 'nav_playground.html', 'en', published=True)
inherited_body = other_page.placeholders.get(slot="body")
inherit_plugin = InheritPagePlaceholder(
plugin_type='InheritPagePlaceholderPlugin',
placeholder=inherited_body,
position=1,
language='en',
from_page=inheritfrompage,
from_language='en'
)
inherit_plugin.insert_at(None, position='last-child', save=True)
add_plugin(inherited_body, "TextPlugin", "en", body="foobar")
        # this should not fail, even if there is an empty plugin
rendered = inherited_body.render(context=self.get_context(other_page.get_absolute_url()), width=200)
self.assertIn("foobar", rendered)
def test_render_textplugin(self):
# Setup
page = create_page("render test", "nav_playground.html", "en")
ph = page.placeholders.get(slot="body")
text_plugin = add_plugin(ph, "TextPlugin", "en", body="Hello World")
link_plugins = []
for i in range(0, 10):
link_plugins.append(add_plugin(ph, "LinkPlugin", "en",
target=text_plugin,
name="A Link %d" % i,
url="http://django-cms.org"))
text_plugin.text.body += '<img src="/static/cms/images/plugins/link.png" alt="Link - %s" id="plugin_obj_%d" title="Link - %s" />' % (
link_plugins[-1].name,
link_plugins[-1].pk,
link_plugins[-1].name,
)
text_plugin.save()
txt = text_plugin.text
ph = Placeholder.objects.get(pk=ph.pk)
with self.assertNumQueries(2):
# 1 query for the CMSPlugin objects,
# 1 query for each type of child object (1 in this case, all are Link plugins)
txt.body = plugin_tags_to_admin_html(
'\n'.join(["{{ plugin_object %d }}" % l.cmsplugin_ptr_id
for l in link_plugins]))
txt.save()
text_plugin = self.reload(text_plugin)
with self.assertNumQueries(2):
rendered = text_plugin.render_plugin(placeholder=ph)
for i in range(0, 10):
self.assertTrue('A Link %d' % i in rendered)
def test_copy_textplugin(self):
"""
Test that copying of textplugins replaces references to copied plugins
"""
page = create_page("page", "nav_playground.html", "en")
placeholder = page.placeholders.get(slot='body')
plugin_base = CMSPlugin(
plugin_type='TextPlugin',
placeholder=placeholder,
position=1,
language=self.FIRST_LANG)
plugin_base.insert_at(None, position='last-child', save=False)
plugin = Text(body='')
plugin_base.set_base_attr(plugin)
plugin.save()
plugin_ref_1_base = CMSPlugin(
plugin_type='TextPlugin',
placeholder=placeholder,
position=1,
language=self.FIRST_LANG)
plugin_ref_1_base.insert_at(plugin_base, position='last-child', save=False)
plugin_ref_1 = Text(body='')
plugin_ref_1_base.set_base_attr(plugin_ref_1)
plugin_ref_1.save()
plugin_ref_2_base = CMSPlugin(
plugin_type='TextPlugin',
placeholder=placeholder,
position=2,
language=self.FIRST_LANG)
plugin_ref_2_base.insert_at(plugin_base, position='last-child', save=False)
plugin_ref_2 = Text(body='')
plugin_ref_2_base.set_base_attr(plugin_ref_2)
plugin_ref_2.save()
plugin.body = plugin_tags_to_admin_html(
' {{ plugin_object %s }} {{ plugin_object %s }} ' % (str(plugin_ref_1.pk), str(plugin_ref_2.pk)))
plugin.save()
page_data = self.get_new_page_data()
#create 2nd language page
page_data.update({
'language': self.SECOND_LANG,
'title': "%s %s" % (page.get_title(), self.SECOND_LANG),
})
response = self.client.post(URL_CMS_PAGE_CHANGE % page.pk + "?language=%s" % self.SECOND_LANG, page_data)
self.assertRedirects(response, URL_CMS_PAGE)
self.assertEquals(CMSPlugin.objects.filter(language=self.FIRST_LANG).count(), 3)
self.assertEquals(CMSPlugin.objects.filter(language=self.SECOND_LANG).count(), 0)
self.assertEquals(CMSPlugin.objects.count(), 3)
self.assertEquals(Page.objects.all().count(), 1)
copy_data = {
'placeholder': placeholder.pk,
'language': self.SECOND_LANG,
'copy_from': self.FIRST_LANG,
}
response = self.client.post(URL_CMS_PAGE + "copy-plugins/", copy_data)
self.assertEquals(response.status_code, 200)
self.assertEqual(response.content.count('<li '), 3)
# assert copy success
self.assertEquals(CMSPlugin.objects.filter(language=self.FIRST_LANG).count(), 3)
self.assertEquals(CMSPlugin.objects.filter(language=self.SECOND_LANG).count(), 3)
self.assertEquals(CMSPlugin.objects.count(), 6)
plugins = list(Text.objects.all())
new_plugin = plugins[-1]
idlist = sorted(plugin_tags_to_id_list(new_plugin.body))
expected = sorted([plugins[3].pk, plugins[4].pk])
self.assertEquals(idlist, expected)
def test_empty_plugin_is_ignored(self):
page = create_page("page", "nav_playground.html", "en")
placeholder = page.placeholders.get(slot='body')
plugin = CMSPlugin(
plugin_type='TextPlugin',
placeholder=placeholder,
position=1,
language=self.FIRST_LANG)
plugin.insert_at(None, position='last-child', save=True)
# this should not raise any errors, but just ignore the empty plugin
out = placeholder.render(self.get_context(), width=300)
self.assertFalse(len(out))
self.assertFalse(len(placeholder._en_plugins_cache))
def test_editing_plugin_changes_page_modification_time_in_sitemap(self):
now = timezone.now()
one_day_ago = now - datetime.timedelta(days=1)
page = create_page("page", "nav_playground.html", "en", published=True, publication_date=now)
page.creation_date = one_day_ago
page.changed_date = one_day_ago
plugin_id = self._create_text_plugin_on_page(page)
plugin = self._edit_text_plugin(plugin_id, "fnord")
actual_last_modification_time = CMSSitemap().lastmod(page)
self.assertEqual(plugin.changed_date - datetime.timedelta(microseconds=plugin.changed_date.microsecond),
actual_last_modification_time - datetime.timedelta(
microseconds=actual_last_modification_time.microsecond))
def test_moving_plugin_to_different_placeholder(self):
plugin_pool.register_plugin(DumbFixturePlugin)
page = create_page("page", "nav_playground.html", "en", published=True)
plugin_data = {
'plugin_type': 'DumbFixturePlugin',
'language': settings.LANGUAGES[0][0],
'placeholder': page.placeholders.get(slot='body').pk,
}
response = self.client.post(URL_CMS_PLUGIN_ADD % page.pk, plugin_data)
self.assertEquals(response.status_code, 200)
plugin_data['parent_id'] = int(response.content)
del plugin_data['placeholder']
response = self.client.post(URL_CMS_PLUGIN_ADD % page.pk, plugin_data)
self.assertEquals(response.status_code, 200)
post = {
'plugin_id': int(response.content),
'placeholder': 'right-column',
}
response = self.client.post(URL_CMS_PLUGIN_MOVE % page.pk, post)
self.assertEquals(response.status_code, 200)
from cms.plugins.utils import build_plugin_tree
build_plugin_tree(page.placeholders.get(slot='right-column').get_plugins_list())
plugin_pool.unregister_plugin(DumbFixturePlugin)
def test_get_plugins_for_page(self):
page_en = create_page("PluginOrderPage", "col_two.html", "en",
slug="page1", published=True, in_navigation=True)
ph_en = page_en.placeholders.get(slot="col_left")
text_plugin_1 = add_plugin(ph_en, "TextPlugin", "en", body="I'm inside an existing placeholder.")
# This placeholder is not in the template.
ph_en_not_used = page_en.placeholders.create(slot="not_used")
text_plugin_2 = add_plugin(ph_en_not_used, "TextPlugin", "en", body="I'm inside a non-existent placeholder.")
page_plugins = get_plugins_for_page(None, page_en, page_en.get_title_obj_attribute('language'))
db_text_plugin_1 = page_plugins.get(pk=text_plugin_1.pk)
self.assertRaises(CMSPlugin.DoesNotExist, page_plugins.get, pk=text_plugin_2.pk)
self.assertEquals(db_text_plugin_1.pk, text_plugin_1.pk)
def test_is_last_in_placeholder(self):
"""
        Tests that child plugins don't affect the is_last_in_placeholder plugin method.
"""
page_en = create_page("PluginOrderPage", "col_two.html", "en",
slug="page1", published=True, in_navigation=True)
ph_en = page_en.placeholders.get(slot="col_left")
text_plugin_1 = add_plugin(ph_en, "TextPlugin", "en", body="I'm the first")
text_plugin_2 = add_plugin(ph_en, "TextPlugin", "en", body="I'm the second")
inner_text_plugin_1 = add_plugin(ph_en, "TextPlugin", "en", body="I'm the first child of text_plugin_1")
text_plugin_1.cmsplugin_set.add(inner_text_plugin_1)
self.assertEquals(text_plugin_2.is_last_in_placeholder(), True)
class FileSystemPluginTests(PluginsTestBaseCase):
def setUp(self):
super(FileSystemPluginTests, self).setUp()
call_command('collectstatic', interactive=False, verbosity=0, link=True)
def tearDown(self):
for directory in [settings.STATIC_ROOT, settings.MEDIA_ROOT]:
for root, dirs, files in os.walk(directory, topdown=False):
# We need to walk() the directory tree since rmdir() does not allow
                # removing non-empty directories...
for name in files:
# Start by killing all files we walked
os.remove(os.path.join(root, name))
for name in dirs:
# Now all directories we walked...
os.rmdir(os.path.join(root, name))
super(FileSystemPluginTests, self).tearDown()
def test_fileplugin_icon_uppercase(self):
page = create_page('testpage', 'nav_playground.html', 'en')
body = page.placeholders.get(slot="body")
plugin = File(
plugin_type='FilePlugin',
placeholder=body,
position=1,
language=settings.LANGUAGE_CODE,
)
plugin.file.save("UPPERCASE.JPG", SimpleUploadedFile("UPPERCASE.jpg", "content"), False)
plugin.insert_at(None, position='last-child', save=True)
self.assertNotEquals(plugin.get_icon_url().find('jpg'), -1)
class PluginManyToManyTestCase(PluginsTestBaseCase):
def setUp(self):
self.super_user = User(username="test", is_staff=True, is_active=True, is_superuser=True)
self.super_user.set_password("test")
self.super_user.save()
self.slave = User(username="slave", is_staff=True, is_active=True, is_superuser=False)
self.slave.set_password("slave")
self.slave.save()
self._login_context = self.login_user_context(self.super_user)
self._login_context.__enter__()
# create 3 sections
self.sections = []
self.section_pks = []
for i in range(3):
section = Section.objects.create(name="section %s" % i)
self.sections.append(section)
self.section_pks.append(section.pk)
self.section_count = len(self.sections)
# create 10 articles by section
for section in self.sections:
for j in range(10):
Article.objects.create(
title="article %s" % j,
section=section
)
self.FIRST_LANG = settings.LANGUAGES[0][0]
self.SECOND_LANG = settings.LANGUAGES[1][0]
def test_add_plugin_with_m2m(self):
# add a new text plugin
self.assertEqual(ArticlePluginModel.objects.count(), 0)
page_data = self.get_new_page_data()
self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
placeholder = page.placeholders.get(slot="body")
plugin_data = {
'plugin_type': "ArticlePlugin",
'language': self.FIRST_LANG,
'placeholder': placeholder.pk,
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEquals(response.status_code, 200)
self.assertEquals(int(response.content), CMSPlugin.objects.all()[0].pk)
# now edit the plugin
edit_url = URL_CMS_PLUGIN_EDIT + response.content + "/"
response = self.client.get(edit_url)
self.assertEquals(response.status_code, 200)
data = {
'title': "Articles Plugin 1",
"sections": self.section_pks
}
response = self.client.post(edit_url, data)
self.assertEqual(response.status_code, 200)
self.assertEqual(ArticlePluginModel.objects.count(), 1)
plugin = ArticlePluginModel.objects.all()[0]
self.assertEquals(self.section_count, plugin.sections.count())
def test_add_plugin_with_m2m_and_publisher(self):
self.assertEqual(ArticlePluginModel.objects.count(), 0)
page_data = self.get_new_page_data()
response = self.client.post(URL_CMS_PAGE_ADD, page_data)
self.assertEqual(response.status_code, 302)
page = Page.objects.all()[0]
placeholder = page.placeholders.get(slot="body")
# add a plugin
plugin_data = {
'plugin_type': "ArticlePlugin",
'language': self.FIRST_LANG,
'placeholder': placeholder.pk,
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEquals(response.status_code, 200)
self.assertEquals(int(response.content), CMSPlugin.objects.all()[0].pk)
# there should be only 1 plugin
self.assertEquals(1, CMSPlugin.objects.all().count())
articles_plugin_pk = int(response.content)
self.assertEquals(articles_plugin_pk, CMSPlugin.objects.all()[0].pk)
# now edit the plugin
edit_url = URL_CMS_PLUGIN_EDIT + response.content + "/"
data = {
'title': "Articles Plugin 1",
'sections': self.section_pks
}
response = self.client.post(edit_url, data)
self.assertEquals(response.status_code, 200)
self.assertEquals(1, ArticlePluginModel.objects.count())
articles_plugin = ArticlePluginModel.objects.all()[0]
self.assertEquals(u'Articles Plugin 1', articles_plugin.title)
self.assertEquals(self.section_count, articles_plugin.sections.count())
# check publish box
page = publish_page(page, self.super_user)
# there should now be two plugins - 1 draft, 1 public
self.assertEquals(2, CMSPlugin.objects.all().count())
self.assertEquals(2, ArticlePluginModel.objects.all().count())
db_counts = [plugin.sections.count() for plugin in ArticlePluginModel.objects.all()]
expected = [self.section_count for i in range(len(db_counts))]
self.assertEqual(expected, db_counts)
def test_copy_plugin_with_m2m(self):
page = create_page("page", "nav_playground.html", "en")
placeholder = page.placeholders.get(slot='body')
plugin = ArticlePluginModel(
plugin_type='ArticlePlugin',
placeholder=placeholder,
position=1,
language=self.FIRST_LANG)
plugin.insert_at(None, position='last-child', save=True)
edit_url = URL_CMS_PLUGIN_EDIT + str(plugin.pk) + "/"
data = {
'title': "Articles Plugin 1",
"sections": self.section_pks
}
response = self.client.post(edit_url, data)
self.assertEquals(response.status_code, 200)
self.assertEqual(ArticlePluginModel.objects.count(), 1)
self.assertEqual(ArticlePluginModel.objects.all()[0].sections.count(), self.section_count)
page_data = self.get_new_page_data()
#create 2nd language page
page_data.update({
'language': self.SECOND_LANG,
'title': "%s %s" % (page.get_title(), self.SECOND_LANG),
})
response = self.client.post(URL_CMS_PAGE_CHANGE % page.pk + "?language=%s" % self.SECOND_LANG, page_data)
self.assertRedirects(response, URL_CMS_PAGE)
self.assertEquals(CMSPlugin.objects.filter(language=self.FIRST_LANG).count(), 1)
self.assertEquals(CMSPlugin.objects.filter(language=self.SECOND_LANG).count(), 0)
self.assertEquals(CMSPlugin.objects.count(), 1)
self.assertEquals(Page.objects.all().count(), 1)
copy_data = {
'placeholder': placeholder.pk,
'language': self.SECOND_LANG,
'copy_from': self.FIRST_LANG,
}
response = self.client.post(URL_CMS_PAGE + "copy-plugins/", copy_data)
self.assertEquals(response.status_code, 200)
self.assertEqual(response.content.count('<li '), 1)
# assert copy success
self.assertEquals(CMSPlugin.objects.filter(language=self.FIRST_LANG).count(), 1)
self.assertEquals(CMSPlugin.objects.filter(language=self.SECOND_LANG).count(), 1)
self.assertEquals(CMSPlugin.objects.count(), 2)
db_counts = [plugin.sections.count() for plugin in ArticlePluginModel.objects.all()]
expected = [self.section_count for i in range(len(db_counts))]
self.assertEqual(expected, db_counts)
class PluginsMetaOptionsTests(TestCase):
''' TestCase set for ensuring that bugs like #992 are caught '''
# these plugins are inlined because, due to the nature of the #992
# ticket, we cannot actually import a single file with all the
# plugin variants in, because that calls __new__, at which point the
# error with splitted occurs.
def test_meta_options_as_defaults(self):
        ''' handling when a CMSPlugin's meta options are computed defaults '''
# this plugin relies on the base CMSPlugin and Model classes to
# decide what the app_label and db_table should be
class TestPlugin(CMSPlugin):
pass
plugin = TestPlugin()
self.assertEqual(plugin._meta.db_table, 'cmsplugin_testplugin')
self.assertEqual(plugin._meta.app_label, 'tests') # because it's inlined
def test_meta_options_as_declared_defaults(self):
        ''' handling when a CMSPlugin's meta options are declared as per the defaults '''
# here, we declare the db_table and app_label explicitly, but to the same
# values as would be computed, thus making sure it's not a problem to
# supply options.
class TestPlugin2(CMSPlugin):
class Meta:
db_table = 'cmsplugin_testplugin2'
app_label = 'tests'
plugin = TestPlugin2()
self.assertEqual(plugin._meta.db_table, 'cmsplugin_testplugin2')
self.assertEqual(plugin._meta.app_label, 'tests') # because it's inlined
def test_meta_options_custom_app_label(self):
''' make sure customised meta options on CMSPlugins don't break things '''
class TestPlugin3(CMSPlugin):
class Meta:
app_label = 'one_thing'
plugin = TestPlugin3()
self.assertEqual(plugin._meta.db_table, 'cmsplugin_testplugin3') # because it's inlined
self.assertEqual(plugin._meta.app_label, 'one_thing')
def test_meta_options_custom_db_table(self):
''' make sure custom database table names are OK. '''
class TestPlugin4(CMSPlugin):
class Meta:
db_table = 'or_another'
plugin = TestPlugin4()
self.assertEqual(plugin._meta.db_table, 'or_another')
self.assertEqual(plugin._meta.app_label, 'tests') # because it's inlined
def test_meta_options_custom_both(self):
''' We should be able to customise app_label and db_table together '''
class TestPlugin5(CMSPlugin):
class Meta:
app_label = 'one_thing'
db_table = 'or_another'
plugin = TestPlugin5()
self.assertEqual(plugin._meta.db_table, 'or_another')
self.assertEqual(plugin._meta.app_label, 'one_thing')
class LinkPluginTestCase(PluginsTestBaseCase):
def test_does_not_verify_existance_of_url(self):
form = LinkForm(
{'name': 'Linkname', 'url': 'http://www.nonexistant.test'})
self.assertTrue(form.is_valid())
def test_opens_in_same_window_by_default(self):
"""Could not figure out how to render this plugin
Checking only for the values in the model"""
form = LinkForm({'name': 'Linkname',
'url': 'http://www.nonexistant.test'})
link = form.save()
self.assertEquals(link.target, '')
def test_open_in_blank_window(self):
form = LinkForm({'name': 'Linkname',
'url': 'http://www.nonexistant.test', 'target': '_blank'})
link = form.save()
self.assertEquals(link.target, '_blank')
def test_open_in_parent_window(self):
form = LinkForm({'name': 'Linkname',
'url': 'http://www.nonexistant.test', 'target': '_parent'})
link = form.save()
self.assertEquals(link.target, '_parent')
def test_open_in_top_window(self):
form = LinkForm({'name': 'Linkname',
'url': 'http://www.nonexistant.test', 'target': '_top'})
link = form.save()
self.assertEquals(link.target, '_top')
def test_open_in_nothing_else(self):
form = LinkForm({'name': 'Linkname',
'url': 'http://www.nonexistant.test', 'target': 'artificial'})
self.assertFalse(form.is_valid())
class NoDatabasePluginTests(TestCase):
def test_render_meta_is_unique(self):
text = Text()
link = Link()
self.assertNotEqual(id(text._render_meta), id(link._render_meta))
def test_render_meta_does_not_leak(self):
text = Text()
link = Link()
text._render_meta.text_enabled = False
link._render_meta.text_enabled = False
self.assertFalse(text._render_meta.text_enabled)
self.assertFalse(link._render_meta.text_enabled)
link._render_meta.text_enabled = True
self.assertFalse(text._render_meta.text_enabled)
self.assertTrue(link._render_meta.text_enabled)
def test_db_table_hack(self):
# TODO: Django tests seem to leak models from test methods, somehow
# we should clear django.db.models.loading.app_cache in tearDown.
plugin_class = PluginModelBase('TestPlugin', (CMSPlugin,), {'__module__': 'cms.tests.plugins'})
self.assertEqual(plugin_class._meta.db_table, 'cmsplugin_testplugin')
def test_db_table_hack_with_mixin(self):
class LeftMixin: pass
class RightMixin: pass
plugin_class = PluginModelBase('TestPlugin2', (LeftMixin, CMSPlugin, RightMixin),
{'__module__': 'cms.tests.plugins'})
self.assertEqual(plugin_class._meta.db_table, 'cmsplugin_testplugin2')
class PicturePluginTests(PluginsTestBaseCase):
def test_link_or_page(self):
"""Test a validator: you can enter a url or a page_link, but not both."""
page_data = self.get_new_page_data()
response = self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
picture = Picture(url="test")
# Note: don't call full_clean as it will check ALL fields - including
# the image, which we haven't defined. Call clean() instead which
# just validates the url and page_link fields.
picture.clean()
picture.page_link = page
picture.url = None
picture.clean()
picture.url = "test"
self.assertRaises(ValidationError, picture.clean)
class SimplePluginTests(TestCase):
def test_simple_naming(self):
class MyPlugin(CMSPluginBase):
render_template = 'base.html'
self.assertEqual(MyPlugin.name, 'My Plugin')
def test_simple_context(self):
class MyPlugin(CMSPluginBase):
render_template = 'base.html'
plugin = MyPlugin(ArticlePluginModel, admin.site)
context = {}
out_context = plugin.render(context, 1, 2)
self.assertEqual(out_context['instance'], 1)
self.assertEqual(out_context['placeholder'], 2)
self.assertIs(out_context, context)
| mpetyx/palmdrop | venv/lib/python2.7/site-packages/cms/tests/plugins.py | Python | apache-2.0 | 46,414 |
<html><body>
<style>
body, h1, h2, h3, div, span, p, pre, a {
margin: 0;
padding: 0;
border: 0;
font-weight: inherit;
font-style: inherit;
font-size: 100%;
font-family: inherit;
vertical-align: baseline;
}
body {
font-size: 13px;
padding: 1em;
}
h1 {
font-size: 26px;
margin-bottom: 1em;
}
h2 {
font-size: 24px;
margin-bottom: 1em;
}
h3 {
font-size: 20px;
margin-bottom: 1em;
margin-top: 1em;
}
pre, code {
line-height: 1.5;
font-family: Monaco, 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', 'Lucida Console', monospace;
}
pre {
margin-top: 0.5em;
}
h1, h2, h3, p {
  font-family: Arial, sans-serif;
}
h1, h2, h3 {
border-bottom: solid #CCC 1px;
}
.toc_element {
margin-top: 0.5em;
}
.firstline {
  margin-left: 2em;
}
.method {
margin-top: 1em;
border: solid 1px #CCC;
padding: 1em;
background: #EEE;
}
.details {
font-weight: bold;
font-size: 14px;
}
</style>
<h1><a href="dfareporting_v3_4.html">Campaign Manager 360 API</a> . <a href="dfareporting_v3_4.userRolePermissions.html">userRolePermissions</a></h1>
<h2>Instance Methods</h2>
<p class="toc_element">
<code><a href="#close">close()</a></code></p>
<p class="firstline">Close httplib2 connections.</p>
<p class="toc_element">
<code><a href="#get">get(profileId, id, x__xgafv=None)</a></code></p>
<p class="firstline">Gets one user role permission by ID.</p>
<p class="toc_element">
<code><a href="#list">list(profileId, ids=None, x__xgafv=None)</a></code></p>
<p class="firstline">Gets a list of user role permissions, possibly filtered.</p>
<h3>Method Details</h3>
<div class="method">
<code class="details" id="close">close()</code>
<pre>Close httplib2 connections.</pre>
</div>
<div class="method">
<code class="details" id="get">get(profileId, id, x__xgafv=None)</code>
<pre>Gets one user role permission by ID.
Args:
profileId: string, User profile ID associated with this request. (required)
id: string, User role permission ID. (required)
x__xgafv: string, V1 error format.
Allowed values
1 - v1 error format
2 - v2 error format
Returns:
An object of the form:
{ # Contains properties of a user role permission.
"availability": "A String", # Levels of availability for a user role permission.
"id": "A String", # ID of this user role permission.
"kind": "A String", # Identifies what kind of resource this is. Value: the fixed string "dfareporting#userRolePermission".
"name": "A String", # Name of this user role permission.
"permissionGroupId": "A String", # ID of the permission group that this user role permission belongs to.
}</pre>
</div>
<div class="method">
<code class="details" id="list">list(profileId, ids=None, x__xgafv=None)</code>
<pre>Gets a list of user role permissions, possibly filtered.
Args:
profileId: string, User profile ID associated with this request. (required)
ids: string, Select only user role permissions with these IDs. (repeated)
x__xgafv: string, V1 error format.
Allowed values
1 - v1 error format
2 - v2 error format
Returns:
An object of the form:
{ # User Role Permission List Response
"kind": "A String", # Identifies what kind of resource this is. Value: the fixed string "dfareporting#userRolePermissionsListResponse".
"userRolePermissions": [ # User role permission collection.
{ # Contains properties of a user role permission.
"availability": "A String", # Levels of availability for a user role permission.
"id": "A String", # ID of this user role permission.
"kind": "A String", # Identifies what kind of resource this is. Value: the fixed string "dfareporting#userRolePermission".
"name": "A String", # Name of this user role permission.
"permissionGroupId": "A String", # ID of the permission group that this user role permission belongs to.
},
],
}</pre>
</div>
</body></html> | googleapis/google-api-python-client | docs/dyn/dfareporting_v3_4.userRolePermissions.html | HTML | apache-2.0 | 4,171 |
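The reference above lists only the method signatures and response shapes. Below is a minimal, hypothetical sketch of how those calls could be wired together with google-api-python-client; the profile ID, the credentials object, and the printed fields are placeholders inferred from the documentation, not values taken from it.

# Hypothetical usage sketch for the userRolePermissions methods documented above.
# Assumes google-api-python-client is installed and that `credentials` is a valid
# OAuth2 credential; the profile id value is a placeholder.
from googleapiclient.discovery import build

def show_user_role_permissions(credentials, profile_id="12345"):
    # Build the Campaign Manager 360 service for the documented API version.
    service = build("dfareporting", "v3.4", credentials=credentials)
    # list(): gets user role permissions, possibly filtered by ids.
    listing = service.userRolePermissions().list(profileId=profile_id).execute()
    permissions = listing.get("userRolePermissions", [])
    for permission in permissions:
        print(permission["id"], permission["name"])
    # get(): gets one user role permission by ID.
    if permissions:
        detail = service.userRolePermissions().get(
            profileId=profile_id, id=permissions[0]["id"]).execute()
        print(detail["availability"])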
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
var _echarts = require("./lib/echarts");
(function () {
for (var key in _echarts) {
if (_echarts == null || !_echarts.hasOwnProperty(key) || key === 'default' || key === '__esModule') return;
exports[key] = _echarts[key];
}
})();
var _export = require("./lib/export");
(function () {
for (var key in _export) {
if (_export == null || !_export.hasOwnProperty(key) || key === 'default' || key === '__esModule') return;
exports[key] = _export[key];
}
})();
require("./lib/component/dataset");
require("./lib/chart/line");
require("./lib/chart/bar");
require("./lib/chart/pie");
require("./lib/chart/scatter");
require("./lib/chart/radar");
require("./lib/chart/map");
require("./lib/chart/tree");
require("./lib/chart/treemap");
require("./lib/chart/graph");
require("./lib/chart/gauge");
require("./lib/chart/funnel");
require("./lib/chart/parallel");
require("./lib/chart/sankey");
require("./lib/chart/boxplot");
require("./lib/chart/candlestick");
require("./lib/chart/effectScatter");
require("./lib/chart/lines");
require("./lib/chart/heatmap");
require("./lib/chart/pictorialBar");
require("./lib/chart/themeRiver");
require("./lib/chart/sunburst");
require("./lib/chart/custom");
require("./lib/component/graphic");
require("./lib/component/grid");
require("./lib/component/legendScroll");
require("./lib/component/tooltip");
require("./lib/component/axisPointer");
require("./lib/component/polar");
require("./lib/component/geo");
require("./lib/component/parallel");
require("./lib/component/singleAxis");
require("./lib/component/brush");
require("./lib/component/calendar");
require("./lib/component/title");
require("./lib/component/dataZoom");
require("./lib/component/visualMap");
require("./lib/component/markPoint");
require("./lib/component/markLine");
require("./lib/component/markArea");
require("./lib/component/timeline");
require("./lib/component/toolbox");
require("zrender/lib/vml/vml");
require("zrender/lib/svg/svg"); | 100star/echarts | index.js | JavaScript | apache-2.0 | 2,805 |
/**
* Copyright 2009 sshj contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.schmizz.sshj.transport.cipher;
/** {@code aes192-cbc} cipher */
public class AES192CBC
extends BaseCipher {
/** Named factory for AES192CBC Cipher */
public static class Factory
implements net.schmizz.sshj.common.Factory.Named<Cipher> {
@Override
public Cipher create() {
return new AES192CBC();
}
@Override
public String getName() {
return "aes192-cbc";
}
}
public AES192CBC() {
super(16, 24, "AES", "AES/CBC/NoPadding");
}
}
| Juraldinio/sshj | src/main/java/net/schmizz/sshj/transport/cipher/AES192CBC.java | Java | apache-2.0 | 1,167 |
#ifndef _osimActuators_h_
#define _osimActuators_h_
/* -------------------------------------------------------------------------- *
* OpenSim: osimActuators.h *
* -------------------------------------------------------------------------- *
* The OpenSim API is a toolkit for musculoskeletal modeling and simulation. *
* See http://opensim.stanford.edu and the NOTICE file for more information. *
* OpenSim is developed at Stanford University and supported by the US *
* National Institutes of Health (U54 GM072970, R24 HD065690) and by DARPA *
* through the Warrior Web program. *
* *
* Copyright (c) 2005-2012 Stanford University and the Authors *
* Author(s): Ayman Habib *
* *
* Licensed under the Apache License, Version 2.0 (the "License"); you may *
* not use this file except in compliance with the License. You may obtain a *
* copy of the License at http://www.apache.org/licenses/LICENSE-2.0. *
* *
* Unless required by applicable law or agreed to in writing, software *
* distributed under the License is distributed on an "AS IS" BASIS, *
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
* See the License for the specific language governing permissions and *
* limitations under the License. *
* -------------------------------------------------------------------------- */
#include "CoordinateActuator.h"
#include "PointActuator.h"
#include "TorqueActuator.h"
#include "PointToPointActuator.h"
#include "SpringGeneralizedForce.h"
#include "ClutchedPathSpring.h"
#include "Schutte1993Muscle_Deprecated.h"
#include "Delp1990Muscle_Deprecated.h"
#include "Thelen2003Muscle_Deprecated.h"
#include "Thelen2003Muscle.h"
#include "RigidTendonMuscle.h"
#include "Millard2012EquilibriumMuscle.h"
#include "Millard2012AccelerationMuscle.h"
#include "RegisterTypes_osimActuators.h" // to expose RegisterTypes_osimActuators
#endif // _osimActuators_h_
| opensim-org/opensim-metabolicsprobes | OpenSim 3.2-64bit-VS12/sdk/include/OpenSim/Actuators/osimActuators.h | C | apache-2.0 | 2,384 |
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.wire;
import alluxio.annotation.PublicApi;
import com.google.common.base.MoreObjects;
import com.google.common.base.Objects;
import com.google.common.base.Preconditions;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.concurrent.NotThreadSafe;
/**
* The block information.
*/
@PublicApi
@NotThreadSafe
public final class BlockInfo implements Serializable {
private static final long serialVersionUID = 5646834366222004646L;
private long mBlockId;
private long mLength;
private ArrayList<BlockLocation> mLocations = new ArrayList<>();
/**
* Creates a new instance of {@link BlockInfo}.
*/
public BlockInfo() {}
/**
* @return the block id
*/
public long getBlockId() {
return mBlockId;
}
/**
* @return the block length
*/
public long getLength() {
return mLength;
}
/**
* @return the block locations
*/
public List<BlockLocation> getLocations() {
return mLocations;
}
/**
* @param blockId the block id to use
* @return the block information
*/
public BlockInfo setBlockId(long blockId) {
mBlockId = blockId;
return this;
}
/**
* @param length the block length to use
* @return the block information
*/
public BlockInfo setLength(long length) {
mLength = length;
return this;
}
/**
* @param locations the block locations to use
* @return the block information
*/
public BlockInfo setLocations(List<BlockLocation> locations) {
mLocations = new ArrayList<>(Preconditions.checkNotNull(locations, "locations"));
return this;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof BlockInfo)) {
return false;
}
BlockInfo that = (BlockInfo) o;
return mBlockId == that.mBlockId && mLength == that.mLength
&& mLocations.equals(that.mLocations);
}
@Override
public int hashCode() {
return Objects.hashCode(mBlockId, mLength, mLocations);
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this).add("id", mBlockId).add("length", mLength)
.add("locations", mLocations).toString();
}
}
| EvilMcJerkface/alluxio | core/common/src/main/java/alluxio/wire/BlockInfo.java | Java | apache-2.0 | 2,753 |
/*
* Copyright 2011 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.drools.guvnor.client.rpc;
import java.util.Date;
/**
 * A single row of paged data
*/
public class QueryPageRow extends AbstractAssetPageRow {
private String description;
private String abbreviatedDescription;
private String packageName;
private String creator;
private Date createdDate;
private String lastContributor;
private Date lastModified;
// ************************************************************************
// Getters and setters
// ************************************************************************
public String getAbbreviatedDescription() {
return abbreviatedDescription;
}
public Date getCreatedDate() {
return createdDate;
}
public String getCreator() {
return creator;
}
public String getDescription() {
return description;
}
public String getLastContributor() {
return lastContributor;
}
public Date getLastModified() {
return lastModified;
}
public String getPackageName() {
return packageName;
}
public void setAbbreviatedDescription(String abbreviatedDescription) {
this.abbreviatedDescription = abbreviatedDescription;
}
public void setCreatedDate(Date createdDate) {
this.createdDate = createdDate;
}
public void setCreator(String creator) {
this.creator = creator;
}
public void setDescription(String description) {
this.description = description;
}
public void setLastContributor(String lastContributor) {
this.lastContributor = lastContributor;
}
public void setLastModified(Date lastModified) {
this.lastModified = lastModified;
}
public void setPackageName(String packageName) {
this.packageName = packageName;
}
}
| Rikkola/guvnor | guvnor-webapp/src/main/java/org/drools/guvnor/client/rpc/QueryPageRow.java | Java | apache-2.0 | 2,449 |
package controller
import (
"reflect"
"testing"
kutil "k8s.io/kubernetes/pkg/util"
)
func TestScheduler(t *testing.T) {
keys := []string{}
s := NewScheduler(2, kutil.NewFakeAlwaysRateLimiter(), func(key, value interface{}) {
keys = append(keys, key.(string))
})
for i := 0; i < 6; i++ {
s.RunOnce()
if len(keys) > 0 {
t.Fatal(keys)
}
if s.position != (i+1)%3 {
t.Fatal(s.position)
}
}
s.Add("first", "test")
found := false
for i, buckets := range s.buckets {
if _, ok := buckets["first"]; ok {
found = true
} else {
continue
}
if i == s.position {
t.Fatal("should not insert into current bucket")
}
}
if !found {
t.Fatal("expected to find key in a bucket")
}
for i := 0; i < 10; i++ {
s.Delay("first")
if _, ok := s.buckets[(s.position-1+len(s.buckets))%len(s.buckets)]["first"]; !ok {
t.Fatal("key was not in the last bucket")
}
}
s.RunOnce()
if len(keys) != 0 {
t.Fatal(keys)
}
s.RunOnce()
if !reflect.DeepEqual(keys, []string{"first"}) {
t.Fatal(keys)
}
}
func TestSchedulerAddAndDelay(t *testing.T) {
s := NewScheduler(3, kutil.NewFakeAlwaysRateLimiter(), func(key, value interface{}) {})
// 3 is the last bucket, 0 is the current bucket
s.Add("first", "other")
if s.buckets[3]["first"] != "other" {
t.Fatalf("placed key in wrong bucket: %#v", s.buckets)
}
s.Add("second", "other")
if s.buckets[2]["second"] != "other" {
t.Fatalf("placed key in wrong bucket: %#v", s.buckets)
}
s.Add("third", "other")
if s.buckets[1]["third"] != "other" {
t.Fatalf("placed key in wrong bucket: %#v", s.buckets)
}
s.Add("fourth", "other")
if s.buckets[3]["fourth"] != "other" {
t.Fatalf("placed key in wrong bucket: %#v", s.buckets)
}
s.Add("fifth", "other")
if s.buckets[2]["fifth"] != "other" {
t.Fatalf("placed key in wrong bucket: %#v", s.buckets)
}
s.Remove("third", "other")
s.Add("sixth", "other")
if s.buckets[1]["sixth"] != "other" {
t.Fatalf("placed key in wrong bucket: %#v", s.buckets)
}
// delaying an item moves it to the last bucket
s.Delay("second")
if s.buckets[3]["second"] != "other" {
t.Fatalf("delay placed key in wrong bucket: %#v", s.buckets)
}
// delaying an item that is not in the map does nothing
s.Delay("third")
if _, ok := s.buckets[3]["third"]; ok {
t.Fatalf("delay placed key in wrong bucket: %#v", s.buckets)
}
// delaying an item that is already in the latest bucket does nothing
s.Delay("fourth")
if s.buckets[3]["fourth"] != "other" {
t.Fatalf("delay placed key in wrong bucket: %#v", s.buckets)
}
}
func TestSchedulerRemove(t *testing.T) {
s := NewScheduler(2, kutil.NewFakeAlwaysRateLimiter(), func(key, value interface{}) {})
s.Add("test", "other")
if s.Remove("test", "value") {
t.Fatal(s)
}
if !s.Remove("test", "other") {
t.Fatal(s)
}
if s.Len() != 0 {
t.Fatal(s)
}
s.Add("test", "other")
s.Add("test", "new")
if s.Len() != 1 {
t.Fatal(s)
}
if s.Remove("test", "other") {
t.Fatal(s)
}
if !s.Remove("test", "new") {
t.Fatal(s)
}
}
| jwforres/origin | pkg/controller/scheduler_test.go | GO | apache-2.0 | 3,022 |
/******************************************************************************
* Copyright 2019 The Apollo Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*****************************************************************************/
#pragma once
#include "modules/canbus/proto/chassis_detail.pb.h"
#include "modules/drivers/canbus/can_comm/protocol_data.h"
namespace apollo {
namespace canbus {
namespace wey {
class Ads1111 : public ::apollo::drivers::canbus::ProtocolData<
::apollo::canbus::ChassisDetail> {
public:
static const int32_t ID;
Ads1111();
uint32_t GetPeriod() const override;
void UpdateData(uint8_t* data) override;
void Reset() override;
// config detail: {'description': 'ADS request ESP/VLC to decelerate until
// standstill.', 'enum': {0: 'ADS_DECTOSTOP_NO_DEMAND', 1:
// 'ADS_DECTOSTOP_DEMAND'}, 'precision': 1.0, 'len': 1, 'name':
// 'ADS_DecToStop', 'is_signed_var': False, 'offset': 0.0,
// 'physical_range': '[0|1]', 'bit': 17, 'type': 'enum', 'order': 'motorola',
// 'physical_unit': ''}
Ads1111* set_ads_dectostop(Ads1_111::Ads_dectostopType ads_dectostop);
// config detail: {'description': 'The status of the ADS control unit.The ADS
// mode should be contained in every message sent by ADS', 'enum': {0:
// 'ADS_MODE_OFF_MODE', 3: 'ADS_MODE_ACTIVE_MODE'}, 'precision': 1.0, 'len':
// 5, 'name': 'ADS_Mode', 'is_signed_var': False, 'offset': 0.0,
// 'physical_range': '[0|31]', 'bit': 7, 'type': 'enum', 'order': 'motorola',
// 'physical_unit': ''}
Ads1111* set_ads_mode(Ads1_111::Ads_modeType ads_mode);
// config detail: {'description': 'ADS target acceleration for transmission',
// 'offset': -7.0, 'precision': 0.05, 'len': 8, 'name': 'ADS_TarAcce',
// 'is_signed_var': False, 'physical_range': '[-7|5.75]', 'bit': 15, 'type':
// 'double', 'order': 'motorola', 'physical_unit': 'm/s2'}
Ads1111* set_ads_taracce(double ads_taracce);
// config detail: {'description': 'ACC request ESP drive off', 'enum': {0:
// 'ADS_DRIVEOFF_REQ_NO_DEMAND', 1: 'ADS_DRIVEOFF_REQ_DEMAND'}, 'precision':
// 1.0, 'len': 1, 'name': 'ADS_Driveoff_Req', 'is_signed_var': False,
// 'offset': 0.0, 'physical_range': '[0|1]', 'bit': 1, 'type': 'enum',
// 'order': 'motorola', 'physical_unit': ''}
Ads1111* set_ads_driveoff_req(
Ads1_111::Ads_driveoff_reqType ads_driveoff_req);
// config detail: {'description': 'target deceleration value from AEB',
// 'offset': -16.0, 'precision': 0.000488, 'len': 16, 'name':
// 'ADS_AEB_TarAcce', 'is_signed_var': False, 'physical_range': '[-16|16]',
// 'bit': 39, 'type': 'double', 'order': 'motorola', 'physical_unit': 'm/s2'}
Ads1111* set_ads_aeb_taracce(double ads_aeb_taracce);
// config detail: {'description': 'Request of the AEB deceleration control.',
// 'enum': {0: 'ADS_AEB_TGTDECEL_REQ_NO_DEMAND', 1:
// 'ADS_AEB_TGTDECEL_REQ_DEMAND'}, 'precision': 1.0, 'len': 1, 'name':
// 'ADS_AEB_TgtDecel_Req', 'is_signed_var': False, 'offset': 0.0,
// 'physical_range': '[0|1]', 'bit': 31, 'type': 'enum', 'order': 'motorola',
// 'physical_unit': ''}
Ads1111* set_ads_aeb_tgtdecel_req(
Ads1_111::Ads_aeb_tgtdecel_reqType ads_aeb_tgtdecel_req);
private:
// config detail: {'description': 'ADS request ESP/VLC to decelerate until
// standstill.', 'enum': {0: 'ADS_DECTOSTOP_NO_DEMAND', 1:
// 'ADS_DECTOSTOP_DEMAND'}, 'precision': 1.0, 'len': 1, 'name':
// 'ADS_DecToStop', 'is_signed_var': False, 'offset': 0.0, 'physical_range':
// '[0|1]', 'bit': 17, 'type': 'enum', 'order': 'motorola',
// 'physical_unit': ''}
void set_p_ads_dectostop(uint8_t* data,
Ads1_111::Ads_dectostopType ads_dectostop);
// config detail: {'description': 'The status of the ADS control unit.The
// ADS mode should be contained in every message sent by ADS', 'enum': {0:
// 'ADS_MODE_OFF_MODE', 3: 'ADS_MODE_ACTIVE_MODE'}, 'precision': 1.0,
// 'len': 5, 'name': 'ADS_Mode', 'is_signed_var': False, 'offset': 0.0,
// 'physical_range': '[0|31]', 'bit': 7, 'type': 'enum', 'order': 'motorola',
// 'physical_unit': ''}
void set_p_ads_mode(uint8_t* data, Ads1_111::Ads_modeType ads_mode);
// config detail: {'description': 'ADS target acceleration for transmission',
// 'offset': -7.0, 'precision': 0.05, 'len': 8, 'name': 'ADS_TarAcce',
// 'is_signed_var': False, 'physical_range': '[-7|5.75]', 'bit': 15,
// 'type': 'double', 'order': 'motorola', 'physical_unit': 'm/s2'}
void set_p_ads_taracce(uint8_t* data, double ads_taracce);
// config detail: {'description': 'ACC request ESP drive off', 'enum': {0:
// 'ADS_DRIVEOFF_REQ_NO_DEMAND', 1: 'ADS_DRIVEOFF_REQ_DEMAND'}, 'precision':
// 1.0, 'len': 1, 'name': 'ADS_Driveoff_Req', 'is_signed_var': False,
// 'offset': 0.0, 'physical_range': '[0|1]', 'bit': 1, 'type': 'enum',
// 'order': 'motorola', 'physical_unit': ''}
void set_p_ads_driveoff_req(uint8_t* data,
Ads1_111::Ads_driveoff_reqType ads_driveoff_req);
// config detail: {'description': 'target deceleration value from AEB',
// 'offset': -16.0, 'precision': 0.000488, 'len': 16, 'name':
// 'ADS_AEB_TarAcce', 'is_signed_var': False, 'physical_range': '[-16|16]',
// 'bit': 39, 'type': 'double', 'order': 'motorola', 'physical_unit': 'm/s2'}
void set_p_ads_aeb_taracce(uint8_t* data, double ads_aeb_taracce);
// config detail: {'description': 'Request of the AEB deceleration control.',
// 'enum': {0: 'ADS_AEB_TGTDECEL_REQ_NO_DEMAND', 1:
// 'ADS_AEB_TGTDECEL_REQ_DEMAND'}, 'precision': 1.0, 'len': 1, 'name':
// 'ADS_AEB_TgtDecel_Req', 'is_signed_var': False, 'offset': 0.0,
// 'physical_range': '[0|1]', 'bit': 31, 'type': 'enum', 'order': 'motorola',
// 'physical_unit': ''}
void set_p_ads_aeb_tgtdecel_req(
uint8_t* data, Ads1_111::Ads_aeb_tgtdecel_reqType ads_aeb_tgtdecel_req);
private:
Ads1_111::Ads_dectostopType ads_dectostop_;
Ads1_111::Ads_modeType ads_mode_;
double ads_taracce_ = 0.0;
Ads1_111::Ads_driveoff_reqType ads_driveoff_req_;
double ads_aeb_taracce_ = 0.0;
Ads1_111::Ads_aeb_tgtdecel_reqType ads_aeb_tgtdecel_req_;
};
} // namespace wey
} // namespace canbus
} // namespace apollo
| wanglei828/apollo | modules/canbus/vehicle/wey/protocol/ads1_111.h | C | apache-2.0 | 6,759 |
/*
Copyright 2015 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// DO NOT EDIT. THIS FILE IS AUTO-GENERATED BY $KUBEROOT/hack/update-generated-deep-copies.sh.
package v1
import (
time "time"
api "k8s.io/kubernetes/pkg/api"
resource "k8s.io/kubernetes/pkg/api/resource"
conversion "k8s.io/kubernetes/pkg/conversion"
runtime "k8s.io/kubernetes/pkg/runtime"
util "k8s.io/kubernetes/pkg/util"
inf "speter.net/go/exp/math/dec/inf"
)
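// deepCopy_resource_Quantity copies a resource.Quantity, cloning the
// underlying *inf.Dec amount through the Cloner so the copy shares no
// mutable state with the original.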
func deepCopy_resource_Quantity(in resource.Quantity, out *resource.Quantity, c *conversion.Cloner) error {
if in.Amount != nil {
if newVal, err := c.DeepCopy(in.Amount); err != nil {
return err
} else if newVal == nil {
out.Amount = nil
} else {
out.Amount = newVal.(*inf.Dec)
}
} else {
out.Amount = nil
}
out.Format = in.Format
return nil
}
func deepCopy_v1_APIVersion(in APIVersion, out *APIVersion, c *conversion.Cloner) error {
out.Name = in.Name
out.APIGroup = in.APIGroup
return nil
}
func deepCopy_v1_AWSElasticBlockStoreVolumeSource(in AWSElasticBlockStoreVolumeSource, out *AWSElasticBlockStoreVolumeSource, c *conversion.Cloner) error {
out.VolumeID = in.VolumeID
out.FSType = in.FSType
out.Partition = in.Partition
out.ReadOnly = in.ReadOnly
return nil
}
func deepCopy_v1_Binding(in Binding, out *Binding, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectReference(in.Target, &out.Target, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_Capabilities(in Capabilities, out *Capabilities, c *conversion.Cloner) error {
if in.Add != nil {
out.Add = make([]Capability, len(in.Add))
for i := range in.Add {
out.Add[i] = in.Add[i]
}
} else {
out.Add = nil
}
if in.Drop != nil {
out.Drop = make([]Capability, len(in.Drop))
for i := range in.Drop {
out.Drop[i] = in.Drop[i]
}
} else {
out.Drop = nil
}
return nil
}
func deepCopy_v1_ComponentCondition(in ComponentCondition, out *ComponentCondition, c *conversion.Cloner) error {
out.Type = in.Type
out.Status = in.Status
out.Message = in.Message
out.Error = in.Error
return nil
}
func deepCopy_v1_ComponentStatus(in ComponentStatus, out *ComponentStatus, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if in.Conditions != nil {
out.Conditions = make([]ComponentCondition, len(in.Conditions))
for i := range in.Conditions {
if err := deepCopy_v1_ComponentCondition(in.Conditions[i], &out.Conditions[i], c); err != nil {
return err
}
}
} else {
out.Conditions = nil
}
return nil
}
func deepCopy_v1_ComponentStatusList(in ComponentStatusList, out *ComponentStatusList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]ComponentStatus, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_ComponentStatus(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
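// deepCopy_v1_Container copies a Container field by field: the string slices
// (Command, Args) and struct slices (Ports, Env, VolumeMounts) are
// reallocated and copied element-wise, while the optional pointer fields
// (LivenessProbe, ReadinessProbe, Lifecycle, SecurityContext) are cloned only
// when non-nil.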
func deepCopy_v1_Container(in Container, out *Container, c *conversion.Cloner) error {
out.Name = in.Name
out.Image = in.Image
if in.Command != nil {
out.Command = make([]string, len(in.Command))
for i := range in.Command {
out.Command[i] = in.Command[i]
}
} else {
out.Command = nil
}
if in.Args != nil {
out.Args = make([]string, len(in.Args))
for i := range in.Args {
out.Args[i] = in.Args[i]
}
} else {
out.Args = nil
}
out.WorkingDir = in.WorkingDir
if in.Ports != nil {
out.Ports = make([]ContainerPort, len(in.Ports))
for i := range in.Ports {
if err := deepCopy_v1_ContainerPort(in.Ports[i], &out.Ports[i], c); err != nil {
return err
}
}
} else {
out.Ports = nil
}
if in.Env != nil {
out.Env = make([]EnvVar, len(in.Env))
for i := range in.Env {
if err := deepCopy_v1_EnvVar(in.Env[i], &out.Env[i], c); err != nil {
return err
}
}
} else {
out.Env = nil
}
if err := deepCopy_v1_ResourceRequirements(in.Resources, &out.Resources, c); err != nil {
return err
}
if in.VolumeMounts != nil {
out.VolumeMounts = make([]VolumeMount, len(in.VolumeMounts))
for i := range in.VolumeMounts {
if err := deepCopy_v1_VolumeMount(in.VolumeMounts[i], &out.VolumeMounts[i], c); err != nil {
return err
}
}
} else {
out.VolumeMounts = nil
}
if in.LivenessProbe != nil {
out.LivenessProbe = new(Probe)
if err := deepCopy_v1_Probe(*in.LivenessProbe, out.LivenessProbe, c); err != nil {
return err
}
} else {
out.LivenessProbe = nil
}
if in.ReadinessProbe != nil {
out.ReadinessProbe = new(Probe)
if err := deepCopy_v1_Probe(*in.ReadinessProbe, out.ReadinessProbe, c); err != nil {
return err
}
} else {
out.ReadinessProbe = nil
}
if in.Lifecycle != nil {
out.Lifecycle = new(Lifecycle)
if err := deepCopy_v1_Lifecycle(*in.Lifecycle, out.Lifecycle, c); err != nil {
return err
}
} else {
out.Lifecycle = nil
}
out.TerminationMessagePath = in.TerminationMessagePath
out.ImagePullPolicy = in.ImagePullPolicy
if in.SecurityContext != nil {
out.SecurityContext = new(SecurityContext)
if err := deepCopy_v1_SecurityContext(*in.SecurityContext, out.SecurityContext, c); err != nil {
return err
}
} else {
out.SecurityContext = nil
}
out.Stdin = in.Stdin
out.TTY = in.TTY
return nil
}
func deepCopy_v1_ContainerPort(in ContainerPort, out *ContainerPort, c *conversion.Cloner) error {
out.Name = in.Name
out.HostPort = in.HostPort
out.ContainerPort = in.ContainerPort
out.Protocol = in.Protocol
out.HostIP = in.HostIP
return nil
}
func deepCopy_v1_ContainerState(in ContainerState, out *ContainerState, c *conversion.Cloner) error {
if in.Waiting != nil {
out.Waiting = new(ContainerStateWaiting)
if err := deepCopy_v1_ContainerStateWaiting(*in.Waiting, out.Waiting, c); err != nil {
return err
}
} else {
out.Waiting = nil
}
if in.Running != nil {
out.Running = new(ContainerStateRunning)
if err := deepCopy_v1_ContainerStateRunning(*in.Running, out.Running, c); err != nil {
return err
}
} else {
out.Running = nil
}
if in.Terminated != nil {
out.Terminated = new(ContainerStateTerminated)
if err := deepCopy_v1_ContainerStateTerminated(*in.Terminated, out.Terminated, c); err != nil {
return err
}
} else {
out.Terminated = nil
}
return nil
}
func deepCopy_v1_ContainerStateRunning(in ContainerStateRunning, out *ContainerStateRunning, c *conversion.Cloner) error {
if err := deepCopy_util_Time(in.StartedAt, &out.StartedAt, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_ContainerStateTerminated(in ContainerStateTerminated, out *ContainerStateTerminated, c *conversion.Cloner) error {
out.ExitCode = in.ExitCode
out.Signal = in.Signal
out.Reason = in.Reason
out.Message = in.Message
if err := deepCopy_util_Time(in.StartedAt, &out.StartedAt, c); err != nil {
return err
}
if err := deepCopy_util_Time(in.FinishedAt, &out.FinishedAt, c); err != nil {
return err
}
out.ContainerID = in.ContainerID
return nil
}
func deepCopy_v1_ContainerStateWaiting(in ContainerStateWaiting, out *ContainerStateWaiting, c *conversion.Cloner) error {
out.Reason = in.Reason
return nil
}
func deepCopy_v1_ContainerStatus(in ContainerStatus, out *ContainerStatus, c *conversion.Cloner) error {
out.Name = in.Name
if err := deepCopy_v1_ContainerState(in.State, &out.State, c); err != nil {
return err
}
if err := deepCopy_v1_ContainerState(in.LastTerminationState, &out.LastTerminationState, c); err != nil {
return err
}
out.Ready = in.Ready
out.RestartCount = in.RestartCount
out.Image = in.Image
out.ImageID = in.ImageID
out.ContainerID = in.ContainerID
return nil
}
func deepCopy_v1_Daemon(in Daemon, out *Daemon, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_DaemonSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
if err := deepCopy_v1_DaemonStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_DaemonList(in DaemonList, out *DaemonList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]Daemon, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_Daemon(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_DaemonSpec(in DaemonSpec, out *DaemonSpec, c *conversion.Cloner) error {
if in.Selector != nil {
out.Selector = make(map[string]string)
for key, val := range in.Selector {
out.Selector[key] = val
}
} else {
out.Selector = nil
}
if in.Template != nil {
out.Template = new(PodTemplateSpec)
if err := deepCopy_v1_PodTemplateSpec(*in.Template, out.Template, c); err != nil {
return err
}
} else {
out.Template = nil
}
return nil
}
func deepCopy_v1_DaemonStatus(in DaemonStatus, out *DaemonStatus, c *conversion.Cloner) error {
out.CurrentNumberScheduled = in.CurrentNumberScheduled
out.NumberMisscheduled = in.NumberMisscheduled
out.DesiredNumberScheduled = in.DesiredNumberScheduled
return nil
}
func deepCopy_v1_DeleteOptions(in DeleteOptions, out *DeleteOptions, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if in.GracePeriodSeconds != nil {
out.GracePeriodSeconds = new(int64)
*out.GracePeriodSeconds = *in.GracePeriodSeconds
} else {
out.GracePeriodSeconds = nil
}
return nil
}
func deepCopy_v1_EmptyDirVolumeSource(in EmptyDirVolumeSource, out *EmptyDirVolumeSource, c *conversion.Cloner) error {
out.Medium = in.Medium
return nil
}
func deepCopy_v1_EndpointAddress(in EndpointAddress, out *EndpointAddress, c *conversion.Cloner) error {
out.IP = in.IP
if in.TargetRef != nil {
out.TargetRef = new(ObjectReference)
if err := deepCopy_v1_ObjectReference(*in.TargetRef, out.TargetRef, c); err != nil {
return err
}
} else {
out.TargetRef = nil
}
return nil
}
func deepCopy_v1_EndpointPort(in EndpointPort, out *EndpointPort, c *conversion.Cloner) error {
out.Name = in.Name
out.Port = in.Port
out.Protocol = in.Protocol
return nil
}
func deepCopy_v1_EndpointSubset(in EndpointSubset, out *EndpointSubset, c *conversion.Cloner) error {
if in.Addresses != nil {
out.Addresses = make([]EndpointAddress, len(in.Addresses))
for i := range in.Addresses {
if err := deepCopy_v1_EndpointAddress(in.Addresses[i], &out.Addresses[i], c); err != nil {
return err
}
}
} else {
out.Addresses = nil
}
if in.Ports != nil {
out.Ports = make([]EndpointPort, len(in.Ports))
for i := range in.Ports {
if err := deepCopy_v1_EndpointPort(in.Ports[i], &out.Ports[i], c); err != nil {
return err
}
}
} else {
out.Ports = nil
}
return nil
}
func deepCopy_v1_Endpoints(in Endpoints, out *Endpoints, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if in.Subsets != nil {
out.Subsets = make([]EndpointSubset, len(in.Subsets))
for i := range in.Subsets {
if err := deepCopy_v1_EndpointSubset(in.Subsets[i], &out.Subsets[i], c); err != nil {
return err
}
}
} else {
out.Subsets = nil
}
return nil
}
func deepCopy_v1_EndpointsList(in EndpointsList, out *EndpointsList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]Endpoints, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_Endpoints(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_EnvVar(in EnvVar, out *EnvVar, c *conversion.Cloner) error {
out.Name = in.Name
out.Value = in.Value
if in.ValueFrom != nil {
out.ValueFrom = new(EnvVarSource)
if err := deepCopy_v1_EnvVarSource(*in.ValueFrom, out.ValueFrom, c); err != nil {
return err
}
} else {
out.ValueFrom = nil
}
return nil
}
func deepCopy_v1_EnvVarSource(in EnvVarSource, out *EnvVarSource, c *conversion.Cloner) error {
if in.FieldRef != nil {
out.FieldRef = new(ObjectFieldSelector)
if err := deepCopy_v1_ObjectFieldSelector(*in.FieldRef, out.FieldRef, c); err != nil {
return err
}
} else {
out.FieldRef = nil
}
return nil
}
func deepCopy_v1_Event(in Event, out *Event, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectReference(in.InvolvedObject, &out.InvolvedObject, c); err != nil {
return err
}
out.Reason = in.Reason
out.Message = in.Message
if err := deepCopy_v1_EventSource(in.Source, &out.Source, c); err != nil {
return err
}
if err := deepCopy_util_Time(in.FirstTimestamp, &out.FirstTimestamp, c); err != nil {
return err
}
if err := deepCopy_util_Time(in.LastTimestamp, &out.LastTimestamp, c); err != nil {
return err
}
out.Count = in.Count
return nil
}
func deepCopy_v1_EventList(in EventList, out *EventList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]Event, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_Event(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_EventSource(in EventSource, out *EventSource, c *conversion.Cloner) error {
out.Component = in.Component
out.Host = in.Host
return nil
}
func deepCopy_v1_ExecAction(in ExecAction, out *ExecAction, c *conversion.Cloner) error {
if in.Command != nil {
out.Command = make([]string, len(in.Command))
for i := range in.Command {
out.Command[i] = in.Command[i]
}
} else {
out.Command = nil
}
return nil
}
func deepCopy_v1_GCEPersistentDiskVolumeSource(in GCEPersistentDiskVolumeSource, out *GCEPersistentDiskVolumeSource, c *conversion.Cloner) error {
out.PDName = in.PDName
out.FSType = in.FSType
out.Partition = in.Partition
out.ReadOnly = in.ReadOnly
return nil
}
func deepCopy_v1_GitRepoVolumeSource(in GitRepoVolumeSource, out *GitRepoVolumeSource, c *conversion.Cloner) error {
out.Repository = in.Repository
out.Revision = in.Revision
return nil
}
func deepCopy_v1_GlusterfsVolumeSource(in GlusterfsVolumeSource, out *GlusterfsVolumeSource, c *conversion.Cloner) error {
out.EndpointsName = in.EndpointsName
out.Path = in.Path
out.ReadOnly = in.ReadOnly
return nil
}
func deepCopy_v1_HTTPGetAction(in HTTPGetAction, out *HTTPGetAction, c *conversion.Cloner) error {
out.Path = in.Path
if err := deepCopy_util_IntOrString(in.Port, &out.Port, c); err != nil {
return err
}
out.Host = in.Host
out.Scheme = in.Scheme
return nil
}
func deepCopy_v1_Handler(in Handler, out *Handler, c *conversion.Cloner) error {
if in.Exec != nil {
out.Exec = new(ExecAction)
if err := deepCopy_v1_ExecAction(*in.Exec, out.Exec, c); err != nil {
return err
}
} else {
out.Exec = nil
}
if in.HTTPGet != nil {
out.HTTPGet = new(HTTPGetAction)
if err := deepCopy_v1_HTTPGetAction(*in.HTTPGet, out.HTTPGet, c); err != nil {
return err
}
} else {
out.HTTPGet = nil
}
if in.TCPSocket != nil {
out.TCPSocket = new(TCPSocketAction)
if err := deepCopy_v1_TCPSocketAction(*in.TCPSocket, out.TCPSocket, c); err != nil {
return err
}
} else {
out.TCPSocket = nil
}
return nil
}
func deepCopy_v1_HostPathVolumeSource(in HostPathVolumeSource, out *HostPathVolumeSource, c *conversion.Cloner) error {
out.Path = in.Path
return nil
}
func deepCopy_v1_ISCSIVolumeSource(in ISCSIVolumeSource, out *ISCSIVolumeSource, c *conversion.Cloner) error {
out.TargetPortal = in.TargetPortal
out.IQN = in.IQN
out.Lun = in.Lun
out.FSType = in.FSType
out.ReadOnly = in.ReadOnly
return nil
}
func deepCopy_v1_Lifecycle(in Lifecycle, out *Lifecycle, c *conversion.Cloner) error {
if in.PostStart != nil {
out.PostStart = new(Handler)
if err := deepCopy_v1_Handler(*in.PostStart, out.PostStart, c); err != nil {
return err
}
} else {
out.PostStart = nil
}
if in.PreStop != nil {
out.PreStop = new(Handler)
if err := deepCopy_v1_Handler(*in.PreStop, out.PreStop, c); err != nil {
return err
}
} else {
out.PreStop = nil
}
return nil
}
func deepCopy_v1_LimitRange(in LimitRange, out *LimitRange, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_LimitRangeSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_LimitRangeItem(in LimitRangeItem, out *LimitRangeItem, c *conversion.Cloner) error {
out.Type = in.Type
if in.Max != nil {
out.Max = make(ResourceList)
for key, val := range in.Max {
newVal := new(resource.Quantity)
if err := deepCopy_resource_Quantity(val, newVal, c); err != nil {
return err
}
out.Max[key] = *newVal
}
} else {
out.Max = nil
}
if in.Min != nil {
out.Min = make(ResourceList)
for key, val := range in.Min {
newVal := new(resource.Quantity)
if err := deepCopy_resource_Quantity(val, newVal, c); err != nil {
return err
}
out.Min[key] = *newVal
}
} else {
out.Min = nil
}
if in.Default != nil {
out.Default = make(ResourceList)
for key, val := range in.Default {
newVal := new(resource.Quantity)
if err := deepCopy_resource_Quantity(val, newVal, c); err != nil {
return err
}
out.Default[key] = *newVal
}
} else {
out.Default = nil
}
return nil
}
func deepCopy_v1_LimitRangeList(in LimitRangeList, out *LimitRangeList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]LimitRange, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_LimitRange(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_LimitRangeSpec(in LimitRangeSpec, out *LimitRangeSpec, c *conversion.Cloner) error {
if in.Limits != nil {
out.Limits = make([]LimitRangeItem, len(in.Limits))
for i := range in.Limits {
if err := deepCopy_v1_LimitRangeItem(in.Limits[i], &out.Limits[i], c); err != nil {
return err
}
}
} else {
out.Limits = nil
}
return nil
}
func deepCopy_v1_List(in List, out *List, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]runtime.RawExtension, len(in.Items))
for i := range in.Items {
if err := deepCopy_runtime_RawExtension(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_ListMeta(in ListMeta, out *ListMeta, c *conversion.Cloner) error {
out.SelfLink = in.SelfLink
out.ResourceVersion = in.ResourceVersion
return nil
}
func deepCopy_v1_ListOptions(in ListOptions, out *ListOptions, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
out.LabelSelector = in.LabelSelector
out.FieldSelector = in.FieldSelector
out.Watch = in.Watch
out.ResourceVersion = in.ResourceVersion
return nil
}
func deepCopy_v1_LoadBalancerIngress(in LoadBalancerIngress, out *LoadBalancerIngress, c *conversion.Cloner) error {
out.IP = in.IP
out.Hostname = in.Hostname
return nil
}
func deepCopy_v1_LoadBalancerStatus(in LoadBalancerStatus, out *LoadBalancerStatus, c *conversion.Cloner) error {
if in.Ingress != nil {
out.Ingress = make([]LoadBalancerIngress, len(in.Ingress))
for i := range in.Ingress {
if err := deepCopy_v1_LoadBalancerIngress(in.Ingress[i], &out.Ingress[i], c); err != nil {
return err
}
}
} else {
out.Ingress = nil
}
return nil
}
func deepCopy_v1_LocalObjectReference(in LocalObjectReference, out *LocalObjectReference, c *conversion.Cloner) error {
out.Name = in.Name
return nil
}
func deepCopy_v1_NFSVolumeSource(in NFSVolumeSource, out *NFSVolumeSource, c *conversion.Cloner) error {
out.Server = in.Server
out.Path = in.Path
out.ReadOnly = in.ReadOnly
return nil
}
func deepCopy_v1_Namespace(in Namespace, out *Namespace, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_NamespaceSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
if err := deepCopy_v1_NamespaceStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_NamespaceList(in NamespaceList, out *NamespaceList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]Namespace, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_Namespace(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_NamespaceSpec(in NamespaceSpec, out *NamespaceSpec, c *conversion.Cloner) error {
if in.Finalizers != nil {
out.Finalizers = make([]FinalizerName, len(in.Finalizers))
for i := range in.Finalizers {
out.Finalizers[i] = in.Finalizers[i]
}
} else {
out.Finalizers = nil
}
return nil
}
func deepCopy_v1_NamespaceStatus(in NamespaceStatus, out *NamespaceStatus, c *conversion.Cloner) error {
out.Phase = in.Phase
return nil
}
func deepCopy_v1_Node(in Node, out *Node, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_NodeSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
if err := deepCopy_v1_NodeStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_NodeAddress(in NodeAddress, out *NodeAddress, c *conversion.Cloner) error {
out.Type = in.Type
out.Address = in.Address
return nil
}
func deepCopy_v1_NodeCondition(in NodeCondition, out *NodeCondition, c *conversion.Cloner) error {
out.Type = in.Type
out.Status = in.Status
if err := deepCopy_util_Time(in.LastHeartbeatTime, &out.LastHeartbeatTime, c); err != nil {
return err
}
if err := deepCopy_util_Time(in.LastTransitionTime, &out.LastTransitionTime, c); err != nil {
return err
}
out.Reason = in.Reason
out.Message = in.Message
return nil
}
func deepCopy_v1_NodeList(in NodeList, out *NodeList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]Node, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_Node(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_NodeSpec(in NodeSpec, out *NodeSpec, c *conversion.Cloner) error {
out.PodCIDR = in.PodCIDR
out.ExternalID = in.ExternalID
out.ProviderID = in.ProviderID
out.Unschedulable = in.Unschedulable
return nil
}
func deepCopy_v1_NodeStatus(in NodeStatus, out *NodeStatus, c *conversion.Cloner) error {
if in.Capacity != nil {
out.Capacity = make(ResourceList)
for key, val := range in.Capacity {
newVal := new(resource.Quantity)
if err := deepCopy_resource_Quantity(val, newVal, c); err != nil {
return err
}
out.Capacity[key] = *newVal
}
} else {
out.Capacity = nil
}
out.Phase = in.Phase
if in.Conditions != nil {
out.Conditions = make([]NodeCondition, len(in.Conditions))
for i := range in.Conditions {
if err := deepCopy_v1_NodeCondition(in.Conditions[i], &out.Conditions[i], c); err != nil {
return err
}
}
} else {
out.Conditions = nil
}
if in.Addresses != nil {
out.Addresses = make([]NodeAddress, len(in.Addresses))
for i := range in.Addresses {
if err := deepCopy_v1_NodeAddress(in.Addresses[i], &out.Addresses[i], c); err != nil {
return err
}
}
} else {
out.Addresses = nil
}
if err := deepCopy_v1_NodeSystemInfo(in.NodeInfo, &out.NodeInfo, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_NodeSystemInfo(in NodeSystemInfo, out *NodeSystemInfo, c *conversion.Cloner) error {
out.MachineID = in.MachineID
out.SystemUUID = in.SystemUUID
out.BootID = in.BootID
out.KernelVersion = in.KernelVersion
out.OsImage = in.OsImage
out.ContainerRuntimeVersion = in.ContainerRuntimeVersion
out.KubeletVersion = in.KubeletVersion
out.KubeProxyVersion = in.KubeProxyVersion
return nil
}
func deepCopy_v1_ObjectFieldSelector(in ObjectFieldSelector, out *ObjectFieldSelector, c *conversion.Cloner) error {
out.APIVersion = in.APIVersion
out.FieldPath = in.FieldPath
return nil
}
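// deepCopy_v1_ObjectMeta copies the identifying metadata, cloning
// CreationTimestamp, the optional DeletionTimestamp and
// DeletionGracePeriodSeconds pointers, and the Labels and Annotations maps.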
func deepCopy_v1_ObjectMeta(in ObjectMeta, out *ObjectMeta, c *conversion.Cloner) error {
out.Name = in.Name
out.GenerateName = in.GenerateName
out.Namespace = in.Namespace
out.SelfLink = in.SelfLink
out.UID = in.UID
out.ResourceVersion = in.ResourceVersion
out.Generation = in.Generation
if err := deepCopy_util_Time(in.CreationTimestamp, &out.CreationTimestamp, c); err != nil {
return err
}
if in.DeletionTimestamp != nil {
out.DeletionTimestamp = new(util.Time)
if err := deepCopy_util_Time(*in.DeletionTimestamp, out.DeletionTimestamp, c); err != nil {
return err
}
} else {
out.DeletionTimestamp = nil
}
if in.DeletionGracePeriodSeconds != nil {
out.DeletionGracePeriodSeconds = new(int64)
*out.DeletionGracePeriodSeconds = *in.DeletionGracePeriodSeconds
} else {
out.DeletionGracePeriodSeconds = nil
}
if in.Labels != nil {
out.Labels = make(map[string]string)
for key, val := range in.Labels {
out.Labels[key] = val
}
} else {
out.Labels = nil
}
if in.Annotations != nil {
out.Annotations = make(map[string]string)
for key, val := range in.Annotations {
out.Annotations[key] = val
}
} else {
out.Annotations = nil
}
return nil
}
func deepCopy_v1_ObjectReference(in ObjectReference, out *ObjectReference, c *conversion.Cloner) error {
out.Kind = in.Kind
out.Namespace = in.Namespace
out.Name = in.Name
out.UID = in.UID
out.APIVersion = in.APIVersion
out.ResourceVersion = in.ResourceVersion
out.FieldPath = in.FieldPath
return nil
}
func deepCopy_v1_PersistentVolume(in PersistentVolume, out *PersistentVolume, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_PersistentVolumeSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
if err := deepCopy_v1_PersistentVolumeStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_PersistentVolumeClaim(in PersistentVolumeClaim, out *PersistentVolumeClaim, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_PersistentVolumeClaimSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
if err := deepCopy_v1_PersistentVolumeClaimStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_PersistentVolumeClaimList(in PersistentVolumeClaimList, out *PersistentVolumeClaimList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]PersistentVolumeClaim, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_PersistentVolumeClaim(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_PersistentVolumeClaimSpec(in PersistentVolumeClaimSpec, out *PersistentVolumeClaimSpec, c *conversion.Cloner) error {
if in.AccessModes != nil {
out.AccessModes = make([]PersistentVolumeAccessMode, len(in.AccessModes))
for i := range in.AccessModes {
out.AccessModes[i] = in.AccessModes[i]
}
} else {
out.AccessModes = nil
}
if err := deepCopy_v1_ResourceRequirements(in.Resources, &out.Resources, c); err != nil {
return err
}
out.VolumeName = in.VolumeName
return nil
}
func deepCopy_v1_PersistentVolumeClaimStatus(in PersistentVolumeClaimStatus, out *PersistentVolumeClaimStatus, c *conversion.Cloner) error {
out.Phase = in.Phase
if in.AccessModes != nil {
out.AccessModes = make([]PersistentVolumeAccessMode, len(in.AccessModes))
for i := range in.AccessModes {
out.AccessModes[i] = in.AccessModes[i]
}
} else {
out.AccessModes = nil
}
if in.Capacity != nil {
out.Capacity = make(ResourceList)
for key, val := range in.Capacity {
newVal := new(resource.Quantity)
if err := deepCopy_resource_Quantity(val, newVal, c); err != nil {
return err
}
out.Capacity[key] = *newVal
}
} else {
out.Capacity = nil
}
return nil
}
func deepCopy_v1_PersistentVolumeClaimVolumeSource(in PersistentVolumeClaimVolumeSource, out *PersistentVolumeClaimVolumeSource, c *conversion.Cloner) error {
out.ClaimName = in.ClaimName
out.ReadOnly = in.ReadOnly
return nil
}
func deepCopy_v1_PersistentVolumeList(in PersistentVolumeList, out *PersistentVolumeList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]PersistentVolume, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_PersistentVolume(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_PersistentVolumeSource(in PersistentVolumeSource, out *PersistentVolumeSource, c *conversion.Cloner) error {
if in.GCEPersistentDisk != nil {
out.GCEPersistentDisk = new(GCEPersistentDiskVolumeSource)
if err := deepCopy_v1_GCEPersistentDiskVolumeSource(*in.GCEPersistentDisk, out.GCEPersistentDisk, c); err != nil {
return err
}
} else {
out.GCEPersistentDisk = nil
}
if in.AWSElasticBlockStore != nil {
out.AWSElasticBlockStore = new(AWSElasticBlockStoreVolumeSource)
if err := deepCopy_v1_AWSElasticBlockStoreVolumeSource(*in.AWSElasticBlockStore, out.AWSElasticBlockStore, c); err != nil {
return err
}
} else {
out.AWSElasticBlockStore = nil
}
if in.HostPath != nil {
out.HostPath = new(HostPathVolumeSource)
if err := deepCopy_v1_HostPathVolumeSource(*in.HostPath, out.HostPath, c); err != nil {
return err
}
} else {
out.HostPath = nil
}
if in.Glusterfs != nil {
out.Glusterfs = new(GlusterfsVolumeSource)
if err := deepCopy_v1_GlusterfsVolumeSource(*in.Glusterfs, out.Glusterfs, c); err != nil {
return err
}
} else {
out.Glusterfs = nil
}
if in.NFS != nil {
out.NFS = new(NFSVolumeSource)
if err := deepCopy_v1_NFSVolumeSource(*in.NFS, out.NFS, c); err != nil {
return err
}
} else {
out.NFS = nil
}
if in.RBD != nil {
out.RBD = new(RBDVolumeSource)
if err := deepCopy_v1_RBDVolumeSource(*in.RBD, out.RBD, c); err != nil {
return err
}
} else {
out.RBD = nil
}
if in.ISCSI != nil {
out.ISCSI = new(ISCSIVolumeSource)
if err := deepCopy_v1_ISCSIVolumeSource(*in.ISCSI, out.ISCSI, c); err != nil {
return err
}
} else {
out.ISCSI = nil
}
return nil
}
func deepCopy_v1_PersistentVolumeSpec(in PersistentVolumeSpec, out *PersistentVolumeSpec, c *conversion.Cloner) error {
if in.Capacity != nil {
out.Capacity = make(ResourceList)
for key, val := range in.Capacity {
newVal := new(resource.Quantity)
if err := deepCopy_resource_Quantity(val, newVal, c); err != nil {
return err
}
out.Capacity[key] = *newVal
}
} else {
out.Capacity = nil
}
if err := deepCopy_v1_PersistentVolumeSource(in.PersistentVolumeSource, &out.PersistentVolumeSource, c); err != nil {
return err
}
if in.AccessModes != nil {
out.AccessModes = make([]PersistentVolumeAccessMode, len(in.AccessModes))
for i := range in.AccessModes {
out.AccessModes[i] = in.AccessModes[i]
}
} else {
out.AccessModes = nil
}
if in.ClaimRef != nil {
out.ClaimRef = new(ObjectReference)
if err := deepCopy_v1_ObjectReference(*in.ClaimRef, out.ClaimRef, c); err != nil {
return err
}
} else {
out.ClaimRef = nil
}
out.PersistentVolumeReclaimPolicy = in.PersistentVolumeReclaimPolicy
return nil
}
func deepCopy_v1_PersistentVolumeStatus(in PersistentVolumeStatus, out *PersistentVolumeStatus, c *conversion.Cloner) error {
out.Phase = in.Phase
out.Message = in.Message
out.Reason = in.Reason
return nil
}
func deepCopy_v1_Pod(in Pod, out *Pod, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_PodSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
if err := deepCopy_v1_PodStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_PodAttachOptions(in PodAttachOptions, out *PodAttachOptions, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
out.Stdin = in.Stdin
out.Stdout = in.Stdout
out.Stderr = in.Stderr
out.TTY = in.TTY
out.Container = in.Container
return nil
}
func deepCopy_v1_PodCondition(in PodCondition, out *PodCondition, c *conversion.Cloner) error {
out.Type = in.Type
out.Status = in.Status
return nil
}
func deepCopy_v1_PodExecOptions(in PodExecOptions, out *PodExecOptions, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
out.Stdin = in.Stdin
out.Stdout = in.Stdout
out.Stderr = in.Stderr
out.TTY = in.TTY
out.Container = in.Container
if in.Command != nil {
out.Command = make([]string, len(in.Command))
for i := range in.Command {
out.Command[i] = in.Command[i]
}
} else {
out.Command = nil
}
return nil
}
func deepCopy_v1_PodList(in PodList, out *PodList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]Pod, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_Pod(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_PodLogOptions(in PodLogOptions, out *PodLogOptions, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
out.Container = in.Container
out.Follow = in.Follow
out.Previous = in.Previous
return nil
}
func deepCopy_v1_PodProxyOptions(in PodProxyOptions, out *PodProxyOptions, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
out.Path = in.Path
return nil
}
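// deepCopy_v1_PodSpec copies the pod specification: Volumes, Containers and
// ImagePullSecrets are copied element-wise, the NodeSelector map is
// reallocated, and the optional TerminationGracePeriodSeconds and
// ActiveDeadlineSeconds pointers are cloned when set.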
func deepCopy_v1_PodSpec(in PodSpec, out *PodSpec, c *conversion.Cloner) error {
if in.Volumes != nil {
out.Volumes = make([]Volume, len(in.Volumes))
for i := range in.Volumes {
if err := deepCopy_v1_Volume(in.Volumes[i], &out.Volumes[i], c); err != nil {
return err
}
}
} else {
out.Volumes = nil
}
if in.Containers != nil {
out.Containers = make([]Container, len(in.Containers))
for i := range in.Containers {
if err := deepCopy_v1_Container(in.Containers[i], &out.Containers[i], c); err != nil {
return err
}
}
} else {
out.Containers = nil
}
out.RestartPolicy = in.RestartPolicy
if in.TerminationGracePeriodSeconds != nil {
out.TerminationGracePeriodSeconds = new(int64)
*out.TerminationGracePeriodSeconds = *in.TerminationGracePeriodSeconds
} else {
out.TerminationGracePeriodSeconds = nil
}
if in.ActiveDeadlineSeconds != nil {
out.ActiveDeadlineSeconds = new(int64)
*out.ActiveDeadlineSeconds = *in.ActiveDeadlineSeconds
} else {
out.ActiveDeadlineSeconds = nil
}
out.DNSPolicy = in.DNSPolicy
if in.NodeSelector != nil {
out.NodeSelector = make(map[string]string)
for key, val := range in.NodeSelector {
out.NodeSelector[key] = val
}
} else {
out.NodeSelector = nil
}
out.ServiceAccountName = in.ServiceAccountName
out.DeprecatedServiceAccount = in.DeprecatedServiceAccount
out.NodeName = in.NodeName
out.HostNetwork = in.HostNetwork
if in.ImagePullSecrets != nil {
out.ImagePullSecrets = make([]LocalObjectReference, len(in.ImagePullSecrets))
for i := range in.ImagePullSecrets {
if err := deepCopy_v1_LocalObjectReference(in.ImagePullSecrets[i], &out.ImagePullSecrets[i], c); err != nil {
return err
}
}
} else {
out.ImagePullSecrets = nil
}
return nil
}
func deepCopy_v1_PodStatus(in PodStatus, out *PodStatus, c *conversion.Cloner) error {
out.Phase = in.Phase
if in.Conditions != nil {
out.Conditions = make([]PodCondition, len(in.Conditions))
for i := range in.Conditions {
if err := deepCopy_v1_PodCondition(in.Conditions[i], &out.Conditions[i], c); err != nil {
return err
}
}
} else {
out.Conditions = nil
}
out.Message = in.Message
out.Reason = in.Reason
out.HostIP = in.HostIP
out.PodIP = in.PodIP
if in.StartTime != nil {
out.StartTime = new(util.Time)
if err := deepCopy_util_Time(*in.StartTime, out.StartTime, c); err != nil {
return err
}
} else {
out.StartTime = nil
}
if in.ContainerStatuses != nil {
out.ContainerStatuses = make([]ContainerStatus, len(in.ContainerStatuses))
for i := range in.ContainerStatuses {
if err := deepCopy_v1_ContainerStatus(in.ContainerStatuses[i], &out.ContainerStatuses[i], c); err != nil {
return err
}
}
} else {
out.ContainerStatuses = nil
}
return nil
}
func deepCopy_v1_PodStatusResult(in PodStatusResult, out *PodStatusResult, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_PodStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_PodTemplate(in PodTemplate, out *PodTemplate, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_PodTemplateSpec(in.Template, &out.Template, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_PodTemplateList(in PodTemplateList, out *PodTemplateList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]PodTemplate, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_PodTemplate(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_PodTemplateSpec(in PodTemplateSpec, out *PodTemplateSpec, c *conversion.Cloner) error {
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_PodSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_Probe(in Probe, out *Probe, c *conversion.Cloner) error {
if err := deepCopy_v1_Handler(in.Handler, &out.Handler, c); err != nil {
return err
}
out.InitialDelaySeconds = in.InitialDelaySeconds
out.TimeoutSeconds = in.TimeoutSeconds
return nil
}
func deepCopy_v1_RBDVolumeSource(in RBDVolumeSource, out *RBDVolumeSource, c *conversion.Cloner) error {
if in.CephMonitors != nil {
out.CephMonitors = make([]string, len(in.CephMonitors))
for i := range in.CephMonitors {
out.CephMonitors[i] = in.CephMonitors[i]
}
} else {
out.CephMonitors = nil
}
out.RBDImage = in.RBDImage
out.FSType = in.FSType
out.RBDPool = in.RBDPool
out.RadosUser = in.RadosUser
out.Keyring = in.Keyring
if in.SecretRef != nil {
out.SecretRef = new(LocalObjectReference)
if err := deepCopy_v1_LocalObjectReference(*in.SecretRef, out.SecretRef, c); err != nil {
return err
}
} else {
out.SecretRef = nil
}
out.ReadOnly = in.ReadOnly
return nil
}
func deepCopy_v1_RangeAllocation(in RangeAllocation, out *RangeAllocation, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
out.Range = in.Range
if in.Data != nil {
out.Data = make([]uint8, len(in.Data))
for i := range in.Data {
out.Data[i] = in.Data[i]
}
} else {
out.Data = nil
}
return nil
}
func deepCopy_v1_ReplicationController(in ReplicationController, out *ReplicationController, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ReplicationControllerSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
if err := deepCopy_v1_ReplicationControllerStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_ReplicationControllerList(in ReplicationControllerList, out *ReplicationControllerList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]ReplicationController, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_ReplicationController(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_ReplicationControllerSpec(in ReplicationControllerSpec, out *ReplicationControllerSpec, c *conversion.Cloner) error {
if in.Replicas != nil {
out.Replicas = new(int)
*out.Replicas = *in.Replicas
} else {
out.Replicas = nil
}
if in.Selector != nil {
out.Selector = make(map[string]string)
for key, val := range in.Selector {
out.Selector[key] = val
}
} else {
out.Selector = nil
}
if in.Template != nil {
out.Template = new(PodTemplateSpec)
if err := deepCopy_v1_PodTemplateSpec(*in.Template, out.Template, c); err != nil {
return err
}
} else {
out.Template = nil
}
return nil
}
func deepCopy_v1_ReplicationControllerStatus(in ReplicationControllerStatus, out *ReplicationControllerStatus, c *conversion.Cloner) error {
out.Replicas = in.Replicas
out.ObservedGeneration = in.ObservedGeneration
return nil
}
func deepCopy_v1_ResourceQuota(in ResourceQuota, out *ResourceQuota, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ResourceQuotaSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
if err := deepCopy_v1_ResourceQuotaStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_ResourceQuotaList(in ResourceQuotaList, out *ResourceQuotaList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]ResourceQuota, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_ResourceQuota(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_ResourceQuotaSpec(in ResourceQuotaSpec, out *ResourceQuotaSpec, c *conversion.Cloner) error {
if in.Hard != nil {
out.Hard = make(ResourceList)
for key, val := range in.Hard {
newVal := new(resource.Quantity)
if err := deepCopy_resource_Quantity(val, newVal, c); err != nil {
return err
}
out.Hard[key] = *newVal
}
} else {
out.Hard = nil
}
return nil
}
func deepCopy_v1_ResourceQuotaStatus(in ResourceQuotaStatus, out *ResourceQuotaStatus, c *conversion.Cloner) error {
if in.Hard != nil {
out.Hard = make(ResourceList)
for key, val := range in.Hard {
newVal := new(resource.Quantity)
if err := deepCopy_resource_Quantity(val, newVal, c); err != nil {
return err
}
out.Hard[key] = *newVal
}
} else {
out.Hard = nil
}
if in.Used != nil {
out.Used = make(ResourceList)
for key, val := range in.Used {
newVal := new(resource.Quantity)
if err := deepCopy_resource_Quantity(val, newVal, c); err != nil {
return err
}
out.Used[key] = *newVal
}
} else {
out.Used = nil
}
return nil
}
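// deepCopy_v1_ResourceRequirements copies the Limits and Requests
// ResourceLists, deep-copying every Quantity value so the resulting maps
// share no entries with the source.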
func deepCopy_v1_ResourceRequirements(in ResourceRequirements, out *ResourceRequirements, c *conversion.Cloner) error {
if in.Limits != nil {
out.Limits = make(ResourceList)
for key, val := range in.Limits {
newVal := new(resource.Quantity)
if err := deepCopy_resource_Quantity(val, newVal, c); err != nil {
return err
}
out.Limits[key] = *newVal
}
} else {
out.Limits = nil
}
if in.Requests != nil {
out.Requests = make(ResourceList)
for key, val := range in.Requests {
newVal := new(resource.Quantity)
if err := deepCopy_resource_Quantity(val, newVal, c); err != nil {
return err
}
out.Requests[key] = *newVal
}
} else {
out.Requests = nil
}
return nil
}
func deepCopy_v1_SELinuxOptions(in SELinuxOptions, out *SELinuxOptions, c *conversion.Cloner) error {
out.User = in.User
out.Role = in.Role
out.Type = in.Type
out.Level = in.Level
return nil
}
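// deepCopy_v1_Secret copies the secret's metadata and Type and deep-copies
// every Data value through the Cloner so the byte slices are not shared.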
func deepCopy_v1_Secret(in Secret, out *Secret, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if in.Data != nil {
out.Data = make(map[string][]uint8)
for key, val := range in.Data {
if newVal, err := c.DeepCopy(val); err != nil {
return err
} else {
out.Data[key] = newVal.([]uint8)
}
}
} else {
out.Data = nil
}
out.Type = in.Type
return nil
}
func deepCopy_v1_SecretList(in SecretList, out *SecretList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]Secret, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_Secret(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_SecretVolumeSource(in SecretVolumeSource, out *SecretVolumeSource, c *conversion.Cloner) error {
out.SecretName = in.SecretName
return nil
}
func deepCopy_v1_SecurityContext(in SecurityContext, out *SecurityContext, c *conversion.Cloner) error {
if in.Capabilities != nil {
out.Capabilities = new(Capabilities)
if err := deepCopy_v1_Capabilities(*in.Capabilities, out.Capabilities, c); err != nil {
return err
}
} else {
out.Capabilities = nil
}
if in.Privileged != nil {
out.Privileged = new(bool)
*out.Privileged = *in.Privileged
} else {
out.Privileged = nil
}
if in.SELinuxOptions != nil {
out.SELinuxOptions = new(SELinuxOptions)
if err := deepCopy_v1_SELinuxOptions(*in.SELinuxOptions, out.SELinuxOptions, c); err != nil {
return err
}
} else {
out.SELinuxOptions = nil
}
if in.RunAsUser != nil {
out.RunAsUser = new(int64)
*out.RunAsUser = *in.RunAsUser
} else {
out.RunAsUser = nil
}
out.RunAsNonRoot = in.RunAsNonRoot
return nil
}
func deepCopy_v1_SerializedReference(in SerializedReference, out *SerializedReference, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectReference(in.Reference, &out.Reference, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_Service(in Service, out *Service, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ServiceSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
if err := deepCopy_v1_ServiceStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_ServiceAccount(in ServiceAccount, out *ServiceAccount, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if in.Secrets != nil {
out.Secrets = make([]ObjectReference, len(in.Secrets))
for i := range in.Secrets {
if err := deepCopy_v1_ObjectReference(in.Secrets[i], &out.Secrets[i], c); err != nil {
return err
}
}
} else {
out.Secrets = nil
}
if in.ImagePullSecrets != nil {
out.ImagePullSecrets = make([]LocalObjectReference, len(in.ImagePullSecrets))
for i := range in.ImagePullSecrets {
if err := deepCopy_v1_LocalObjectReference(in.ImagePullSecrets[i], &out.ImagePullSecrets[i], c); err != nil {
return err
}
}
} else {
out.ImagePullSecrets = nil
}
return nil
}
func deepCopy_v1_ServiceAccountList(in ServiceAccountList, out *ServiceAccountList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]ServiceAccount, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_ServiceAccount(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_ServiceList(in ServiceList, out *ServiceList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]Service, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_Service(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_ServicePort(in ServicePort, out *ServicePort, c *conversion.Cloner) error {
out.Name = in.Name
out.Protocol = in.Protocol
out.Port = in.Port
if err := deepCopy_util_IntOrString(in.TargetPort, &out.TargetPort, c); err != nil {
return err
}
out.NodePort = in.NodePort
return nil
}
func deepCopy_v1_ServiceSpec(in ServiceSpec, out *ServiceSpec, c *conversion.Cloner) error {
if in.Ports != nil {
out.Ports = make([]ServicePort, len(in.Ports))
for i := range in.Ports {
if err := deepCopy_v1_ServicePort(in.Ports[i], &out.Ports[i], c); err != nil {
return err
}
}
} else {
out.Ports = nil
}
if in.Selector != nil {
out.Selector = make(map[string]string)
for key, val := range in.Selector {
out.Selector[key] = val
}
} else {
out.Selector = nil
}
out.ClusterIP = in.ClusterIP
out.Type = in.Type
if in.ExternalIPs != nil {
out.ExternalIPs = make([]string, len(in.ExternalIPs))
for i := range in.ExternalIPs {
out.ExternalIPs[i] = in.ExternalIPs[i]
}
} else {
out.ExternalIPs = nil
}
out.SessionAffinity = in.SessionAffinity
return nil
}
func deepCopy_v1_ServiceStatus(in ServiceStatus, out *ServiceStatus, c *conversion.Cloner) error {
if err := deepCopy_v1_LoadBalancerStatus(in.LoadBalancer, &out.LoadBalancer, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_Status(in Status, out *Status, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
out.Status = in.Status
out.Message = in.Message
out.Reason = in.Reason
if in.Details != nil {
out.Details = new(StatusDetails)
if err := deepCopy_v1_StatusDetails(*in.Details, out.Details, c); err != nil {
return err
}
} else {
out.Details = nil
}
out.Code = in.Code
return nil
}
func deepCopy_v1_StatusCause(in StatusCause, out *StatusCause, c *conversion.Cloner) error {
out.Type = in.Type
out.Message = in.Message
out.Field = in.Field
return nil
}
func deepCopy_v1_StatusDetails(in StatusDetails, out *StatusDetails, c *conversion.Cloner) error {
out.Name = in.Name
out.Kind = in.Kind
if in.Causes != nil {
out.Causes = make([]StatusCause, len(in.Causes))
for i := range in.Causes {
if err := deepCopy_v1_StatusCause(in.Causes[i], &out.Causes[i], c); err != nil {
return err
}
}
} else {
out.Causes = nil
}
out.RetryAfterSeconds = in.RetryAfterSeconds
return nil
}
func deepCopy_v1_TCPSocketAction(in TCPSocketAction, out *TCPSocketAction, c *conversion.Cloner) error {
if err := deepCopy_util_IntOrString(in.Port, &out.Port, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_ThirdPartyResource(in ThirdPartyResource, out *ThirdPartyResource, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
out.Description = in.Description
if in.Versions != nil {
out.Versions = make([]APIVersion, len(in.Versions))
for i := range in.Versions {
if err := deepCopy_v1_APIVersion(in.Versions[i], &out.Versions[i], c); err != nil {
return err
}
}
} else {
out.Versions = nil
}
return nil
}
func deepCopy_v1_ThirdPartyResourceData(in ThirdPartyResourceData, out *ThirdPartyResourceData, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if in.Data != nil {
out.Data = make([]uint8, len(in.Data))
for i := range in.Data {
out.Data[i] = in.Data[i]
}
} else {
out.Data = nil
}
return nil
}
func deepCopy_v1_ThirdPartyResourceList(in ThirdPartyResourceList, out *ThirdPartyResourceList, c *conversion.Cloner) error {
if err := deepCopy_v1_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_v1_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]ThirdPartyResource, len(in.Items))
for i := range in.Items {
if err := deepCopy_v1_ThirdPartyResource(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_v1_TypeMeta(in TypeMeta, out *TypeMeta, c *conversion.Cloner) error {
out.Kind = in.Kind
out.APIVersion = in.APIVersion
return nil
}
func deepCopy_v1_Volume(in Volume, out *Volume, c *conversion.Cloner) error {
out.Name = in.Name
if err := deepCopy_v1_VolumeSource(in.VolumeSource, &out.VolumeSource, c); err != nil {
return err
}
return nil
}
func deepCopy_v1_VolumeMount(in VolumeMount, out *VolumeMount, c *conversion.Cloner) error {
out.Name = in.Name
out.ReadOnly = in.ReadOnly
out.MountPath = in.MountPath
return nil
}
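// deepCopy_v1_VolumeSource clones each volume source pointer that is set (at
// most one is expected for a valid volume) and leaves the remaining fields
// nil.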
func deepCopy_v1_VolumeSource(in VolumeSource, out *VolumeSource, c *conversion.Cloner) error {
if in.HostPath != nil {
out.HostPath = new(HostPathVolumeSource)
if err := deepCopy_v1_HostPathVolumeSource(*in.HostPath, out.HostPath, c); err != nil {
return err
}
} else {
out.HostPath = nil
}
if in.EmptyDir != nil {
out.EmptyDir = new(EmptyDirVolumeSource)
if err := deepCopy_v1_EmptyDirVolumeSource(*in.EmptyDir, out.EmptyDir, c); err != nil {
return err
}
} else {
out.EmptyDir = nil
}
if in.GCEPersistentDisk != nil {
out.GCEPersistentDisk = new(GCEPersistentDiskVolumeSource)
if err := deepCopy_v1_GCEPersistentDiskVolumeSource(*in.GCEPersistentDisk, out.GCEPersistentDisk, c); err != nil {
return err
}
} else {
out.GCEPersistentDisk = nil
}
if in.AWSElasticBlockStore != nil {
out.AWSElasticBlockStore = new(AWSElasticBlockStoreVolumeSource)
if err := deepCopy_v1_AWSElasticBlockStoreVolumeSource(*in.AWSElasticBlockStore, out.AWSElasticBlockStore, c); err != nil {
return err
}
} else {
out.AWSElasticBlockStore = nil
}
if in.GitRepo != nil {
out.GitRepo = new(GitRepoVolumeSource)
if err := deepCopy_v1_GitRepoVolumeSource(*in.GitRepo, out.GitRepo, c); err != nil {
return err
}
} else {
out.GitRepo = nil
}
if in.Secret != nil {
out.Secret = new(SecretVolumeSource)
if err := deepCopy_v1_SecretVolumeSource(*in.Secret, out.Secret, c); err != nil {
return err
}
} else {
out.Secret = nil
}
if in.NFS != nil {
out.NFS = new(NFSVolumeSource)
if err := deepCopy_v1_NFSVolumeSource(*in.NFS, out.NFS, c); err != nil {
return err
}
} else {
out.NFS = nil
}
if in.ISCSI != nil {
out.ISCSI = new(ISCSIVolumeSource)
if err := deepCopy_v1_ISCSIVolumeSource(*in.ISCSI, out.ISCSI, c); err != nil {
return err
}
} else {
out.ISCSI = nil
}
if in.Glusterfs != nil {
out.Glusterfs = new(GlusterfsVolumeSource)
if err := deepCopy_v1_GlusterfsVolumeSource(*in.Glusterfs, out.Glusterfs, c); err != nil {
return err
}
} else {
out.Glusterfs = nil
}
if in.PersistentVolumeClaim != nil {
out.PersistentVolumeClaim = new(PersistentVolumeClaimVolumeSource)
if err := deepCopy_v1_PersistentVolumeClaimVolumeSource(*in.PersistentVolumeClaim, out.PersistentVolumeClaim, c); err != nil {
return err
}
} else {
out.PersistentVolumeClaim = nil
}
if in.RBD != nil {
out.RBD = new(RBDVolumeSource)
if err := deepCopy_v1_RBDVolumeSource(*in.RBD, out.RBD, c); err != nil {
return err
}
} else {
out.RBD = nil
}
return nil
}
func deepCopy_runtime_RawExtension(in runtime.RawExtension, out *runtime.RawExtension, c *conversion.Cloner) error {
if in.RawJSON != nil {
out.RawJSON = make([]uint8, len(in.RawJSON))
for i := range in.RawJSON {
out.RawJSON[i] = in.RawJSON[i]
}
} else {
out.RawJSON = nil
}
return nil
}
func deepCopy_util_IntOrString(in util.IntOrString, out *util.IntOrString, c *conversion.Cloner) error {
out.Kind = in.Kind
out.IntVal = in.IntVal
out.StrVal = in.StrVal
return nil
}
func deepCopy_util_Time(in util.Time, out *util.Time, c *conversion.Cloner) error {
if newVal, err := c.DeepCopy(in.Time); err != nil {
return err
} else {
out.Time = newVal.(time.Time)
}
return nil
}
func init() {
err := api.Scheme.AddGeneratedDeepCopyFuncs(
deepCopy_resource_Quantity,
deepCopy_v1_APIVersion,
deepCopy_v1_AWSElasticBlockStoreVolumeSource,
deepCopy_v1_Binding,
deepCopy_v1_Capabilities,
deepCopy_v1_ComponentCondition,
deepCopy_v1_ComponentStatus,
deepCopy_v1_ComponentStatusList,
deepCopy_v1_Container,
deepCopy_v1_ContainerPort,
deepCopy_v1_ContainerState,
deepCopy_v1_ContainerStateRunning,
deepCopy_v1_ContainerStateTerminated,
deepCopy_v1_ContainerStateWaiting,
deepCopy_v1_ContainerStatus,
deepCopy_v1_Daemon,
deepCopy_v1_DaemonList,
deepCopy_v1_DaemonSpec,
deepCopy_v1_DaemonStatus,
deepCopy_v1_DeleteOptions,
deepCopy_v1_EmptyDirVolumeSource,
deepCopy_v1_EndpointAddress,
deepCopy_v1_EndpointPort,
deepCopy_v1_EndpointSubset,
deepCopy_v1_Endpoints,
deepCopy_v1_EndpointsList,
deepCopy_v1_EnvVar,
deepCopy_v1_EnvVarSource,
deepCopy_v1_Event,
deepCopy_v1_EventList,
deepCopy_v1_EventSource,
deepCopy_v1_ExecAction,
deepCopy_v1_GCEPersistentDiskVolumeSource,
deepCopy_v1_GitRepoVolumeSource,
deepCopy_v1_GlusterfsVolumeSource,
deepCopy_v1_HTTPGetAction,
deepCopy_v1_Handler,
deepCopy_v1_HostPathVolumeSource,
deepCopy_v1_ISCSIVolumeSource,
deepCopy_v1_Lifecycle,
deepCopy_v1_LimitRange,
deepCopy_v1_LimitRangeItem,
deepCopy_v1_LimitRangeList,
deepCopy_v1_LimitRangeSpec,
deepCopy_v1_List,
deepCopy_v1_ListMeta,
deepCopy_v1_ListOptions,
deepCopy_v1_LoadBalancerIngress,
deepCopy_v1_LoadBalancerStatus,
deepCopy_v1_LocalObjectReference,
deepCopy_v1_NFSVolumeSource,
deepCopy_v1_Namespace,
deepCopy_v1_NamespaceList,
deepCopy_v1_NamespaceSpec,
deepCopy_v1_NamespaceStatus,
deepCopy_v1_Node,
deepCopy_v1_NodeAddress,
deepCopy_v1_NodeCondition,
deepCopy_v1_NodeList,
deepCopy_v1_NodeSpec,
deepCopy_v1_NodeStatus,
deepCopy_v1_NodeSystemInfo,
deepCopy_v1_ObjectFieldSelector,
deepCopy_v1_ObjectMeta,
deepCopy_v1_ObjectReference,
deepCopy_v1_PersistentVolume,
deepCopy_v1_PersistentVolumeClaim,
deepCopy_v1_PersistentVolumeClaimList,
deepCopy_v1_PersistentVolumeClaimSpec,
deepCopy_v1_PersistentVolumeClaimStatus,
deepCopy_v1_PersistentVolumeClaimVolumeSource,
deepCopy_v1_PersistentVolumeList,
deepCopy_v1_PersistentVolumeSource,
deepCopy_v1_PersistentVolumeSpec,
deepCopy_v1_PersistentVolumeStatus,
deepCopy_v1_Pod,
deepCopy_v1_PodAttachOptions,
deepCopy_v1_PodCondition,
deepCopy_v1_PodExecOptions,
deepCopy_v1_PodList,
deepCopy_v1_PodLogOptions,
deepCopy_v1_PodProxyOptions,
deepCopy_v1_PodSpec,
deepCopy_v1_PodStatus,
deepCopy_v1_PodStatusResult,
deepCopy_v1_PodTemplate,
deepCopy_v1_PodTemplateList,
deepCopy_v1_PodTemplateSpec,
deepCopy_v1_Probe,
deepCopy_v1_RBDVolumeSource,
deepCopy_v1_RangeAllocation,
deepCopy_v1_ReplicationController,
deepCopy_v1_ReplicationControllerList,
deepCopy_v1_ReplicationControllerSpec,
deepCopy_v1_ReplicationControllerStatus,
deepCopy_v1_ResourceQuota,
deepCopy_v1_ResourceQuotaList,
deepCopy_v1_ResourceQuotaSpec,
deepCopy_v1_ResourceQuotaStatus,
deepCopy_v1_ResourceRequirements,
deepCopy_v1_SELinuxOptions,
deepCopy_v1_Secret,
deepCopy_v1_SecretList,
deepCopy_v1_SecretVolumeSource,
deepCopy_v1_SecurityContext,
deepCopy_v1_SerializedReference,
deepCopy_v1_Service,
deepCopy_v1_ServiceAccount,
deepCopy_v1_ServiceAccountList,
deepCopy_v1_ServiceList,
deepCopy_v1_ServicePort,
deepCopy_v1_ServiceSpec,
deepCopy_v1_ServiceStatus,
deepCopy_v1_Status,
deepCopy_v1_StatusCause,
deepCopy_v1_StatusDetails,
deepCopy_v1_TCPSocketAction,
deepCopy_v1_ThirdPartyResource,
deepCopy_v1_ThirdPartyResourceData,
deepCopy_v1_ThirdPartyResourceList,
deepCopy_v1_TypeMeta,
deepCopy_v1_Volume,
deepCopy_v1_VolumeMount,
deepCopy_v1_VolumeSource,
deepCopy_runtime_RawExtension,
deepCopy_util_IntOrString,
deepCopy_util_Time,
)
if err != nil {
// if one of the deep copy functions is malformed, detect it immediately.
panic(err)
}
}
| dloeng/kubernetes | pkg/api/v1/deep_copy_generated.go | GO | apache-2.0 | 65,687 |
module Fog
module Storage
class OpenStack
class Real
# Create a new manifest object
#
# Creates an object with a +X-Object-Manifest+ header that specifies the common prefix ("<container>/<prefix>")
# for all uploaded segments. Retrieving the manifest object streams all segments matching this prefix.
# Segments must sort in the order they should be concatenated. Note that any future objects stored in the container
# along with the segments that match the prefix will be included when retrieving the manifest object.
#
# All segments must be stored in the same container, but may be in a different container than the manifest object.
# The default +X-Object-Manifest+ header is set to "+container+/+object+", but may be overridden in +options+
# to specify the prefix and/or the container where segments were stored.
        # If overridden, names should be CGI escaped (excluding spaces) if needed (see {Fog::OpenStack.escape}).
#
# @param container [String] Name for container where +object+ will be stored. Should be < 256 bytes and must not contain '/'
# @param object [String] Name for manifest object.
# @param options [Hash] Config headers for +object+.
# @option options [String] 'X-Object-Manifest' ("container/object") "<container>/<prefix>" for segment objects.
#
# @see http://docs.openstack.org/api/openstack-object-storage/1.0/content/large-object-creation.html
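        #
        # @example Stitch previously uploaded segments into one downloadable object
        #   # Illustrative sketch only: the +storage+ connection and every container,
        #   # object and prefix name below are hypothetical, not part of this API.
        #   # Segments were uploaded as "videos_segments/movie.mp4/0001", ".../0002", ...
        #   storage.put_object_manifest('videos', 'movie.mp4',
        #     'X-Object-Manifest' => 'videos_segments/movie.mp4/')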
def put_object_manifest(container, object, options = {})
path = "#{Fog::OpenStack.escape(container)}/#{Fog::OpenStack.escape(object)}"
headers = {'X-Object-Manifest' => path}.merge(options)
request(
:expects => 201,
:headers => headers,
:method => 'PUT',
:path => path
)
end
end
end
end
end
| luna1x/chef-server | vendor/ruby/1.9.1/gems/fog-1.15.0/lib/fog/openstack/requests/storage/put_object_manifest.rb | Ruby | apache-2.0 | 1,943 |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<!-- Favicon -->
<link href="../images/vl_blue.ico" type="image/x-icon" rel="icon"></link>
<link href="../images/vl_blue.ico" type="image/x-icon" rel="shortcut icon"></link>
<!-- Page title -->
<title>VLFeat - Documentation - C API</title>
<!-- Stylesheets -->
<link href="../vlfeat.css" type="text/css" rel="stylesheet"></link>
<link href="../pygmentize.css" type="text/css" rel="stylesheet"></link>
<style xml:space="preserve">
/* fixes a conflict between Pygmentize and MathJax */
.MathJax .mo, .MathJax .mi {color: inherit ! important}
</style>
<link rel="stylesheet" type="text/css" href="doxygen.css"></link>
<!-- Scripts-->
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<!-- MathJax -->
<script xml:space="preserve" type="text/x-mathjax-config">
MathJax.Hub.Config({
tex2jax: {
inlineMath: [ ['$','$'], ['\\(','\\)'] ],
processEscapes: true,
},
TeX: {
Macros: {
balpha: '\\boldsymbol{\\alpha}',
bc: '\\mathbf{c}',
be: '\\mathbf{e}',
bg: '\\mathbf{g}',
bq: '\\mathbf{q}',
bu: '\\mathbf{u}',
bv: '\\mathbf{v}',
bw: '\\mathbf{w}',
bx: '\\mathbf{x}',
by: '\\mathbf{y}',
bz: '\\mathbf{z}',
bsigma: '\\mathbf{\\sigma}',
sign: '\\operatorname{sign}',
diag: '\\operatorname{diag}',
real: '\\mathbb{R}',
},
equationNumbers: { autoNumber: 'AMS' }
}
});
</script>
<script src="http://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML" xml:space="preserve" type="text/javascript"></script>
<!-- Google Custom Search -->
<script xml:space="preserve">
(function() {
var cx = '003215582122030917471:oq23albfeam';
var gcse = document.createElement('script'); gcse.type = 'text/javascript'; gcse.async = true;
gcse.src = (document.location.protocol == 'https:' ? 'https:' : 'http:') +
'//www.google.com/cse/cse.js?cx=' + cx;
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(gcse, s);
})();
</script>
<!-- Google Analytics -->
<script xml:space="preserve" type="text/javascript">
var _gaq = _gaq || [];
_gaq.push(['_setAccount', 'UA-4936091-2']);
_gaq.push(['_trackPageview']);
(function() {
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
})();
</script>
</head>
<!-- Body Start -->
<body>
<div id="header-section">
<div id="header">
<!-- Google CSE Search Box -->
<div id="google" class="gcse-searchbox-only" data-resultsUrl="http://www.vlfeat.org/search.html"></div>
<h1 id="id-13"><a shape="rect" href="../index.html" class="plain"><span id="vlfeat">VLFeat</span><span id="dotorg">.org</span></a></h1>
</div>
</div>
<div id="headbanner-section">
<div id="headbanner">
Documentation - C API
</div>
</div>
<div id="content-section">
<div id="content-wrapper">
<div id="sidebar"> <!-- Navigation Start -->
<ul>
<li><a href="../index.html">Home</a>
</li>
<li><a href="../download.html">Download</a>
</li>
<li><a href="../overview/tut.html">Tutorials</a>
</li>
<li><a href="../applications/apps.html">Applications</a>
</li>
<li><a href="../doc.html">Documentation</a>
<ul>
<li><a href="../matlab/matlab.html">Matlab API</a>
</li>
<li><a href="index.html" class='active' >C API</a>
</li>
<li><a href="../man/man.html">Man pages</a>
</li>
</ul></li>
</ul>
</div> <!-- sidebar -->
<div id="content">
<!-- <pagestyle href="%pathto:root;api/tabs.css"/> -->
<div class="doxygen">
<div id="top">
<!-- Generated by Doxygen 1.8.1.1 -->
<div id="navrow1" class="tabs">
<ul class="tablist">
<li><a href="index.html"><span>Main Page</span></a></li>
<li class="current"><a href="pages.html"><span>Related Pages</span></a></li>
<li><a href="annotated.html"><span>Data Structures</span></a></li>
<li><a href="files.html"><span>Files</span></a></li>
</ul>
</div>
<div id="nav-path" class="navpath">
<ul>
<li class="navelem"><a class="el" href="index.html">Vision Lab Features Library (VLFeat)</a></li><li class="navelem"><a class="el" href="vlad.html">Vector of Locally Aggregated Descriptors (VLAD) encoding</a></li> </ul>
</div>
</div><!-- top -->
<div class="header">
<div class="headertitle">
<div class="title">VLAD fundamentals </div> </div>
</div><!--header-->
<div class="contents">
<div class="toc"><h3>Table of Contents</h3>
<ul><li class="level1"><a href="#vlad-normalization">VLAD normalization</a></li>
</ul>
</div>
<div class="textblock"><p>This page describes the <em>Vector of Locally Aggregated Descriptors</em> (VLAD) image encoding of <a class="el" href="citelist.html#CITEREF_jegou10aggregating">[9]</a> . See <a class="el" href="vlad.html">Vector of Locally Aggregated Descriptors (VLAD) encoding</a> for an overview of the C API.</p>
<p>VLAD is a <em>feature encoding and pooling</em> method, similar to <a class="el" href="fisher.html">Fisher vectors</a>. VLAD encodes a set of local feature descriptors \(I=(\bx_1,\dots,\bx_n)\) extracted from an image using a dictionary built using a clustering method such as <a class="el" href="gmm.html">Gaussian Mixture Models (GMM)</a> or <a class="el" href="kmeans.html">K-means clustering</a>. Let \(q_{ik}\) be the strength of the association of data vector \(\bx_i\) to cluster \(\mu_k\), such that \(q_{ik} \geq 0\) and \(\sum_{k=1}^K q_{ik} = 1\). The association may be either soft (e.g. obtained as the posterior probabilities of the GMM clusters) or hard (e.g. obtained by vector quantization with K-means).</p>
<p>\(\mu_k\) are the cluster <em>means</em>, vectors of the same dimension as the data \(\bx_i\). VLAD encodes feature \(\bx\) by considering the <em>residuals</em> </p>
<p class="formulaDsp">
\[ \bv_k = \sum_{i=1}^{N} q_{ik} (\bx_{i} - \mu_k). \]
</p>
<p> The residuals are stacked together to obtain the vector </p>
<p class="formulaDsp">
\[ \hat\Phi(I) = \begin{bmatrix} \vdots \\ \bv_k \\ \vdots \end{bmatrix} \]
</p>
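<p>For instance, in the common hard-assignment case (one of the two association types mentioned above, not the only one), K-means vector quantization gives \(q_{ik} = 1\) when \(\mu_k\) is the cluster mean closest to \(\bx_i\) and \(q_{ik} = 0\) otherwise, so each block is simply the sum of residuals of the features assigned to that cluster:</p>
<p class="formulaDsp">
\[ \bv_k = \sum_{i \,:\, k = \operatorname{argmin}_j \|\bx_i - \mu_j\|_2} (\bx_i - \mu_k). \]
</p>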
<p>Before the VLAD encoding is used it is usually normalized, as explained <a class="el" href="vlad-fundamentals.html#vlad-normalization">VLAD normalization</a> next.</p>
<h1><a class="anchor" id="vlad-normalization"></a>
VLAD normalization</h1>
<p>The VLFeat VLAD implementation supports a number of different normalization strategies. These are optionally applied in this order (a combined sketch follows the list):</p>
<ul>
<li><b>Component-wise mass normalization.</b> Each vector \(\bv_k\) is divided by the total mass of features associated to it \(\sum_{i=1}^N q_{ik}\).</li>
</ul>
<ul>
<li><b>Square-rooting.</b> The function \(\sign(z)\sqrt{|z|}\) is applied to all scalar components of the VLAD descriptor.</li>
</ul>
<ul>
<li><b>Component-wise \(l^2\) normalization.</b> The vectors \(\bv_k\) are divided by their norm \(\|\bv_k\|_2\).</li>
</ul>
<ul>
<li><b>Global \(l^2\) normalization.</b> The VLAD descriptor \(\hat\Phi(I)\) is divided by its norm \(\|\hat\Phi(I)\|_2\). </li>
</ul>
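<p>As an illustrative summary only (assuming all four optional steps are enabled and applied in the order listed above), the final descriptor is obtained by composing them:</p>
<p class="formulaDsp">
\[ \bar{\bv}_k = \frac{\bv_k}{\sum_{i=1}^{N} q_{ik}}, \qquad \tilde{\bv}_k = \frac{\sign(\bar{\bv}_k) \odot \sqrt{|\bar{\bv}_k|}}{\bigl\| \sign(\bar{\bv}_k) \odot \sqrt{|\bar{\bv}_k|} \bigr\|_2}, \qquad \Phi(I) = \frac{\tilde{\Phi}(I)}{\|\tilde{\Phi}(I)\|_2}, \quad \tilde{\Phi}(I) = \begin{bmatrix} \tilde{\bv}_1 \\ \vdots \\ \tilde{\bv}_K \end{bmatrix}. \]
</p>
<p>Here \(\sign(\cdot)\), \(|\cdot|\) and the square root act component-wise, and \(\odot\) denotes the component-wise product.</p>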
</div></div><!-- contents -->
<!-- Doc Here -->
</div>
</div>
<div class="clear"> </div>
</div>
</div> <!-- content-section -->
<div id="footer-section">
<div id="footer">
© 2007-13 The authors of VLFeat
</div> <!-- footer -->
</div> <!-- footer section -->
</body>
<!-- Body ends -->
</html>
| iscoe/cajal3d-i2g | external/vlfeat/doc/api/vlad-fundamentals.html | HTML | apache-2.0 | 7,874 |
> Written by:
> Proofread by:
# Full-Screen Immersive Apps
| craftsmanBai/android-training-course-in-chinese | SOURCE/ui/system-ui/immersive.md | Markdown | apache-2.0 | 46 |
// Bug #260 - int is not aliased with volatile int
// RESULT: yes, it flows through.
int VERDICT_SAFE;
int CURRENTLY_UNSAFE;
int main()
{
int volatile a = 4;
	int * p = &a;
	p = &a;
a = a - 4;
if (*p != 0){
ERROR: goto ERROR;
}
return 0;
}
| TommesDee/cpachecker | test/original-sources/LDV-tests/volatile_alias.c | C | apache-2.0 | 319 |
/*
* Copyright (C) 2014 Michael Pardo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ollie.internal;
import com.google.testing.compile.JavaFileObjects;
import ollie.internal.codegen.Errors;
import org.junit.Test;
import javax.tools.JavaFileObject;
import static com.google.testing.compile.JavaSourceSubjectFactory.javaSource;
import static ollie.internal.ProcessorTestUtilities.ollieProcessors;
import static org.truth0.Truth.ASSERT;
public class ModelAdapterTest {
@Test
public void modelAdapter() {
JavaFileObject source = JavaFileObjects.forSourceLines("ollie.test.Note",
"package ollie.test;",
"import java.util.Date;",
"import ollie.Model;",
"import ollie.annotation.Column;",
"import ollie.annotation.NotNull;",
"import ollie.annotation.Table;",
"@Table(\"notes\")",
"public class Note extends Model {",
" public static final String TITLE = \"title\";",
" public static final String BODY = \"body\";",
" public static final String DATE = \"date\";",
" @Column(TITLE) public String title;",
" @Column(BODY) @NotNull public String body;",
" @Column(DATE) public Date date;",
"}"
);
JavaFileObject expectedSource = JavaFileObjects.forSourceLines("ollie/Note$$ModelAdapter",
"package ollie;",
"import android.content.ContentValues;",
"import android.database.Cursor;",
"import android.database.sqlite.SQLiteDatabase;",
"import ollie.internal.ModelAdapter;",
"import ollie.test.Note;",
"public final class Note$$ModelAdapter extends ModelAdapter<Note> {",
" public final Class<? extends Model> getModelType() {",
" return Note.class;",
" }",
" public final String getTableName() {",
" return \"notes\";",
" }",
" public final String getSchema() {",
" return \"CREATE TABLE IF NOT EXISTS notes (\" +",
" \"_id INTEGER PRIMARY KEY AUTOINCREMENT, \" +",
" \"title TEXT, \" +",
" \"body TEXT NOT NULL, \" +",
" \"date INTEGER)\";",
" }",
" public final void load(Note entity, Cursor cursor) {",
" final int _idIndex = cursor.getColumnIndex(\"_id\");",
" final int titleIndex = cursor.getColumnIndex(\"title\")",
" final int bodyIndex = cursor.getColumnIndex(\"body\")",
" final int dateIndex = cursor.getColumnIndex(\"date\")",
" entity.id = _idIndex >= 0 ? cursor.getLong(_idIndex) : null;",
" entity.title = titleIndex >= 0 ? cursor.getString(titleIndex) : null;",
" entity.body = bodyIndex >= 0 ? cursor.getString(bodyIndex) : null;",
" entity.date = dateIndex >= 0 ? Ollie.getTypeAdapter(java.util.Date.class)",
" .deserialize(cursor.getLong(dateIndex)) : null;",
" }",
" public final Long save(Note entity, SQLiteDatabase db) {",
" ContentValues values = new ContentValues();",
" values.put(\"_id\", entity.id);",
" values.put(\"title\", entity.title);",
" values.put(\"body\", entity.body);",
" values.put(\"date\", (java.lang.Long) Ollie.getTypeAdapter(java.util.Date.class)",
" .serialize(entity.date));",
" return insertOrUpdate(entity, db, values);",
" }",
" public final void delete(Note entity, SQLiteDatabase db) {",
" db.delete(\"notes\", \"_id=?\", new String[]{entity.id.toString()});",
" }",
"}"
);
ASSERT.about(javaSource()).that(source)
.processedWith(ollieProcessors())
.compilesWithoutError()
.and()
.generatesSources(expectedSource);
}
@Test
public void tablesAreClasses() {
JavaFileObject source = JavaFileObjects.forSourceLines("ollie.test.Note",
"package ollie.test;",
"import java.util.Date;",
"import ollie.Model;",
"import ollie.annotation.Column;",
"import ollie.annotation.NotNull;",
"import ollie.annotation.Table;",
"@Table(\"notes\")",
"public class Note extends Model {",
" public static final String TITLE = \"title\";",
" public static final String BODY = \"body\";",
" public static final String DATE = \"date\";",
" @Table(TITLE) public String title;",
" @Table(BODY) @NotNull public String body;",
" @Table(DATE) public Date date;",
"}"
);
ASSERT.about(javaSource()).that(source)
.processedWith(ollieProcessors())
.failsToCompile();
}
@Test
public void columnsAreFields() {
JavaFileObject source = JavaFileObjects.forSourceLines("ollie.test.Note",
"package ollie.test;",
"import java.util.Date;",
"import ollie.Model;",
"import ollie.annotation.Column;",
"import ollie.annotation.NotNull;",
"import ollie.annotation.Table;",
"@Column(\"notes\")",
"public class Note extends Model {",
" public static final String TITLE = \"title\";",
" public static final String BODY = \"body\";",
" public static final String DATE = \"date\";",
" @Column(TITLE) public String title;",
" @Column(BODY) @NotNull public String body;",
" @Column(DATE) public Date date;",
"}"
);
ASSERT.about(javaSource()).that(source)
.processedWith(ollieProcessors())
.failsToCompile()
.withErrorContaining(Errors.COLUMN_TYPE_ERROR);
}
@Test
public void columnsAreUnique() {
JavaFileObject source = JavaFileObjects.forSourceLines("ollie.test.Note",
"package ollie.test;",
"import java.util.Date;",
"import ollie.Model;",
"import ollie.annotation.Column;",
"import ollie.annotation.NotNull;",
"import ollie.annotation.Table;",
"@Table(\"notes\")",
"public class Note extends Model {",
" public static final String TITLE = \"title\";",
" public static final String DATE = \"date\";",
" @Column(TITLE) public String title;",
" @Column(TITLE) @NotNull public String body;",
" @Column(DATE) public Date date;",
"}"
);
ASSERT.about(javaSource()).that(source)
.processedWith(ollieProcessors())
.failsToCompile()
.withErrorContaining(Errors.COLUMN_DUPLICATE_ERROR);
}
}
| pardom/Ollie | compiler/src/test/java/ollie/internal/ModelAdapterTest.java | Java | apache-2.0 | 6,449 |
' Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
Imports System.Collections.Immutable
Imports Microsoft.CodeAnalysis.CodeActions
Imports Microsoft.CodeAnalysis.CodeRefactorings
Imports Microsoft.CodeAnalysis.IntroduceVariable
Namespace Microsoft.CodeAnalysis.Editor.VisualBasic.UnitTests.CodeRefactorings.IntroduceVariable
Public Class IntroduceVariableTests
Inherits AbstractVisualBasicCodeActionTest
Protected Overrides Function CreateCodeRefactoringProvider(workspace As Workspace, parameters As TestParameters) As CodeRefactoringProvider
Return New IntroduceVariableCodeRefactoringProvider()
End Function
Protected Overrides Function MassageActions(actions As ImmutableArray(Of CodeAction)) As ImmutableArray(Of CodeAction)
Return GetNestedActions(actions)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function Test1() As Task
Await TestInRegularAndScriptAsync(
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
Console.WriteLine([|1 + 1|])
End Sub
End Module",
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
Const {|Rename:Value|} As Integer = 1 + 1
Console.WriteLine(Value)
End Sub
End Module",
index:=2)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function Test2() As Task
Await TestInRegularAndScriptAsync(
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
Console.WriteLine([|1 + 1|])
End Sub
End Module",
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
Const {|Rename:Value|} As Integer = 1 + 1
Console.WriteLine(Value)
End Sub
End Module",
index:=3)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestInSingleLineIfExpression1() As Task
Await TestInRegularAndScriptAsync(
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
If goo([|1 + 1|]) Then bar(1 + 1)
End Sub
End Module",
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
Const {|Rename:V|} As Integer = 1 + 1
If goo(V) Then bar(1 + 1)
End Sub
End Module",
index:=2)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestInSingleLineIfExpression2() As Task
Await TestInRegularAndScriptAsync(
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
If goo([|1 + 1|]) Then bar(1 + 1)
End Sub
End Module",
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
Const {|Rename:V|} As Integer = 1 + 1
If goo(V) Then bar(V)
End Sub
End Module",
index:=3)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestInSingleLineIfStatement1() As Task
Await TestInRegularAndScriptAsync(
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
If goo(1 + 1) Then bar([|1 + 1|])
End Sub
End Module",
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
If goo(1 + 1) Then
Const {|Rename:V|} As Integer = 1 + 1
bar(V)
End If
End Sub
End Module",
index:=2)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestInSingleLineIfStatement2() As Task
Await TestInRegularAndScriptAsync(
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
If goo(1 + 1) Then bar([|1 + 1|])
End Sub
End Module",
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
Const {|Rename:V|} As Integer = 1 + 1
If goo(V) Then bar(V)
End Sub
End Module",
index:=3)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestNoIntroduceFieldOnMethodTypeParameter() As Task
Dim source = "Module Program
Sub Main(Of T)()
Goo([|CType(2.ToString(), T)|])
End Sub
End Module"
Await TestExactActionSetOfferedAsync(
source,
expectedActionSet:={
String.Format(FeaturesResources.Introduce_local_for_0, "CType(2.ToString(), T)"),
String.Format(FeaturesResources.Introduce_local_for_all_occurrences_of_0, "CType(2.ToString(), T)")})
' Verifies "Introduce field ..." is missing
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestNoIntroduceFieldOnMethodParameter() As Task
Dim source = "Module Program
Sub Main(x As Integer)
Goo([|x.ToString()|])
End Sub
End Module"
Await TestExactActionSetOfferedAsync(
source,
expectedActionSet:={
String.Format(FeaturesResources.Introduce_local_for_0, "x.ToString()"),
String.Format(FeaturesResources.Introduce_local_for_all_occurrences_of_0, "x.ToString()")})
' Verifies "Introduce field ..." is missing
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestNoRefactoringOnExpressionInAssignmentStatement() As Task
Dim source = "Module Program
Sub Main(x As Integer)
Dim r = [|x.ToString()|]
End Sub
End Module"
Await TestMissingInRegularAndScriptAsync(source)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestLocalGeneratedInInnerBlock1() As Task
Dim source = "Module Program
Sub Main(x As Integer)
If True Then
Goo([|x.ToString()|])
End If
End Sub
End Module"
Dim expected = "Module Program
Sub Main(x As Integer)
If True Then
Dim {|Rename:v|} As String = x.ToString()
Goo(v)
End If
End Sub
End Module"
Await TestInRegularAndScriptAsync(source, expected)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestLocalGeneratedInInnerBlock2() As Task
Dim source = "Module Program
Sub Main(x As Integer)
If True Then
Goo([|x.ToString()|])
End If
End Sub
End Module"
Dim expected = "Module Program
Sub Main(x As Integer)
If True Then
Dim {|Rename:v|} As String = x.ToString()
Goo(v)
End If
End Sub
End Module"
Await TestInRegularAndScriptAsync(source, expected)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestLocalFromSingleExpressionInAnonType() As Task
Dim source = "Module Program
Sub Main(x As Integer)
Dim f1 = New With {.SomeString = [|x.ToString()|]}
End Sub
End Module"
Dim expected = "Module Program
Sub Main(x As Integer)
Dim {|Rename:v|} As String = x.ToString()
Dim f1 = New With {.SomeString = v}
End Sub
End Module"
Await TestInRegularAndScriptAsync(source, expected)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestLocalFromMultipleExpressionsInAnonType() As Task
Dim source = "Module Program
Sub Main(x As Integer)
Dim f1 = New With {.SomeString = [|x.ToString()|], .SomeOtherString = x.ToString()}
Dim f2 = New With {.SomeString = x.ToString(), .SomeOtherString = x.ToString()}
Dim str As String = x.ToString()
End Sub
End Module"
Dim expected = "Module Program
Sub Main(x As Integer)
Dim {|Rename:v|} As String = x.ToString()
Dim f1 = New With {.SomeString = v, .SomeOtherString = v}
Dim f2 = New With {.SomeString = v, .SomeOtherString = v}
Dim str As String = v
End Sub
End Module"
Await TestInRegularAndScriptAsync(source, expected, index:=1)
End Function
<Fact(), Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestLocalFromInferredFieldInitializer() As Task
Dim source = "Imports System
Class C
Sub M()
Dim a As New With {[|Environment.TickCount|]}
End Sub
End Class"
Dim expected = "Imports System
Class C
Sub M()
Dim {|Rename:tickCount|} As Integer = Environment.TickCount
Dim a As New With {tickCount}
End Sub
End Class"
Await TestInRegularAndScriptAsync(source, expected, index:=1)
End Function
<Fact(), Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestLocalFromYieldStatement() As Task
Dim source = "Imports System
Class C
Iterator Function F() As IEnumerable(Of Integer)
Yield [|Environment.TickCount * 2|]
End Function
End Class"
Dim expected = "Imports System
Class C
Iterator Function F() As IEnumerable(Of Integer)
Dim {|Rename:v|} As Integer = Environment.TickCount * 2
Yield v
End Function
End Class"
Await TestInRegularAndScriptAsync(source, expected, index:=1)
End Function
<Fact(), Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestLocalFromWhileStatement() As Task
Dim source = "Class C
Sub M()
Dim x = 1
While [|x = 1|]
End While
End Sub
End Class"
Dim expected = "Class C
Sub M()
Dim x = 1
Dim {|Rename:v|} As Boolean = x = 1
While v
End While
End Sub
End Class"
Await TestInRegularAndScriptAsync(source, expected, index:=1)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestLocalFromSingleExpressionInObjectInitializer() As Task
Dim source = "Module Program
Structure GooStruct
Dim GooMember1 As String
End Structure
Sub Main(x As Integer)
Dim f1 = New GooStruct With {.GooMember1 = [|""t"" + ""test""|]}
End Sub
End Module"
Dim expected = "Module Program
Structure GooStruct
Dim GooMember1 As String
End Structure
Sub Main(x As Integer)
Const {|Rename:V|} As String = ""t"" + ""test""
Dim f1 = New GooStruct With {.GooMember1 = V}
End Sub
End Module"
Await TestInRegularAndScriptAsync(source, expected, index:=2)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestLocalFromMultipleExpressionsInObjectInitializer() As Task
Dim code =
"
Module Program
Structure GooStruct
Dim GooMember1 As String
Dim GooMember2 As String
End Structure
Sub Main(x As Integer)
Dim f1 = New GooStruct With {.GooMember1 = [|""t"" + ""test""|], .GooMember2 = ""t"" + ""test""}
Dim f2 = New GooStruct With {.GooMember1 = ""t"" + ""test"", .GooMember2 = ""t"" + ""test""}
Dim str As String = ""t"" + ""test""
End Sub
End Module
"
Dim expected =
"
Module Program
Structure GooStruct
Dim GooMember1 As String
Dim GooMember2 As String
End Structure
Sub Main(x As Integer)
Const {|Rename:V|} As String = ""t"" + ""test""
Dim f1 = New GooStruct With {.GooMember1 = V, .GooMember2 = V}
Dim f2 = New GooStruct With {.GooMember1 = V, .GooMember2 = V}
Dim str As String = V
End Sub
End Module
"
Await TestInRegularAndScriptAsync(code, expected, index:=3)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestFieldFromMultipleExpressionsInAnonType() As Task
Dim source = "Class Program
Dim q = New With {.str = [|""t"" + ""test""|]}
Dim r = New With {.str = ""t"" + ""test""}
Sub Goo()
Dim x = ""t"" + ""test""
End Sub
End Class"
Dim expected = "Class Program
Private Const {|Rename:V|} As String = ""t"" + ""test""
Dim q = New With {.str = V}
Dim r = New With {.str = V}
Sub Goo()
Dim x = V
End Sub
End Class"
Await TestInRegularAndScriptAsync(source, expected, index:=1)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestPrivateFieldFromExpressionInField() As Task
Dim source = "Class Program
Dim x = Goo([|2 + 2|])
End Class"
Dim expected = "Class Program
Private Const {|Rename:V|} As Integer = 2 + 2
Dim x = Goo(V)
End Class"
Await TestInRegularAndScriptAsync(source, expected)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestNoLocalFromExpressionInField() As Task
Dim source = "Class Program
Dim x = Goo([|2 + 2|])
End Class"
Await TestExactActionSetOfferedAsync(source, {String.Format(FeaturesResources.Introduce_constant_for_0, "2 + 2"), String.Format(FeaturesResources.Introduce_constant_for_all_occurrences_of_0, "2 + 2")})
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestSharedModifierAbsentInGeneratedModuleFields() As Task
Dim source = "Module Program
Private ReadOnly y As Integer = 1
Dim x = Goo([|2 + y|])
End Module"
Dim expected = "Module Program
Private ReadOnly y As Integer = 1
Private ReadOnly {|Rename:v|} As Integer = 2 + y
Dim x = Goo(v)
End Module"
Await TestInRegularAndScriptAsync(source, expected)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestSingleLocalInsertLocation() As Task
Dim source = "Class Program
Sub Method1()
Dim v1 As String = ""TEST""
Dim v2 As Integer = 2 + 2
Goo([|2 + 2|])
End Sub
End Class"
Dim expected = "Class Program
Sub Method1()
Dim v1 As String = ""TEST""
Dim v2 As Integer = 2 + 2
Const {|Rename:V|} As Integer = 2 + 2
Goo(V)
End Sub
End Class"
Await TestInRegularAndScriptAsync(source, expected, index:=2)
End Function
#Region "Parameter context"
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestConstantFieldGenerationForParameterSingleOccurrence() As Task
' This is incorrect: the field type should be Integer, not Object
Dim source = "Module Module1
Sub Goo(Optional x As Integer = [|42|])
End Sub
End Module"
Dim expected = "Module Module1
Private Const {|Rename:V|} As Integer = 42
Sub Goo(Optional x As Integer = V)
End Sub
End Module"
Await TestInRegularAndScriptAsync(source, expected)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestConstantFieldGenerationForParameterAllOccurrences() As Task
' This is incorrect: the field type should be Integer, not Object
Dim source = "Module Module1
Sub Bar(Optional x As Integer = 42)
End Sub
Sub Goo(Optional x As Integer = [|42|])
End Sub
End Module"
Dim expected = "Module Module1
Private Const {|Rename:V|} As Integer = 42
Sub Bar(Optional x As Integer = V)
End Sub
Sub Goo(Optional x As Integer = V)
End Sub
End Module"
Await TestInRegularAndScriptAsync(source, expected, index:=1)
End Function
#End Region
<WorkItem(540269, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/540269")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestReplaceDottedExpression() As Task
Await TestInRegularAndScriptAsync(
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
Console.WriteLine([|Goo.someVariable|])
Console.WriteLine(Goo.someVariable)
End Sub
End Module
Friend Class Goo
Shared Public someVariable As Integer
End Class",
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
Dim {|Rename:someVariable|} As Integer = Goo.someVariable
Console.WriteLine(someVariable)
Console.WriteLine(someVariable)
End Sub
End Module
Friend Class Goo
Shared Public someVariable As Integer
End Class",
index:=1)
End Function
<WorkItem(540457, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/540457")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestReplaceSingleLineIfWithMultiLine1() As Task
Await TestInRegularAndScriptAsync(
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
If True Then Goo([|2 + 2|])
End Sub
End Module",
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
If True Then
Const {|Rename:V|} As Integer = 2 + 2
Goo(V)
End If
End Sub
End Module",
index:=2)
End Function
<WorkItem(540457, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/540457")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestReplaceSingleLineIfWithMultiLine2() As Task
Await TestInRegularAndScriptAsync(
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
If True Then Goo([|1 + 1|]) Else Bar(1 + 1)
End Sub
End Module",
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
If True Then
Const {|Rename:V|} As Integer = 1 + 1
Goo(V)
Else
Bar(1 + 1)
End If
End Sub
End Module",
index:=2)
End Function
<WorkItem(540457, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/540457")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestReplaceSingleLineIfWithMultiLine3() As Task
Await TestInRegularAndScriptAsync(
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
If True Then Goo([|1 + 1|]) Else Bar(1 + 1)
End Sub
End Module",
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
Const {|Rename:V|} As Integer = 1 + 1
If True Then Goo(V) Else Bar(V)
End Sub
End Module",
index:=3)
End Function
<WorkItem(540457, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/540457")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestReplaceSingleLineIfWithMultiLine4() As Task
Await TestInRegularAndScriptAsync(
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
If True Then Goo(1 + 1) Else Bar([|1 + 1|])
End Sub
End Module",
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
If True Then
Goo(1 + 1)
Else
Const {|Rename:V|} As Integer = 1 + 1
Bar(V)
End If
End Sub
End Module",
index:=2)
End Function
<WorkItem(540468, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/540468")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestCantExtractMethodTypeParameterToFieldCount() As Task
Await TestActionCountAsync(
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(Of T)(x As Integer)
Goo([|CType(2.ToString(), T)|])
End Sub
End Module",
count:=2)
End Function
<WorkItem(540468, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/540468")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestCantExtractMethodTypeParameterToField() As Task
Await TestInRegularAndScriptAsync(
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(Of T)(x As Integer)
Goo([|CType(2.ToString(), T)|])
End Sub
End Module",
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(Of T)(x As Integer)
Dim {|Rename:t|} As T = CType(2.ToString(), T)
Goo(t)
End Sub
End Module")
End Function
<WorkItem(540489, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/540489")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestOnlyFieldsInsideConstructorInitializer() As Task
Await TestActionCountAsync(
"Class Goo
Sub New()
Me.New([|2 + 2|])
End Sub
Sub New(v As Integer)
End Sub
End Class",
count:=2)
Await TestInRegularAndScriptAsync(
"Class Goo
Sub New()
Me.New([|2 + 2|])
End Sub
Sub New(v As Integer)
End Sub
End Class",
"Class Goo
Private Const {|Rename:V|} As Integer = 2 + 2
Sub New()
Me.New(V)
End Sub
Sub New(v As Integer)
End Sub
End Class")
End Function
<WorkItem(540485, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/540485")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestIntroduceLocalForConstantExpression() As Task
Await TestInRegularAndScriptAsync(
"Module Program
Sub Main(args As String())
Dim s As String() = New String([|10|]) {}
End Sub
End Module",
"Module Program
Sub Main(args As String())
Const {|Rename:V|} As Integer = 10
Dim s As String() = New String(V) {}
End Sub
End Module",
index:=3)
End Function
<WorkItem(1065689, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1065689")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestIntroduceLocalForConstantExpressionWithTrailingTrivia() As Task
Await TestInRegularAndScriptAsync(
"
Class C
Private Function GetX() As Object
Return [|""c d
"" + ' comment 1
""a
b"" ' comment 2|]
End Function
End Class
",
"
Class C
Private Function GetX() As Object
Const {|Rename:V|} As String = ""c d
"" + ' comment 1
""a
b""
Return V ' comment 2
End Function
End Class
",
index:=3)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestIntroduceFieldWithTrailingTrivia() As Task
Await TestInRegularAndScriptAsync(
"
Class C
Private Sub S()
Dim x = 1 + [|2|] ' comment
End Sub
End Class
",
"
Class C
Private Const {|Rename:V|} As Integer = 2
Private Sub S()
Dim x = 1 + V ' comment
End Sub
End Class
",
index:=1)
End Function
<WorkItem(540487, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/540487")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestFormattingForPartialExpression() As Task
Dim code =
"
Module Program
Sub Main()
Dim i = [|1 + 2|] + 3
End Sub
End Module
"
Dim expected =
"
Module Program
Sub Main()
Const {|Rename:V|} As Integer = 1 + 2
Dim i = V + 3
End Sub
End Module
"
Await TestInRegularAndScriptAsync(code, expected, index:=2)
End Function
<WorkItem(540491, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/540491")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestInAttribute1() As Task
Await TestInRegularAndScriptAsync(
"<Attr([|2 + 2|])>
Class Goo
End Class
Friend Class AttrAttribute
Inherits Attribute
End Class",
"<Attr(Goo.V)>
Class Goo
Friend Const {|Rename:V|} As Integer = 2 + 2
End Class
Friend Class AttrAttribute
Inherits Attribute
End Class")
End Function
<WorkItem(540490, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/540490")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestInMyClassNew() As Task
Await TestInRegularAndScriptAsync(
"Class Goo
Sub New()
MyClass.New([|42|])
End Sub
Sub New(x As Integer)
End Sub
End Class",
"Class Goo
Private Const {|Rename:X|} As Integer = 42
Sub New()
MyClass.New(X)
End Sub
Sub New(x As Integer)
End Sub
End Class")
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestSingleToMultiLineIf1() As Task
Await TestInRegularAndScriptAsync(
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
If True Then Goo([|2 + 2|]) Else Bar(2 + 2)
End Sub
End Module",
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
If True Then
Const {|Rename:V|} As Integer = 2 + 2
Goo(V)
Else
Bar(2 + 2)
End If
End Sub
End Module",
index:=2)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestSingleToMultiLineIf2() As Task
Await TestInRegularAndScriptAsync(
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
If True Then Goo([|2 + 2|]) Else Bar(2 + 2)
End Sub
End Module",
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
Const {|Rename:V|} As Integer = 2 + 2
If True Then Goo(V) Else Bar(V)
End Sub
End Module",
index:=3)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestSingleToMultiLineIf3() As Task
Await TestInRegularAndScriptAsync(
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
If True Then Goo(2 + 2) Else Bar([|2 + 2|])
End Sub
End Module",
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
If True Then
Goo(2 + 2)
Else
Const {|Rename:V|} As Integer = 2 + 2
Bar(V)
End If
End Sub
End Module",
index:=2)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestSingleToMultiLineIf4() As Task
Await TestInRegularAndScriptAsync(
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
If True Then Goo(2 + 2) Else Bar([|2 + 2|])
End Sub
End Module",
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
Const {|Rename:V|} As Integer = 2 + 2
If True Then Goo(V) Else Bar(V)
End Sub
End Module",
index:=3)
End Function
<WorkItem(541604, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/541604")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestAttribute() As Task
Await TestInRegularAndScriptAsync(
"<Attr([|2 + 2|])>
Class Goo
End Class
Friend Class AttrAttribute
Inherits System.Attribute
End Class",
"<Attr(Goo.V)>
Class Goo
Friend Const {|Rename:V|} As Integer = 2 + 2
End Class
Friend Class AttrAttribute
Inherits System.Attribute
End Class")
End Function
<WorkItem(542092, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/542092")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestRangeArgumentLowerBound1() As Task
Await TestMissingInRegularAndScriptAsync("Module M
Sub Main()
Dim x() As Integer
ReDim x([|0|] To 5)
End Sub
End Module")
End Function
<WorkItem(542092, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/542092")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestRangeArgumentLowerBound2() As Task
Dim code =
"
Module M
Sub Main()
Dim x() As Integer
ReDim x(0 To 5)
Dim a = [|0|] + 1
End Sub
End Module
"
Dim expected =
"
Module M
Sub Main()
Dim x() As Integer
ReDim x(0 To 5)
Const {|Rename:V|} As Integer = 0
Dim a = V + 1
End Sub
End Module
"
Await TestInRegularAndScriptAsync(code, expected, index:=3)
End Function
<WorkItem(543029, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/543029"), WorkItem(542963, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/542963"), WorkItem(542295, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/542295")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestUntypedExpression() As Task
Await TestInRegularAndScriptAsync(
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
Dim q As Object
If True Then q = [|Sub()
End Sub|]
End Sub
End Module",
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
Dim q As Object
If True Then
Dim {|Rename:p|} As Object = Sub()
End Sub
q = p
End If
End Sub
End Module")
End Function
<WorkItem(542374, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/542374")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestFieldConstantInAttribute1() As Task
Await TestInRegularAndScriptAsync(
"<Goo(2 + 3 + 4)>
Module Program
Dim x = [|2 + 3|] + 4
End Module
Friend Class GooAttribute
Inherits Attribute
Sub New(x As Integer)
End Sub
End Class",
"<Goo(2 + 3 + 4)>
Module Program
Private Const {|Rename:V|} As Integer = 2 + 3
Dim x = V + 4
End Module
Friend Class GooAttribute
Inherits Attribute
Sub New(x As Integer)
End Sub
End Class")
End Function
<WorkItem(542374, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/542374")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestFieldConstantInAttribute2() As Task
Await TestAsync(
"<Goo(2 + 3 + 4)>
Module Program
Dim x = [|2 + 3|] + 4
End Module
Friend Class GooAttribute
Inherits Attribute
Sub New(x As Integer)
End Sub
End Class",
"<Goo(V + 4)>
Module Program
Friend Const {|Rename:V|} As Integer = 2 + 3
Dim x = V + 4
End Module
Friend Class GooAttribute
Inherits Attribute
Sub New(x As Integer)
End Sub
End Class",
index:=1,
parseOptions:=Nothing)
End Function
<WorkItem(542783, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/542783")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestMissingOnAttributeName() As Task
Await TestMissingInRegularAndScriptAsync(
"<[|Obsolete|]>
Class C
End Class")
End Function
<WorkItem(542811, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/542811")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestMissingOnFilterClause() As Task
Await TestMissingInRegularAndScriptAsync(
"Module Program
Sub Main()
Try
Catch ex As Exception When [|+|]
End Try
End Sub
End Module")
End Function
<WorkItem(542906, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/542906")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestNoIntroduceLocalInAttribute() As Task
Dim input =
"Module Program \n <Obsolete([|""""|])> \n Sub Main(args As String()) \n End Sub \n End Module"
Await TestActionCountAsync(
NewLines(input),
count:=2)
Await TestInRegularAndScriptAsync(
NewLines(input),
"Module Program
Private Const {|Rename:V|} As String = """"
<Obsolete(V)>
Sub Main(args As String())
End Sub
End Module")
End Function
<WorkItem(542947, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/542947")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestNotOnMyBase() As Task
Await TestMissingInRegularAndScriptAsync(
"Class c1
Public res As String
Sub Goo()
res = ""1""
End Sub
End Class
Class c2
Inherits c1
Sub scen1()
[|MyBase|].Goo()
End Sub
End Class")
End Function
<WorkItem(541966, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/541966")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestNestedMultiLineIf1() As Task
Dim code =
"
Imports System
Module Program
Sub Main()
If True Then If True Then Console.WriteLine([|1|]) Else Console.WriteLine(2) Else Console.WriteLine(3)
End Sub
End Module
"
Dim expected =
"
Imports System
Module Program
Sub Main()
If True Then
If True Then
Const {|Rename:Value|} As Integer = 1
Console.WriteLine(Value)
Else
Console.WriteLine(2)
End If
Else
Console.WriteLine(3)
End If
End Sub
End Module
"
Await TestInRegularAndScriptAsync(code, expected, index:=3)
End Function
<WorkItem(541966, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/541966")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestNestedMultiLineIf2() As Task
Dim code =
"
Imports System
Module Program
Sub Main()
If True Then If True Then Console.WriteLine(1) Else Console.WriteLine([|2|]) Else Console.WriteLine(3)
End Sub
End Module
"
Dim expected =
"
Imports System
Module Program
Sub Main()
If True Then
If True Then
Console.WriteLine(1)
Else
Const {|Rename:Value|} As Integer = 2
Console.WriteLine(Value)
End If
Else
Console.WriteLine(3)
End If
End Sub
End Module
"
Await TestInRegularAndScriptAsync(code, expected, index:=3)
End Function
<WorkItem(541966, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/541966")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestNestedMultiLineIf3() As Task
Dim code =
"
Imports System
Module Program
Sub Main()
If True Then If True Then Console.WriteLine(1) Else Console.WriteLine(2) Else Console.WriteLine([|3|])
End Sub
End Module
"
Dim expected =
"
Imports System
Module Program
Sub Main()
If True Then
If True Then Console.WriteLine(1) Else Console.WriteLine(2)
Else
Const {|Rename:Value|} As Integer = 3
Console.WriteLine(Value)
End If
End Sub
End Module
"
Await TestInRegularAndScriptAsync(code, expected, index:=3)
End Function
<WorkItem(543273, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/543273")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestSingleLineLambda1() As Task
Await TestInRegularAndScriptAsync(
"Imports System
Module Program
Sub Main
Dim a = Sub(x As Integer) Console.WriteLine([|x + 1|]) ' Introduce local
End Sub
End Module",
"Imports System
Module Program
Sub Main
Dim a = Sub(x As Integer) Dim {|Rename:value|} As Integer = x + 1
Console.WriteLine(value)
End Sub ' Introduce local
End Sub
End Module")
End Function
<WorkItem(543273, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/543273")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestSingleLineLambda2() As Task
Await TestInRegularAndScriptAsync(
"Imports System
Module Program
Sub Main
Dim a = Sub(x As Integer) If True Then Console.WriteLine([|x + 1|]) Else Console.WriteLine()
End Sub
End Module",
"Imports System
Module Program
Sub Main
Dim a = Sub(x As Integer)
If True Then
Dim {|Rename:value|} As Integer = x + 1
Console.WriteLine(value)
Else
Console.WriteLine()
End If
End Sub
End Sub
End Module")
End Function
<WorkItem(543273, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/543273")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestSingleLineLambda3() As Task
Await TestInRegularAndScriptAsync(
"Imports System
Module Program
Sub Main
Dim a = Sub(x As Integer) If True Then Console.WriteLine() Else Console.WriteLine([|x + 1|])
End Sub
End Module",
"Imports System
Module Program
Sub Main
Dim a = Sub(x As Integer)
If True Then
Console.WriteLine()
Else
Dim {|Rename:value|} As Integer = x + 1
Console.WriteLine(value)
End If
End Sub
End Sub
End Module")
End Function
<WorkItem(543273, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/543273")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestSingleLineLambda4() As Task
Await TestInRegularAndScriptAsync(
"Imports System
Module Program
Sub Main
Dim a = Sub(x As Integer) If True Then Console.WriteLine([|x + 1|]) Else Console.WriteLine(x + 1)
End Sub
End Module",
"Imports System
Module Program
Sub Main
Dim a = Sub(x As Integer)
Dim {|Rename:value|} As Integer = x + 1
If True Then Console.WriteLine(value) Else Console.WriteLine(value)
End Sub
End Sub
End Module",
index:=1)
End Function
<WorkItem(543299, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/543299")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestSingleLineLambda5() As Task
Await TestInRegularAndScriptAsync(
"Module Program
Sub Main(args As String())
Dim query = Sub(a) a = New With {Key .Key = Function(ByVal arg As Integer) As Integer
Return arg
End Function}.Key.Invoke([|a Or a|])
End Sub
End Module",
"Module Program
Sub Main(args As String())
Dim query = Sub(a) Dim {|Rename:arg1|} As Object = a Or a
a = New With {Key .Key = Function(ByVal arg As Integer) As Integer
Return arg
End Function}.Key.Invoke(arg1)
End Sub
End Sub
End Module")
End Function
<WorkItem(542762, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/542762")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestNotInIntoClause() As Task
Await TestMissingInRegularAndScriptAsync(
"Imports System.Linq
Module
Sub Main()
Dim x = Aggregate y In New Integer() {1}
Into [|Count()|]
End Sub
End Module")
End Function
<WorkItem(543289, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/543289")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestNotOnAttribute1() As Task
Await TestMissingInRegularAndScriptAsync(
"Option Explicit Off
Module Program
<Runtime.CompilerServices.[|Extension|]()> _
Function Extension(ByVal x As Integer) As Integer
Return x
End Function
End Module")
End Function
<WorkItem(543289, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/543289")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestNotOnAttribute2() As Task
Await TestMissingInRegularAndScriptAsync(
"Option Explicit Off
Module Program
<Runtime.CompilerServices.[|Extension()|]> _
Function Extension(ByVal x As Integer) As Integer
Return x
End Function
End Module")
End Function
<WorkItem(543461, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/543461")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestCollectionInitializer() As Task
Await TestMissingInRegularAndScriptAsync(
"Module Program
Sub Main(args As String())
Dim i1 = New Integer() [|{4, 5}|]
End Sub
End Module")
End Function
<WorkItem(543573, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/543573")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestCaseInsensitiveNameConflict() As Task
Await TestInRegularAndScriptAsync(
"Class M
Public Function Goo()
Return [|Me.Goo|] * 0
End Function
End Class",
"Class M
Public Function Goo()
Dim {|Rename:goo1|} As Object = Me.Goo
Return goo1 * 0
End Function
End Class")
End Function
<WorkItem(543590, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/543590")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestQuery1() As Task
Await TestInRegularAndScriptAsync(
"Imports System.Linq
Public Class Base
Public Function Sample(ByVal arg As Integer) As Integer
Dim results = From s In New Integer() {1}
Select [|Sample(s)|]
Return 0
End Function
End Class",
"Imports System.Linq
Public Class Base
Public Function Sample(ByVal arg As Integer) As Integer
Dim results = From s In New Integer() {1}
Let {|Rename:v|} = Sample(s)
Select v
Return 0
End Function
End Class")
End Function
<WorkItem(543590, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/543590")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestQueryCount1() As Task
Await TestActionCountAsync(
"Imports System.Linq
Public Class Base
Public Function Sample(ByVal arg As Integer) As Integer
Dim results = From s In New Integer() {1}
Select [|Sample(s)|]
Return 0
End Function
End Class",
count:=2)
End Function
<WorkItem(543590, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/543590")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestQuery2() As Task
Await TestInRegularAndScriptAsync(
"Imports System.Linq
Public Class Base
Public Function Sample(ByVal arg As Integer) As Integer
Dim results = From s In New Integer() {1}
Where [|Sample(s)|] > 21
Select Sample(s)
Return 0
End Function
End Class",
"Imports System.Linq
Public Class Base
Public Function Sample(ByVal arg As Integer) As Integer
Dim results = From s In New Integer() {1}
Let {|Rename:v|} = Sample(s) Where v > 21
Select Sample(s)
Return 0
End Function
End Class")
End Function
<WorkItem(543590, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/543590")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestQuery3() As Task
Await TestInRegularAndScriptAsync(
"Imports System.Linq
Public Class Base
Public Function Sample(ByVal arg As Integer) As Integer
Dim results = From s In New Integer() {1}
Where [|Sample(s)|] > 21
Select Sample(s)
Return 0
End Function
End Class",
"Imports System.Linq
Public Class Base
Public Function Sample(ByVal arg As Integer) As Integer
Dim results = From s In New Integer() {1}
Let {|Rename:v|} = Sample(s) Where v > 21
Select v
Return 0
End Function
End Class",
index:=1)
End Function
<WorkItem(543590, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/543590")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestQuery4() As Task
Await TestInRegularAndScriptAsync(
"Imports System.Linq
Public Class Base
Public Function Sample(ByVal arg As Integer) As Integer
Dim results = From s In New Integer() {1}
Where Sample(s) > 21
Select [|Sample(s)|]
Return 0
End Function
End Class",
"Imports System.Linq
Public Class Base
Public Function Sample(ByVal arg As Integer) As Integer
Dim results = From s In New Integer() {1}
Where Sample(s) > 21
Let {|Rename:v|} = Sample(s)
Select v
Return 0
End Function
End Class")
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestQuery5() As Task
Await TestInRegularAndScriptAsync(
"Imports System.Linq
Public Class Base
Public Function Sample(ByVal arg As Integer) As Integer
Dim results = From s In New Integer() {1}
Where Sample(s) > 21
Select [|Sample(s)|]
Return 0
End Function
End Class",
"Imports System.Linq
Public Class Base
Public Function Sample(ByVal arg As Integer) As Integer
Dim results = From s In New Integer() {1}
Let {|Rename:v|} = Sample(s)
Where v > 21
Select v
Return 0
End Function
End Class",
index:=1)
End Function
<WorkItem(543529, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/543529")>
<WorkItem(909152, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/909152")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestInStatementlessConstructorParameter() As Task
Await TestMissingInRegularAndScriptAsync("Class C1
Sub New(Optional ByRef x As String = [|Nothing|])
End Sub
End Class")
End Function
<WorkItem(543650, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/543650")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestReferenceToAnonymousTypeProperty() As Task
Await TestMissingInRegularAndScriptAsync(
"Class AM
Sub M(args As String())
Dim var1 As New AM
Dim at1 As New With {var1, .friend = [|.var1|]}
End Sub
End Class")
End Function
<WorkItem(543698, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/543698")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestIntegerArrayExpression() As Task
Await TestInRegularAndScriptAsync(
"Module Program
Sub Main()
Return [|New Integer() {}|]
End Sub
End Module",
"Module Program
Sub Main()
Dim {|Rename:v|} As Integer() = New Integer() {}
Return v
End Sub
End Module")
End Function
<WorkItem(544273, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/544273")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestAttributeNamedParameter() As Task
Await TestMissingInRegularAndScriptAsync(
"Class TestAttribute
Inherits Attribute
Public Sub New(Optional a As Integer = 42)
End Sub
End Class
<Test([|a|]:=5)>
Class Goo
End Class")
End Function
<WorkItem(544265, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/544265")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestMissingOnWrittenToExpression() As Task
Await TestMissingInRegularAndScriptAsync(
"Module Program
Sub Main()
Dim x = New Integer() {1, 2}
[|x(1)|] = 2
End Sub
End Module")
End Function
<WorkItem(543824, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/543824")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestImplicitMemberAccess1() As Task
Await TestMissingInRegularAndScriptAsync(
"Imports System
Public Class C1
Public FieldInt As Long
Public FieldStr As String
Public Property PropInt As Integer
End Class
Public Class C2
Public Shared Sub Main()
Dim x = 1 + New C1() With {.FieldStr = [|.FieldInt|].ToString()}
End Sub
End Class")
End Function
<WorkItem(543824, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/543824")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestImplicitMemberAccess2() As Task
Await TestMissingInRegularAndScriptAsync(
"Imports System
Public Class C1
Public FieldInt As Long
Public FieldStr As String
Public Property PropInt As Integer
End Class
Public Class C2
Public Shared Sub Main()
Dim x = 1 + New C1() With {.FieldStr = [|.FieldInt.ToString|]()}
End Sub
End Class")
End Function
<WorkItem(543824, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/543824")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestImplicitMemberAccess3() As Task
Await TestMissingInRegularAndScriptAsync(
"Imports System
Public Class C1
Public FieldInt As Long
Public FieldStr As String
Public Property PropInt As Integer
End Class
Public Class C2
Public Shared Sub Main()
Dim x = 1 + New C1() With {.FieldStr = [|.FieldInt.ToString()|]}
End Sub
End Class")
End Function
<WorkItem(543824, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/543824")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestImplicitMemberAccess4() As Task
Dim code =
"
Imports System
Public Class C1
Public FieldInt As Long
Public FieldStr As String
Public Property PropInt As Integer
End Class
Public Class C2
Public Shared Sub Main()
Dim x = 1 + [|New C1() With {.FieldStr = .FieldInt.ToString()}|]
End Sub
End Class
"
Dim expected =
"
Imports System
Public Class C1
Public FieldInt As Long
Public FieldStr As String
Public Property PropInt As Integer
End Class
Public Class C2
Public Shared Sub Main()
Dim {|Rename:c1|} As C1 = New C1() With {.FieldStr = .FieldInt.ToString()}
Dim x = 1 + c1
End Sub
End Class
"
Await TestInRegularAndScriptAsync(code, expected)
End Function
<WorkItem(529510, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/529510")>
<WpfFact(Skip:="529510"), Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestNoRefactoringOnAddressOfExpression() As Task
Dim source = "Imports System
Module Module1
Public Sub Goo(ByVal a1 As Exception)
End Sub
Public Sub goo(ByVal a1 As Action(Of ArgumentException))
End Sub
Sub Main()
Goo(New Action(Of Exception)([|AddressOf Goo|]))
End Sub
End Module"
Await TestMissingInRegularAndScriptAsync(source)
End Function
<WorkItem(529510, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/529510")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsExtractMethod)>
Public Async Function TestMissingOnAddressOfInDelegate() As Task
Await TestMissingInRegularAndScriptAsync(
"Module Module1
Public Sub Goo(ByVal a1 As Exception)
End Sub
Public Sub goo(ByVal a1 As Action(Of ArgumentException))
End Sub
Sub Main()
goo(New Action(Of Exception)([|AddressOf Goo|]))
End Sub
End Module")
End Function
<WorkItem(545168, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/545168")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsExtractMethod)>
Public Async Function TestMissingOnXmlName() As Task
Await TestMissingInRegularAndScriptAsync(
"Module M
Sub Main()
Dim x = <[|x|]/>
End Sub
End Module")
End Function
<WorkItem(545262, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/545262")>
<WorkItem(909152, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/909152")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestInTernaryConditional() As Task
Await TestMissingInRegularAndScriptAsync("Module Program
Sub Main(args As String())
Dim p As Object = Nothing
Dim Obj1 = If(New With {.a = True}.a, p, [|Nothing|])
End Sub
End Module")
End Function
<WorkItem(545316, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/545316")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestInPropertyInitializer() As Task
Await TestInRegularAndScriptAsync(
"Module Module1
Property Prop As New List(Of String) From {[|""One""|], ""two""}
End Module",
"Module Module1
Private Const {|Rename:V|} As String = ""One""
Property Prop As New List(Of String) From {V, ""two""}
End Module")
End Function
<WorkItem(545308, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/545308")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestDoNotMergeAmpersand() As Task
Dim code =
"
Module Module1
Public Sub goo(Optional ByVal arg = ([|""a""|]) & ""b"")
End Sub
End Module
"
Dim expected =
"
Module Module1
Private Const {|Rename:V|} As String = ""a""
Public Sub goo(Optional ByVal arg = V & ""b"")
End Sub
End Module
"
Await TestInRegularAndScriptAsync(code, expected)
End Function
<WorkItem(545258, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/545258")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestVenusGeneration1() As Task
Dim code =
"
Class C
Sub Goo()
#ExternalSource (""Goo"", 1)
Console.WriteLine([|5|])
#End ExternalSource
End Sub
End Class
"
Dim expected =
"
Class C
Sub Goo()
#ExternalSource (""Goo"", 1)
Const {|Rename:V|} As Integer = 5
Console.WriteLine(V)
#End ExternalSource
End Sub
End Class
"
Await TestExactActionSetOfferedAsync(code,
{String.Format(FeaturesResources.Introduce_local_constant_for_0, "5"),
String.Format(FeaturesResources.Introduce_local_constant_for_all_occurrences_of_0, "5")})
Await TestInRegularAndScriptAsync(code, expected)
End Function
<WorkItem(545258, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/545258")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestVenusGeneration2() As Task
Dim code =
"
Class C
#ExternalSource (""Goo"", 1)
Sub Goo()
If False Then
Console.WriteLine([|5|])
End If
End Sub
#End ExternalSource
End Class
"
Await TestExactActionSetOfferedAsync(code,
{String.Format(FeaturesResources.Introduce_local_constant_for_0, "5"),
String.Format(FeaturesResources.Introduce_local_constant_for_all_occurrences_of_0, "5")})
End Function
<WorkItem(545258, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/545258")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestVenusGeneration3() As Task
Dim code =
"
Class C
Sub Goo()
#ExternalSource (""Goo"", 1)
If False Then
Console.WriteLine([|5|])
End If
#End ExternalSource
End Sub
End Class
"
Dim expected =
"
Class C
Sub Goo()
#ExternalSource (""Goo"", 1)
If False Then
Const {|Rename:V|} As Integer = 5
Console.WriteLine(V)
End If
#End ExternalSource
End Sub
End Class
"
Await TestExactActionSetOfferedAsync(code,
{String.Format(FeaturesResources.Introduce_local_constant_for_0, "5"),
String.Format(FeaturesResources.Introduce_local_constant_for_all_occurrences_of_0, "5")})
Await TestInRegularAndScriptAsync(code, expected)
End Function
<WorkItem(545525, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/545525")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestInvocation() As Task
Await TestInRegularAndScriptAsync(
"Option Strict On
Class C
Shared Sub Main()
Dim x = [|New C().Goo()|](0)
End Sub
Function Goo() As Integer()
End Function
End Class",
"Option Strict On
Class C
Shared Sub Main()
Dim {|Rename:v|} As Integer() = New C().Goo()
Dim x = v(0)
End Sub
Function Goo() As Integer()
End Function
End Class")
End Function
<WorkItem(545829, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/545829")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestOnImplicitMemberAccess() As Task
Await TestAsync(
"Module Program
Sub Main()
With """"
Dim x = [|.GetHashCode|] Xor &H7F3E ' Introduce Local
End With
End Sub
End Module",
"Module Program
Sub Main()
With """"
Dim {|Rename:getHashCode|} As Integer = .GetHashCode
Dim x = getHashCode Xor &H7F3E ' Introduce Local
End With
End Sub
End Module",
parseOptions:=Nothing)
Await TestAsync(
"Module Program
Sub Main()
With """"
Dim x = [|.GetHashCode|] Xor &H7F3E ' Introduce Local
End With
End Sub
End Module",
"Module Program
Sub Main()
With """"
Dim {|Rename:getHashCode|} As Integer = .GetHashCode
Dim x = getHashCode Xor &H7F3E ' Introduce Local
End With
End Sub
End Module",
parseOptions:=GetScriptOptions())
End Function
<WorkItem(545702, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/545702")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestMissingInRefLocation() As Task
Dim markup =
"
Module A
Sub Main()
Goo([|1|])
End Sub
Sub Goo(ByRef x As Long)
End Sub
Sub Goo(x As String)
End Sub
End Module
"
Await TestMissingInRegularAndScriptAsync(markup)
End Function
<WorkItem(546139, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/546139")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestAcrossPartialTypes() As Task
Await TestInRegularAndScriptAsync(
"Partial Class C
Sub goo1(Optional x As String = [|""HELLO""|])
End Sub
End Class
Partial Class C
Sub goo3(Optional x As String = ""HELLO"")
End Sub
End Class",
"Partial Class C
Private Const {|Rename:V|} As String = ""HELLO""
Sub goo1(Optional x As String = V)
End Sub
End Class
Partial Class C
Sub goo3(Optional x As String = V)
End Sub
End Class",
index:=1)
End Function
<WorkItem(544669, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/544669")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestFunctionBody1() As Task
Await TestInRegularAndScriptAsync(
"Module Program
Sub Main(args As String())
Dim a1 = Function(ByVal x) [|x!goo|]
End Sub
End Module",
"Module Program
Sub Main(args As String())
Dim a1 = Function(ByVal x)
Dim {|Rename:goo|} As Object = x!goo
Return goo
End Function
End Sub
End Module")
End Function
<WorkItem(1065689, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1065689")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestTrailingTrivia() As Task
Dim code =
"
Module M
Sub Main()
Dim a = 1 +
[|2|] ' comment
End Sub
End Module
"
Dim expected =
"
Module M
Private Const {|Rename:V|} As Integer = 2
Sub Main()
Dim a = 1 +
V ' comment
End Sub
End Module
"
Await TestInRegularAndScriptAsync(code, expected)
End Function
<WorkItem(546815, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/546815")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestInIfStatement() As Task
Await TestInRegularAndScriptAsync(
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main(args As String())
If [|True|] Then
End If
End Sub
End Module",
"Imports System
Imports System.Collections.Generic
Imports System.Linq
Module Program
Private Const {|Rename:V|} As Boolean = True
Sub Main(args As String())
If V Then
End If
End Sub
End Module")
End Function
<WorkItem(830928, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/830928")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestIntroduceLocalRemovesUnnecessaryCast() As Task
Await TestInRegularAndScriptAsync(
"Imports System.Collections.Generic
Class C
Private Shared Sub Main(args As String())
Dim hSet = New HashSet(Of String)()
hSet.Add([|hSet.ToString()|])
End Sub
End Class",
"Imports System.Collections.Generic
Class C
Private Shared Sub Main(args As String())
Dim hSet = New HashSet(Of String)()
Dim {|Rename:item|} As String = hSet.ToString()
hSet.Add(item)
End Sub
End Class")
End Function
<WorkItem(546691, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/546691")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestIntroLocalInSingleLineLambda() As Task
Dim code =
"
Module Program
Sub Main()
Dim x = Function() [|Sub()
End Sub|]
End Sub
End Module
"
Dim expected =
"
Module Program
Sub Main()
Dim {|Rename:p|} = Sub()
End Sub
Dim x = Function() p
End Sub
End Module
"
Await TestInRegularAndScriptAsync(code, expected)
End Function
<WorkItem(530720, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/530720")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestSingleToMultilineLambdaLineBreaks() As Task
Dim code =
"
Module Program
Sub Main()
Dim a = Function(c) [|c!goo|]
End Sub
End Module
"
Dim expected =
"
Module Program
Sub Main()
Dim a = Function(c)
Dim {|Rename:goo|} As Object = c!goo
Return goo
End Function
End Sub
End Module
"
Await TestInRegularAndScriptAsync(code, expected)
End Function
<WorkItem(531478, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/531478")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestEscapeKeywordsIfNeeded1() As Task
Dim code =
"
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main()
Take([|From x In """"|])
End Sub
Sub Take(x)
End Sub
End Module
"
Dim expected =
"
Imports System.Collections.Generic
Imports System.Linq
Module Program
Sub Main()
Dim {|Rename:x1|} As IEnumerable(Of Char) = From x In """"
[Take](x1)
End Sub
Sub Take(x)
End Sub
End Module
"
Await TestInRegularAndScriptAsync(code, expected)
End Function
<WorkItem(632327, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/632327")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestInsertAfterPreprocessor1() As Task
Dim code =
"
Public Class Index_vbhtml
Public Sub Execute()
#ExternalSource (""Home\Index.vbhtml"", 1)
Dim i = [|1 + 2|] + 3
If True Then
Dim j = 1 + 2 + 3
End If
#End ExternalSource
End Sub
End Class
"
Dim expected =
"
Public Class Index_vbhtml
Public Sub Execute()
#ExternalSource (""Home\Index.vbhtml"", 1)
Const {|Rename:V|} As Integer = 1 + 2
Dim i = V + 3
If True Then
Dim j = 1 + 2 + 3
End If
#End ExternalSource
End Sub
End Class
"
Await TestInRegularAndScriptAsync(code, expected)
End Function
<WorkItem(632327, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/632327")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestInsertAfterPreprocessor2() As Task
Dim code =
"
Public Class Index_vbhtml
Public Sub Execute()
#ExternalSource (""Home\Index.vbhtml"", 1)
Dim i = 1 + 2 + 3
If True Then
Dim j = [|1 + 2|] + 3
End If
#End ExternalSource
End Sub
End Class
"
Dim expected =
"
Public Class Index_vbhtml
Public Sub Execute()
#ExternalSource (""Home\Index.vbhtml"", 1)
Dim i = 1 + 2 + 3
If True Then
Const {|Rename:V|} As Integer = 1 + 2
Dim j = V + 3
End If
#End ExternalSource
End Sub
End Class
"
Await TestInRegularAndScriptAsync(code, expected)
End Function
<WorkItem(682683, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/682683")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestDontRemoveParenthesesIfOperatorPrecedenceWouldBeBroken() As Task
Dim code =
"
Imports System
Module Program
Sub Main()
Console.WriteLine(5 - ([|1|] + 2))
End Sub
End Module
"
Dim expected =
"
Imports System
Module Program
Sub Main()
Const {|Rename:V|} As Integer = 1
Console.WriteLine(5 - (V + 2))
End Sub
End Module
"
Await TestInRegularAndScriptAsync(code, expected, index:=2)
End Function
<WorkItem(1022458, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1022458")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestDontSimplifyParentUnlessEntireInnerNodeIsSelected() As Task
Dim code =
"
Imports System
Module Program
Sub Main()
Dim s = ""Text""
Dim x = 42
If ([|s.Length|].CompareTo(x) > 0 AndAlso
s.Length.CompareTo(x) > 0) Then
End If
End Sub
End Module
"
Dim expected =
"
Imports System
Module Program
Sub Main()
Dim s = ""Text""
Dim x = 42
Dim {|Rename:length|} As Integer = s.Length
If (length.CompareTo(x) > 0 AndAlso
length.CompareTo(x) > 0) Then
End If
End Sub
End Module
"
Await TestInRegularAndScriptAsync(code, expected, index:=1)
End Function
<WorkItem(939259, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/939259")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestIntroduceLocalWithTriviaInMultiLineStatements() As Task
Dim code =
"
Imports System
Module Program
Sub Main()
Dim x = If(True,
[|1|], ' TODO: Comment
2)
End Sub
End Module
"
Dim expected =
"
Imports System
Module Program
Sub Main()
Const {|Rename:V|} As Integer = 1
Dim x = If(True,
V, ' TODO: Comment
2)
End Sub
End Module
"
Await TestInRegularAndScriptAsync(code, expected, index:=3)
End Function
<WorkItem(909152, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/909152")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestMissingOnNothingLiteral() As Task
Await TestMissingInRegularAndScriptAsync(
"
Imports System
Module Program
Sub Main(args As String())
Main([|Nothing|])
M(Nothing)
End Sub
Sub M(i As Integer)
End Sub
End Module
")
End Function
<WorkItem(1130990, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1130990")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestInParentConditionalAccessExpressions() As Task
Dim code =
"
Imports System
Class C
Function F(Of T)(x As T) As T
Dim y = [|F(New C)|]?.F(New C)?.F(New C)
Return x
End Function
End Class
"
Dim expected =
"
Imports System
Class C
Function F(Of T)(x As T) As T
Dim {|Rename:c|} As C = F(New C)
Dim y = c?.F(New C)?.F(New C)
Return x
End Function
End Class
"
Await TestInRegularAndScriptAsync(code, expected)
End Function
<WorkItem(1130990, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1130990")>
<WorkItem(3110, "https://github.com/dotnet/roslyn/issues/3110")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestMissingAcrossMultipleParentConditionalAccessExpressions() As Task
Await TestMissingInRegularAndScriptAsync(
"
Imports System
Class C
Function F(Of T)(x As T) As T
Dim y = [|F(New C)?.F(New C)|]?.F(New C)
Return x
End Function
End Class
")
End Function
<WorkItem(1130990, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1130990")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestMissingOnInvocationExpressionInParentConditionalAccessExpressions() As Task
Await TestMissingInRegularAndScriptAsync(
"
Imports System
Class C
Function F(Of T)(x As T) As T
Dim y = F(New C)?.[|F(New C)|]?.F(New C)
Return x
End Function
End Class
")
End Function
<WorkItem(1130990, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1130990")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestMissingOnMemberBindingExpressionInParentConditionalAccessExpressions() As Task
Await TestMissingInRegularAndScriptAsync(
"
Imports System
Class C
Sub F()
Dim s as String = ""Text""
Dim l = s?.[|Length|]
End Sub
End Class
")
End Function
<WorkItem(2026, "https://github.com/dotnet/roslyn/issues/2026")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestReplaceAllFromInsideIfBlock() As Task
Dim code =
"
Imports System
Module DataTipInfoGetterModule
Friend Function GetInfoAsync() As DebugDataTipInfo
Dim expression As ExpressionSyntax = Nothing
Dim curr = DirectCast(expression.Parent, ExpressionSyntax)
If curr Is expression.Parent Then
Return New DebugDataTipInfo([|expression.Parent|].Span)
End If
Return Nothing
End Function
End Module
Friend Class TextSpan
End Class
Friend Class ExpressionSyntax
Public Property Parent As ExpressionSyntax
Public Property Span As TextSpan
End Class
Friend Class DebugDataTipInfo
Public Sub New(span As Object)
End Sub
End Class
"
Dim expected =
"
Imports System
Module DataTipInfoGetterModule
Friend Function GetInfoAsync() As DebugDataTipInfo
Dim expression As ExpressionSyntax = Nothing
Dim {|Rename:parent|} As ExpressionSyntax = expression.Parent
Dim curr = DirectCast(parent, ExpressionSyntax)
If curr Is parent Then
Return New DebugDataTipInfo(parent.Span)
End If
Return Nothing
End Function
End Module
Friend Class TextSpan
End Class
Friend Class ExpressionSyntax
Public Property Parent As ExpressionSyntax
Public Property Span As TextSpan
End Class
Friend Class DebugDataTipInfo
Public Sub New(span As Object)
End Sub
End Class
"
Await TestInRegularAndScriptAsync(code, expected, index:=1)
End Function
<WorkItem(1065661, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1065661")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestIntroduceVariableTextDoesntSpanLines1() As Task
Dim code = "
Class C
Sub M()
Dim s = """" + [|""a
b
c""|]
End Sub
End Class"
Await TestSmartTagTextAsync(code, String.Format(FeaturesResources.Introduce_local_constant_for_0, """a b c"""), New TestParameters(index:=2))
End Function
<WorkItem(1065661, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1065661")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestIntroduceVariableTextDoesntSpanLines2() As Task
Dim code = "
Class C
Sub M()
Dim s = """" + [|$""a
b
c""|]
End Sub
End Class"
Await TestSmartTagTextAsync(code, String.Format(FeaturesResources.Introduce_local_for_0, "$""a b c"""))
End Function
<WorkItem(976, "https://github.com/dotnet/roslyn/issues/976")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestNoConstantForInterpolatedStrings1() As Task
Dim code =
"
Module Program
Sub Main()
Dim args As String() = Nothing
Console.WriteLine([|$""{DateTime.Now.ToString()}Text{args(0)}""|])
End Sub
End Module
"
Dim expected =
"
Module Program
Sub Main()
Dim args As String() = Nothing
Dim {|Rename:v|} As String = $""{DateTime.Now.ToString()}Text{args(0)}""
Console.WriteLine(v)
End Sub
End Module
"
Await TestInRegularAndScriptAsync(code, expected)
End Function
<WorkItem(976, "https://github.com/dotnet/roslyn/issues/976")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestNoConstantForInterpolatedStrings2() As Task
Dim code =
"
Module Program
Sub Main()
Console.WriteLine([|$""Text{{s}}""|])
Console.WriteLine($""Text{{s}}"")
End Sub
End Module
"
Dim expected =
"
Module Program
Sub Main()
Dim {|Rename:v|} As String = $""Text{{s}}""
Console.WriteLine(v)
Console.WriteLine(v)
End Sub
End Module
"
Await TestInRegularAndScriptAsync(code, expected, index:=1)
End Function
<WorkItem(3147, "https://github.com/dotnet/roslyn/issues/3147")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestHandleFormattableStringTargetTyping1() As Task
Const code = "
Imports System
" & FormattableStringType & "
Namespace N
Class C
Public Sub M()
Dim f = FormattableString.Invariant([|$""""|])
End Sub
End Class
End Namespace"
Const expected = "
Imports System
" & FormattableStringType & "
Namespace N
Class C
Public Sub M()
Dim {|Rename:formattable|} As FormattableString = $""""
Dim f = FormattableString.Invariant(formattable)
End Sub
End Class
End Namespace"
Await TestInRegularAndScriptAsync(code, expected)
End Function
<WorkItem(936, "https://github.com/dotnet/roslyn/issues/936")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestInAutoPropertyInitializerEqualsClause() As Task
Dim code =
"
Imports System
Class C
Property Name As String = [|""Roslyn""|]
End Class
"
Dim expected =
"
Imports System
Class C
Private Const {|Rename:V|} As String = ""Roslyn""
Property Name As String = V
End Class
"
Await TestInRegularAndScriptAsync(code, expected)
End Function
<WorkItem(936, "https://github.com/dotnet/roslyn/issues/936")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestInAutoPropertyWithCollectionInitializerAfterEqualsClause() As Task
Dim code =
"
Imports System
Class C
Property Grades As Integer() = [|{90, 73}|]
End Class
"
Dim expected =
"
Imports System
Class C
Private Shared ReadOnly {|Rename:p|} As Integer() = {90, 73}
Property Grades As Integer() = p
End Class
"
Await TestInRegularAndScriptAsync(code, expected)
End Function
<WorkItem(936, "https://github.com/dotnet/roslyn/issues/936")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestInAutoPropertyInitializerAsClause() As Task
Dim code =
"
Imports System
Class C
Public Property Items As New List(Of String) From {[|""M""|], ""T"", ""W""}
End Class
"
Dim expected =
"
Imports System
Class C
Private Const {|Rename:V|} As String = ""M""
Public Property Items As New List(Of String) From {V, ""T"", ""W""}
End Class
"
Await TestInRegularAndScriptAsync(code, expected)
End Function
<WorkItem(936, "https://github.com/dotnet/roslyn/issues/936")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestInAutoPropertyObjectCreationExpressionWithinAsClause() As Task
Dim code =
"
Imports System
Class C
Property Orders As New List(Of Object)([|500|])
End Class
"
Dim expected =
"
Imports System
Class C
Private Const {|Rename:V|} As Integer = 500
Property Orders As New List(Of Object)(V)
End Class
"
Await TestInRegularAndScriptAsync(code, expected)
End Function
<WorkItem(11777, "https://github.com/dotnet/roslyn/issues/11777")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestGenerateLocalConflictingName1() As Task
Await TestInRegularAndScriptAsync(
"class Program
class MySpan
public Start as integer
end class
sub Method(span as MySpan)
dim pos as integer = span.Start
while pos < [|span.Start|]
dim start as integer = pos
end while
end sub
end class",
"class Program
class MySpan
public Start as integer
end class
sub Method(span as MySpan)
dim pos as integer = span.Start
Dim {|Rename:start1|} As Integer = span.Start
while pos < start1
dim start as integer = pos
end while
end sub
end class")
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TupleWithInferredName_LeaveExplicitName() As Task
Dim code = "
Class C
Shared Dim y As Integer = 2
Sub M()
Dim a As Integer = 1
Dim t = (a, x:=[|C.y|])
End Sub
End Class
"
Dim expected = "
Class C
Shared Dim y As Integer = 2
Sub M()
Dim a As Integer = 1
Dim {|Rename:y1|} As Integer = C.y
Dim t = (a, x:=y1)
End Sub
End Class
"
Await TestAsync(code, expected, parseOptions:=TestOptions.Regular.WithLanguageVersion(LanguageVersion.Latest))
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TupleWithInferredName_InferredNameBecomesExplicit() As Task
Dim code = "
Class C
Shared Dim y As Integer = 2
Sub M()
Dim a As Integer = 1
Dim t = (a, [|C.y|])
End Sub
End Class
"
Dim expected = "
Class C
Shared Dim y As Integer = 2
Sub M()
Dim a As Integer = 1
Dim {|Rename:y1|} As Integer = C.y
Dim t = (a, y:=y1)
End Sub
End Class
"
Await TestAsync(code, expected, parseOptions:=TestOptions.Regular.WithLanguageVersion(LanguageVersion.Latest))
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TupleWithInferredName_AllOccurrences() As Task
Dim code = "
Class C
Shared Dim y As Integer = 2
Sub M()
Dim a As Integer = 1
Dim t = (a, [|C.y|])
Dim t2 = (C.y, a)
End Sub
End Class
"
Dim expected = "
Class C
Shared Dim y As Integer = 2
Sub M()
Dim a As Integer = 1
Dim {|Rename:y1|} As Integer = C.y
Dim t = (a, y:=y1)
Dim t2 = (y:=y1, a)
End Sub
End Class
"
Await TestAsync(code, expected, index:=1,
parseOptions:=TestOptions.Regular.WithLanguageVersion(LanguageVersion.Latest))
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TupleWithInferredName_NoDuplicateNames() As Task
Dim code = "
Class C
Shared Dim y As Integer = 2
Sub M()
Dim t = (C.y, [|C.y|])
End Sub
End Class
"
Dim expected = "
Class C
Shared Dim y As Integer = 2
Sub M()
Dim {|Rename:y1|} As Integer = C.y
Dim t = (y1, y1)
End Sub
End Class
"
Await TestInRegularAndScriptAsync(code, expected, index:=1)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TupleWithInferredName_NoReservedNames() As Task
Dim code = "
Class C
Shared Dim rest As Integer = 2
Sub M()
Dim a As Integer = 1
Dim t = (a, [|C.rest|])
End Sub
End Class
"
Dim expected = "
Class C
Shared Dim rest As Integer = 2
Sub M()
Dim a As Integer = 1
Dim {|Rename:rest1|} As Integer = C.rest
Dim t = (a, rest1)
End Sub
End Class
"
Await TestAsync(code, expected, parseOptions:=TestOptions.Regular.WithLanguageVersion(LanguageVersion.Latest))
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function AnonymousTypeWithInferredName_LeaveExplicitName() As Task
Dim code = "
Class C
Shared Dim y As Integer = 2
Sub M()
Dim a As Integer = 1
Dim t = New With {a, [|C.y|]}
End Sub
End Class
"
Dim expected = "
Class C
Shared Dim y As Integer = 2
Sub M()
Dim a As Integer = 1
Dim {|Rename:y1|} As Integer = C.y
Dim t = New With {a, .y = y1}
End Sub
End Class
"
Await TestInRegularAndScriptAsync(code, expected)
End Function
<WorkItem(2423, "https://github.com/dotnet/roslyn/issues/2423")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestPickNameBasedOnArgument1() As Task
Await TestInRegularAndScriptAsync(
"class C
public sub new(a as string, b as string)
dim c = new TextSpan([|integer.Parse(a)|], integer.Parse(b))
end sub
end class
structure TextSpan
public sub new(start as integer, length as integer)
end sub
end structure",
"class C
public sub new(a as string, b as string)
Dim {|Rename:start|} As Integer = integer.Parse(a)
dim c = new TextSpan(start, integer.Parse(b))
end sub
end class
structure TextSpan
public sub new(start as integer, length as integer)
end sub
end structure")
End Function
<WorkItem(2423, "https://github.com/dotnet/roslyn/issues/2423")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestPickNameBasedOnArgument2() As Task
Await TestInRegularAndScriptAsync(
"class C
public sub new(a as string, b as string)
dim c = new TextSpan(integer.Parse(a), [|integer.Parse(b)|])
end sub
end class
structure TextSpan
public sub new(start as integer, length as integer)
end sub
end structure",
"class C
public sub new(a as string, b as string)
Dim {|Rename:length|} As Integer = integer.Parse(b)
dim c = new TextSpan(integer.Parse(a), length)
end sub
end class
structure TextSpan
public sub new(start as integer, length as integer)
end sub
end structure")
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
<WorkItem(10123, "https://github.com/dotnet/roslyn/issues/10123")>
Public Async Function TestSimpleParameterName() As Task
Dim source = "Module Program
Sub Main(x As Integer)
Goo([|x|])
End Sub
End Module"
Dim expected = "Module Program
Sub Main(x As Integer)
Dim {|Rename:x1|} As Integer = x
Goo(x1)
End Sub
End Module"
Await TestInRegularAndScriptAsync(source, expected)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
<WorkItem(10123, "https://github.com/dotnet/roslyn/issues/10123")>
Public Async Function TestSimpleParameterName_EmptySelection() As Task
Dim source = "Module Program
Sub Main(x As Integer)
Goo([||]x)
End Sub
End Module"
Await TestMissingAsync(source)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
<WorkItem(10123, "https://github.com/dotnet/roslyn/issues/10123")>
Public Async Function TestFieldName_QualifiedWithMe() As Task
Dim source = "Module Program
Dim x As Integer
Sub Main()
Goo([|x|])
End Sub
End Module"
Dim expected = "Module Program
Dim x As Integer
Sub Main()
Dim {|Rename:x1|} As Integer = x
Goo(x1)
End Sub
End Module"
Await TestInRegularAndScriptAsync(source, expected)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
<WorkItem(10123, "https://github.com/dotnet/roslyn/issues/10123")>
Public Async Function TestFieldName_QualifiedWithType() As Task
Dim source = "Module Program
Shared Dim x As Integer
Sub Main()
Goo([|Program.x|])
End Sub
End Module"
Dim expected = "Module Program
Shared Dim x As Integer
Sub Main()
Dim {|Rename:x1|} As Integer = Program.x
Goo(x1)
End Sub
End Module"
Await TestInRegularAndScriptAsync(source, expected)
End Function
<WorkItem(21373, "https://github.com/dotnet/roslyn/issues/21373")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestInAttribute() As Task
Dim code = "
Class C
Public Property Foo()
<Example([|3 + 3|])>
Public Property Bar()
End Class
"
Dim expected = "
Class C
Private Const {|Rename:V|} As Integer = 3 + 3
Public Property Foo()
<Example(V)>
Public Property Bar()
End Class
"
Await TestInRegularAndScriptAsync(code, expected)
End Function
<WorkItem(28266, "https://github.com/dotnet/roslyn/issues/28266")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestCaretAtEndOfExpression1() As Task
Await TestInRegularAndScriptAsync(
"class C
sub Goo()
Bar(1[||], 2)
end sub
end class",
"class C
Private Const {|Rename:V|} As Integer = 1
sub Goo()
Bar(V, 2)
end sub
end class")
End Function
<WorkItem(28266, "https://github.com/dotnet/roslyn/issues/28266")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestCaretAtEndOfExpression2() As Task
Await TestInRegularAndScriptAsync(
"class C
sub Goo()
Bar(1, 2[||])
end sub
end class",
"class C
Private Const {|Rename:V|} As Integer = 2
sub Goo()
Bar(1, V)
end sub
end class")
End Function
<WorkItem(28266, "https://github.com/dotnet/roslyn/issues/28266")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestCaretAtEndOfExpression3() As Task
Await TestInRegularAndScriptAsync(
"class C
sub Goo()
Bar(1, (2[||]))
end sub
end class",
"class C
Private Const {|Rename:V|} As Integer = 2
sub Goo()
Bar(1, V)
end sub
end class")
End Function
<WorkItem(28266, "https://github.com/dotnet/roslyn/issues/28266")>
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
Public Async Function TestCaretAtEndOfExpression4() As Task
Await TestInRegularAndScriptAsync(
"class C
sub Goo()
Bar(1, Bar(2[||]))
end sub
end class",
"class C
Private Const {|Rename:V|} As Integer = 2
sub Goo()
Bar(1, Bar(V))
end sub
end class")
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
<WorkItem(27949, "https://github.com/dotnet/roslyn/issues/27949")>
Public Async Function TestWhitespaceSpanInAssignment() As Task
Await TestMissingAsync("
Class C
Dim x As Integer = [| |] 0
End Class
")
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
<WorkItem(28665, "https://github.com/dotnet/roslyn/issues/28665")>
Public Async Function TestWhitespaceSpanInAttribute() As Task
Await TestMissingAsync("
Class C
<Example( [| |] )>
Public Function Foo()
End Function
End Class
")
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
<WorkItem(30207, "https://github.com/dotnet/roslyn/issues/30207")>
Public Async Function TestExplicitRecursiveInstanceMemberAccess_ForAllOccurrences() As Task
Dim source = "
Class C
Dim c As C
Sub Foo()
Dim y = [|c|].c.c
End Sub
End Class
"
Dim expected = "
Class C
Dim c As C
Sub Foo()
Dim {|Rename:c1|} As C = c
Dim y = c1.c.c
End Sub
End Class
"
Await TestInRegularAndScriptAsync(source, expected, index:=1)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
<WorkItem(30207, "https://github.com/dotnet/roslyn/issues/30207")>
Public Async Function TestImplicitRecursiveInstanceMemberAccess_ForAllOccurrences() As Task
Dim source = "
Class C
Dim c As C
Sub Foo()
Dim y = [|Me.c|].c.c
End Sub
End Class
"
Dim expected = "
Class C
Dim c As C
Sub Foo()
Dim {|Rename:c1|} As C = Me.c
Dim y = c1.c.c
End Sub
End Class
"
Await TestInRegularAndScriptAsync(source, expected, index:=1)
End Function
<Fact, Trait(Traits.Feature, Traits.Features.CodeActionsIntroduceVariable)>
<WorkItem(30207, "https://github.com/dotnet/roslyn/issues/30207")>
Public Async Function TestExpressionOfUndeclaredType() As Task
Dim source = "
Class C
Sub Test
Dim array As A() = [|A|].Bar()
End Sub
End Class"
Await TestMissingAsync(source)
End Function
End Class
End Namespace
| DustinCampbell/roslyn | src/EditorFeatures/VisualBasicTest/CodeActions/IntroduceVariable/IntroduceVariableTests.vb | Visual Basic | apache-2.0 | 96,654 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import atexit
import json
import logging
import os
import subprocess
import time
from nose.tools import assert_true, assert_false
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from desktop.lib.django_test_util import make_logged_in_client
from desktop.lib.paths import get_run_root
from desktop.lib.python_util import find_unused_port
from desktop.lib.security_util import get_localhost_name
from desktop.lib.test_utils import add_to_group, grant_access
from hadoop import pseudo_hdfs4
from hadoop.pseudo_hdfs4 import is_live_cluster, get_db_prefix
import beeswax.conf
from beeswax.server.dbms import get_query_server_config
from beeswax.server import dbms
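# Shared HiveServer2 test fixture state: the server port and lazily started server handles.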
HIVE_SERVER_TEST_PORT = find_unused_port()
_INITIALIZED = False
_SHARED_HIVE_SERVER_PROCESS = None
_SHARED_HIVE_SERVER = None
_SHARED_HIVE_SERVER_CLOSER = None
LOG = logging.getLogger(__name__)
def _start_server(cluster):
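  """Start a HiveServer2 process for the given test cluster and return its Popen handle."""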
args = [beeswax.conf.HIVE_SERVER_BIN.get()]
env = cluster._mr2_env.copy()
hadoop_cp_proc = subprocess.Popen(args=[get_run_root('ext/hadoop/hadoop') + '/bin/hadoop', 'classpath'], env=env, cwd=cluster._tmpdir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
hadoop_cp_proc.wait()
hadoop_cp = hadoop_cp_proc.stdout.read().strip()
env.update({
'HADOOP_HOME': get_run_root('ext/hadoop/hadoop'), # Used only by Hive for some reason
'HIVE_CONF_DIR': beeswax.conf.HIVE_CONF_DIR.get(),
'HIVE_SERVER2_THRIFT_PORT': str(HIVE_SERVER_TEST_PORT),
'HADOOP_MAPRED_HOME': get_run_root('ext/hadoop/hadoop') + '/share/hadoop/mapreduce',
    # These jars are symlinked by the Jenkins script.
    # If classes are missing when HiveServer2 boots, check here.
'AUX_CLASSPATH':
get_run_root('ext/hadoop/hadoop') + '/share/hadoop/hdfs/hadoop-hdfs.jar'
+ ':' +
get_run_root('ext/hadoop/hadoop') + '/share/hadoop/common/lib/hadoop-auth.jar'
+ ':' +
get_run_root('ext/hadoop/hadoop') + '/share/hadoop/common/hadoop-common.jar'
+ ':' +
get_run_root('ext/hadoop/hadoop') + '/share/hadoop/mapreduce/hadoop-mapreduce-client-core.jar'
,
'HADOOP_CLASSPATH': hadoop_cp,
})
if os.getenv("JAVA_HOME"):
env["JAVA_HOME"] = os.getenv("JAVA_HOME")
LOG.info("Executing %s, env %s, cwd %s" % (repr(args), repr(env), cluster._tmpdir))
return subprocess.Popen(args=args, env=env, cwd=cluster._tmpdir, stdin=subprocess.PIPE)
def get_shared_beeswax_server(db_name='default'):
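  """Return (cluster, shutdown_callable) for the shared HiveServer2 used by tests, starting a local instance first unless running against a live cluster and waiting up to 30 seconds for a session to open."""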
global _SHARED_HIVE_SERVER
global _SHARED_HIVE_SERVER_CLOSER
if _SHARED_HIVE_SERVER is None:
cluster = pseudo_hdfs4.shared_cluster()
if is_live_cluster():
def s():
pass
else:
s = _start_mini_hs2(cluster)
start = time.time()
started = False
sleep = 1
make_logged_in_client()
user = User.objects.get(username='test')
query_server = get_query_server_config()
db = dbms.get(user, query_server)
while not started and time.time() - start <= 30:
try:
db.open_session(user)
started = True
break
      except Exception as e:
LOG.info('HiveServer2 server could not be found after: %s' % e)
time.sleep(sleep)
if not started:
raise Exception("Server took too long to come up.")
_SHARED_HIVE_SERVER, _SHARED_HIVE_SERVER_CLOSER = cluster, s
return _SHARED_HIVE_SERVER, _SHARED_HIVE_SERVER_CLOSER
def _start_mini_hs2(cluster):
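  """Configure and launch a local HiveServer2 for tests; return a callable that undoes the config overrides and stops the cluster."""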
HIVE_CONF = cluster.hadoop_conf_dir
finish = (
beeswax.conf.HIVE_SERVER_HOST.set_for_testing(get_localhost_name()),
beeswax.conf.HIVE_SERVER_PORT.set_for_testing(HIVE_SERVER_TEST_PORT),
beeswax.conf.HIVE_SERVER_BIN.set_for_testing(get_run_root('ext/hive/hive') + '/bin/hiveserver2'),
beeswax.conf.HIVE_CONF_DIR.set_for_testing(HIVE_CONF)
)
default_xml = """<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
<property>
<name>javax.jdo.option.ConnectionURL</name>
<value>jdbc:derby:;databaseName=%(root)s/metastore_db;create=true</value>
<description>JDBC connect string for a JDBC metastore</description>
</property>
<property>
<name>hive.server2.enable.impersonation</name>
<value>false</value>
</property>
<property>
<name>hive.querylog.location</name>
<value>%(querylog)s</value>
</property>
</configuration>
""" % {'root': cluster._tmpdir, 'querylog': cluster.log_dir + '/hive'}
  with open(HIVE_CONF + '/hive-site.xml', 'w') as hive_site:
    hive_site.write(default_xml)
global _SHARED_HIVE_SERVER_PROCESS
if _SHARED_HIVE_SERVER_PROCESS is None:
p = _start_server(cluster)
LOG.info("started")
cluster.fs.do_as_superuser(cluster.fs.chmod, '/tmp', 01777)
_SHARED_HIVE_SERVER_PROCESS = p
def kill():
LOG.info("Killing server (pid %d)." % p.pid)
os.kill(p.pid, 9)
p.wait()
atexit.register(kill)
def s():
for f in finish:
f()
cluster.stop()
return s
def wait_for_query_to_finish(client, response, max=60.0):
  # Takes an async execute_query() API response as input.
start = time.time()
sleep_time = 0.05
  if is_finished(response):  # already finished here means the query failed at submission
return response
content = json.loads(response.content)
watch_url = content['watch_url']
response = client.get(watch_url, follow=True)
# Loop and check status
while not is_finished(response):
time.sleep(sleep_time)
sleep_time = min(1.0, sleep_time * 2) # Capped exponential
if (time.time() - start) > max:
message = "Query took too long! %d seconds" % (time.time() - start)
LOG.warning(message)
raise Exception(message)
response = client.get(watch_url, follow=True)
return response
def is_finished(response):
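  """Return True when the query status response reports an error, a success or a failure."""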
status = json.loads(response.content)
return 'error' in status or status.get('isSuccess') or status.get('isFailure')
def fetch_query_result_data(client, status_response, n=0, server_name='beeswax'):
  # Takes a wait_for_query_to_finish() response as input.
status = json.loads(status_response.content)
response = client.get("/%(server_name)s/results/%(id)s/%(n)s?format=json" % {'server_name': server_name, 'id': status.get('id'), 'n': n})
content = json.loads(response.content)
return content
def make_query(client, query, submission_type="Execute",
udfs=None, settings=None, resources=None,
wait=False, name=None, desc=None, local=True,
is_parameterized=True, max=60.0, database='default', email_notify=False, params=None, server_name='beeswax', **kwargs):
"""
Prepares arguments for the execute view.
If wait is True, waits for query to finish as well.
"""
if settings is None:
settings = []
if params is None:
params = []
if local:
# Tests run faster if not run against the real cluster.
settings.append(('mapreduce.framework.name', 'local'))
# Prepares arguments for the execute view.
parameters = {
'query-query': query,
'query-name': name if name else '',
'query-desc': desc if desc else '',
'query-is_parameterized': is_parameterized and "on",
'query-database': database,
'query-email_notify': email_notify and "on",
}
if submission_type == 'Execute':
parameters['button-submit'] = 'Whatever'
elif submission_type == 'Explain':
parameters['button-explain'] = 'Whatever'
elif submission_type == 'Save':
parameters['saveform-save'] = 'True'
if name:
parameters['saveform-name'] = name
if desc:
parameters['saveform-desc'] = desc
parameters["functions-next_form_id"] = str(len(udfs or []))
for i, udf_pair in enumerate(udfs or []):
name, klass = udf_pair
parameters["functions-%d-name" % i] = name
parameters["functions-%d-class_name" % i] = klass
parameters["functions-%d-_exists" % i] = 'True'
parameters["settings-next_form_id"] = str(len(settings))
for i, settings_pair in enumerate(settings or []):
key, value = settings_pair
parameters["settings-%d-key" % i] = str(key)
parameters["settings-%d-value" % i] = str(value)
parameters["settings-%d-_exists" % i] = 'True'
parameters["file_resources-next_form_id"] = str(len(resources or []))
for i, resources_pair in enumerate(resources or []):
type, path = resources_pair
parameters["file_resources-%d-type" % i] = str(type)
parameters["file_resources-%d-path" % i] = str(path)
parameters["file_resources-%d-_exists" % i] = 'True'
for name, value in params:
parameters["parameterization-%s" % name] = value
kwargs.setdefault('follow', True)
execute_url = reverse("%(server_name)s:api_execute" % {'server_name': server_name})
if submission_type == 'Explain':
execute_url += "?explain=true"
if submission_type == 'Save':
execute_url = reverse("%(server_name)s:api_save_design" % {'server_name': server_name})
response = client.post(execute_url, parameters, **kwargs)
if wait:
return wait_for_query_to_finish(client, response, max)
return response
def verify_history(client, fragment, design=None, reverse=False, server_name='beeswax'):
"""
Verify that the query fragment and/or design are in the query history.
If reverse is True, verify the opposite.
Return the size of the history; -1 if we fail to determine it.
"""
resp = client.get('/%(server_name)s/query_history' % {'server_name': server_name})
my_assert = reverse and assert_false or assert_true
my_assert(fragment in resp.content, resp.content)
if design:
my_assert(design in resp.content, resp.content)
if resp.context:
try:
return len(resp.context['page'].object_list)
except KeyError:
pass
LOG.warn('Cannot find history size. Response context clobbered')
return -1
class BeeswaxSampleProvider(object):
"""
Setup the test db and install sample data
"""
@classmethod
def setup_class(cls):
cls.db_name = get_db_prefix(name='hive')
cls.cluster, shutdown = get_shared_beeswax_server(cls.db_name)
cls.client = make_logged_in_client(username='test', is_superuser=False)
add_to_group('test')
grant_access("test", "test", "beeswax")
    # Store the shutdown function in a list so it is not converted into an unbound method when assigned to the class.
cls.shutdown = [ shutdown ]
cls.init_beeswax_db()
@classmethod
def teardown_class(cls):
if is_live_cluster():
# Delete test DB and tables
client = make_logged_in_client()
user = User.objects.get(username='test')
query_server = get_query_server_config()
db = dbms.get(user, query_server)
for db_name in [cls.db_name, '%s_other' % cls.db_name]:
databases = db.get_databases()
if db_name in databases:
tables = db.get_tables(database=db_name)
for table in tables:
make_query(client, 'DROP TABLE IF EXISTS `%(db)s`.`%(table)s`' % {'db': db_name, 'table': table}, wait=True)
make_query(client, 'DROP VIEW IF EXISTS `%(db)s`.`myview`' % {'db': db_name}, wait=True)
make_query(client, 'DROP DATABASE IF EXISTS %(db)s' % {'db': db_name}, wait=True)
# Check the cleanup
databases = db.get_databases()
assert_false(db_name in databases)
@classmethod
def init_beeswax_db(cls):
"""
Install the common test tables (only once)
"""
global _INITIALIZED
if _INITIALIZED:
return
make_query(cls.client, 'CREATE DATABASE IF NOT EXISTS %(db)s' % {'db': cls.db_name}, wait=True)
make_query(cls.client, 'CREATE DATABASE IF NOT EXISTS %(db)s_other' % {'db': cls.db_name}, wait=True)
data_file = cls.cluster.fs_prefix + u'/beeswax/sample_data_échantillon_%d.tsv'
# Create a "test_partitions" table.
CREATE_PARTITIONED_TABLE = """
CREATE TABLE `%(db)s`.`test_partitions` (foo INT, bar STRING)
PARTITIONED BY (baz STRING, boom STRING)
ROW FORMAT DELIMITED
FIELDS TERMINATED BY '\t'
LINES TERMINATED BY '\n'
""" % {'db': cls.db_name}
make_query(cls.client, CREATE_PARTITIONED_TABLE, wait=True)
cls._make_data_file(data_file % 1)
LOAD_DATA = """
LOAD DATA INPATH '%(data_file)s'
OVERWRITE INTO TABLE `%(db)s`.`test_partitions`
PARTITION (baz='baz_one', boom='boom_two')
""" % {'db': cls.db_name, 'data_file': data_file % 1}
make_query(cls.client, LOAD_DATA, wait=True, local=False)
# Insert additional partition data into "test_partitions" table
ADD_PARTITION = """
ALTER TABLE `%(db)s`.`test_partitions` ADD PARTITION(baz='baz_foo', boom='boom_bar') LOCATION '%(fs_prefix)s/baz_foo/boom_bar'
""" % {'db': cls.db_name, 'fs_prefix': cls.cluster.fs_prefix}
make_query(cls.client, ADD_PARTITION, wait=True, local=False)
# Create a bunch of other tables
CREATE_TABLE = """
CREATE TABLE `%(db)s`.`%(name)s` (foo INT, bar STRING)
COMMENT "%(comment)s"
ROW FORMAT DELIMITED
FIELDS TERMINATED BY '\t'
LINES TERMINATED BY '\n'
"""
# Create a "test" table.
table_info = {'db': cls.db_name, 'name': 'test', 'comment': 'Test table'}
cls._make_data_file(data_file % 2)
cls._make_table(table_info['name'], CREATE_TABLE % table_info, data_file % 2)
if is_live_cluster():
LOG.warn('HUE-2884: We cannot create Hive UTF8 tables when live cluster testing at the moment')
else:
# Create a "test_utf8" table.
table_info = {'db': cls.db_name, 'name': 'test_utf8', 'comment': cls.get_i18n_table_comment()}
cls._make_i18n_data_file(data_file % 3, 'utf-8')
cls._make_table(table_info['name'], CREATE_TABLE % table_info, data_file % 3)
# Create a "test_latin1" table.
table_info = {'db': cls.db_name, 'name': 'test_latin1', 'comment': cls.get_i18n_table_comment()}
cls._make_i18n_data_file(data_file % 4, 'latin1')
cls._make_table(table_info['name'], CREATE_TABLE % table_info, data_file % 4)
# Create a "myview" view.
make_query(cls.client, "CREATE VIEW `%(db)s`.`myview` (foo, bar) as SELECT * FROM `%(db)s`.`test`" % {'db': cls.db_name}, wait=True)
_INITIALIZED = True
@staticmethod
def get_i18n_table_comment():
return u'en-hello pt-Olá ch-你好 ko-안녕 ru-Здравствуйте'
@classmethod
def _make_table(cls, table_name, create_ddl, filename):
make_query(cls.client, create_ddl, wait=True, database=cls.db_name)
LOAD_DATA = """
LOAD DATA INPATH '%(filename)s' OVERWRITE INTO TABLE `%(db)s`.`%(table_name)s`
""" % {'filename': filename, 'table_name': table_name, 'db': cls.db_name}
make_query(cls.client, LOAD_DATA, wait=True, local=False, database=cls.db_name)
@classmethod
def _make_data_file(cls, filename):
"""
Create data to be loaded into tables.
Data contains two columns of:
<num> 0x<hex_num>
where <num> goes from 0 to 255 inclusive.
"""
cls.cluster.fs.setuser(cls.cluster.superuser)
f = cls.cluster.fs.open(filename, "w")
for x in xrange(256):
f.write("%d\t0x%x\n" % (x, x))
f.close()
@classmethod
def _make_i18n_data_file(cls, filename, encoding):
"""
Create i18n data to be loaded into tables.
Data contains two columns of:
<num> <unichr(num)>
where <num> goes from 0 to 255 inclusive.
"""
cls.cluster.fs.setuser(cls.cluster.superuser)
f = cls.cluster.fs.open(filename, "w")
for x in xrange(256):
f.write("%d\t%s\n" % (x, unichr(x).encode(encoding)))
f.close()
@classmethod
def _make_custom_data_file(cls, filename, data):
f = cls.cluster.fs.open(filename, "w")
for x in data:
f.write("%s\n" % x)
f.close()
| rahul67/hue | apps/beeswax/src/beeswax/test_base.py | Python | apache-2.0 | 16,313 |
// Copyright 2017 The Draco Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#include "draco/metadata/metadata_encoder.h"
#include "draco/core/decoder_buffer.h"
#include "draco/core/draco_test_base.h"
#include "draco/core/encoder_buffer.h"
#include "draco/metadata/metadata.h"
#include "draco/metadata/metadata_decoder.h"
namespace {
class MetadataEncoderTest : public ::testing::Test {
protected:
MetadataEncoderTest() {}
void TestEncodingMetadata() {
ASSERT_TRUE(encoder.EncodeMetadata(&encoder_buffer, &metadata));
draco::Metadata decoded_metadata;
decoder_buffer.Init(encoder_buffer.data(), encoder_buffer.size());
ASSERT_TRUE(decoder.DecodeMetadata(&decoder_buffer, &decoded_metadata));
CheckMetadatasAreEqual(metadata, decoded_metadata);
}
void TestEncodingGeometryMetadata() {
ASSERT_TRUE(
encoder.EncodeGeometryMetadata(&encoder_buffer, &geometry_metadata));
draco::GeometryMetadata decoded_metadata;
decoder_buffer.Init(encoder_buffer.data(), encoder_buffer.size());
ASSERT_TRUE(
decoder.DecodeGeometryMetadata(&decoder_buffer, &decoded_metadata));
CheckGeometryMetadatasAreEqual(geometry_metadata, decoded_metadata);
}
void CheckBlobOfDataAreEqual(const std::vector<uint8_t> &data0,
const std::vector<uint8_t> &data1) {
ASSERT_EQ(data0.size(), data1.size());
for (int i = 0; i < data0.size(); ++i) {
ASSERT_EQ(data0[i], data1[i]);
}
}
void CheckGeometryMetadatasAreEqual(
const draco::GeometryMetadata &metadata0,
const draco::GeometryMetadata &metadata1) {
ASSERT_EQ(metadata0.attribute_metadatas().size(),
metadata1.attribute_metadatas().size());
const std::vector<std::unique_ptr<draco::AttributeMetadata>>
&att_metadatas0 = metadata0.attribute_metadatas();
const std::vector<std::unique_ptr<draco::AttributeMetadata>>
&att_metadatas1 = metadata1.attribute_metadatas();
// Compare each attribute metadata.
for (int i = 0; i < metadata0.attribute_metadatas().size(); ++i) {
CheckMetadatasAreEqual(
static_cast<const draco::Metadata &>(*att_metadatas0[i]),
static_cast<const draco::Metadata &>(*att_metadatas1[i]));
}
// Compare entries and sub metadata.
CheckMetadatasAreEqual(static_cast<const draco::Metadata &>(metadata0),
static_cast<const draco::Metadata &>(metadata1));
}
void CheckMetadatasAreEqual(const draco::Metadata &metadata0,
const draco::Metadata &metadata1) {
ASSERT_EQ(metadata0.num_entries(), metadata1.num_entries());
const std::map<std::string, draco::EntryValue> &entries0 =
metadata0.entries();
const std::map<std::string, draco::EntryValue> &entries1 =
metadata1.entries();
for (const auto &entry : entries0) {
const std::string &entry_name = entry.first;
const std::vector<uint8_t> &data0 = entry.second.data();
const auto entry1_ptr = entries1.find(entry_name);
ASSERT_NE(entry1_ptr, entries1.end());
const std::vector<uint8_t> &data1 = entry1_ptr->second.data();
CheckBlobOfDataAreEqual(data0, data1);
}
// Check nested metadata.
ASSERT_EQ(metadata0.sub_metadatas().size(),
metadata1.sub_metadatas().size());
const std::map<std::string, std::unique_ptr<draco::Metadata>>
&sub_metadatas0 = metadata0.sub_metadatas();
// Encode each sub-metadata
for (auto &&sub_metadata_entry0 : sub_metadatas0) {
const auto sub_metadata_ptr1 =
metadata1.GetSubMetadata(sub_metadata_entry0.first);
ASSERT_NE(sub_metadata_ptr1, nullptr);
CheckMetadatasAreEqual(*sub_metadata_entry0.second, *sub_metadata_ptr1);
}
}
draco::MetadataEncoder encoder;
draco::MetadataDecoder decoder;
draco::EncoderBuffer encoder_buffer;
draco::DecoderBuffer decoder_buffer;
draco::Metadata metadata;
draco::GeometryMetadata geometry_metadata;
};
TEST_F(MetadataEncoderTest, TestSingleEntry) {
metadata.AddEntryInt("int", 100);
ASSERT_EQ(metadata.num_entries(), 1);
TestEncodingMetadata();
}
TEST_F(MetadataEncoderTest, TestMultipleEntries) {
metadata.AddEntryInt("int", 100);
metadata.AddEntryDouble("double", 1.234);
const std::string entry_value = "test string entry";
metadata.AddEntryString("string", entry_value);
ASSERT_EQ(metadata.num_entries(), 3);
TestEncodingMetadata();
}
TEST_F(MetadataEncoderTest, TestEncodingArrayEntries) {
std::vector<int32_t> int_array({1, 2, 3});
metadata.AddEntryIntArray("int_array", int_array);
std::vector<double> double_array({0.1, 0.2, 0.3});
metadata.AddEntryDoubleArray("double_array", double_array);
ASSERT_EQ(metadata.num_entries(), 2);
TestEncodingMetadata();
}
TEST_F(MetadataEncoderTest, TestEncodingBinaryEntry) {
const std::vector<uint8_t> binarydata({0x1, 0x2, 0x3, 0x4});
metadata.AddEntryBinary("binary_data", binarydata);
TestEncodingMetadata();
}
TEST_F(MetadataEncoderTest, TestEncodingNestedMetadata) {
metadata.AddEntryDouble("double", 1.234);
std::unique_ptr<draco::Metadata> sub_metadata =
std::unique_ptr<draco::Metadata>(new draco::Metadata());
sub_metadata->AddEntryInt("int", 100);
metadata.AddSubMetadata("sub0", std::move(sub_metadata));
TestEncodingMetadata();
}
TEST_F(MetadataEncoderTest, TestEncodingGeometryMetadata) {
std::unique_ptr<draco::AttributeMetadata> att_metadata =
std::unique_ptr<draco::AttributeMetadata>(new draco::AttributeMetadata);
att_metadata->AddEntryInt("int", 100);
att_metadata->AddEntryString("name", "pos");
ASSERT_TRUE(geometry_metadata.AddAttributeMetadata(std::move(att_metadata)));
TestEncodingGeometryMetadata();
}
} // namespace
| google/filament | third_party/draco/src/draco/metadata/metadata_encoder_test.cc | C++ | apache-2.0 | 6,286 |
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=US-ASCII">
<title>posix::basic_descriptor::close</title>
<link rel="stylesheet" href="../../../../../doc/src/boostbook.css" type="text/css">
<meta name="generator" content="DocBook XSL Stylesheets V1.78.1">
<link rel="home" href="../../../boost_asio.html" title="Boost.Asio">
<link rel="up" href="../posix__basic_descriptor.html" title="posix::basic_descriptor">
<link rel="prev" href="cancel/overload2.html" title="posix::basic_descriptor::cancel (2 of 2 overloads)">
<link rel="next" href="close/overload1.html" title="posix::basic_descriptor::close (1 of 2 overloads)">
</head>
<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
<table cellpadding="2" width="100%"><tr>
<td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../../boost.png"></td>
<td align="center"><a href="../../../../../index.html">Home</a></td>
<td align="center"><a href="../../../../../libs/libraries.htm">Libraries</a></td>
<td align="center"><a href="http://www.boost.org/users/people.html">People</a></td>
<td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td>
<td align="center"><a href="../../../../../more/index.htm">More</a></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="cancel/overload2.html"><img src="../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../posix__basic_descriptor.html"><img src="../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../boost_asio.html"><img src="../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="close/overload1.html"><img src="../../../../../doc/src/images/next.png" alt="Next"></a>
</div>
<div class="section">
<div class="titlepage"><div><div><h4 class="title">
<a name="boost_asio.reference.posix__basic_descriptor.close"></a><a class="link" href="close.html" title="posix::basic_descriptor::close">posix::basic_descriptor::close</a>
</h4></div></div></div>
<p>
<a class="indexterm" name="idp182941680"></a>
Close the descriptor.
</p>
<pre class="programlisting"><span class="keyword">void</span> <a class="link" href="close/overload1.html" title="posix::basic_descriptor::close (1 of 2 overloads)">close</a><span class="special">();</span>
<span class="emphasis"><em>» <a class="link" href="close/overload1.html" title="posix::basic_descriptor::close (1 of 2 overloads)">more...</a></em></span>
<span class="identifier">boost</span><span class="special">::</span><span class="identifier">system</span><span class="special">::</span><span class="identifier">error_code</span> <a class="link" href="close/overload2.html" title="posix::basic_descriptor::close (2 of 2 overloads)">close</a><span class="special">(</span>
<span class="identifier">boost</span><span class="special">::</span><span class="identifier">system</span><span class="special">::</span><span class="identifier">error_code</span> <span class="special">&</span> <span class="identifier">ec</span><span class="special">);</span>
<span class="emphasis"><em>» <a class="link" href="close/overload2.html" title="posix::basic_descriptor::close (2 of 2 overloads)">more...</a></em></span>
</pre>
</div>
<table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr>
<td align="left"></td>
<td align="right"><div class="copyright-footer">Copyright © 2003-2014 Christopher M. Kohlhoff<p>
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>)
</p>
</div></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="cancel/overload2.html"><img src="../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../posix__basic_descriptor.html"><img src="../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../boost_asio.html"><img src="../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="close/overload1.html"><img src="../../../../../doc/src/images/next.png" alt="Next"></a>
</div>
</body>
</html>
| biospi/seamass-windeps | src/boost_1_57_0/doc/html/boost_asio/reference/posix__basic_descriptor/close.html | HTML | apache-2.0 | 4,285 |
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
******************************************************************************/
package org.apache.sling.scripting.sightly.impl.engine.extension.use;
import java.util.ArrayList;
import java.util.Collections;
import java.util.ListIterator;
import java.util.Map;
import java.util.concurrent.ConcurrentSkipListMap;
import javax.script.Bindings;
import javax.script.SimpleBindings;
import org.apache.commons.lang3.StringUtils;
import org.apache.sling.scripting.sightly.SightlyException;
import org.apache.sling.scripting.sightly.compiler.RuntimeFunction;
import org.apache.sling.scripting.sightly.extension.RuntimeExtension;
import org.apache.sling.scripting.sightly.impl.engine.extension.ExtensionUtils;
import org.apache.sling.scripting.sightly.render.RenderContext;
import org.apache.sling.scripting.sightly.render.RuntimeObjectModel;
import org.apache.sling.scripting.sightly.use.ProviderOutcome;
import org.apache.sling.scripting.sightly.use.UseProvider;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceReference;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Reference;
import org.osgi.service.component.annotations.ReferenceCardinality;
import org.osgi.service.component.annotations.ReferencePolicy;
/**
* Runtime extension for the USE plugin
*/
@Component(
service = RuntimeExtension.class,
property = {
RuntimeExtension.NAME + "=" + RuntimeFunction.USE
}
)
public class UseRuntimeExtension implements RuntimeExtension {
private final Map<ServiceReference, UseProvider> providersMap = new ConcurrentSkipListMap<>();
@Override
public Object call(final RenderContext renderContext, Object... arguments) {
ExtensionUtils.checkArgumentCount(RuntimeFunction.USE, arguments, 2);
RuntimeObjectModel runtimeObjectModel = renderContext.getObjectModel();
String identifier = runtimeObjectModel.toString(arguments[0]);
if (StringUtils.isEmpty(identifier)) {
throw new SightlyException("data-sly-use needs to be passed an identifier");
}
Map<String, Object> useArgumentsMap = runtimeObjectModel.toMap(arguments[1]);
Bindings useArguments = new SimpleBindings(Collections.unmodifiableMap(useArgumentsMap));
ArrayList<UseProvider> providers = new ArrayList<>(providersMap.values());
ListIterator<UseProvider> iterator = providers.listIterator(providers.size());
while (iterator.hasPrevious()) {
UseProvider provider = iterator.previous();
ProviderOutcome outcome = provider.provide(identifier, renderContext, useArguments);
Throwable failureCause;
if (outcome.isSuccess()) {
return outcome.getResult();
} else if ((failureCause = outcome.getCause()) != null) {
throw new SightlyException("Identifier " + identifier + " cannot be correctly instantiated by the Use API", failureCause);
}
}
throw new SightlyException("No use provider could resolve identifier " + identifier);
}
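    // Illustrative only (not part of the original source): a template declaration such as
    //   <div data-sly-use.model="com.example.MyModel"></div>
    // is compiled into a runtime call roughly equivalent to
    //   useExtension.call(renderContext, "com.example.MyModel", argumentsMap);
    // where "com.example.MyModel" and "argumentsMap" are hypothetical placeholders.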
// OSGi ################################################################################################################################
@Reference(
policy = ReferencePolicy.DYNAMIC,
service = UseProvider.class,
cardinality = ReferenceCardinality.MULTIPLE
)
private void bindUseProvider(ServiceReference serviceReference) {
BundleContext bundleContext = serviceReference.getBundle().getBundleContext();
providersMap.put(serviceReference, (UseProvider) bundleContext.getService(serviceReference));
}
private void unbindUseProvider(ServiceReference serviceReference) {
providersMap.remove(serviceReference);
}
}
| tmaret/sling | bundles/scripting/sightly/engine/src/main/java/org/apache/sling/scripting/sightly/impl/engine/extension/use/UseRuntimeExtension.java | Java | apache-2.0 | 4,708 |
package org.switchyard.component.itests.camelcontext.component.xyzorder;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.Generated;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
@JsonInclude(JsonInclude.Include.NON_NULL)
@Generated("org.jsonschema2pojo")
@JsonPropertyOrder({
"custId",
"priority",
"orderId",
"origin",
"approvalCode",
"lineItems"
})
public class XyzOrder {
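    // Illustrative JSON payload (assumed from the annotated fields below, not part of the
    // original file); Jackson binds a document like this to the POJO via @JsonProperty:
    //   {"custId":"C-1","priority":"HIGH","orderId":"O-42","origin":"web",
    //    "approvalCode":"A-7","lineItems":[{"...":"..."}]}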
@JsonProperty("custId")
private String custId;
@JsonProperty("priority")
private String priority;
@JsonProperty("orderId")
private String orderId;
@JsonProperty("origin")
private String origin;
@JsonProperty("approvalCode")
private String approvalCode;
@JsonProperty("lineItems")
private List<LineItem> lineItems = new ArrayList<LineItem>();
/**
*
* @return
* The custId
*/
@JsonProperty("custId")
public String getCustId() {
return custId;
}
/**
*
* @param custId
* The custId
*/
@JsonProperty("custId")
public void setCustId(String custId) {
this.custId = custId;
}
/**
*
* @return
* The priority
*/
@JsonProperty("priority")
public String getPriority() {
return priority;
}
/**
*
* @param priority
* The priority
*/
@JsonProperty("priority")
public void setPriority(String priority) {
this.priority = priority;
}
/**
*
* @return
* The orderId
*/
@JsonProperty("orderId")
public String getOrderId() {
return orderId;
}
/**
*
* @param orderId
* The orderId
*/
@JsonProperty("orderId")
public void setOrderId(String orderId) {
this.orderId = orderId;
}
/**
*
* @return
* The origin
*/
@JsonProperty("origin")
public String getOrigin() {
return origin;
}
/**
*
* @param origin
* The origin
*/
@JsonProperty("origin")
public void setOrigin(String origin) {
this.origin = origin;
}
/**
*
* @return
* The approvalCode
*/
@JsonProperty("approvalCode")
public String getApprovalCode() {
return approvalCode;
}
/**
*
* @param approvalCode
* The approvalCode
*/
@JsonProperty("origin")
public void setApprovalCode(String approvalCode) {
this.approvalCode = approvalCode;
}
/**
*
* @return
* The lineItems
*/
@JsonProperty("lineItems")
public List<LineItem> getLineItems() {
return lineItems;
}
/**
*
* @param lineItems
* The lineItems
*/
@JsonProperty("lineItems")
public void setLineItems(List<LineItem> lineItems) {
this.lineItems = lineItems;
}
}
| tadayosi/switchyard | components/itests/src/test/java/org/switchyard/component/itests/camelcontext/component/xyzorder/XyzOrder.java | Java | apache-2.0 | 3,040 |
#
# Copyright 2013-2014, Seth Vargo <sethvargo@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class Chef
module Sugar
module Ruby
extend self
#
# Determine if the current Ruby version is 2.0.
#
# @return [Boolean]
#
def ruby_20?(node)
version = Gem::Version.new(node['languages']['ruby']['version'])
Gem::Requirement.new('~> 2.0.0').satisfied_by?(version)
end
#
# Determine if the current Ruby version is 1.9.
#
# @return [Boolean]
#
def ruby_19?(node)
version = Gem::Version.new(node['languages']['ruby']['version'])
Gem::Requirement.new('~> 1.9.0').satisfied_by?(version)
end
end
module DSL
# @see Chef::Sugar::Ruby#ruby_20?
def ruby_20?; Chef::Sugar::Ruby.ruby_20?(node); end
# @see Chef::Sugar::Ruby#ruby_19?
def ruby_19?; Chef::Sugar::Ruby.ruby_19?(node); end
end
end
end
| xianfengyuan/chef_chef-sugar | lib/chef/sugar/ruby.rb | Ruby | apache-2.0 | 1,460 |
/*
* Copyright (c) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.tvleanback.data;
import android.app.IntentService;
import android.content.ContentValues;
import android.content.Intent;
import android.util.Log;
import com.example.android.tvleanback.R;
import org.json.JSONException;
import java.io.IOException;
import java.util.List;
/**
* FetchVideoService is responsible for fetching the videos from the Internet and inserting the
* results into a local SQLite database.
*/
public class FetchVideoService extends IntentService {
private static final String TAG = "FetchVideoService";
/**
* Creates an IntentService with a default name for the worker thread.
*/
public FetchVideoService() {
super(TAG);
}
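    // Illustrative usage (not part of the original file): callers start this IntentService
    // with an explicit Intent; "context" is a hypothetical Context reference.
    //
    //   context.startService(new Intent(context, FetchVideoService.class));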
@Override
protected void onHandleIntent(Intent workIntent) {
VideoDbBuilder builder = new VideoDbBuilder(getApplicationContext());
try {
List<ContentValues> contentValuesList =
builder.fetch(getResources().getString(R.string.catalog_url));
ContentValues[] downloadedVideoContentValues =
contentValuesList.toArray(new ContentValues[contentValuesList.size()]);
getApplicationContext().getContentResolver().bulkInsert(VideoContract.VideoEntry.CONTENT_URI,
downloadedVideoContentValues);
} catch (IOException | JSONException e) {
Log.e(TAG, "Error occurred in downloading videos");
e.printStackTrace();
}
}
}
| googlearchive/androidtv-Leanback | app/src/main/java/com/example/android/tvleanback/data/FetchVideoService.java | Java | apache-2.0 | 2,103 |
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.transitions.everywhere;
import android.content.Context;
import android.util.SparseArray;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
/**
* A scene represents the collection of values that various properties in the
* View hierarchy will have when the scene is applied. A Scene can be
* configured to automatically run a Transition when it is applied, which will
* animate the various property changes that take place during the
* scene change.
*/
public final class Scene {
private Context mContext;
private int mLayoutId = -1;
private ViewGroup mSceneRoot;
private View mLayout; // alternative to layoutId
Runnable mEnterAction, mExitAction;
/**
* Returns a Scene described by the resource file associated with the given
* <code>layoutId</code> parameter. If such a Scene has already been created,
* that same Scene will be returned. This caching of layoutId-based scenes enables
* sharing of common scenes between those created in code and those referenced
* by {@link TransitionManager} XML resource files.
*
* @param sceneRoot The root of the hierarchy in which scene changes
* and transitions will take place.
* @param layoutId The id of a standard layout resource file.
* @param context The context used in the process of inflating
* the layout resource.
* @return
*/
public static Scene getSceneForLayout(ViewGroup sceneRoot, int layoutId, Context context) {
SparseArray<Scene> scenes = (SparseArray<Scene>) sceneRoot.getTag(R.id.scene_layoutid_cache);
if (scenes == null) {
scenes = new SparseArray<Scene>();
sceneRoot.setTag(R.id.scene_layoutid_cache, scenes);
}
Scene scene = scenes.get(layoutId);
if (scene != null) {
return scene;
} else {
scene = new Scene(sceneRoot, layoutId, context);
scenes.put(layoutId, scene);
return scene;
}
}
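    // A minimal usage sketch (not part of the original file). "sceneRoot", "context" and
    // R.layout.scene_a are hypothetical, and TransitionManager.go is assumed to mirror the
    // platform android.transition API:
    //
    //   Scene sceneA = Scene.getSceneForLayout(sceneRoot, R.layout.scene_a, context);
    //   TransitionManager.go(sceneA); // animates the change into sceneA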
/**
* Constructs a Scene with no information about how values will change
* when this scene is applied. This constructor might be used when
* a Scene is created with the intention of being dynamically configured,
* through setting {@link #setEnterAction(Runnable)} and possibly
* {@link #setExitAction(Runnable)}.
*
* @param sceneRoot The root of the hierarchy in which scene changes
* and transitions will take place.
*/
public Scene(ViewGroup sceneRoot) {
mSceneRoot = sceneRoot;
}
/**
* Constructs a Scene which, when entered, will remove any
* children from the sceneRoot container and will inflate and add
* the hierarchy specified by the layoutId resource file.
* <p/>
* <p>This method is hidden because layoutId-based scenes should be
     * created by the caching factory method {@link android.transitions.everywhere.Scene#getSceneForLayout(ViewGroup, int, Context)}.</p>
*
* @param sceneRoot The root of the hierarchy in which scene changes
* and transitions will take place.
* @param layoutId The id of a resource file that defines the view
* hierarchy of this scene.
* @param context The context used in the process of inflating
* the layout resource.
*/
private Scene(ViewGroup sceneRoot, int layoutId, Context context) {
mContext = context;
mSceneRoot = sceneRoot;
mLayoutId = layoutId;
}
/**
* Constructs a Scene which, when entered, will remove any
* children from the sceneRoot container and add the layout
* object as a new child of that container.
*
* @param sceneRoot The root of the hierarchy in which scene changes
* and transitions will take place.
* @param layout The view hierarchy of this scene, added as a child
* of sceneRoot when this scene is entered.
*/
public Scene(ViewGroup sceneRoot, View layout) {
mSceneRoot = sceneRoot;
mLayout = layout;
}
/**
* You can use {@link #Scene(ViewGroup, View)}.
*/
public Scene(ViewGroup sceneRoot, ViewGroup layout) {
mSceneRoot = sceneRoot;
mLayout = layout;
}
/**
* Gets the root of the scene, which is the root of the view hierarchy
* affected by changes due to this scene, and which will be animated
* when this scene is entered.
*
* @return The root of the view hierarchy affected by this scene.
*/
public ViewGroup getSceneRoot() {
return mSceneRoot;
}
/**
* Exits this scene, if it is the current scene
* on the scene's {@link #getSceneRoot() scene root}. The current scene is
* set when {@link #enter() entering} a scene.
* Exiting a scene runs the {@link #setExitAction(Runnable) exit action}
* if there is one.
*/
public void exit() {
if (getCurrentScene(mSceneRoot) == this) {
if (mExitAction != null) {
mExitAction.run();
}
}
}
/**
* Enters this scene, which entails changing all values that
* are specified by this scene. These may be values associated
* with a layout view group or layout resource file which will
* now be added to the scene root, or it may be values changed by
     * an {@link #setEnterAction(Runnable) enter action}, or a
     * combination of these. No transition will be run when the
* scene is entered. To get transition behavior in scene changes,
* use one of the methods in {@link TransitionManager} instead.
*/
public void enter() {
// Apply layout change, if any
if (mLayoutId > 0 || mLayout != null) {
// empty out parent container before adding to it
getSceneRoot().removeAllViews();
if (mLayoutId > 0) {
LayoutInflater.from(mContext).inflate(mLayoutId, mSceneRoot);
} else {
mSceneRoot.addView(mLayout);
}
}
// Notify next scene that it is entering. Subclasses may override to configure scene.
if (mEnterAction != null) {
mEnterAction.run();
}
setCurrentScene(mSceneRoot, this);
}
/**
* Set the scene that the given view is in. The current scene is set only
* on the root view of a scene, not for every view in that hierarchy. This
* information is used by Scene to determine whether there is a previous
* scene which should be exited before the new scene is entered.
*
* @param view The view on which the current scene is being set
*/
static void setCurrentScene(View view, Scene scene) {
view.setTag(android.transitions.everywhere.R.id.current_scene, scene);
}
/**
     * Gets the current {@link Scene} set on the given view. A scene is set on a view
* only if that view is the scene root.
*
* @return The current Scene set on this view. A value of null indicates that
* no Scene is currently set.
*/
static Scene getCurrentScene(View view) {
return (Scene) view.getTag(android.transitions.everywhere.R.id.current_scene);
}
/**
* Scenes that are not defined with layout resources or
* hierarchies, or which need to perform additional steps
* after those hierarchies are changed to, should set an enter
* action, and possibly an exit action as well. An enter action
* will cause Scene to call back into application code to do
* anything else the application needs after transitions have
* captured pre-change values and after any other scene changes
* have been applied, such as the layout (if any) being added to
* the view hierarchy. After this method is called, Transitions will
* be played.
*
* @param action The runnable whose {@link Runnable#run() run()} method will
* be called when this scene is entered
* @see #setExitAction(Runnable)
* @see android.transitions.everywhere.Scene#Scene(ViewGroup, int, Context)
* @see android.transitions.everywhere.Scene#Scene(ViewGroup, ViewGroup)
*/
public void setEnterAction(Runnable action) {
mEnterAction = action;
}
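    // Hedged example (not in the original source): a dynamically configured scene, as
    // described above. "sceneRoot" and "infoView" are hypothetical views.
    //
    //   Scene dynamicScene = new Scene(sceneRoot);
    //   dynamicScene.setEnterAction(new Runnable() {
    //       @Override
    //       public void run() {
    //           infoView.setVisibility(View.VISIBLE);
    //       }
    //   });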
/**
* Scenes that are not defined with layout resources or
* hierarchies, or which need to perform additional steps
* after those hierarchies are changed to, should set an enter
* action, and possibly an exit action as well. An exit action
* will cause Scene to call back into application code to do
* anything the application needs to do after applicable transitions have
* captured pre-change values, but before any other scene changes
* have been applied, such as the new layout (if any) being added to
* the view hierarchy. After this method is called, the next scene
* will be entered, including a call to {@link #setEnterAction(Runnable)}
* if an enter action is set.
*
* @see #setEnterAction(Runnable)
* @see android.transitions.everywhere.Scene#Scene(ViewGroup, int, Context)
* @see android.transitions.everywhere.Scene#Scene(ViewGroup, ViewGroup)
*/
public void setExitAction(Runnable action) {
mExitAction = action;
}
/**
* Returns whether this Scene was created by a layout resource file, determined
* by the layoutId passed into
* {@link #getSceneForLayout(android.view.ViewGroup, int, android.content.Context)}.
* This is called by TransitionManager to determine whether it is safe for views from
* this scene to be removed from their parents when the scene is exited, which is
* used by {@link Fade} to fade these views out (the views must be removed from
* their parent in order to add them to the overlay for fading purposes). If a
* Scene is not based on a resource file, then the impact of removing views
* arbitrarily is unknown and should be avoided.
*/
boolean isCreatedFromLayoutResource() {
return (mLayoutId > 0);
}
} | Learn-Android-app/Transitions-Everywhere | library/src/main/java/android/transitions/everywhere/Scene.java | Java | apache-2.0 | 10,872 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.segment.filter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import it.unimi.dsi.fastutil.ints.IntIterable;
import it.unimi.dsi.fastutil.ints.IntIterator;
import org.apache.druid.collections.bitmap.ImmutableBitmap;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.query.BitmapResultFactory;
import org.apache.druid.query.extraction.ExtractionFn;
import org.apache.druid.query.filter.BitmapIndexSelector;
import org.apache.druid.query.filter.Filter;
import org.apache.druid.query.filter.FilterTuning;
import org.apache.druid.query.filter.LikeDimFilter;
import org.apache.druid.query.filter.ValueMatcher;
import org.apache.druid.query.filter.vector.VectorValueMatcher;
import org.apache.druid.query.filter.vector.VectorValueMatcherColumnProcessorFactory;
import org.apache.druid.segment.ColumnInspector;
import org.apache.druid.segment.ColumnProcessors;
import org.apache.druid.segment.ColumnSelector;
import org.apache.druid.segment.ColumnSelectorFactory;
import org.apache.druid.segment.column.BitmapIndex;
import org.apache.druid.segment.data.CloseableIndexed;
import org.apache.druid.segment.data.Indexed;
import org.apache.druid.segment.vector.VectorColumnSelectorFactory;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.Set;
public class LikeFilter implements Filter
{
private final String dimension;
private final ExtractionFn extractionFn;
private final LikeDimFilter.LikeMatcher likeMatcher;
private final FilterTuning filterTuning;
public LikeFilter(
final String dimension,
final ExtractionFn extractionFn,
final LikeDimFilter.LikeMatcher likeMatcher,
final FilterTuning filterTuning
)
{
this.dimension = dimension;
this.extractionFn = extractionFn;
this.likeMatcher = likeMatcher;
this.filterTuning = filterTuning;
}
@Override
public <T> T getBitmapResult(BitmapIndexSelector selector, BitmapResultFactory<T> bitmapResultFactory)
{
return bitmapResultFactory.unionDimensionValueBitmaps(getBitmapIterable(selector));
}
@Override
public double estimateSelectivity(BitmapIndexSelector selector)
{
return Filters.estimateSelectivity(getBitmapIterable(selector).iterator(), selector.getNumRows());
}
@Override
public ValueMatcher makeMatcher(ColumnSelectorFactory factory)
{
return Filters.makeValueMatcher(factory, dimension, likeMatcher.predicateFactory(extractionFn));
}
@Override
public VectorValueMatcher makeVectorMatcher(final VectorColumnSelectorFactory factory)
{
return ColumnProcessors.makeVectorProcessor(
dimension,
VectorValueMatcherColumnProcessorFactory.instance(),
factory
).makeMatcher(likeMatcher.predicateFactory(extractionFn));
}
@Override
public boolean canVectorizeMatcher(ColumnInspector inspector)
{
return true;
}
@Override
public Set<String> getRequiredColumns()
{
return ImmutableSet.of(dimension);
}
@Override
public boolean supportsRequiredColumnRewrite()
{
return true;
}
@Override
public Filter rewriteRequiredColumns(Map<String, String> columnRewrites)
{
String rewriteDimensionTo = columnRewrites.get(dimension);
if (rewriteDimensionTo == null) {
throw new IAE(
"Received a non-applicable rewrite: %s, filter's dimension: %s",
columnRewrites,
dimension
);
}
return new LikeFilter(
rewriteDimensionTo,
extractionFn,
likeMatcher,
filterTuning
);
}
@Override
public boolean supportsBitmapIndex(BitmapIndexSelector selector)
{
return selector.getBitmapIndex(dimension) != null;
}
@Override
public boolean shouldUseBitmapIndex(BitmapIndexSelector selector)
{
return Filters.shouldUseBitmapIndex(this, selector, filterTuning);
}
@Override
public boolean supportsSelectivityEstimation(ColumnSelector columnSelector, BitmapIndexSelector indexSelector)
{
return Filters.supportsSelectivityEstimation(this, dimension, columnSelector, indexSelector);
}
private Iterable<ImmutableBitmap> getBitmapIterable(final BitmapIndexSelector selector)
{
if (isSimpleEquals()) {
// Verify that dimension equals prefix.
return ImmutableList.of(
selector.getBitmapIndex(
dimension,
NullHandling.emptyToNullIfNeeded(likeMatcher.getPrefix())
)
);
} else if (isSimplePrefix()) {
// Verify that dimension startsWith prefix, and is accepted by likeMatcher.matchesSuffixOnly.
final BitmapIndex bitmapIndex = selector.getBitmapIndex(dimension);
if (bitmapIndex == null) {
// Treat this as a column full of nulls
return ImmutableList.of(likeMatcher.matches(null) ? Filters.allTrue(selector) : Filters.allFalse(selector));
}
// search for start, end indexes in the bitmaps; then include all matching bitmaps between those points
try (final CloseableIndexed<String> dimValues = selector.getDimensionValues(dimension)) {
// Union bitmaps for all matching dimension values in range.
// Use lazy iterator to allow unioning bitmaps one by one and avoid materializing all of them at once.
return Filters.bitmapsFromIndexes(getDimValueIndexIterableForPrefixMatch(bitmapIndex, dimValues), bitmapIndex);
}
catch (IOException e) {
throw new UncheckedIOException(e);
}
} else {
// fallback
return Filters.matchPredicateNoUnion(
dimension,
selector,
likeMatcher.predicateFactory(extractionFn).makeStringPredicate()
);
}
}
/**
* Returns true if this filter is a simple equals filter: dimension = 'value' with no extractionFn.
*/
private boolean isSimpleEquals()
{
return extractionFn == null && likeMatcher.getSuffixMatch() == LikeDimFilter.LikeMatcher.SuffixMatch.MATCH_EMPTY;
}
/**
* Returns true if this filter is a simple prefix filter: dimension startsWith 'value' with no extractionFn.
*/
private boolean isSimplePrefix()
{
return extractionFn == null && !likeMatcher.getPrefix().isEmpty();
}
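  // Illustrative mapping of LIKE patterns to the fast paths above (inferred from the matcher
  // semantics; the column and literals are hypothetical):
  //   col LIKE 'bar'   -> isSimpleEquals(): single bitmap lookup for the value "bar"
  //   col LIKE 'bar%'  -> isSimplePrefix(): bitmap range scan over values starting with "bar"
  //   col LIKE '%bar%' -> neither; handled by the predicate fallback in getBitmapIterable()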
private IntIterable getDimValueIndexIterableForPrefixMatch(
final BitmapIndex bitmapIndex,
final Indexed<String> dimValues
)
{
final String lower = NullHandling.nullToEmptyIfNeeded(likeMatcher.getPrefix());
final String upper = NullHandling.nullToEmptyIfNeeded(likeMatcher.getPrefix()) + Character.MAX_VALUE;
final int startIndex; // inclusive
final int endIndex; // exclusive
if (lower == null) {
// For Null values
startIndex = bitmapIndex.getIndex(null);
endIndex = startIndex + 1;
} else {
final int lowerFound = bitmapIndex.getIndex(lower);
startIndex = lowerFound >= 0 ? lowerFound : -(lowerFound + 1);
final int upperFound = bitmapIndex.getIndex(upper);
endIndex = upperFound >= 0 ? upperFound + 1 : -(upperFound + 1);
}
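    // Worked example (illustrative): for prefix "ba", lower == "ba" and
    // upper == "ba" + Character.MAX_VALUE, so [startIndex, endIndex) spans dictionary values
    // such as "ba", "bar" and "baz" while excluding "b" and "bb"; the concrete values are
    // hypothetical.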
return new IntIterable()
{
@Override
public IntIterator iterator()
{
return new IntIterator()
{
int currIndex = startIndex;
int found;
{
found = findNext();
}
private int findNext()
{
while (currIndex < endIndex && !likeMatcher.matchesSuffixOnly(dimValues, currIndex)) {
currIndex++;
}
if (currIndex < endIndex) {
return currIndex++;
} else {
return -1;
}
}
@Override
public boolean hasNext()
{
return found != -1;
}
@Override
public int nextInt()
{
int cur = found;
if (cur == -1) {
throw new NoSuchElementException();
}
found = findNext();
return cur;
}
};
}
};
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
LikeFilter that = (LikeFilter) o;
return Objects.equals(dimension, that.dimension) &&
Objects.equals(extractionFn, that.extractionFn) &&
Objects.equals(likeMatcher, that.likeMatcher) &&
Objects.equals(filterTuning, that.filterTuning);
}
@Override
public int hashCode()
{
return Objects.hash(dimension, extractionFn, likeMatcher, filterTuning);
}
}
| druid-io/druid | processing/src/main/java/org/apache/druid/segment/filter/LikeFilter.java | Java | apache-2.0 | 9,559 |