| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
| stringlengths 3 to 1.05M | stringlengths 4 to 116 | stringlengths 4 to 991 | stringclasses 9 values | stringclasses 15 values | int32 3 to 1.05M |
"""
Telstra API platform for notify component.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/notify.telstra/
"""
import logging
import requests
import voluptuous as vol
from homeassistant.components.notify import (
BaseNotificationService, ATTR_TITLE, PLATFORM_SCHEMA)
from homeassistant.const import CONTENT_TYPE_JSON, HTTP_HEADER_CONTENT_TYPE
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_CONSUMER_KEY = 'consumer_key'
CONF_CONSUMER_SECRET = 'consumer_secret'
CONF_PHONE_NUMBER = 'phone_number'
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_CONSUMER_KEY): cv.string,
vol.Required(CONF_CONSUMER_SECRET): cv.string,
vol.Required(CONF_PHONE_NUMBER): cv.string,
})
def get_service(hass, config, discovery_info=None):
"""Get the Telstra SMS API notification service."""
consumer_key = config.get(CONF_CONSUMER_KEY)
consumer_secret = config.get(CONF_CONSUMER_SECRET)
phone_number = config.get(CONF_PHONE_NUMBER)
if _authenticate(consumer_key, consumer_secret) is False:
_LOGGER.exception("Error obtaining authorization from Telstra API")
return None
return TelstraNotificationService(
consumer_key, consumer_secret, phone_number)
class TelstraNotificationService(BaseNotificationService):
"""Implementation of a notification service for the Telstra SMS API."""
def __init__(self, consumer_key, consumer_secret, phone_number):
"""Initialize the service."""
self._consumer_key = consumer_key
self._consumer_secret = consumer_secret
self._phone_number = phone_number
def send_message(self, message="", **kwargs):
"""Send a message to a user."""
title = kwargs.get(ATTR_TITLE)
# Retrieve authorization first
token_response = _authenticate(
self._consumer_key, self._consumer_secret)
if token_response is False:
_LOGGER.exception("Error obtaining authorization from Telstra API")
return
# Send the SMS
if title:
text = '{} {}'.format(title, message)
else:
text = message
message_data = {
'to': self._phone_number,
'body': text,
}
message_resource = 'https://api.telstra.com/v1/sms/messages'
message_headers = {
HTTP_HEADER_CONTENT_TYPE: CONTENT_TYPE_JSON,
'Authorization': 'Bearer ' + token_response['access_token'],
}
message_response = requests.post(
message_resource, headers=message_headers, json=message_data,
timeout=10)
if message_response.status_code != 202:
_LOGGER.exception("Failed to send SMS. Status code: %d",
message_response.status_code)
def _authenticate(consumer_key, consumer_secret):
"""Authenticate with the Telstra API."""
token_data = {
'client_id': consumer_key,
'client_secret': consumer_secret,
'grant_type': 'client_credentials',
'scope': 'SMS'
}
token_resource = 'https://api.telstra.com/v1/oauth/token'
token_response = requests.get(
token_resource, params=token_data, timeout=10).json()
if 'error' in token_response:
return False
return token_response
| MungoRae/home-assistant | homeassistant/components/notify/telstra.py | Python | apache-2.0 | 3,404 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.math3.optim.nonlinear.scalar;
import org.apache.commons.math3.analysis.MultivariateVectorFunction;
import org.apache.commons.math3.optim.OptimizationData;
/**
* Gradient of the scalar function to be optimized.
*
* @version $Id$
* @since 3.1
*/
public class ObjectiveFunctionGradient implements OptimizationData {
/** Function to be optimized. */
private final MultivariateVectorFunction gradient;
/**
* @param g Gradient of the function to be optimized.
*/
public ObjectiveFunctionGradient(MultivariateVectorFunction g) {
gradient = g;
}
/**
* Gets the gradient of the function to be optimized.
*
* @return the objective function gradient.
*/
public MultivariateVectorFunction getObjectiveFunctionGradient() {
return gradient;
}
}
| charles-cooper/idylfin | src/org/apache/commons/math3/optim/nonlinear/scalar/ObjectiveFunctionGradient.java | Java | apache-2.0 | 1,648 |
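For context, a short usage sketch of the class above: ObjectiveFunctionGradient simply wraps the analytic gradient so it can be passed as one more piece of OptimizationData to a gradient-based optimizer. The quadratic objective, the choice of NonLinearConjugateGradientOptimizer, and the class name GradientUsageSketch are illustrative assumptions, not part of the source file.

import org.apache.commons.math3.analysis.MultivariateFunction;
import org.apache.commons.math3.analysis.MultivariateVectorFunction;
import org.apache.commons.math3.optim.InitialGuess;
import org.apache.commons.math3.optim.MaxEval;
import org.apache.commons.math3.optim.PointValuePair;
import org.apache.commons.math3.optim.SimpleValueChecker;
import org.apache.commons.math3.optim.nonlinear.scalar.GoalType;
import org.apache.commons.math3.optim.nonlinear.scalar.ObjectiveFunction;
import org.apache.commons.math3.optim.nonlinear.scalar.ObjectiveFunctionGradient;
import org.apache.commons.math3.optim.nonlinear.scalar.gradient.NonLinearConjugateGradientOptimizer;

public class GradientUsageSketch {
    public static void main(String[] args) {
        // f(x, y) = (x - 1)^2 + (y - 2)^2, minimized at (1, 2).
        MultivariateFunction f = p -> Math.pow(p[0] - 1, 2) + Math.pow(p[1] - 2, 2);
        // Analytic gradient of f; this is the function the wrapper above carries.
        MultivariateVectorFunction g = p -> new double[]{2 * (p[0] - 1), 2 * (p[1] - 2)};

        NonLinearConjugateGradientOptimizer optimizer = new NonLinearConjugateGradientOptimizer(
            NonLinearConjugateGradientOptimizer.Formula.FLETCHER_REEVES,
            new SimpleValueChecker(1e-8, 1e-8));

        // The gradient wrapper is supplied alongside the other optimization data.
        PointValuePair result = optimizer.optimize(
            new MaxEval(1000),
            new ObjectiveFunction(f),
            new ObjectiveFunctionGradient(g),
            GoalType.MINIMIZE,
            new InitialGuess(new double[]{0, 0}));

        System.out.println(java.util.Arrays.toString(result.getPoint()));
    }
}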
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.codeStyle.arrangement;
import com.intellij.lang.Language;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.editor.FoldRegion;
import com.intellij.openapi.editor.FoldingModel;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.codeStyle.CommonCodeStyleSettings;
import com.intellij.psi.codeStyle.arrangement.engine.ArrangementEngine;
import com.intellij.psi.codeStyle.arrangement.group.ArrangementGroupingRule;
import com.intellij.psi.codeStyle.arrangement.match.ArrangementSectionRule;
import com.intellij.psi.codeStyle.arrangement.match.StdArrangementEntryMatcher;
import com.intellij.psi.codeStyle.arrangement.match.StdArrangementMatchRule;
import com.intellij.psi.codeStyle.arrangement.model.ArrangementAtomMatchCondition;
import com.intellij.psi.codeStyle.arrangement.model.ArrangementMatchCondition;
import com.intellij.psi.codeStyle.arrangement.std.*;
import com.intellij.testFramework.fixtures.LightPlatformCodeInsightFixtureTestCase;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static com.intellij.psi.codeStyle.arrangement.std.StdArrangementTokens.Order.KEEP;
/**
* @author Denis Zhdanov
* @since 20.07.2012
*/
public abstract class AbstractRearrangerTest extends LightPlatformCodeInsightFixtureTestCase {
private static final RichTextHandler[] RICH_TEXT_HANDLERS = {new RangeHandler(), new FoldingHandler()};
private static final Pattern ATTRIBUTE_PATTERN = Pattern.compile("([^\\s]+)=([^\\s]+)");
protected FileType fileType;
protected Language language;
@Override
protected void setUp() throws Exception {
super.setUp();
CodeStyleSettingsManager.getInstance(myFixture.getProject()).setTemporarySettings(new CodeStyleSettings());
}
@Override
protected void tearDown() throws Exception {
CodeStyleSettingsManager.getInstance(myFixture.getProject()).dropTemporarySettings();
super.tearDown();
}
@NotNull
protected CommonCodeStyleSettings getCommonSettings() {
return CodeStyleSettingsManager.getInstance(myFixture.getProject()).getCurrentSettings().getCommonSettings(language);
}
protected static ArrangementSectionRule section(@NotNull StdArrangementMatchRule... rules) {
return section(null, null, rules);
}
protected static ArrangementSectionRule section(@Nullable String start, @Nullable String end, @NotNull StdArrangementMatchRule... rules) {
return ArrangementSectionRule.create(start, end, rules);
}
protected static StdArrangementRuleAliasToken alias(@NotNull String id, @NotNull StdArrangementMatchRule... rules) {
return new StdArrangementRuleAliasToken(id, id, ContainerUtil.newArrayList(rules));
}
@NotNull
protected static ArrangementGroupingRule group(@NotNull ArrangementSettingsToken type) {
return group(type, KEEP);
}
@NotNull
protected static ArrangementGroupingRule group(@NotNull ArrangementSettingsToken type, @NotNull ArrangementSettingsToken order) {
return new ArrangementGroupingRule(type, order);
}
@NotNull
protected static StdArrangementMatchRule rule(@NotNull ArrangementSettingsToken token) {
return new StdArrangementMatchRule(new StdArrangementEntryMatcher(atom(token)));
}
@NotNull
protected static StdArrangementMatchRule nameRule(@NotNull String nameFilter, @NotNull ArrangementSettingsToken... tokens) {
if (tokens.length == 0) {
return new StdArrangementMatchRule(new StdArrangementEntryMatcher(atom(nameFilter)));
}
else {
ArrangementAtomMatchCondition[] conditions = new ArrangementAtomMatchCondition[tokens.length + 1];
conditions[0] = atom(nameFilter);
for (int i = 0; i < tokens.length; i++) conditions[i + 1] = atom(tokens[i]);
ArrangementMatchCondition compositeCondition = ArrangementUtil.combine(conditions);
return new StdArrangementMatchRule(new StdArrangementEntryMatcher(compositeCondition));
}
}
@NotNull
protected static StdArrangementMatchRule rule(@NotNull ArrangementSettingsToken... conditions) {
return rule(ContainerUtil.map(conditions, it -> atom(it)));
}
@NotNull
protected static StdArrangementMatchRule rule(@NotNull List<ArrangementAtomMatchCondition> conditions) {
return rule(conditions.toArray(new ArrangementAtomMatchCondition[conditions.size()]));
}
@NotNull
protected static StdArrangementMatchRule rule(@NotNull ArrangementAtomMatchCondition... conditions) {
ArrangementMatchCondition compositeCondition = ArrangementUtil.combine(conditions);
return new StdArrangementMatchRule(new StdArrangementEntryMatcher(compositeCondition));
}
@NotNull
protected static StdArrangementMatchRule ruleWithOrder(@NotNull ArrangementSettingsToken orderType, @NotNull StdArrangementMatchRule rule) {
return new StdArrangementMatchRule(rule.getMatcher(), orderType);
}
@NotNull
protected static ArrangementAtomMatchCondition atom(@NotNull ArrangementSettingsToken token) {
return new ArrangementAtomMatchCondition(token);
}
protected static ArrangementAtomMatchCondition atom(@NotNull ArrangementSettingsToken token, boolean included) {
return new ArrangementAtomMatchCondition(token, included);
}
@NotNull
protected static ArrangementAtomMatchCondition atom(@NotNull String nameFilter) {
return new ArrangementAtomMatchCondition(StdArrangementTokens.Regexp.NAME, nameFilter);
}
protected void doTest(@NotNull Map<String, ?> args) {
String text = (String)args.get("initial");
String expected = (String)args.get("expected");
@SuppressWarnings("unchecked") List<TextRange> ranges = (List<TextRange>)args.get("ranges");
Info info = parse(text);
if (!isEmpty(ranges) && !isEmpty(info.ranges)) {
fail("Duplicate ranges set: explicit: " + ranges + ", " + "derived: " + info.ranges + ", text:\n" + text);
}
if (isEmpty(info.ranges)) {
info.ranges = !isEmpty(ranges) ? ranges : Arrays.asList(TextRange.from(0, text.length()));
}
myFixture.configureByText(fileType, info.text);
final FoldingModel foldingModel = myFixture.getEditor().getFoldingModel();
for (final FoldingInfo foldingInfo : info.foldings) {
foldingModel.runBatchFoldingOperation(() -> {
FoldRegion region = foldingModel.addFoldRegion(foldingInfo.start, foldingInfo.end, foldingInfo.placeholder);
if (region != null) region.setExpanded(false);
});
}
@SuppressWarnings("unchecked") List<ArrangementGroupingRule> groupingRules = (List<ArrangementGroupingRule>)args.get("groups");
if (groupingRules == null) groupingRules = Collections.emptyList();
List<?> rules = (List<?>)args.get("rules");
List<ArrangementSectionRule> sectionRules = getSectionRules(rules);
@SuppressWarnings("unchecked")
List<StdArrangementRuleAliasToken> aliases = (List<StdArrangementRuleAliasToken>)args.get("aliases");
CommonCodeStyleSettings settings = CodeStyleSettingsManager.getInstance(myFixture.getProject()).getCurrentSettings().getCommonSettings(language);
final StdArrangementSettings arrangementSettings =
aliases == null ?
new StdArrangementSettings(groupingRules, sectionRules) :
new StdArrangementExtendableSettings(groupingRules, sectionRules, aliases);
settings.setArrangementSettings(arrangementSettings);
ArrangementEngine engine = ServiceManager.getService(myFixture.getProject(), ArrangementEngine.class);
CommandProcessor.getInstance().executeCommand(getProject(), ()-> engine.arrange(myFixture.getEditor(), myFixture.getFile(), info.ranges), null, null);
// Check expectation.
Info after = parse(expected);
assertEquals(after.text, myFixture.getEditor().getDocument().getText());
for (FoldingInfo it : after.foldings) {
FoldRegion foldRegion = foldingModel.getCollapsedRegionAtOffset(it.start);
assertNotNull("Expected to find fold region at offset " + it.start, foldRegion);
assertEquals(it.end, foldRegion.getEndOffset());
}
}
protected List<ArrangementSectionRule> getSectionRules(List<?> rules) {
List<ArrangementSectionRule> sectionRules = Collections.emptyList();
if (rules != null) sectionRules = ContainerUtil.map(rules, (Function<Object, ArrangementSectionRule>)o -> o instanceof ArrangementSectionRule ? (ArrangementSectionRule)o : ArrangementSectionRule.create((StdArrangementMatchRule)o));
return sectionRules;
}
private static boolean isEmpty(Collection<?> collection) {
return collection == null || collection.isEmpty();
}
@NotNull
private static Info parse(@NotNull String text) {
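// Strips rich-text markers (handled by RangeHandler / FoldingHandler) from the input, letting each handler record the offsets of the text it wrapped, and returns the cleaned-up text.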
Info result = new Info();
StringBuilder buffer = new StringBuilder(text);
int offset = 0;
while (offset < buffer.length()) {
RichTextHandler handler = null;
int richTextMarkStart = -1;
for (RichTextHandler h : RICH_TEXT_HANDLERS) {
int i = buffer.indexOf("<" + h.getMarker(), offset);
if (i >= 0 && (handler == null || i < richTextMarkStart)) {
richTextMarkStart = i;
handler = h;
}
}
if (handler == null) break;
String marker = handler.getMarker();
int attrStart = richTextMarkStart + marker.length() + 1;
int openingTagEnd = buffer.indexOf(">", richTextMarkStart);
int openTagLength = openingTagEnd - richTextMarkStart + 1;
Map<String, String> attributes = parseAttributes(buffer.substring(attrStart, openingTagEnd));
String closingTag = "</" + marker + ">";
int closingTagStart = buffer.indexOf(closingTag);
assert closingTagStart > 0;
handler.handle(result, attributes, richTextMarkStart, closingTagStart - openTagLength);
buffer.delete(closingTagStart, closingTagStart + closingTag.length());
buffer.delete(richTextMarkStart, openingTagEnd + 1);
offset = closingTagStart - openTagLength;
}
result.text = buffer.toString();
return result;
}
@NotNull
private static Map<String, String> parseAttributes(@NotNull String text) {
if (text.isEmpty()) return Collections.emptyMap();
Matcher matcher = ATTRIBUTE_PATTERN.matcher(text);
Map<String, String> result = ContainerUtil.newLinkedHashMap();
while (matcher.find()) result.put(matcher.group(1), matcher.group(2));
return result;
}
}
| ThiagoGarciaAlves/intellij-community | platform/testFramework/src/com/intellij/psi/codeStyle/arrangement/AbstractRearrangerTest.java | Java | apache-2.0 | 11,297 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.test.classloading.jar;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.ReducingState;
import org.apache.flink.api.common.state.ReducingStateDescriptor;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.common.typeutils.base.TypeSerializerSingleton;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.core.memory.DataInputView;
import org.apache.flink.core.memory.DataOutputView;
import org.apache.flink.runtime.state.CheckpointListener;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.checkpoint.ListCheckpointed;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.ParallelSourceFunction;
import org.apache.flink.test.util.SuccessException;
import org.apache.flink.util.Collector;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;
/**
* Test class used by the {@link org.apache.flink.test.classloading.ClassLoaderITCase}.
*/
public class CheckpointingCustomKvStateProgram {
public static void main(String[] args) throws Exception {
final String checkpointPath = args[0];
final String outputPath = args[1];
final int parallelism = 1;
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(parallelism);
env.getConfig().disableSysoutLogging();
env.enableCheckpointing(100);
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(1, 1000));
env.setStateBackend(new FsStateBackend(checkpointPath));
DataStream<Integer> source = env.addSource(new InfiniteIntegerSource());
source
.map(new MapFunction<Integer, Tuple2<Integer, Integer>>() {
private static final long serialVersionUID = 1L;
@Override
public Tuple2<Integer, Integer> map(Integer value) throws Exception {
return new Tuple2<>(ThreadLocalRandom.current().nextInt(parallelism), value);
}
})
.keyBy(new KeySelector<Tuple2<Integer, Integer>, Integer>() {
private static final long serialVersionUID = 1L;
@Override
public Integer getKey(Tuple2<Integer, Integer> value) throws Exception {
return value.f0;
}
}).flatMap(new ReducingStateFlatMap()).writeAsText(outputPath, FileSystem.WriteMode.OVERWRITE);
env.execute();
}
private static class InfiniteIntegerSource implements ParallelSourceFunction<Integer>, ListCheckpointed<Integer> {
private static final long serialVersionUID = -7517574288730066280L;
private volatile boolean running = true;
@Override
public void run(SourceContext<Integer> ctx) throws Exception {
int counter = 0;
while (running) {
synchronized (ctx.getCheckpointLock()) {
ctx.collect(counter++);
}
}
}
@Override
public void cancel() {
running = false;
}
@Override
public List<Integer> snapshotState(long checkpointId, long timestamp) throws Exception {
return Collections.singletonList(0);
}
@Override
public void restoreState(List<Integer> state) throws Exception {
}
}
private static class ReducingStateFlatMap extends RichFlatMapFunction<Tuple2<Integer, Integer>, Integer>
implements ListCheckpointed<ReducingStateFlatMap>, CheckpointListener {
private static final long serialVersionUID = -5939722892793950253L;
private transient ReducingState<Integer> kvState;
private boolean atLeastOneSnapshotComplete = false;
private boolean restored = false;
@Override
public void open(Configuration parameters) throws Exception {
ReducingStateDescriptor<Integer> stateDescriptor =
new ReducingStateDescriptor<>(
"reducing-state",
new ReduceSum(),
CustomIntSerializer.INSTANCE);
this.kvState = getRuntimeContext().getReducingState(stateDescriptor);
}
@Override
public void flatMap(Tuple2<Integer, Integer> value, Collector<Integer> out) throws Exception {
kvState.add(value.f1);
if (atLeastOneSnapshotComplete) {
if (restored) {
throw new SuccessException();
} else {
throw new RuntimeException("Intended failure, to trigger restore");
}
}
}
@Override
public List<ReducingStateFlatMap> snapshotState(long checkpointId, long timestamp) throws Exception {
return Collections.singletonList(this);
}
@Override
public void restoreState(List<ReducingStateFlatMap> state) throws Exception {
restored = true;
atLeastOneSnapshotComplete = true;
}
@Override
public void notifyCheckpointComplete(long checkpointId) throws Exception {
atLeastOneSnapshotComplete = true;
}
private static class ReduceSum implements ReduceFunction<Integer> {
private static final long serialVersionUID = 1L;
@Override
public Integer reduce(Integer value1, Integer value2) throws Exception {
return value1 + value2;
}
}
}
private static final class CustomIntSerializer extends TypeSerializerSingleton<Integer> {
private static final long serialVersionUID = 4572452915892737448L;
public static final TypeSerializer<Integer> INSTANCE = new CustomIntSerializer();
@Override
public boolean isImmutableType() {
return true;
}
@Override
public Integer createInstance() {
return 0;
}
@Override
public Integer copy(Integer from) {
return from;
}
@Override
public Integer copy(Integer from, Integer reuse) {
return from;
}
@Override
public int getLength() {
return 4;
}
@Override
public void serialize(Integer record, DataOutputView target) throws IOException {
target.writeInt(record.intValue());
}
@Override
public Integer deserialize(DataInputView source) throws IOException {
return Integer.valueOf(source.readInt());
}
@Override
public Integer deserialize(Integer reuse, DataInputView source) throws IOException {
return Integer.valueOf(source.readInt());
}
@Override
public void copy(DataInputView source, DataOutputView target) throws IOException {
target.writeInt(source.readInt());
}
@Override
public boolean canEqual(Object obj) {
return obj instanceof CustomIntSerializer;
}
}
}
| zimmermatt/flink | flink-tests/src/test/java/org/apache/flink/test/classloading/jar/CheckpointingCustomKvStateProgram.java | Java | apache-2.0 | 7,468 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.api.common.operators;
//CHECKSTYLE.OFF: AvoidStarImport - Needed for TupleGenerator
import static org.junit.Assert.*;
import java.util.ArrayList;
import java.util.List;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.JoinFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.io.LocalCollectionOutputFormat;
import org.apache.flink.api.java.operators.DeltaIteration;
import org.apache.flink.api.java.operators.IterativeDataSet;
import org.apache.flink.api.java.tuple.Tuple1;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;
import org.junit.Test;
@SuppressWarnings("serial")
public class CollectionExecutionIterationTest implements java.io.Serializable {
@Test
public void testBulkIteration() {
try {
ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();
IterativeDataSet<Integer> iteration = env.fromElements(1).iterate(10);
DataSet<Integer> result = iteration.closeWith(iteration.map(new AddSuperstepNumberMapper()));
List<Integer> collected = new ArrayList<Integer>();
result.output(new LocalCollectionOutputFormat<Integer>(collected));
env.execute();
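// Starting from 1, each of the 10 supersteps adds its superstep number, so the single result is 1 + (1 + 2 + ... + 10) = 56.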
assertEquals(1, collected.size());
assertEquals(56, collected.get(0).intValue());
}
catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
@Test
public void testBulkIterationWithTerminationCriterion() {
try {
ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();
IterativeDataSet<Integer> iteration = env.fromElements(1).iterate(100);
DataSet<Integer> iterationResult = iteration.map(new AddSuperstepNumberMapper());
DataSet<Integer> terminationCriterion = iterationResult.filter(new FilterFunction<Integer>() {
public boolean filter(Integer value) {
return value < 50;
}
});
List<Integer> collected = new ArrayList<Integer>();
iteration.closeWith(iterationResult, terminationCriterion)
.output(new LocalCollectionOutputFormat<Integer>(collected));
env.execute();
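// Same arithmetic as above: the value is 46 after superstep 9, so the termination criterion (value < 50) only becomes empty after superstep 10, leaving 1 + 55 = 56.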
assertEquals(1, collected.size());
assertEquals(56, collected.get(0).intValue());
}
catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
@Test
public void testDeltaIteration() {
try {
ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();
@SuppressWarnings("unchecked")
DataSet<Tuple2<Integer, Integer>> solInput = env.fromElements(
new Tuple2<Integer, Integer>(1, 0),
new Tuple2<Integer, Integer>(2, 0),
new Tuple2<Integer, Integer>(3, 0),
new Tuple2<Integer, Integer>(4, 0));
@SuppressWarnings("unchecked")
DataSet<Tuple1<Integer>> workInput = env.fromElements(
new Tuple1<Integer>(1),
new Tuple1<Integer>(2),
new Tuple1<Integer>(3),
new Tuple1<Integer>(4));
// Perform a delta iteration where we add those values to the workset where
// the second tuple field is smaller than the first tuple field.
// At the end both tuple fields must be the same.
DeltaIteration<Tuple2<Integer, Integer>, Tuple1<Integer>> iteration =
solInput.iterateDelta(workInput, 10, 0);
DataSet<Tuple2<Integer, Integer>> solDelta = iteration.getSolutionSet().join(
iteration.getWorkset()).where(0).equalTo(0).with(
new JoinFunction<Tuple2<Integer, Integer>, Tuple1<Integer>, Tuple2<Integer, Integer>>() {
@Override
public Tuple2<Integer, Integer> join(Tuple2<Integer, Integer> first,
Tuple1<Integer> second) throws Exception {
return new Tuple2<Integer, Integer>(first.f0, first.f1 + 1);
}
});
DataSet<Tuple1<Integer>> nextWorkset = solDelta.flatMap(
new FlatMapFunction<Tuple2<Integer, Integer>, Tuple1<Integer>>() {
@Override
public void flatMap(Tuple2<Integer, Integer> in, Collector<Tuple1<Integer>>
out) throws Exception {
if (in.f1 < in.f0) {
out.collect(new Tuple1<Integer>(in.f0));
}
}
});
List<Tuple2<Integer, Integer>> collected = new ArrayList<Tuple2<Integer, Integer>>();
iteration.closeWith(solDelta, nextWorkset)
.output(new LocalCollectionOutputFormat<Tuple2<Integer, Integer>>(collected));
env.execute();
// verify that both tuple fields are now the same
for (Tuple2<Integer, Integer> t: collected) {
assertEquals(t.f0, t.f1);
}
}
catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
public static class AddSuperstepNumberMapper extends RichMapFunction<Integer, Integer> {
@Override
public Integer map(Integer value) {
int superstep = getIterationRuntimeContext().getSuperstepNumber();
return value + superstep;
}
}
}
| WangTaoTheTonic/flink | flink-java/src/test/java/org/apache/flink/api/common/operators/CollectionExecutionIterationTest.java | Java | apache-2.0 | 5,782 |
<?php
final class PhabricatorPeopleProfileEditController
extends PhabricatorPeopleProfileController {
public function handleRequest(AphrontRequest $request) {
$viewer = $this->getViewer();
$id = $request->getURIData('id');
$user = id(new PhabricatorPeopleQuery())
->setViewer($viewer)
->withIDs(array($id))
->needProfileImage(true)
->requireCapabilities(
array(
PhabricatorPolicyCapability::CAN_VIEW,
PhabricatorPolicyCapability::CAN_EDIT,
))
->executeOne();
if (!$user) {
return new Aphront404Response();
}
$this->setUser($user);
$done_uri = $this->getApplicationURI("manage/{$id}/");
$field_list = PhabricatorCustomField::getObjectFields(
$user,
PhabricatorCustomField::ROLE_EDIT);
$field_list
->setViewer($viewer)
->readFieldsFromStorage($user);
$validation_exception = null;
if ($request->isFormPost()) {
$xactions = $field_list->buildFieldTransactionsFromRequest(
new PhabricatorUserTransaction(),
$request);
$editor = id(new PhabricatorUserTransactionEditor())
->setActor($viewer)
->setContentSourceFromRequest($request)
->setContinueOnNoEffect(true);
try {
$editor->applyTransactions($user, $xactions);
return id(new AphrontRedirectResponse())->setURI($done_uri);
} catch (PhabricatorApplicationTransactionValidationException $ex) {
$validation_exception = $ex;
}
}
$title = pht('Edit Profile');
$form = id(new AphrontFormView())
->setUser($viewer);
$field_list->appendFieldsToForm($form);
$form
->appendChild(
id(new AphrontFormSubmitControl())
->addCancelButton($done_uri)
->setValue(pht('Save Profile')));
$allow_public = PhabricatorEnv::getEnvConfig('policy.allow-public');
$note = null;
if ($allow_public) {
$note = id(new PHUIInfoView())
->setSeverity(PHUIInfoView::SEVERITY_WARNING)
->appendChild(pht(
'Information on user profiles on this install is publicly '.
'visible.'));
}
$form_box = id(new PHUIObjectBoxView())
->setHeaderText(pht('Profile'))
->setValidationException($validation_exception)
->setBackground(PHUIObjectBoxView::BLUE_PROPERTY)
->setForm($form);
$crumbs = $this->buildApplicationCrumbs();
$crumbs->addTextCrumb(pht('Edit Profile'));
$crumbs->setBorder(true);
$nav = $this->newNavigation(
$user,
PhabricatorPeopleProfileMenuEngine::ITEM_MANAGE);
$header = id(new PHUIHeaderView())
->setHeader(pht('Edit Profile: %s', $user->getFullName()))
->setHeaderIcon('fa-pencil');
$view = id(new PHUITwoColumnView())
->setHeader($header)
->setFooter(array(
$note,
$form_box,
));
return $this->newPage()
->setTitle($title)
->setCrumbs($crumbs)
->setNavigation($nav)
->appendChild($view);
}
}
| phacility/phabricator | src/applications/people/controller/PhabricatorPeopleProfileEditController.php | PHP | apache-2.0 | 3,029 |
/*
Copyright 2015 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// DO NOT EDIT. THIS FILE IS AUTO-GENERATED BY $KUBEROOT/hack/update-generated-deep-copies.sh.
package extensions
import (
time "time"
api "k8s.io/kubernetes/pkg/api"
resource "k8s.io/kubernetes/pkg/api/resource"
unversioned "k8s.io/kubernetes/pkg/api/unversioned"
conversion "k8s.io/kubernetes/pkg/conversion"
util "k8s.io/kubernetes/pkg/util"
inf "speter.net/go/exp/math/dec/inf"
)
func deepCopy_api_AWSElasticBlockStoreVolumeSource(in api.AWSElasticBlockStoreVolumeSource, out *api.AWSElasticBlockStoreVolumeSource, c *conversion.Cloner) error {
out.VolumeID = in.VolumeID
out.FSType = in.FSType
out.Partition = in.Partition
out.ReadOnly = in.ReadOnly
return nil
}
func deepCopy_api_Capabilities(in api.Capabilities, out *api.Capabilities, c *conversion.Cloner) error {
if in.Add != nil {
out.Add = make([]api.Capability, len(in.Add))
for i := range in.Add {
out.Add[i] = in.Add[i]
}
} else {
out.Add = nil
}
if in.Drop != nil {
out.Drop = make([]api.Capability, len(in.Drop))
for i := range in.Drop {
out.Drop[i] = in.Drop[i]
}
} else {
out.Drop = nil
}
return nil
}
func deepCopy_api_CephFSVolumeSource(in api.CephFSVolumeSource, out *api.CephFSVolumeSource, c *conversion.Cloner) error {
if in.Monitors != nil {
out.Monitors = make([]string, len(in.Monitors))
for i := range in.Monitors {
out.Monitors[i] = in.Monitors[i]
}
} else {
out.Monitors = nil
}
out.User = in.User
out.SecretFile = in.SecretFile
if in.SecretRef != nil {
out.SecretRef = new(api.LocalObjectReference)
if err := deepCopy_api_LocalObjectReference(*in.SecretRef, out.SecretRef, c); err != nil {
return err
}
} else {
out.SecretRef = nil
}
out.ReadOnly = in.ReadOnly
return nil
}
func deepCopy_api_CinderVolumeSource(in api.CinderVolumeSource, out *api.CinderVolumeSource, c *conversion.Cloner) error {
out.VolumeID = in.VolumeID
out.FSType = in.FSType
out.ReadOnly = in.ReadOnly
return nil
}
func deepCopy_api_Container(in api.Container, out *api.Container, c *conversion.Cloner) error {
out.Name = in.Name
out.Image = in.Image
if in.Command != nil {
out.Command = make([]string, len(in.Command))
for i := range in.Command {
out.Command[i] = in.Command[i]
}
} else {
out.Command = nil
}
if in.Args != nil {
out.Args = make([]string, len(in.Args))
for i := range in.Args {
out.Args[i] = in.Args[i]
}
} else {
out.Args = nil
}
out.WorkingDir = in.WorkingDir
if in.Ports != nil {
out.Ports = make([]api.ContainerPort, len(in.Ports))
for i := range in.Ports {
if err := deepCopy_api_ContainerPort(in.Ports[i], &out.Ports[i], c); err != nil {
return err
}
}
} else {
out.Ports = nil
}
if in.Env != nil {
out.Env = make([]api.EnvVar, len(in.Env))
for i := range in.Env {
if err := deepCopy_api_EnvVar(in.Env[i], &out.Env[i], c); err != nil {
return err
}
}
} else {
out.Env = nil
}
if err := deepCopy_api_ResourceRequirements(in.Resources, &out.Resources, c); err != nil {
return err
}
if in.VolumeMounts != nil {
out.VolumeMounts = make([]api.VolumeMount, len(in.VolumeMounts))
for i := range in.VolumeMounts {
if err := deepCopy_api_VolumeMount(in.VolumeMounts[i], &out.VolumeMounts[i], c); err != nil {
return err
}
}
} else {
out.VolumeMounts = nil
}
if in.LivenessProbe != nil {
out.LivenessProbe = new(api.Probe)
if err := deepCopy_api_Probe(*in.LivenessProbe, out.LivenessProbe, c); err != nil {
return err
}
} else {
out.LivenessProbe = nil
}
if in.ReadinessProbe != nil {
out.ReadinessProbe = new(api.Probe)
if err := deepCopy_api_Probe(*in.ReadinessProbe, out.ReadinessProbe, c); err != nil {
return err
}
} else {
out.ReadinessProbe = nil
}
if in.Lifecycle != nil {
out.Lifecycle = new(api.Lifecycle)
if err := deepCopy_api_Lifecycle(*in.Lifecycle, out.Lifecycle, c); err != nil {
return err
}
} else {
out.Lifecycle = nil
}
out.TerminationMessagePath = in.TerminationMessagePath
out.ImagePullPolicy = in.ImagePullPolicy
if in.SecurityContext != nil {
out.SecurityContext = new(api.SecurityContext)
if err := deepCopy_api_SecurityContext(*in.SecurityContext, out.SecurityContext, c); err != nil {
return err
}
} else {
out.SecurityContext = nil
}
out.Stdin = in.Stdin
out.StdinOnce = in.StdinOnce
out.TTY = in.TTY
return nil
}
func deepCopy_api_ContainerPort(in api.ContainerPort, out *api.ContainerPort, c *conversion.Cloner) error {
out.Name = in.Name
out.HostPort = in.HostPort
out.ContainerPort = in.ContainerPort
out.Protocol = in.Protocol
out.HostIP = in.HostIP
return nil
}
func deepCopy_api_DownwardAPIVolumeFile(in api.DownwardAPIVolumeFile, out *api.DownwardAPIVolumeFile, c *conversion.Cloner) error {
out.Path = in.Path
if err := deepCopy_api_ObjectFieldSelector(in.FieldRef, &out.FieldRef, c); err != nil {
return err
}
return nil
}
func deepCopy_api_DownwardAPIVolumeSource(in api.DownwardAPIVolumeSource, out *api.DownwardAPIVolumeSource, c *conversion.Cloner) error {
if in.Items != nil {
out.Items = make([]api.DownwardAPIVolumeFile, len(in.Items))
for i := range in.Items {
if err := deepCopy_api_DownwardAPIVolumeFile(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_api_EmptyDirVolumeSource(in api.EmptyDirVolumeSource, out *api.EmptyDirVolumeSource, c *conversion.Cloner) error {
out.Medium = in.Medium
return nil
}
func deepCopy_api_EnvVar(in api.EnvVar, out *api.EnvVar, c *conversion.Cloner) error {
out.Name = in.Name
out.Value = in.Value
if in.ValueFrom != nil {
out.ValueFrom = new(api.EnvVarSource)
if err := deepCopy_api_EnvVarSource(*in.ValueFrom, out.ValueFrom, c); err != nil {
return err
}
} else {
out.ValueFrom = nil
}
return nil
}
func deepCopy_api_EnvVarSource(in api.EnvVarSource, out *api.EnvVarSource, c *conversion.Cloner) error {
if in.FieldRef != nil {
out.FieldRef = new(api.ObjectFieldSelector)
if err := deepCopy_api_ObjectFieldSelector(*in.FieldRef, out.FieldRef, c); err != nil {
return err
}
} else {
out.FieldRef = nil
}
return nil
}
func deepCopy_api_ExecAction(in api.ExecAction, out *api.ExecAction, c *conversion.Cloner) error {
if in.Command != nil {
out.Command = make([]string, len(in.Command))
for i := range in.Command {
out.Command[i] = in.Command[i]
}
} else {
out.Command = nil
}
return nil
}
func deepCopy_api_FCVolumeSource(in api.FCVolumeSource, out *api.FCVolumeSource, c *conversion.Cloner) error {
if in.TargetWWNs != nil {
out.TargetWWNs = make([]string, len(in.TargetWWNs))
for i := range in.TargetWWNs {
out.TargetWWNs[i] = in.TargetWWNs[i]
}
} else {
out.TargetWWNs = nil
}
if in.Lun != nil {
out.Lun = new(int)
*out.Lun = *in.Lun
} else {
out.Lun = nil
}
out.FSType = in.FSType
out.ReadOnly = in.ReadOnly
return nil
}
func deepCopy_api_FlockerVolumeSource(in api.FlockerVolumeSource, out *api.FlockerVolumeSource, c *conversion.Cloner) error {
out.DatasetName = in.DatasetName
return nil
}
func deepCopy_api_GCEPersistentDiskVolumeSource(in api.GCEPersistentDiskVolumeSource, out *api.GCEPersistentDiskVolumeSource, c *conversion.Cloner) error {
out.PDName = in.PDName
out.FSType = in.FSType
out.Partition = in.Partition
out.ReadOnly = in.ReadOnly
return nil
}
func deepCopy_api_GitRepoVolumeSource(in api.GitRepoVolumeSource, out *api.GitRepoVolumeSource, c *conversion.Cloner) error {
out.Repository = in.Repository
out.Revision = in.Revision
return nil
}
func deepCopy_api_GlusterfsVolumeSource(in api.GlusterfsVolumeSource, out *api.GlusterfsVolumeSource, c *conversion.Cloner) error {
out.EndpointsName = in.EndpointsName
out.Path = in.Path
out.ReadOnly = in.ReadOnly
return nil
}
func deepCopy_api_HTTPGetAction(in api.HTTPGetAction, out *api.HTTPGetAction, c *conversion.Cloner) error {
out.Path = in.Path
if err := deepCopy_util_IntOrString(in.Port, &out.Port, c); err != nil {
return err
}
out.Host = in.Host
out.Scheme = in.Scheme
return nil
}
func deepCopy_api_Handler(in api.Handler, out *api.Handler, c *conversion.Cloner) error {
if in.Exec != nil {
out.Exec = new(api.ExecAction)
if err := deepCopy_api_ExecAction(*in.Exec, out.Exec, c); err != nil {
return err
}
} else {
out.Exec = nil
}
if in.HTTPGet != nil {
out.HTTPGet = new(api.HTTPGetAction)
if err := deepCopy_api_HTTPGetAction(*in.HTTPGet, out.HTTPGet, c); err != nil {
return err
}
} else {
out.HTTPGet = nil
}
if in.TCPSocket != nil {
out.TCPSocket = new(api.TCPSocketAction)
if err := deepCopy_api_TCPSocketAction(*in.TCPSocket, out.TCPSocket, c); err != nil {
return err
}
} else {
out.TCPSocket = nil
}
return nil
}
func deepCopy_api_HostPathVolumeSource(in api.HostPathVolumeSource, out *api.HostPathVolumeSource, c *conversion.Cloner) error {
out.Path = in.Path
return nil
}
func deepCopy_api_ISCSIVolumeSource(in api.ISCSIVolumeSource, out *api.ISCSIVolumeSource, c *conversion.Cloner) error {
out.TargetPortal = in.TargetPortal
out.IQN = in.IQN
out.Lun = in.Lun
out.FSType = in.FSType
out.ReadOnly = in.ReadOnly
return nil
}
func deepCopy_api_Lifecycle(in api.Lifecycle, out *api.Lifecycle, c *conversion.Cloner) error {
if in.PostStart != nil {
out.PostStart = new(api.Handler)
if err := deepCopy_api_Handler(*in.PostStart, out.PostStart, c); err != nil {
return err
}
} else {
out.PostStart = nil
}
if in.PreStop != nil {
out.PreStop = new(api.Handler)
if err := deepCopy_api_Handler(*in.PreStop, out.PreStop, c); err != nil {
return err
}
} else {
out.PreStop = nil
}
return nil
}
func deepCopy_api_LoadBalancerIngress(in api.LoadBalancerIngress, out *api.LoadBalancerIngress, c *conversion.Cloner) error {
out.IP = in.IP
out.Hostname = in.Hostname
return nil
}
func deepCopy_api_LoadBalancerStatus(in api.LoadBalancerStatus, out *api.LoadBalancerStatus, c *conversion.Cloner) error {
if in.Ingress != nil {
out.Ingress = make([]api.LoadBalancerIngress, len(in.Ingress))
for i := range in.Ingress {
if err := deepCopy_api_LoadBalancerIngress(in.Ingress[i], &out.Ingress[i], c); err != nil {
return err
}
}
} else {
out.Ingress = nil
}
return nil
}
func deepCopy_api_LocalObjectReference(in api.LocalObjectReference, out *api.LocalObjectReference, c *conversion.Cloner) error {
out.Name = in.Name
return nil
}
func deepCopy_api_NFSVolumeSource(in api.NFSVolumeSource, out *api.NFSVolumeSource, c *conversion.Cloner) error {
out.Server = in.Server
out.Path = in.Path
out.ReadOnly = in.ReadOnly
return nil
}
func deepCopy_api_ObjectFieldSelector(in api.ObjectFieldSelector, out *api.ObjectFieldSelector, c *conversion.Cloner) error {
out.APIVersion = in.APIVersion
out.FieldPath = in.FieldPath
return nil
}
func deepCopy_api_ObjectMeta(in api.ObjectMeta, out *api.ObjectMeta, c *conversion.Cloner) error {
out.Name = in.Name
out.GenerateName = in.GenerateName
out.Namespace = in.Namespace
out.SelfLink = in.SelfLink
out.UID = in.UID
out.ResourceVersion = in.ResourceVersion
out.Generation = in.Generation
if err := deepCopy_unversioned_Time(in.CreationTimestamp, &out.CreationTimestamp, c); err != nil {
return err
}
if in.DeletionTimestamp != nil {
out.DeletionTimestamp = new(unversioned.Time)
if err := deepCopy_unversioned_Time(*in.DeletionTimestamp, out.DeletionTimestamp, c); err != nil {
return err
}
} else {
out.DeletionTimestamp = nil
}
if in.DeletionGracePeriodSeconds != nil {
out.DeletionGracePeriodSeconds = new(int64)
*out.DeletionGracePeriodSeconds = *in.DeletionGracePeriodSeconds
} else {
out.DeletionGracePeriodSeconds = nil
}
if in.Labels != nil {
out.Labels = make(map[string]string)
for key, val := range in.Labels {
out.Labels[key] = val
}
} else {
out.Labels = nil
}
if in.Annotations != nil {
out.Annotations = make(map[string]string)
for key, val := range in.Annotations {
out.Annotations[key] = val
}
} else {
out.Annotations = nil
}
return nil
}
func deepCopy_api_PersistentVolumeClaimVolumeSource(in api.PersistentVolumeClaimVolumeSource, out *api.PersistentVolumeClaimVolumeSource, c *conversion.Cloner) error {
out.ClaimName = in.ClaimName
out.ReadOnly = in.ReadOnly
return nil
}
func deepCopy_api_PodSecurityContext(in api.PodSecurityContext, out *api.PodSecurityContext, c *conversion.Cloner) error {
out.HostNetwork = in.HostNetwork
out.HostPID = in.HostPID
out.HostIPC = in.HostIPC
if in.SELinuxOptions != nil {
out.SELinuxOptions = new(api.SELinuxOptions)
if err := deepCopy_api_SELinuxOptions(*in.SELinuxOptions, out.SELinuxOptions, c); err != nil {
return err
}
} else {
out.SELinuxOptions = nil
}
if in.RunAsUser != nil {
out.RunAsUser = new(int64)
*out.RunAsUser = *in.RunAsUser
} else {
out.RunAsUser = nil
}
if in.RunAsNonRoot != nil {
out.RunAsNonRoot = new(bool)
*out.RunAsNonRoot = *in.RunAsNonRoot
} else {
out.RunAsNonRoot = nil
}
if in.SupplementalGroups != nil {
out.SupplementalGroups = make([]int64, len(in.SupplementalGroups))
for i := range in.SupplementalGroups {
out.SupplementalGroups[i] = in.SupplementalGroups[i]
}
} else {
out.SupplementalGroups = nil
}
if in.FSGroup != nil {
out.FSGroup = new(int64)
*out.FSGroup = *in.FSGroup
} else {
out.FSGroup = nil
}
return nil
}
func deepCopy_api_PodSpec(in api.PodSpec, out *api.PodSpec, c *conversion.Cloner) error {
if in.Volumes != nil {
out.Volumes = make([]api.Volume, len(in.Volumes))
for i := range in.Volumes {
if err := deepCopy_api_Volume(in.Volumes[i], &out.Volumes[i], c); err != nil {
return err
}
}
} else {
out.Volumes = nil
}
if in.Containers != nil {
out.Containers = make([]api.Container, len(in.Containers))
for i := range in.Containers {
if err := deepCopy_api_Container(in.Containers[i], &out.Containers[i], c); err != nil {
return err
}
}
} else {
out.Containers = nil
}
out.RestartPolicy = in.RestartPolicy
if in.TerminationGracePeriodSeconds != nil {
out.TerminationGracePeriodSeconds = new(int64)
*out.TerminationGracePeriodSeconds = *in.TerminationGracePeriodSeconds
} else {
out.TerminationGracePeriodSeconds = nil
}
if in.ActiveDeadlineSeconds != nil {
out.ActiveDeadlineSeconds = new(int64)
*out.ActiveDeadlineSeconds = *in.ActiveDeadlineSeconds
} else {
out.ActiveDeadlineSeconds = nil
}
out.DNSPolicy = in.DNSPolicy
if in.NodeSelector != nil {
out.NodeSelector = make(map[string]string)
for key, val := range in.NodeSelector {
out.NodeSelector[key] = val
}
} else {
out.NodeSelector = nil
}
out.ServiceAccountName = in.ServiceAccountName
out.NodeName = in.NodeName
if in.SecurityContext != nil {
out.SecurityContext = new(api.PodSecurityContext)
if err := deepCopy_api_PodSecurityContext(*in.SecurityContext, out.SecurityContext, c); err != nil {
return err
}
} else {
out.SecurityContext = nil
}
if in.ImagePullSecrets != nil {
out.ImagePullSecrets = make([]api.LocalObjectReference, len(in.ImagePullSecrets))
for i := range in.ImagePullSecrets {
if err := deepCopy_api_LocalObjectReference(in.ImagePullSecrets[i], &out.ImagePullSecrets[i], c); err != nil {
return err
}
}
} else {
out.ImagePullSecrets = nil
}
return nil
}
func deepCopy_api_PodTemplateSpec(in api.PodTemplateSpec, out *api.PodTemplateSpec, c *conversion.Cloner) error {
if err := deepCopy_api_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_api_PodSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
return nil
}
func deepCopy_api_Probe(in api.Probe, out *api.Probe, c *conversion.Cloner) error {
if err := deepCopy_api_Handler(in.Handler, &out.Handler, c); err != nil {
return err
}
out.InitialDelaySeconds = in.InitialDelaySeconds
out.TimeoutSeconds = in.TimeoutSeconds
out.PeriodSeconds = in.PeriodSeconds
out.SuccessThreshold = in.SuccessThreshold
out.FailureThreshold = in.FailureThreshold
return nil
}
func deepCopy_api_RBDVolumeSource(in api.RBDVolumeSource, out *api.RBDVolumeSource, c *conversion.Cloner) error {
if in.CephMonitors != nil {
out.CephMonitors = make([]string, len(in.CephMonitors))
for i := range in.CephMonitors {
out.CephMonitors[i] = in.CephMonitors[i]
}
} else {
out.CephMonitors = nil
}
out.RBDImage = in.RBDImage
out.FSType = in.FSType
out.RBDPool = in.RBDPool
out.RadosUser = in.RadosUser
out.Keyring = in.Keyring
if in.SecretRef != nil {
out.SecretRef = new(api.LocalObjectReference)
if err := deepCopy_api_LocalObjectReference(*in.SecretRef, out.SecretRef, c); err != nil {
return err
}
} else {
out.SecretRef = nil
}
out.ReadOnly = in.ReadOnly
return nil
}
func deepCopy_api_ResourceRequirements(in api.ResourceRequirements, out *api.ResourceRequirements, c *conversion.Cloner) error {
if in.Limits != nil {
out.Limits = make(api.ResourceList)
for key, val := range in.Limits {
newVal := new(resource.Quantity)
if err := deepCopy_resource_Quantity(val, newVal, c); err != nil {
return err
}
out.Limits[key] = *newVal
}
} else {
out.Limits = nil
}
if in.Requests != nil {
out.Requests = make(api.ResourceList)
for key, val := range in.Requests {
newVal := new(resource.Quantity)
if err := deepCopy_resource_Quantity(val, newVal, c); err != nil {
return err
}
out.Requests[key] = *newVal
}
} else {
out.Requests = nil
}
return nil
}
func deepCopy_api_SELinuxOptions(in api.SELinuxOptions, out *api.SELinuxOptions, c *conversion.Cloner) error {
out.User = in.User
out.Role = in.Role
out.Type = in.Type
out.Level = in.Level
return nil
}
func deepCopy_api_SecretVolumeSource(in api.SecretVolumeSource, out *api.SecretVolumeSource, c *conversion.Cloner) error {
out.SecretName = in.SecretName
return nil
}
func deepCopy_api_SecurityContext(in api.SecurityContext, out *api.SecurityContext, c *conversion.Cloner) error {
if in.Capabilities != nil {
out.Capabilities = new(api.Capabilities)
if err := deepCopy_api_Capabilities(*in.Capabilities, out.Capabilities, c); err != nil {
return err
}
} else {
out.Capabilities = nil
}
if in.Privileged != nil {
out.Privileged = new(bool)
*out.Privileged = *in.Privileged
} else {
out.Privileged = nil
}
if in.SELinuxOptions != nil {
out.SELinuxOptions = new(api.SELinuxOptions)
if err := deepCopy_api_SELinuxOptions(*in.SELinuxOptions, out.SELinuxOptions, c); err != nil {
return err
}
} else {
out.SELinuxOptions = nil
}
if in.RunAsUser != nil {
out.RunAsUser = new(int64)
*out.RunAsUser = *in.RunAsUser
} else {
out.RunAsUser = nil
}
if in.RunAsNonRoot != nil {
out.RunAsNonRoot = new(bool)
*out.RunAsNonRoot = *in.RunAsNonRoot
} else {
out.RunAsNonRoot = nil
}
return nil
}
func deepCopy_api_TCPSocketAction(in api.TCPSocketAction, out *api.TCPSocketAction, c *conversion.Cloner) error {
if err := deepCopy_util_IntOrString(in.Port, &out.Port, c); err != nil {
return err
}
return nil
}
func deepCopy_api_Volume(in api.Volume, out *api.Volume, c *conversion.Cloner) error {
out.Name = in.Name
if err := deepCopy_api_VolumeSource(in.VolumeSource, &out.VolumeSource, c); err != nil {
return err
}
return nil
}
func deepCopy_api_VolumeMount(in api.VolumeMount, out *api.VolumeMount, c *conversion.Cloner) error {
out.Name = in.Name
out.ReadOnly = in.ReadOnly
out.MountPath = in.MountPath
return nil
}
func deepCopy_api_VolumeSource(in api.VolumeSource, out *api.VolumeSource, c *conversion.Cloner) error {
if in.HostPath != nil {
out.HostPath = new(api.HostPathVolumeSource)
if err := deepCopy_api_HostPathVolumeSource(*in.HostPath, out.HostPath, c); err != nil {
return err
}
} else {
out.HostPath = nil
}
if in.EmptyDir != nil {
out.EmptyDir = new(api.EmptyDirVolumeSource)
if err := deepCopy_api_EmptyDirVolumeSource(*in.EmptyDir, out.EmptyDir, c); err != nil {
return err
}
} else {
out.EmptyDir = nil
}
if in.GCEPersistentDisk != nil {
out.GCEPersistentDisk = new(api.GCEPersistentDiskVolumeSource)
if err := deepCopy_api_GCEPersistentDiskVolumeSource(*in.GCEPersistentDisk, out.GCEPersistentDisk, c); err != nil {
return err
}
} else {
out.GCEPersistentDisk = nil
}
if in.AWSElasticBlockStore != nil {
out.AWSElasticBlockStore = new(api.AWSElasticBlockStoreVolumeSource)
if err := deepCopy_api_AWSElasticBlockStoreVolumeSource(*in.AWSElasticBlockStore, out.AWSElasticBlockStore, c); err != nil {
return err
}
} else {
out.AWSElasticBlockStore = nil
}
if in.GitRepo != nil {
out.GitRepo = new(api.GitRepoVolumeSource)
if err := deepCopy_api_GitRepoVolumeSource(*in.GitRepo, out.GitRepo, c); err != nil {
return err
}
} else {
out.GitRepo = nil
}
if in.Secret != nil {
out.Secret = new(api.SecretVolumeSource)
if err := deepCopy_api_SecretVolumeSource(*in.Secret, out.Secret, c); err != nil {
return err
}
} else {
out.Secret = nil
}
if in.NFS != nil {
out.NFS = new(api.NFSVolumeSource)
if err := deepCopy_api_NFSVolumeSource(*in.NFS, out.NFS, c); err != nil {
return err
}
} else {
out.NFS = nil
}
if in.ISCSI != nil {
out.ISCSI = new(api.ISCSIVolumeSource)
if err := deepCopy_api_ISCSIVolumeSource(*in.ISCSI, out.ISCSI, c); err != nil {
return err
}
} else {
out.ISCSI = nil
}
if in.Glusterfs != nil {
out.Glusterfs = new(api.GlusterfsVolumeSource)
if err := deepCopy_api_GlusterfsVolumeSource(*in.Glusterfs, out.Glusterfs, c); err != nil {
return err
}
} else {
out.Glusterfs = nil
}
if in.PersistentVolumeClaim != nil {
out.PersistentVolumeClaim = new(api.PersistentVolumeClaimVolumeSource)
if err := deepCopy_api_PersistentVolumeClaimVolumeSource(*in.PersistentVolumeClaim, out.PersistentVolumeClaim, c); err != nil {
return err
}
} else {
out.PersistentVolumeClaim = nil
}
if in.RBD != nil {
out.RBD = new(api.RBDVolumeSource)
if err := deepCopy_api_RBDVolumeSource(*in.RBD, out.RBD, c); err != nil {
return err
}
} else {
out.RBD = nil
}
if in.Cinder != nil {
out.Cinder = new(api.CinderVolumeSource)
if err := deepCopy_api_CinderVolumeSource(*in.Cinder, out.Cinder, c); err != nil {
return err
}
} else {
out.Cinder = nil
}
if in.CephFS != nil {
out.CephFS = new(api.CephFSVolumeSource)
if err := deepCopy_api_CephFSVolumeSource(*in.CephFS, out.CephFS, c); err != nil {
return err
}
} else {
out.CephFS = nil
}
if in.Flocker != nil {
out.Flocker = new(api.FlockerVolumeSource)
if err := deepCopy_api_FlockerVolumeSource(*in.Flocker, out.Flocker, c); err != nil {
return err
}
} else {
out.Flocker = nil
}
if in.DownwardAPI != nil {
out.DownwardAPI = new(api.DownwardAPIVolumeSource)
if err := deepCopy_api_DownwardAPIVolumeSource(*in.DownwardAPI, out.DownwardAPI, c); err != nil {
return err
}
} else {
out.DownwardAPI = nil
}
if in.FC != nil {
out.FC = new(api.FCVolumeSource)
if err := deepCopy_api_FCVolumeSource(*in.FC, out.FC, c); err != nil {
return err
}
} else {
out.FC = nil
}
return nil
}
func deepCopy_resource_Quantity(in resource.Quantity, out *resource.Quantity, c *conversion.Cloner) error {
if in.Amount != nil {
if newVal, err := c.DeepCopy(in.Amount); err != nil {
return err
} else {
out.Amount = newVal.(*inf.Dec)
}
} else {
out.Amount = nil
}
out.Format = in.Format
return nil
}
func deepCopy_unversioned_ListMeta(in unversioned.ListMeta, out *unversioned.ListMeta, c *conversion.Cloner) error {
out.SelfLink = in.SelfLink
out.ResourceVersion = in.ResourceVersion
return nil
}
func deepCopy_unversioned_Time(in unversioned.Time, out *unversioned.Time, c *conversion.Cloner) error {
if newVal, err := c.DeepCopy(in.Time); err != nil {
return err
} else {
out.Time = newVal.(time.Time)
}
return nil
}
func deepCopy_unversioned_TypeMeta(in unversioned.TypeMeta, out *unversioned.TypeMeta, c *conversion.Cloner) error {
out.Kind = in.Kind
out.APIVersion = in.APIVersion
return nil
}
func deepCopy_extensions_APIVersion(in APIVersion, out *APIVersion, c *conversion.Cloner) error {
out.Name = in.Name
out.APIGroup = in.APIGroup
return nil
}
func deepCopy_extensions_CPUTargetUtilization(in CPUTargetUtilization, out *CPUTargetUtilization, c *conversion.Cloner) error {
out.TargetPercentage = in.TargetPercentage
return nil
}
func deepCopy_extensions_ClusterAutoscaler(in ClusterAutoscaler, out *ClusterAutoscaler, c *conversion.Cloner) error {
if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_api_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_extensions_ClusterAutoscalerSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
return nil
}
func deepCopy_extensions_ClusterAutoscalerList(in ClusterAutoscalerList, out *ClusterAutoscalerList, c *conversion.Cloner) error {
if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_unversioned_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]ClusterAutoscaler, len(in.Items))
for i := range in.Items {
if err := deepCopy_extensions_ClusterAutoscaler(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_extensions_ClusterAutoscalerSpec(in ClusterAutoscalerSpec, out *ClusterAutoscalerSpec, c *conversion.Cloner) error {
out.MinNodes = in.MinNodes
out.MaxNodes = in.MaxNodes
if in.TargetUtilization != nil {
out.TargetUtilization = make([]NodeUtilization, len(in.TargetUtilization))
for i := range in.TargetUtilization {
if err := deepCopy_extensions_NodeUtilization(in.TargetUtilization[i], &out.TargetUtilization[i], c); err != nil {
return err
}
}
} else {
out.TargetUtilization = nil
}
return nil
}
func deepCopy_extensions_DaemonSet(in DaemonSet, out *DaemonSet, c *conversion.Cloner) error {
if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_api_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_extensions_DaemonSetSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
if err := deepCopy_extensions_DaemonSetStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_extensions_DaemonSetList(in DaemonSetList, out *DaemonSetList, c *conversion.Cloner) error {
if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_unversioned_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]DaemonSet, len(in.Items))
for i := range in.Items {
if err := deepCopy_extensions_DaemonSet(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_extensions_DaemonSetSpec(in DaemonSetSpec, out *DaemonSetSpec, c *conversion.Cloner) error {
if in.Selector != nil {
out.Selector = make(map[string]string)
for key, val := range in.Selector {
out.Selector[key] = val
}
} else {
out.Selector = nil
}
if in.Template != nil {
out.Template = new(api.PodTemplateSpec)
if err := deepCopy_api_PodTemplateSpec(*in.Template, out.Template, c); err != nil {
return err
}
} else {
out.Template = nil
}
return nil
}
func deepCopy_extensions_DaemonSetStatus(in DaemonSetStatus, out *DaemonSetStatus, c *conversion.Cloner) error {
out.CurrentNumberScheduled = in.CurrentNumberScheduled
out.NumberMisscheduled = in.NumberMisscheduled
out.DesiredNumberScheduled = in.DesiredNumberScheduled
return nil
}
func deepCopy_extensions_Deployment(in Deployment, out *Deployment, c *conversion.Cloner) error {
if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_api_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_extensions_DeploymentSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
if err := deepCopy_extensions_DeploymentStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_extensions_DeploymentList(in DeploymentList, out *DeploymentList, c *conversion.Cloner) error {
if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_unversioned_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]Deployment, len(in.Items))
for i := range in.Items {
if err := deepCopy_extensions_Deployment(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_extensions_DeploymentSpec(in DeploymentSpec, out *DeploymentSpec, c *conversion.Cloner) error {
out.Replicas = in.Replicas
if in.Selector != nil {
out.Selector = make(map[string]string)
for key, val := range in.Selector {
out.Selector[key] = val
}
} else {
out.Selector = nil
}
if err := deepCopy_api_PodTemplateSpec(in.Template, &out.Template, c); err != nil {
return err
}
if err := deepCopy_extensions_DeploymentStrategy(in.Strategy, &out.Strategy, c); err != nil {
return err
}
out.UniqueLabelKey = in.UniqueLabelKey
return nil
}
func deepCopy_extensions_DeploymentStatus(in DeploymentStatus, out *DeploymentStatus, c *conversion.Cloner) error {
out.Replicas = in.Replicas
out.UpdatedReplicas = in.UpdatedReplicas
return nil
}
func deepCopy_extensions_DeploymentStrategy(in DeploymentStrategy, out *DeploymentStrategy, c *conversion.Cloner) error {
out.Type = in.Type
if in.RollingUpdate != nil {
out.RollingUpdate = new(RollingUpdateDeployment)
if err := deepCopy_extensions_RollingUpdateDeployment(*in.RollingUpdate, out.RollingUpdate, c); err != nil {
return err
}
} else {
out.RollingUpdate = nil
}
return nil
}
func deepCopy_extensions_HTTPIngressPath(in HTTPIngressPath, out *HTTPIngressPath, c *conversion.Cloner) error {
out.Path = in.Path
if err := deepCopy_extensions_IngressBackend(in.Backend, &out.Backend, c); err != nil {
return err
}
return nil
}
func deepCopy_extensions_HTTPIngressRuleValue(in HTTPIngressRuleValue, out *HTTPIngressRuleValue, c *conversion.Cloner) error {
if in.Paths != nil {
out.Paths = make([]HTTPIngressPath, len(in.Paths))
for i := range in.Paths {
if err := deepCopy_extensions_HTTPIngressPath(in.Paths[i], &out.Paths[i], c); err != nil {
return err
}
}
} else {
out.Paths = nil
}
return nil
}
func deepCopy_extensions_HorizontalPodAutoscaler(in HorizontalPodAutoscaler, out *HorizontalPodAutoscaler, c *conversion.Cloner) error {
if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_api_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_extensions_HorizontalPodAutoscalerSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
if err := deepCopy_extensions_HorizontalPodAutoscalerStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_extensions_HorizontalPodAutoscalerList(in HorizontalPodAutoscalerList, out *HorizontalPodAutoscalerList, c *conversion.Cloner) error {
if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_unversioned_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]HorizontalPodAutoscaler, len(in.Items))
for i := range in.Items {
if err := deepCopy_extensions_HorizontalPodAutoscaler(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_extensions_HorizontalPodAutoscalerSpec(in HorizontalPodAutoscalerSpec, out *HorizontalPodAutoscalerSpec, c *conversion.Cloner) error {
if err := deepCopy_extensions_SubresourceReference(in.ScaleRef, &out.ScaleRef, c); err != nil {
return err
}
if in.MinReplicas != nil {
out.MinReplicas = new(int)
*out.MinReplicas = *in.MinReplicas
} else {
out.MinReplicas = nil
}
out.MaxReplicas = in.MaxReplicas
if in.CPUUtilization != nil {
out.CPUUtilization = new(CPUTargetUtilization)
if err := deepCopy_extensions_CPUTargetUtilization(*in.CPUUtilization, out.CPUUtilization, c); err != nil {
return err
}
} else {
out.CPUUtilization = nil
}
return nil
}
func deepCopy_extensions_HorizontalPodAutoscalerStatus(in HorizontalPodAutoscalerStatus, out *HorizontalPodAutoscalerStatus, c *conversion.Cloner) error {
if in.ObservedGeneration != nil {
out.ObservedGeneration = new(int64)
*out.ObservedGeneration = *in.ObservedGeneration
} else {
out.ObservedGeneration = nil
}
if in.LastScaleTime != nil {
out.LastScaleTime = new(unversioned.Time)
if err := deepCopy_unversioned_Time(*in.LastScaleTime, out.LastScaleTime, c); err != nil {
return err
}
} else {
out.LastScaleTime = nil
}
out.CurrentReplicas = in.CurrentReplicas
out.DesiredReplicas = in.DesiredReplicas
if in.CurrentCPUUtilizationPercentage != nil {
out.CurrentCPUUtilizationPercentage = new(int)
*out.CurrentCPUUtilizationPercentage = *in.CurrentCPUUtilizationPercentage
} else {
out.CurrentCPUUtilizationPercentage = nil
}
return nil
}
func deepCopy_extensions_Ingress(in Ingress, out *Ingress, c *conversion.Cloner) error {
if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_api_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_extensions_IngressSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
if err := deepCopy_extensions_IngressStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_extensions_IngressBackend(in IngressBackend, out *IngressBackend, c *conversion.Cloner) error {
out.ServiceName = in.ServiceName
if err := deepCopy_util_IntOrString(in.ServicePort, &out.ServicePort, c); err != nil {
return err
}
return nil
}
func deepCopy_extensions_IngressList(in IngressList, out *IngressList, c *conversion.Cloner) error {
if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_unversioned_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]Ingress, len(in.Items))
for i := range in.Items {
if err := deepCopy_extensions_Ingress(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_extensions_IngressRule(in IngressRule, out *IngressRule, c *conversion.Cloner) error {
out.Host = in.Host
if err := deepCopy_extensions_IngressRuleValue(in.IngressRuleValue, &out.IngressRuleValue, c); err != nil {
return err
}
return nil
}
func deepCopy_extensions_IngressRuleValue(in IngressRuleValue, out *IngressRuleValue, c *conversion.Cloner) error {
if in.HTTP != nil {
out.HTTP = new(HTTPIngressRuleValue)
if err := deepCopy_extensions_HTTPIngressRuleValue(*in.HTTP, out.HTTP, c); err != nil {
return err
}
} else {
out.HTTP = nil
}
return nil
}
func deepCopy_extensions_IngressSpec(in IngressSpec, out *IngressSpec, c *conversion.Cloner) error {
if in.Backend != nil {
out.Backend = new(IngressBackend)
if err := deepCopy_extensions_IngressBackend(*in.Backend, out.Backend, c); err != nil {
return err
}
} else {
out.Backend = nil
}
if in.Rules != nil {
out.Rules = make([]IngressRule, len(in.Rules))
for i := range in.Rules {
if err := deepCopy_extensions_IngressRule(in.Rules[i], &out.Rules[i], c); err != nil {
return err
}
}
} else {
out.Rules = nil
}
return nil
}
func deepCopy_extensions_IngressStatus(in IngressStatus, out *IngressStatus, c *conversion.Cloner) error {
if err := deepCopy_api_LoadBalancerStatus(in.LoadBalancer, &out.LoadBalancer, c); err != nil {
return err
}
return nil
}
func deepCopy_extensions_Job(in Job, out *Job, c *conversion.Cloner) error {
if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_api_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_extensions_JobSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
if err := deepCopy_extensions_JobStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_extensions_JobCondition(in JobCondition, out *JobCondition, c *conversion.Cloner) error {
out.Type = in.Type
out.Status = in.Status
if err := deepCopy_unversioned_Time(in.LastProbeTime, &out.LastProbeTime, c); err != nil {
return err
}
if err := deepCopy_unversioned_Time(in.LastTransitionTime, &out.LastTransitionTime, c); err != nil {
return err
}
out.Reason = in.Reason
out.Message = in.Message
return nil
}
func deepCopy_extensions_JobList(in JobList, out *JobList, c *conversion.Cloner) error {
if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_unversioned_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]Job, len(in.Items))
for i := range in.Items {
if err := deepCopy_extensions_Job(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_extensions_JobSpec(in JobSpec, out *JobSpec, c *conversion.Cloner) error {
if in.Parallelism != nil {
out.Parallelism = new(int)
*out.Parallelism = *in.Parallelism
} else {
out.Parallelism = nil
}
if in.Completions != nil {
out.Completions = new(int)
*out.Completions = *in.Completions
} else {
out.Completions = nil
}
if in.Selector != nil {
out.Selector = new(PodSelector)
if err := deepCopy_extensions_PodSelector(*in.Selector, out.Selector, c); err != nil {
return err
}
} else {
out.Selector = nil
}
if err := deepCopy_api_PodTemplateSpec(in.Template, &out.Template, c); err != nil {
return err
}
return nil
}
func deepCopy_extensions_JobStatus(in JobStatus, out *JobStatus, c *conversion.Cloner) error {
if in.Conditions != nil {
out.Conditions = make([]JobCondition, len(in.Conditions))
for i := range in.Conditions {
if err := deepCopy_extensions_JobCondition(in.Conditions[i], &out.Conditions[i], c); err != nil {
return err
}
}
} else {
out.Conditions = nil
}
if in.StartTime != nil {
out.StartTime = new(unversioned.Time)
if err := deepCopy_unversioned_Time(*in.StartTime, out.StartTime, c); err != nil {
return err
}
} else {
out.StartTime = nil
}
if in.CompletionTime != nil {
out.CompletionTime = new(unversioned.Time)
if err := deepCopy_unversioned_Time(*in.CompletionTime, out.CompletionTime, c); err != nil {
return err
}
} else {
out.CompletionTime = nil
}
out.Active = in.Active
out.Succeeded = in.Succeeded
out.Failed = in.Failed
return nil
}
func deepCopy_extensions_NodeUtilization(in NodeUtilization, out *NodeUtilization, c *conversion.Cloner) error {
out.Resource = in.Resource
out.Value = in.Value
return nil
}
func deepCopy_extensions_PodSelector(in PodSelector, out *PodSelector, c *conversion.Cloner) error {
if in.MatchLabels != nil {
out.MatchLabels = make(map[string]string)
for key, val := range in.MatchLabels {
out.MatchLabels[key] = val
}
} else {
out.MatchLabels = nil
}
if in.MatchExpressions != nil {
out.MatchExpressions = make([]PodSelectorRequirement, len(in.MatchExpressions))
for i := range in.MatchExpressions {
if err := deepCopy_extensions_PodSelectorRequirement(in.MatchExpressions[i], &out.MatchExpressions[i], c); err != nil {
return err
}
}
} else {
out.MatchExpressions = nil
}
return nil
}
func deepCopy_extensions_PodSelectorRequirement(in PodSelectorRequirement, out *PodSelectorRequirement, c *conversion.Cloner) error {
out.Key = in.Key
out.Operator = in.Operator
if in.Values != nil {
out.Values = make([]string, len(in.Values))
for i := range in.Values {
out.Values[i] = in.Values[i]
}
} else {
out.Values = nil
}
return nil
}
func deepCopy_extensions_ReplicationControllerDummy(in ReplicationControllerDummy, out *ReplicationControllerDummy, c *conversion.Cloner) error {
if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
return nil
}
func deepCopy_extensions_RollingUpdateDeployment(in RollingUpdateDeployment, out *RollingUpdateDeployment, c *conversion.Cloner) error {
if err := deepCopy_util_IntOrString(in.MaxUnavailable, &out.MaxUnavailable, c); err != nil {
return err
}
if err := deepCopy_util_IntOrString(in.MaxSurge, &out.MaxSurge, c); err != nil {
return err
}
out.MinReadySeconds = in.MinReadySeconds
return nil
}
func deepCopy_extensions_Scale(in Scale, out *Scale, c *conversion.Cloner) error {
if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_api_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_extensions_ScaleSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
if err := deepCopy_extensions_ScaleStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_extensions_ScaleSpec(in ScaleSpec, out *ScaleSpec, c *conversion.Cloner) error {
out.Replicas = in.Replicas
return nil
}
func deepCopy_extensions_ScaleStatus(in ScaleStatus, out *ScaleStatus, c *conversion.Cloner) error {
out.Replicas = in.Replicas
if in.Selector != nil {
out.Selector = make(map[string]string)
for key, val := range in.Selector {
out.Selector[key] = val
}
} else {
out.Selector = nil
}
return nil
}
func deepCopy_extensions_SubresourceReference(in SubresourceReference, out *SubresourceReference, c *conversion.Cloner) error {
out.Kind = in.Kind
out.Name = in.Name
out.APIVersion = in.APIVersion
out.Subresource = in.Subresource
return nil
}
func deepCopy_extensions_ThirdPartyResource(in ThirdPartyResource, out *ThirdPartyResource, c *conversion.Cloner) error {
if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_api_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
out.Description = in.Description
if in.Versions != nil {
out.Versions = make([]APIVersion, len(in.Versions))
for i := range in.Versions {
if err := deepCopy_extensions_APIVersion(in.Versions[i], &out.Versions[i], c); err != nil {
return err
}
}
} else {
out.Versions = nil
}
return nil
}
func deepCopy_extensions_ThirdPartyResourceData(in ThirdPartyResourceData, out *ThirdPartyResourceData, c *conversion.Cloner) error {
if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_api_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if in.Data != nil {
out.Data = make([]uint8, len(in.Data))
for i := range in.Data {
out.Data[i] = in.Data[i]
}
} else {
out.Data = nil
}
return nil
}
func deepCopy_extensions_ThirdPartyResourceDataList(in ThirdPartyResourceDataList, out *ThirdPartyResourceDataList, c *conversion.Cloner) error {
if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_unversioned_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]ThirdPartyResourceData, len(in.Items))
for i := range in.Items {
if err := deepCopy_extensions_ThirdPartyResourceData(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_extensions_ThirdPartyResourceList(in ThirdPartyResourceList, out *ThirdPartyResourceList, c *conversion.Cloner) error {
if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_unversioned_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]ThirdPartyResource, len(in.Items))
for i := range in.Items {
if err := deepCopy_extensions_ThirdPartyResource(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func deepCopy_util_IntOrString(in util.IntOrString, out *util.IntOrString, c *conversion.Cloner) error {
out.Kind = in.Kind
out.IntVal = in.IntVal
out.StrVal = in.StrVal
return nil
}
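// init registers every generated deep copy function above with api.Scheme, so
// the scheme's conversion machinery can deep-copy these extensions types at
// runtime; a malformed registration is surfaced immediately via the panic below.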
func init() {
err := api.Scheme.AddGeneratedDeepCopyFuncs(
deepCopy_api_AWSElasticBlockStoreVolumeSource,
deepCopy_api_Capabilities,
deepCopy_api_CephFSVolumeSource,
deepCopy_api_CinderVolumeSource,
deepCopy_api_Container,
deepCopy_api_ContainerPort,
deepCopy_api_DownwardAPIVolumeFile,
deepCopy_api_DownwardAPIVolumeSource,
deepCopy_api_EmptyDirVolumeSource,
deepCopy_api_EnvVar,
deepCopy_api_EnvVarSource,
deepCopy_api_ExecAction,
deepCopy_api_FCVolumeSource,
deepCopy_api_FlockerVolumeSource,
deepCopy_api_GCEPersistentDiskVolumeSource,
deepCopy_api_GitRepoVolumeSource,
deepCopy_api_GlusterfsVolumeSource,
deepCopy_api_HTTPGetAction,
deepCopy_api_Handler,
deepCopy_api_HostPathVolumeSource,
deepCopy_api_ISCSIVolumeSource,
deepCopy_api_Lifecycle,
deepCopy_api_LoadBalancerIngress,
deepCopy_api_LoadBalancerStatus,
deepCopy_api_LocalObjectReference,
deepCopy_api_NFSVolumeSource,
deepCopy_api_ObjectFieldSelector,
deepCopy_api_ObjectMeta,
deepCopy_api_PersistentVolumeClaimVolumeSource,
deepCopy_api_PodSecurityContext,
deepCopy_api_PodSpec,
deepCopy_api_PodTemplateSpec,
deepCopy_api_Probe,
deepCopy_api_RBDVolumeSource,
deepCopy_api_ResourceRequirements,
deepCopy_api_SELinuxOptions,
deepCopy_api_SecretVolumeSource,
deepCopy_api_SecurityContext,
deepCopy_api_TCPSocketAction,
deepCopy_api_Volume,
deepCopy_api_VolumeMount,
deepCopy_api_VolumeSource,
deepCopy_resource_Quantity,
deepCopy_unversioned_ListMeta,
deepCopy_unversioned_Time,
deepCopy_unversioned_TypeMeta,
deepCopy_extensions_APIVersion,
deepCopy_extensions_CPUTargetUtilization,
deepCopy_extensions_ClusterAutoscaler,
deepCopy_extensions_ClusterAutoscalerList,
deepCopy_extensions_ClusterAutoscalerSpec,
deepCopy_extensions_DaemonSet,
deepCopy_extensions_DaemonSetList,
deepCopy_extensions_DaemonSetSpec,
deepCopy_extensions_DaemonSetStatus,
deepCopy_extensions_Deployment,
deepCopy_extensions_DeploymentList,
deepCopy_extensions_DeploymentSpec,
deepCopy_extensions_DeploymentStatus,
deepCopy_extensions_DeploymentStrategy,
deepCopy_extensions_HTTPIngressPath,
deepCopy_extensions_HTTPIngressRuleValue,
deepCopy_extensions_HorizontalPodAutoscaler,
deepCopy_extensions_HorizontalPodAutoscalerList,
deepCopy_extensions_HorizontalPodAutoscalerSpec,
deepCopy_extensions_HorizontalPodAutoscalerStatus,
deepCopy_extensions_Ingress,
deepCopy_extensions_IngressBackend,
deepCopy_extensions_IngressList,
deepCopy_extensions_IngressRule,
deepCopy_extensions_IngressRuleValue,
deepCopy_extensions_IngressSpec,
deepCopy_extensions_IngressStatus,
deepCopy_extensions_Job,
deepCopy_extensions_JobCondition,
deepCopy_extensions_JobList,
deepCopy_extensions_JobSpec,
deepCopy_extensions_JobStatus,
deepCopy_extensions_NodeUtilization,
deepCopy_extensions_PodSelector,
deepCopy_extensions_PodSelectorRequirement,
deepCopy_extensions_ReplicationControllerDummy,
deepCopy_extensions_RollingUpdateDeployment,
deepCopy_extensions_Scale,
deepCopy_extensions_ScaleSpec,
deepCopy_extensions_ScaleStatus,
deepCopy_extensions_SubresourceReference,
deepCopy_extensions_ThirdPartyResource,
deepCopy_extensions_ThirdPartyResourceData,
deepCopy_extensions_ThirdPartyResourceDataList,
deepCopy_extensions_ThirdPartyResourceList,
deepCopy_util_IntOrString,
)
if err != nil {
// if one of the deep copy functions is malformed, detect it immediately.
panic(err)
}
}
| klucar/pachyderm | vendor/k8s.io/kubernetes/pkg/apis/extensions/deep_copy_generated.go | GO | apache-2.0 | 48,982 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.trogdor.rest;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.JsonNode;
import org.apache.kafka.trogdor.task.TaskSpec;
/**
* The state for a task which is being stopped on the coordinator.
*/
public class TaskStopping extends TaskState {
    /**
     * The time on the coordinator when the task was started.
     */
private final long startedMs;
@JsonCreator
public TaskStopping(@JsonProperty("spec") TaskSpec spec,
@JsonProperty("startedMs") long startedMs,
@JsonProperty("status") JsonNode status) {
super(spec, status);
this.startedMs = startedMs;
}
@JsonProperty
public long startedMs() {
return startedMs;
}
@Override
public TaskStateType stateType() {
return TaskStateType.STOPPING;
}
}
| TiVo/kafka | trogdor/src/main/java/org/apache/kafka/trogdor/rest/TaskStopping.java | Java | apache-2.0 | 1,713 |
// +build go1.9

// Copyright 2019 Microsoft Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// This code was auto-generated by:
// github.com/Azure/azure-sdk-for-go/tools/profileBuilder
package servicefabric
import original "github.com/Azure/azure-sdk-for-go/services/servicefabric/6.5/servicefabric"
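// The declarations below are generated aliases: they re-export the constants and
// types of the versioned servicefabric package (6.5) under this profile's import
// path, so profile consumers can depend on a single, stable package path.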
const (
DefaultBaseURI = original.DefaultBaseURI
)
type ApplicationDefinitionKind = original.ApplicationDefinitionKind
const (
Compose ApplicationDefinitionKind = original.Compose
Invalid ApplicationDefinitionKind = original.Invalid
ServiceFabricApplicationDescription ApplicationDefinitionKind = original.ServiceFabricApplicationDescription
)
type ApplicationPackageCleanupPolicy = original.ApplicationPackageCleanupPolicy
const (
ApplicationPackageCleanupPolicyAutomatic ApplicationPackageCleanupPolicy = original.ApplicationPackageCleanupPolicyAutomatic
ApplicationPackageCleanupPolicyDefault ApplicationPackageCleanupPolicy = original.ApplicationPackageCleanupPolicyDefault
ApplicationPackageCleanupPolicyInvalid ApplicationPackageCleanupPolicy = original.ApplicationPackageCleanupPolicyInvalid
ApplicationPackageCleanupPolicyManual ApplicationPackageCleanupPolicy = original.ApplicationPackageCleanupPolicyManual
)
type ApplicationScopedVolumeKind = original.ApplicationScopedVolumeKind
const (
ServiceFabricVolumeDisk ApplicationScopedVolumeKind = original.ServiceFabricVolumeDisk
)
type ApplicationStatus = original.ApplicationStatus
const (
ApplicationStatusCreating ApplicationStatus = original.ApplicationStatusCreating
ApplicationStatusDeleting ApplicationStatus = original.ApplicationStatusDeleting
ApplicationStatusFailed ApplicationStatus = original.ApplicationStatusFailed
ApplicationStatusInvalid ApplicationStatus = original.ApplicationStatusInvalid
ApplicationStatusReady ApplicationStatus = original.ApplicationStatusReady
ApplicationStatusUpgrading ApplicationStatus = original.ApplicationStatusUpgrading
)
type ApplicationTypeDefinitionKind = original.ApplicationTypeDefinitionKind
const (
ApplicationTypeDefinitionKindCompose ApplicationTypeDefinitionKind = original.ApplicationTypeDefinitionKindCompose
ApplicationTypeDefinitionKindInvalid ApplicationTypeDefinitionKind = original.ApplicationTypeDefinitionKindInvalid
ApplicationTypeDefinitionKindServiceFabricApplicationPackage ApplicationTypeDefinitionKind = original.ApplicationTypeDefinitionKindServiceFabricApplicationPackage
)
type ApplicationTypeStatus = original.ApplicationTypeStatus
const (
ApplicationTypeStatusAvailable ApplicationTypeStatus = original.ApplicationTypeStatusAvailable
ApplicationTypeStatusFailed ApplicationTypeStatus = original.ApplicationTypeStatusFailed
ApplicationTypeStatusInvalid ApplicationTypeStatus = original.ApplicationTypeStatusInvalid
ApplicationTypeStatusProvisioning ApplicationTypeStatus = original.ApplicationTypeStatusProvisioning
ApplicationTypeStatusUnprovisioning ApplicationTypeStatus = original.ApplicationTypeStatusUnprovisioning
)
type AutoScalingMechanismKind = original.AutoScalingMechanismKind
const (
AddRemoveReplica AutoScalingMechanismKind = original.AddRemoveReplica
)
type AutoScalingMetricKind = original.AutoScalingMetricKind
const (
Resource AutoScalingMetricKind = original.Resource
)
type AutoScalingResourceMetricName = original.AutoScalingResourceMetricName
const (
CPU AutoScalingResourceMetricName = original.CPU
MemoryInGB AutoScalingResourceMetricName = original.MemoryInGB
)
type AutoScalingTriggerKind = original.AutoScalingTriggerKind
const (
AverageLoad AutoScalingTriggerKind = original.AverageLoad
)
type BackupEntityKind = original.BackupEntityKind
const (
BackupEntityKindApplication BackupEntityKind = original.BackupEntityKindApplication
BackupEntityKindInvalid BackupEntityKind = original.BackupEntityKindInvalid
BackupEntityKindPartition BackupEntityKind = original.BackupEntityKindPartition
BackupEntityKindService BackupEntityKind = original.BackupEntityKindService
)
type BackupPolicyScope = original.BackupPolicyScope
const (
BackupPolicyScopeApplication BackupPolicyScope = original.BackupPolicyScopeApplication
BackupPolicyScopeInvalid BackupPolicyScope = original.BackupPolicyScopeInvalid
BackupPolicyScopePartition BackupPolicyScope = original.BackupPolicyScopePartition
BackupPolicyScopeService BackupPolicyScope = original.BackupPolicyScopeService
)
type BackupScheduleFrequencyType = original.BackupScheduleFrequencyType
const (
BackupScheduleFrequencyTypeDaily BackupScheduleFrequencyType = original.BackupScheduleFrequencyTypeDaily
BackupScheduleFrequencyTypeInvalid BackupScheduleFrequencyType = original.BackupScheduleFrequencyTypeInvalid
BackupScheduleFrequencyTypeWeekly BackupScheduleFrequencyType = original.BackupScheduleFrequencyTypeWeekly
)
type BackupScheduleKind = original.BackupScheduleKind
const (
BackupScheduleKindFrequencyBased BackupScheduleKind = original.BackupScheduleKindFrequencyBased
BackupScheduleKindInvalid BackupScheduleKind = original.BackupScheduleKindInvalid
BackupScheduleKindTimeBased BackupScheduleKind = original.BackupScheduleKindTimeBased
)
type BackupState = original.BackupState
const (
BackupStateAccepted BackupState = original.BackupStateAccepted
BackupStateBackupInProgress BackupState = original.BackupStateBackupInProgress
BackupStateFailure BackupState = original.BackupStateFailure
BackupStateInvalid BackupState = original.BackupStateInvalid
BackupStateSuccess BackupState = original.BackupStateSuccess
BackupStateTimeout BackupState = original.BackupStateTimeout
)
type BackupStorageKind = original.BackupStorageKind
const (
BackupStorageKindAzureBlobStore BackupStorageKind = original.BackupStorageKindAzureBlobStore
BackupStorageKindFileShare BackupStorageKind = original.BackupStorageKindFileShare
BackupStorageKindInvalid BackupStorageKind = original.BackupStorageKindInvalid
)
type BackupSuspensionScope = original.BackupSuspensionScope
const (
BackupSuspensionScopeApplication BackupSuspensionScope = original.BackupSuspensionScopeApplication
BackupSuspensionScopeInvalid BackupSuspensionScope = original.BackupSuspensionScopeInvalid
BackupSuspensionScopePartition BackupSuspensionScope = original.BackupSuspensionScopePartition
BackupSuspensionScopeService BackupSuspensionScope = original.BackupSuspensionScopeService
)
type BackupType = original.BackupType
const (
BackupTypeFull BackupType = original.BackupTypeFull
BackupTypeIncremental BackupType = original.BackupTypeIncremental
BackupTypeInvalid BackupType = original.BackupTypeInvalid
)
type ChaosEventKind = original.ChaosEventKind
const (
ChaosEventKindExecutingFaults ChaosEventKind = original.ChaosEventKindExecutingFaults
ChaosEventKindInvalid ChaosEventKind = original.ChaosEventKindInvalid
ChaosEventKindStarted ChaosEventKind = original.ChaosEventKindStarted
ChaosEventKindStopped ChaosEventKind = original.ChaosEventKindStopped
ChaosEventKindTestError ChaosEventKind = original.ChaosEventKindTestError
ChaosEventKindValidationFailed ChaosEventKind = original.ChaosEventKindValidationFailed
ChaosEventKindWaiting ChaosEventKind = original.ChaosEventKindWaiting
)
type ChaosScheduleStatus = original.ChaosScheduleStatus
const (
ChaosScheduleStatusActive ChaosScheduleStatus = original.ChaosScheduleStatusActive
ChaosScheduleStatusExpired ChaosScheduleStatus = original.ChaosScheduleStatusExpired
ChaosScheduleStatusInvalid ChaosScheduleStatus = original.ChaosScheduleStatusInvalid
ChaosScheduleStatusPending ChaosScheduleStatus = original.ChaosScheduleStatusPending
ChaosScheduleStatusStopped ChaosScheduleStatus = original.ChaosScheduleStatusStopped
)
type ChaosStatus = original.ChaosStatus
const (
ChaosStatusInvalid ChaosStatus = original.ChaosStatusInvalid
ChaosStatusRunning ChaosStatus = original.ChaosStatusRunning
ChaosStatusStopped ChaosStatus = original.ChaosStatusStopped
)
type ComposeDeploymentStatus = original.ComposeDeploymentStatus
const (
ComposeDeploymentStatusCreating ComposeDeploymentStatus = original.ComposeDeploymentStatusCreating
ComposeDeploymentStatusDeleting ComposeDeploymentStatus = original.ComposeDeploymentStatusDeleting
ComposeDeploymentStatusFailed ComposeDeploymentStatus = original.ComposeDeploymentStatusFailed
ComposeDeploymentStatusInvalid ComposeDeploymentStatus = original.ComposeDeploymentStatusInvalid
ComposeDeploymentStatusProvisioning ComposeDeploymentStatus = original.ComposeDeploymentStatusProvisioning
ComposeDeploymentStatusReady ComposeDeploymentStatus = original.ComposeDeploymentStatusReady
ComposeDeploymentStatusUnprovisioning ComposeDeploymentStatus = original.ComposeDeploymentStatusUnprovisioning
ComposeDeploymentStatusUpgrading ComposeDeploymentStatus = original.ComposeDeploymentStatusUpgrading
)
type ComposeDeploymentUpgradeState = original.ComposeDeploymentUpgradeState
const (
ComposeDeploymentUpgradeStateFailed ComposeDeploymentUpgradeState = original.ComposeDeploymentUpgradeStateFailed
ComposeDeploymentUpgradeStateInvalid ComposeDeploymentUpgradeState = original.ComposeDeploymentUpgradeStateInvalid
ComposeDeploymentUpgradeStateProvisioningTarget ComposeDeploymentUpgradeState = original.ComposeDeploymentUpgradeStateProvisioningTarget
ComposeDeploymentUpgradeStateRollingBackCompleted ComposeDeploymentUpgradeState = original.ComposeDeploymentUpgradeStateRollingBackCompleted
ComposeDeploymentUpgradeStateRollingBackInProgress ComposeDeploymentUpgradeState = original.ComposeDeploymentUpgradeStateRollingBackInProgress
ComposeDeploymentUpgradeStateRollingForwardCompleted ComposeDeploymentUpgradeState = original.ComposeDeploymentUpgradeStateRollingForwardCompleted
ComposeDeploymentUpgradeStateRollingForwardInProgress ComposeDeploymentUpgradeState = original.ComposeDeploymentUpgradeStateRollingForwardInProgress
ComposeDeploymentUpgradeStateRollingForwardPending ComposeDeploymentUpgradeState = original.ComposeDeploymentUpgradeStateRollingForwardPending
ComposeDeploymentUpgradeStateUnprovisioningCurrent ComposeDeploymentUpgradeState = original.ComposeDeploymentUpgradeStateUnprovisioningCurrent
ComposeDeploymentUpgradeStateUnprovisioningTarget ComposeDeploymentUpgradeState = original.ComposeDeploymentUpgradeStateUnprovisioningTarget
)
type CreateFabricDump = original.CreateFabricDump
const (
False CreateFabricDump = original.False
True CreateFabricDump = original.True
)
type DataLossMode = original.DataLossMode
const (
DataLossModeFullDataLoss DataLossMode = original.DataLossModeFullDataLoss
DataLossModeInvalid DataLossMode = original.DataLossModeInvalid
DataLossModePartialDataLoss DataLossMode = original.DataLossModePartialDataLoss
)
type DayOfWeek = original.DayOfWeek
const (
Friday DayOfWeek = original.Friday
Monday DayOfWeek = original.Monday
Saturday DayOfWeek = original.Saturday
Sunday DayOfWeek = original.Sunday
Thursday DayOfWeek = original.Thursday
Tuesday DayOfWeek = original.Tuesday
Wednesday DayOfWeek = original.Wednesday
)
type DeactivationIntent = original.DeactivationIntent
const (
Pause DeactivationIntent = original.Pause
RemoveData DeactivationIntent = original.RemoveData
Restart DeactivationIntent = original.Restart
)
type DeployedApplicationStatus = original.DeployedApplicationStatus
const (
DeployedApplicationStatusActivating DeployedApplicationStatus = original.DeployedApplicationStatusActivating
DeployedApplicationStatusActive DeployedApplicationStatus = original.DeployedApplicationStatusActive
DeployedApplicationStatusDeactivating DeployedApplicationStatus = original.DeployedApplicationStatusDeactivating
DeployedApplicationStatusDownloading DeployedApplicationStatus = original.DeployedApplicationStatusDownloading
DeployedApplicationStatusInvalid DeployedApplicationStatus = original.DeployedApplicationStatusInvalid
DeployedApplicationStatusUpgrading DeployedApplicationStatus = original.DeployedApplicationStatusUpgrading
)
type DeploymentStatus = original.DeploymentStatus
const (
DeploymentStatusActivating DeploymentStatus = original.DeploymentStatusActivating
DeploymentStatusActive DeploymentStatus = original.DeploymentStatusActive
DeploymentStatusDeactivating DeploymentStatus = original.DeploymentStatusDeactivating
DeploymentStatusDownloading DeploymentStatus = original.DeploymentStatusDownloading
DeploymentStatusInvalid DeploymentStatus = original.DeploymentStatusInvalid
DeploymentStatusUpgrading DeploymentStatus = original.DeploymentStatusUpgrading
)
type DiagnosticsSinkKind = original.DiagnosticsSinkKind
const (
DiagnosticsSinkKindAzureInternalMonitoringPipeline DiagnosticsSinkKind = original.DiagnosticsSinkKindAzureInternalMonitoringPipeline
DiagnosticsSinkKindInvalid DiagnosticsSinkKind = original.DiagnosticsSinkKindInvalid
)
type EntityKind = original.EntityKind
const (
EntityKindApplication EntityKind = original.EntityKindApplication
EntityKindCluster EntityKind = original.EntityKindCluster
EntityKindDeployedApplication EntityKind = original.EntityKindDeployedApplication
EntityKindDeployedServicePackage EntityKind = original.EntityKindDeployedServicePackage
EntityKindInvalid EntityKind = original.EntityKindInvalid
EntityKindNode EntityKind = original.EntityKindNode
EntityKindPartition EntityKind = original.EntityKindPartition
EntityKindReplica EntityKind = original.EntityKindReplica
EntityKindService EntityKind = original.EntityKindService
)
type EntityKindBasicBackupEntity = original.EntityKindBasicBackupEntity
const (
EntityKindApplication1 EntityKindBasicBackupEntity = original.EntityKindApplication1
EntityKindBackupEntity EntityKindBasicBackupEntity = original.EntityKindBackupEntity
EntityKindPartition1 EntityKindBasicBackupEntity = original.EntityKindPartition1
EntityKindService1 EntityKindBasicBackupEntity = original.EntityKindService1
)
type EntryPointStatus = original.EntryPointStatus
const (
EntryPointStatusInvalid EntryPointStatus = original.EntryPointStatusInvalid
EntryPointStatusPending EntryPointStatus = original.EntryPointStatusPending
EntryPointStatusStarted EntryPointStatus = original.EntryPointStatusStarted
EntryPointStatusStarting EntryPointStatus = original.EntryPointStatusStarting
EntryPointStatusStopped EntryPointStatus = original.EntryPointStatusStopped
EntryPointStatusStopping EntryPointStatus = original.EntryPointStatusStopping
)
type FabricErrorCodes = original.FabricErrorCodes
const (
EABORT FabricErrorCodes = original.EABORT
EFAIL FabricErrorCodes = original.EFAIL
EINVALIDARG FabricErrorCodes = original.EINVALIDARG
FABRICEAPPLICATIONALREADYEXISTS FabricErrorCodes = original.FABRICEAPPLICATIONALREADYEXISTS
FABRICEAPPLICATIONALREADYINTARGETVERSION FabricErrorCodes = original.FABRICEAPPLICATIONALREADYINTARGETVERSION
FABRICEAPPLICATIONNOTFOUND FabricErrorCodes = original.FABRICEAPPLICATIONNOTFOUND
FABRICEAPPLICATIONNOTUPGRADING FabricErrorCodes = original.FABRICEAPPLICATIONNOTUPGRADING
FABRICEAPPLICATIONTYPEALREADYEXISTS FabricErrorCodes = original.FABRICEAPPLICATIONTYPEALREADYEXISTS
FABRICEAPPLICATIONTYPEINUSE FabricErrorCodes = original.FABRICEAPPLICATIONTYPEINUSE
FABRICEAPPLICATIONTYPENOTFOUND FabricErrorCodes = original.FABRICEAPPLICATIONTYPENOTFOUND
FABRICEAPPLICATIONTYPEPROVISIONINPROGRESS FabricErrorCodes = original.FABRICEAPPLICATIONTYPEPROVISIONINPROGRESS
FABRICEAPPLICATIONUPGRADEINPROGRESS FabricErrorCodes = original.FABRICEAPPLICATIONUPGRADEINPROGRESS
FABRICEAPPLICATIONUPGRADEVALIDATIONERROR FabricErrorCodes = original.FABRICEAPPLICATIONUPGRADEVALIDATIONERROR
FABRICEBACKUPINPROGRESS FabricErrorCodes = original.FABRICEBACKUPINPROGRESS
FABRICEBACKUPISENABLED FabricErrorCodes = original.FABRICEBACKUPISENABLED
FABRICEBACKUPNOTENABLED FabricErrorCodes = original.FABRICEBACKUPNOTENABLED
FABRICEBACKUPPOLICYALREADYEXISTING FabricErrorCodes = original.FABRICEBACKUPPOLICYALREADYEXISTING
FABRICEBACKUPPOLICYNOTEXISTING FabricErrorCodes = original.FABRICEBACKUPPOLICYNOTEXISTING
FABRICECOMMUNICATIONERROR FabricErrorCodes = original.FABRICECOMMUNICATIONERROR
FABRICECONFIGURATIONPARAMETERNOTFOUND FabricErrorCodes = original.FABRICECONFIGURATIONPARAMETERNOTFOUND
FABRICECONFIGURATIONSECTIONNOTFOUND FabricErrorCodes = original.FABRICECONFIGURATIONSECTIONNOTFOUND
FABRICEDIRECTORYNOTFOUND FabricErrorCodes = original.FABRICEDIRECTORYNOTFOUND
FABRICEENUMERATIONCOMPLETED FabricErrorCodes = original.FABRICEENUMERATIONCOMPLETED
FABRICEFABRICALREADYINTARGETVERSION FabricErrorCodes = original.FABRICEFABRICALREADYINTARGETVERSION
FABRICEFABRICNOTUPGRADING FabricErrorCodes = original.FABRICEFABRICNOTUPGRADING
FABRICEFABRICUPGRADEINPROGRESS FabricErrorCodes = original.FABRICEFABRICUPGRADEINPROGRESS
FABRICEFABRICUPGRADEVALIDATIONERROR FabricErrorCodes = original.FABRICEFABRICUPGRADEVALIDATIONERROR
FABRICEFABRICVERSIONALREADYEXISTS FabricErrorCodes = original.FABRICEFABRICVERSIONALREADYEXISTS
FABRICEFABRICVERSIONINUSE FabricErrorCodes = original.FABRICEFABRICVERSIONINUSE
FABRICEFABRICVERSIONNOTFOUND FabricErrorCodes = original.FABRICEFABRICVERSIONNOTFOUND
FABRICEFAULTANALYSISSERVICENOTEXISTING FabricErrorCodes = original.FABRICEFAULTANALYSISSERVICENOTEXISTING
FABRICEFILENOTFOUND FabricErrorCodes = original.FABRICEFILENOTFOUND
FABRICEHEALTHENTITYNOTFOUND FabricErrorCodes = original.FABRICEHEALTHENTITYNOTFOUND
FABRICEHEALTHSTALEREPORT FabricErrorCodes = original.FABRICEHEALTHSTALEREPORT
FABRICEIMAGEBUILDERRESERVEDDIRECTORYERROR FabricErrorCodes = original.FABRICEIMAGEBUILDERRESERVEDDIRECTORYERROR
FABRICEIMAGEBUILDERVALIDATIONERROR FabricErrorCodes = original.FABRICEIMAGEBUILDERVALIDATIONERROR
FABRICEINSTANCEIDMISMATCH FabricErrorCodes = original.FABRICEINSTANCEIDMISMATCH
FABRICEINVALIDADDRESS FabricErrorCodes = original.FABRICEINVALIDADDRESS
FABRICEINVALIDATOMICGROUP FabricErrorCodes = original.FABRICEINVALIDATOMICGROUP
FABRICEINVALIDCONFIGURATION FabricErrorCodes = original.FABRICEINVALIDCONFIGURATION
FABRICEINVALIDFORSTATELESSSERVICES FabricErrorCodes = original.FABRICEINVALIDFORSTATELESSSERVICES
FABRICEINVALIDNAMEURI FabricErrorCodes = original.FABRICEINVALIDNAMEURI
FABRICEINVALIDPARTITIONKEY FabricErrorCodes = original.FABRICEINVALIDPARTITIONKEY
FABRICEINVALIDSERVICESCALINGPOLICY FabricErrorCodes = original.FABRICEINVALIDSERVICESCALINGPOLICY
FABRICEKEYNOTFOUND FabricErrorCodes = original.FABRICEKEYNOTFOUND
FABRICEKEYTOOLARGE FabricErrorCodes = original.FABRICEKEYTOOLARGE
FABRICENAMEALREADYEXISTS FabricErrorCodes = original.FABRICENAMEALREADYEXISTS
FABRICENAMEDOESNOTEXIST FabricErrorCodes = original.FABRICENAMEDOESNOTEXIST
FABRICENAMENOTEMPTY FabricErrorCodes = original.FABRICENAMENOTEMPTY
FABRICENODEHASNOTSTOPPEDYET FabricErrorCodes = original.FABRICENODEHASNOTSTOPPEDYET
FABRICENODEISUP FabricErrorCodes = original.FABRICENODEISUP
FABRICENODENOTFOUND FabricErrorCodes = original.FABRICENODENOTFOUND
FABRICENOTPRIMARY FabricErrorCodes = original.FABRICENOTPRIMARY
FABRICENOTREADY FabricErrorCodes = original.FABRICENOTREADY
FABRICENOWRITEQUORUM FabricErrorCodes = original.FABRICENOWRITEQUORUM
FABRICEOPERATIONNOTCOMPLETE FabricErrorCodes = original.FABRICEOPERATIONNOTCOMPLETE
FABRICEPARTITIONNOTFOUND FabricErrorCodes = original.FABRICEPARTITIONNOTFOUND
FABRICEPATHTOOLONG FabricErrorCodes = original.FABRICEPATHTOOLONG
FABRICEPROPERTYCHECKFAILED FabricErrorCodes = original.FABRICEPROPERTYCHECKFAILED
FABRICEPROPERTYDOESNOTEXIST FabricErrorCodes = original.FABRICEPROPERTYDOESNOTEXIST
FABRICERECONFIGURATIONPENDING FabricErrorCodes = original.FABRICERECONFIGURATIONPENDING
FABRICEREPLICADOESNOTEXIST FabricErrorCodes = original.FABRICEREPLICADOESNOTEXIST
FABRICERESTOREINPROGRESS FabricErrorCodes = original.FABRICERESTOREINPROGRESS
FABRICERESTORESOURCETARGETPARTITIONMISMATCH FabricErrorCodes = original.FABRICERESTORESOURCETARGETPARTITIONMISMATCH
FABRICESEQUENCENUMBERCHECKFAILED FabricErrorCodes = original.FABRICESEQUENCENUMBERCHECKFAILED
FABRICESERVICEAFFINITYCHAINNOTSUPPORTED FabricErrorCodes = original.FABRICESERVICEAFFINITYCHAINNOTSUPPORTED
FABRICESERVICEALREADYEXISTS FabricErrorCodes = original.FABRICESERVICEALREADYEXISTS
FABRICESERVICEDOESNOTEXIST FabricErrorCodes = original.FABRICESERVICEDOESNOTEXIST
FABRICESERVICEGROUPALREADYEXISTS FabricErrorCodes = original.FABRICESERVICEGROUPALREADYEXISTS
FABRICESERVICEGROUPDOESNOTEXIST FabricErrorCodes = original.FABRICESERVICEGROUPDOESNOTEXIST
FABRICESERVICEMANIFESTNOTFOUND FabricErrorCodes = original.FABRICESERVICEMANIFESTNOTFOUND
FABRICESERVICEMETADATAMISMATCH FabricErrorCodes = original.FABRICESERVICEMETADATAMISMATCH
FABRICESERVICEOFFLINE FabricErrorCodes = original.FABRICESERVICEOFFLINE
FABRICESERVICETYPEMISMATCH FabricErrorCodes = original.FABRICESERVICETYPEMISMATCH
FABRICESERVICETYPENOTFOUND FabricErrorCodes = original.FABRICESERVICETYPENOTFOUND
FABRICESERVICETYPETEMPLATENOTFOUND FabricErrorCodes = original.FABRICESERVICETYPETEMPLATENOTFOUND
FABRICESINGLEINSTANCEAPPLICATIONALREADYEXISTS FabricErrorCodes = original.FABRICESINGLEINSTANCEAPPLICATIONALREADYEXISTS
FABRICESINGLEINSTANCEAPPLICATIONNOTFOUND FabricErrorCodes = original.FABRICESINGLEINSTANCEAPPLICATIONNOTFOUND
FABRICETIMEOUT FabricErrorCodes = original.FABRICETIMEOUT
FABRICEVALUEEMPTY FabricErrorCodes = original.FABRICEVALUEEMPTY
FABRICEVALUETOOLARGE FabricErrorCodes = original.FABRICEVALUETOOLARGE
FABRICEVOLUMEALREADYEXISTS FabricErrorCodes = original.FABRICEVOLUMEALREADYEXISTS
FABRICEVOLUMENOTFOUND FabricErrorCodes = original.FABRICEVOLUMENOTFOUND
SerializationError FabricErrorCodes = original.SerializationError
)
type FabricEventKind = original.FabricEventKind
const (
FabricEventKindApplicationContainerInstanceExited FabricEventKind = original.FabricEventKindApplicationContainerInstanceExited
FabricEventKindApplicationCreated FabricEventKind = original.FabricEventKindApplicationCreated
FabricEventKindApplicationDeleted FabricEventKind = original.FabricEventKindApplicationDeleted
FabricEventKindApplicationEvent FabricEventKind = original.FabricEventKindApplicationEvent
FabricEventKindApplicationHealthReportExpired FabricEventKind = original.FabricEventKindApplicationHealthReportExpired
FabricEventKindApplicationNewHealthReport FabricEventKind = original.FabricEventKindApplicationNewHealthReport
FabricEventKindApplicationProcessExited FabricEventKind = original.FabricEventKindApplicationProcessExited
FabricEventKindApplicationUpgradeCompleted FabricEventKind = original.FabricEventKindApplicationUpgradeCompleted
FabricEventKindApplicationUpgradeDomainCompleted FabricEventKind = original.FabricEventKindApplicationUpgradeDomainCompleted
FabricEventKindApplicationUpgradeRollbackCompleted FabricEventKind = original.FabricEventKindApplicationUpgradeRollbackCompleted
FabricEventKindApplicationUpgradeRollbackStarted FabricEventKind = original.FabricEventKindApplicationUpgradeRollbackStarted
FabricEventKindApplicationUpgradeStarted FabricEventKind = original.FabricEventKindApplicationUpgradeStarted
FabricEventKindChaosCodePackageRestartScheduled FabricEventKind = original.FabricEventKindChaosCodePackageRestartScheduled
FabricEventKindChaosNodeRestartScheduled FabricEventKind = original.FabricEventKindChaosNodeRestartScheduled
FabricEventKindChaosPartitionPrimaryMoveScheduled FabricEventKind = original.FabricEventKindChaosPartitionPrimaryMoveScheduled
FabricEventKindChaosPartitionSecondaryMoveScheduled FabricEventKind = original.FabricEventKindChaosPartitionSecondaryMoveScheduled
FabricEventKindChaosReplicaRemovalScheduled FabricEventKind = original.FabricEventKindChaosReplicaRemovalScheduled
FabricEventKindChaosReplicaRestartScheduled FabricEventKind = original.FabricEventKindChaosReplicaRestartScheduled
FabricEventKindChaosStarted FabricEventKind = original.FabricEventKindChaosStarted
FabricEventKindChaosStopped FabricEventKind = original.FabricEventKindChaosStopped
FabricEventKindClusterEvent FabricEventKind = original.FabricEventKindClusterEvent
FabricEventKindClusterHealthReportExpired FabricEventKind = original.FabricEventKindClusterHealthReportExpired
FabricEventKindClusterNewHealthReport FabricEventKind = original.FabricEventKindClusterNewHealthReport
FabricEventKindClusterUpgradeCompleted FabricEventKind = original.FabricEventKindClusterUpgradeCompleted
FabricEventKindClusterUpgradeDomainCompleted FabricEventKind = original.FabricEventKindClusterUpgradeDomainCompleted
FabricEventKindClusterUpgradeRollbackCompleted FabricEventKind = original.FabricEventKindClusterUpgradeRollbackCompleted
FabricEventKindClusterUpgradeRollbackStarted FabricEventKind = original.FabricEventKindClusterUpgradeRollbackStarted
FabricEventKindClusterUpgradeStarted FabricEventKind = original.FabricEventKindClusterUpgradeStarted
FabricEventKindContainerInstanceEvent FabricEventKind = original.FabricEventKindContainerInstanceEvent
FabricEventKindDeployedApplicationHealthReportExpired FabricEventKind = original.FabricEventKindDeployedApplicationHealthReportExpired
FabricEventKindDeployedApplicationNewHealthReport FabricEventKind = original.FabricEventKindDeployedApplicationNewHealthReport
FabricEventKindDeployedServicePackageHealthReportExpired FabricEventKind = original.FabricEventKindDeployedServicePackageHealthReportExpired
FabricEventKindDeployedServicePackageNewHealthReport FabricEventKind = original.FabricEventKindDeployedServicePackageNewHealthReport
FabricEventKindNodeAborted FabricEventKind = original.FabricEventKindNodeAborted
FabricEventKindNodeAddedToCluster FabricEventKind = original.FabricEventKindNodeAddedToCluster
FabricEventKindNodeClosed FabricEventKind = original.FabricEventKindNodeClosed
FabricEventKindNodeDeactivateCompleted FabricEventKind = original.FabricEventKindNodeDeactivateCompleted
FabricEventKindNodeDeactivateStarted FabricEventKind = original.FabricEventKindNodeDeactivateStarted
FabricEventKindNodeDown FabricEventKind = original.FabricEventKindNodeDown
FabricEventKindNodeEvent FabricEventKind = original.FabricEventKindNodeEvent
FabricEventKindNodeHealthReportExpired FabricEventKind = original.FabricEventKindNodeHealthReportExpired
FabricEventKindNodeNewHealthReport FabricEventKind = original.FabricEventKindNodeNewHealthReport
FabricEventKindNodeOpenFailed FabricEventKind = original.FabricEventKindNodeOpenFailed
FabricEventKindNodeOpenSucceeded FabricEventKind = original.FabricEventKindNodeOpenSucceeded
FabricEventKindNodeRemovedFromCluster FabricEventKind = original.FabricEventKindNodeRemovedFromCluster
FabricEventKindNodeUp FabricEventKind = original.FabricEventKindNodeUp
FabricEventKindPartitionAnalysisEvent FabricEventKind = original.FabricEventKindPartitionAnalysisEvent
FabricEventKindPartitionEvent FabricEventKind = original.FabricEventKindPartitionEvent
FabricEventKindPartitionHealthReportExpired FabricEventKind = original.FabricEventKindPartitionHealthReportExpired
FabricEventKindPartitionNewHealthReport FabricEventKind = original.FabricEventKindPartitionNewHealthReport
FabricEventKindPartitionPrimaryMoveAnalysis FabricEventKind = original.FabricEventKindPartitionPrimaryMoveAnalysis
FabricEventKindPartitionReconfigured FabricEventKind = original.FabricEventKindPartitionReconfigured
FabricEventKindReplicaEvent FabricEventKind = original.FabricEventKindReplicaEvent
FabricEventKindServiceCreated FabricEventKind = original.FabricEventKindServiceCreated
FabricEventKindServiceDeleted FabricEventKind = original.FabricEventKindServiceDeleted
FabricEventKindServiceEvent FabricEventKind = original.FabricEventKindServiceEvent
FabricEventKindServiceHealthReportExpired FabricEventKind = original.FabricEventKindServiceHealthReportExpired
FabricEventKindServiceNewHealthReport FabricEventKind = original.FabricEventKindServiceNewHealthReport
FabricEventKindStatefulReplicaHealthReportExpired FabricEventKind = original.FabricEventKindStatefulReplicaHealthReportExpired
FabricEventKindStatefulReplicaNewHealthReport FabricEventKind = original.FabricEventKindStatefulReplicaNewHealthReport
FabricEventKindStatelessReplicaHealthReportExpired FabricEventKind = original.FabricEventKindStatelessReplicaHealthReportExpired
FabricEventKindStatelessReplicaNewHealthReport FabricEventKind = original.FabricEventKindStatelessReplicaNewHealthReport
)
type FabricReplicaStatus = original.FabricReplicaStatus
const (
FabricReplicaStatusDown FabricReplicaStatus = original.FabricReplicaStatusDown
FabricReplicaStatusInvalid FabricReplicaStatus = original.FabricReplicaStatusInvalid
FabricReplicaStatusUp FabricReplicaStatus = original.FabricReplicaStatusUp
)
type FailureAction = original.FailureAction
const (
FailureActionInvalid FailureAction = original.FailureActionInvalid
FailureActionManual FailureAction = original.FailureActionManual
FailureActionRollback FailureAction = original.FailureActionRollback
)
type FailureReason = original.FailureReason
const (
HealthCheck FailureReason = original.HealthCheck
Interrupted FailureReason = original.Interrupted
None FailureReason = original.None
OverallUpgradeTimeout FailureReason = original.OverallUpgradeTimeout
UpgradeDomainTimeout FailureReason = original.UpgradeDomainTimeout
)
type HeaderMatchType = original.HeaderMatchType
const (
Exact HeaderMatchType = original.Exact
)
type HealthEvaluationKind = original.HealthEvaluationKind
const (
HealthEvaluationKindApplication HealthEvaluationKind = original.HealthEvaluationKindApplication
HealthEvaluationKindApplications HealthEvaluationKind = original.HealthEvaluationKindApplications
HealthEvaluationKindApplicationTypeApplications HealthEvaluationKind = original.HealthEvaluationKindApplicationTypeApplications
HealthEvaluationKindDeltaNodesCheck HealthEvaluationKind = original.HealthEvaluationKindDeltaNodesCheck
HealthEvaluationKindDeployedApplication HealthEvaluationKind = original.HealthEvaluationKindDeployedApplication
HealthEvaluationKindDeployedApplications HealthEvaluationKind = original.HealthEvaluationKindDeployedApplications
HealthEvaluationKindDeployedServicePackage HealthEvaluationKind = original.HealthEvaluationKindDeployedServicePackage
HealthEvaluationKindDeployedServicePackages HealthEvaluationKind = original.HealthEvaluationKindDeployedServicePackages
HealthEvaluationKindEvent HealthEvaluationKind = original.HealthEvaluationKindEvent
HealthEvaluationKindInvalid HealthEvaluationKind = original.HealthEvaluationKindInvalid
HealthEvaluationKindNode HealthEvaluationKind = original.HealthEvaluationKindNode
HealthEvaluationKindNodes HealthEvaluationKind = original.HealthEvaluationKindNodes
HealthEvaluationKindPartition HealthEvaluationKind = original.HealthEvaluationKindPartition
HealthEvaluationKindPartitions HealthEvaluationKind = original.HealthEvaluationKindPartitions
HealthEvaluationKindReplica HealthEvaluationKind = original.HealthEvaluationKindReplica
HealthEvaluationKindReplicas HealthEvaluationKind = original.HealthEvaluationKindReplicas
HealthEvaluationKindService HealthEvaluationKind = original.HealthEvaluationKindService
HealthEvaluationKindServices HealthEvaluationKind = original.HealthEvaluationKindServices
HealthEvaluationKindSystemApplication HealthEvaluationKind = original.HealthEvaluationKindSystemApplication
HealthEvaluationKindUpgradeDomainDeltaNodesCheck HealthEvaluationKind = original.HealthEvaluationKindUpgradeDomainDeltaNodesCheck
HealthEvaluationKindUpgradeDomainDeployedApplications HealthEvaluationKind = original.HealthEvaluationKindUpgradeDomainDeployedApplications
HealthEvaluationKindUpgradeDomainNodes HealthEvaluationKind = original.HealthEvaluationKindUpgradeDomainNodes
)
type HealthState = original.HealthState
const (
HealthStateError HealthState = original.HealthStateError
HealthStateInvalid HealthState = original.HealthStateInvalid
HealthStateOk HealthState = original.HealthStateOk
HealthStateUnknown HealthState = original.HealthStateUnknown
HealthStateWarning HealthState = original.HealthStateWarning
)
type HostIsolationMode = original.HostIsolationMode
const (
HostIsolationModeHyperV HostIsolationMode = original.HostIsolationModeHyperV
HostIsolationModeNone HostIsolationMode = original.HostIsolationModeNone
HostIsolationModeProcess HostIsolationMode = original.HostIsolationModeProcess
)
type HostType = original.HostType
const (
HostTypeContainerHost HostType = original.HostTypeContainerHost
HostTypeExeHost HostType = original.HostTypeExeHost
HostTypeInvalid HostType = original.HostTypeInvalid
)
type ImpactLevel = original.ImpactLevel
const (
ImpactLevelInvalid ImpactLevel = original.ImpactLevelInvalid
ImpactLevelNone ImpactLevel = original.ImpactLevelNone
ImpactLevelRemoveData ImpactLevel = original.ImpactLevelRemoveData
ImpactLevelRemoveNode ImpactLevel = original.ImpactLevelRemoveNode
ImpactLevelRestart ImpactLevel = original.ImpactLevelRestart
)
type Kind = original.Kind
const (
KindApplication Kind = original.KindApplication
KindApplications Kind = original.KindApplications
KindApplicationTypeApplications Kind = original.KindApplicationTypeApplications
KindDeltaNodesCheck Kind = original.KindDeltaNodesCheck
KindDeployedApplication Kind = original.KindDeployedApplication
KindDeployedApplications Kind = original.KindDeployedApplications
KindDeployedServicePackage Kind = original.KindDeployedServicePackage
KindDeployedServicePackages Kind = original.KindDeployedServicePackages
KindEvent Kind = original.KindEvent
KindHealthEvaluation Kind = original.KindHealthEvaluation
KindNode Kind = original.KindNode
KindNodes Kind = original.KindNodes
KindPartition Kind = original.KindPartition
KindPartitions Kind = original.KindPartitions
KindReplica Kind = original.KindReplica
KindReplicas Kind = original.KindReplicas
KindService Kind = original.KindService
KindServices Kind = original.KindServices
KindSystemApplication Kind = original.KindSystemApplication
KindUpgradeDomainDeltaNodesCheck Kind = original.KindUpgradeDomainDeltaNodesCheck
KindUpgradeDomainNodes Kind = original.KindUpgradeDomainNodes
)
type KindBasicApplicationScopedVolumeCreationParameters = original.KindBasicApplicationScopedVolumeCreationParameters
const (
KindApplicationScopedVolumeCreationParameters KindBasicApplicationScopedVolumeCreationParameters = original.KindApplicationScopedVolumeCreationParameters
KindServiceFabricVolumeDisk KindBasicApplicationScopedVolumeCreationParameters = original.KindServiceFabricVolumeDisk
)
type KindBasicAutoScalingMechanism = original.KindBasicAutoScalingMechanism
const (
KindAddRemoveReplica KindBasicAutoScalingMechanism = original.KindAddRemoveReplica
KindAutoScalingMechanism KindBasicAutoScalingMechanism = original.KindAutoScalingMechanism
)
type KindBasicAutoScalingMetric = original.KindBasicAutoScalingMetric
const (
KindAutoScalingMetric KindBasicAutoScalingMetric = original.KindAutoScalingMetric
KindResource KindBasicAutoScalingMetric = original.KindResource
)
type KindBasicAutoScalingTrigger = original.KindBasicAutoScalingTrigger
const (
KindAutoScalingTrigger KindBasicAutoScalingTrigger = original.KindAutoScalingTrigger
KindAverageLoad KindBasicAutoScalingTrigger = original.KindAverageLoad
)
type KindBasicBackupConfigurationInfo = original.KindBasicBackupConfigurationInfo
const (
KindBasicBackupConfigurationInfoKindApplication KindBasicBackupConfigurationInfo = original.KindBasicBackupConfigurationInfoKindApplication
KindBasicBackupConfigurationInfoKindBackupConfigurationInfo KindBasicBackupConfigurationInfo = original.KindBasicBackupConfigurationInfoKindBackupConfigurationInfo
KindBasicBackupConfigurationInfoKindPartition KindBasicBackupConfigurationInfo = original.KindBasicBackupConfigurationInfoKindPartition
KindBasicBackupConfigurationInfoKindService KindBasicBackupConfigurationInfo = original.KindBasicBackupConfigurationInfoKindService
)
type KindBasicChaosEvent = original.KindBasicChaosEvent
const (
KindChaosEvent KindBasicChaosEvent = original.KindChaosEvent
KindExecutingFaults KindBasicChaosEvent = original.KindExecutingFaults
KindStarted KindBasicChaosEvent = original.KindStarted
KindStopped KindBasicChaosEvent = original.KindStopped
KindTestError KindBasicChaosEvent = original.KindTestError
KindValidationFailed KindBasicChaosEvent = original.KindValidationFailed
KindWaiting KindBasicChaosEvent = original.KindWaiting
)
type KindBasicDiagnosticsSinkProperties = original.KindBasicDiagnosticsSinkProperties
const (
KindAzureInternalMonitoringPipeline KindBasicDiagnosticsSinkProperties = original.KindAzureInternalMonitoringPipeline
KindDiagnosticsSinkProperties KindBasicDiagnosticsSinkProperties = original.KindDiagnosticsSinkProperties
)
type KindBasicFabricEvent = original.KindBasicFabricEvent
const (
KindApplicationContainerInstanceExited KindBasicFabricEvent = original.KindApplicationContainerInstanceExited
KindApplicationCreated KindBasicFabricEvent = original.KindApplicationCreated
KindApplicationDeleted KindBasicFabricEvent = original.KindApplicationDeleted
KindApplicationEvent KindBasicFabricEvent = original.KindApplicationEvent
KindApplicationHealthReportExpired KindBasicFabricEvent = original.KindApplicationHealthReportExpired
KindApplicationNewHealthReport KindBasicFabricEvent = original.KindApplicationNewHealthReport
KindApplicationProcessExited KindBasicFabricEvent = original.KindApplicationProcessExited
KindApplicationUpgradeCompleted KindBasicFabricEvent = original.KindApplicationUpgradeCompleted
KindApplicationUpgradeDomainCompleted KindBasicFabricEvent = original.KindApplicationUpgradeDomainCompleted
KindApplicationUpgradeRollbackCompleted KindBasicFabricEvent = original.KindApplicationUpgradeRollbackCompleted
KindApplicationUpgradeRollbackStarted KindBasicFabricEvent = original.KindApplicationUpgradeRollbackStarted
KindApplicationUpgradeStarted KindBasicFabricEvent = original.KindApplicationUpgradeStarted
KindChaosCodePackageRestartScheduled KindBasicFabricEvent = original.KindChaosCodePackageRestartScheduled
KindChaosNodeRestartScheduled KindBasicFabricEvent = original.KindChaosNodeRestartScheduled
KindChaosPartitionPrimaryMoveScheduled KindBasicFabricEvent = original.KindChaosPartitionPrimaryMoveScheduled
KindChaosPartitionSecondaryMoveScheduled KindBasicFabricEvent = original.KindChaosPartitionSecondaryMoveScheduled
KindChaosReplicaRemovalScheduled KindBasicFabricEvent = original.KindChaosReplicaRemovalScheduled
KindChaosReplicaRestartScheduled KindBasicFabricEvent = original.KindChaosReplicaRestartScheduled
KindChaosStarted KindBasicFabricEvent = original.KindChaosStarted
KindChaosStopped KindBasicFabricEvent = original.KindChaosStopped
KindClusterEvent KindBasicFabricEvent = original.KindClusterEvent
KindClusterHealthReportExpired KindBasicFabricEvent = original.KindClusterHealthReportExpired
KindClusterNewHealthReport KindBasicFabricEvent = original.KindClusterNewHealthReport
KindClusterUpgradeCompleted KindBasicFabricEvent = original.KindClusterUpgradeCompleted
KindClusterUpgradeDomainCompleted KindBasicFabricEvent = original.KindClusterUpgradeDomainCompleted
KindClusterUpgradeRollbackCompleted KindBasicFabricEvent = original.KindClusterUpgradeRollbackCompleted
KindClusterUpgradeRollbackStarted KindBasicFabricEvent = original.KindClusterUpgradeRollbackStarted
KindClusterUpgradeStarted KindBasicFabricEvent = original.KindClusterUpgradeStarted
KindContainerInstanceEvent KindBasicFabricEvent = original.KindContainerInstanceEvent
KindDeployedApplicationHealthReportExpired KindBasicFabricEvent = original.KindDeployedApplicationHealthReportExpired
KindDeployedApplicationNewHealthReport KindBasicFabricEvent = original.KindDeployedApplicationNewHealthReport
KindDeployedServicePackageHealthReportExpired KindBasicFabricEvent = original.KindDeployedServicePackageHealthReportExpired
KindDeployedServicePackageNewHealthReport KindBasicFabricEvent = original.KindDeployedServicePackageNewHealthReport
KindFabricEvent KindBasicFabricEvent = original.KindFabricEvent
KindNodeAborted KindBasicFabricEvent = original.KindNodeAborted
KindNodeAddedToCluster KindBasicFabricEvent = original.KindNodeAddedToCluster
KindNodeClosed KindBasicFabricEvent = original.KindNodeClosed
KindNodeDeactivateCompleted KindBasicFabricEvent = original.KindNodeDeactivateCompleted
KindNodeDeactivateStarted KindBasicFabricEvent = original.KindNodeDeactivateStarted
KindNodeDown KindBasicFabricEvent = original.KindNodeDown
KindNodeEvent KindBasicFabricEvent = original.KindNodeEvent
KindNodeHealthReportExpired KindBasicFabricEvent = original.KindNodeHealthReportExpired
KindNodeNewHealthReport KindBasicFabricEvent = original.KindNodeNewHealthReport
KindNodeOpenFailed KindBasicFabricEvent = original.KindNodeOpenFailed
KindNodeOpenSucceeded KindBasicFabricEvent = original.KindNodeOpenSucceeded
KindNodeRemovedFromCluster KindBasicFabricEvent = original.KindNodeRemovedFromCluster
KindNodeUp KindBasicFabricEvent = original.KindNodeUp
KindPartitionAnalysisEvent KindBasicFabricEvent = original.KindPartitionAnalysisEvent
KindPartitionEvent KindBasicFabricEvent = original.KindPartitionEvent
KindPartitionHealthReportExpired KindBasicFabricEvent = original.KindPartitionHealthReportExpired
KindPartitionNewHealthReport KindBasicFabricEvent = original.KindPartitionNewHealthReport
KindPartitionPrimaryMoveAnalysis KindBasicFabricEvent = original.KindPartitionPrimaryMoveAnalysis
KindPartitionReconfigured KindBasicFabricEvent = original.KindPartitionReconfigured
KindReplicaEvent KindBasicFabricEvent = original.KindReplicaEvent
KindServiceCreated KindBasicFabricEvent = original.KindServiceCreated
KindServiceDeleted KindBasicFabricEvent = original.KindServiceDeleted
KindServiceEvent KindBasicFabricEvent = original.KindServiceEvent
KindServiceHealthReportExpired KindBasicFabricEvent = original.KindServiceHealthReportExpired
KindServiceNewHealthReport KindBasicFabricEvent = original.KindServiceNewHealthReport
KindStatefulReplicaHealthReportExpired KindBasicFabricEvent = original.KindStatefulReplicaHealthReportExpired
KindStatefulReplicaNewHealthReport KindBasicFabricEvent = original.KindStatefulReplicaNewHealthReport
KindStatelessReplicaHealthReportExpired KindBasicFabricEvent = original.KindStatelessReplicaHealthReportExpired
KindStatelessReplicaNewHealthReport KindBasicFabricEvent = original.KindStatelessReplicaNewHealthReport
)
type KindBasicNetworkResourcePropertiesBase = original.KindBasicNetworkResourcePropertiesBase
const (
KindLocal KindBasicNetworkResourcePropertiesBase = original.KindLocal
KindNetworkResourceProperties KindBasicNetworkResourcePropertiesBase = original.KindNetworkResourceProperties
KindNetworkResourcePropertiesBase KindBasicNetworkResourcePropertiesBase = original.KindNetworkResourcePropertiesBase
)
type KindBasicPropertyBatchInfo = original.KindBasicPropertyBatchInfo
const (
KindFailed KindBasicPropertyBatchInfo = original.KindFailed
KindPropertyBatchInfo KindBasicPropertyBatchInfo = original.KindPropertyBatchInfo
KindSuccessful KindBasicPropertyBatchInfo = original.KindSuccessful
)
type KindBasicPropertyBatchOperation = original.KindBasicPropertyBatchOperation
const (
KindCheckExists KindBasicPropertyBatchOperation = original.KindCheckExists
KindCheckSequence KindBasicPropertyBatchOperation = original.KindCheckSequence
KindCheckValue KindBasicPropertyBatchOperation = original.KindCheckValue
KindDelete KindBasicPropertyBatchOperation = original.KindDelete
KindGet KindBasicPropertyBatchOperation = original.KindGet
KindPropertyBatchOperation KindBasicPropertyBatchOperation = original.KindPropertyBatchOperation
KindPut KindBasicPropertyBatchOperation = original.KindPut
)
type KindBasicPropertyValue = original.KindBasicPropertyValue
const (
KindBinary KindBasicPropertyValue = original.KindBinary
KindDouble KindBasicPropertyValue = original.KindDouble
KindGUID KindBasicPropertyValue = original.KindGUID
KindInt64 KindBasicPropertyValue = original.KindInt64
KindPropertyValue KindBasicPropertyValue = original.KindPropertyValue
KindString KindBasicPropertyValue = original.KindString
)
type KindBasicProvisionApplicationTypeDescriptionBase = original.KindBasicProvisionApplicationTypeDescriptionBase
const (
KindExternalStore KindBasicProvisionApplicationTypeDescriptionBase = original.KindExternalStore
KindImageStorePath KindBasicProvisionApplicationTypeDescriptionBase = original.KindImageStorePath
KindProvisionApplicationTypeDescriptionBase KindBasicProvisionApplicationTypeDescriptionBase = original.KindProvisionApplicationTypeDescriptionBase
)
type KindBasicRepairImpactDescriptionBase = original.KindBasicRepairImpactDescriptionBase
const (
KindBasicRepairImpactDescriptionBaseKindNode KindBasicRepairImpactDescriptionBase = original.KindBasicRepairImpactDescriptionBaseKindNode
KindBasicRepairImpactDescriptionBaseKindRepairImpactDescriptionBase KindBasicRepairImpactDescriptionBase = original.KindBasicRepairImpactDescriptionBaseKindRepairImpactDescriptionBase
)
type KindBasicRepairTargetDescriptionBase = original.KindBasicRepairTargetDescriptionBase
const (
KindBasicRepairTargetDescriptionBaseKindNode KindBasicRepairTargetDescriptionBase = original.KindBasicRepairTargetDescriptionBaseKindNode
KindBasicRepairTargetDescriptionBaseKindRepairTargetDescriptionBase KindBasicRepairTargetDescriptionBase = original.KindBasicRepairTargetDescriptionBaseKindRepairTargetDescriptionBase
)
type KindBasicReplicaStatusBase = original.KindBasicReplicaStatusBase
const (
KindKeyValueStore KindBasicReplicaStatusBase = original.KindKeyValueStore
KindReplicaStatusBase KindBasicReplicaStatusBase = original.KindReplicaStatusBase
)
type KindBasicReplicatorStatus = original.KindBasicReplicatorStatus
const (
KindActiveSecondary KindBasicReplicatorStatus = original.KindActiveSecondary
KindIdleSecondary KindBasicReplicatorStatus = original.KindIdleSecondary
KindPrimary KindBasicReplicatorStatus = original.KindPrimary
KindReplicatorStatus KindBasicReplicatorStatus = original.KindReplicatorStatus
KindSecondaryReplicatorStatus KindBasicReplicatorStatus = original.KindSecondaryReplicatorStatus
)
type KindBasicSafetyCheck = original.KindBasicSafetyCheck
const (
KindEnsureAvailability KindBasicSafetyCheck = original.KindEnsureAvailability
KindEnsurePartitionQuorum KindBasicSafetyCheck = original.KindEnsurePartitionQuorum
KindEnsureSeedNodeQuorum KindBasicSafetyCheck = original.KindEnsureSeedNodeQuorum
KindPartitionSafetyCheck KindBasicSafetyCheck = original.KindPartitionSafetyCheck
KindSafetyCheck KindBasicSafetyCheck = original.KindSafetyCheck
KindWaitForInbuildReplica KindBasicSafetyCheck = original.KindWaitForInbuildReplica
KindWaitForPrimaryPlacement KindBasicSafetyCheck = original.KindWaitForPrimaryPlacement
KindWaitForPrimarySwap KindBasicSafetyCheck = original.KindWaitForPrimarySwap
KindWaitForReconfiguration KindBasicSafetyCheck = original.KindWaitForReconfiguration
)
type KindBasicScalingMechanismDescription = original.KindBasicScalingMechanismDescription
const (
KindAddRemoveIncrementalNamedPartition KindBasicScalingMechanismDescription = original.KindAddRemoveIncrementalNamedPartition
KindPartitionInstanceCount KindBasicScalingMechanismDescription = original.KindPartitionInstanceCount
KindScalingMechanismDescription KindBasicScalingMechanismDescription = original.KindScalingMechanismDescription
)
type KindBasicScalingTriggerDescription = original.KindBasicScalingTriggerDescription
const (
KindAveragePartitionLoad KindBasicScalingTriggerDescription = original.KindAveragePartitionLoad
KindAverageServiceLoad KindBasicScalingTriggerDescription = original.KindAverageServiceLoad
KindScalingTriggerDescription KindBasicScalingTriggerDescription = original.KindScalingTriggerDescription
)
type KindBasicSecretResourcePropertiesBase = original.KindBasicSecretResourcePropertiesBase
const (
KindInlinedValue KindBasicSecretResourcePropertiesBase = original.KindInlinedValue
KindSecretResourceProperties KindBasicSecretResourcePropertiesBase = original.KindSecretResourceProperties
KindSecretResourcePropertiesBase KindBasicSecretResourcePropertiesBase = original.KindSecretResourcePropertiesBase
)
type KindBasicServiceTypeDescription = original.KindBasicServiceTypeDescription
const (
KindServiceTypeDescription KindBasicServiceTypeDescription = original.KindServiceTypeDescription
KindStateful KindBasicServiceTypeDescription = original.KindStateful
KindStateless KindBasicServiceTypeDescription = original.KindStateless
)
type MoveCost = original.MoveCost
const (
High MoveCost = original.High
Low MoveCost = original.Low
Medium MoveCost = original.Medium
Zero MoveCost = original.Zero
)
type NetworkKind = original.NetworkKind
const (
Local NetworkKind = original.Local
)
type NodeDeactivationIntent = original.NodeDeactivationIntent
const (
NodeDeactivationIntentInvalid NodeDeactivationIntent = original.NodeDeactivationIntentInvalid
NodeDeactivationIntentPause NodeDeactivationIntent = original.NodeDeactivationIntentPause
NodeDeactivationIntentRemoveData NodeDeactivationIntent = original.NodeDeactivationIntentRemoveData
NodeDeactivationIntentRemoveNode NodeDeactivationIntent = original.NodeDeactivationIntentRemoveNode
NodeDeactivationIntentRestart NodeDeactivationIntent = original.NodeDeactivationIntentRestart
)
type NodeDeactivationStatus = original.NodeDeactivationStatus
const (
NodeDeactivationStatusCompleted NodeDeactivationStatus = original.NodeDeactivationStatusCompleted
NodeDeactivationStatusNone NodeDeactivationStatus = original.NodeDeactivationStatusNone
NodeDeactivationStatusSafetyCheckComplete NodeDeactivationStatus = original.NodeDeactivationStatusSafetyCheckComplete
NodeDeactivationStatusSafetyCheckInProgress NodeDeactivationStatus = original.NodeDeactivationStatusSafetyCheckInProgress
)
type NodeDeactivationTaskType = original.NodeDeactivationTaskType
const (
NodeDeactivationTaskTypeClient NodeDeactivationTaskType = original.NodeDeactivationTaskTypeClient
NodeDeactivationTaskTypeInfrastructure NodeDeactivationTaskType = original.NodeDeactivationTaskTypeInfrastructure
NodeDeactivationTaskTypeInvalid NodeDeactivationTaskType = original.NodeDeactivationTaskTypeInvalid
NodeDeactivationTaskTypeRepair NodeDeactivationTaskType = original.NodeDeactivationTaskTypeRepair
)
type NodeStatus = original.NodeStatus
const (
NodeStatusDisabled NodeStatus = original.NodeStatusDisabled
NodeStatusDisabling NodeStatus = original.NodeStatusDisabling
NodeStatusDown NodeStatus = original.NodeStatusDown
NodeStatusEnabling NodeStatus = original.NodeStatusEnabling
NodeStatusInvalid NodeStatus = original.NodeStatusInvalid
NodeStatusRemoved NodeStatus = original.NodeStatusRemoved
NodeStatusUnknown NodeStatus = original.NodeStatusUnknown
NodeStatusUp NodeStatus = original.NodeStatusUp
)
type NodeStatusFilter = original.NodeStatusFilter
const (
All NodeStatusFilter = original.All
Default NodeStatusFilter = original.Default
Disabled NodeStatusFilter = original.Disabled
Disabling NodeStatusFilter = original.Disabling
Down NodeStatusFilter = original.Down
Enabling NodeStatusFilter = original.Enabling
Removed NodeStatusFilter = original.Removed
Unknown NodeStatusFilter = original.Unknown
Up NodeStatusFilter = original.Up
)
type NodeTransitionType = original.NodeTransitionType
const (
NodeTransitionTypeInvalid NodeTransitionType = original.NodeTransitionTypeInvalid
NodeTransitionTypeStart NodeTransitionType = original.NodeTransitionTypeStart
NodeTransitionTypeStop NodeTransitionType = original.NodeTransitionTypeStop
)
type NodeUpgradePhase = original.NodeUpgradePhase
const (
NodeUpgradePhaseInvalid NodeUpgradePhase = original.NodeUpgradePhaseInvalid
NodeUpgradePhasePostUpgradeSafetyCheck NodeUpgradePhase = original.NodeUpgradePhasePostUpgradeSafetyCheck
NodeUpgradePhasePreUpgradeSafetyCheck NodeUpgradePhase = original.NodeUpgradePhasePreUpgradeSafetyCheck
NodeUpgradePhaseUpgrading NodeUpgradePhase = original.NodeUpgradePhaseUpgrading
)
type OperatingSystemType = original.OperatingSystemType
const (
Linux OperatingSystemType = original.Linux
Windows OperatingSystemType = original.Windows
)
type OperationState = original.OperationState
const (
OperationStateCancelled OperationState = original.OperationStateCancelled
OperationStateCompleted OperationState = original.OperationStateCompleted
OperationStateFaulted OperationState = original.OperationStateFaulted
OperationStateForceCancelled OperationState = original.OperationStateForceCancelled
OperationStateInvalid OperationState = original.OperationStateInvalid
OperationStateRollingBack OperationState = original.OperationStateRollingBack
OperationStateRunning OperationState = original.OperationStateRunning
)
type OperationType = original.OperationType
const (
OperationTypeInvalid OperationType = original.OperationTypeInvalid
OperationTypeNodeTransition OperationType = original.OperationTypeNodeTransition
OperationTypePartitionDataLoss OperationType = original.OperationTypePartitionDataLoss
OperationTypePartitionQuorumLoss OperationType = original.OperationTypePartitionQuorumLoss
OperationTypePartitionRestart OperationType = original.OperationTypePartitionRestart
)
type PackageSharingPolicyScope = original.PackageSharingPolicyScope
const (
PackageSharingPolicyScopeAll PackageSharingPolicyScope = original.PackageSharingPolicyScopeAll
PackageSharingPolicyScopeCode PackageSharingPolicyScope = original.PackageSharingPolicyScopeCode
PackageSharingPolicyScopeConfig PackageSharingPolicyScope = original.PackageSharingPolicyScopeConfig
PackageSharingPolicyScopeData PackageSharingPolicyScope = original.PackageSharingPolicyScopeData
PackageSharingPolicyScopeNone PackageSharingPolicyScope = original.PackageSharingPolicyScopeNone
)
type PartitionAccessStatus = original.PartitionAccessStatus
const (
PartitionAccessStatusGranted PartitionAccessStatus = original.PartitionAccessStatusGranted
PartitionAccessStatusInvalid PartitionAccessStatus = original.PartitionAccessStatusInvalid
PartitionAccessStatusNotPrimary PartitionAccessStatus = original.PartitionAccessStatusNotPrimary
PartitionAccessStatusNoWriteQuorum PartitionAccessStatus = original.PartitionAccessStatusNoWriteQuorum
PartitionAccessStatusReconfigurationPending PartitionAccessStatus = original.PartitionAccessStatusReconfigurationPending
)
type PartitionScheme = original.PartitionScheme
const (
PartitionSchemeInvalid PartitionScheme = original.PartitionSchemeInvalid
PartitionSchemeNamed PartitionScheme = original.PartitionSchemeNamed
PartitionSchemeSingleton PartitionScheme = original.PartitionSchemeSingleton
PartitionSchemeUniformInt64Range PartitionScheme = original.PartitionSchemeUniformInt64Range
)
type PartitionSchemeBasicPartitionSchemeDescription = original.PartitionSchemeBasicPartitionSchemeDescription
const (
PartitionSchemeNamed1 PartitionSchemeBasicPartitionSchemeDescription = original.PartitionSchemeNamed1
PartitionSchemePartitionSchemeDescription PartitionSchemeBasicPartitionSchemeDescription = original.PartitionSchemePartitionSchemeDescription
PartitionSchemeSingleton1 PartitionSchemeBasicPartitionSchemeDescription = original.PartitionSchemeSingleton1
PartitionSchemeUniformInt64Range1 PartitionSchemeBasicPartitionSchemeDescription = original.PartitionSchemeUniformInt64Range1
)
type PropertyBatchInfoKind = original.PropertyBatchInfoKind
const (
PropertyBatchInfoKindFailed PropertyBatchInfoKind = original.PropertyBatchInfoKindFailed
PropertyBatchInfoKindInvalid PropertyBatchInfoKind = original.PropertyBatchInfoKindInvalid
PropertyBatchInfoKindSuccessful PropertyBatchInfoKind = original.PropertyBatchInfoKindSuccessful
)
type PropertyBatchOperationKind = original.PropertyBatchOperationKind
const (
PropertyBatchOperationKindCheckExists PropertyBatchOperationKind = original.PropertyBatchOperationKindCheckExists
PropertyBatchOperationKindCheckSequence PropertyBatchOperationKind = original.PropertyBatchOperationKindCheckSequence
PropertyBatchOperationKindCheckValue PropertyBatchOperationKind = original.PropertyBatchOperationKindCheckValue
PropertyBatchOperationKindDelete PropertyBatchOperationKind = original.PropertyBatchOperationKindDelete
PropertyBatchOperationKindGet PropertyBatchOperationKind = original.PropertyBatchOperationKindGet
PropertyBatchOperationKindInvalid PropertyBatchOperationKind = original.PropertyBatchOperationKindInvalid
PropertyBatchOperationKindPut PropertyBatchOperationKind = original.PropertyBatchOperationKindPut
)
type PropertyValueKind = original.PropertyValueKind
const (
PropertyValueKindBinary PropertyValueKind = original.PropertyValueKindBinary
PropertyValueKindDouble PropertyValueKind = original.PropertyValueKindDouble
PropertyValueKindGUID PropertyValueKind = original.PropertyValueKindGUID
PropertyValueKindInt64 PropertyValueKind = original.PropertyValueKindInt64
PropertyValueKindInvalid PropertyValueKind = original.PropertyValueKindInvalid
PropertyValueKindString PropertyValueKind = original.PropertyValueKindString
)
type ProvisionApplicationTypeKind = original.ProvisionApplicationTypeKind
const (
ProvisionApplicationTypeKindExternalStore ProvisionApplicationTypeKind = original.ProvisionApplicationTypeKindExternalStore
ProvisionApplicationTypeKindImageStorePath ProvisionApplicationTypeKind = original.ProvisionApplicationTypeKindImageStorePath
ProvisionApplicationTypeKindInvalid ProvisionApplicationTypeKind = original.ProvisionApplicationTypeKindInvalid
)
type QuorumLossMode = original.QuorumLossMode
const (
QuorumLossModeAllReplicas QuorumLossMode = original.QuorumLossModeAllReplicas
QuorumLossModeInvalid QuorumLossMode = original.QuorumLossModeInvalid
QuorumLossModeQuorumReplicas QuorumLossMode = original.QuorumLossModeQuorumReplicas
)
type ReconfigurationPhase = original.ReconfigurationPhase
const (
ReconfigurationPhaseAbortPhaseZero ReconfigurationPhase = original.ReconfigurationPhaseAbortPhaseZero
ReconfigurationPhaseNone ReconfigurationPhase = original.ReconfigurationPhaseNone
ReconfigurationPhasePhase0 ReconfigurationPhase = original.ReconfigurationPhasePhase0
ReconfigurationPhasePhase1 ReconfigurationPhase = original.ReconfigurationPhasePhase1
ReconfigurationPhasePhase2 ReconfigurationPhase = original.ReconfigurationPhasePhase2
ReconfigurationPhasePhase3 ReconfigurationPhase = original.ReconfigurationPhasePhase3
ReconfigurationPhasePhase4 ReconfigurationPhase = original.ReconfigurationPhasePhase4
ReconfigurationPhaseUnknown ReconfigurationPhase = original.ReconfigurationPhaseUnknown
)
type ReconfigurationType = original.ReconfigurationType
const (
ReconfigurationTypeFailover ReconfigurationType = original.ReconfigurationTypeFailover
ReconfigurationTypeOther ReconfigurationType = original.ReconfigurationTypeOther
ReconfigurationTypeSwapPrimary ReconfigurationType = original.ReconfigurationTypeSwapPrimary
ReconfigurationTypeUnknown ReconfigurationType = original.ReconfigurationTypeUnknown
)
type RepairImpactKind = original.RepairImpactKind
const (
RepairImpactKindInvalid RepairImpactKind = original.RepairImpactKindInvalid
RepairImpactKindNode RepairImpactKind = original.RepairImpactKindNode
)
type RepairTargetKind = original.RepairTargetKind
const (
RepairTargetKindInvalid RepairTargetKind = original.RepairTargetKindInvalid
RepairTargetKindNode RepairTargetKind = original.RepairTargetKindNode
)
type RepairTaskHealthCheckState = original.RepairTaskHealthCheckState
const (
InProgress RepairTaskHealthCheckState = original.InProgress
NotStarted RepairTaskHealthCheckState = original.NotStarted
Skipped RepairTaskHealthCheckState = original.Skipped
Succeeded RepairTaskHealthCheckState = original.Succeeded
TimedOut RepairTaskHealthCheckState = original.TimedOut
)
type ReplicaHealthReportServiceKind = original.ReplicaHealthReportServiceKind
const (
Stateful ReplicaHealthReportServiceKind = original.Stateful
Stateless ReplicaHealthReportServiceKind = original.Stateless
)
type ReplicaKind = original.ReplicaKind
const (
ReplicaKindInvalid ReplicaKind = original.ReplicaKindInvalid
ReplicaKindKeyValueStore ReplicaKind = original.ReplicaKindKeyValueStore
)
type ReplicaRole = original.ReplicaRole
const (
ReplicaRoleActiveSecondary ReplicaRole = original.ReplicaRoleActiveSecondary
ReplicaRoleIdleSecondary ReplicaRole = original.ReplicaRoleIdleSecondary
ReplicaRoleNone ReplicaRole = original.ReplicaRoleNone
ReplicaRolePrimary ReplicaRole = original.ReplicaRolePrimary
ReplicaRoleUnknown ReplicaRole = original.ReplicaRoleUnknown
)
type ReplicaStatus = original.ReplicaStatus
const (
ReplicaStatusDown ReplicaStatus = original.ReplicaStatusDown
ReplicaStatusDropped ReplicaStatus = original.ReplicaStatusDropped
ReplicaStatusInBuild ReplicaStatus = original.ReplicaStatusInBuild
ReplicaStatusInvalid ReplicaStatus = original.ReplicaStatusInvalid
ReplicaStatusReady ReplicaStatus = original.ReplicaStatusReady
ReplicaStatusStandby ReplicaStatus = original.ReplicaStatusStandby
)
type ReplicatorOperationName = original.ReplicatorOperationName
const (
ReplicatorOperationNameAbort ReplicatorOperationName = original.ReplicatorOperationNameAbort
ReplicatorOperationNameBuild ReplicatorOperationName = original.ReplicatorOperationNameBuild
ReplicatorOperationNameChangeRole ReplicatorOperationName = original.ReplicatorOperationNameChangeRole
ReplicatorOperationNameClose ReplicatorOperationName = original.ReplicatorOperationNameClose
ReplicatorOperationNameInvalid ReplicatorOperationName = original.ReplicatorOperationNameInvalid
ReplicatorOperationNameNone ReplicatorOperationName = original.ReplicatorOperationNameNone
ReplicatorOperationNameOnDataLoss ReplicatorOperationName = original.ReplicatorOperationNameOnDataLoss
ReplicatorOperationNameOpen ReplicatorOperationName = original.ReplicatorOperationNameOpen
ReplicatorOperationNameUpdateEpoch ReplicatorOperationName = original.ReplicatorOperationNameUpdateEpoch
ReplicatorOperationNameWaitForCatchup ReplicatorOperationName = original.ReplicatorOperationNameWaitForCatchup
)
type ResourceStatus = original.ResourceStatus
const (
ResourceStatusCreating ResourceStatus = original.ResourceStatusCreating
ResourceStatusDeleting ResourceStatus = original.ResourceStatusDeleting
ResourceStatusFailed ResourceStatus = original.ResourceStatusFailed
ResourceStatusReady ResourceStatus = original.ResourceStatusReady
ResourceStatusUnknown ResourceStatus = original.ResourceStatusUnknown
ResourceStatusUpgrading ResourceStatus = original.ResourceStatusUpgrading
)
type RestartPartitionMode = original.RestartPartitionMode
const (
RestartPartitionModeAllReplicasOrInstances RestartPartitionMode = original.RestartPartitionModeAllReplicasOrInstances
RestartPartitionModeInvalid RestartPartitionMode = original.RestartPartitionModeInvalid
RestartPartitionModeOnlyActiveSecondaries RestartPartitionMode = original.RestartPartitionModeOnlyActiveSecondaries
)
type RestoreState = original.RestoreState
const (
RestoreStateAccepted RestoreState = original.RestoreStateAccepted
RestoreStateFailure RestoreState = original.RestoreStateFailure
RestoreStateInvalid RestoreState = original.RestoreStateInvalid
RestoreStateRestoreInProgress RestoreState = original.RestoreStateRestoreInProgress
RestoreStateSuccess RestoreState = original.RestoreStateSuccess
RestoreStateTimeout RestoreState = original.RestoreStateTimeout
)
type ResultStatus = original.ResultStatus
const (
ResultStatusCancelled ResultStatus = original.ResultStatusCancelled
ResultStatusFailed ResultStatus = original.ResultStatusFailed
ResultStatusInterrupted ResultStatus = original.ResultStatusInterrupted
ResultStatusInvalid ResultStatus = original.ResultStatusInvalid
ResultStatusPending ResultStatus = original.ResultStatusPending
ResultStatusSucceeded ResultStatus = original.ResultStatusSucceeded
)
type RetentionPolicyType = original.RetentionPolicyType
const (
RetentionPolicyTypeBasic RetentionPolicyType = original.RetentionPolicyTypeBasic
RetentionPolicyTypeInvalid RetentionPolicyType = original.RetentionPolicyTypeInvalid
)
type RetentionPolicyTypeBasicBasicRetentionPolicyDescription = original.RetentionPolicyTypeBasicBasicRetentionPolicyDescription
const (
RetentionPolicyTypeBasic1 RetentionPolicyTypeBasicBasicRetentionPolicyDescription = original.RetentionPolicyTypeBasic1
RetentionPolicyTypeRetentionPolicyDescription RetentionPolicyTypeBasicBasicRetentionPolicyDescription = original.RetentionPolicyTypeRetentionPolicyDescription
)
type SafetyCheckKind = original.SafetyCheckKind
const (
SafetyCheckKindEnsureAvailability SafetyCheckKind = original.SafetyCheckKindEnsureAvailability
SafetyCheckKindEnsurePartitionQuorum SafetyCheckKind = original.SafetyCheckKindEnsurePartitionQuorum
SafetyCheckKindEnsureSeedNodeQuorum SafetyCheckKind = original.SafetyCheckKindEnsureSeedNodeQuorum
SafetyCheckKindInvalid SafetyCheckKind = original.SafetyCheckKindInvalid
SafetyCheckKindWaitForInbuildReplica SafetyCheckKind = original.SafetyCheckKindWaitForInbuildReplica
SafetyCheckKindWaitForPrimaryPlacement SafetyCheckKind = original.SafetyCheckKindWaitForPrimaryPlacement
SafetyCheckKindWaitForPrimarySwap SafetyCheckKind = original.SafetyCheckKindWaitForPrimarySwap
SafetyCheckKindWaitForReconfiguration SafetyCheckKind = original.SafetyCheckKindWaitForReconfiguration
)
type ScalingMechanismKind = original.ScalingMechanismKind
const (
ScalingMechanismKindAddRemoveIncrementalNamedPartition ScalingMechanismKind = original.ScalingMechanismKindAddRemoveIncrementalNamedPartition
ScalingMechanismKindInvalid ScalingMechanismKind = original.ScalingMechanismKindInvalid
ScalingMechanismKindPartitionInstanceCount ScalingMechanismKind = original.ScalingMechanismKindPartitionInstanceCount
)
type ScalingTriggerKind = original.ScalingTriggerKind
const (
ScalingTriggerKindAveragePartitionLoad ScalingTriggerKind = original.ScalingTriggerKindAveragePartitionLoad
ScalingTriggerKindAverageServiceLoad ScalingTriggerKind = original.ScalingTriggerKindAverageServiceLoad
ScalingTriggerKindInvalid ScalingTriggerKind = original.ScalingTriggerKindInvalid
)
type ScheduleKind = original.ScheduleKind
const (
ScheduleKindBackupScheduleDescription ScheduleKind = original.ScheduleKindBackupScheduleDescription
ScheduleKindFrequencyBased ScheduleKind = original.ScheduleKindFrequencyBased
ScheduleKindTimeBased ScheduleKind = original.ScheduleKindTimeBased
)
type SecretKind = original.SecretKind
const (
InlinedValue SecretKind = original.InlinedValue
)
type ServiceCorrelationScheme = original.ServiceCorrelationScheme
const (
ServiceCorrelationSchemeAffinity ServiceCorrelationScheme = original.ServiceCorrelationSchemeAffinity
ServiceCorrelationSchemeAlignedAffinity ServiceCorrelationScheme = original.ServiceCorrelationSchemeAlignedAffinity
ServiceCorrelationSchemeInvalid ServiceCorrelationScheme = original.ServiceCorrelationSchemeInvalid
ServiceCorrelationSchemeNonAlignedAffinity ServiceCorrelationScheme = original.ServiceCorrelationSchemeNonAlignedAffinity
)
type ServiceEndpointRole = original.ServiceEndpointRole
const (
ServiceEndpointRoleInvalid ServiceEndpointRole = original.ServiceEndpointRoleInvalid
ServiceEndpointRoleStatefulPrimary ServiceEndpointRole = original.ServiceEndpointRoleStatefulPrimary
ServiceEndpointRoleStatefulSecondary ServiceEndpointRole = original.ServiceEndpointRoleStatefulSecondary
ServiceEndpointRoleStateless ServiceEndpointRole = original.ServiceEndpointRoleStateless
)
type ServiceKind = original.ServiceKind
const (
ServiceKindInvalid ServiceKind = original.ServiceKindInvalid
ServiceKindStateful ServiceKind = original.ServiceKindStateful
ServiceKindStateless ServiceKind = original.ServiceKindStateless
)
type ServiceKindBasicDeployedServiceReplicaDetailInfo = original.ServiceKindBasicDeployedServiceReplicaDetailInfo
const (
ServiceKindDeployedServiceReplicaDetailInfo ServiceKindBasicDeployedServiceReplicaDetailInfo = original.ServiceKindDeployedServiceReplicaDetailInfo
ServiceKindStateful1 ServiceKindBasicDeployedServiceReplicaDetailInfo = original.ServiceKindStateful1
ServiceKindStateless1 ServiceKindBasicDeployedServiceReplicaDetailInfo = original.ServiceKindStateless1
)
type ServiceKindBasicDeployedServiceReplicaInfo = original.ServiceKindBasicDeployedServiceReplicaInfo
const (
ServiceKindBasicDeployedServiceReplicaInfoServiceKindDeployedServiceReplicaInfo ServiceKindBasicDeployedServiceReplicaInfo = original.ServiceKindBasicDeployedServiceReplicaInfoServiceKindDeployedServiceReplicaInfo
ServiceKindBasicDeployedServiceReplicaInfoServiceKindStateful ServiceKindBasicDeployedServiceReplicaInfo = original.ServiceKindBasicDeployedServiceReplicaInfoServiceKindStateful
ServiceKindBasicDeployedServiceReplicaInfoServiceKindStateless ServiceKindBasicDeployedServiceReplicaInfo = original.ServiceKindBasicDeployedServiceReplicaInfoServiceKindStateless
)
type ServiceKindBasicReplicaHealth = original.ServiceKindBasicReplicaHealth
const (
ServiceKindBasicReplicaHealthServiceKindReplicaHealth ServiceKindBasicReplicaHealth = original.ServiceKindBasicReplicaHealthServiceKindReplicaHealth
ServiceKindBasicReplicaHealthServiceKindStateful ServiceKindBasicReplicaHealth = original.ServiceKindBasicReplicaHealthServiceKindStateful
ServiceKindBasicReplicaHealthServiceKindStateless ServiceKindBasicReplicaHealth = original.ServiceKindBasicReplicaHealthServiceKindStateless
)
type ServiceKindBasicReplicaHealthState = original.ServiceKindBasicReplicaHealthState
const (
ServiceKindBasicReplicaHealthStateServiceKindReplicaHealthState ServiceKindBasicReplicaHealthState = original.ServiceKindBasicReplicaHealthStateServiceKindReplicaHealthState
ServiceKindBasicReplicaHealthStateServiceKindStateful ServiceKindBasicReplicaHealthState = original.ServiceKindBasicReplicaHealthStateServiceKindStateful
ServiceKindBasicReplicaHealthStateServiceKindStateless ServiceKindBasicReplicaHealthState = original.ServiceKindBasicReplicaHealthStateServiceKindStateless
)
type ServiceKindBasicReplicaInfo = original.ServiceKindBasicReplicaInfo
const (
ServiceKindBasicReplicaInfoServiceKindReplicaInfo ServiceKindBasicReplicaInfo = original.ServiceKindBasicReplicaInfoServiceKindReplicaInfo
ServiceKindBasicReplicaInfoServiceKindStateful ServiceKindBasicReplicaInfo = original.ServiceKindBasicReplicaInfoServiceKindStateful
ServiceKindBasicReplicaInfoServiceKindStateless ServiceKindBasicReplicaInfo = original.ServiceKindBasicReplicaInfoServiceKindStateless
)
type ServiceKindBasicServiceDescription = original.ServiceKindBasicServiceDescription
const (
ServiceKindBasicServiceDescriptionServiceKindServiceDescription ServiceKindBasicServiceDescription = original.ServiceKindBasicServiceDescriptionServiceKindServiceDescription
ServiceKindBasicServiceDescriptionServiceKindStateful ServiceKindBasicServiceDescription = original.ServiceKindBasicServiceDescriptionServiceKindStateful
ServiceKindBasicServiceDescriptionServiceKindStateless ServiceKindBasicServiceDescription = original.ServiceKindBasicServiceDescriptionServiceKindStateless
)
type ServiceKindBasicServiceInfo = original.ServiceKindBasicServiceInfo
const (
ServiceKindBasicServiceInfoServiceKindServiceInfo ServiceKindBasicServiceInfo = original.ServiceKindBasicServiceInfoServiceKindServiceInfo
ServiceKindBasicServiceInfoServiceKindStateful ServiceKindBasicServiceInfo = original.ServiceKindBasicServiceInfoServiceKindStateful
ServiceKindBasicServiceInfoServiceKindStateless ServiceKindBasicServiceInfo = original.ServiceKindBasicServiceInfoServiceKindStateless
)
type ServiceKindBasicServicePartitionInfo = original.ServiceKindBasicServicePartitionInfo
const (
ServiceKindBasicServicePartitionInfoServiceKindServicePartitionInfo ServiceKindBasicServicePartitionInfo = original.ServiceKindBasicServicePartitionInfoServiceKindServicePartitionInfo
ServiceKindBasicServicePartitionInfoServiceKindStateful ServiceKindBasicServicePartitionInfo = original.ServiceKindBasicServicePartitionInfoServiceKindStateful
ServiceKindBasicServicePartitionInfoServiceKindStateless ServiceKindBasicServicePartitionInfo = original.ServiceKindBasicServicePartitionInfoServiceKindStateless
)
type ServiceKindBasicServiceUpdateDescription = original.ServiceKindBasicServiceUpdateDescription
const (
ServiceKindBasicServiceUpdateDescriptionServiceKindServiceUpdateDescription ServiceKindBasicServiceUpdateDescription = original.ServiceKindBasicServiceUpdateDescriptionServiceKindServiceUpdateDescription
ServiceKindBasicServiceUpdateDescriptionServiceKindStateful ServiceKindBasicServiceUpdateDescription = original.ServiceKindBasicServiceUpdateDescriptionServiceKindStateful
ServiceKindBasicServiceUpdateDescriptionServiceKindStateless ServiceKindBasicServiceUpdateDescription = original.ServiceKindBasicServiceUpdateDescriptionServiceKindStateless
)
type ServiceLoadMetricWeight = original.ServiceLoadMetricWeight
const (
ServiceLoadMetricWeightHigh ServiceLoadMetricWeight = original.ServiceLoadMetricWeightHigh
ServiceLoadMetricWeightLow ServiceLoadMetricWeight = original.ServiceLoadMetricWeightLow
ServiceLoadMetricWeightMedium ServiceLoadMetricWeight = original.ServiceLoadMetricWeightMedium
ServiceLoadMetricWeightZero ServiceLoadMetricWeight = original.ServiceLoadMetricWeightZero
)
type ServiceOperationName = original.ServiceOperationName
const (
ServiceOperationNameAbort ServiceOperationName = original.ServiceOperationNameAbort
ServiceOperationNameChangeRole ServiceOperationName = original.ServiceOperationNameChangeRole
ServiceOperationNameClose ServiceOperationName = original.ServiceOperationNameClose
ServiceOperationNameNone ServiceOperationName = original.ServiceOperationNameNone
ServiceOperationNameOpen ServiceOperationName = original.ServiceOperationNameOpen
ServiceOperationNameUnknown ServiceOperationName = original.ServiceOperationNameUnknown
)
type ServicePackageActivationMode = original.ServicePackageActivationMode
const (
ExclusiveProcess ServicePackageActivationMode = original.ExclusiveProcess
SharedProcess ServicePackageActivationMode = original.SharedProcess
)
type ServicePartitionKind = original.ServicePartitionKind
const (
ServicePartitionKindInt64Range ServicePartitionKind = original.ServicePartitionKindInt64Range
ServicePartitionKindInvalid ServicePartitionKind = original.ServicePartitionKindInvalid
ServicePartitionKindNamed ServicePartitionKind = original.ServicePartitionKindNamed
ServicePartitionKindSingleton ServicePartitionKind = original.ServicePartitionKindSingleton
)
type ServicePartitionKindBasicPartitionInformation = original.ServicePartitionKindBasicPartitionInformation
const (
ServicePartitionKindInt64Range1 ServicePartitionKindBasicPartitionInformation = original.ServicePartitionKindInt64Range1
ServicePartitionKindNamed1 ServicePartitionKindBasicPartitionInformation = original.ServicePartitionKindNamed1
ServicePartitionKindPartitionInformation ServicePartitionKindBasicPartitionInformation = original.ServicePartitionKindPartitionInformation
ServicePartitionKindSingleton1 ServicePartitionKindBasicPartitionInformation = original.ServicePartitionKindSingleton1
)
type ServicePartitionStatus = original.ServicePartitionStatus
const (
ServicePartitionStatusDeleting ServicePartitionStatus = original.ServicePartitionStatusDeleting
ServicePartitionStatusInQuorumLoss ServicePartitionStatus = original.ServicePartitionStatusInQuorumLoss
ServicePartitionStatusInvalid ServicePartitionStatus = original.ServicePartitionStatusInvalid
ServicePartitionStatusNotReady ServicePartitionStatus = original.ServicePartitionStatusNotReady
ServicePartitionStatusReady ServicePartitionStatus = original.ServicePartitionStatusReady
ServicePartitionStatusReconfiguring ServicePartitionStatus = original.ServicePartitionStatusReconfiguring
)
type ServicePlacementPolicyType = original.ServicePlacementPolicyType
const (
ServicePlacementPolicyTypeInvalid ServicePlacementPolicyType = original.ServicePlacementPolicyTypeInvalid
ServicePlacementPolicyTypeInvalidDomain ServicePlacementPolicyType = original.ServicePlacementPolicyTypeInvalidDomain
ServicePlacementPolicyTypeNonPartiallyPlaceService ServicePlacementPolicyType = original.ServicePlacementPolicyTypeNonPartiallyPlaceService
ServicePlacementPolicyTypePreferPrimaryDomain ServicePlacementPolicyType = original.ServicePlacementPolicyTypePreferPrimaryDomain
ServicePlacementPolicyTypeRequireDomain ServicePlacementPolicyType = original.ServicePlacementPolicyTypeRequireDomain
ServicePlacementPolicyTypeRequireDomainDistribution ServicePlacementPolicyType = original.ServicePlacementPolicyTypeRequireDomainDistribution
)
type ServiceStatus = original.ServiceStatus
const (
ServiceStatusActive ServiceStatus = original.ServiceStatusActive
ServiceStatusCreating ServiceStatus = original.ServiceStatusCreating
ServiceStatusDeleting ServiceStatus = original.ServiceStatusDeleting
ServiceStatusFailed ServiceStatus = original.ServiceStatusFailed
ServiceStatusUnknown ServiceStatus = original.ServiceStatusUnknown
ServiceStatusUpgrading ServiceStatus = original.ServiceStatusUpgrading
)
type ServiceTypeRegistrationStatus = original.ServiceTypeRegistrationStatus
const (
ServiceTypeRegistrationStatusDisabled ServiceTypeRegistrationStatus = original.ServiceTypeRegistrationStatusDisabled
ServiceTypeRegistrationStatusEnabled ServiceTypeRegistrationStatus = original.ServiceTypeRegistrationStatusEnabled
ServiceTypeRegistrationStatusInvalid ServiceTypeRegistrationStatus = original.ServiceTypeRegistrationStatusInvalid
ServiceTypeRegistrationStatusRegistered ServiceTypeRegistrationStatus = original.ServiceTypeRegistrationStatusRegistered
)
type SizeTypes = original.SizeTypes
const (
SizeTypesLarge SizeTypes = original.SizeTypesLarge
SizeTypesMedium SizeTypes = original.SizeTypesMedium
SizeTypesSmall SizeTypes = original.SizeTypesSmall
)
type State = original.State
const (
StateApproved State = original.StateApproved
StateClaimed State = original.StateClaimed
StateCompleted State = original.StateCompleted
StateCreated State = original.StateCreated
StateExecuting State = original.StateExecuting
StateInvalid State = original.StateInvalid
StatePreparing State = original.StatePreparing
StateRestoring State = original.StateRestoring
)
type StorageKind = original.StorageKind
const (
StorageKindAzureBlobStore StorageKind = original.StorageKindAzureBlobStore
StorageKindBackupStorageDescription StorageKind = original.StorageKindBackupStorageDescription
StorageKindFileShare StorageKind = original.StorageKindFileShare
)
type Type = original.Type
const (
TypeInvalidDomain Type = original.TypeInvalidDomain
TypeNonPartiallyPlaceService Type = original.TypeNonPartiallyPlaceService
TypePreferPrimaryDomain Type = original.TypePreferPrimaryDomain
TypeRequireDomain Type = original.TypeRequireDomain
TypeRequireDomainDistribution Type = original.TypeRequireDomainDistribution
TypeServicePlacementPolicyDescription Type = original.TypeServicePlacementPolicyDescription
)
type UpgradeDomainState = original.UpgradeDomainState
const (
UpgradeDomainStateCompleted UpgradeDomainState = original.UpgradeDomainStateCompleted
UpgradeDomainStateInProgress UpgradeDomainState = original.UpgradeDomainStateInProgress
UpgradeDomainStateInvalid UpgradeDomainState = original.UpgradeDomainStateInvalid
UpgradeDomainStatePending UpgradeDomainState = original.UpgradeDomainStatePending
)
type UpgradeKind = original.UpgradeKind
const (
UpgradeKindInvalid UpgradeKind = original.UpgradeKindInvalid
UpgradeKindRolling UpgradeKind = original.UpgradeKindRolling
)
type UpgradeMode = original.UpgradeMode
const (
UpgradeModeInvalid UpgradeMode = original.UpgradeModeInvalid
UpgradeModeMonitored UpgradeMode = original.UpgradeModeMonitored
UpgradeModeUnmonitoredAuto UpgradeMode = original.UpgradeModeUnmonitoredAuto
UpgradeModeUnmonitoredManual UpgradeMode = original.UpgradeModeUnmonitoredManual
)
type UpgradeSortOrder = original.UpgradeSortOrder
const (
UpgradeSortOrderDefault UpgradeSortOrder = original.UpgradeSortOrderDefault
UpgradeSortOrderInvalid UpgradeSortOrder = original.UpgradeSortOrderInvalid
UpgradeSortOrderLexicographical UpgradeSortOrder = original.UpgradeSortOrderLexicographical
UpgradeSortOrderNumeric UpgradeSortOrder = original.UpgradeSortOrderNumeric
UpgradeSortOrderReverseLexicographical UpgradeSortOrder = original.UpgradeSortOrderReverseLexicographical
UpgradeSortOrderReverseNumeric UpgradeSortOrder = original.UpgradeSortOrderReverseNumeric
)
type UpgradeState = original.UpgradeState
const (
UpgradeStateFailed UpgradeState = original.UpgradeStateFailed
UpgradeStateInvalid UpgradeState = original.UpgradeStateInvalid
UpgradeStateRollingBackCompleted UpgradeState = original.UpgradeStateRollingBackCompleted
UpgradeStateRollingBackInProgress UpgradeState = original.UpgradeStateRollingBackInProgress
UpgradeStateRollingForwardCompleted UpgradeState = original.UpgradeStateRollingForwardCompleted
UpgradeStateRollingForwardInProgress UpgradeState = original.UpgradeStateRollingForwardInProgress
UpgradeStateRollingForwardPending UpgradeState = original.UpgradeStateRollingForwardPending
)
type UpgradeType = original.UpgradeType
const (
UpgradeTypeInvalid UpgradeType = original.UpgradeTypeInvalid
UpgradeTypeRolling UpgradeType = original.UpgradeTypeRolling
UpgradeTypeRollingForceRestart UpgradeType = original.UpgradeTypeRollingForceRestart
)
type VolumeProvider = original.VolumeProvider
const (
SFAzureFile VolumeProvider = original.SFAzureFile
)
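// Note (illustrative): everything from here on is a one-to-one type alias for
// the corresponding model type in the underlying versioned servicefabric
// package; the aliases exist so code written against this profile keeps
// compiling when the profile is repointed at a newer API version. A minimal
// usage sketch (the helper name is hypothetical):
//
//	var health ApplicationHealth = fetchApplicationHealth() // hypothetical helper returning the aliased type
//	_ = health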
type AadMetadata = original.AadMetadata
type AadMetadataObject = original.AadMetadataObject
type AddRemoveIncrementalNamedPartitionScalingMechanism = original.AddRemoveIncrementalNamedPartitionScalingMechanism
type AddRemoveReplicaScalingMechanism = original.AddRemoveReplicaScalingMechanism
type AnalysisEventMetadata = original.AnalysisEventMetadata
type ApplicationBackupConfigurationInfo = original.ApplicationBackupConfigurationInfo
type ApplicationBackupEntity = original.ApplicationBackupEntity
type ApplicationCapacityDescription = original.ApplicationCapacityDescription
type ApplicationContainerInstanceExitedEvent = original.ApplicationContainerInstanceExitedEvent
type ApplicationCreatedEvent = original.ApplicationCreatedEvent
type ApplicationDeletedEvent = original.ApplicationDeletedEvent
type ApplicationDescription = original.ApplicationDescription
type ApplicationEvent = original.ApplicationEvent
type ApplicationHealth = original.ApplicationHealth
type ApplicationHealthEvaluation = original.ApplicationHealthEvaluation
type ApplicationHealthPolicies = original.ApplicationHealthPolicies
type ApplicationHealthPolicy = original.ApplicationHealthPolicy
type ApplicationHealthPolicyMapItem = original.ApplicationHealthPolicyMapItem
type ApplicationHealthReportExpiredEvent = original.ApplicationHealthReportExpiredEvent
type ApplicationHealthState = original.ApplicationHealthState
type ApplicationHealthStateChunk = original.ApplicationHealthStateChunk
type ApplicationHealthStateChunkList = original.ApplicationHealthStateChunkList
type ApplicationHealthStateFilter = original.ApplicationHealthStateFilter
type ApplicationInfo = original.ApplicationInfo
type ApplicationLoadInfo = original.ApplicationLoadInfo
type ApplicationMetricDescription = original.ApplicationMetricDescription
type ApplicationNameInfo = original.ApplicationNameInfo
type ApplicationNewHealthReportEvent = original.ApplicationNewHealthReportEvent
type ApplicationParameter = original.ApplicationParameter
type ApplicationProcessExitedEvent = original.ApplicationProcessExitedEvent
type ApplicationProperties = original.ApplicationProperties
type ApplicationResourceDescription = original.ApplicationResourceDescription
type ApplicationScopedVolume = original.ApplicationScopedVolume
type ApplicationScopedVolumeCreationParameters = original.ApplicationScopedVolumeCreationParameters
type ApplicationScopedVolumeCreationParametersServiceFabricVolumeDisk = original.ApplicationScopedVolumeCreationParametersServiceFabricVolumeDisk
type ApplicationTypeApplicationsHealthEvaluation = original.ApplicationTypeApplicationsHealthEvaluation
type ApplicationTypeHealthPolicyMapItem = original.ApplicationTypeHealthPolicyMapItem
type ApplicationTypeImageStorePath = original.ApplicationTypeImageStorePath
type ApplicationTypeInfo = original.ApplicationTypeInfo
type ApplicationTypeManifest = original.ApplicationTypeManifest
type ApplicationUpgradeCompletedEvent = original.ApplicationUpgradeCompletedEvent
type ApplicationUpgradeDescription = original.ApplicationUpgradeDescription
type ApplicationUpgradeDomainCompletedEvent = original.ApplicationUpgradeDomainCompletedEvent
type ApplicationUpgradeProgressInfo = original.ApplicationUpgradeProgressInfo
type ApplicationUpgradeRollbackCompletedEvent = original.ApplicationUpgradeRollbackCompletedEvent
type ApplicationUpgradeRollbackStartedEvent = original.ApplicationUpgradeRollbackStartedEvent
type ApplicationUpgradeStartedEvent = original.ApplicationUpgradeStartedEvent
type ApplicationUpgradeUpdateDescription = original.ApplicationUpgradeUpdateDescription
type ApplicationsHealthEvaluation = original.ApplicationsHealthEvaluation
type AutoScalingMechanism = original.AutoScalingMechanism
type AutoScalingMetric = original.AutoScalingMetric
type AutoScalingPolicy = original.AutoScalingPolicy
type AutoScalingResourceMetric = original.AutoScalingResourceMetric
type AutoScalingTrigger = original.AutoScalingTrigger
type AverageLoadScalingTrigger = original.AverageLoadScalingTrigger
type AveragePartitionLoadScalingTrigger = original.AveragePartitionLoadScalingTrigger
type AverageServiceLoadScalingTrigger = original.AverageServiceLoadScalingTrigger
type AzureBlobBackupStorageDescription = original.AzureBlobBackupStorageDescription
type AzureInternalMonitoringPipelineSinkDescription = original.AzureInternalMonitoringPipelineSinkDescription
type BackupConfigurationInfo = original.BackupConfigurationInfo
type BackupEntity = original.BackupEntity
type BackupInfo = original.BackupInfo
type BackupPartitionDescription = original.BackupPartitionDescription
type BackupPolicyDescription = original.BackupPolicyDescription
type BackupProgressInfo = original.BackupProgressInfo
type BackupScheduleDescription = original.BackupScheduleDescription
type BackupStorageDescription = original.BackupStorageDescription
type BackupSuspensionInfo = original.BackupSuspensionInfo
type BaseClient = original.BaseClient
type BasicApplicationEvent = original.BasicApplicationEvent
type BasicApplicationScopedVolumeCreationParameters = original.BasicApplicationScopedVolumeCreationParameters
type BasicAutoScalingMechanism = original.BasicAutoScalingMechanism
type BasicAutoScalingMetric = original.BasicAutoScalingMetric
type BasicAutoScalingTrigger = original.BasicAutoScalingTrigger
type BasicBackupConfigurationInfo = original.BasicBackupConfigurationInfo
type BasicBackupEntity = original.BasicBackupEntity
type BasicBackupScheduleDescription = original.BasicBackupScheduleDescription
type BasicBackupStorageDescription = original.BasicBackupStorageDescription
type BasicBasicRetentionPolicyDescription = original.BasicBasicRetentionPolicyDescription
type BasicChaosEvent = original.BasicChaosEvent
type BasicClusterEvent = original.BasicClusterEvent
type BasicDeployedServiceReplicaDetailInfo = original.BasicDeployedServiceReplicaDetailInfo
type BasicDeployedServiceReplicaInfo = original.BasicDeployedServiceReplicaInfo
type BasicDiagnosticsSinkProperties = original.BasicDiagnosticsSinkProperties
type BasicFabricEvent = original.BasicFabricEvent
type BasicHealthEvaluation = original.BasicHealthEvaluation
type BasicNetworkResourceProperties = original.BasicNetworkResourceProperties
type BasicNetworkResourcePropertiesBase = original.BasicNetworkResourcePropertiesBase
type BasicNodeEvent = original.BasicNodeEvent
type BasicPartitionAnalysisEvent = original.BasicPartitionAnalysisEvent
type BasicPartitionEvent = original.BasicPartitionEvent
type BasicPartitionInformation = original.BasicPartitionInformation
type BasicPartitionSafetyCheck = original.BasicPartitionSafetyCheck
type BasicPartitionSchemeDescription = original.BasicPartitionSchemeDescription
type BasicPropertyBatchInfo = original.BasicPropertyBatchInfo
type BasicPropertyBatchOperation = original.BasicPropertyBatchOperation
type BasicPropertyValue = original.BasicPropertyValue
type BasicProvisionApplicationTypeDescriptionBase = original.BasicProvisionApplicationTypeDescriptionBase
type BasicRepairImpactDescriptionBase = original.BasicRepairImpactDescriptionBase
type BasicRepairTargetDescriptionBase = original.BasicRepairTargetDescriptionBase
type BasicReplicaEvent = original.BasicReplicaEvent
type BasicReplicaHealth = original.BasicReplicaHealth
type BasicReplicaHealthState = original.BasicReplicaHealthState
type BasicReplicaInfo = original.BasicReplicaInfo
type BasicReplicaStatusBase = original.BasicReplicaStatusBase
type BasicReplicatorStatus = original.BasicReplicatorStatus
type BasicRetentionPolicyDescription = original.BasicRetentionPolicyDescription
type BasicSafetyCheck = original.BasicSafetyCheck
type BasicScalingMechanismDescription = original.BasicScalingMechanismDescription
type BasicScalingTriggerDescription = original.BasicScalingTriggerDescription
type BasicSecondaryReplicatorStatus = original.BasicSecondaryReplicatorStatus
type BasicSecretResourceProperties = original.BasicSecretResourceProperties
type BasicSecretResourcePropertiesBase = original.BasicSecretResourcePropertiesBase
type BasicServiceDescription = original.BasicServiceDescription
type BasicServiceEvent = original.BasicServiceEvent
type BasicServiceInfo = original.BasicServiceInfo
type BasicServicePartitionInfo = original.BasicServicePartitionInfo
type BasicServicePlacementPolicyDescription = original.BasicServicePlacementPolicyDescription
type BasicServiceTypeDescription = original.BasicServiceTypeDescription
type BasicServiceUpdateDescription = original.BasicServiceUpdateDescription
type BinaryPropertyValue = original.BinaryPropertyValue
type Chaos = original.Chaos
type ChaosCodePackageRestartScheduledEvent = original.ChaosCodePackageRestartScheduledEvent
type ChaosContext = original.ChaosContext
type ChaosEvent = original.ChaosEvent
type ChaosEventWrapper = original.ChaosEventWrapper
type ChaosEventsSegment = original.ChaosEventsSegment
type ChaosNodeRestartScheduledEvent = original.ChaosNodeRestartScheduledEvent
type ChaosParameters = original.ChaosParameters
type ChaosParametersDictionaryItem = original.ChaosParametersDictionaryItem
type ChaosPartitionPrimaryMoveScheduledEvent = original.ChaosPartitionPrimaryMoveScheduledEvent
type ChaosPartitionSecondaryMoveScheduledEvent = original.ChaosPartitionSecondaryMoveScheduledEvent
type ChaosReplicaRemovalScheduledEvent = original.ChaosReplicaRemovalScheduledEvent
type ChaosReplicaRestartScheduledEvent = original.ChaosReplicaRestartScheduledEvent
type ChaosSchedule = original.ChaosSchedule
type ChaosScheduleDescription = original.ChaosScheduleDescription
type ChaosScheduleJob = original.ChaosScheduleJob
type ChaosScheduleJobActiveDaysOfWeek = original.ChaosScheduleJobActiveDaysOfWeek
type ChaosStartedEvent = original.ChaosStartedEvent
type ChaosStoppedEvent = original.ChaosStoppedEvent
type ChaosTargetFilter = original.ChaosTargetFilter
type CheckExistsPropertyBatchOperation = original.CheckExistsPropertyBatchOperation
type CheckSequencePropertyBatchOperation = original.CheckSequencePropertyBatchOperation
type CheckValuePropertyBatchOperation = original.CheckValuePropertyBatchOperation
type ClusterConfiguration = original.ClusterConfiguration
type ClusterConfigurationUpgradeDescription = original.ClusterConfigurationUpgradeDescription
type ClusterConfigurationUpgradeStatusInfo = original.ClusterConfigurationUpgradeStatusInfo
type ClusterEvent = original.ClusterEvent
type ClusterHealth = original.ClusterHealth
type ClusterHealthChunk = original.ClusterHealthChunk
type ClusterHealthChunkQueryDescription = original.ClusterHealthChunkQueryDescription
type ClusterHealthPolicies = original.ClusterHealthPolicies
type ClusterHealthPolicy = original.ClusterHealthPolicy
type ClusterHealthReportExpiredEvent = original.ClusterHealthReportExpiredEvent
type ClusterLoadInfo = original.ClusterLoadInfo
type ClusterManifest = original.ClusterManifest
type ClusterNewHealthReportEvent = original.ClusterNewHealthReportEvent
type ClusterUpgradeCompletedEvent = original.ClusterUpgradeCompletedEvent
type ClusterUpgradeDescriptionObject = original.ClusterUpgradeDescriptionObject
type ClusterUpgradeDomainCompletedEvent = original.ClusterUpgradeDomainCompletedEvent
type ClusterUpgradeHealthPolicyObject = original.ClusterUpgradeHealthPolicyObject
type ClusterUpgradeProgressObject = original.ClusterUpgradeProgressObject
type ClusterUpgradeRollbackCompletedEvent = original.ClusterUpgradeRollbackCompletedEvent
type ClusterUpgradeRollbackStartedEvent = original.ClusterUpgradeRollbackStartedEvent
type ClusterUpgradeStartedEvent = original.ClusterUpgradeStartedEvent
type ClusterVersion = original.ClusterVersion
type CodePackageEntryPoint = original.CodePackageEntryPoint
type CodePackageEntryPointStatistics = original.CodePackageEntryPointStatistics
type ComposeDeploymentStatusInfo = original.ComposeDeploymentStatusInfo
type ComposeDeploymentUpgradeDescription = original.ComposeDeploymentUpgradeDescription
type ComposeDeploymentUpgradeProgressInfo = original.ComposeDeploymentUpgradeProgressInfo
type ContainerAPIRequestBody = original.ContainerAPIRequestBody
type ContainerAPIResponse = original.ContainerAPIResponse
type ContainerAPIResult = original.ContainerAPIResult
type ContainerCodePackageProperties = original.ContainerCodePackageProperties
type ContainerEvent = original.ContainerEvent
type ContainerInstanceEvent = original.ContainerInstanceEvent
type ContainerInstanceView = original.ContainerInstanceView
type ContainerLabel = original.ContainerLabel
type ContainerLogs = original.ContainerLogs
type ContainerState = original.ContainerState
type CreateComposeDeploymentDescription = original.CreateComposeDeploymentDescription
type CurrentUpgradeDomainProgressInfo = original.CurrentUpgradeDomainProgressInfo
type DeactivationIntentDescription = original.DeactivationIntentDescription
type DeletePropertyBatchOperation = original.DeletePropertyBatchOperation
type DeltaNodesCheckHealthEvaluation = original.DeltaNodesCheckHealthEvaluation
type DeployServicePackageToNodeDescription = original.DeployServicePackageToNodeDescription
type DeployedApplicationHealth = original.DeployedApplicationHealth
type DeployedApplicationHealthEvaluation = original.DeployedApplicationHealthEvaluation
type DeployedApplicationHealthReportExpiredEvent = original.DeployedApplicationHealthReportExpiredEvent
type DeployedApplicationHealthState = original.DeployedApplicationHealthState
type DeployedApplicationHealthStateChunk = original.DeployedApplicationHealthStateChunk
type DeployedApplicationHealthStateChunkList = original.DeployedApplicationHealthStateChunkList
type DeployedApplicationHealthStateFilter = original.DeployedApplicationHealthStateFilter
type DeployedApplicationInfo = original.DeployedApplicationInfo
type DeployedApplicationNewHealthReportEvent = original.DeployedApplicationNewHealthReportEvent
type DeployedApplicationsHealthEvaluation = original.DeployedApplicationsHealthEvaluation
type DeployedCodePackageInfo = original.DeployedCodePackageInfo
type DeployedServicePackageHealth = original.DeployedServicePackageHealth
type DeployedServicePackageHealthEvaluation = original.DeployedServicePackageHealthEvaluation
type DeployedServicePackageHealthReportExpiredEvent = original.DeployedServicePackageHealthReportExpiredEvent
type DeployedServicePackageHealthState = original.DeployedServicePackageHealthState
type DeployedServicePackageHealthStateChunk = original.DeployedServicePackageHealthStateChunk
type DeployedServicePackageHealthStateChunkList = original.DeployedServicePackageHealthStateChunkList
type DeployedServicePackageHealthStateFilter = original.DeployedServicePackageHealthStateFilter
type DeployedServicePackageInfo = original.DeployedServicePackageInfo
type DeployedServicePackageNewHealthReportEvent = original.DeployedServicePackageNewHealthReportEvent
type DeployedServicePackagesHealthEvaluation = original.DeployedServicePackagesHealthEvaluation
type DeployedServiceReplicaDetailInfo = original.DeployedServiceReplicaDetailInfo
type DeployedServiceReplicaDetailInfoModel = original.DeployedServiceReplicaDetailInfoModel
type DeployedServiceReplicaInfo = original.DeployedServiceReplicaInfo
type DeployedServiceTypeInfo = original.DeployedServiceTypeInfo
type DeployedStatefulServiceReplicaDetailInfo = original.DeployedStatefulServiceReplicaDetailInfo
type DeployedStatefulServiceReplicaInfo = original.DeployedStatefulServiceReplicaInfo
type DeployedStatelessServiceInstanceDetailInfo = original.DeployedStatelessServiceInstanceDetailInfo
type DeployedStatelessServiceInstanceInfo = original.DeployedStatelessServiceInstanceInfo
type DiagnosticsDescription = original.DiagnosticsDescription
type DiagnosticsRef = original.DiagnosticsRef
type DiagnosticsSinkProperties = original.DiagnosticsSinkProperties
type DisableBackupDescription = original.DisableBackupDescription
type DoublePropertyValue = original.DoublePropertyValue
type EnableBackupDescription = original.EnableBackupDescription
type EndpointProperties = original.EndpointProperties
type EndpointRef = original.EndpointRef
type EnsureAvailabilitySafetyCheck = original.EnsureAvailabilitySafetyCheck
type EnsurePartitionQuorumSafetyCheck = original.EnsurePartitionQuorumSafetyCheck
type EntityHealth = original.EntityHealth
type EntityHealthState = original.EntityHealthState
type EntityHealthStateChunk = original.EntityHealthStateChunk
type EntityHealthStateChunkList = original.EntityHealthStateChunkList
type EntityKindHealthStateCount = original.EntityKindHealthStateCount
type EnvironmentVariable = original.EnvironmentVariable
type Epoch = original.Epoch
type EventHealthEvaluation = original.EventHealthEvaluation
type ExecutingFaultsChaosEvent = original.ExecutingFaultsChaosEvent
type ExternalStoreProvisionApplicationTypeDescription = original.ExternalStoreProvisionApplicationTypeDescription
type FabricCodeVersionInfo = original.FabricCodeVersionInfo
type FabricConfigVersionInfo = original.FabricConfigVersionInfo
type FabricError = original.FabricError
type FabricErrorError = original.FabricErrorError
type FabricEvent = original.FabricEvent
type FailedPropertyBatchInfo = original.FailedPropertyBatchInfo
type FailedUpgradeDomainProgressObject = original.FailedUpgradeDomainProgressObject
type FailureUpgradeDomainProgressInfo = original.FailureUpgradeDomainProgressInfo
type FileInfo = original.FileInfo
type FileShareBackupStorageDescription = original.FileShareBackupStorageDescription
type FileVersion = original.FileVersion
type FolderInfo = original.FolderInfo
type FolderSizeInfo = original.FolderSizeInfo
type FrequencyBasedBackupScheduleDescription = original.FrequencyBasedBackupScheduleDescription
type GUIDPropertyValue = original.GUIDPropertyValue
type GatewayDestination = original.GatewayDestination
type GatewayProperties = original.GatewayProperties
type GatewayResourceDescription = original.GatewayResourceDescription
type GetBackupByStorageQueryDescription = original.GetBackupByStorageQueryDescription
type GetPropertyBatchOperation = original.GetPropertyBatchOperation
type HTTPConfig = original.HTTPConfig
type HTTPHostConfig = original.HTTPHostConfig
type HTTPRouteConfig = original.HTTPRouteConfig
type HTTPRouteMatchHeader = original.HTTPRouteMatchHeader
type HTTPRouteMatchPath = original.HTTPRouteMatchPath
type HTTPRouteMatchRule = original.HTTPRouteMatchRule
type HealthEvaluation = original.HealthEvaluation
type HealthEvaluationWrapper = original.HealthEvaluationWrapper
type HealthEvent = original.HealthEvent
type HealthInformation = original.HealthInformation
type HealthStateCount = original.HealthStateCount
type HealthStatistics = original.HealthStatistics
type IdentityDescription = original.IdentityDescription
type IdentityItemDescription = original.IdentityItemDescription
type ImageRegistryCredential = original.ImageRegistryCredential
type ImageStoreContent = original.ImageStoreContent
type ImageStoreCopyDescription = original.ImageStoreCopyDescription
type InlinedValueSecretResourceProperties = original.InlinedValueSecretResourceProperties
type Int64PropertyValue = original.Int64PropertyValue
type Int64RangePartitionInformation = original.Int64RangePartitionInformation
type InvokeDataLossResult = original.InvokeDataLossResult
type InvokeQuorumLossResult = original.InvokeQuorumLossResult
type KeyValueStoreReplicaStatus = original.KeyValueStoreReplicaStatus
type ListApplicationEvent = original.ListApplicationEvent
type ListClusterEvent = original.ListClusterEvent
type ListContainerInstanceEvent = original.ListContainerInstanceEvent
type ListDeployedCodePackageInfo = original.ListDeployedCodePackageInfo
type ListDeployedServicePackageInfo = original.ListDeployedServicePackageInfo
type ListDeployedServiceReplicaInfo = original.ListDeployedServiceReplicaInfo
type ListDeployedServiceTypeInfo = original.ListDeployedServiceTypeInfo
type ListFabricCodeVersionInfo = original.ListFabricCodeVersionInfo
type ListFabricConfigVersionInfo = original.ListFabricConfigVersionInfo
type ListFabricEvent = original.ListFabricEvent
type ListNodeEvent = original.ListNodeEvent
type ListOperationStatus = original.ListOperationStatus
type ListPartitionEvent = original.ListPartitionEvent
type ListRepairTask = original.ListRepairTask
type ListReplicaEvent = original.ListReplicaEvent
type ListServiceEvent = original.ListServiceEvent
type ListServiceTypeInfo = original.ListServiceTypeInfo
type LoadMetricInformation = original.LoadMetricInformation
type LoadMetricReport = original.LoadMetricReport
type LoadMetricReportInfo = original.LoadMetricReportInfo
type LocalNetworkResourceProperties = original.LocalNetworkResourceProperties
type ManagedApplicationIdentity = original.ManagedApplicationIdentity
type ManagedApplicationIdentityDescription = original.ManagedApplicationIdentityDescription
type MeshApplicationClient = original.MeshApplicationClient
type MeshCodePackageClient = original.MeshCodePackageClient
type MeshGatewayClient = original.MeshGatewayClient
type MeshNetworkClient = original.MeshNetworkClient
type MeshSecretClient = original.MeshSecretClient
type MeshSecretValueClient = original.MeshSecretValueClient
type MeshServiceClient = original.MeshServiceClient
type MeshServiceReplicaClient = original.MeshServiceReplicaClient
type MeshVolumeClient = original.MeshVolumeClient
type MonitoringPolicyDescription = original.MonitoringPolicyDescription
type NameDescription = original.NameDescription
type NamedPartitionInformation = original.NamedPartitionInformation
type NamedPartitionSchemeDescription = original.NamedPartitionSchemeDescription
type NetworkRef = original.NetworkRef
type NetworkResourceDescription = original.NetworkResourceDescription
type NetworkResourceProperties = original.NetworkResourceProperties
type NetworkResourcePropertiesBase = original.NetworkResourcePropertiesBase
type NodeAbortedEvent = original.NodeAbortedEvent
type NodeAddedToClusterEvent = original.NodeAddedToClusterEvent
type NodeClosedEvent = original.NodeClosedEvent
type NodeDeactivateCompletedEvent = original.NodeDeactivateCompletedEvent
type NodeDeactivateStartedEvent = original.NodeDeactivateStartedEvent
type NodeDeactivationInfo = original.NodeDeactivationInfo
type NodeDeactivationTask = original.NodeDeactivationTask
type NodeDeactivationTaskID = original.NodeDeactivationTaskID
type NodeDownEvent = original.NodeDownEvent
type NodeEvent = original.NodeEvent
type NodeHealth = original.NodeHealth
type NodeHealthEvaluation = original.NodeHealthEvaluation
type NodeHealthReportExpiredEvent = original.NodeHealthReportExpiredEvent
type NodeHealthState = original.NodeHealthState
type NodeHealthStateChunk = original.NodeHealthStateChunk
type NodeHealthStateChunkList = original.NodeHealthStateChunkList
type NodeHealthStateFilter = original.NodeHealthStateFilter
type NodeID = original.NodeID
type NodeImpact = original.NodeImpact
type NodeInfo = original.NodeInfo
type NodeLoadInfo = original.NodeLoadInfo
type NodeLoadMetricInformation = original.NodeLoadMetricInformation
type NodeNewHealthReportEvent = original.NodeNewHealthReportEvent
type NodeOpenFailedEvent = original.NodeOpenFailedEvent
type NodeOpenSucceededEvent = original.NodeOpenSucceededEvent
type NodeRemovedFromClusterEvent = original.NodeRemovedFromClusterEvent
type NodeRepairImpactDescription = original.NodeRepairImpactDescription
type NodeRepairTargetDescription = original.NodeRepairTargetDescription
type NodeResult = original.NodeResult
type NodeTransitionProgress = original.NodeTransitionProgress
type NodeTransitionResult = original.NodeTransitionResult
type NodeUpEvent = original.NodeUpEvent
type NodeUpgradeProgressInfo = original.NodeUpgradeProgressInfo
type NodesHealthEvaluation = original.NodesHealthEvaluation
type OperationStatus = original.OperationStatus
type PackageSharingPolicyInfo = original.PackageSharingPolicyInfo
type PagedApplicationInfoList = original.PagedApplicationInfoList
type PagedApplicationResourceDescriptionList = original.PagedApplicationResourceDescriptionList
type PagedApplicationTypeInfoList = original.PagedApplicationTypeInfoList
type PagedBackupConfigurationInfoList = original.PagedBackupConfigurationInfoList
type PagedBackupEntityList = original.PagedBackupEntityList
type PagedBackupInfoList = original.PagedBackupInfoList
type PagedBackupPolicyDescriptionList = original.PagedBackupPolicyDescriptionList
type PagedComposeDeploymentStatusInfoList = original.PagedComposeDeploymentStatusInfoList
type PagedDeployedApplicationInfoList = original.PagedDeployedApplicationInfoList
type PagedGatewayResourceDescriptionList = original.PagedGatewayResourceDescriptionList
type PagedNetworkResourceDescriptionList = original.PagedNetworkResourceDescriptionList
type PagedNodeInfoList = original.PagedNodeInfoList
type PagedPropertyInfoList = original.PagedPropertyInfoList
type PagedReplicaInfoList = original.PagedReplicaInfoList
type PagedSecretResourceDescriptionList = original.PagedSecretResourceDescriptionList
type PagedSecretValueResourceDescriptionList = original.PagedSecretValueResourceDescriptionList
type PagedServiceInfoList = original.PagedServiceInfoList
type PagedServicePartitionInfoList = original.PagedServicePartitionInfoList
type PagedServiceReplicaDescriptionList = original.PagedServiceReplicaDescriptionList
type PagedServiceResourceDescriptionList = original.PagedServiceResourceDescriptionList
type PagedSubNameInfoList = original.PagedSubNameInfoList
type PagedVolumeResourceDescriptionList = original.PagedVolumeResourceDescriptionList
type PartitionAnalysisEvent = original.PartitionAnalysisEvent
type PartitionBackupConfigurationInfo = original.PartitionBackupConfigurationInfo
type PartitionBackupEntity = original.PartitionBackupEntity
type PartitionDataLossProgress = original.PartitionDataLossProgress
type PartitionEvent = original.PartitionEvent
type PartitionHealth = original.PartitionHealth
type PartitionHealthEvaluation = original.PartitionHealthEvaluation
type PartitionHealthReportExpiredEvent = original.PartitionHealthReportExpiredEvent
type PartitionHealthState = original.PartitionHealthState
type PartitionHealthStateChunk = original.PartitionHealthStateChunk
type PartitionHealthStateChunkList = original.PartitionHealthStateChunkList
type PartitionHealthStateFilter = original.PartitionHealthStateFilter
type PartitionInformation = original.PartitionInformation
type PartitionInstanceCountScaleMechanism = original.PartitionInstanceCountScaleMechanism
type PartitionLoadInformation = original.PartitionLoadInformation
type PartitionNewHealthReportEvent = original.PartitionNewHealthReportEvent
type PartitionPrimaryMoveAnalysisEvent = original.PartitionPrimaryMoveAnalysisEvent
type PartitionQuorumLossProgress = original.PartitionQuorumLossProgress
type PartitionReconfiguredEvent = original.PartitionReconfiguredEvent
type PartitionRestartProgress = original.PartitionRestartProgress
type PartitionSafetyCheck = original.PartitionSafetyCheck
type PartitionSchemeDescription = original.PartitionSchemeDescription
type PartitionsHealthEvaluation = original.PartitionsHealthEvaluation
type PrimaryReplicatorStatus = original.PrimaryReplicatorStatus
type PropertyBatchDescriptionList = original.PropertyBatchDescriptionList
type PropertyBatchInfo = original.PropertyBatchInfo
type PropertyBatchInfoModel = original.PropertyBatchInfoModel
type PropertyBatchOperation = original.PropertyBatchOperation
type PropertyDescription = original.PropertyDescription
type PropertyInfo = original.PropertyInfo
type PropertyMetadata = original.PropertyMetadata
type PropertyValue = original.PropertyValue
type ProvisionApplicationTypeDescription = original.ProvisionApplicationTypeDescription
type ProvisionApplicationTypeDescriptionBase = original.ProvisionApplicationTypeDescriptionBase
type ProvisionFabricDescription = original.ProvisionFabricDescription
type PutPropertyBatchOperation = original.PutPropertyBatchOperation
type ReconfigurationInformation = original.ReconfigurationInformation
type RegistryCredential = original.RegistryCredential
type ReliableCollectionsRef = original.ReliableCollectionsRef
type RemoteReplicatorAcknowledgementDetail = original.RemoteReplicatorAcknowledgementDetail
type RemoteReplicatorAcknowledgementStatus = original.RemoteReplicatorAcknowledgementStatus
type RemoteReplicatorStatus = original.RemoteReplicatorStatus
type RepairImpactDescriptionBase = original.RepairImpactDescriptionBase
type RepairTargetDescriptionBase = original.RepairTargetDescriptionBase
type RepairTask = original.RepairTask
type RepairTaskApproveDescription = original.RepairTaskApproveDescription
type RepairTaskCancelDescription = original.RepairTaskCancelDescription
type RepairTaskDeleteDescription = original.RepairTaskDeleteDescription
type RepairTaskHistory = original.RepairTaskHistory
type RepairTaskUpdateHealthPolicyDescription = original.RepairTaskUpdateHealthPolicyDescription
type RepairTaskUpdateInfo = original.RepairTaskUpdateInfo
type ReplicaEvent = original.ReplicaEvent
type ReplicaHealth = original.ReplicaHealth
type ReplicaHealthEvaluation = original.ReplicaHealthEvaluation
type ReplicaHealthModel = original.ReplicaHealthModel
type ReplicaHealthState = original.ReplicaHealthState
type ReplicaHealthStateChunk = original.ReplicaHealthStateChunk
type ReplicaHealthStateChunkList = original.ReplicaHealthStateChunkList
type ReplicaHealthStateFilter = original.ReplicaHealthStateFilter
type ReplicaInfo = original.ReplicaInfo
type ReplicaInfoModel = original.ReplicaInfoModel
type ReplicaStatusBase = original.ReplicaStatusBase
type ReplicasHealthEvaluation = original.ReplicasHealthEvaluation
type ReplicatorQueueStatus = original.ReplicatorQueueStatus
type ReplicatorStatus = original.ReplicatorStatus
type ResolvedServiceEndpoint = original.ResolvedServiceEndpoint
type ResolvedServicePartition = original.ResolvedServicePartition
type ResourceLimits = original.ResourceLimits
type ResourceRequests = original.ResourceRequests
type ResourceRequirements = original.ResourceRequirements
type RestartDeployedCodePackageDescription = original.RestartDeployedCodePackageDescription
type RestartNodeDescription = original.RestartNodeDescription
type RestartPartitionResult = original.RestartPartitionResult
type RestorePartitionDescription = original.RestorePartitionDescription
type RestoreProgressInfo = original.RestoreProgressInfo
type ResumeApplicationUpgradeDescription = original.ResumeApplicationUpgradeDescription
type ResumeClusterUpgradeDescription = original.ResumeClusterUpgradeDescription
type RetentionPolicyDescription = original.RetentionPolicyDescription
type RollingUpgradeUpdateDescription = original.RollingUpgradeUpdateDescription
type SafetyCheck = original.SafetyCheck
type SafetyCheckWrapper = original.SafetyCheckWrapper
type ScalingMechanismDescription = original.ScalingMechanismDescription
type ScalingPolicyDescription = original.ScalingPolicyDescription
type ScalingTriggerDescription = original.ScalingTriggerDescription
type SecondaryActiveReplicatorStatus = original.SecondaryActiveReplicatorStatus
type SecondaryIdleReplicatorStatus = original.SecondaryIdleReplicatorStatus
type SecondaryReplicatorStatus = original.SecondaryReplicatorStatus
type SecretResourceDescription = original.SecretResourceDescription
type SecretResourceProperties = original.SecretResourceProperties
type SecretResourcePropertiesBase = original.SecretResourcePropertiesBase
type SecretValue = original.SecretValue
type SecretValueProperties = original.SecretValueProperties
type SecretValueResourceDescription = original.SecretValueResourceDescription
type SecretValueResourceProperties = original.SecretValueResourceProperties
type SeedNodeSafetyCheck = original.SeedNodeSafetyCheck
type SelectedPartition = original.SelectedPartition
type ServiceBackupConfigurationInfo = original.ServiceBackupConfigurationInfo
type ServiceBackupEntity = original.ServiceBackupEntity
type ServiceCorrelationDescription = original.ServiceCorrelationDescription
type ServiceCreatedEvent = original.ServiceCreatedEvent
type ServiceDeletedEvent = original.ServiceDeletedEvent
type ServiceDescription = original.ServiceDescription
type ServiceDescriptionModel = original.ServiceDescriptionModel
type ServiceEvent = original.ServiceEvent
type ServiceFromTemplateDescription = original.ServiceFromTemplateDescription
type ServiceHealth = original.ServiceHealth
type ServiceHealthEvaluation = original.ServiceHealthEvaluation
type ServiceHealthReportExpiredEvent = original.ServiceHealthReportExpiredEvent
type ServiceHealthState = original.ServiceHealthState
type ServiceHealthStateChunk = original.ServiceHealthStateChunk
type ServiceHealthStateChunkList = original.ServiceHealthStateChunkList
type ServiceHealthStateFilter = original.ServiceHealthStateFilter
type ServiceIdentity = original.ServiceIdentity
type ServiceInfo = original.ServiceInfo
type ServiceInfoModel = original.ServiceInfoModel
type ServiceLoadMetricDescription = original.ServiceLoadMetricDescription
type ServiceNameInfo = original.ServiceNameInfo
type ServiceNewHealthReportEvent = original.ServiceNewHealthReportEvent
type ServicePartitionInfo = original.ServicePartitionInfo
type ServicePartitionInfoModel = original.ServicePartitionInfoModel
type ServicePlacementInvalidDomainPolicyDescription = original.ServicePlacementInvalidDomainPolicyDescription
type ServicePlacementNonPartiallyPlaceServicePolicyDescription = original.ServicePlacementNonPartiallyPlaceServicePolicyDescription
type ServicePlacementPolicyDescription = original.ServicePlacementPolicyDescription
type ServicePlacementPreferPrimaryDomainPolicyDescription = original.ServicePlacementPreferPrimaryDomainPolicyDescription
type ServicePlacementRequireDomainDistributionPolicyDescription = original.ServicePlacementRequireDomainDistributionPolicyDescription
type ServicePlacementRequiredDomainPolicyDescription = original.ServicePlacementRequiredDomainPolicyDescription
type ServiceProperties = original.ServiceProperties
type ServiceReplicaDescription = original.ServiceReplicaDescription
type ServiceReplicaProperties = original.ServiceReplicaProperties
type ServiceResourceDescription = original.ServiceResourceDescription
type ServiceResourceProperties = original.ServiceResourceProperties
type ServiceTypeDescription = original.ServiceTypeDescription
type ServiceTypeExtensionDescription = original.ServiceTypeExtensionDescription
type ServiceTypeHealthPolicy = original.ServiceTypeHealthPolicy
type ServiceTypeHealthPolicyMapItem = original.ServiceTypeHealthPolicyMapItem
type ServiceTypeInfo = original.ServiceTypeInfo
type ServiceTypeManifest = original.ServiceTypeManifest
type ServiceUpdateDescription = original.ServiceUpdateDescription
type ServicesHealthEvaluation = original.ServicesHealthEvaluation
type Setting = original.Setting
type SingletonPartitionInformation = original.SingletonPartitionInformation
type SingletonPartitionSchemeDescription = original.SingletonPartitionSchemeDescription
type StartClusterUpgradeDescription = original.StartClusterUpgradeDescription
type StartedChaosEvent = original.StartedChaosEvent
type StatefulReplicaHealthReportExpiredEvent = original.StatefulReplicaHealthReportExpiredEvent
type StatefulReplicaNewHealthReportEvent = original.StatefulReplicaNewHealthReportEvent
type StatefulServiceDescription = original.StatefulServiceDescription
type StatefulServiceInfo = original.StatefulServiceInfo
type StatefulServicePartitionInfo = original.StatefulServicePartitionInfo
type StatefulServiceReplicaHealth = original.StatefulServiceReplicaHealth
type StatefulServiceReplicaHealthState = original.StatefulServiceReplicaHealthState
type StatefulServiceReplicaInfo = original.StatefulServiceReplicaInfo
type StatefulServiceTypeDescription = original.StatefulServiceTypeDescription
type StatefulServiceUpdateDescription = original.StatefulServiceUpdateDescription
type StatelessReplicaHealthReportExpiredEvent = original.StatelessReplicaHealthReportExpiredEvent
type StatelessReplicaNewHealthReportEvent = original.StatelessReplicaNewHealthReportEvent
type StatelessServiceDescription = original.StatelessServiceDescription
type StatelessServiceInfo = original.StatelessServiceInfo
type StatelessServiceInstanceHealth = original.StatelessServiceInstanceHealth
type StatelessServiceInstanceHealthState = original.StatelessServiceInstanceHealthState
type StatelessServiceInstanceInfo = original.StatelessServiceInstanceInfo
type StatelessServicePartitionInfo = original.StatelessServicePartitionInfo
type StatelessServiceTypeDescription = original.StatelessServiceTypeDescription
type StatelessServiceUpdateDescription = original.StatelessServiceUpdateDescription
type StoppedChaosEvent = original.StoppedChaosEvent
type String = original.String
type StringPropertyValue = original.StringPropertyValue
type SuccessfulPropertyBatchInfo = original.SuccessfulPropertyBatchInfo
type SystemApplicationHealthEvaluation = original.SystemApplicationHealthEvaluation
type TCPConfig = original.TCPConfig
type TestErrorChaosEvent = original.TestErrorChaosEvent
type TimeBasedBackupScheduleDescription = original.TimeBasedBackupScheduleDescription
type TimeOfDay = original.TimeOfDay
type TimeRange = original.TimeRange
type UniformInt64RangePartitionSchemeDescription = original.UniformInt64RangePartitionSchemeDescription
type UnplacedReplicaInformation = original.UnplacedReplicaInformation
type UnprovisionApplicationTypeDescriptionInfo = original.UnprovisionApplicationTypeDescriptionInfo
type UnprovisionFabricDescription = original.UnprovisionFabricDescription
type UpdateClusterUpgradeDescription = original.UpdateClusterUpgradeDescription
type UpgradeDomainDeltaNodesCheckHealthEvaluation = original.UpgradeDomainDeltaNodesCheckHealthEvaluation
type UpgradeDomainInfo = original.UpgradeDomainInfo
type UpgradeDomainNodesHealthEvaluation = original.UpgradeDomainNodesHealthEvaluation
type UpgradeOrchestrationServiceState = original.UpgradeOrchestrationServiceState
type UpgradeOrchestrationServiceStateSummary = original.UpgradeOrchestrationServiceStateSummary
type UploadChunkRange = original.UploadChunkRange
type UploadSession = original.UploadSession
type UploadSessionInfo = original.UploadSessionInfo
type ValidationFailedChaosEvent = original.ValidationFailedChaosEvent
type VolumeProperties = original.VolumeProperties
type VolumeProviderParametersAzureFile = original.VolumeProviderParametersAzureFile
type VolumeReference = original.VolumeReference
type VolumeResourceDescription = original.VolumeResourceDescription
type WaitForInbuildReplicaSafetyCheck = original.WaitForInbuildReplicaSafetyCheck
type WaitForPrimaryPlacementSafetyCheck = original.WaitForPrimaryPlacementSafetyCheck
type WaitForPrimarySwapSafetyCheck = original.WaitForPrimarySwapSafetyCheck
type WaitForReconfigurationSafetyCheck = original.WaitForReconfigurationSafetyCheck
type WaitingChaosEvent = original.WaitingChaosEvent
func New() BaseClient {
return original.New()
}
func NewMeshApplicationClient() MeshApplicationClient {
return original.NewMeshApplicationClient()
}
func NewMeshApplicationClientWithBaseURI(baseURI string) MeshApplicationClient {
return original.NewMeshApplicationClientWithBaseURI(baseURI)
}
func NewMeshCodePackageClient() MeshCodePackageClient {
return original.NewMeshCodePackageClient()
}
func NewMeshCodePackageClientWithBaseURI(baseURI string) MeshCodePackageClient {
return original.NewMeshCodePackageClientWithBaseURI(baseURI)
}
func NewMeshGatewayClient() MeshGatewayClient {
return original.NewMeshGatewayClient()
}
func NewMeshGatewayClientWithBaseURI(baseURI string) MeshGatewayClient {
return original.NewMeshGatewayClientWithBaseURI(baseURI)
}
func NewMeshNetworkClient() MeshNetworkClient {
return original.NewMeshNetworkClient()
}
func NewMeshNetworkClientWithBaseURI(baseURI string) MeshNetworkClient {
return original.NewMeshNetworkClientWithBaseURI(baseURI)
}
func NewMeshSecretClient() MeshSecretClient {
return original.NewMeshSecretClient()
}
func NewMeshSecretClientWithBaseURI(baseURI string) MeshSecretClient {
return original.NewMeshSecretClientWithBaseURI(baseURI)
}
func NewMeshSecretValueClient() MeshSecretValueClient {
return original.NewMeshSecretValueClient()
}
func NewMeshSecretValueClientWithBaseURI(baseURI string) MeshSecretValueClient {
return original.NewMeshSecretValueClientWithBaseURI(baseURI)
}
func NewMeshServiceClient() MeshServiceClient {
return original.NewMeshServiceClient()
}
func NewMeshServiceClientWithBaseURI(baseURI string) MeshServiceClient {
return original.NewMeshServiceClientWithBaseURI(baseURI)
}
func NewMeshServiceReplicaClient() MeshServiceReplicaClient {
return original.NewMeshServiceReplicaClient()
}
func NewMeshServiceReplicaClientWithBaseURI(baseURI string) MeshServiceReplicaClient {
return original.NewMeshServiceReplicaClientWithBaseURI(baseURI)
}
func NewMeshVolumeClient() MeshVolumeClient {
return original.NewMeshVolumeClient()
}
func NewMeshVolumeClientWithBaseURI(baseURI string) MeshVolumeClient {
return original.NewMeshVolumeClientWithBaseURI(baseURI)
}
func NewWithBaseURI(baseURI string) BaseClient {
return original.NewWithBaseURI(baseURI)
}
func PossibleApplicationDefinitionKindValues() []ApplicationDefinitionKind {
return original.PossibleApplicationDefinitionKindValues()
}
func PossibleApplicationPackageCleanupPolicyValues() []ApplicationPackageCleanupPolicy {
return original.PossibleApplicationPackageCleanupPolicyValues()
}
func PossibleApplicationScopedVolumeKindValues() []ApplicationScopedVolumeKind {
return original.PossibleApplicationScopedVolumeKindValues()
}
func PossibleApplicationStatusValues() []ApplicationStatus {
return original.PossibleApplicationStatusValues()
}
func PossibleApplicationTypeDefinitionKindValues() []ApplicationTypeDefinitionKind {
return original.PossibleApplicationTypeDefinitionKindValues()
}
func PossibleApplicationTypeStatusValues() []ApplicationTypeStatus {
return original.PossibleApplicationTypeStatusValues()
}
func PossibleAutoScalingMechanismKindValues() []AutoScalingMechanismKind {
return original.PossibleAutoScalingMechanismKindValues()
}
func PossibleAutoScalingMetricKindValues() []AutoScalingMetricKind {
return original.PossibleAutoScalingMetricKindValues()
}
func PossibleAutoScalingResourceMetricNameValues() []AutoScalingResourceMetricName {
return original.PossibleAutoScalingResourceMetricNameValues()
}
func PossibleAutoScalingTriggerKindValues() []AutoScalingTriggerKind {
return original.PossibleAutoScalingTriggerKindValues()
}
func PossibleBackupEntityKindValues() []BackupEntityKind {
return original.PossibleBackupEntityKindValues()
}
func PossibleBackupPolicyScopeValues() []BackupPolicyScope {
return original.PossibleBackupPolicyScopeValues()
}
func PossibleBackupScheduleFrequencyTypeValues() []BackupScheduleFrequencyType {
return original.PossibleBackupScheduleFrequencyTypeValues()
}
func PossibleBackupScheduleKindValues() []BackupScheduleKind {
return original.PossibleBackupScheduleKindValues()
}
func PossibleBackupStateValues() []BackupState {
return original.PossibleBackupStateValues()
}
func PossibleBackupStorageKindValues() []BackupStorageKind {
return original.PossibleBackupStorageKindValues()
}
func PossibleBackupSuspensionScopeValues() []BackupSuspensionScope {
return original.PossibleBackupSuspensionScopeValues()
}
func PossibleBackupTypeValues() []BackupType {
return original.PossibleBackupTypeValues()
}
func PossibleChaosEventKindValues() []ChaosEventKind {
return original.PossibleChaosEventKindValues()
}
func PossibleChaosScheduleStatusValues() []ChaosScheduleStatus {
return original.PossibleChaosScheduleStatusValues()
}
func PossibleChaosStatusValues() []ChaosStatus {
return original.PossibleChaosStatusValues()
}
func PossibleComposeDeploymentStatusValues() []ComposeDeploymentStatus {
return original.PossibleComposeDeploymentStatusValues()
}
func PossibleComposeDeploymentUpgradeStateValues() []ComposeDeploymentUpgradeState {
return original.PossibleComposeDeploymentUpgradeStateValues()
}
func PossibleCreateFabricDumpValues() []CreateFabricDump {
return original.PossibleCreateFabricDumpValues()
}
func PossibleDataLossModeValues() []DataLossMode {
return original.PossibleDataLossModeValues()
}
func PossibleDayOfWeekValues() []DayOfWeek {
return original.PossibleDayOfWeekValues()
}
func PossibleDeactivationIntentValues() []DeactivationIntent {
return original.PossibleDeactivationIntentValues()
}
func PossibleDeployedApplicationStatusValues() []DeployedApplicationStatus {
return original.PossibleDeployedApplicationStatusValues()
}
func PossibleDeploymentStatusValues() []DeploymentStatus {
return original.PossibleDeploymentStatusValues()
}
func PossibleDiagnosticsSinkKindValues() []DiagnosticsSinkKind {
return original.PossibleDiagnosticsSinkKindValues()
}
func PossibleEntityKindBasicBackupEntityValues() []EntityKindBasicBackupEntity {
return original.PossibleEntityKindBasicBackupEntityValues()
}
func PossibleEntityKindValues() []EntityKind {
return original.PossibleEntityKindValues()
}
func PossibleEntryPointStatusValues() []EntryPointStatus {
return original.PossibleEntryPointStatusValues()
}
func PossibleFabricErrorCodesValues() []FabricErrorCodes {
return original.PossibleFabricErrorCodesValues()
}
func PossibleFabricEventKindValues() []FabricEventKind {
return original.PossibleFabricEventKindValues()
}
func PossibleFabricReplicaStatusValues() []FabricReplicaStatus {
return original.PossibleFabricReplicaStatusValues()
}
func PossibleFailureActionValues() []FailureAction {
return original.PossibleFailureActionValues()
}
func PossibleFailureReasonValues() []FailureReason {
return original.PossibleFailureReasonValues()
}
func PossibleHeaderMatchTypeValues() []HeaderMatchType {
return original.PossibleHeaderMatchTypeValues()
}
func PossibleHealthEvaluationKindValues() []HealthEvaluationKind {
return original.PossibleHealthEvaluationKindValues()
}
func PossibleHealthStateValues() []HealthState {
return original.PossibleHealthStateValues()
}
func PossibleHostIsolationModeValues() []HostIsolationMode {
return original.PossibleHostIsolationModeValues()
}
func PossibleHostTypeValues() []HostType {
return original.PossibleHostTypeValues()
}
func PossibleImpactLevelValues() []ImpactLevel {
return original.PossibleImpactLevelValues()
}
func PossibleKindBasicApplicationScopedVolumeCreationParametersValues() []KindBasicApplicationScopedVolumeCreationParameters {
return original.PossibleKindBasicApplicationScopedVolumeCreationParametersValues()
}
func PossibleKindBasicAutoScalingMechanismValues() []KindBasicAutoScalingMechanism {
return original.PossibleKindBasicAutoScalingMechanismValues()
}
func PossibleKindBasicAutoScalingMetricValues() []KindBasicAutoScalingMetric {
return original.PossibleKindBasicAutoScalingMetricValues()
}
func PossibleKindBasicAutoScalingTriggerValues() []KindBasicAutoScalingTrigger {
return original.PossibleKindBasicAutoScalingTriggerValues()
}
func PossibleKindBasicBackupConfigurationInfoValues() []KindBasicBackupConfigurationInfo {
return original.PossibleKindBasicBackupConfigurationInfoValues()
}
func PossibleKindBasicChaosEventValues() []KindBasicChaosEvent {
return original.PossibleKindBasicChaosEventValues()
}
func PossibleKindBasicDiagnosticsSinkPropertiesValues() []KindBasicDiagnosticsSinkProperties {
return original.PossibleKindBasicDiagnosticsSinkPropertiesValues()
}
func PossibleKindBasicFabricEventValues() []KindBasicFabricEvent {
return original.PossibleKindBasicFabricEventValues()
}
func PossibleKindBasicNetworkResourcePropertiesBaseValues() []KindBasicNetworkResourcePropertiesBase {
return original.PossibleKindBasicNetworkResourcePropertiesBaseValues()
}
func PossibleKindBasicPropertyBatchInfoValues() []KindBasicPropertyBatchInfo {
return original.PossibleKindBasicPropertyBatchInfoValues()
}
func PossibleKindBasicPropertyBatchOperationValues() []KindBasicPropertyBatchOperation {
return original.PossibleKindBasicPropertyBatchOperationValues()
}
func PossibleKindBasicPropertyValueValues() []KindBasicPropertyValue {
return original.PossibleKindBasicPropertyValueValues()
}
func PossibleKindBasicProvisionApplicationTypeDescriptionBaseValues() []KindBasicProvisionApplicationTypeDescriptionBase {
return original.PossibleKindBasicProvisionApplicationTypeDescriptionBaseValues()
}
func PossibleKindBasicRepairImpactDescriptionBaseValues() []KindBasicRepairImpactDescriptionBase {
return original.PossibleKindBasicRepairImpactDescriptionBaseValues()
}
func PossibleKindBasicRepairTargetDescriptionBaseValues() []KindBasicRepairTargetDescriptionBase {
return original.PossibleKindBasicRepairTargetDescriptionBaseValues()
}
func PossibleKindBasicReplicaStatusBaseValues() []KindBasicReplicaStatusBase {
return original.PossibleKindBasicReplicaStatusBaseValues()
}
func PossibleKindBasicReplicatorStatusValues() []KindBasicReplicatorStatus {
return original.PossibleKindBasicReplicatorStatusValues()
}
func PossibleKindBasicSafetyCheckValues() []KindBasicSafetyCheck {
return original.PossibleKindBasicSafetyCheckValues()
}
func PossibleKindBasicScalingMechanismDescriptionValues() []KindBasicScalingMechanismDescription {
return original.PossibleKindBasicScalingMechanismDescriptionValues()
}
func PossibleKindBasicScalingTriggerDescriptionValues() []KindBasicScalingTriggerDescription {
return original.PossibleKindBasicScalingTriggerDescriptionValues()
}
func PossibleKindBasicSecretResourcePropertiesBaseValues() []KindBasicSecretResourcePropertiesBase {
return original.PossibleKindBasicSecretResourcePropertiesBaseValues()
}
func PossibleKindBasicServiceTypeDescriptionValues() []KindBasicServiceTypeDescription {
return original.PossibleKindBasicServiceTypeDescriptionValues()
}
func PossibleKindValues() []Kind {
return original.PossibleKindValues()
}
func PossibleMoveCostValues() []MoveCost {
return original.PossibleMoveCostValues()
}
func PossibleNetworkKindValues() []NetworkKind {
return original.PossibleNetworkKindValues()
}
func PossibleNodeDeactivationIntentValues() []NodeDeactivationIntent {
return original.PossibleNodeDeactivationIntentValues()
}
func PossibleNodeDeactivationStatusValues() []NodeDeactivationStatus {
return original.PossibleNodeDeactivationStatusValues()
}
func PossibleNodeDeactivationTaskTypeValues() []NodeDeactivationTaskType {
return original.PossibleNodeDeactivationTaskTypeValues()
}
func PossibleNodeStatusFilterValues() []NodeStatusFilter {
return original.PossibleNodeStatusFilterValues()
}
func PossibleNodeStatusValues() []NodeStatus {
return original.PossibleNodeStatusValues()
}
func PossibleNodeTransitionTypeValues() []NodeTransitionType {
return original.PossibleNodeTransitionTypeValues()
}
func PossibleNodeUpgradePhaseValues() []NodeUpgradePhase {
return original.PossibleNodeUpgradePhaseValues()
}
func PossibleOperatingSystemTypeValues() []OperatingSystemType {
return original.PossibleOperatingSystemTypeValues()
}
func PossibleOperationStateValues() []OperationState {
return original.PossibleOperationStateValues()
}
func PossibleOperationTypeValues() []OperationType {
return original.PossibleOperationTypeValues()
}
func PossiblePackageSharingPolicyScopeValues() []PackageSharingPolicyScope {
return original.PossiblePackageSharingPolicyScopeValues()
}
func PossiblePartitionAccessStatusValues() []PartitionAccessStatus {
return original.PossiblePartitionAccessStatusValues()
}
func PossiblePartitionSchemeBasicPartitionSchemeDescriptionValues() []PartitionSchemeBasicPartitionSchemeDescription {
return original.PossiblePartitionSchemeBasicPartitionSchemeDescriptionValues()
}
func PossiblePartitionSchemeValues() []PartitionScheme {
return original.PossiblePartitionSchemeValues()
}
func PossiblePropertyBatchInfoKindValues() []PropertyBatchInfoKind {
return original.PossiblePropertyBatchInfoKindValues()
}
func PossiblePropertyBatchOperationKindValues() []PropertyBatchOperationKind {
return original.PossiblePropertyBatchOperationKindValues()
}
func PossiblePropertyValueKindValues() []PropertyValueKind {
return original.PossiblePropertyValueKindValues()
}
func PossibleProvisionApplicationTypeKindValues() []ProvisionApplicationTypeKind {
return original.PossibleProvisionApplicationTypeKindValues()
}
func PossibleQuorumLossModeValues() []QuorumLossMode {
return original.PossibleQuorumLossModeValues()
}
func PossibleReconfigurationPhaseValues() []ReconfigurationPhase {
return original.PossibleReconfigurationPhaseValues()
}
func PossibleReconfigurationTypeValues() []ReconfigurationType {
return original.PossibleReconfigurationTypeValues()
}
func PossibleRepairImpactKindValues() []RepairImpactKind {
return original.PossibleRepairImpactKindValues()
}
func PossibleRepairTargetKindValues() []RepairTargetKind {
return original.PossibleRepairTargetKindValues()
}
func PossibleRepairTaskHealthCheckStateValues() []RepairTaskHealthCheckState {
return original.PossibleRepairTaskHealthCheckStateValues()
}
func PossibleReplicaHealthReportServiceKindValues() []ReplicaHealthReportServiceKind {
return original.PossibleReplicaHealthReportServiceKindValues()
}
func PossibleReplicaKindValues() []ReplicaKind {
return original.PossibleReplicaKindValues()
}
func PossibleReplicaRoleValues() []ReplicaRole {
return original.PossibleReplicaRoleValues()
}
func PossibleReplicaStatusValues() []ReplicaStatus {
return original.PossibleReplicaStatusValues()
}
func PossibleReplicatorOperationNameValues() []ReplicatorOperationName {
return original.PossibleReplicatorOperationNameValues()
}
func PossibleResourceStatusValues() []ResourceStatus {
return original.PossibleResourceStatusValues()
}
func PossibleRestartPartitionModeValues() []RestartPartitionMode {
return original.PossibleRestartPartitionModeValues()
}
func PossibleRestoreStateValues() []RestoreState {
return original.PossibleRestoreStateValues()
}
func PossibleResultStatusValues() []ResultStatus {
return original.PossibleResultStatusValues()
}
func PossibleRetentionPolicyTypeBasicBasicRetentionPolicyDescriptionValues() []RetentionPolicyTypeBasicBasicRetentionPolicyDescription {
return original.PossibleRetentionPolicyTypeBasicBasicRetentionPolicyDescriptionValues()
}
func PossibleRetentionPolicyTypeValues() []RetentionPolicyType {
return original.PossibleRetentionPolicyTypeValues()
}
func PossibleSafetyCheckKindValues() []SafetyCheckKind {
return original.PossibleSafetyCheckKindValues()
}
func PossibleScalingMechanismKindValues() []ScalingMechanismKind {
return original.PossibleScalingMechanismKindValues()
}
func PossibleScalingTriggerKindValues() []ScalingTriggerKind {
return original.PossibleScalingTriggerKindValues()
}
func PossibleScheduleKindValues() []ScheduleKind {
return original.PossibleScheduleKindValues()
}
func PossibleSecretKindValues() []SecretKind {
return original.PossibleSecretKindValues()
}
func PossibleServiceCorrelationSchemeValues() []ServiceCorrelationScheme {
return original.PossibleServiceCorrelationSchemeValues()
}
func PossibleServiceEndpointRoleValues() []ServiceEndpointRole {
return original.PossibleServiceEndpointRoleValues()
}
func PossibleServiceKindBasicDeployedServiceReplicaDetailInfoValues() []ServiceKindBasicDeployedServiceReplicaDetailInfo {
return original.PossibleServiceKindBasicDeployedServiceReplicaDetailInfoValues()
}
func PossibleServiceKindBasicDeployedServiceReplicaInfoValues() []ServiceKindBasicDeployedServiceReplicaInfo {
return original.PossibleServiceKindBasicDeployedServiceReplicaInfoValues()
}
func PossibleServiceKindBasicReplicaHealthStateValues() []ServiceKindBasicReplicaHealthState {
return original.PossibleServiceKindBasicReplicaHealthStateValues()
}
func PossibleServiceKindBasicReplicaHealthValues() []ServiceKindBasicReplicaHealth {
return original.PossibleServiceKindBasicReplicaHealthValues()
}
func PossibleServiceKindBasicReplicaInfoValues() []ServiceKindBasicReplicaInfo {
return original.PossibleServiceKindBasicReplicaInfoValues()
}
func PossibleServiceKindBasicServiceDescriptionValues() []ServiceKindBasicServiceDescription {
return original.PossibleServiceKindBasicServiceDescriptionValues()
}
func PossibleServiceKindBasicServiceInfoValues() []ServiceKindBasicServiceInfo {
return original.PossibleServiceKindBasicServiceInfoValues()
}
func PossibleServiceKindBasicServicePartitionInfoValues() []ServiceKindBasicServicePartitionInfo {
return original.PossibleServiceKindBasicServicePartitionInfoValues()
}
func PossibleServiceKindBasicServiceUpdateDescriptionValues() []ServiceKindBasicServiceUpdateDescription {
return original.PossibleServiceKindBasicServiceUpdateDescriptionValues()
}
func PossibleServiceKindValues() []ServiceKind {
return original.PossibleServiceKindValues()
}
func PossibleServiceLoadMetricWeightValues() []ServiceLoadMetricWeight {
return original.PossibleServiceLoadMetricWeightValues()
}
func PossibleServiceOperationNameValues() []ServiceOperationName {
return original.PossibleServiceOperationNameValues()
}
func PossibleServicePackageActivationModeValues() []ServicePackageActivationMode {
return original.PossibleServicePackageActivationModeValues()
}
func PossibleServicePartitionKindBasicPartitionInformationValues() []ServicePartitionKindBasicPartitionInformation {
return original.PossibleServicePartitionKindBasicPartitionInformationValues()
}
func PossibleServicePartitionKindValues() []ServicePartitionKind {
return original.PossibleServicePartitionKindValues()
}
func PossibleServicePartitionStatusValues() []ServicePartitionStatus {
return original.PossibleServicePartitionStatusValues()
}
func PossibleServicePlacementPolicyTypeValues() []ServicePlacementPolicyType {
return original.PossibleServicePlacementPolicyTypeValues()
}
func PossibleServiceStatusValues() []ServiceStatus {
return original.PossibleServiceStatusValues()
}
func PossibleServiceTypeRegistrationStatusValues() []ServiceTypeRegistrationStatus {
return original.PossibleServiceTypeRegistrationStatusValues()
}
func PossibleSizeTypesValues() []SizeTypes {
return original.PossibleSizeTypesValues()
}
func PossibleStateValues() []State {
return original.PossibleStateValues()
}
func PossibleStorageKindValues() []StorageKind {
return original.PossibleStorageKindValues()
}
func PossibleTypeValues() []Type {
return original.PossibleTypeValues()
}
func PossibleUpgradeDomainStateValues() []UpgradeDomainState {
return original.PossibleUpgradeDomainStateValues()
}
func PossibleUpgradeKindValues() []UpgradeKind {
return original.PossibleUpgradeKindValues()
}
func PossibleUpgradeModeValues() []UpgradeMode {
return original.PossibleUpgradeModeValues()
}
func PossibleUpgradeSortOrderValues() []UpgradeSortOrder {
return original.PossibleUpgradeSortOrderValues()
}
func PossibleUpgradeStateValues() []UpgradeState {
return original.PossibleUpgradeStateValues()
}
func PossibleUpgradeTypeValues() []UpgradeType {
return original.PossibleUpgradeTypeValues()
}
func PossibleVolumeProviderValues() []VolumeProvider {
return original.PossibleVolumeProviderValues()
}
func UserAgent() string {
return original.UserAgent() + " profiles/preview"
}
func Version() string {
return original.Version()
}
| pweil-/origin | vendor/github.com/Azure/azure-sdk-for-go/profiles/preview/servicefabric/servicefabric/models.go | GO | apache-2.0 | 148,564 |
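Because the profile package above only aliases types and forwards constructors to the underlying servicefabric package, consuming it looks the same as consuming the versioned SDK package. The sketch below is illustrative only: the import path is inferred from the vendored file path in the record, and the localhost gateway URI is an assumed placeholder rather than a documented default of this SDK.

package main

import (
	"fmt"

	// Import path assumed from the vendor path shown in the record above.
	servicefabric "github.com/Azure/azure-sdk-for-go/profiles/preview/servicefabric/servicefabric"
)

func main() {
	// NewWithBaseURI, UserAgent and Version are forwarding functions defined above.
	client := servicefabric.NewWithBaseURI("http://localhost:19080")
	fmt.Println("user agent:", servicefabric.UserAgent())
	fmt.Println("sdk version:", servicefabric.Version())

	// The Possible*Values helpers enumerate the allowed constants for an enum type.
	for _, state := range servicefabric.PossibleHealthStateValues() {
		fmt.Println("health state:", state)
	}

	_ = client // real calls would go through the BaseClient operation methods
}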
/*
* Copyright 2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.dataflow.core;
import org.springframework.cloud.deployer.spi.core.AppDeploymentRequest;
import org.springframework.core.style.ToStringCreator;
/**
* Description of an execution of a task including resource to be executed and how it was configured via Spring Cloud
* Data Flow
*
* @author Mark Pollack
* @author Michael Minella
* @since 2.3
*/
public class TaskManifest {
private AppDeploymentRequest taskDeploymentRequest;
private String platformName;
/**
* Name of the platform the related task execution was executed on.
*
* @return name of the platform
*/
public String getPlatformName() {
return platformName;
}
/**
* Name of the platform the related task execution was executed on.
*
* @param platformName platform name
*/
public void setPlatformName(String platformName) {
this.platformName = platformName;
}
/**
* {@code AppDeploymentRequest} representing the task being executed
*
* @return {@code AppDeploymentRequest}
*/
public AppDeploymentRequest getTaskDeploymentRequest() {
return taskDeploymentRequest;
}
/**
* Task deployment
*
* @param taskDeploymentRequest {@code AppDeploymentRequest}
*/
public void setTaskDeploymentRequest(AppDeploymentRequest taskDeploymentRequest) {
this.taskDeploymentRequest = taskDeploymentRequest;
}
	@Override
	public String toString() {
return (new ToStringCreator(this)).append("taskDeploymentRequest", this.taskDeploymentRequest).append("platformName", this.platformName).toString();
}
}
| mminella/spring-cloud-data | spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/TaskManifest.java | Java | apache-2.0 | 2,150 |
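As a usage illustration for the TaskManifest class above: a minimal, hypothetical helper that fills in the two fields and relies on the ToStringCreator-based toString for logging. How the AppDeploymentRequest itself gets built is deployer-specific and deliberately left out here.

import org.springframework.cloud.dataflow.core.TaskManifest;
import org.springframework.cloud.deployer.spi.core.AppDeploymentRequest;

class TaskManifestExample {

    // Hypothetical helper for illustration; the request would normally come from
    // the task launcher that prepared the deployment.
    static TaskManifest describe(AppDeploymentRequest request, String platformName) {
        TaskManifest manifest = new TaskManifest();
        manifest.setTaskDeploymentRequest(request);
        manifest.setPlatformName(platformName);
        System.out.println(manifest); // prints both fields via ToStringCreator
        return manifest;
    }
}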
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.store;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.shard.IndexEventListener;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexShardState;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardPath;
import org.elasticsearch.index.store.DirectoryService;
import org.elasticsearch.index.store.IndexStore;
import org.elasticsearch.index.store.IndexStoreConfig;
import org.elasticsearch.plugins.Plugin;
import java.util.Collections;
import java.util.EnumSet;
import java.util.IdentityHashMap;
import java.util.Map;
public class MockFSIndexStore extends IndexStore {
public static final Setting<Boolean> INDEX_CHECK_INDEX_ON_CLOSE_SETTING =
Setting.boolSetting("index.store.mock.check_index_on_close", true, Property.IndexScope, Property.NodeScope);
public static class TestPlugin extends Plugin {
@Override
public String name() {
return "mock-index-store";
}
@Override
public String description() {
return "a mock index store for testing";
}
@Override
public Settings additionalSettings() {
return Settings.builder().put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), "mock").build();
}
public void onModule(SettingsModule module) {
module.registerSetting(INDEX_CHECK_INDEX_ON_CLOSE_SETTING);
module.registerSetting(MockFSDirectoryService.CRASH_INDEX_SETTING);
module.registerSetting(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_SETTING);
module.registerSetting(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE_SETTING);
module.registerSetting(MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE_SETTING);
module.registerSetting(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING);
}
@Override
public void onIndexModule(IndexModule indexModule) {
Settings indexSettings = indexModule.getSettings();
if ("mock".equals(indexSettings.get(IndexModule.INDEX_STORE_TYPE_SETTING.getKey()))) {
if (INDEX_CHECK_INDEX_ON_CLOSE_SETTING.get(indexSettings)) {
indexModule.addIndexEventListener(new Listener());
}
indexModule.addIndexStore("mock", MockFSIndexStore::new);
}
}
}
MockFSIndexStore(IndexSettings indexSettings,
IndexStoreConfig config) {
super(indexSettings, config);
}
    @Override
    public DirectoryService newDirectoryService(ShardPath path) {
return new MockFSDirectoryService(indexSettings, this, path);
}
private static final EnumSet<IndexShardState> validCheckIndexStates = EnumSet.of(
IndexShardState.STARTED, IndexShardState.RELOCATED, IndexShardState.POST_RECOVERY
);
private static final class Listener implements IndexEventListener {
private final Map<IndexShard, Boolean> shardSet = Collections.synchronizedMap(new IdentityHashMap<>());
@Override
public void afterIndexShardClosed(ShardId shardId, @Nullable IndexShard indexShard, Settings indexSettings) {
if (indexShard != null) {
Boolean remove = shardSet.remove(indexShard);
if (remove == Boolean.TRUE) {
ESLogger logger = Loggers.getLogger(getClass(), indexShard.indexSettings().getSettings(), indexShard.shardId());
MockFSDirectoryService.checkIndex(logger, indexShard.store(), indexShard.shardId());
}
}
}
@Override
public void indexShardStateChanged(IndexShard indexShard, @Nullable IndexShardState previousState, IndexShardState currentState, @Nullable String reason) {
if (currentState == IndexShardState.CLOSED && validCheckIndexStates.contains(previousState) && indexShard.indexSettings().isOnSharedFilesystem() == false) {
shardSet.put(indexShard, Boolean.TRUE);
}
}
}
}
| mmaracic/elasticsearch | test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java | Java | apache-2.0 | 5,321 |
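A hedged sketch of the index settings that would route an index onto the mock store defined above; it only uses the setting keys visible in the record. Whether these settings are supplied through a test cluster, an index template, or a create-index request depends on the surrounding test framework and is not shown.

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.test.store.MockFSIndexStore;

class MockStoreSettingsExample {

    // Selects the "mock" store type registered by MockFSIndexStore.TestPlugin and
    // keeps the check-index-on-close listener enabled (its default).
    static Settings mockStoreIndexSettings() {
        return Settings.builder()
                .put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), "mock")
                .put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING.getKey(), true)
                .build();
    }
}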
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.dmn.client.resources;
import org.kie.workbench.common.stunner.svg.annotation.SVGSource;
import org.kie.workbench.common.stunner.svg.annotation.SVGViewFactory;
import org.kie.workbench.common.stunner.svg.client.shape.view.SVGShapeViewResource;
import static org.kie.workbench.common.dmn.client.resources.DMNSVGViewFactory.PATH_CSS;
@SVGViewFactory(cssPath = PATH_CSS)
public interface DMNSVGViewFactory {
String PATH_CSS = "images/shapes/dmn-shapes.css";
String DIAGRAM = "images/shapes/diagram.svg";
String BUSINESS_KNOWLEDGE_MODEL = "images/shapes/business-knowledge-model.svg";
String BUSINESS_KNOWLEDGE_MODEL_PALETTE = "images/shapes/business-knowledge-model-palette.svg";
String DECISION = "images/shapes/decision.svg";
String DECISION_PALETTE = "images/shapes/decision-palette.svg";
String INPUT_DATA = "images/shapes/input-data.svg";
String INPUT_DATA_PALETTE = "images/shapes/input-data-palette.svg";
String KNOWLEDGE_SOURCE = "images/shapes/knowledge-source.svg";
String KNOWLEDGE_SOURCE_PALETTE = "images/shapes/knowledge-source-palette.svg";
String TEXT_ANNOTATION = "images/shapes/text-annotation.svg";
String TEXT_ANNOTATION_PALETTE = "images/shapes/text-annotation-palette.svg";
@SVGSource(DIAGRAM)
SVGShapeViewResource diagram();
@SVGSource(BUSINESS_KNOWLEDGE_MODEL)
SVGShapeViewResource businessKnowledgeModel();
@SVGSource(DECISION)
SVGShapeViewResource decision();
@SVGSource(INPUT_DATA)
SVGShapeViewResource inputData();
@SVGSource(KNOWLEDGE_SOURCE)
SVGShapeViewResource knowledgeSource();
@SVGSource(TEXT_ANNOTATION)
SVGShapeViewResource textAnnotation();
}
| jhrcek/kie-wb-common | kie-wb-common-dmn/kie-wb-common-dmn-client/src/main/java/org/kie/workbench/common/dmn/client/resources/DMNSVGViewFactory.java | Java | apache-2.0 | 2,348 |
// Copyright 2015 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package utils
import (
"os"
jww "github.com/spf13/jwalterweatherman"
)
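// CheckErr logs err if it is non-nil: at the CRITICAL level when no extra
// messages are given, otherwise the messages are logged at the ERROR level
// followed by the error itself.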
func CheckErr(err error, s ...string) {
if err != nil {
if len(s) == 0 {
jww.CRITICAL.Println(err)
} else {
for _, message := range s {
jww.ERROR.Println(message)
}
jww.ERROR.Println(err)
}
}
}
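// StopOnErr logs the error (or, when messages are supplied, each non-empty
// message) at the CRITICAL level and exits the process if err is non-nil.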
func StopOnErr(err error, s ...string) {
if err != nil {
if len(s) == 0 {
newMessage := err.Error()
			// Printing an empty string results in an error with
// no message, no bueno.
if newMessage != "" {
jww.CRITICAL.Println(newMessage)
}
} else {
for _, message := range s {
if message != "" {
jww.CRITICAL.Println(message)
}
}
}
os.Exit(-1)
}
}
| coderzh/hugo | utils/utils.go | GO | apache-2.0 | 1,295 |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.IO;
using System.Linq;
using System.Reflection.Metadata;
using System.Reflection.PortableExecutable;
namespace Microsoft.CodeAnalysis.Rebuild
{
public static class Extensions
{
internal static void SkipNullTerminator(ref this BlobReader blobReader)
{
var b = blobReader.ReadByte();
if (b != '\0')
{
throw new InvalidDataException(string.Format(RebuildResources.Encountered_unexpected_byte_0_when_expecting_a_null_terminator, b));
}
}
public static MetadataReader? GetEmbeddedPdbMetadataReader(this PEReader peReader)
{
var entry = peReader.ReadDebugDirectory().SingleOrDefault(x => x.Type == DebugDirectoryEntryType.EmbeddedPortablePdb);
if (entry.Type == DebugDirectoryEntryType.Unknown)
{
return null;
}
var provider = peReader.ReadEmbeddedPortablePdbDebugDirectoryData(entry);
return provider.GetMetadataReader();
}
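        // Illustrative usage (the file name below is a hypothetical example):
        //   using var peReader = new PEReader(File.OpenRead("library.dll"));
        //   MetadataReader? pdbReader = peReader.GetEmbeddedPdbMetadataReader();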
}
}
| physhi/roslyn | src/Compilers/Core/Rebuild/Extensions.cs | C# | apache-2.0 | 1,272 |
package water.deploy;
import java.io.*;
import java.util.*;
import java.util.Map.Entry;
import java.util.jar.*;
import javassist.*;
import water.*;
import water.api.DocGen;
import water.util.Utils;
public class LaunchJar extends Request2 {
static final int API_WEAVER = 1;
static public DocGen.FieldDoc[] DOC_FIELDS;
@API(help = "Jars keys", required = true, filter = Default.class)
public String jars;
@API(help = "Class to instantiate and launch", required = true, filter = Default.class)
public String job_class;
@Override protected Response serve() {
final Job job;
try {
// Move jars from KV store to tmp files
ClassPool pool = new ClassPool(true);
ArrayList<JarEntry> entries = new ArrayList<JarEntry>();
String[] splits = jars.split(",");
for( int i = 0; i < splits.length; i++ ) {
Key key = Key.make(splits[i]);
throw H2O.unimpl();
//ValueArray va = UKV.get(key);
//File file = File.createTempFile("h2o", ".jar");
//Utils.writeFileAndClose(file, va.openStream());
//DKV.remove(key);
//pool.appendClassPath(file.getPath());
//
//JarFile jar = new JarFile(file);
//Enumeration e = jar.entries();
//while( e.hasMoreElements() ) {
// JarEntry entry = (JarEntry) e.nextElement();
// entries.add(entry);
//}
//jar.close();
}
      // Append UID to class names to allow multiple invocations
String uid = Key.rand();
ClassMap renames = new ClassMap();
for( JarEntry entry : entries ) {
if( entry.getName().endsWith(".class") ) {
String n = Utils.className(entry.getName());
String u;
int index = n.indexOf("$");
if( index < 0 )
index = n.length();
u = n.substring(0, index) + uid + n.substring(index);
renames.put(n, u);
}
}
ArrayList<CtClass> updated = new ArrayList();
for( Entry<String, String> entry : ((Map<String, String>) renames).entrySet() ) {
CtClass c = pool.get(entry.getKey().replace('/', '.'));
c.replaceClassName(renames);
updated.add(c);
}
// Create jar file and register it on each node
HashSet<String> packages = new HashSet();
ByteArrayOutputStream mem = new ByteArrayOutputStream();
JarOutputStream jar = new JarOutputStream(mem);
DataOutputStream bc = new DataOutputStream(jar);
for( CtClass c : updated ) {
jar.putNextEntry(new JarEntry(c.getName().replace('.', '/') + ".class"));
c.toBytecode(bc);
bc.flush();
String p = c.getPackageName();
if( p == null )
throw new IllegalArgumentException("Package is null for class " + c);
packages.add(p);
}
jar.close();
weavePackages(packages.toArray(new String[0]));
AddJar task = new AddJar();
task._data = mem.toByteArray();
task.invokeOnAllNodes();
// Start job
Class c = Class.forName(job_class + uid);
job = (Job) c.newInstance();
job.fork();
} catch( Exception ex ) {
throw new RuntimeException(ex);
}
return Response.done(this);
}
public static void weavePackages(String... names) {
WeavePackages task = new WeavePackages();
task._names = names;
task.invokeOnAllNodes();
}
static class WeavePackages extends DRemoteTask {
String[] _names;
@Override public void lcompute() {
for( String name : _names )
Boot.weavePackage(name);
tryComplete();
}
@Override public void reduce(DRemoteTask drt) {
}
}
static class AddJar extends DRemoteTask {
byte[] _data;
@Override public void lcompute() {
try {
File file = File.createTempFile("h2o", ".jar");
Utils.writeFileAndClose(file, new ByteArrayInputStream(_data));
Boot._init.addExternalJars(file);
tryComplete();
} catch( Exception ex ) {
throw new RuntimeException(ex);
}
}
@Override public void reduce(DRemoteTask drt) {
}
}
}
| rowhit/h2o-2 | src/main/java/water/deploy/LaunchJar.java | Java | apache-2.0 | 4,106 |
binomial_fit.coef() | madmax983/h2o-3 | h2o-docs/src/booklets/v2_2015/source/GLM_Vignette_code_examples/glm_model_output_20.py | Python | apache-2.0 | 19 |
#!/usr/bin/env python
import os
import sys
from macholib.MachOStandalone import MachOStandalone
from macholib.util import strip_files
def standaloneApp(path):
    if not (os.path.isdir(path) and os.path.exists(
            os.path.join(path, 'Contents'))):
raise SystemExit('%s: %s does not look like an app bundle'
% (sys.argv[0], path))
files = MachOStandalone(path).run()
strip_files(files)
def main():
print("WARNING: 'macho_standalone' is deprecated, use 'python -mmacholib dump' instead")
if not sys.argv[1:]:
raise SystemExit('usage: %s [appbundle ...]' % (sys.argv[0],))
for fn in sys.argv[1:]:
standaloneApp(fn)
if __name__ == '__main__':
main()
| timeyyy/PyUpdater | pyupdater/vendor/PyInstaller/lib/macholib/macho_standalone.py | Python | bsd-2-clause | 718 |
/**
* Sticky Notes
*
* An open source lightweight pastebin application
*
* @package StickyNotes
* @author Sayak Banerjee
* @copyright (c) 2014 Sayak Banerjee <mail@sayakbanerjee.com>. All rights reserved.
* @license http://www.opensource.org/licenses/bsd-license.php
* @link http://sayakbanerjee.com/sticky-notes
* @since Version 1.0
* @filesource
*/
/**
* Stores the current URL
*
* @var string
*/
var currentUrl = $(location).attr('href');
/**
* Timer container
*
* @var array
*/
var timers = new Array();
/**
* Instance counter
*
* @var int
*/
var instance = 0;
/**
* This is the main entry point of the script
*
* @return void
*/
function initMain()
{
// Initialize a new instance
initInstance();
// Initialize AJAX components
initAjaxComponents();
// Initialize AJAX navigation
initAjaxNavigation();
// Initialize addons
initAddons();
}
/**
* This initializes all JS addons
*
* @return void
*/
function initAddons()
{
// Initialize code wrapping
initWrapToggle();
// Initialize the code editor
initEditor();
// Initialize tab persistence
initTabPersistence();
// Initialize line reference
initLineReference();
// Initialize bootstrap components
initBootstrap();
}
/**
* Initializes a new instance of the JS library
*
* @return void
*/
function initInstance()
{
// Clear all timers
if (timers[instance] !== undefined)
{
for (idx in timers[instance])
{
clearInterval(timers[instance][idx]);
}
}
// Create a new instance and timer container
instance++;
timers[instance] = new Array();
}
/**
* Starts a new timed operation
*
* @param operation
* @param callback
* @param interval
* @return void
*/
function initTimer(operation, callback, interval)
{
switch (operation)
{
case 'once':
setTimeout(callback, interval);
break;
case 'repeat':
timers[instance].push(setInterval(callback, interval));
break;
}
}
/**
* Scans for and processes AJAX components
*
* Each AJAX component can have 4 parameters:
* - realtime : Indicates if the component involves realtime data
* - onload : The AJAX request will be triggered automatically
* - component : The utility component to request
* - extra : Any extra data that will be sent to the server
*
* @return void
*/
function initAjaxComponents()
{
var count = 1;
// Setup AJAX requests
$('[data-toggle="ajax"]').each(function()
{
var id = 'stickynotes-' + count++;
var onload = $(this).attr('data-onload') === 'true';
var realtime = $(this).attr('data-realtime') === 'true';
var component = $(this).attr('data-component');
var extra = $(this).attr('data-extra');
// Set the id of this element
$(this).attr('data-id', id);
// AJAX URL and component must be defined
if (ajaxUrl !== undefined && component !== undefined)
{
var getUrl = ajaxUrl + '/' + component + (extra !== undefined ? '/' + extra : '');
var callback = function(e)
{
// Add the loading icon
$(this).html('<span class="glyphicon glyphicon-refresh"></span>');
// Send the AJAX request
$.ajax({
url: getUrl,
data: { key: Math.random(), ajax: 1 },
context: $('[data-id="' + id + '"]'),
success: function(response)
{
// Dump the HTML in the element
$(this).html(response);
// If response is link, set it as href as well
if (response.indexOf('http') === 0)
{
$(this).attr('href', response);
$(this).removeAttr('data-toggle');
$(this).off('click');
}
// Load addons again
initAddons();
}
});
if (e !== undefined)
{
e.preventDefault();
}
};
// Execute the AJAX callback
if (onload)
{
if (realtime)
{
initTimer('repeat', callback, 5000);
}
initTimer('once', callback, 0);
}
else
{
$(this).off('click').on('click', callback);
}
}
});
}
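/*
 * Illustrative markup for an AJAX component, matching the data attributes read
 * by initAjaxComponents() above. The component name "shorten" and the extra
 * value are hypothetical examples, not taken from the original markup:
 *
 *   <a href="#" data-toggle="ajax" data-component="shorten"
 *      data-onload="true" data-realtime="false" data-extra="42">Short URL</a>
 */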
/**
* Enabled AJAX navigation across the site
*
* @return void
*/
function initAjaxNavigation()
{
if (ajaxNav !== undefined && ajaxNav && $.support.cors)
{
// AJAX callback
var callback = function(e)
{
var navMethod = $(this).prop('tagName') == 'A' ? 'GET' : 'POST';
var seek = $(this).attr('data-seek');
// Set up data based on method
switch (navMethod)
{
case 'GET':
navUrl = $(this).attr('href');
payload = 'ajax=1';
break;
case 'POST':
navUrl = $(this).attr('action');
payload = $(this).serialize() + '&ajax=1';
break;
}
// Send an AJAX request for all but anchor links
if (navUrl !== undefined && !$('.loader').is(':visible'))
{
$('.loader').show();
$.ajax({
url: navUrl,
method: navMethod,
context: $('body'),
data: payload,
success: function(response, status, info)
{
var isPageSection = response.indexOf('<!DOCTYPE html>') == -1;
var isHtmlContent = info.getResponseHeader('Content-Type').indexOf('text/html') != -1;
// Change the page URL
currentUrl = info.getResponseHeader('StickyNotes-Url');
window.history.pushState({ html: response }, null, currentUrl);
// Handle the response
if (isPageSection && isHtmlContent)
{
$(this).html(response);
}
else if (isHtmlContent)
{
dom = $(document.createElement('html'));
dom[0].innerHTML = response;
$(this).html(dom.find('body').html());
}
else
{
window.location = navUrl;
}
// Seek to top of the page
$.scrollTo(0, 200);
// Load JS triggers again
initMain();
},
error: function()
{
window.location = navUrl;
}
});
e.preventDefault();
}
};
// Execute callback on all links, excluding some
$('body').find('a' +
':not([href*="/admin"])' +
':not([href*="/attachment"])' +
':not([href*="#"])' +
':not([href*="mailto:"])' +
':not([onclick])'
).off('click').on('click', callback);
// Execute callback on all designated forms
$('body').find('form[data-navigate="ajax"]').off('submit').on('submit', callback);
// URL change monitor
initTimer('repeat', function()
{
var href = $(location).attr('href');
// Trim the trailing slash from currentUrl
if (currentUrl.substr(-1) == '/')
{
currentUrl = currentUrl.substr(0, currentUrl.length - 1);
}
// Trim the trailing slash from href
if (href.substr(-1) == '/')
{
href = href.substr(0, href.length - 1);
}
// Reload page if URL changed
if (currentUrl != href && href.indexOf('#') == -1)
{
currentUrl = href;
// Load the selected page
$('.loader').show();
$.get(href, function(response)
{
dom = $(document.createElement('html'));
dom[0].innerHTML = response;
$('body').html(dom.find('body').html());
});
}
}, 300);
}
}
/**
* Activates the code wrapping toggle function
*
* @return void
*/
function initWrapToggle()
{
$('[data-toggle="wrap"]').off('click').on('click', function(e)
{
var isWrapped = $('.pre div').css('white-space') != 'nowrap';
var newValue = isWrapped ? 'nowrap' : 'inherit';
$('.pre div').css('white-space', newValue);
e.preventDefault();
});
}
/**
* Activates the paste editor
*
* @return void
*/
function initEditor()
{
// Insert tab in the code box
$('[name="data"]').off('keydown').on('keydown', function (e)
{
if (e.keyCode == 9)
{
var myValue = "\t";
var startPos = this.selectionStart;
var endPos = this.selectionEnd;
var scrollTop = this.scrollTop;
this.value = this.value.substring(0, startPos) + myValue + this.value.substring(endPos,this.value.length);
this.focus();
this.selectionStart = startPos + myValue.length;
this.selectionEnd = startPos + myValue.length;
this.scrollTop = scrollTop;
e.preventDefault();
}
});
// Tick the private checkbox if password is entered
$('[name="password"]').off('keyup').on('keyup', function()
{
$('[name="private"]').attr('checked', $(this).val().length > 0);
});
}
/**
* Activates some bootstrap components
*
* @return void
*/
function initBootstrap()
{
// Activate tooltips
$('[data-toggle="tooltip"]').tooltip();
}
/**
* Saves the tab state on all pages
*
* @return void
*/
function initTabPersistence()
{
// Restore the previous tab state
$('.nav-tabs').each(function()
{
var id = $(this).attr('id');
var index = $.cookie('stickynotes_tabstate');
if (index !== undefined)
{
$('.nav-tabs > li:eq(' + index + ') a').tab('show');
}
});
// Save the current tab state
$('.nav-tabs > li > a').on('shown.bs.tab', function (e)
{
var id = $(this).parents('.nav-tabs').attr('id');
var index = $(this).parents('li').index();
$.cookie('stickynotes_tabstate', index);
})
// Clear tab state when navigated to a different page
if ($('.nav-tabs').length == 0)
{
$.cookie('stickynotes_tabstate', null);
}
}
/**
* Highlights lines upon clicking them on the #show page
*
* @return void
*/
function initLineReference()
{
if ($('section#show').length != 0)
{
var line = 1;
// First, we allocate unique IDs to all lines
$('.pre li').each(function()
{
$(this).attr('id', 'line-' + line++);
});
// Next, navigate to an ID if the user requested it
var anchor = window.location.hash;
if (anchor.length > 0)
{
var top = $(anchor).offset().top;
// Scroll to the anchor
$.scrollTo(top, 200);
// Highlight the anchor
$(anchor).addClass('highlight');
}
// Click to change anchor
$('.pre li').off('mouseup').on('mouseup', function()
{
if (window.getSelection() == '')
{
var id = $(this).attr('id');
var top = $(this).offset().top;
// Scroll to the anchor
$.scrollTo(top, 200, function() {
window.location.hash = '#' + id;
});
// Highlight the anchor
$('.pre li').removeClass('highlight');
$(this).addClass('highlight');
}
});
}
}
/**
* Draws a Google chart in a container
*
* @return void
*/
function initAreaChart()
{
if (chartData !== undefined && chartContainer !== undefined)
{
// Create an instance of line chart
var chart = new google.visualization.AreaChart(chartContainer);
// Define chart options
var options = {
colors: [ '#428bca', '#d9534f' ],
areaOpacity: 0.1,
lineWidth: 4,
pointSize: 8,
hAxis: {
textStyle: {
color: '#666'
},
gridlines: {
color: 'transparent'
},
baselineColor: '#eeeeee',
format:'MMM d'
},
vAxis: {
textStyle: {
color: '#666'
},
gridlines: {
color: '#eee'
}
},
chartArea: {
left: 50,
top: 10,
width: '100%',
height: 210
},
legend: {
position: 'bottom'
}
};
// Draw the line chart
chart.draw(chartData, options);
}
// Redraw chart on window resize
$(window).off('resize').on('resize', initAreaChart);
}
/**
* Invoke the entry point on DOM ready
*/
$(initMain);
| solitaryr/sticky-notes | public/assets/pbr/js/stickynotes.js | JavaScript | bsd-2-clause | 10,960 |
/*******************************************************************************
* Copyright (c) 2013, Daniel Murphy
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
******************************************************************************/
package org.jbox2d.serialization;
import java.io.IOException;
import java.io.OutputStream;
/**
* Container for holding serialization results. Use
* {@link #getValue()} to get the implementation-specific
* result.
* @author Daniel Murphy
*
*/
public interface SerializationResult {
/**
* The implementation-specific serialization
* result.
* @return serialization result
*/
public Object getValue();
/**
* Writes the result to the given output stream.
* @param argOutputStream
* @throws IOException
*/
public void writeTo(OutputStream argOutputStream) throws IOException;
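  // Illustrative usage (the "serializer" and output stream below are
  // hypothetical examples, not part of this interface):
  //   SerializationResult result = serializer.serialize(world);
  //   result.writeTo(new FileOutputStream("world.bin"));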
}
| Latertater/jbox2d | jbox2d-serialization/src/main/java/org/jbox2d/serialization/SerializationResult.java | Java | bsd-2-clause | 2,130 |
cask 'media-center' do
version '23.00.20'
sha256 '70042295e59a0114900ca475cb2ab46d8c8793c58dbb429542ce4129614e5f25'
url "http://files.jriver.com/mediacenter/channels/v#{version.major}/stable/MediaCenter#{version.no_dots}.dmg"
name 'JRiver Media Center'
homepage 'https://www.jriver.com/'
app "Media Center #{version.major}.app"
zap delete: [
"~/Library/Caches/com.jriver.MediaCenter#{version.major}",
"~/Library/Saved Application State/com.jriver.MediaCenter#{version.major}.savedState",
],
trash: [
'~/Library/Application Support/J River/',
'~/Documents/JRiver/',
"~/Library/Preferences/com.jriver.MediaCenter#{version.major}.plist",
]
end
| klane/homebrew-cask | Casks/media-center.rb | Ruby | bsd-2-clause | 773 |
<?php
class Kwc_Trl_DateHelper_DateTime_Component extends Kwc_Abstract
{
}
| kaufmo/koala-framework | tests/Kwc/Trl/DateHelper/DateTime/Component.php | PHP | bsd-2-clause | 75 |
#include <fstream>
#include <sstream>
#include <pcl/features/rops_estimation.h>
#include <pcl/io/pcd_io.h>
int main (int argc, char** argv)
{
if (argc != 4)
return (-1);
pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ> ());
if (pcl::io::loadPCDFile (argv[1], *cloud) == -1)
return (-1);
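  // Read the point indices (1-based in the input file) for which local features will be computed.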
pcl::PointIndicesPtr indices (new pcl::PointIndices);
std::ifstream indices_file;
indices_file.open (argv[2], std::ifstream::in);
for (std::string line; std::getline (indices_file, line);)
{
std::istringstream in (line);
unsigned int index = 0;
in >> index;
indices->indices.push_back (index - 1);
}
indices_file.close ();
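  // Read the mesh triangles; each line holds three 1-based vertex indices.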
std::vector <pcl::Vertices> triangles;
std::ifstream triangles_file;
triangles_file.open (argv[3], std::ifstream::in);
for (std::string line; std::getline (triangles_file, line);)
{
pcl::Vertices triangle;
std::istringstream in (line);
unsigned int vertex = 0;
in >> vertex;
triangle.vertices.push_back (vertex - 1);
in >> vertex;
triangle.vertices.push_back (vertex - 1);
in >> vertex;
triangle.vertices.push_back (vertex - 1);
triangles.push_back (triangle);
}
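  // RoPS parameters: local support radius, number of partition bins, and number of rotations.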
float support_radius = 0.0285f;
unsigned int number_of_partition_bins = 5;
unsigned int number_of_rotations = 3;
pcl::search::KdTree<pcl::PointXYZ>::Ptr search_method (new pcl::search::KdTree<pcl::PointXYZ>);
search_method->setInputCloud (cloud);
pcl::ROPSEstimation <pcl::PointXYZ, pcl::Histogram <135> > feature_estimator;
feature_estimator.setSearchMethod (search_method);
feature_estimator.setSearchSurface (cloud);
feature_estimator.setInputCloud (cloud);
feature_estimator.setIndices (indices);
feature_estimator.setTriangles (triangles);
feature_estimator.setRadiusSearch (support_radius);
feature_estimator.setNumberOfPartitionBins (number_of_partition_bins);
feature_estimator.setNumberOfRotations (number_of_rotations);
feature_estimator.setSupportRadius (support_radius);
pcl::PointCloud<pcl::Histogram <135> >::Ptr histograms (new pcl::PointCloud <pcl::Histogram <135> > ());
feature_estimator.compute (*histograms);
return (0);
}
| drmateo/pcl | doc/tutorials/content/sources/rops_feature/rops_feature.cpp | C++ | bsd-3-clause | 2,148 |
/*===================================================================
The Medical Imaging Interaction Toolkit (MITK)
Copyright (c) University College London (UCL).
All rights reserved.
This software is distributed WITHOUT ANY WARRANTY; without
even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE.
See LICENSE.txt or http://www.mitk.org for details.
===================================================================*/
#include "QmitkUiLoader.h"
#include "QmitkDataStorageComboBoxWithSelectNone.h"
#include "mitkNodePredicateDataType.h"
#include "mitkNodePredicateOr.h"
#include "mitkImage.h"
//-----------------------------------------------------------------------------
QmitkUiLoader::QmitkUiLoader(const mitk::DataStorage* dataStorage, QObject *parent)
: ctkCmdLineModuleQtUiLoader(parent)
, m_DataStorage(dataStorage)
{
}
//-----------------------------------------------------------------------------
QmitkUiLoader::~QmitkUiLoader()
{
}
//-----------------------------------------------------------------------------
QStringList QmitkUiLoader::availableWidgets () const
{
QStringList availableWidgets = ctkCmdLineModuleQtUiLoader::availableWidgets();
availableWidgets << "QmitkDataStorageComboBoxWithSelectNone";
return availableWidgets;
}
//-----------------------------------------------------------------------------
QWidget* QmitkUiLoader::createWidget(const QString& className, QWidget* parent, const QString& name)
{
QWidget* widget = nullptr;
if (className == "QmitkDataStorageComboBoxWithSelectNone")
{
auto comboBox = new QmitkDataStorageComboBoxWithSelectNone(parent);
comboBox->setObjectName(name);
comboBox->SetAutoSelectNewItems(false);
comboBox->SetPredicate(mitk::TNodePredicateDataType< mitk::Image >::New());
comboBox->SetDataStorage(const_cast<mitk::DataStorage*>(m_DataStorage));
comboBox->setCurrentIndex(0);
widget = comboBox;
}
else
{
widget = ctkCmdLineModuleQtUiLoader::createWidget(className, parent, name);
}
return widget;
}
| NifTK/MITK | Plugins/org.mitk.gui.qt.cmdlinemodules/src/internal/QmitkUiLoader.cpp | C++ | bsd-3-clause | 2,060 |
description("This tests that page scaling does not affect mouse event pageX and pageY coordinates.");
var html = document.documentElement;
var div = document.createElement("div");
div.style.width = "100px";
div.style.height = "100px";
div.style.backgroundColor = "blue";
var eventLog = "";
function appendEventLog() {
var msg = event.type + "(" + event.pageX + ", " + event.pageY + ")";
if (window.eventSender) {
eventLog += msg;
} else {
debug(msg);
}
}
function clearEventLog() {
eventLog = "";
}
div.addEventListener("click", appendEventLog, false);
document.body.insertBefore(div, document.body.firstChild);
function sendEvents(button) {
if (!window.eventSender) {
debug("This test requires DumpRenderTree. Click on the blue rect with the left mouse button to log the mouse coordinates.")
return;
}
eventSender.mouseDown(button);
eventSender.mouseUp(button);
}
function testEvents(button, description, expectedString) {
sendEvents(button);
debug(description);
shouldBeEqualToString("eventLog", expectedString);
debug("");
clearEventLog();
}
if (window.eventSender) {
eventSender.mouseMoveTo(10, 10);
// We are clicking in the same position on screen. As we scale or transform the page,
// we expect the pageX and pageY event coordinates to change because different
// parts of the document are under the mouse.
testEvents(0, "Unscaled", "click(10, 10)");
window.eventSender.setPageScaleFactorLimits(0.5, 0.5);
window.eventSender.setPageScaleFactor(0.5, 0, 0);
testEvents(0, "setPageScale(0.5)", "click(20, 20)");
}
| modulexcite/blink | LayoutTests/fast/events/script-tests/page-scaled-mouse-click.js | JavaScript | bsd-3-clause | 1,651 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "ash/shell.h"
#include <algorithm>
#include <string>
#include "ash/accelerators/accelerator_controller.h"
#include "ash/accelerators/accelerator_delegate.h"
#include "ash/accelerators/focus_manager_factory.h"
#include "ash/accelerators/nested_accelerator_delegate.h"
#include "ash/ash_switches.h"
#include "ash/autoclick/autoclick_controller.h"
#include "ash/desktop_background/desktop_background_controller.h"
#include "ash/desktop_background/desktop_background_view.h"
#include "ash/desktop_background/user_wallpaper_delegate.h"
#include "ash/display/cursor_window_controller.h"
#include "ash/display/display_controller.h"
#include "ash/display/display_manager.h"
#include "ash/display/event_transformation_handler.h"
#include "ash/display/mouse_cursor_event_filter.h"
#include "ash/display/screen_position_controller.h"
#include "ash/drag_drop/drag_drop_controller.h"
#include "ash/first_run/first_run_helper_impl.h"
#include "ash/focus_cycler.h"
#include "ash/frame/custom_frame_view_ash.h"
#include "ash/gpu_support.h"
#include "ash/high_contrast/high_contrast_controller.h"
#include "ash/host/ash_window_tree_host_init_params.h"
#include "ash/keyboard_uma_event_filter.h"
#include "ash/magnifier/magnification_controller.h"
#include "ash/magnifier/partial_magnification_controller.h"
#include "ash/media_delegate.h"
#include "ash/new_window_delegate.h"
#include "ash/root_window_controller.h"
#include "ash/session/session_state_delegate.h"
#include "ash/shelf/app_list_shelf_item_delegate.h"
#include "ash/shelf/shelf_delegate.h"
#include "ash/shelf/shelf_item_delegate.h"
#include "ash/shelf/shelf_item_delegate_manager.h"
#include "ash/shelf/shelf_layout_manager.h"
#include "ash/shelf/shelf_model.h"
#include "ash/shelf/shelf_widget.h"
#include "ash/shelf/shelf_window_watcher.h"
#include "ash/shell_delegate.h"
#include "ash/shell_factory.h"
#include "ash/shell_init_params.h"
#include "ash/shell_window_ids.h"
#include "ash/system/locale/locale_notification_controller.h"
#include "ash/system/status_area_widget.h"
#include "ash/system/tray/system_tray_delegate.h"
#include "ash/system/tray/system_tray_notifier.h"
#include "ash/wm/app_list_controller.h"
#include "ash/wm/ash_focus_rules.h"
#include "ash/wm/ash_native_cursor_manager.h"
#include "ash/wm/coordinate_conversion.h"
#include "ash/wm/event_client_impl.h"
#include "ash/wm/lock_state_controller.h"
#include "ash/wm/maximize_mode/maximize_mode_controller.h"
#include "ash/wm/maximize_mode/maximize_mode_window_manager.h"
#include "ash/wm/mru_window_tracker.h"
#include "ash/wm/overlay_event_filter.h"
#include "ash/wm/overview/window_selector_controller.h"
#include "ash/wm/power_button_controller.h"
#include "ash/wm/resize_shadow_controller.h"
#include "ash/wm/root_window_layout_manager.h"
#include "ash/wm/screen_dimmer.h"
#include "ash/wm/system_gesture_event_filter.h"
#include "ash/wm/system_modal_container_event_filter.h"
#include "ash/wm/system_modal_container_layout_manager.h"
#include "ash/wm/toplevel_window_event_handler.h"
#include "ash/wm/video_detector.h"
#include "ash/wm/window_animations.h"
#include "ash/wm/window_cycle_controller.h"
#include "ash/wm/window_positioner.h"
#include "ash/wm/window_properties.h"
#include "ash/wm/window_util.h"
#include "ash/wm/workspace_controller.h"
#include "base/bind.h"
#include "base/debug/trace_event.h"
#include "ui/aura/client/aura_constants.h"
#include "ui/aura/env.h"
#include "ui/aura/layout_manager.h"
#include "ui/aura/window.h"
#include "ui/aura/window_event_dispatcher.h"
#include "ui/base/ui_base_switches.h"
#include "ui/base/user_activity/user_activity_detector.h"
#include "ui/compositor/layer.h"
#include "ui/compositor/layer_animator.h"
#include "ui/events/event_target_iterator.h"
#include "ui/gfx/display.h"
#include "ui/gfx/geometry/size.h"
#include "ui/gfx/image/image_skia.h"
#include "ui/gfx/screen.h"
#include "ui/keyboard/keyboard.h"
#include "ui/keyboard/keyboard_controller.h"
#include "ui/keyboard/keyboard_switches.h"
#include "ui/keyboard/keyboard_util.h"
#include "ui/message_center/message_center.h"
#include "ui/views/corewm/tooltip_aura.h"
#include "ui/views/corewm/tooltip_controller.h"
#include "ui/views/focus/focus_manager_factory.h"
#include "ui/views/widget/native_widget_aura.h"
#include "ui/views/widget/widget.h"
#include "ui/wm/core/accelerator_filter.h"
#include "ui/wm/core/compound_event_filter.h"
#include "ui/wm/core/focus_controller.h"
#include "ui/wm/core/input_method_event_filter.h"
#include "ui/wm/core/nested_accelerator_controller.h"
#include "ui/wm/core/shadow_controller.h"
#include "ui/wm/core/visibility_controller.h"
#include "ui/wm/core/window_modality_controller.h"
#if defined(OS_CHROMEOS)
#if defined(USE_X11)
#include "ui/gfx/x/x11_types.h"
#endif // defined(USE_X11)
#include "ash/accelerators/magnifier_key_scroller.h"
#include "ash/accelerators/spoken_feedback_toggler.h"
#include "ash/ash_constants.h"
#include "ash/content/display/screen_orientation_delegate_chromeos.h"
#include "ash/display/display_change_observer_chromeos.h"
#include "ash/display/display_configurator_animation.h"
#include "ash/display/display_error_observer_chromeos.h"
#include "ash/display/projecting_observer_chromeos.h"
#include "ash/display/resolution_notification_controller.h"
#include "ash/sticky_keys/sticky_keys_controller.h"
#include "ash/system/chromeos/bluetooth/bluetooth_notification_controller.h"
#include "ash/system/chromeos/brightness/brightness_controller_chromeos.h"
#include "ash/system/chromeos/power/power_event_observer.h"
#include "ash/system/chromeos/power/power_status.h"
#include "ash/system/chromeos/power/video_activity_notifier.h"
#include "ash/system/chromeos/session/last_window_closed_logout_reminder.h"
#include "ash/system/chromeos/session/logout_confirmation_controller.h"
#include "ash/touch/touch_transformer_controller.h"
#include "ash/virtual_keyboard_controller.h"
#include "base/bind_helpers.h"
#include "base/sys_info.h"
#include "chromeos/accelerometer/accelerometer_reader.h"
#include "chromeos/dbus/dbus_thread_manager.h"
#include "ui/chromeos/user_activity_power_manager_notifier.h"
#include "ui/display/chromeos/display_configurator.h"
#endif // defined(OS_CHROMEOS)
namespace ash {
namespace {
using aura::Window;
using views::Widget;
// A Corewm VisibilityController subclass that calls the Ash animation routine
// so we can pick up our extended animations. See ash/wm/window_animations.h.
class AshVisibilityController : public ::wm::VisibilityController {
public:
AshVisibilityController() {}
~AshVisibilityController() override {}
private:
// Overridden from ::wm::VisibilityController:
bool CallAnimateOnChildWindowVisibilityChanged(aura::Window* window,
bool visible) override {
return AnimateOnChildWindowVisibilityChanged(window, visible);
}
DISALLOW_COPY_AND_ASSIGN(AshVisibilityController);
};
AshWindowTreeHostInitParams ShellInitParamsToAshWindowTreeHostInitParams(
const ShellInitParams& shell_init_params) {
AshWindowTreeHostInitParams ash_init_params;
#if defined(OS_WIN)
ash_init_params.remote_hwnd = shell_init_params.remote_hwnd;
#endif
return ash_init_params;
}
} // namespace
// static
Shell* Shell::instance_ = NULL;
// static
bool Shell::initially_hide_cursor_ = false;
////////////////////////////////////////////////////////////////////////////////
// Shell, public:
// static
Shell* Shell::CreateInstance(const ShellInitParams& init_params) {
CHECK(!instance_);
instance_ = new Shell(init_params.delegate);
instance_->Init(init_params);
return instance_;
}
// static
Shell* Shell::GetInstance() {
CHECK(instance_);
return instance_;
}
// static
bool Shell::HasInstance() {
return !!instance_;
}
// static
void Shell::DeleteInstance() {
delete instance_;
instance_ = NULL;
}
// static
RootWindowController* Shell::GetPrimaryRootWindowController() {
CHECK(HasInstance());
return GetRootWindowController(GetPrimaryRootWindow());
}
// static
Shell::RootWindowControllerList Shell::GetAllRootWindowControllers() {
CHECK(HasInstance());
return Shell::GetInstance()->display_controller()->
GetAllRootWindowControllers();
}
// static
aura::Window* Shell::GetPrimaryRootWindow() {
CHECK(HasInstance());
return GetInstance()->display_controller()->GetPrimaryRootWindow();
}
// static
aura::Window* Shell::GetTargetRootWindow() {
CHECK(HasInstance());
Shell* shell = GetInstance();
if (shell->scoped_target_root_window_)
return shell->scoped_target_root_window_;
return shell->target_root_window_;
}
// static
gfx::Screen* Shell::GetScreen() {
return gfx::Screen::GetScreenByType(gfx::SCREEN_TYPE_ALTERNATE);
}
// static
aura::Window::Windows Shell::GetAllRootWindows() {
CHECK(HasInstance());
return Shell::GetInstance()->display_controller()->
GetAllRootWindows();
}
// static
aura::Window* Shell::GetContainer(aura::Window* root_window,
int container_id) {
return root_window->GetChildById(container_id);
}
// static
const aura::Window* Shell::GetContainer(const aura::Window* root_window,
int container_id) {
return root_window->GetChildById(container_id);
}
// static
std::vector<aura::Window*> Shell::GetContainersFromAllRootWindows(
int container_id,
aura::Window* priority_root) {
std::vector<aura::Window*> containers;
aura::Window::Windows root_windows = GetAllRootWindows();
for (aura::Window::Windows::const_iterator it = root_windows.begin();
it != root_windows.end(); ++it) {
aura::Window* container = (*it)->GetChildById(container_id);
if (container) {
if (priority_root && priority_root->Contains(container))
containers.insert(containers.begin(), container);
else
containers.push_back(container);
}
}
return containers;
}
void Shell::ShowContextMenu(const gfx::Point& location_in_screen,
ui::MenuSourceType source_type) {
// No context menus if there is no session with an active user.
if (!session_state_delegate_->NumberOfLoggedInUsers())
return;
// No context menus when screen is locked.
if (session_state_delegate_->IsScreenLocked())
return;
aura::Window* root =
wm::GetRootWindowMatching(gfx::Rect(location_in_screen, gfx::Size()));
GetRootWindowController(root)
->ShowContextMenu(location_in_screen, source_type);
}
void Shell::ShowAppList(aura::Window* window) {
// If the context window is not given, show it on the target root window.
if (!window)
window = GetTargetRootWindow();
if (!app_list_controller_)
app_list_controller_.reset(new AppListController);
app_list_controller_->Show(window);
}
void Shell::DismissAppList() {
if (!app_list_controller_)
return;
app_list_controller_->Dismiss();
}
void Shell::ToggleAppList(aura::Window* window) {
if (app_list_controller_ && app_list_controller_->IsVisible()) {
DismissAppList();
return;
}
ShowAppList(window);
}
bool Shell::GetAppListTargetVisibility() const {
return app_list_controller_.get() &&
app_list_controller_->GetTargetVisibility();
}
aura::Window* Shell::GetAppListWindow() {
return app_list_controller_.get() ? app_list_controller_->GetWindow() : NULL;
}
app_list::AppListView* Shell::GetAppListView() {
return app_list_controller_.get() ? app_list_controller_->GetView() : NULL;
}
bool Shell::IsSystemModalWindowOpen() const {
if (simulate_modal_window_open_for_testing_)
return true;
const std::vector<aura::Window*> containers = GetContainersFromAllRootWindows(
kShellWindowId_SystemModalContainer, NULL);
for (std::vector<aura::Window*>::const_iterator cit = containers.begin();
cit != containers.end(); ++cit) {
for (aura::Window::Windows::const_iterator wit = (*cit)->children().begin();
wit != (*cit)->children().end(); ++wit) {
if ((*wit)->GetProperty(aura::client::kModalKey) ==
ui::MODAL_TYPE_SYSTEM && (*wit)->TargetVisibility()) {
return true;
}
}
}
return false;
}
views::NonClientFrameView* Shell::CreateDefaultNonClientFrameView(
views::Widget* widget) {
// Use translucent-style window frames for dialogs.
return new CustomFrameViewAsh(widget);
}
void Shell::RotateFocus(Direction direction) {
focus_cycler_->RotateFocus(direction == FORWARD ? FocusCycler::FORWARD
: FocusCycler::BACKWARD);
}
void Shell::SetDisplayWorkAreaInsets(Window* contains,
const gfx::Insets& insets) {
if (!display_controller_->UpdateWorkAreaOfDisplayNearestWindow(
contains, insets)) {
return;
}
FOR_EACH_OBSERVER(ShellObserver, observers_,
OnDisplayWorkAreaInsetsChanged());
}
void Shell::OnLoginStateChanged(user::LoginStatus status) {
FOR_EACH_OBSERVER(ShellObserver, observers_, OnLoginStateChanged(status));
}
void Shell::OnLoginUserProfilePrepared() {
CreateShelf();
CreateKeyboard();
}
void Shell::UpdateAfterLoginStatusChange(user::LoginStatus status) {
RootWindowControllerList controllers = GetAllRootWindowControllers();
for (RootWindowControllerList::iterator iter = controllers.begin();
iter != controllers.end(); ++iter)
(*iter)->UpdateAfterLoginStatusChange(status);
}
void Shell::OnAppTerminating() {
FOR_EACH_OBSERVER(ShellObserver, observers_, OnAppTerminating());
}
void Shell::OnLockStateChanged(bool locked) {
FOR_EACH_OBSERVER(ShellObserver, observers_, OnLockStateChanged(locked));
#ifndef NDEBUG
// Make sure that there is no system modal in Lock layer when unlocked.
if (!locked) {
std::vector<aura::Window*> containers = GetContainersFromAllRootWindows(
kShellWindowId_LockSystemModalContainer, GetPrimaryRootWindow());
for (std::vector<aura::Window*>::const_iterator iter = containers.begin();
iter != containers.end(); ++iter) {
DCHECK_EQ(0u, (*iter)->children().size());
}
}
#endif
}
void Shell::OnCastingSessionStartedOrStopped(bool started) {
#if defined(OS_CHROMEOS)
if (projecting_observer_)
projecting_observer_->OnCastingSessionStartedOrStopped(started);
#endif
}
void Shell::OnOverviewModeStarting() {
FOR_EACH_OBSERVER(ShellObserver, observers_, OnOverviewModeStarting());
}
void Shell::OnOverviewModeEnding() {
FOR_EACH_OBSERVER(ShellObserver, observers_, OnOverviewModeEnding());
}
void Shell::OnMaximizeModeStarted() {
FOR_EACH_OBSERVER(ShellObserver, observers_, OnMaximizeModeStarted());
}
void Shell::OnMaximizeModeEnded() {
FOR_EACH_OBSERVER(ShellObserver, observers_, OnMaximizeModeEnded());
}
void Shell::OnRootWindowAdded(aura::Window* root_window) {
FOR_EACH_OBSERVER(ShellObserver, observers_, OnRootWindowAdded(root_window));
}
void Shell::CreateShelf() {
RootWindowControllerList controllers = GetAllRootWindowControllers();
for (RootWindowControllerList::iterator iter = controllers.begin();
iter != controllers.end(); ++iter)
(*iter)->shelf()->CreateShelf();
}
void Shell::OnShelfCreatedForRootWindow(aura::Window* root_window) {
FOR_EACH_OBSERVER(ShellObserver,
observers_,
OnShelfCreatedForRootWindow(root_window));
}
void Shell::CreateKeyboard() {
// TODO(bshe): Primary root window controller may not be the controller to
// attach virtual keyboard. See http://crbug.com/303429
InitKeyboard();
GetPrimaryRootWindowController()->
ActivateKeyboard(keyboard::KeyboardController::GetInstance());
}
void Shell::DeactivateKeyboard() {
if (keyboard::KeyboardController::GetInstance()) {
RootWindowControllerList controllers = GetAllRootWindowControllers();
for (RootWindowControllerList::iterator iter = controllers.begin();
iter != controllers.end(); ++iter) {
(*iter)->DeactivateKeyboard(keyboard::KeyboardController::GetInstance());
}
}
keyboard::KeyboardController::ResetInstance(NULL);
}
void Shell::ShowShelf() {
RootWindowControllerList controllers = GetAllRootWindowControllers();
for (RootWindowControllerList::iterator iter = controllers.begin();
iter != controllers.end(); ++iter)
(*iter)->ShowShelf();
}
void Shell::AddShellObserver(ShellObserver* observer) {
observers_.AddObserver(observer);
}
void Shell::RemoveShellObserver(ShellObserver* observer) {
observers_.RemoveObserver(observer);
}
#if defined(OS_CHROMEOS)
bool Shell::ShouldSaveDisplaySettings() {
return !((maximize_mode_controller_->IsMaximizeModeWindowManagerEnabled() &&
maximize_mode_controller_->
ignore_display_configuration_updates()) ||
resolution_notification_controller_->DoesNotificationTimeout());
}
#endif
void Shell::UpdateShelfVisibility() {
RootWindowControllerList controllers = GetAllRootWindowControllers();
for (RootWindowControllerList::iterator iter = controllers.begin();
iter != controllers.end(); ++iter)
if ((*iter)->shelf())
(*iter)->UpdateShelfVisibility();
}
void Shell::SetShelfAutoHideBehavior(ShelfAutoHideBehavior behavior,
aura::Window* root_window) {
ash::ShelfLayoutManager::ForShelf(root_window)->SetAutoHideBehavior(behavior);
}
ShelfAutoHideBehavior Shell::GetShelfAutoHideBehavior(
aura::Window* root_window) const {
return ash::ShelfLayoutManager::ForShelf(root_window)->auto_hide_behavior();
}
void Shell::SetShelfAlignment(ShelfAlignment alignment,
aura::Window* root_window) {
if (ash::ShelfLayoutManager::ForShelf(root_window)->SetAlignment(alignment)) {
FOR_EACH_OBSERVER(
ShellObserver, observers_, OnShelfAlignmentChanged(root_window));
}
}
ShelfAlignment Shell::GetShelfAlignment(const aura::Window* root_window) {
return GetRootWindowController(root_window)
->GetShelfLayoutManager()
->GetAlignment();
}
void Shell::SetDimming(bool should_dim) {
RootWindowControllerList controllers = GetAllRootWindowControllers();
for (RootWindowControllerList::iterator iter = controllers.begin();
iter != controllers.end(); ++iter)
(*iter)->screen_dimmer()->SetDimming(should_dim);
}
void Shell::NotifyFullscreenStateChange(bool is_fullscreen,
aura::Window* root_window) {
FOR_EACH_OBSERVER(ShellObserver, observers_, OnFullscreenStateChanged(
is_fullscreen, root_window));
}
void Shell::CreateModalBackground(aura::Window* window) {
if (!modality_filter_) {
modality_filter_.reset(new SystemModalContainerEventFilter(this));
AddPreTargetHandler(modality_filter_.get());
}
RootWindowControllerList controllers = GetAllRootWindowControllers();
for (RootWindowControllerList::iterator iter = controllers.begin();
iter != controllers.end(); ++iter)
(*iter)->GetSystemModalLayoutManager(window)->CreateModalBackground();
}
void Shell::OnModalWindowRemoved(aura::Window* removed) {
RootWindowControllerList controllers = GetAllRootWindowControllers();
bool activated = false;
for (RootWindowControllerList::iterator iter = controllers.begin();
iter != controllers.end() && !activated; ++iter) {
activated = (*iter)->GetSystemModalLayoutManager(removed)->
ActivateNextModalWindow();
}
if (!activated) {
RemovePreTargetHandler(modality_filter_.get());
modality_filter_.reset();
for (RootWindowControllerList::iterator iter = controllers.begin();
iter != controllers.end(); ++iter)
(*iter)->GetSystemModalLayoutManager(removed)->DestroyModalBackground();
}
}
WebNotificationTray* Shell::GetWebNotificationTray() {
return GetPrimaryRootWindowController()->shelf()->
status_area_widget()->web_notification_tray();
}
bool Shell::HasPrimaryStatusArea() {
ShelfWidget* shelf = GetPrimaryRootWindowController()->shelf();
return shelf && shelf->status_area_widget();
}
SystemTray* Shell::GetPrimarySystemTray() {
return GetPrimaryRootWindowController()->GetSystemTray();
}
ShelfDelegate* Shell::GetShelfDelegate() {
if (!shelf_delegate_) {
shelf_model_.reset(new ShelfModel);
// Creates ShelfItemDelegateManager before ShelfDelegate.
shelf_item_delegate_manager_.reset(
new ShelfItemDelegateManager(shelf_model_.get()));
shelf_delegate_.reset(delegate_->CreateShelfDelegate(shelf_model_.get()));
scoped_ptr<ShelfItemDelegate> controller(new AppListShelfItemDelegate);
    // Find the app list's location in the shelf model and set its
    // ShelfItemDelegate.
int app_list_index = shelf_model_->GetItemIndexForType(TYPE_APP_LIST);
DCHECK_GE(app_list_index, 0);
ShelfID app_list_id = shelf_model_->items()[app_list_index].id;
DCHECK(app_list_id);
shelf_item_delegate_manager_->SetShelfItemDelegate(app_list_id,
controller.Pass());
shelf_window_watcher_.reset(new ShelfWindowWatcher(
shelf_model_.get(), shelf_item_delegate_manager_.get()));
}
return shelf_delegate_.get();
}
void Shell::SetTouchHudProjectionEnabled(bool enabled) {
if (is_touch_hud_projection_enabled_ == enabled)
return;
is_touch_hud_projection_enabled_ = enabled;
FOR_EACH_OBSERVER(ShellObserver, observers_,
OnTouchHudProjectionToggled(enabled));
}
#if defined(OS_CHROMEOS)
ash::FirstRunHelper* Shell::CreateFirstRunHelper() {
return new ash::FirstRunHelperImpl;
}
void Shell::SetCursorCompositingEnabled(bool enabled) {
display_controller_->cursor_window_controller()->SetCursorCompositingEnabled(
enabled);
native_cursor_manager_->SetNativeCursorEnabled(!enabled);
}
#endif // defined(OS_CHROMEOS)
void Shell::DoInitialWorkspaceAnimation() {
return GetPrimaryRootWindowController()->workspace_controller()->
DoInitialAnimation();
}
////////////////////////////////////////////////////////////////////////////////
// Shell, private:
Shell::Shell(ShellDelegate* delegate)
: target_root_window_(NULL),
scoped_target_root_window_(NULL),
delegate_(delegate),
window_positioner_(new WindowPositioner),
activation_client_(NULL),
#if defined(OS_CHROMEOS)
accelerometer_reader_(new chromeos::AccelerometerReader()),
display_configurator_(new ui::DisplayConfigurator()),
#endif // defined(OS_CHROMEOS)
native_cursor_manager_(new AshNativeCursorManager),
cursor_manager_(
scoped_ptr<::wm::NativeCursorManager>(native_cursor_manager_)),
simulate_modal_window_open_for_testing_(false),
is_touch_hud_projection_enabled_(false) {
DCHECK(delegate_.get());
gpu_support_.reset(delegate_->CreateGPUSupport());
display_manager_.reset(new DisplayManager);
display_controller_.reset(new DisplayController);
user_metrics_recorder_.reset(new UserMetricsRecorder);
#if defined(OS_CHROMEOS)
PowerStatus::Initialize();
#endif
}
Shell::~Shell() {
TRACE_EVENT0("shutdown", "ash::Shell::Destructor");
delegate_->PreShutdown();
views::FocusManagerFactory::Install(NULL);
// Remove the focus from any window. This will prevent overhead and side
// effects (e.g. crashes) from changing focus during shutdown.
// See bug crbug.com/134502.
aura::client::GetFocusClient(GetPrimaryRootWindow())->FocusWindow(NULL);
// Please keep in same order as in Init() because it's easy to miss one.
if (window_modality_controller_)
window_modality_controller_.reset();
#if defined(OS_CHROMEOS)
RemovePreTargetHandler(magnifier_key_scroll_handler_.get());
magnifier_key_scroll_handler_.reset();
RemovePreTargetHandler(speech_feedback_handler_.get());
speech_feedback_handler_.reset();
#endif
RemovePreTargetHandler(user_activity_detector_.get());
RemovePreTargetHandler(overlay_filter_.get());
RemovePreTargetHandler(input_method_filter_.get());
RemovePreTargetHandler(accelerator_filter_.get());
RemovePreTargetHandler(event_transformation_handler_.get());
RemovePreTargetHandler(toplevel_window_event_handler_.get());
RemovePostTargetHandler(toplevel_window_event_handler_.get());
RemovePreTargetHandler(system_gesture_filter_.get());
RemovePreTargetHandler(keyboard_metrics_filter_.get());
RemovePreTargetHandler(mouse_cursor_filter_.get());
// TooltipController is deleted with the Shell so removing its references.
RemovePreTargetHandler(tooltip_controller_.get());
// Destroy the virtual keyboard controller before the maximize mode controller
  // since the latter's destructor triggers events that the former is listening
// to but no longer cares about.
#if defined(OS_CHROMEOS)
virtual_keyboard_controller_.reset();
#endif
// Destroy maximize mode controller early on since it has some observers which
// need to be removed.
maximize_mode_controller_->Shutdown();
maximize_mode_controller_.reset();
// AppList needs to be released before shelf layout manager, which is
// destroyed with shelf container in the loop below. However, app list
// container is now on top of shelf container and released after it.
// TODO(xiyuan): Move it back when app list container is no longer needed.
app_list_controller_.reset();
#if defined(OS_CHROMEOS)
// Destroy the LastWindowClosedLogoutReminder before the
// LogoutConfirmationController.
last_window_closed_logout_reminder_.reset();
// Destroy the LogoutConfirmationController before the SystemTrayDelegate.
logout_confirmation_controller_.reset();
#endif
// Destroy SystemTrayDelegate before destroying the status area(s).
system_tray_delegate_->Shutdown();
system_tray_delegate_.reset();
locale_notification_controller_.reset();
// Drag-and-drop must be canceled prior to close all windows.
drag_drop_controller_.reset();
// Controllers who have WindowObserver added must be deleted
// before |display_controller_| is deleted.
#if defined(OS_CHROMEOS)
// VideoActivityNotifier must be deleted before |video_detector_| is
// deleted because it's observing video activity through
// VideoDetectorObserver interface.
video_activity_notifier_.reset();
#endif // defined(OS_CHROMEOS)
video_detector_.reset();
high_contrast_controller_.reset();
shadow_controller_.reset();
resize_shadow_controller_.reset();
window_cycle_controller_.reset();
window_selector_controller_.reset();
mru_window_tracker_.reset();
  // |shelf_window_watcher_| has a weak pointer to |shelf_model_|
// and has window observers.
shelf_window_watcher_.reset();
// Destroy all child windows including widgets.
display_controller_->CloseChildWindows();
display_controller_->CloseMirroringDisplay();
// Chrome implementation of shelf delegate depends on FocusClient,
// so must be deleted before |focus_client_|.
shelf_delegate_.reset();
focus_client_.reset();
// Destroy SystemTrayNotifier after destroying SystemTray as TrayItems
// needs to remove observers from it.
system_tray_notifier_.reset();
// These need a valid Shell instance to clean up properly, so explicitly
// delete them before invalidating the instance.
// Alphabetical. TODO(oshima): sort.
magnification_controller_.reset();
partial_magnification_controller_.reset();
tooltip_controller_.reset();
event_client_.reset();
nested_accelerator_controller_.reset();
toplevel_window_event_handler_.reset();
visibility_controller_.reset();
// |shelf_item_delegate_manager_| observes |shelf_model_|. It must be
// destroyed before |shelf_model_| is destroyed.
shelf_item_delegate_manager_.reset();
shelf_model_.reset();
power_button_controller_.reset();
lock_state_controller_.reset();
#if defined(OS_CHROMEOS)
resolution_notification_controller_.reset();
#endif
desktop_background_controller_.reset();
mouse_cursor_filter_.reset();
#if defined(OS_CHROMEOS)
touch_transformer_controller_.reset();
#endif // defined(OS_CHROMEOS)
// This also deletes all RootWindows. Note that we invoke Shutdown() on
// DisplayController before resetting |display_controller_|, since destruction
// of its owned RootWindowControllers relies on the value.
display_manager_->CreateScreenForShutdown();
display_controller_->Shutdown();
display_controller_.reset();
screen_position_controller_.reset();
accessibility_delegate_.reset();
new_window_delegate_.reset();
media_delegate_.reset();
keyboard::KeyboardController::ResetInstance(NULL);
#if defined(OS_CHROMEOS)
if (display_change_observer_)
display_configurator_->RemoveObserver(display_change_observer_.get());
if (display_configurator_animation_)
display_configurator_->RemoveObserver(
display_configurator_animation_.get());
if (display_error_observer_)
display_configurator_->RemoveObserver(display_error_observer_.get());
if (projecting_observer_)
display_configurator_->RemoveObserver(projecting_observer_.get());
display_change_observer_.reset();
PowerStatus::Shutdown();
// Ensure that DBusThreadManager outlives this Shell.
DCHECK(chromeos::DBusThreadManager::IsInitialized());
#endif
DCHECK(instance_ == this);
instance_ = NULL;
}
void Shell::Init(const ShellInitParams& init_params) {
delegate_->PreInit();
bool display_initialized = display_manager_->InitFromCommandLine();
#if defined(OS_CHROMEOS)
display_configurator_->Init(!gpu_support_->IsPanelFittingDisabled());
display_configurator_animation_.reset(new DisplayConfiguratorAnimation());
display_configurator_->AddObserver(display_configurator_animation_.get());
// The DBusThreadManager must outlive this Shell. See the DCHECK in ~Shell.
chromeos::DBusThreadManager* dbus_thread_manager =
chromeos::DBusThreadManager::Get();
projecting_observer_.reset(
new ProjectingObserver(dbus_thread_manager->GetPowerManagerClient()));
display_configurator_->AddObserver(projecting_observer_.get());
if (!display_initialized && base::SysInfo::IsRunningOnChromeOS()) {
display_change_observer_.reset(new DisplayChangeObserver);
    // Register |display_change_observer_| first so that the rest of the
    // observers get invoked after the root windows are configured.
display_configurator_->AddObserver(display_change_observer_.get());
display_error_observer_.reset(new DisplayErrorObserver());
display_configurator_->AddObserver(display_error_observer_.get());
display_configurator_->set_state_controller(display_change_observer_.get());
display_configurator_->set_mirroring_controller(display_manager_.get());
display_configurator_->ForceInitialConfigure(
delegate_->IsFirstRunAfterBoot() ? kChromeOsBootColor : 0);
display_initialized = true;
}
#endif // defined(OS_CHROMEOS)
if (!display_initialized)
display_manager_->InitDefaultDisplay();
display_manager_->RefreshFontParams();
// Install the custom factory first so that views::FocusManagers for Tray,
// Shelf, and WallPaper could be created by the factory.
views::FocusManagerFactory::Install(new AshFocusManagerFactory);
aura::Env::CreateInstance(true);
aura::Env::GetInstance()->set_context_factory(init_params.context_factory);
// The WindowModalityController needs to be at the front of the input event
// pretarget handler list to ensure that it processes input events when modal
// windows are active.
window_modality_controller_.reset(
new ::wm::WindowModalityController(this));
env_filter_.reset(new ::wm::CompoundEventFilter);
AddPreTargetHandler(env_filter_.get());
::wm::FocusController* focus_controller =
new ::wm::FocusController(new wm::AshFocusRules);
focus_client_.reset(focus_controller);
activation_client_ = focus_controller;
activation_client_->AddObserver(this);
focus_cycler_.reset(new FocusCycler());
screen_position_controller_.reset(new ScreenPositionController);
display_controller_->Start();
display_controller_->CreatePrimaryHost(
ShellInitParamsToAshWindowTreeHostInitParams(init_params));
aura::Window* root_window = display_controller_->GetPrimaryRootWindow();
target_root_window_ = root_window;
#if defined(OS_CHROMEOS)
resolution_notification_controller_.reset(
new ResolutionNotificationController);
#endif
cursor_manager_.SetDisplay(GetScreen()->GetPrimaryDisplay());
nested_accelerator_controller_.reset(
new ::wm::NestedAcceleratorController(new NestedAcceleratorDelegate));
accelerator_controller_.reset(new AcceleratorController);
maximize_mode_controller_.reset(new MaximizeModeController());
#if defined(OS_CHROMEOS)
magnifier_key_scroll_handler_ = MagnifierKeyScroller::CreateHandler();
AddPreTargetHandler(magnifier_key_scroll_handler_.get());
speech_feedback_handler_ = SpokenFeedbackToggler::CreateHandler();
AddPreTargetHandler(speech_feedback_handler_.get());
#endif
// The order in which event filters are added is significant.
// ui::UserActivityDetector passes events to observers, so let them get
// rewritten first.
user_activity_detector_.reset(new ui::UserActivityDetector);
AddPreTargetHandler(user_activity_detector_.get());
overlay_filter_.reset(new OverlayEventFilter);
AddPreTargetHandler(overlay_filter_.get());
AddShellObserver(overlay_filter_.get());
input_method_filter_.reset(new ::wm::InputMethodEventFilter(
root_window->GetHost()->GetAcceleratedWidget()));
AddPreTargetHandler(input_method_filter_.get());
accelerator_filter_.reset(new ::wm::AcceleratorFilter(
scoped_ptr< ::wm::AcceleratorDelegate>(new AcceleratorDelegate).Pass(),
accelerator_controller_->accelerator_history()));
AddPreTargetHandler(accelerator_filter_.get());
event_transformation_handler_.reset(new EventTransformationHandler);
AddPreTargetHandler(event_transformation_handler_.get());
toplevel_window_event_handler_.reset(new ToplevelWindowEventHandler);
system_gesture_filter_.reset(new SystemGestureEventFilter);
AddPreTargetHandler(system_gesture_filter_.get());
keyboard_metrics_filter_.reset(new KeyboardUMAEventFilter);
AddPreTargetHandler(keyboard_metrics_filter_.get());
// The keyboard system must be initialized before the RootWindowController is
// created.
#if defined(OS_CHROMEOS)
keyboard::InitializeKeyboard();
#endif
#if defined(OS_CHROMEOS)
sticky_keys_controller_.reset(new StickyKeysController);
#endif
lock_state_controller_.reset(new LockStateController);
power_button_controller_.reset(new PowerButtonController(
lock_state_controller_.get()));
#if defined(OS_CHROMEOS)
// Pass the initial display state to PowerButtonController.
power_button_controller_->OnDisplayModeChanged(
display_configurator_->cached_displays());
#endif
AddShellObserver(lock_state_controller_.get());
drag_drop_controller_.reset(new DragDropController);
mouse_cursor_filter_.reset(new MouseCursorEventFilter());
PrependPreTargetHandler(mouse_cursor_filter_.get());
// Create Controllers that may need root window.
// TODO(oshima): Move as many controllers before creating
// RootWindowController as possible.
visibility_controller_.reset(new AshVisibilityController);
magnification_controller_.reset(
MagnificationController::CreateInstance());
mru_window_tracker_.reset(new MruWindowTracker(activation_client_));
partial_magnification_controller_.reset(
new PartialMagnificationController());
autoclick_controller_.reset(AutoclickController::CreateInstance());
high_contrast_controller_.reset(new HighContrastController);
video_detector_.reset(new VideoDetector);
window_selector_controller_.reset(new WindowSelectorController());
window_cycle_controller_.reset(new WindowCycleController());
tooltip_controller_.reset(
new views::corewm::TooltipController(
scoped_ptr<views::corewm::Tooltip>(
new views::corewm::TooltipAura(gfx::SCREEN_TYPE_ALTERNATE))));
AddPreTargetHandler(tooltip_controller_.get());
event_client_.reset(new EventClientImpl);
// This controller needs to be set before SetupManagedWindowMode.
desktop_background_controller_.reset(new DesktopBackgroundController());
user_wallpaper_delegate_.reset(delegate_->CreateUserWallpaperDelegate());
session_state_delegate_.reset(delegate_->CreateSessionStateDelegate());
accessibility_delegate_.reset(delegate_->CreateAccessibilityDelegate());
new_window_delegate_.reset(delegate_->CreateNewWindowDelegate());
media_delegate_.reset(delegate_->CreateMediaDelegate());
resize_shadow_controller_.reset(new ResizeShadowController());
shadow_controller_.reset(
new ::wm::ShadowController(activation_client_));
// Create system_tray_notifier_ before the delegate.
system_tray_notifier_.reset(new ash::SystemTrayNotifier());
  // Create system_tray_delegate_ before initializing StatusAreaWidget.
system_tray_delegate_.reset(delegate()->CreateSystemTrayDelegate());
DCHECK(system_tray_delegate_.get());
locale_notification_controller_.reset(new LocaleNotificationController);
// Initialize system_tray_delegate_ after StatusAreaWidget is created.
system_tray_delegate_->Initialize();
#if defined(OS_CHROMEOS)
// Create the LogoutConfirmationController after the SystemTrayDelegate.
logout_confirmation_controller_.reset(new LogoutConfirmationController(
base::Bind(&SystemTrayDelegate::SignOut,
base::Unretained(system_tray_delegate_.get()))));
// Create TouchTransformerController before DisplayController::InitDisplays()
// since TouchTransformerController listens on
// DisplayController::Observer::OnDisplaysInitialized().
touch_transformer_controller_.reset(new TouchTransformerController());
#endif // defined(OS_CHROMEOS)
display_controller_->InitDisplays();
#if defined(OS_CHROMEOS)
// Needs to be created after InitDisplays() since it may cause the virtual
// keyboard to be deployed.
virtual_keyboard_controller_.reset(new VirtualKeyboardController);
#endif // defined(OS_CHROMEOS)
  // It needs to be created after the RootWindowController has been created
  // (which calls OnWindowResized); otherwise the widget will not paint when
  // restoring after a browser crash. Also it needs to be created after
  // InitDisplays() to initialize the wallpapers in the correct size.
user_wallpaper_delegate_->InitializeWallpaper();
if (initially_hide_cursor_)
cursor_manager_.HideCursor();
cursor_manager_.SetCursor(ui::kCursorPointer);
#if defined(OS_CHROMEOS)
// Set accelerator controller delegates.
accelerator_controller_->SetBrightnessControlDelegate(
scoped_ptr<ash::BrightnessControlDelegate>(
new ash::system::BrightnessControllerChromeos).Pass());
power_event_observer_.reset(new PowerEventObserver());
user_activity_notifier_.reset(
new ui::UserActivityPowerManagerNotifier(user_activity_detector_.get()));
video_activity_notifier_.reset(
new VideoActivityNotifier(video_detector_.get()));
bluetooth_notification_controller_.reset(new BluetoothNotificationController);
last_window_closed_logout_reminder_.reset(new LastWindowClosedLogoutReminder);
screen_orientation_delegate_.reset(new ScreenOrientationDelegate());
#endif
// The compositor thread and main message loop have to be running in
// order to create mirror window. Run it after the main message loop
// is started.
display_manager_->CreateMirrorWindowAsyncIfAny();
}
void Shell::InitKeyboard() {
if (keyboard::IsKeyboardEnabled()) {
if (keyboard::KeyboardController::GetInstance()) {
RootWindowControllerList controllers = GetAllRootWindowControllers();
for (RootWindowControllerList::iterator iter = controllers.begin();
iter != controllers.end(); ++iter) {
(*iter)->DeactivateKeyboard(
keyboard::KeyboardController::GetInstance());
}
}
keyboard::KeyboardControllerProxy* proxy =
delegate_->CreateKeyboardControllerProxy();
keyboard::KeyboardController::ResetInstance(
new keyboard::KeyboardController(proxy));
}
}
void Shell::InitRootWindow(aura::Window* root_window) {
DCHECK(activation_client_);
DCHECK(visibility_controller_.get());
DCHECK(drag_drop_controller_.get());
aura::client::SetFocusClient(root_window, focus_client_.get());
input_method_filter_->SetInputMethodPropertyInRootWindow(root_window);
aura::client::SetActivationClient(root_window, activation_client_);
::wm::FocusController* focus_controller =
static_cast< ::wm::FocusController*>(activation_client_);
root_window->AddPreTargetHandler(focus_controller);
aura::client::SetVisibilityClient(root_window, visibility_controller_.get());
aura::client::SetDragDropClient(root_window, drag_drop_controller_.get());
aura::client::SetScreenPositionClient(root_window,
screen_position_controller_.get());
aura::client::SetCursorClient(root_window, &cursor_manager_);
aura::client::SetTooltipClient(root_window, tooltip_controller_.get());
aura::client::SetEventClient(root_window, event_client_.get());
aura::client::SetWindowMoveClient(root_window,
toplevel_window_event_handler_.get());
root_window->AddPreTargetHandler(toplevel_window_event_handler_.get());
root_window->AddPostTargetHandler(toplevel_window_event_handler_.get());
if (nested_accelerator_controller_) {
aura::client::SetDispatcherClient(root_window,
nested_accelerator_controller_.get());
}
}
bool Shell::CanWindowReceiveEvents(aura::Window* window) {
RootWindowControllerList controllers = GetAllRootWindowControllers();
for (RootWindowControllerList::iterator iter = controllers.begin();
iter != controllers.end(); ++iter) {
SystemModalContainerLayoutManager* layout_manager =
(*iter)->GetSystemModalLayoutManager(window);
if (layout_manager && layout_manager->CanWindowReceiveEvents(window))
return true;
// Allow events to fall through to the virtual keyboard even if displaying
// a system modal dialog.
if ((*iter)->IsVirtualKeyboardWindow(window))
return true;
}
return false;
}
////////////////////////////////////////////////////////////////////////////////
// Shell, ui::EventTarget overrides:
bool Shell::CanAcceptEvent(const ui::Event& event) {
return true;
}
ui::EventTarget* Shell::GetParentTarget() {
return aura::Env::GetInstance();
}
scoped_ptr<ui::EventTargetIterator> Shell::GetChildIterator() const {
return scoped_ptr<ui::EventTargetIterator>();
}
ui::EventTargeter* Shell::GetEventTargeter() {
NOTREACHED();
return NULL;
}
void Shell::OnEvent(ui::Event* event) {
}
////////////////////////////////////////////////////////////////////////////////
// Shell, aura::client::ActivationChangeObserver implementation:
void Shell::OnWindowActivated(aura::Window* gained_active,
aura::Window* lost_active) {
if (gained_active)
target_root_window_ = gained_active->GetRootWindow();
}
} // namespace ash
| mohamed--abdel-maksoud/chromium.src | ash/shell.cc | C++ | bsd-3-clause | 43,179 |
/* Main.java -- a standalone viewer for Java applets
Copyright (C) 2003, 2004, 2006 Free Software Foundation, Inc.
This file is part of GNU Classpath.
GNU Classpath is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
GNU Classpath is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with GNU Classpath; see the file COPYING. If not, write to the
Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA.
Linking this library statically or dynamically with other modules is
making a combined work based on this library. Thus, the terms and
conditions of the GNU General Public License cover the whole
combination.
As a special exception, the copyright holders of this library give you
permission to link this library with independent modules to produce an
executable, regardless of the license terms of these independent
modules, and to copy and distribute the resulting executable under
terms of your choice, provided that you also meet, for each linked
independent module, the terms and conditions of the license of that
module. An independent module is a module which is not derived from
or based on this library. If you modify this library, you may extend
this exception to your version of the library, but you are not
obligated to do so. If you do not wish to do so, delete this
exception statement from your version. */
package gnu.classpath.tools.appletviewer;
import gnu.classpath.tools.getopt.ClasspathToolParser;
import gnu.classpath.tools.getopt.Option;
import gnu.classpath.tools.getopt.OptionException;
import gnu.classpath.tools.getopt.OptionGroup;
import gnu.classpath.tools.getopt.Parser;
import java.applet.Applet;
import java.awt.Dimension;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.ResourceBundle;
class Main
{
/**
* The localized strings are kept in a separate file.
*/
public static final ResourceBundle messages = ResourceBundle.getBundle
("gnu.classpath.tools.appletviewer.MessagesBundle");
private static HashMap classLoaderCache = new HashMap();
private static ClassLoader getClassLoader(URL codebase, ArrayList archives)
{
    // Load a new class loader each time. It is possible that more than
    // one applet will be loaded with different archives.
AppletClassLoader loader = new AppletClassLoader(codebase, archives);
classLoaderCache.put(codebase, loader);
return loader;
}
private static String code = null;
private static String codebase = null;
private static String archive = null;
private static List parameters = new ArrayList();
private static Dimension dimensions = new Dimension(-1, -1);
private static String pipeInName = null;
private static String pipeOutName = null;
private static boolean pluginMode = false;
private static Parser parser = null;
static Applet createApplet(AppletTag tag)
{
Applet applet = null;
try
{
ClassLoader loader = getClassLoader(tag.prependCodeBase(""),
tag.getArchives());
String code = tag.getCode();
if (code.endsWith(".class"))
code = code.substring(0, code.length() - 6).replace('/', '.');
Class c = loader.loadClass(code);
applet = (Applet) c.newInstance();
}
catch (Exception e)
{
e.printStackTrace();
}
if (applet == null)
applet = new ErrorApplet("Error loading applet");
return applet;
}
protected static boolean verbose;
/**
* The main method starting the applet viewer.
*
* @param args the arguments given on the command line.
*
* @exception IOException if an error occurs.
*/
public static void main(String[] args) throws IOException
{
parser = new ClasspathToolParser("appletviewer", true);
parser.setHeader("usage: appletviewer [OPTION] -code CODE | URL...");
OptionGroup attributeGroup = new OptionGroup("Applet tag options");
attributeGroup.add(new Option("code", Main.messages.getString
("gcjwebplugin.code_description"),
"CODE")
{
public void parsed(String argument) throws OptionException
{
code = argument;
}
});
attributeGroup.add(new Option("codebase", Main.messages.getString
("gcjwebplugin.codebase_description"),
"CODEBASE")
{
public void parsed(String argument) throws OptionException
{
codebase = argument;
}
});
attributeGroup.add(new Option("archive", Main.messages.getString
("gcjwebplugin.archive_description"),
"ARCHIVE")
{
public void parsed(String argument) throws OptionException
{
archive = argument;
}
});
attributeGroup.add(new Option("width", Main.messages.getString
("gcjwebplugin.width_description"),
"WIDTH")
{
public void parsed(String argument) throws OptionException
{
dimensions.width = Integer.parseInt(argument);
}
});
attributeGroup.add(new Option("height", Main.messages.getString
("gcjwebplugin.height_description"),
"HEIGHT")
{
public void parsed(String argument) throws OptionException
{
dimensions.height = Integer.parseInt(argument);
}
});
attributeGroup.add(new Option("param", Main.messages.getString
("gcjwebplugin.param_description"),
"NAME,VALUE")
{
public void parsed(String argument) throws OptionException
{
parameters.add(argument);
}
});
OptionGroup pluginGroup = new OptionGroup("Plugin option");
pluginGroup.add(new Option("plugin", Main.messages.getString
("gcjwebplugin.plugin_description"),
"INPUT,OUTPUT")
{
public void parsed(String argument) throws OptionException
{
pluginMode = true;
int comma = argument.indexOf(',');
pipeInName = argument.substring(0, comma);
pipeOutName = argument.substring(comma + 1);
}
});
OptionGroup debuggingGroup = new OptionGroup("Debugging option");
debuggingGroup.add(new Option("verbose", Main.messages.getString
("gcjwebplugin.verbose_description"),
(String) null)
{
public void parsed(String argument) throws OptionException
{
verbose = true;
}
});
OptionGroup compatibilityGroup = new OptionGroup("Compatibility options");
compatibilityGroup.add(new Option("debug", Main.messages.getString
("gcjwebplugin.debug_description"),
(String) null)
{
public void parsed(String argument) throws OptionException
{
// Currently ignored.
}
});
compatibilityGroup.add(new Option("encoding", Main.messages.getString
("gcjwebplugin.encoding_description"),
"CHARSET")
{
public void parsed(String argument) throws OptionException
{
// FIXME: We should probably be using
// java.nio.charset.CharsetDecoder to handle the encoding. What
// is the status of Classpath's implementation?
}
});
parser.add(attributeGroup);
parser.add(pluginGroup);
parser.add(debuggingGroup);
parser.add(compatibilityGroup);
String[] urls = parser.parse(args);
// Print arguments.
printArguments(args);
args = urls;
if (dimensions.height < 0)
dimensions.height = 200;
if (dimensions.width < 0)
dimensions.width = (int) (1.6 * dimensions.height);
//System.setSecurityManager(new AppletSecurityManager(pluginMode));
if (pluginMode)
{
InputStream in;
OutputStream out;
in = new FileInputStream(pipeInName);
out = new FileOutputStream(pipeOutName);
PluginAppletViewer.start(in, out);
}
else
{
if (code == null)
{
// The --code option wasn't given and there are no URL
// arguments so we have nothing to work with.
if (args.length == 0)
{
System.err.println(Main.messages.getString("gcjwebplugin.no_input_files"));
System.exit(1);
}
// Create a standalone appletviewer from a list of URLs.
new StandaloneAppletViewer(args);
}
else
{
// Create a standalone appletviewer from the --code
// option.
new StandaloneAppletViewer(code, codebase, archive, parameters, dimensions);
}
}
}
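  /* A minimal invocation sketch (illustrative only; the exact option syntax is whatever
   * the getopt parser configured above accepts):
   *
   *   appletviewer -code MyApplet.class -width 400 -height 300 -param color,red
   *   appletviewer http://www.example.org/applet-page.html
   *
   * The first form builds the applet tag from the command line options; the second parses
   * applet tags from the given URLs.
   */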
static void printArguments(String[] args)
{
if (verbose)
{
System.out.println("raw arguments:");
for (int i = 0; i < args.length; i++)
System.out.println(" " + args[i]);
}
}
}
| shaotuanchen/sunflower_exp | tools/source/gcc-4.2.4/libjava/classpath/tools/gnu/classpath/tools/appletviewer/Main.java | Java | bsd-3-clause | 9,974 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/audio/scoped_loop_observer.h"
#include "base/bind.h"
#include "base/synchronization/waitable_event.h"
namespace media {
ScopedLoopObserver::ScopedLoopObserver(
const scoped_refptr<base::MessageLoopProxy>& loop)
: loop_(loop) {
ObserveLoopDestruction(true, NULL);
}
ScopedLoopObserver::~ScopedLoopObserver() {
ObserveLoopDestruction(false, NULL);
}
void ScopedLoopObserver::ObserveLoopDestruction(bool enable,
base::WaitableEvent* done) {
// Note: |done| may be NULL.
if (loop_->BelongsToCurrentThread()) {
MessageLoop* loop = MessageLoop::current();
if (enable) {
loop->AddDestructionObserver(this);
} else {
loop->RemoveDestructionObserver(this);
}
} else {
base::WaitableEvent event(false, false);
if (loop_->PostTask(FROM_HERE,
base::Bind(&ScopedLoopObserver::ObserveLoopDestruction,
base::Unretained(this), enable, &event))) {
event.Wait();
} else {
// The message loop's thread has already terminated, so no need to wait.
}
}
if (done)
done->Signal();
}
} // namespace media.
| timopulkkinen/BubbleFish | media/audio/scoped_loop_observer.cc | C++ | bsd-3-clause | 1,340 |
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "skia/ext/fontmgr_default.h"
#include "third_party/skia/include/core/SkFontMgr.h"
namespace {
SkDEBUGCODE(bool g_factory_called;)
// This is a purposefully leaky pointer that has ownership of the FontMgr.
SkFontMgr* g_fontmgr_override = nullptr;
} // namespace
namespace skia {
void OverrideDefaultSkFontMgr(sk_sp<SkFontMgr> fontmgr) {
SkASSERT(!g_factory_called);
SkSafeUnref(g_fontmgr_override);
g_fontmgr_override = fontmgr.release();
}
} // namespace skia
SK_API sk_sp<SkFontMgr> SkFontMgr::Factory() {
SkDEBUGCODE(g_factory_called = true;);
return g_fontmgr_override ? sk_ref_sp(g_fontmgr_override)
: skia::CreateDefaultSkFontMgr();
} | nwjs/chromium.src | skia/ext/fontmgr_default.cc | C++ | bsd-3-clause | 873 |
// Copyright © 2010-2015 The CefSharp Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style license that can be found in the LICENSE file.
using System.IO;
namespace CefSharp
{
//TODO: Eval naming for this interface, not happy with this name
public interface IResourceHandler
{
/// <summary>
/// Processes request asynchronously.
/// </summary>
/// <param name="request">The request object.</param>
/// <param name="callback">The callback used to Continue or Cancel the request (async).</param>
/// <returns>true if the request is handled, false otherwise.</returns>
bool ProcessRequestAsync(IRequest request, ICallback callback);
        /// <summary>
        /// Populates the response (mime type, status code, headers) and returns the response body as a stream.
        /// Set <paramref name="responseLength"/> to the length of the stream, or -1 if the length is not known,
        /// and optionally set <paramref name="redirectUrl"/> to redirect the request to another URL.
        /// </summary>
        Stream GetResponse(IResponse response, out long responseLength, out string redirectUrl);
}
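    // A minimal sketch of an implementation (illustrative only; the class name, the byte
    // payload and the response properties set below are assumptions, not part of this
    // interface):
    //
    //   public class StaticTextResourceHandler : IResourceHandler
    //   {
    //       private readonly byte[] bytes = System.Text.Encoding.UTF8.GetBytes("Hello");
    //
    //       public bool ProcessRequestAsync(IRequest request, ICallback callback)
    //       {
    //           callback.Continue(); // the response is available immediately
    //           return true;         // this handler will service the request
    //       }
    //
    //       public Stream GetResponse(IResponse response, out long responseLength, out string redirectUrl)
    //       {
    //           redirectUrl = null;
    //           responseLength = bytes.Length;
    //           response.MimeType = "text/plain";
    //           return new System.IO.MemoryStream(bytes);
    //       }
    //   }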
}
| joshvera/CefSharp | CefSharp/IResourceHandler.cs | C# | bsd-3-clause | 844 |
from __future__ import absolute_import
from ..model import Model
from ..core.properties import (Any, Dict, String)
class ImageSource(Model):
""" A base class for all image source types. """
_args = ('url', 'extra_url_vars')
url = String(default="", help="""
tile service url (example: http://c.tile.openstreetmap.org/{Z}/{X}/{Y}.png)
""")
extra_url_vars = Dict(String, Any, help="""
A dictionary that maps url variable template keys to values.
These variables are useful for parts of tile urls which do not change from tile to tile (e.g. server host name, or layer name).
""")
| phobson/bokeh | bokeh/models/images.py | Python | bsd-3-clause | 617 |
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chromecast.shell;
import android.content.Context;
import android.content.Intent;
import android.graphics.Color;
import android.net.Uri;
import android.os.Bundle;
import android.os.IBinder;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.accessibility.AccessibilityNodeProvider;
import android.widget.FrameLayout;
import androidx.annotation.Nullable;
import org.chromium.base.Log;
import org.chromium.chromecast.base.CastSwitches;
/**
* View for displaying a WebContents in CastShell.
*
* <p>Intended to be used with {@link android.app.Presentation}.
*
* <p>
* Typically, this class is controlled by CastContentWindowAndroid through
* CastWebContentsSurfaceHelper. If the CastContentWindowAndroid is destroyed,
* CastWebContentsView should be removed from the activity holding it.
* Similarily, if the view is removed from a activity or the activity holding
* it is destroyed, CastContentWindowAndroid should be notified by intent.
*/
public class CastWebContentsView extends FrameLayout {
private static final String TAG = "CastWebContentV";
private CastWebContentsSurfaceHelper mSurfaceHelper;
public CastWebContentsView(Context context) {
super(context);
initView();
}
private void initView() {
FrameLayout.LayoutParams matchParent = new FrameLayout.LayoutParams(
FrameLayout.LayoutParams.MATCH_PARENT, FrameLayout.LayoutParams.MATCH_PARENT);
addView(LayoutInflater.from(getContext())
.inflate(R.layout.cast_web_contents_activity, null),
matchParent);
        // Adds a transparent view on top to allow a highlight rectangle to be drawn when
// accessibility is turned on.
addView(new View(getContext()), matchParent);
}
public void onStart(Bundle startArgumentsBundle) {
Log.d(TAG, "onStart");
if (mSurfaceHelper != null) {
return;
}
mSurfaceHelper = new CastWebContentsSurfaceHelper(
CastWebContentsScopes.onLayoutView(getContext(),
findViewById(R.id.web_contents_container),
CastSwitches.getSwitchValueColor(
CastSwitches.CAST_APP_BACKGROUND_COLOR, Color.BLACK),
this ::getHostWindowToken),
(Uri uri) -> sendIntentSync(CastWebContentsIntentUtils.onWebContentStopped(uri)));
CastWebContentsSurfaceHelper.StartParams params =
CastWebContentsSurfaceHelper.StartParams.fromBundle(startArgumentsBundle);
if (params == null) return;
mSurfaceHelper.onNewStartParams(params);
}
public void onResume() {
Log.d(TAG, "onResume");
}
public void onPause() {
Log.d(TAG, "onPause");
}
public void onStop() {
Log.d(TAG, "onStop");
if (mSurfaceHelper != null) {
mSurfaceHelper.onDestroy();
}
}
@Nullable
protected IBinder getHostWindowToken() {
return getWindowToken();
}
private void sendIntentSync(Intent in) {
CastWebContentsIntentUtils.getLocalBroadcastManager().sendBroadcastSync(in);
}
@Override
public void setAccessibilityDelegate(AccessibilityDelegate delegate) {
View contentView = getContentView();
if (contentView != null) {
contentView.setAccessibilityDelegate(delegate);
} else {
Log.w(TAG, "Content view is null!");
}
}
@Override
public boolean onHoverEvent(MotionEvent event) {
View contentView = getContentView();
if (contentView != null) {
return contentView.onHoverEvent(event);
} else {
Log.w(TAG, "Content view is null!");
return false;
}
}
public AccessibilityNodeProvider getWebContentsAccessibilityNodeProvider() {
View contentView = getContentView();
if (contentView != null) {
return contentView.getAccessibilityNodeProvider();
} else {
Log.w(TAG, "Content view is null! Returns a null AccessibilityNodeProvider.");
return null;
}
}
private View getContentView() {
return findViewWithTag(CastWebContentsScopes.VIEW_TAG_CONTENT_VIEW);
}
}
| ric2b/Vivaldi-browser | chromium/chromecast/browser/android/apk/src/org/chromium/chromecast/shell/CastWebContentsView.java | Java | bsd-3-clause | 4,548 |
<?php
class CM_Paging_StreamSubscribe_User extends CM_Paging_StreamSubscribe_Abstract {
/**
* @param CM_Model_User $user
*/
public function __construct(CM_Model_User $user) {
$source = new CM_PagingSource_Sql('`id`', 'cm_stream_subscribe', '`userId` = ' . $user->getId());
parent::__construct($source);
}
}
| alexispeter/CM | library/CM/Paging/StreamSubscribe/User.php | PHP | mit | 347 |
require 'fastlane_core'
require 'credentials_manager'
module Snapshot
class Options
def self.available_options
output_directory = (File.directory?("fastlane") ? "fastlane/screenshots" : "screenshots")
@options ||= [
FastlaneCore::ConfigItem.new(key: :workspace,
short_option: "-w",
env_name: "SNAPSHOT_WORKSPACE",
optional: true,
description: "Path the workspace file",
verify_block: proc do |value|
v = File.expand_path(value.to_s)
UI.user_error!("Workspace file not found at path '#{v}'") unless File.exist?(v)
UI.user_error!("Workspace file invalid") unless File.directory?(v)
UI.user_error!("Workspace file is not a workspace, must end with .xcworkspace") unless v.include?(".xcworkspace")
end),
FastlaneCore::ConfigItem.new(key: :project,
short_option: "-p",
optional: true,
env_name: "SNAPSHOT_PROJECT",
description: "Path the project file",
verify_block: proc do |value|
v = File.expand_path(value.to_s)
UI.user_error!("Project file not found at path '#{v}'") unless File.exist?(v)
UI.user_error!("Project file invalid") unless File.directory?(v)
UI.user_error!("Project file is not a project file, must end with .xcodeproj") unless v.include?(".xcodeproj")
end),
FastlaneCore::ConfigItem.new(key: :xcargs,
short_option: "-X",
env_name: "SNAPSHOT_XCARGS",
description: "Pass additional arguments to xcodebuild for the test phase. Be sure to quote the setting names and values e.g. OTHER_LDFLAGS=\"-ObjC -lstdc++\"",
optional: true,
type: :shell_string),
FastlaneCore::ConfigItem.new(key: :devices,
description: "A list of devices you want to take the screenshots from",
short_option: "-d",
type: Array,
optional: true,
verify_block: proc do |value|
available = FastlaneCore::DeviceManager.simulators
value.each do |current|
device = current.strip
unless available.any? { |d| d.name.strip == device } || device == "Mac"
UI.user_error!("Device '#{device}' not in list of available simulators '#{available.join(', ')}'")
end
end
end),
FastlaneCore::ConfigItem.new(key: :languages,
description: "A list of languages which should be used",
short_option: "-g",
type: Array,
default_value: ['en-US']),
FastlaneCore::ConfigItem.new(key: :launch_arguments,
env_name: 'SNAPSHOT_LAUNCH_ARGUMENTS',
description: "A list of launch arguments which should be used",
short_option: "-m",
type: Array,
default_value: ['']),
FastlaneCore::ConfigItem.new(key: :output_directory,
short_option: "-o",
env_name: "SNAPSHOT_OUTPUT_DIRECTORY",
description: "The directory where to store the screenshots",
default_value: output_directory),
FastlaneCore::ConfigItem.new(key: :output_simulator_logs,
env_name: "SNAPSHOT_OUTPUT_SIMULATOR_LOGS",
description: "If the logs generated by the app (e.g. using NSLog, perror, etc.) in the Simulator should be written to the output_directory",
type: TrueClass,
default_value: false,
optional: true),
FastlaneCore::ConfigItem.new(key: :ios_version,
description: "By default, the latest version should be used automatically. If you want to change it, do it here",
short_option: "-i",
optional: true),
FastlaneCore::ConfigItem.new(key: :skip_open_summary,
env_name: 'SNAPSHOT_SKIP_OPEN_SUMMARY',
description: "Don't open the HTML summary after running _snapshot_",
default_value: false,
is_string: false),
FastlaneCore::ConfigItem.new(key: :skip_helper_version_check,
env_name: 'SNAPSHOT_SKIP_SKIP_HELPER_VERSION_CHECK',
description: "Do not check for most recent SnapshotHelper code",
default_value: false,
is_string: false),
FastlaneCore::ConfigItem.new(key: :clear_previous_screenshots,
env_name: 'SNAPSHOT_CLEAR_PREVIOUS_SCREENSHOTS',
description: "Enabling this option will automatically clear previously generated screenshots before running snapshot",
default_value: false,
is_string: false),
FastlaneCore::ConfigItem.new(key: :reinstall_app,
env_name: 'SNAPSHOT_REINSTALL_APP',
description: "Enabling this option will automatically uninstall the application before running it",
default_value: false,
is_string: false),
FastlaneCore::ConfigItem.new(key: :erase_simulator,
env_name: 'SNAPSHOT_ERASE_SIMULATOR',
description: "Enabling this option will automatically erase the simulator before running the application",
default_value: false,
is_string: false),
FastlaneCore::ConfigItem.new(key: :localize_simulator,
env_name: 'SNAPSHOT_LOCALIZE_SIMULATOR',
description: "Enabling this option will configure the Simulator's system language",
default_value: false,
is_string: false),
FastlaneCore::ConfigItem.new(key: :app_identifier,
env_name: 'SNAPSHOT_APP_IDENTIFIER',
short_option: "-a",
optional: true,
description: "The bundle identifier of the app to uninstall (only needed when enabling reinstall_app)",
default_value: ENV["SNAPSHOT_APP_IDENTITIFER"] || CredentialsManager::AppfileConfig.try_fetch_value(:app_identifier)),
FastlaneCore::ConfigItem.new(key: :add_photos,
env_name: 'SNAPSHOT_PHOTOS',
short_option: "-j",
description: "A list of photos that should be added to the simulator before running the application",
type: Array,
optional: true),
FastlaneCore::ConfigItem.new(key: :add_videos,
env_name: 'SNAPSHOT_VIDEOS',
short_option: "-u",
description: "A list of videos that should be added to the simulator before running the application",
type: Array,
optional: true),
# Everything around building
FastlaneCore::ConfigItem.new(key: :buildlog_path,
short_option: "-l",
env_name: "SNAPSHOT_BUILDLOG_PATH",
description: "The directory where to store the build log",
default_value: "#{FastlaneCore::Helper.buildlog_path}/snapshot"),
FastlaneCore::ConfigItem.new(key: :clean,
short_option: "-c",
env_name: "SNAPSHOT_CLEAN",
description: "Should the project be cleaned before building it?",
is_string: false,
default_value: false),
FastlaneCore::ConfigItem.new(key: :configuration,
short_option: "-q",
env_name: "SNAPSHOT_CONFIGURATION",
description: "The configuration to use when building the app. Defaults to 'Release'",
optional: true),
FastlaneCore::ConfigItem.new(key: :xcpretty_args,
short_option: "-x",
env_name: "SNAPSHOT_XCPRETTY_ARGS",
description: "Additional xcpretty arguments",
is_string: true,
optional: true),
FastlaneCore::ConfigItem.new(key: :sdk,
short_option: "-k",
env_name: "SNAPSHOT_SDK",
description: "The SDK that should be used for building the application",
optional: true),
FastlaneCore::ConfigItem.new(key: :scheme,
short_option: "-s",
env_name: 'SNAPSHOT_SCHEME',
description: "The scheme you want to use, this must be the scheme for the UI Tests",
optional: true), # optional true because we offer a picker to the user
FastlaneCore::ConfigItem.new(key: :number_of_retries,
short_option: "-n",
env_name: 'SNAPSHOT_NUMBER_OF_RETRIES',
description: "The number of times a test can fail before snapshot should stop retrying",
type: Integer,
default_value: 1),
FastlaneCore::ConfigItem.new(key: :stop_after_first_error,
env_name: 'SNAPSHOT_BREAK_ON_FIRST_ERROR',
description: "Should snapshot stop immediately after the tests completely failed on one device?",
default_value: false,
is_string: false),
FastlaneCore::ConfigItem.new(key: :derived_data_path,
short_option: "-f",
env_name: "SNAPSHOT_DERIVED_DATA_PATH",
description: "The directory where build products and other derived data will go",
optional: true),
FastlaneCore::ConfigItem.new(key: :test_target_name,
env_name: "SNAPSHOT_TEST_TARGET_NAME",
description: "The name of the target you want to test (if you desire to override the Target Application from Xcode)",
optional: true),
FastlaneCore::ConfigItem.new(key: :namespace_log_files,
env_name: "SNAPSHOT_NAMESPACE_LOG_FILES",
description: "Separate the log files per device and per language",
optional: true,
is_string: false)
]
end
end
end
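# A minimal Snapfile sketch exercising a few of the options above (values are
# illustrative assumptions, not defaults):
#
#   devices(["iPhone 6", "iPad Air"])
#   languages(["en-US", "de-DE"])
#   scheme("MyAppUITests")
#   output_directory("./screenshots")
#   clear_previous_screenshots(true)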
| NicholasFFox/fastlane | snapshot/lib/snapshot/options.rb | Ruby | mit | 13,252 |
//---------------------------------------------------------------------
// <copyright file="JsonLightUtils.cs" company="Microsoft">
// Copyright (C) Microsoft Corporation. All rights reserved. See License.txt in the project root for license information.
// </copyright>
//---------------------------------------------------------------------
namespace Microsoft.Test.OData.TDD.Tests.Common.JsonLight
{
using System.Collections.Generic;
using Microsoft.OData.Core;
using Microsoft.OData.Core.JsonLight;
public static class JsonLightUtils
{
/// <summary>The default streaming Json Light media type.</summary>
internal static readonly ODataMediaType JsonLightStreamingMediaType = new ODataMediaType(
MimeConstants.MimeApplicationType,
MimeConstants.MimeJsonSubType,
new[]{
new KeyValuePair<string, string>(MimeConstants.MimeMetadataParameterName, MimeConstants.MimeMetadataParameterValueMinimal),
new KeyValuePair<string, string>(MimeConstants.MimeStreamingParameterName, MimeConstants.MimeParameterValueTrue),
new KeyValuePair<string, string>(MimeConstants.MimeIeee754CompatibleParameterName, MimeConstants.MimeParameterValueFalse)
});
/// <summary>
/// Gets the name of the property annotation property.
/// </summary>
/// <param name="propertyName">The name of the property to annotate.</param>
/// <param name="annotationName">The name of the annotation.</param>
/// <returns>The property name for the annotation property.</returns>
public static string GetPropertyAnnotationName(string propertyName, string annotationName)
{
return propertyName + JsonLightConstants.ODataPropertyAnnotationSeparatorChar + annotationName;
}
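        // Example (illustrative values): with the JSON Light property annotation separator '@',
        // GetPropertyAnnotationName("Orders", "odata.navigationLinkUrl") returns
        // "Orders@odata.navigationLinkUrl".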
}
} | hotchandanisagar/odata.net | test/FunctionalTests/Tests/DataOData/Tests/OData.TDD.Tests/Common/JsonLight/JsonLightUtils.cs | C# | mit | 1,894 |
require "test_helper"
class MaintainingRepoSubscriptionsTest < ActionDispatch::IntegrationTest
fixtures :repos
def triage_the_sandbox
login_via_github
visit "/"
click_link "issue_triage_sandbox"
click_button "I Want to Triage: bemurphy/issue_triage_sandbox"
end
test "subscribing to a repo" do
assert_difference 'ActionMailer::Base.deliveries.size', +1 do
triage_the_sandbox
assert page.has_content?("issue_triage_sandbox")
end
assert_equal IssueAssignment.last.delivered, true
end
test "send an issue! button" do
triage_the_sandbox
assert_difference 'ActionMailer::Base.deliveries.size', +1 do
click_link "issue_triage_sandbox"
click_link "Send new issue!"
assert page.has_content?("You will receive an email with your new issue shortly")
end
assert_equal IssueAssignment.last.delivered, true
end
test "listing subscribers" do
triage_the_sandbox
click_link 'issue_triage_sandbox'
click_link 'Subscribers'
assert page.has_content?("@mockstar")
end
test "list only favorite languages" do
login_via_github
visit "/"
assert !page.has_content?("javascript")
end
end
| colinrubbert/codetriage | test/integration/maintaining_repo_subscriptions_test.rb | Ruby | mit | 1,192 |
// Generated by CoffeeScript 1.3.3
(function() {
define(["smog/server", "smog/notify", "templates/connect"], function(server, notify, templ) {
return {
show: function() {
$('#content').html(templ());
$('#connect-modal').modal({
backdrop: false
});
return $('#connect-button').click(function() {
var host;
host = $('#host').val();
return server.connect(host, function(err, okay) {
if (err != null) {
if (typeof err === 'object' && Object.keys(err).length === 0) {
err = "Server unavailable";
}
return notify.error("Connection error: " + (err.err || err));
} else {
$('#connect-modal').modal('hide');
return window.location.hash = '#/home';
}
});
});
}
};
});
}).call(this);
| wearefractal/smog | public/js/routes/index.js | JavaScript | mit | 911 |
package cmd
import (
"bytes"
"fmt"
"github.com/smira/aptly/aptly"
"github.com/smira/aptly/deb"
"github.com/smira/aptly/query"
"github.com/smira/aptly/utils"
"github.com/smira/commander"
"github.com/smira/flag"
"os"
"path/filepath"
"text/template"
)
func aptlyRepoInclude(cmd *commander.Command, args []string) error {
var err error
if len(args) < 1 {
cmd.Usage()
return commander.ErrCommandError
}
verifier, err := getVerifier(context.Flags())
if err != nil {
return fmt.Errorf("unable to initialize GPG verifier: %s", err)
}
if verifier == nil {
verifier = &utils.GpgVerifier{}
}
forceReplace := context.Flags().Lookup("force-replace").Value.Get().(bool)
acceptUnsigned := context.Flags().Lookup("accept-unsigned").Value.Get().(bool)
ignoreSignatures := context.Flags().Lookup("ignore-signatures").Value.Get().(bool)
noRemoveFiles := context.Flags().Lookup("no-remove-files").Value.Get().(bool)
repoTemplate, err := template.New("repo").Parse(context.Flags().Lookup("repo").Value.Get().(string))
if err != nil {
return fmt.Errorf("error parsing -repo template: %s", err)
}
uploaders := (*deb.Uploaders)(nil)
uploadersFile := context.Flags().Lookup("uploaders-file").Value.Get().(string)
if uploadersFile != "" {
uploaders, err = deb.NewUploadersFromFile(uploadersFile)
if err != nil {
return err
}
for i := range uploaders.Rules {
uploaders.Rules[i].CompiledCondition, err = query.Parse(uploaders.Rules[i].Condition)
if err != nil {
return fmt.Errorf("error parsing query %s: %s", uploaders.Rules[i].Condition, err)
}
}
}
reporter := &aptly.ConsoleResultReporter{Progress: context.Progress()}
var changesFiles, failedFiles, processedFiles []string
changesFiles, failedFiles = deb.CollectChangesFiles(args, reporter)
for _, path := range changesFiles {
var changes *deb.Changes
changes, err = deb.NewChanges(path)
if err != nil {
failedFiles = append(failedFiles, path)
reporter.Warning("unable to process file %s: %s", path, err)
continue
}
err = changes.VerifyAndParse(acceptUnsigned, ignoreSignatures, verifier)
if err != nil {
failedFiles = append(failedFiles, path)
reporter.Warning("unable to process file %s: %s", changes.ChangesName, err)
changes.Cleanup()
continue
}
err = changes.Prepare()
if err != nil {
failedFiles = append(failedFiles, path)
reporter.Warning("unable to process file %s: %s", changes.ChangesName, err)
changes.Cleanup()
continue
}
repoName := &bytes.Buffer{}
err = repoTemplate.Execute(repoName, changes.Stanza)
if err != nil {
return fmt.Errorf("error applying template to repo: %s", err)
}
context.Progress().Printf("Loading repository %s for changes file %s...\n", repoName.String(), changes.ChangesName)
repo, err := context.CollectionFactory().LocalRepoCollection().ByName(repoName.String())
if err != nil {
failedFiles = append(failedFiles, path)
reporter.Warning("unable to process file %s: %s", changes.ChangesName, err)
changes.Cleanup()
continue
}
currentUploaders := uploaders
if repo.Uploaders != nil {
currentUploaders = repo.Uploaders
for i := range currentUploaders.Rules {
currentUploaders.Rules[i].CompiledCondition, err = query.Parse(currentUploaders.Rules[i].Condition)
if err != nil {
return fmt.Errorf("error parsing query %s: %s", currentUploaders.Rules[i].Condition, err)
}
}
}
if currentUploaders != nil {
if err = currentUploaders.IsAllowed(changes); err != nil {
failedFiles = append(failedFiles, path)
reporter.Warning("changes file skipped due to uploaders config: %s, keys %#v: %s",
changes.ChangesName, changes.SignatureKeys, err)
changes.Cleanup()
continue
}
}
err = context.CollectionFactory().LocalRepoCollection().LoadComplete(repo)
if err != nil {
return fmt.Errorf("unable to load repo: %s", err)
}
list, err := deb.NewPackageListFromRefList(repo.RefList(), context.CollectionFactory().PackageCollection(), context.Progress())
if err != nil {
return fmt.Errorf("unable to load packages: %s", err)
}
packageFiles, _ := deb.CollectPackageFiles([]string{changes.TempDir}, reporter)
var restriction deb.PackageQuery
restriction, err = changes.PackageQuery()
if err != nil {
failedFiles = append(failedFiles, path)
reporter.Warning("unable to process file %s: %s", changes.ChangesName, err)
changes.Cleanup()
continue
}
var processedFiles2, failedFiles2 []string
processedFiles2, failedFiles2, err = deb.ImportPackageFiles(list, packageFiles, forceReplace, verifier, context.PackagePool(),
context.CollectionFactory().PackageCollection(), reporter, restriction)
if err != nil {
return fmt.Errorf("unable to import package files: %s", err)
}
repo.UpdateRefList(deb.NewPackageRefListFromPackageList(list))
err = context.CollectionFactory().LocalRepoCollection().Update(repo)
if err != nil {
return fmt.Errorf("unable to save: %s", err)
}
err = changes.Cleanup()
if err != nil {
return err
}
for _, file := range failedFiles2 {
failedFiles = append(failedFiles, filepath.Join(changes.BasePath, filepath.Base(file)))
}
for _, file := range processedFiles2 {
processedFiles = append(processedFiles, filepath.Join(changes.BasePath, filepath.Base(file)))
}
processedFiles = append(processedFiles, path)
}
if !noRemoveFiles {
processedFiles = utils.StrSliceDeduplicate(processedFiles)
for _, file := range processedFiles {
err := os.Remove(file)
if err != nil {
return fmt.Errorf("unable to remove file: %s", err)
}
}
}
if len(failedFiles) > 0 {
context.Progress().ColoredPrintf("@y[!]@| @!Some files were skipped due to errors:@|")
for _, file := range failedFiles {
context.Progress().ColoredPrintf(" %s", file)
}
return fmt.Errorf("some files failed to be added")
}
return err
}
func makeCmdRepoInclude() *commander.Command {
cmd := &commander.Command{
Run: aptlyRepoInclude,
UsageLine: "include <file.changes>|<directory> ...",
Short: "add packages to local repositories based on .changes files",
Long: `
Command include looks for .changes files in the list of arguments or in the specified directories.
Each .changes file is verified and parsed, the referenced files are put into a separate temporary
directory and added into the local repository. Successfully imported files are removed by default.
Additionally, uploads can be restricted with an <uploaders.json> file. Rules in this file control
uploads based on the GPG key ID of the .changes file signature and queries on .changes file fields.
Example:
$ aptly repo include -repo=foo-release incoming/
`,
Flag: *flag.NewFlagSet("aptly-repo-include", flag.ExitOnError),
}
cmd.Flag.Bool("no-remove-files", false, "don't remove files that have been imported successfully into repository")
cmd.Flag.Bool("force-replace", false, "when adding package that conflicts with existing package, remove existing package")
cmd.Flag.String("repo", "{{.Distribution}}", "which repo should files go to, defaults to Distribution field of .changes file")
cmd.Flag.Var(&keyRingsFlag{}, "keyring", "gpg keyring to use when verifying Release file (could be specified multiple times)")
cmd.Flag.Bool("ignore-signatures", false, "disable verification of .changes file signature")
cmd.Flag.Bool("accept-unsigned", false, "accept unsigned .changes files")
cmd.Flag.String("uploaders-file", "", "path to uploaders.json file")
return cmd
}
| bsundsrud/aptly | cmd/repo_include.go | GO | mit | 7,511 |
/**
* Creates a new instance of Emitter.
* @class
* @returns {Object} Returns a new instance of Emitter.
* @example
* // Creates a new instance of Emitter.
* var Emitter = require('emitter');
*
* var emitter = new Emitter();
*/
class Emitter {
/**
* Adds a listener to the collection for the specified event.
* @memberof! Emitter.prototype
* @function
* @param {String} event - The event name.
* @param {Function} listener - A listener function to add.
* @returns {Object} Returns an instance of Emitter.
* @example
* // Add an event listener to "foo" event.
* emitter.on('foo', listener);
*/
on(event, listener) {
// Use the current collection or create it.
this._eventCollection = this._eventCollection || {};
// Use the current collection of an event or create it.
this._eventCollection[event] = this._eventCollection[event] || [];
// Appends the listener into the collection of the given event
this._eventCollection[event].push(listener);
return this;
}
/**
* Adds a listener to the collection for the specified event that will be called only once.
* @memberof! Emitter.prototype
* @function
* @param {String} event - The event name.
* @param {Function} listener - A listener function to add.
* @returns {Object} Returns an instance of Emitter.
* @example
* // Will add an event handler to "foo" event once.
* emitter.once('foo', listener);
*/
once(event, listener) {
const self = this;
function fn() {
self.off(event, fn);
listener.apply(this, arguments);
}
fn.listener = listener;
this.on(event, fn);
return this;
}
/**
* Removes a listener from the collection for the specified event.
* @memberof! Emitter.prototype
* @function
* @param {String} event - The event name.
* @param {Function} listener - A listener function to remove.
* @returns {Object} Returns an instance of Emitter.
* @example
* // Remove a given listener.
* emitter.off('foo', listener);
*/
off(event, listener) {
let listeners;
// Defines listeners value.
if (!this._eventCollection || !(listeners = this._eventCollection[event])) {
return this;
}
listeners.forEach((fn, i) => {
if (fn === listener || fn.listener === listener) {
// Removes the given listener.
listeners.splice(i, 1);
}
});
// Removes an empty event collection.
if (listeners.length === 0) {
delete this._eventCollection[event];
}
return this;
}
/**
* Execute each item in the listener collection in order with the specified data.
* @memberof! Emitter.prototype
* @function
* @param {String} event - The name of the event you want to emit.
* @param {...Object} data - Data to pass to the listeners.
* @returns {Object} Returns an instance of Emitter.
* @example
* // Emits the "foo" event with 'param1' and 'param2' as arguments.
* emitter.emit('foo', 'param1', 'param2');
*/
emit(event, ...args) {
let listeners;
// Defines listeners value.
if (!this._eventCollection || !(listeners = this._eventCollection[event])) {
return this;
}
// Clone listeners
listeners = listeners.slice(0);
listeners.forEach(fn => fn.apply(this, args));
return this;
}
}
/**
* Exports Emitter
*/
export default Emitter;
| sinfin/folio | vendor/assets/bower_components/emitter-es6/src/index.js | JavaScript | mit | 3,401 |
var expect = require('expect.js'),
defaultOpts = require('..').prototype.options,
_ = require('lodash'),
parse = require('../lib/parse'),
render = require('../lib/render');
var html = function(str, options) {
options = _.defaults(options || {}, defaultOpts);
var dom = parse(str, options);
return render(dom);
};
var xml = function(str, options) {
options = _.defaults(options || {}, defaultOpts);
options.xmlMode = true;
var dom = parse(str, options);
return render(dom, options);
};
describe('render', function() {
describe('(html)', function() {
it('should render <br /> tags correctly', function() {
var str = '<br />';
expect(html(str)).to.equal('<br>');
});
it('should handle double quotes within single quoted attributes properly', function() {
var str = '<hr class=\'an "edge" case\' />';
expect(html(str)).to.equal('<hr class="an "edge" case">');
});
it('should retain encoded HTML content within attributes', function() {
var str = '<hr class="cheerio & node = happy parsing" />';
expect(html(str)).to.equal('<hr class="cheerio & node = happy parsing">');
});
it('should shorten the "checked" attribute when it contains the value "checked"', function() {
var str = '<input checked/>';
expect(html(str)).to.equal('<input checked>');
});
it('should not shorten the "name" attribute when it contains the value "name"', function() {
var str = '<input name="name"/>';
expect(html(str)).to.equal('<input name="name">');
});
it('should render comments correctly', function() {
var str = '<!-- comment -->';
expect(html(str)).to.equal('<!-- comment -->');
});
it('should render whitespace by default', function() {
var str = '<a href="./haha.html">hi</a> <a href="./blah.html">blah</a>';
expect(html(str)).to.equal(str);
});
it('should normalize whitespace if specified', function() {
var str = '<a href="./haha.html">hi</a> <a href="./blah.html">blah </a>';
expect(html(str, { normalizeWhitespace: true })).to.equal('<a href="./haha.html">hi</a> <a href="./blah.html">blah </a>');
});
it('should preserve multiple hyphens in data attributes', function() {
var str = '<div data-foo-bar-baz="value"></div>';
expect(html(str)).to.equal('<div data-foo-bar-baz="value"></div>');
});
it('should render CDATA correctly', function() {
var str = '<a> <b> <![CDATA[ asdf&asdf ]]> <c/> <![CDATA[ asdf&asdf ]]> </b> </a>';
expect(xml(str)).to.equal(str);
});
});
});
| JHand93/WebPerformanceTestSuite | webpagetest-charts-api/node_modules/cheerio/test/render.js | JavaScript | mit | 2,628 |
"use strict";
var path = require('canonical-path');
var packagePath = __dirname;
var Package = require('dgeni').Package;
// Create and export a new Dgeni package called angularjs. This package depends upon
// the ngdoc, nunjucks, examples and git packages defined in the dgeni-packages npm module.
module.exports = new Package('angularjs', [
require('dgeni-packages/ngdoc'),
require('dgeni-packages/nunjucks'),
require('dgeni-packages/examples'),
require('dgeni-packages/git')
])
.factory(require('./services/errorNamespaceMap'))
.factory(require('./services/getMinerrInfo'))
.factory(require('./services/getVersion'))
.factory(require('./services/deployments/debug'))
.factory(require('./services/deployments/default'))
.factory(require('./services/deployments/jquery'))
.factory(require('./services/deployments/production'))
.factory(require('./inline-tag-defs/type'))
.processor(require('./processors/error-docs'))
.processor(require('./processors/index-page'))
.processor(require('./processors/keywords'))
.processor(require('./processors/pages-data'))
.processor(require('./processors/versions-data'))
.config(function(dgeni, log, readFilesProcessor, writeFilesProcessor) {
dgeni.stopOnValidationError = true;
dgeni.stopOnProcessingError = true;
log.level = 'info';
readFilesProcessor.basePath = path.resolve(__dirname,'../..');
readFilesProcessor.sourceFiles = [
{ include: 'src/**/*.js', exclude: 'src/angular.bind.js', basePath: 'src' },
{ include: 'docs/content/**/*.ngdoc', basePath: 'docs/content' }
];
writeFilesProcessor.outputFolder = 'build/docs';
})
.config(function(parseTagsProcessor) {
parseTagsProcessor.tagDefinitions.push(require('./tag-defs/tutorial-step'));
parseTagsProcessor.tagDefinitions.push(require('./tag-defs/sortOrder'));
})
.config(function(inlineTagProcessor, typeInlineTagDef) {
inlineTagProcessor.inlineTagDefinitions.push(typeInlineTagDef);
})
.config(function(templateFinder, renderDocsProcessor, gitData) {
templateFinder.templateFolders.unshift(path.resolve(packagePath, 'templates'));
renderDocsProcessor.extraData.git = gitData;
})
.config(function(computePathsProcessor, computeIdsProcessor) {
computePathsProcessor.pathTemplates.push({
docTypes: ['error'],
pathTemplate: 'error/${namespace}/${name}',
outputPathTemplate: 'partials/error/${namespace}/${name}.html'
});
computePathsProcessor.pathTemplates.push({
docTypes: ['errorNamespace'],
pathTemplate: 'error/${name}',
outputPathTemplate: 'partials/error/${name}.html'
});
computePathsProcessor.pathTemplates.push({
docTypes: ['overview', 'tutorial'],
getPath: function(doc) {
var docPath = path.dirname(doc.fileInfo.relativePath);
if ( doc.fileInfo.baseName !== 'index' ) {
docPath = path.join(docPath, doc.fileInfo.baseName);
}
return docPath;
},
outputPathTemplate: 'partials/${path}.html'
});
computePathsProcessor.pathTemplates.push({
docTypes: ['e2e-test'],
getPath: function() {},
outputPathTemplate: 'ptore2e/${example.id}/${deployment.name}_test.js'
});
computePathsProcessor.pathTemplates.push({
docTypes: ['indexPage'],
pathTemplate: '.',
outputPathTemplate: '${id}.html'
});
computePathsProcessor.pathTemplates.push({
docTypes: ['module' ],
pathTemplate: '${area}/${name}',
outputPathTemplate: 'partials/${area}/${name}.html'
});
computePathsProcessor.pathTemplates.push({
docTypes: ['componentGroup' ],
pathTemplate: '${area}/${moduleName}/${groupType}',
outputPathTemplate: 'partials/${area}/${moduleName}/${groupType}.html'
});
computeIdsProcessor.idTemplates.push({
docTypes: ['overview', 'tutorial', 'e2e-test', 'indexPage'],
getId: function(doc) { return doc.fileInfo.baseName; },
getAliases: function(doc) { return [doc.id]; }
});
computeIdsProcessor.idTemplates.push({
docTypes: ['error'],
getId: function(doc) { return 'error:' + doc.namespace + ':' + doc.name; },
getAliases: function(doc) { return [doc.name, doc.namespace + ':' + doc.name, doc.id]; }
},
{
docTypes: ['errorNamespace'],
getId: function(doc) { return 'error:' + doc.name; },
getAliases: function(doc) { return [doc.id]; }
}
);
})
.config(function(checkAnchorLinksProcessor) {
checkAnchorLinksProcessor.base = '/';
// We are only interested in docs that have an area (i.e. they are pages)
checkAnchorLinksProcessor.checkDoc = function(doc) { return doc.area; };
})
.config(function(
generateIndexPagesProcessor,
generateProtractorTestsProcessor,
generateExamplesProcessor,
debugDeployment, defaultDeployment,
jqueryDeployment, productionDeployment) {
generateIndexPagesProcessor.deployments = [
debugDeployment,
defaultDeployment,
jqueryDeployment,
productionDeployment
];
generateProtractorTestsProcessor.deployments = [
defaultDeployment,
jqueryDeployment
];
generateProtractorTestsProcessor.basePath = 'build/docs/';
generateExamplesProcessor.deployments = [
debugDeployment,
defaultDeployment,
jqueryDeployment,
productionDeployment
];
})
.config(function(generateKeywordsProcessor) {
generateKeywordsProcessor.docTypesToIgnore = ['componentGroup'];
});
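// A minimal sketch of generating docs with this package (illustrative only; assumes the
// standard Dgeni runner API and that this file is the package being required):
//
//   var Dgeni = require('dgeni');
//   var dgeni = new Dgeni([require('./docs/config')]);
//   dgeni.generate().then(function(docs) {
//     console.log(docs.length + ' docs generated');
//   });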
| JonFerrera/angular.js | docs/config/index.js | JavaScript | mit | 5,289 |
var debug = require('debug')('keystone:core:openDatabaseConnection');
module.exports = function openDatabaseConnection (callback) {
var keystone = this;
var mongoConnectionOpen = false;
// support replica sets for mongoose
if (keystone.get('mongo replica set')) {
if (keystone.get('logger')) {
console.log('\nWarning: using the `mongo replica set` option has been deprecated and will be removed in'
+ ' a future version.\nInstead set the `mongo` connection string with your host details, e.g.'
+ ' mongodb://username:password@host:port,host:port,host:port/database and set any replica set options'
+ ' in `mongo options`.\n\nRefer to https://mongodb.github.io/node-mongodb-native/driver-articles/mongoclient.html'
+ ' for more details on the connection settings.');
}
debug('setting up mongo replica set');
var replicaData = keystone.get('mongo replica set');
var replica = '';
var credentials = (replicaData.username && replicaData.password) ? replicaData.username + ':' + replicaData.password + '@' : '';
replicaData.db.servers.forEach(function (server) {
replica += 'mongodb://' + credentials + server.host + ':' + server.port + '/' + replicaData.db.name + ',';
});
var options = {
auth: { authSource: replicaData.authSource },
replset: {
rs_name: replicaData.db.replicaSetOptions.rs_name,
readPreference: replicaData.db.replicaSetOptions.readPreference,
},
};
		debug('connecting to replica set');
keystone.mongoose.connect(replica, options);
} else {
debug('connecting to mongo');
keystone.mongoose.connect(keystone.get('mongo'), keystone.get('mongo options'));
}
keystone.mongoose.connection.on('error', function (err) {
if (keystone.get('logger')) {
console.log('------------------------------------------------');
console.log('Mongo Error:\n');
console.log(err);
}
if (mongoConnectionOpen) {
if (err.name === 'ValidationError') return;
throw err;
} else {
throw new Error('KeystoneJS (' + keystone.get('name') + ') failed to start - Check that you are running `mongod` in a separate process.');
}
}).on('open', function () {
debug('mongo connection open');
mongoConnectionOpen = true;
var connected = function () {
if (keystone.get('auto update')) {
debug('applying auto update');
keystone.applyUpdates(callback);
} else {
callback();
}
};
if (keystone.sessionStorePromise) {
keystone.sessionStorePromise.then(connected);
} else {
connected();
}
});
return this;
};
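// Usage sketch of the recommended (non-deprecated) configuration mentioned in the
// warning above; host names and credentials are illustrative, and this assumes the
// usual keystone.set() API:
//
//   keystone.set('mongo', 'mongodb://user:pass@host1:27017,host2:27017/mydb');
//   keystone.set('mongo options', { replset: { rs_name: 'rs0' } });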
| andreufirefly/keystone | lib/core/openDatabaseConnection.js | JavaScript | mit | 2,539 |
#ifndef NUMEXPR_OBJECT_HPP
#define NUMEXPR_OBJECT_HPP
/*********************************************************************
Numexpr - Fast numerical array expression evaluator for NumPy.
License: MIT
Author: See AUTHORS.txt
See LICENSE.txt for details about copyright and rights to use.
**********************************************************************/
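/* Note: this header is not self-contained: it assumes <Python.h> (PyObject,
   PyObject_HEAD, PyTypeObject) and the NumPy headers (npy_intp) have already
   been included by the translation unit that includes it. */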
struct NumExprObject
{
PyObject_HEAD
PyObject *signature; /* a python string */
PyObject *tempsig;
PyObject *constsig;
PyObject *fullsig;
PyObject *program; /* a python string */
PyObject *constants; /* a tuple of int/float/complex */
PyObject *input_names; /* tuple of strings */
char **mem; /* pointers to registers */
    char *rawmem;               /* a chunk of raw memory for storing registers */
npy_intp *memsteps;
npy_intp *memsizes;
int rawmemsize;
int n_inputs;
int n_constants;
int n_temps;
};
extern PyTypeObject NumExprType;
#endif // NUMEXPR_OBJECT_HPP
| Alwnikrotikz/numexpr | numexpr/numexpr_object.hpp | C++ | mit | 1,069 |
// Zepto.js
// (c) 2010-2015 Thomas Fuchs
// Zepto.js may be freely distributed under the MIT license.
var Zepto = (function() {
var undefined, key, $, classList, emptyArray = [], concat = emptyArray.concat, filter = emptyArray.filter, slice = emptyArray.slice,
document = window.document,
elementDisplay = {}, classCache = {},
cssNumber = { 'column-count': 1, 'columns': 1, 'font-weight': 1, 'line-height': 1,'opacity': 1, 'z-index': 1, 'zoom': 1 },
fragmentRE = /^\s*<(\w+|!)[^>]*>/,
singleTagRE = /^<(\w+)\s*\/?>(?:<\/\1>|)$/,
tagExpanderRE = /<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/ig,
rootNodeRE = /^(?:body|html)$/i,
capitalRE = /([A-Z])/g,
// special attributes that should be get/set via method calls
methodAttributes = ['val', 'css', 'html', 'text', 'data', 'width', 'height', 'offset'],
adjacencyOperators = [ 'after', 'prepend', 'before', 'append' ],
table = document.createElement('table'),
tableRow = document.createElement('tr'),
containers = {
'tr': document.createElement('tbody'),
'tbody': table, 'thead': table, 'tfoot': table,
'td': tableRow, 'th': tableRow,
'*': document.createElement('div')
},
readyRE = /complete|loaded|interactive/,
simpleSelectorRE = /^[\w-]*$/,
class2type = {},
toString = class2type.toString,
zepto = {},
camelize, uniq,
tempParent = document.createElement('div'),
propMap = {
'tabindex': 'tabIndex',
'readonly': 'readOnly',
'for': 'htmlFor',
'class': 'className',
'maxlength': 'maxLength',
'cellspacing': 'cellSpacing',
'cellpadding': 'cellPadding',
'rowspan': 'rowSpan',
'colspan': 'colSpan',
'usemap': 'useMap',
'frameborder': 'frameBorder',
'contenteditable': 'contentEditable'
},
isArray = Array.isArray ||
function(object){ return object instanceof Array }
zepto.matches = function(element, selector) {
if (!selector || !element || element.nodeType !== 1) return false
var matchesSelector = element.webkitMatchesSelector || element.mozMatchesSelector ||
element.oMatchesSelector || element.matchesSelector
if (matchesSelector) return matchesSelector.call(element, selector)
    // fall back to performing the match manually: query the parent and check membership
var match, parent = element.parentNode, temp = !parent
if (temp) (parent = tempParent).appendChild(element)
match = ~zepto.qsa(parent, selector).indexOf(element)
temp && tempParent.removeChild(element)
return match
}
function type(obj) {
return obj == null ? String(obj) :
class2type[toString.call(obj)] || "object"
}
function isFunction(value) { return type(value) == "function" }
function isWindow(obj) { return obj != null && obj == obj.window }
function isDocument(obj) { return obj != null && obj.nodeType == obj.DOCUMENT_NODE }
function isObject(obj) { return type(obj) == "object" }
function isPlainObject(obj) {
return isObject(obj) && !isWindow(obj) && Object.getPrototypeOf(obj) == Object.prototype
}
function likeArray(obj) { return typeof obj.length == 'number' }
function compact(array) { return filter.call(array, function(item){ return item != null }) }
function flatten(array) { return array.length > 0 ? $.fn.concat.apply([], array) : array }
camelize = function(str){ return str.replace(/-+(.)?/g, function(match, chr){ return chr ? chr.toUpperCase() : '' }) }
function dasherize(str) {
return str.replace(/::/g, '/')
.replace(/([A-Z]+)([A-Z][a-z])/g, '$1_$2')
.replace(/([a-z\d])([A-Z])/g, '$1_$2')
.replace(/_/g, '-')
.toLowerCase()
}
uniq = function(array){ return filter.call(array, function(item, idx){ return array.indexOf(item) == idx }) }
function classRE(name) {
return name in classCache ?
classCache[name] : (classCache[name] = new RegExp('(^|\\s)' + name + '(\\s|$)'))
}
function maybeAddPx(name, value) {
return (typeof value == "number" && !cssNumber[dasherize(name)]) ? value + "px" : value
}
function defaultDisplay(nodeName) {
var element, display
if (!elementDisplay[nodeName]) {
element = document.createElement(nodeName)
document.body.appendChild(element)
display = getComputedStyle(element, '').getPropertyValue("display")
element.parentNode.removeChild(element)
display == "none" && (display = "block")
elementDisplay[nodeName] = display
}
return elementDisplay[nodeName]
}
function children(element) {
return 'children' in element ?
slice.call(element.children) :
$.map(element.childNodes, function(node){ if (node.nodeType == 1) return node })
}
function Z(dom, selector) {
var i, len = dom ? dom.length : 0
for (i = 0; i < len; i++) this[i] = dom[i]
this.length = len
this.selector = selector || ''
}
  // `$.zepto.fragment` takes an HTML string and an optional tag name
  // to generate DOM nodes from the given HTML string.
  // The generated DOM nodes are returned as an array.
  // This function can be overridden in plugins, for example to make
  // it compatible with browsers that don't support the DOM fully.
zepto.fragment = function(html, name, properties) {
var dom, nodes, container
// A special case optimization for a single tag
if (singleTagRE.test(html)) dom = $(document.createElement(RegExp.$1))
if (!dom) {
if (html.replace) html = html.replace(tagExpanderRE, "<$1></$2>")
if (name === undefined) name = fragmentRE.test(html) && RegExp.$1
if (!(name in containers)) name = '*'
container = containers[name]
container.innerHTML = '' + html
dom = $.each(slice.call(container.childNodes), function(){
container.removeChild(this)
})
}
if (isPlainObject(properties)) {
nodes = $(dom)
$.each(properties, function(key, value) {
if (methodAttributes.indexOf(key) > -1) nodes[key](value)
else nodes.attr(key, value)
})
}
return dom
}
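  // Usage sketch (illustrative markup; not called anywhere in this file):
  //   zepto.fragment('<li class="item">hi</li>')          // => [li.item]
  //   zepto.fragment('<td>1</td>', 'td')                  // parsed inside a table-row container
  //   zepto.fragment('<p></p>', undefined, { id: 'x' })   // sets the `id` attribute via attr()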
// `$.zepto.Z` swaps out the prototype of the given `dom` array
// of nodes with `$.fn` and thus supplying all the Zepto functions
  // to the array. This method can be overridden in plugins.
zepto.Z = function(dom, selector) {
return new Z(dom, selector)
}
// `$.zepto.isZ` should return `true` if the given object is a Zepto
  // collection. This method can be overridden in plugins.
zepto.isZ = function(object) {
return object instanceof zepto.Z
}
// `$.zepto.init` is Zepto's counterpart to jQuery's `$.fn.init` and
// takes a CSS selector and an optional context (and handles various
// special cases).
  // This method can be overridden in plugins.
zepto.init = function(selector, context) {
var dom
// If nothing given, return an empty Zepto collection
if (!selector) return zepto.Z()
// Optimize for string selectors
else if (typeof selector == 'string') {
selector = selector.trim()
// If it's a html fragment, create nodes from it
// Note: In both Chrome 21 and Firefox 15, DOM error 12
// is thrown if the fragment doesn't begin with <
if (selector[0] == '<' && fragmentRE.test(selector))
dom = zepto.fragment(selector, RegExp.$1, context), selector = null
// If there's a context, create a collection on that context first, and select
// nodes from there
else if (context !== undefined) return $(context).find(selector)
// If it's a CSS selector, use it to select nodes.
else dom = zepto.qsa(document, selector)
}
// If a function is given, call it when the DOM is ready
else if (isFunction(selector)) return $(document).ready(selector)
// If a Zepto collection is given, just return it
else if (zepto.isZ(selector)) return selector
else {
// normalize array if an array of nodes is given
if (isArray(selector)) dom = compact(selector)
// Wrap DOM nodes.
else if (isObject(selector))
dom = [selector], selector = null
// If it's a html fragment, create nodes from it
else if (fragmentRE.test(selector))
dom = zepto.fragment(selector.trim(), RegExp.$1, context), selector = null
// If there's a context, create a collection on that context first, and select
// nodes from there
else if (context !== undefined) return $(context).find(selector)
      // And last but not least, if it's a CSS selector, use it to select nodes.
else dom = zepto.qsa(document, selector)
}
// create a new Zepto collection from the nodes found
return zepto.Z(dom, selector)
}
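  // Usage sketch; the public `$` defined below simply delegates here
  // (selectors and nodes are illustrative):
  //   $('<p>hello</p>')         // build a collection from an HTML fragment
  //   $('.item', someElement)   // find '.item' inside a context element
  //   $(function(){ /* DOM ready */ })
  //   $([node1, node2])         // wrap an existing array of nodes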
  // `$` will be the base `Zepto` object. When called, it simply delegates
  // to `$.zepto.init`, which makes the implementation details of selecting
  // nodes and creating Zepto collections patchable in plugins.
$ = function(selector, context){
return zepto.init(selector, context)
}
function extend(target, source, deep) {
for (key in source)
if (deep && (isPlainObject(source[key]) || isArray(source[key]))) {
if (isPlainObject(source[key]) && !isPlainObject(target[key]))
target[key] = {}
if (isArray(source[key]) && !isArray(target[key]))
target[key] = []
extend(target[key], source[key], deep)
}
else if (source[key] !== undefined) target[key] = source[key]
}
// Copy all but undefined properties from one or more
// objects to the `target` object.
$.extend = function(target){
var deep, args = slice.call(arguments, 1)
if (typeof target == 'boolean') {
deep = target
target = args.shift()
}
args.forEach(function(arg){ extend(target, arg, deep) })
return target
}
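  // Usage sketch (illustrative objects):
  //   $.extend({ a: 1 }, { b: 2 })                          // => { a: 1, b: 2 }
  //   $.extend(true, {}, { a: { b: 1 } }, { a: { c: 2 } })   // deep => { a: { b: 1, c: 2 } }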
// `$.zepto.qsa` is Zepto's CSS selector implementation which
// uses `document.querySelectorAll` and optimizes for some special cases, like `#id`.
  // This method can be overridden in plugins.
zepto.qsa = function(element, selector){
var found,
maybeID = selector[0] == '#',
maybeClass = !maybeID && selector[0] == '.',
nameOnly = maybeID || maybeClass ? selector.slice(1) : selector, // Ensure that a 1 char tag name still gets checked
isSimple = simpleSelectorRE.test(nameOnly)
return (element.getElementById && isSimple && maybeID) ? // Safari DocumentFragment doesn't have getElementById
( (found = element.getElementById(nameOnly)) ? [found] : [] ) :
(element.nodeType !== 1 && element.nodeType !== 9 && element.nodeType !== 11) ? [] :
slice.call(
isSimple && !maybeID && element.getElementsByClassName ? // DocumentFragment doesn't have getElementsByClassName/TagName
maybeClass ? element.getElementsByClassName(nameOnly) : // If it's simple, it could be a class
element.getElementsByTagName(selector) : // Or a tag
element.querySelectorAll(selector) // Or it's not simple, and we need to query all
)
}
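  // Usage sketch (illustrative selectors):
  //   zepto.qsa(document, '#main')     // single-id fast path via getElementById
  //   zepto.qsa(document, '.active')   // class fast path via getElementsByClassName
  //   zepto.qsa(el, 'ul > li.item')    // anything else goes through querySelectorAll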
function filtered(nodes, selector) {
return selector == null ? $(nodes) : $(nodes).filter(selector)
}
$.contains = document.documentElement.contains ?
function(parent, node) {
return parent !== node && parent.contains(node)
} :
function(parent, node) {
while (node && (node = node.parentNode))
if (node === parent) return true
return false
}
function funcArg(context, arg, idx, payload) {
return isFunction(arg) ? arg.call(context, idx, payload) : arg
}
function setAttribute(node, name, value) {
value == null ? node.removeAttribute(name) : node.setAttribute(name, value)
}
// access className property while respecting SVGAnimatedString
function className(node, value){
var klass = node.className || '',
svg = klass && klass.baseVal !== undefined
if (value === undefined) return svg ? klass.baseVal : klass
svg ? (klass.baseVal = value) : (node.className = value)
}
// "true" => true
// "false" => false
// "null" => null
// "42" => 42
// "42.5" => 42.5
// "08" => "08"
// JSON => parse if valid
// String => self
function deserializeValue(value) {
try {
return value ?
value == "true" ||
( value == "false" ? false :
value == "null" ? null :
+value + "" == value ? +value :
/^[\[\{]/.test(value) ? $.parseJSON(value) :
value )
: value
} catch(e) {
return value
}
}
$.type = type
$.isFunction = isFunction
$.isWindow = isWindow
$.isArray = isArray
$.isPlainObject = isPlainObject
$.isEmptyObject = function(obj) {
var name
for (name in obj) return false
return true
}
$.inArray = function(elem, array, i){
return emptyArray.indexOf.call(array, elem, i)
}
$.camelCase = camelize
$.trim = function(str) {
return str == null ? "" : String.prototype.trim.call(str)
}
// plugin compatibility
$.uuid = 0
$.support = { }
$.expr = { }
$.noop = function() {}
$.map = function(elements, callback){
var value, values = [], i, key
if (likeArray(elements))
for (i = 0; i < elements.length; i++) {
value = callback(elements[i], i)
if (value != null) values.push(value)
}
else
for (key in elements) {
value = callback(elements[key], key)
if (value != null) values.push(value)
}
return flatten(values)
}
$.each = function(elements, callback){
var i, key
if (likeArray(elements)) {
for (i = 0; i < elements.length; i++)
if (callback.call(elements[i], i, elements[i]) === false) return elements
} else {
for (key in elements)
if (callback.call(elements[key], key, elements[key]) === false) return elements
}
return elements
}
$.grep = function(elements, callback){
return filter.call(elements, callback)
}
if (window.JSON) $.parseJSON = JSON.parse
// Populate the class2type map
$.each("Boolean Number String Function Array Date RegExp Object Error".split(" "), function(i, name) {
class2type[ "[object " + name + "]" ] = name.toLowerCase()
})
// Define methods that will be available on all
// Zepto collections
$.fn = {
constructor: zepto.Z,
length: 0,
// Because a collection acts like an array
// copy over these useful array functions.
forEach: emptyArray.forEach,
reduce: emptyArray.reduce,
push: emptyArray.push,
sort: emptyArray.sort,
splice: emptyArray.splice,
indexOf: emptyArray.indexOf,
concat: function(){
var i, value, args = []
for (i = 0; i < arguments.length; i++) {
value = arguments[i]
args[i] = zepto.isZ(value) ? value.toArray() : value
}
return concat.apply(zepto.isZ(this) ? this.toArray() : this, args)
},
// `map` and `slice` in the jQuery API work differently
// from their array counterparts
map: function(fn){
return $($.map(this, function(el, i){ return fn.call(el, i, el) }))
},
slice: function(){
return $(slice.apply(this, arguments))
},
ready: function(callback){
// need to check if document.body exists for IE as that browser reports
// document ready when it hasn't yet created the body element
if (readyRE.test(document.readyState) && document.body) callback($)
else document.addEventListener('DOMContentLoaded', function(){ callback($) }, false)
return this
},
get: function(idx){
return idx === undefined ? slice.call(this) : this[idx >= 0 ? idx : idx + this.length]
},
toArray: function(){ return this.get() },
size: function(){
return this.length
},
remove: function(){
return this.each(function(){
if (this.parentNode != null)
this.parentNode.removeChild(this)
})
},
each: function(callback){
emptyArray.every.call(this, function(el, idx){
return callback.call(el, idx, el) !== false
})
return this
},
filter: function(selector){
if (isFunction(selector)) return this.not(this.not(selector))
return $(filter.call(this, function(element){
return zepto.matches(element, selector)
}))
},
add: function(selector,context){
return $(uniq(this.concat($(selector,context))))
},
is: function(selector){
return this.length > 0 && zepto.matches(this[0], selector)
},
not: function(selector){
var nodes=[]
if (isFunction(selector) && selector.call !== undefined)
this.each(function(idx){
if (!selector.call(this,idx)) nodes.push(this)
})
else {
var excludes = typeof selector == 'string' ? this.filter(selector) :
(likeArray(selector) && isFunction(selector.item)) ? slice.call(selector) : $(selector)
this.forEach(function(el){
if (excludes.indexOf(el) < 0) nodes.push(el)
})
}
return $(nodes)
},
has: function(selector){
return this.filter(function(){
return isObject(selector) ?
$.contains(this, selector) :
$(this).find(selector).size()
})
},
eq: function(idx){
return idx === -1 ? this.slice(idx) : this.slice(idx, + idx + 1)
},
first: function(){
var el = this[0]
return el && !isObject(el) ? el : $(el)
},
last: function(){
var el = this[this.length - 1]
return el && !isObject(el) ? el : $(el)
},
find: function(selector){
var result, $this = this
if (!selector) result = $()
else if (typeof selector == 'object')
result = $(selector).filter(function(){
var node = this
return emptyArray.some.call($this, function(parent){
return $.contains(parent, node)
})
})
else if (this.length == 1) result = $(zepto.qsa(this[0], selector))
else result = this.map(function(){ return zepto.qsa(this, selector) })
return result
},
closest: function(selector, context){
var node = this[0], collection = false
if (typeof selector == 'object') collection = $(selector)
while (node && !(collection ? collection.indexOf(node) >= 0 : zepto.matches(node, selector)))
node = node !== context && !isDocument(node) && node.parentNode
return $(node)
},
parents: function(selector){
var ancestors = [], nodes = this
while (nodes.length > 0)
nodes = $.map(nodes, function(node){
if ((node = node.parentNode) && !isDocument(node) && ancestors.indexOf(node) < 0) {
ancestors.push(node)
return node
}
})
return filtered(ancestors, selector)
},
parent: function(selector){
return filtered(uniq(this.pluck('parentNode')), selector)
},
children: function(selector){
return filtered(this.map(function(){ return children(this) }), selector)
},
contents: function() {
return this.map(function() { return this.contentDocument || slice.call(this.childNodes) })
},
siblings: function(selector){
return filtered(this.map(function(i, el){
return filter.call(children(el.parentNode), function(child){ return child!==el })
}), selector)
},
empty: function(){
return this.each(function(){ this.innerHTML = '' })
},
// `pluck` is borrowed from Prototype.js
pluck: function(property){
return $.map(this, function(el){ return el[property] })
},
show: function(){
return this.each(function(){
this.style.display == "none" && (this.style.display = '')
if (getComputedStyle(this, '').getPropertyValue("display") == "none")
this.style.display = defaultDisplay(this.nodeName)
})
},
replaceWith: function(newContent){
return this.before(newContent).remove()
},
wrap: function(structure){
var func = isFunction(structure)
if (this[0] && !func)
var dom = $(structure).get(0),
clone = dom.parentNode || this.length > 1
return this.each(function(index){
$(this).wrapAll(
func ? structure.call(this, index) :
clone ? dom.cloneNode(true) : dom
)
})
},
wrapAll: function(structure){
if (this[0]) {
$(this[0]).before(structure = $(structure))
var children
// drill down to the inmost element
while ((children = structure.children()).length) structure = children.first()
$(structure).append(this)
}
return this
},
wrapInner: function(structure){
var func = isFunction(structure)
return this.each(function(index){
var self = $(this), contents = self.contents(),
dom = func ? structure.call(this, index) : structure
contents.length ? contents.wrapAll(dom) : self.append(dom)
})
},
unwrap: function(){
this.parent().each(function(){
$(this).replaceWith($(this).children())
})
return this
},
clone: function(){
return this.map(function(){ return this.cloneNode(true) })
},
hide: function(){
return this.css("display", "none")
},
toggle: function(setting){
return this.each(function(){
var el = $(this)
;(setting === undefined ? el.css("display") == "none" : setting) ? el.show() : el.hide()
})
},
prev: function(selector){ return $(this.pluck('previousElementSibling')).filter(selector || '*') },
next: function(selector){ return $(this.pluck('nextElementSibling')).filter(selector || '*') },
html: function(html){
return 0 in arguments ?
this.each(function(idx){
var originHtml = this.innerHTML
$(this).empty().append( funcArg(this, html, idx, originHtml) )
}) :
(0 in this ? this[0].innerHTML : null)
},
text: function(text){
return 0 in arguments ?
this.each(function(idx){
var newText = funcArg(this, text, idx, this.textContent)
this.textContent = newText == null ? '' : ''+newText
}) :
(0 in this ? this[0].textContent : null)
},
attr: function(name, value){
var result
return (typeof name == 'string' && !(1 in arguments)) ?
(!this.length || this[0].nodeType !== 1 ? undefined :
(!(result = this[0].getAttribute(name)) && name in this[0]) ? this[0][name] : result
) :
this.each(function(idx){
if (this.nodeType !== 1) return
if (isObject(name)) for (key in name) setAttribute(this, key, name[key])
else setAttribute(this, name, funcArg(this, value, idx, this.getAttribute(name)))
})
},
removeAttr: function(name){
return this.each(function(){ this.nodeType === 1 && name.split(' ').forEach(function(attribute){
setAttribute(this, attribute)
}, this)})
},
prop: function(name, value){
name = propMap[name] || name
return (1 in arguments) ?
this.each(function(idx){
this[name] = funcArg(this, value, idx, this[name])
}) :
(this[0] && this[0][name])
},
data: function(name, value){
var attrName = 'data-' + name.replace(capitalRE, '-$1').toLowerCase()
var data = (1 in arguments) ?
this.attr(attrName, value) :
this.attr(attrName)
return data !== null ? deserializeValue(data) : undefined
},
val: function(value){
return 0 in arguments ?
this.each(function(idx){
this.value = funcArg(this, value, idx, this.value)
}) :
(this[0] && (this[0].multiple ?
$(this[0]).find('option').filter(function(){ return this.selected }).pluck('value') :
this[0].value)
)
},
offset: function(coordinates){
if (coordinates) return this.each(function(index){
var $this = $(this),
coords = funcArg(this, coordinates, index, $this.offset()),
parentOffset = $this.offsetParent().offset(),
props = {
top: coords.top - parentOffset.top,
left: coords.left - parentOffset.left
}
if ($this.css('position') == 'static') props['position'] = 'relative'
$this.css(props)
})
if (!this.length) return null
if (!$.contains(document.documentElement, this[0]))
return {top: 0, left: 0}
var obj = this[0].getBoundingClientRect()
return {
left: obj.left + window.pageXOffset,
top: obj.top + window.pageYOffset,
width: Math.round(obj.width),
height: Math.round(obj.height)
}
},
css: function(property, value){
if (arguments.length < 2) {
var computedStyle, element = this[0]
if(!element) return
computedStyle = getComputedStyle(element, '')
if (typeof property == 'string')
return element.style[camelize(property)] || computedStyle.getPropertyValue(property)
else if (isArray(property)) {
var props = {}
$.each(property, function(_, prop){
props[prop] = (element.style[camelize(prop)] || computedStyle.getPropertyValue(prop))
})
return props
}
}
var css = ''
if (type(property) == 'string') {
if (!value && value !== 0)
this.each(function(){ this.style.removeProperty(dasherize(property)) })
else
css = dasherize(property) + ":" + maybeAddPx(property, value)
} else {
for (key in property)
if (!property[key] && property[key] !== 0)
this.each(function(){ this.style.removeProperty(dasherize(key)) })
else
css += dasherize(key) + ':' + maybeAddPx(key, property[key]) + ';'
}
return this.each(function(){ this.style.cssText += ';' + css })
},
index: function(element){
return element ? this.indexOf($(element)[0]) : this.parent().children().indexOf(this[0])
},
hasClass: function(name){
if (!name) return false
return emptyArray.some.call(this, function(el){
return this.test(className(el))
}, classRE(name))
},
addClass: function(name){
if (!name) return this
return this.each(function(idx){
if (!('className' in this)) return
classList = []
var cls = className(this), newName = funcArg(this, name, idx, cls)
newName.split(/\s+/g).forEach(function(klass){
if (!$(this).hasClass(klass)) classList.push(klass)
}, this)
classList.length && className(this, cls + (cls ? " " : "") + classList.join(" "))
})
},
removeClass: function(name){
return this.each(function(idx){
if (!('className' in this)) return
if (name === undefined) return className(this, '')
classList = className(this)
funcArg(this, name, idx, classList).split(/\s+/g).forEach(function(klass){
classList = classList.replace(classRE(klass), " ")
})
className(this, classList.trim())
})
},
toggleClass: function(name, when){
if (!name) return this
return this.each(function(idx){
var $this = $(this), names = funcArg(this, name, idx, className(this))
names.split(/\s+/g).forEach(function(klass){
(when === undefined ? !$this.hasClass(klass) : when) ?
$this.addClass(klass) : $this.removeClass(klass)
})
})
},
scrollTop: function(value){
if (!this.length) return
var hasScrollTop = 'scrollTop' in this[0]
if (value === undefined) return hasScrollTop ? this[0].scrollTop : this[0].pageYOffset
return this.each(hasScrollTop ?
function(){ this.scrollTop = value } :
function(){ this.scrollTo(this.scrollX, value) })
},
scrollLeft: function(value){
if (!this.length) return
var hasScrollLeft = 'scrollLeft' in this[0]
if (value === undefined) return hasScrollLeft ? this[0].scrollLeft : this[0].pageXOffset
return this.each(hasScrollLeft ?
function(){ this.scrollLeft = value } :
function(){ this.scrollTo(value, this.scrollY) })
},
position: function() {
if (!this.length) return
var elem = this[0],
// Get *real* offsetParent
offsetParent = this.offsetParent(),
// Get correct offsets
offset = this.offset(),
parentOffset = rootNodeRE.test(offsetParent[0].nodeName) ? { top: 0, left: 0 } : offsetParent.offset()
// Subtract element margins
// note: when an element has margin: auto the offsetLeft and marginLeft
// are the same in Safari causing offset.left to incorrectly be 0
offset.top -= parseFloat( $(elem).css('margin-top') ) || 0
offset.left -= parseFloat( $(elem).css('margin-left') ) || 0
// Add offsetParent borders
parentOffset.top += parseFloat( $(offsetParent[0]).css('border-top-width') ) || 0
parentOffset.left += parseFloat( $(offsetParent[0]).css('border-left-width') ) || 0
// Subtract the two offsets
return {
top: offset.top - parentOffset.top,
left: offset.left - parentOffset.left
}
},
offsetParent: function() {
return this.map(function(){
var parent = this.offsetParent || document.body
while (parent && !rootNodeRE.test(parent.nodeName) && $(parent).css("position") == "static")
parent = parent.offsetParent
return parent
})
}
}
// for now
$.fn.detach = $.fn.remove
// Generate the `width` and `height` functions
;['width', 'height'].forEach(function(dimension){
var dimensionProperty =
dimension.replace(/./, function(m){ return m[0].toUpperCase() })
$.fn[dimension] = function(value){
var offset, el = this[0]
if (value === undefined) return isWindow(el) ? el['inner' + dimensionProperty] :
isDocument(el) ? el.documentElement['scroll' + dimensionProperty] :
(offset = this.offset()) && offset[dimension]
else return this.each(function(idx){
el = $(this)
el.css(dimension, funcArg(this, value, idx, el[dimension]()))
})
}
})
function traverseNode(node, fun) {
fun(node)
for (var i = 0, len = node.childNodes.length; i < len; i++)
traverseNode(node.childNodes[i], fun)
}
// Generate the `after`, `prepend`, `before`, `append`,
// `insertAfter`, `insertBefore`, `appendTo`, and `prependTo` methods.
adjacencyOperators.forEach(function(operator, operatorIndex) {
var inside = operatorIndex % 2 //=> prepend, append
$.fn[operator] = function(){
// arguments can be nodes, arrays of nodes, Zepto objects and HTML strings
var argType, nodes = $.map(arguments, function(arg) {
argType = type(arg)
return argType == "object" || argType == "array" || arg == null ?
arg : zepto.fragment(arg)
}),
parent, copyByClone = this.length > 1
if (nodes.length < 1) return this
return this.each(function(_, target){
parent = inside ? target : target.parentNode
// convert all methods to a "before" operation
target = operatorIndex == 0 ? target.nextSibling :
operatorIndex == 1 ? target.firstChild :
operatorIndex == 2 ? target :
null
var parentInDocument = $.contains(document.documentElement, parent)
nodes.forEach(function(node){
if (copyByClone) node = node.cloneNode(true)
else if (!parent) return $(node).remove()
parent.insertBefore(node, target)
if (parentInDocument) traverseNode(node, function(el){
if (el.nodeName != null && el.nodeName.toUpperCase() === 'SCRIPT' &&
(!el.type || el.type === 'text/javascript') && !el.src)
window['eval'].call(window, el.innerHTML)
})
})
})
}
// after => insertAfter
// prepend => prependTo
// before => insertBefore
// append => appendTo
$.fn[inside ? operator+'To' : 'insert'+(operatorIndex ? 'Before' : 'After')] = function(html){
$(html)[operator](this)
return this
}
})
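  // Usage sketch of the generated methods (markup is illustrative):
  //   $('ul').append('<li>new</li>')      // insert inside the target, at the end
  //   $('<li>new</li>').appendTo('ul')    // same operation with subject/object reversed
  //   $('h1').before('<hr>')              // insert as the previous sibling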
zepto.Z.prototype = Z.prototype = $.fn
// Export internal API functions in the `$.zepto` namespace
zepto.uniq = uniq
zepto.deserializeValue = deserializeValue
$.zepto = zepto
return $
})()
// If `$` is not yet defined, point it to `Zepto`
window.Zepto = Zepto
window.$ === undefined && (window.$ = Zepto)
// Zepto.js
// (c) 2010-2015 Thomas Fuchs
// Zepto.js may be freely distributed under the MIT license.
;(function($){
var jsonpID = 0,
document = window.document,
key,
name,
rscript = /<script\b[^<]*(?:(?!<\/script>)<[^<]*)*<\/script>/gi,
scriptTypeRE = /^(?:text|application)\/javascript/i,
xmlTypeRE = /^(?:text|application)\/xml/i,
jsonType = 'application/json',
htmlType = 'text/html',
blankRE = /^\s*$/,
originAnchor = document.createElement('a')
originAnchor.href = window.location.href
// trigger a custom event and return false if it was cancelled
function triggerAndReturn(context, eventName, data) {
var event = $.Event(eventName)
$(context).trigger(event, data)
return !event.isDefaultPrevented()
}
// trigger an Ajax "global" event
function triggerGlobal(settings, context, eventName, data) {
if (settings.global) return triggerAndReturn(context || document, eventName, data)
}
// Number of active Ajax requests
$.active = 0
function ajaxStart(settings) {
if (settings.global && $.active++ === 0) triggerGlobal(settings, null, 'ajaxStart')
}
function ajaxStop(settings) {
if (settings.global && !(--$.active)) triggerGlobal(settings, null, 'ajaxStop')
}
// triggers an extra global event "ajaxBeforeSend" that's like "ajaxSend" but cancelable
function ajaxBeforeSend(xhr, settings) {
var context = settings.context
if (settings.beforeSend.call(context, xhr, settings) === false ||
triggerGlobal(settings, context, 'ajaxBeforeSend', [xhr, settings]) === false)
return false
triggerGlobal(settings, context, 'ajaxSend', [xhr, settings])
}
function ajaxSuccess(data, xhr, settings, deferred) {
var context = settings.context, status = 'success'
settings.success.call(context, data, status, xhr)
if (deferred) deferred.resolveWith(context, [data, status, xhr])
triggerGlobal(settings, context, 'ajaxSuccess', [xhr, settings, data])
ajaxComplete(status, xhr, settings)
}
// type: "timeout", "error", "abort", "parsererror"
function ajaxError(error, type, xhr, settings, deferred) {
var context = settings.context
settings.error.call(context, xhr, type, error)
if (deferred) deferred.rejectWith(context, [xhr, type, error])
triggerGlobal(settings, context, 'ajaxError', [xhr, settings, error || type])
ajaxComplete(type, xhr, settings)
}
// status: "success", "notmodified", "error", "timeout", "abort", "parsererror"
function ajaxComplete(status, xhr, settings) {
var context = settings.context
settings.complete.call(context, xhr, status)
triggerGlobal(settings, context, 'ajaxComplete', [xhr, settings])
ajaxStop(settings)
}
// Empty function, used as default callback
function empty() {}
$.ajaxJSONP = function(options, deferred){
if (!('type' in options)) return $.ajax(options)
var _callbackName = options.jsonpCallback,
callbackName = ($.isFunction(_callbackName) ?
_callbackName() : _callbackName) || ('jsonp' + (++jsonpID)),
script = document.createElement('script'),
originalCallback = window[callbackName],
responseData,
abort = function(errorType) {
$(script).triggerHandler('error', errorType || 'abort')
},
xhr = { abort: abort }, abortTimeout
if (deferred) deferred.promise(xhr)
$(script).on('load error', function(e, errorType){
clearTimeout(abortTimeout)
$(script).off().remove()
if (e.type == 'error' || !responseData) {
ajaxError(null, errorType || 'error', xhr, options, deferred)
} else {
ajaxSuccess(responseData[0], xhr, options, deferred)
}
window[callbackName] = originalCallback
if (responseData && $.isFunction(originalCallback))
originalCallback(responseData[0])
originalCallback = responseData = undefined
})
if (ajaxBeforeSend(xhr, options) === false) {
abort('abort')
return xhr
}
window[callbackName] = function(){
responseData = arguments
}
script.src = options.url.replace(/\?(.+)=\?/, '?$1=' + callbackName)
document.head.appendChild(script)
if (options.timeout > 0) abortTimeout = setTimeout(function(){
abort('timeout')
}, options.timeout)
return xhr
}
$.ajaxSettings = {
// Default type of request
type: 'GET',
// Callback that is executed before request
beforeSend: empty,
// Callback that is executed if the request succeeds
success: empty,
    // Callback that is executed if the server returns an error
error: empty,
// Callback that is executed on request complete (both: error and success)
complete: empty,
// The context for the callbacks
context: null,
// Whether to trigger "global" Ajax events
global: true,
// Transport
xhr: function () {
return new window.XMLHttpRequest()
},
// MIME types mapping
// IIS returns Javascript as "application/x-javascript"
accepts: {
script: 'text/javascript, application/javascript, application/x-javascript',
json: jsonType,
xml: 'application/xml, text/xml',
html: htmlType,
text: 'text/plain'
},
// Whether the request is to another domain
crossDomain: false,
// Default timeout
timeout: 0,
// Whether data should be serialized to string
processData: true,
// Whether the browser should be allowed to cache GET responses
cache: true
}
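  // Usage sketch: per-request options are merged over these defaults by $.ajax
  // below (the endpoint and payload are illustrative):
  //   $.ajax({
  //     type: 'POST',
  //     url: '/api/save',
  //     data: { id: 42 },
  //     dataType: 'json',
  //     success: function(resp){ console.log(resp) }
  //   })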
function mimeToDataType(mime) {
if (mime) mime = mime.split(';', 2)[0]
return mime && ( mime == htmlType ? 'html' :
mime == jsonType ? 'json' :
scriptTypeRE.test(mime) ? 'script' :
xmlTypeRE.test(mime) && 'xml' ) || 'text'
}
function appendQuery(url, query) {
if (query == '') return url
return (url + '&' + query).replace(/[&?]{1,2}/, '?')
}
// serialize payload and append it to the URL for GET requests
function serializeData(options) {
if (options.processData && options.data && $.type(options.data) != "string")
options.data = $.param(options.data, options.traditional)
if (options.data && (!options.type || options.type.toUpperCase() == 'GET'))
options.url = appendQuery(options.url, options.data), options.data = undefined
}
$.ajax = function(options){
var settings = $.extend({}, options || {}),
deferred = $.Deferred && $.Deferred(),
urlAnchor, hashIndex
for (key in $.ajaxSettings) if (settings[key] === undefined) settings[key] = $.ajaxSettings[key]
ajaxStart(settings)
if (!settings.crossDomain) {
urlAnchor = document.createElement('a')
urlAnchor.href = settings.url
urlAnchor.href = urlAnchor.href
settings.crossDomain = (originAnchor.protocol + '//' + originAnchor.host) !== (urlAnchor.protocol + '//' + urlAnchor.host)
}
if (!settings.url) settings.url = window.location.toString()
if ((hashIndex = settings.url.indexOf('#')) > -1) settings.url = settings.url.slice(0, hashIndex)
serializeData(settings)
var dataType = settings.dataType, hasPlaceholder = /\?.+=\?/.test(settings.url)
if (hasPlaceholder) dataType = 'jsonp'
if (settings.cache === false || (
(!options || options.cache !== true) &&
('script' == dataType || 'jsonp' == dataType)
))
settings.url = appendQuery(settings.url, '_=' + Date.now())
if ('jsonp' == dataType) {
if (!hasPlaceholder)
settings.url = appendQuery(settings.url,
settings.jsonp ? (settings.jsonp + '=?') : settings.jsonp === false ? '' : 'callback=?')
return $.ajaxJSONP(settings, deferred)
}
var mime = settings.accepts[dataType],
headers = { },
setHeader = function(name, value) { headers[name.toLowerCase()] = [name, value] },
protocol = /^([\w-]+:)\/\//.test(settings.url) ? RegExp.$1 : window.location.protocol,
xhr = settings.xhr(),
nativeSetHeader = xhr.setRequestHeader,
abortTimeout
if (deferred) deferred.promise(xhr)
if (!settings.crossDomain) setHeader('X-Requested-With', 'XMLHttpRequest')
setHeader('Accept', mime || '*/*')
if (mime = settings.mimeType || mime) {
if (mime.indexOf(',') > -1) mime = mime.split(',', 2)[0]
xhr.overrideMimeType && xhr.overrideMimeType(mime)
}
if (settings.contentType || (settings.contentType !== false && settings.data && settings.type.toUpperCase() != 'GET'))
setHeader('Content-Type', settings.contentType || 'application/x-www-form-urlencoded')
if (settings.headers) for (name in settings.headers) setHeader(name, settings.headers[name])
xhr.setRequestHeader = setHeader
xhr.onreadystatechange = function(){
if (xhr.readyState == 4) {
xhr.onreadystatechange = empty
clearTimeout(abortTimeout)
var result, error = false
if ((xhr.status >= 200 && xhr.status < 300) || xhr.status == 304 || (xhr.status == 0 && protocol == 'file:')) {
dataType = dataType || mimeToDataType(settings.mimeType || xhr.getResponseHeader('content-type'))
result = xhr.responseText
try {
// http://perfectionkills.com/global-eval-what-are-the-options/
if (dataType == 'script') (1,eval)(result)
else if (dataType == 'xml') result = xhr.responseXML
else if (dataType == 'json') result = blankRE.test(result) ? null : $.parseJSON(result)
} catch (e) { error = e }
if (error) ajaxError(error, 'parsererror', xhr, settings, deferred)
else ajaxSuccess(result, xhr, settings, deferred)
} else {
ajaxError(xhr.statusText || null, xhr.status ? 'error' : 'abort', xhr, settings, deferred)
}
}
}
if (ajaxBeforeSend(xhr, settings) === false) {
xhr.abort()
ajaxError(null, 'abort', xhr, settings, deferred)
return xhr
}
if (settings.xhrFields) for (name in settings.xhrFields) xhr[name] = settings.xhrFields[name]
var async = 'async' in settings ? settings.async : true
xhr.open(settings.type, settings.url, async, settings.username, settings.password)
for (name in headers) nativeSetHeader.apply(xhr, headers[name])
if (settings.timeout > 0) abortTimeout = setTimeout(function(){
xhr.onreadystatechange = empty
xhr.abort()
ajaxError(null, 'timeout', xhr, settings, deferred)
}, settings.timeout)
// avoid sending empty string (#319)
xhr.send(settings.data ? settings.data : null)
return xhr
}
// handle optional data/success arguments
function parseArguments(url, data, success, dataType) {
if ($.isFunction(data)) dataType = success, success = data, data = undefined
if (!$.isFunction(success)) dataType = success, success = undefined
return {
url: url
, data: data
, success: success
, dataType: dataType
}
}
$.get = function(/* url, data, success, dataType */){
return $.ajax(parseArguments.apply(null, arguments))
}
$.post = function(/* url, data, success, dataType */){
var options = parseArguments.apply(null, arguments)
options.type = 'POST'
return $.ajax(options)
}
$.getJSON = function(/* url, data, success */){
var options = parseArguments.apply(null, arguments)
options.dataType = 'json'
return $.ajax(options)
}
$.fn.load = function(url, data, success){
if (!this.length) return this
var self = this, parts = url.split(/\s/), selector,
options = parseArguments(url, data, success),
callback = options.success
if (parts.length > 1) options.url = parts[0], selector = parts[1]
options.success = function(response){
self.html(selector ?
$('<div>').html(response.replace(rscript, "")).find(selector)
: response)
callback && callback.apply(self, arguments)
}
$.ajax(options)
return this
}
var escape = encodeURIComponent
function serialize(params, obj, traditional, scope){
var type, array = $.isArray(obj), hash = $.isPlainObject(obj)
$.each(obj, function(key, value) {
type = $.type(value)
if (scope) key = traditional ? scope :
scope + '[' + (hash || type == 'object' || type == 'array' ? key : '') + ']'
// handle data in serializeArray() format
if (!scope && array) params.add(value.name, value.value)
// recurse into nested objects
else if (type == "array" || (!traditional && type == "object"))
serialize(params, value, traditional, key)
else params.add(key, value)
})
}
$.param = function(obj, traditional){
var params = []
params.add = function(key, value) {
if ($.isFunction(value)) value = value()
if (value == null) value = ""
this.push(escape(key) + '=' + escape(value))
}
serialize(params, obj, traditional)
return params.join('&').replace(/%20/g, '+')
}
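  // Usage sketch (illustrative data):
  //   $.param({ name: 'Zepto', version: 1 })   // => "name=Zepto&version=1"
  //   $.param({ ids: [1, 2] })                 // => "ids%5B%5D=1&ids%5B%5D=2"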
})(Zepto)
// Zepto.js
// (c) 2010-2015 Thomas Fuchs
// Zepto.js may be freely distributed under the MIT license.
;(function($){
var _zid = 1, undefined,
slice = Array.prototype.slice,
isFunction = $.isFunction,
isString = function(obj){ return typeof obj == 'string' },
handlers = {},
specialEvents={},
focusinSupported = 'onfocusin' in window,
focus = { focus: 'focusin', blur: 'focusout' },
hover = { mouseenter: 'mouseover', mouseleave: 'mouseout' }
specialEvents.click = specialEvents.mousedown = specialEvents.mouseup = specialEvents.mousemove = 'MouseEvents'
function zid(element) {
return element._zid || (element._zid = _zid++)
}
function findHandlers(element, event, fn, selector) {
event = parse(event)
if (event.ns) var matcher = matcherFor(event.ns)
return (handlers[zid(element)] || []).filter(function(handler) {
return handler
&& (!event.e || handler.e == event.e)
&& (!event.ns || matcher.test(handler.ns))
&& (!fn || zid(handler.fn) === zid(fn))
&& (!selector || handler.sel == selector)
})
}
function parse(event) {
var parts = ('' + event).split('.')
return {e: parts[0], ns: parts.slice(1).sort().join(' ')}
}
function matcherFor(ns) {
return new RegExp('(?:^| )' + ns.replace(' ', ' .* ?') + '(?: |$)')
}
function eventCapture(handler, captureSetting) {
return handler.del &&
(!focusinSupported && (handler.e in focus)) ||
!!captureSetting
}
function realEvent(type) {
return hover[type] || (focusinSupported && focus[type]) || type
}
function add(element, events, fn, data, selector, delegator, capture){
var id = zid(element), set = (handlers[id] || (handlers[id] = []))
events.split(/\s/).forEach(function(event){
if (event == 'ready') return $(document).ready(fn)
var handler = parse(event)
handler.fn = fn
handler.sel = selector
// emulate mouseenter, mouseleave
if (handler.e in hover) fn = function(e){
var related = e.relatedTarget
if (!related || (related !== this && !$.contains(this, related)))
return handler.fn.apply(this, arguments)
}
handler.del = delegator
var callback = delegator || fn
handler.proxy = function(e){
e = compatible(e)
if (e.isImmediatePropagationStopped()) return
e.data = data
var result = callback.apply(element, e._args == undefined ? [e] : [e].concat(e._args))
if (result === false) e.preventDefault(), e.stopPropagation()
return result
}
handler.i = set.length
set.push(handler)
if ('addEventListener' in element)
element.addEventListener(realEvent(handler.e), handler.proxy, eventCapture(handler, capture))
})
}
function remove(element, events, fn, selector, capture){
var id = zid(element)
;(events || '').split(/\s/).forEach(function(event){
findHandlers(element, event, fn, selector).forEach(function(handler){
delete handlers[id][handler.i]
if ('removeEventListener' in element)
element.removeEventListener(realEvent(handler.e), handler.proxy, eventCapture(handler, capture))
})
})
}
$.event = { add: add, remove: remove }
$.proxy = function(fn, context) {
var args = (2 in arguments) && slice.call(arguments, 2)
if (isFunction(fn)) {
var proxyFn = function(){ return fn.apply(context, args ? args.concat(slice.call(arguments)) : arguments) }
proxyFn._zid = zid(fn)
return proxyFn
} else if (isString(context)) {
if (args) {
args.unshift(fn[context], fn)
return $.proxy.apply(null, args)
} else {
return $.proxy(fn[context], fn)
}
} else {
throw new TypeError("expected function")
}
}
$.fn.bind = function(event, data, callback){
return this.on(event, data, callback)
}
$.fn.unbind = function(event, callback){
return this.off(event, callback)
}
$.fn.one = function(event, selector, data, callback){
return this.on(event, selector, data, callback, 1)
}
var returnTrue = function(){return true},
returnFalse = function(){return false},
ignoreProperties = /^([A-Z]|returnValue$|layer[XY]$)/,
eventMethods = {
preventDefault: 'isDefaultPrevented',
stopImmediatePropagation: 'isImmediatePropagationStopped',
stopPropagation: 'isPropagationStopped'
}
function compatible(event, source) {
if (source || !event.isDefaultPrevented) {
source || (source = event)
$.each(eventMethods, function(name, predicate) {
var sourceMethod = source[name]
event[name] = function(){
this[predicate] = returnTrue
return sourceMethod && sourceMethod.apply(source, arguments)
}
event[predicate] = returnFalse
})
if (source.defaultPrevented !== undefined ? source.defaultPrevented :
'returnValue' in source ? source.returnValue === false :
source.getPreventDefault && source.getPreventDefault())
event.isDefaultPrevented = returnTrue
}
return event
}
function createProxy(event) {
var key, proxy = { originalEvent: event }
for (key in event)
if (!ignoreProperties.test(key) && event[key] !== undefined) proxy[key] = event[key]
return compatible(proxy, event)
}
$.fn.delegate = function(selector, event, callback){
return this.on(event, selector, callback)
}
$.fn.undelegate = function(selector, event, callback){
return this.off(event, selector, callback)
}
$.fn.live = function(event, callback){
$(document.body).delegate(this.selector, event, callback)
return this
}
$.fn.die = function(event, callback){
$(document.body).undelegate(this.selector, event, callback)
return this
}
$.fn.on = function(event, selector, data, callback, one){
var autoRemove, delegator, $this = this
if (event && !isString(event)) {
$.each(event, function(type, fn){
$this.on(type, selector, data, fn, one)
})
return $this
}
if (!isString(selector) && !isFunction(callback) && callback !== false)
callback = data, data = selector, selector = undefined
if (callback === undefined || data === false)
callback = data, data = undefined
if (callback === false) callback = returnFalse
return $this.each(function(_, element){
if (one) autoRemove = function(e){
remove(element, e.type, callback)
return callback.apply(this, arguments)
}
if (selector) delegator = function(e){
var evt, match = $(e.target).closest(selector, element).get(0)
if (match && match !== element) {
evt = $.extend(createProxy(e), {currentTarget: match, liveFired: element})
return (autoRemove || callback).apply(match, [evt].concat(slice.call(arguments, 1)))
}
}
add(element, event, callback, data, selector, delegator || autoRemove)
})
}
$.fn.off = function(event, selector, callback){
var $this = this
if (event && !isString(event)) {
$.each(event, function(type, fn){
$this.off(type, selector, fn)
})
return $this
}
if (!isString(selector) && !isFunction(callback) && callback !== false)
callback = selector, selector = undefined
if (callback === false) callback = returnFalse
return $this.each(function(){
remove(this, event, callback, selector)
})
}
$.fn.trigger = function(event, args){
event = (isString(event) || $.isPlainObject(event)) ? $.Event(event) : compatible(event)
event._args = args
return this.each(function(){
// handle focus(), blur() by calling them directly
if (event.type in focus && typeof this[event.type] == "function") this[event.type]()
// items in the collection might not be DOM elements
else if ('dispatchEvent' in this) this.dispatchEvent(event)
else $(this).triggerHandler(event, args)
})
}
// triggers event handlers on current element just as if an event occurred,
// doesn't trigger an actual event, doesn't bubble
$.fn.triggerHandler = function(event, args){
var e, result
this.each(function(i, element){
e = createProxy(isString(event) ? $.Event(event) : event)
e._args = args
e.target = element
$.each(findHandlers(element, event.type || event), function(i, handler){
result = handler.proxy(e)
if (e.isImmediatePropagationStopped()) return false
})
})
return result
}
// shortcut methods for `.bind(event, fn)` for each event type
;('focusin focusout focus blur load resize scroll unload click dblclick '+
'mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave '+
'change select keydown keypress keyup error').split(' ').forEach(function(event) {
$.fn[event] = function(callback) {
return (0 in arguments) ?
this.bind(event, callback) :
this.trigger(event)
}
})
$.Event = function(type, props) {
if (!isString(type)) props = type, type = props.type
var event = document.createEvent(specialEvents[type] || 'Events'), bubbles = true
if (props) for (var name in props) (name == 'bubbles') ? (bubbles = !!props[name]) : (event[name] = props[name])
event.initEvent(type, bubbles, true)
return compatible(event)
}
})(Zepto)
// Zepto.js
// (c) 2010-2015 Thomas Fuchs
// Zepto.js may be freely distributed under the MIT license.
;(function($){
$.fn.serializeArray = function() {
var name, type, result = [],
add = function(value) {
if (value.forEach) return value.forEach(add)
result.push({ name: name, value: value })
}
if (this[0]) $.each(this[0].elements, function(_, field){
type = field.type, name = field.name
if (name && field.nodeName.toLowerCase() != 'fieldset' &&
!field.disabled && type != 'submit' && type != 'reset' && type != 'button' && type != 'file' &&
((type != 'radio' && type != 'checkbox') || field.checked))
add($(field).val())
})
return result
}
$.fn.serialize = function(){
var result = []
this.serializeArray().forEach(function(elm){
result.push(encodeURIComponent(elm.name) + '=' + encodeURIComponent(elm.value))
})
return result.join('&')
}
$.fn.submit = function(callback) {
if (0 in arguments) this.bind('submit', callback)
else if (this.length) {
var event = $.Event('submit')
this.eq(0).trigger(event)
if (!event.isDefaultPrevented()) this.get(0).submit()
}
return this
}
})(Zepto)
// Zepto.js
// (c) 2010-2015 Thomas Fuchs
// Zepto.js may be freely distributed under the MIT license.
;(function($, undefined){
var prefix = '', eventPrefix,
vendors = { Webkit: 'webkit', Moz: '', O: 'o' },
testEl = document.createElement('div'),
supportedTransforms = /^((translate|rotate|scale)(X|Y|Z|3d)?|matrix(3d)?|perspective|skew(X|Y)?)$/i,
transform,
transitionProperty, transitionDuration, transitionTiming, transitionDelay,
animationName, animationDuration, animationTiming, animationDelay,
cssReset = {}
function dasherize(str) { return str.replace(/([a-z])([A-Z])/, '$1-$2').toLowerCase() }
function normalizeEvent(name) { return eventPrefix ? eventPrefix + name : name.toLowerCase() }
$.each(vendors, function(vendor, event){
if (testEl.style[vendor + 'TransitionProperty'] !== undefined) {
prefix = '-' + vendor.toLowerCase() + '-'
eventPrefix = event
return false
}
})
transform = prefix + 'transform'
cssReset[transitionProperty = prefix + 'transition-property'] =
cssReset[transitionDuration = prefix + 'transition-duration'] =
cssReset[transitionDelay = prefix + 'transition-delay'] =
cssReset[transitionTiming = prefix + 'transition-timing-function'] =
cssReset[animationName = prefix + 'animation-name'] =
cssReset[animationDuration = prefix + 'animation-duration'] =
cssReset[animationDelay = prefix + 'animation-delay'] =
cssReset[animationTiming = prefix + 'animation-timing-function'] = ''
$.fx = {
off: (eventPrefix === undefined && testEl.style.transitionProperty === undefined),
speeds: { _default: 400, fast: 200, slow: 600 },
cssPrefix: prefix,
transitionEnd: normalizeEvent('TransitionEnd'),
animationEnd: normalizeEvent('AnimationEnd')
}
$.fn.animate = function(properties, duration, ease, callback, delay){
if ($.isFunction(duration))
callback = duration, ease = undefined, duration = undefined
if ($.isFunction(ease))
callback = ease, ease = undefined
if ($.isPlainObject(duration))
ease = duration.easing, callback = duration.complete, delay = duration.delay, duration = duration.duration
if (duration) duration = (typeof duration == 'number' ? duration :
($.fx.speeds[duration] || $.fx.speeds._default)) / 1000
if (delay) delay = parseFloat(delay) / 1000
return this.anim(properties, duration, ease, callback, delay)
}
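  // Usage sketch ('#box' and the property values are illustrative):
  //   $('#box').animate({ opacity: 0.5, top: '50px' }, 500, 'ease-out', function(){ /* done */ })
  //   $('#box').animate({ translateX: '100px' }, { duration: 300, easing: 'linear' })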
$.fn.anim = function(properties, duration, ease, callback, delay){
var key, cssValues = {}, cssProperties, transforms = '',
that = this, wrappedCallback, endEvent = $.fx.transitionEnd,
fired = false
if (duration === undefined) duration = $.fx.speeds._default / 1000
if (delay === undefined) delay = 0
if ($.fx.off) duration = 0
if (typeof properties == 'string') {
// keyframe animation
cssValues[animationName] = properties
cssValues[animationDuration] = duration + 's'
cssValues[animationDelay] = delay + 's'
cssValues[animationTiming] = (ease || 'linear')
endEvent = $.fx.animationEnd
} else {
cssProperties = []
// CSS transitions
for (key in properties)
if (supportedTransforms.test(key)) transforms += key + '(' + properties[key] + ') '
else cssValues[key] = properties[key], cssProperties.push(dasherize(key))
if (transforms) cssValues[transform] = transforms, cssProperties.push(transform)
if (duration > 0 && typeof properties === 'object') {
cssValues[transitionProperty] = cssProperties.join(', ')
cssValues[transitionDuration] = duration + 's'
cssValues[transitionDelay] = delay + 's'
cssValues[transitionTiming] = (ease || 'linear')
}
}
wrappedCallback = function(event){
if (typeof event !== 'undefined') {
if (event.target !== event.currentTarget) return // makes sure the event didn't bubble from "below"
$(event.target).unbind(endEvent, wrappedCallback)
} else
$(this).unbind(endEvent, wrappedCallback) // triggered by setTimeout
fired = true
$(this).css(cssReset)
callback && callback.call(this)
}
if (duration > 0){
this.bind(endEvent, wrappedCallback)
      // transitionEnd does not always fire on older Android phones,
      // so make sure it eventually gets fired
setTimeout(function(){
if (fired) return
wrappedCallback.call(that)
}, ((duration + delay) * 1000) + 25)
}
// trigger page reflow so new elements can animate
this.size() && this.get(0).clientLeft
this.css(cssValues)
if (duration <= 0) setTimeout(function() {
that.each(function(){ wrappedCallback.call(this) })
}, 0)
return this
}
testEl = null
})(Zepto)
// Zepto.js
// (c) 2010-2015 Thomas Fuchs
// Zepto.js may be freely distributed under the MIT license.
;(function($, undefined){
var document = window.document, docElem = document.documentElement,
origShow = $.fn.show, origHide = $.fn.hide, origToggle = $.fn.toggle
function anim(el, speed, opacity, scale, callback) {
if (typeof speed == 'function' && !callback) callback = speed, speed = undefined
var props = { opacity: opacity }
if (scale) {
props.scale = scale
el.css($.fx.cssPrefix + 'transform-origin', '0 0')
}
return el.animate(props, speed, null, callback)
}
function hide(el, speed, scale, callback) {
return anim(el, speed, 0, scale, function(){
origHide.call($(this))
callback && callback.call(this)
})
}
$.fn.show = function(speed, callback) {
origShow.call(this)
if (speed === undefined) speed = 0
else this.css('opacity', 0)
return anim(this, speed, 1, '1,1', callback)
}
$.fn.hide = function(speed, callback) {
if (speed === undefined) return origHide.call(this)
else return hide(this, speed, '0,0', callback)
}
$.fn.toggle = function(speed, callback) {
if (speed === undefined || typeof speed == 'boolean')
return origToggle.call(this, speed)
else return this.each(function(){
var el = $(this)
el[el.css('display') == 'none' ? 'show' : 'hide'](speed, callback)
})
}
$.fn.fadeTo = function(speed, opacity, callback) {
return anim(this, speed, opacity, null, callback)
}
$.fn.fadeIn = function(speed, callback) {
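    // fade from 0 up to the element's current opacity (or to 1 if it is fully transparent)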
var target = this.css('opacity')
if (target > 0) this.css('opacity', 0)
else target = 1
return origShow.call(this).fadeTo(speed, target, callback)
}
$.fn.fadeOut = function(speed, callback) {
return hide(this, speed, null, callback)
}
$.fn.fadeToggle = function(speed, callback) {
return this.each(function(){
var el = $(this)
el[
(el.css('opacity') == 0 || el.css('display') == 'none') ? 'fadeIn' : 'fadeOut'
](speed, callback)
})
}
})(Zepto)
// Zepto.js
// (c) 2010-2015 Thomas Fuchs
// Zepto.js may be freely distributed under the MIT license.
;(function(){
// getComputedStyle shouldn't freak out when called
// without a valid element as argument
try {
getComputedStyle(undefined)
} catch(e) {
var nativeGetComputedStyle = getComputedStyle;
window.getComputedStyle = function(element){
try {
return nativeGetComputedStyle(element)
} catch(e) {
return null
}
}
}
})()
// Zepto.js
// (c) 2010-2015 Thomas Fuchs
// Zepto.js may be freely distributed under the MIT license.
;(function($){
var touch = {},
touchTimeout, tapTimeout, swipeTimeout, longTapTimeout,
longTapDelay = 750,
gesture
function swipeDirection(x1, x2, y1, y2) {
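    // compare horizontal vs. vertical movement and return the direction of the dominant axis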
return Math.abs(x1 - x2) >=
Math.abs(y1 - y2) ? (x1 - x2 > 0 ? 'Left' : 'Right') : (y1 - y2 > 0 ? 'Up' : 'Down')
}
function longTap() {
longTapTimeout = null
if (touch.last) {
touch.el.trigger('longTap')
touch = {}
}
}
function cancelLongTap() {
if (longTapTimeout) clearTimeout(longTapTimeout)
longTapTimeout = null
}
function cancelAll() {
if (touchTimeout) clearTimeout(touchTimeout)
if (tapTimeout) clearTimeout(tapTimeout)
if (swipeTimeout) clearTimeout(swipeTimeout)
if (longTapTimeout) clearTimeout(longTapTimeout)
touchTimeout = tapTimeout = swipeTimeout = longTapTimeout = null
touch = {}
}
function isPrimaryTouch(event){
return (event.pointerType == 'touch' ||
event.pointerType == event.MSPOINTER_TYPE_TOUCH)
&& event.isPrimary
}
function isPointerEventType(e, type){
return (e.type == 'pointer'+type ||
e.type.toLowerCase() == 'mspointer'+type)
}
$(document).ready(function(){
var now, delta, deltaX = 0, deltaY = 0, firstTouch, _isPointerType
if ('MSGesture' in window) {
gesture = new MSGesture()
gesture.target = document.body
}
$(document)
.bind('MSGestureEnd', function(e){
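        // derive the swipe direction from the gesture's final velocity (IE/MSPointer gestures)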
var swipeDirectionFromVelocity =
e.velocityX > 1 ? 'Right' : e.velocityX < -1 ? 'Left' : e.velocityY > 1 ? 'Down' : e.velocityY < -1 ? 'Up' : null;
if (swipeDirectionFromVelocity) {
touch.el.trigger('swipe')
touch.el.trigger('swipe'+ swipeDirectionFromVelocity)
}
})
.on('touchstart MSPointerDown pointerdown', function(e){
if((_isPointerType = isPointerEventType(e, 'down')) &&
!isPrimaryTouch(e)) return
firstTouch = _isPointerType ? e : e.touches[0]
if (e.touches && e.touches.length === 1 && touch.x2) {
// Clear out touch movement data if we have it sticking around
// This can occur if touchcancel doesn't fire due to preventDefault, etc.
touch.x2 = undefined
touch.y2 = undefined
}
now = Date.now()
delta = now - (touch.last || now)
touch.el = $('tagName' in firstTouch.target ?
firstTouch.target : firstTouch.target.parentNode)
touchTimeout && clearTimeout(touchTimeout)
touch.x1 = firstTouch.pageX
touch.y1 = firstTouch.pageY
if (delta > 0 && delta <= 250) touch.isDoubleTap = true
touch.last = now
longTapTimeout = setTimeout(longTap, longTapDelay)
// adds the current touch contact for IE gesture recognition
if (gesture && _isPointerType) gesture.addPointer(e.pointerId);
})
.on('touchmove MSPointerMove pointermove', function(e){
if((_isPointerType = isPointerEventType(e, 'move')) &&
!isPrimaryTouch(e)) return
firstTouch = _isPointerType ? e : e.touches[0]
cancelLongTap()
touch.x2 = firstTouch.pageX
touch.y2 = firstTouch.pageY
deltaX += Math.abs(touch.x1 - touch.x2)
deltaY += Math.abs(touch.y1 - touch.y2)
})
.on('touchend MSPointerUp pointerup', function(e){
if((_isPointerType = isPointerEventType(e, 'up')) &&
!isPrimaryTouch(e)) return
cancelLongTap()
// swipe
if ((touch.x2 && Math.abs(touch.x1 - touch.x2) > 30) ||
(touch.y2 && Math.abs(touch.y1 - touch.y2) > 30))
swipeTimeout = setTimeout(function() {
touch.el.trigger('swipe')
touch.el.trigger('swipe' + (swipeDirection(touch.x1, touch.x2, touch.y1, touch.y2)))
touch = {}
}, 0)
// normal tap
else if ('last' in touch)
// don't fire tap when delta position changed by more than 30 pixels,
// for instance when moving to a point and back to origin
if (deltaX < 30 && deltaY < 30) {
// delay by one tick so we can cancel the 'tap' event if 'scroll' fires
// ('tap' fires before 'scroll')
tapTimeout = setTimeout(function() {
// trigger universal 'tap' with the option to cancelTouch()
// (cancelTouch cancels processing of single vs double taps for faster 'tap' response)
var event = $.Event('tap')
event.cancelTouch = cancelAll
touch.el.trigger(event)
// trigger double tap immediately
if (touch.isDoubleTap) {
if (touch.el) touch.el.trigger('doubleTap')
touch = {}
}
// trigger single tap after 250ms of inactivity
else {
touchTimeout = setTimeout(function(){
touchTimeout = null
if (touch.el) touch.el.trigger('singleTap')
touch = {}
}, 250)
}
}, 0)
} else {
touch = {}
}
deltaX = deltaY = 0
})
// when the browser window loses focus,
// for example when a modal dialog is shown,
// cancel all ongoing events
.on('touchcancel MSPointerCancel pointercancel', cancelAll)
// scrolling the window indicates intention of the user
// to scroll, not tap or swipe, so cancel all ongoing events
$(window).on('scroll', cancelAll)
})
;['swipe', 'swipeLeft', 'swipeRight', 'swipeUp', 'swipeDown',
'doubleTap', 'tap', 'singleTap', 'longTap'].forEach(function(eventName){
$.fn[eventName] = function(callback){ return this.on(eventName, callback) }
})
})(Zepto)
| nongfadai/front_demo | web/src/notuse/app/lib/zepto/zepto.js | JavaScript | mit | 69,690 |
var kunstmaanbundles = kunstmaanbundles || {};
kunstmaanbundles.datepicker = (function($, window, undefined) {
var init, reInit, _setDefaultDate, _initDatepicker;
var _today = window.moment(),
_tomorrow = window.moment(_today).add(1, 'days');
var defaultFormat = 'DD-MM-YYYY',
defaultCollapse = true,
defaultKeepOpen = false,
defaultMinDate = false,
defaultShowDefaultDate = false,
defaultStepping = 1;
init = function() {
$('.js-datepicker').each(function() {
_initDatepicker($(this));
});
};
reInit = function(el) {
if (el) {
_initDatepicker($(el));
} else {
$('.js-datepicker').each(function() {
if (!$(this).hasClass('datepicker--enabled')) {
_initDatepicker($(this));
}
});
}
};
_setDefaultDate = function(elMinDate) {
if(elMinDate === 'tomorrow') {
return _tomorrow;
} else {
return _today;
}
};
_initDatepicker = function($el) {
// Get Settings
var elFormat = $el.data('format'),
elCollapse = $el.data('collapse'),
elKeepOpen = $el.data('keep-open'),
elMinDate = $el.data('min-date'),
elShowDefaultDate = $el.data('default-date'),
elStepping = $el.data('stepping');
// Set Settings
var format = (elFormat !== undefined) ? elFormat : defaultFormat,
collapse = (elCollapse !== undefined) ? elCollapse : defaultCollapse,
keepOpen = (elKeepOpen !== undefined) ? elKeepOpen : defaultKeepOpen,
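            // 'tomorrow' and 'today' resolve to moment instances; any other value means no minimum date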
minDate = (elMinDate === 'tomorrow') ? _tomorrow : (elMinDate === 'today') ? _today : defaultMinDate,
defaultDate = (elShowDefaultDate) ? _setDefaultDate(elMinDate) : defaultShowDefaultDate,
stepping = (elStepping !== undefined) ? elStepping : defaultStepping;
// Setup
var $input = $el.find('input'),
$addon = $el.find('.input-group-addon'),
linkedDatepickerID = $el.data('linked-datepicker') || false;
if (format.indexOf('HH:mm') === -1) {
// Drop time if not necessary
if (minDate) {
minDate = minDate.clone().startOf('day'); // clone() because otherwise .startOf() mutates the original moment object
}
if (defaultDate) {
defaultDate = defaultDate.clone().startOf('day');
}
}
$input.datetimepicker({
format: format,
collapse: collapse,
keepOpen: keepOpen,
minDate: minDate,
defaultDate: defaultDate,
widgetPositioning: {
horizontal: 'left',
vertical: 'auto'
},
widgetParent: $el,
icons: {
time: 'fa fa-clock',
date: 'fa fa-calendar',
up: 'fa fa-chevron-up',
down: 'fa fa-chevron-down',
previous: 'fa fa-arrow-left',
next: 'fa fa-arrow-right',
today: 'fa fa-crosshairs',
clear: 'fa fa-trash'
},
stepping: stepping
});
$el.addClass('datepicker--enabled');
$addon.on('click', function() {
$input.focus();
});
// Linked datepickers - allow future datetime only - (un)publish modal
if (linkedDatepickerID) {
// set min time only if selected date = today
$(document).on('dp.change', linkedDatepickerID, function(e) {
if (e.target.value === _today.format('DD-MM-YYYY')) {
var selectedTime = window.moment($input.val(), 'HH:mm');
// Force user to select new time, if current time isn't valid anymore
selectedTime.isBefore(_today) && $input.data('DateTimePicker').show();
$input.data('DateTimePicker').minDate(_today);
} else {
$input.data('DateTimePicker').minDate(false);
}
});
}
};
return {
init: init,
reInit: reInit
};
})(jQuery, window);
| mwoynarski/KunstmaanBundlesCMS | src/Kunstmaan/AdminBundle/Resources/ui/js/_datepicker.js | JavaScript | mit | 4,312 |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using Xunit;
namespace System.Collections.Sequences.Tests
{
public class SequenceTests
{
[Theory]
[InlineData(new int[] { })]
[InlineData(new int[] { 1 })]
[InlineData(new int[] { 1, 2, 3 })]
public void ArrayList(int[] array)
{
ArrayList<int> collection = CreateArrayList(array);
SequencePosition position = default;
int arrayIndex = 0;
while (collection.TryGet(ref position, out int item))
{
Assert.Equal(array[arrayIndex++], item);
}
Assert.Equal(array.Length, arrayIndex);
arrayIndex = 0;
foreach (int item in collection)
{
Assert.Equal(array[arrayIndex++], item);
}
Assert.Equal(array.Length, arrayIndex);
}
private static ArrayList<int> CreateArrayList(int[] array)
{
var collection = new ArrayList<int>();
foreach (var arrayItem in array) collection.Add(arrayItem);
return collection;
}
[Theory]
[InlineData(new int[] { })]
[InlineData(new int[] { 1 })]
[InlineData(new int[] { 1, 2, 3 })]
public void LinkedContainer(int[] array)
{
LinkedContainer<int> collection = CreateLinkedContainer(array);
SequencePosition position = default;
int arrayIndex = array.Length;
while (collection.TryGet(ref position, out int item))
{
Assert.Equal(array[--arrayIndex], item);
}
}
private static LinkedContainer<int> CreateLinkedContainer(int[] array)
{
var collection = new LinkedContainer<int>();
foreach (var item in array) collection.Add(item); // this adds to front
return collection;
}
[Theory]
[InlineData(new int[] { })]
[InlineData(new int[] { 1 })]
[InlineData(new int[] { 1, 2, 3 })]
public void Hashtable(int[] array)
{
Hashtable<int, string> collection = CreateHashtable(array);
int arrayIndex = 0;
SequencePosition position = default;
while (collection.TryGet(ref position, out KeyValuePair<int, string> item))
{
Assert.Equal(array[arrayIndex++], item.Key);
}
}
private static Hashtable<int, string> CreateHashtable(int[] array)
{
var collection = new Hashtable<int, string>(EqualityComparer<int>.Default);
foreach (var item in array) collection.Add(item, item.ToString());
return collection;
}
}
}
| KrzysztofCwalina/corefxlab | tests/System.Collections.Sequences.Tests/BasicUnitTests.cs | C# | mit | 2,965 |
<?php
/**
* This file is part of the Tmdb PHP API created by Michael Roterman.
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*
* @package Tmdb
* @author Michael Roterman <michael@wtfz.net>
* @copyright (c) 2013, Michael Roterman
* @version 0.0.1
*/
namespace Tmdb\Model\Collection;
use Tmdb\Model\Common\GenericCollection;
/**
* Class QueryParametersCollection
* @package Tmdb\Model\Collection
*/
class QueryParametersCollection extends GenericCollection
{
}
| yorkulibraries/vufind | web/vendor/php-tmdb/api/lib/Tmdb/Model/Collection/QueryParametersCollection.php | PHP | gpl-2.0 | 555 |
using System;
using System.Collections.Generic;
using System.Text;
namespace Rssdp
{
/// <summary>
/// Event arguments for the <see cref="SsdpDevice.DeviceAdded"/> and <see cref="SsdpDevice.DeviceRemoved"/> events.
/// </summary>
public sealed class DeviceEventArgs : EventArgs
{
#region Fields
private readonly SsdpDevice _Device;
#endregion
#region Constructors
/// <summary>
/// Constructs a new instance for the specified <see cref="SsdpDevice"/>.
/// </summary>
/// <param name="device">The <see cref="SsdpDevice"/> associated with the event this argument class is being used for.</param>
/// <exception cref="System.ArgumentNullException">Thrown if the <paramref name="device"/> argument is null.</exception>
public DeviceEventArgs(SsdpDevice device)
{
if (device == null) throw new ArgumentNullException("device");
_Device = device;
}
#endregion
#region Public Properties
/// <summary>
		/// Returns the <see cref="SsdpDevice"/> instance the event is being raised for.
/// </summary>
public SsdpDevice Device
{
get { return _Device; }
}
#endregion
}
} | gerrit507/Emby | RSSDP/DeviceEventArgs.cs | C# | gpl-2.0 | 1,125 |
// { dg-do compile { target c++11 } }
#include "../abi/mangle55.C"
| Gurgel100/gcc | gcc/testsuite/g++.dg/analyzer/pr93899.C | C++ | gpl-2.0 | 67 |
/******************************************************************************
* Product: Adempiere ERP & CRM Smart Business Solution *
* Copyright (C) 1999-2007 ComPiere, Inc. All Rights Reserved. *
* This program is free software, you can redistribute it and/or modify it *
* under the terms version 2 of the GNU General Public License as published *
* by the Free Software Foundation. This program is distributed in the hope *
* that it will be useful, but WITHOUT ANY WARRANTY, without even the implied *
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *
* See the GNU General Public License for more details. *
* You should have received a copy of the GNU General Public License along *
* with this program, if not, write to the Free Software Foundation, Inc., *
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. *
* For the text or an alternative of this public license, you may reach us *
* ComPiere, Inc., 2620 Augustine Dr. #245, Santa Clara, CA 95054, USA *
* or via info@compiere.org or http://www.compiere.org/license.html *
*****************************************************************************/
package org.compiere.model;
import java.math.BigDecimal;
import java.sql.Timestamp;
import org.compiere.util.KeyNamePair;
/** Generated Interface for AD_WF_Responsible
* @author Adempiere (generated)
* @version Release 3.8.0
*/
public interface I_AD_WF_Responsible
{
/** TableName=AD_WF_Responsible */
public static final String Table_Name = "AD_WF_Responsible";
/** AD_Table_ID=646 */
public static final int Table_ID = MTable.getTable_ID(Table_Name);
KeyNamePair Model = new KeyNamePair(Table_ID, Table_Name);
/** AccessLevel = 6 - System - Client
*/
BigDecimal accessLevel = BigDecimal.valueOf(6);
/** Load Meta Data */
/** Column name AD_Client_ID */
public static final String COLUMNNAME_AD_Client_ID = "AD_Client_ID";
/** Get Client.
* Client/Tenant for this installation.
*/
public int getAD_Client_ID();
/** Column name AD_Org_ID */
public static final String COLUMNNAME_AD_Org_ID = "AD_Org_ID";
/** Set Organization.
* Organizational entity within client
*/
public void setAD_Org_ID (int AD_Org_ID);
/** Get Organization.
* Organizational entity within client
*/
public int getAD_Org_ID();
/** Column name AD_Role_ID */
public static final String COLUMNNAME_AD_Role_ID = "AD_Role_ID";
/** Set Role.
* Responsibility Role
*/
public void setAD_Role_ID (int AD_Role_ID);
/** Get Role.
* Responsibility Role
*/
public int getAD_Role_ID();
public org.compiere.model.I_AD_Role getAD_Role() throws RuntimeException;
/** Column name AD_User_ID */
public static final String COLUMNNAME_AD_User_ID = "AD_User_ID";
/** Set User/Contact.
* User within the system - Internal or Business Partner Contact
*/
public void setAD_User_ID (int AD_User_ID);
/** Get User/Contact.
* User within the system - Internal or Business Partner Contact
*/
public int getAD_User_ID();
public org.compiere.model.I_AD_User getAD_User() throws RuntimeException;
/** Column name AD_WF_Responsible_ID */
public static final String COLUMNNAME_AD_WF_Responsible_ID = "AD_WF_Responsible_ID";
/** Set Workflow Responsible.
* Responsible for Workflow Execution
*/
public void setAD_WF_Responsible_ID (int AD_WF_Responsible_ID);
/** Get Workflow Responsible.
* Responsible for Workflow Execution
*/
public int getAD_WF_Responsible_ID();
/** Column name Created */
public static final String COLUMNNAME_Created = "Created";
/** Get Created.
* Date this record was created
*/
public Timestamp getCreated();
/** Column name CreatedBy */
public static final String COLUMNNAME_CreatedBy = "CreatedBy";
/** Get Created By.
	 * User who created this record
*/
public int getCreatedBy();
/** Column name Description */
public static final String COLUMNNAME_Description = "Description";
/** Set Description.
* Optional short description of the record
*/
public void setDescription (String Description);
/** Get Description.
* Optional short description of the record
*/
public String getDescription();
/** Column name EntityType */
public static final String COLUMNNAME_EntityType = "EntityType";
/** Set Entity Type.
* Dictionary Entity Type;
Determines ownership and synchronization
*/
public void setEntityType (String EntityType);
/** Get Entity Type.
* Dictionary Entity Type;
Determines ownership and synchronization
*/
public String getEntityType();
/** Column name IsActive */
public static final String COLUMNNAME_IsActive = "IsActive";
/** Set Active.
* The record is active in the system
*/
public void setIsActive (boolean IsActive);
/** Get Active.
* The record is active in the system
*/
public boolean isActive();
/** Column name Name */
public static final String COLUMNNAME_Name = "Name";
/** Set Name.
* Alphanumeric identifier of the entity
*/
public void setName (String Name);
/** Get Name.
* Alphanumeric identifier of the entity
*/
public String getName();
/** Column name ResponsibleType */
public static final String COLUMNNAME_ResponsibleType = "ResponsibleType";
/** Set Responsible Type.
* Type of the Responsibility for a workflow
*/
public void setResponsibleType (String ResponsibleType);
/** Get Responsible Type.
* Type of the Responsibility for a workflow
*/
public String getResponsibleType();
/** Column name Updated */
public static final String COLUMNNAME_Updated = "Updated";
/** Get Updated.
* Date this record was updated
*/
public Timestamp getUpdated();
/** Column name UpdatedBy */
public static final String COLUMNNAME_UpdatedBy = "UpdatedBy";
/** Get Updated By.
	 * User who updated this record
*/
public int getUpdatedBy();
}
| armenrz/adempiere | base/src/org/compiere/model/I_AD_WF_Responsible.java | Java | gpl-2.0 | 6,101 |
/*
* Copyright (C) 2008-2018 TrinityCore <https://www.trinitycore.org/>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/* ScriptData
SDName: Boss_NexusPrince_Shaffar
SD%Complete: 80
SDComment: Needs more tuning of spell timers; the fight should not be as linear as it currently is. Also should possibly find a better way to deal with his three initial beacons to make sure they all aggro.
SDCategory: Auchindoun, Mana Tombs
EndScriptData */
#include "ScriptMgr.h"
#include "mana_tombs.h"
#include "MotionMaster.h"
#include "ScriptedCreature.h"
enum Yells
{
SAY_INTRO = 0,
SAY_AGGRO = 1,
SAY_SLAY = 2,
SAY_SUMMON = 3,
SAY_DEAD = 4
};
enum Spells
{
SPELL_BLINK = 34605,
SPELL_FROSTBOLT = 32364,
SPELL_FIREBALL = 32363,
SPELL_FROSTNOVA = 32365,
SPELL_ETHEREAL_BEACON = 32371, // Summons NPC_BEACON
SPELL_ETHEREAL_BEACON_VISUAL = 32368,
// Ethereal Beacon
SPELL_ARCANE_BOLT = 15254,
SPELL_ETHEREAL_APPRENTICE = 32372 // Summon 18430
};
enum Creatures
{
NPC_BEACON = 18431,
NPC_SHAFFAR = 18344
};
enum Misc
{
NR_INITIAL_BEACONS = 3
};
enum Events
{
EVENT_BLINK = 1,
EVENT_BEACON,
EVENT_FIREBALL,
EVENT_FROSTBOLT,
EVENT_FROST_NOVA
};
class boss_nexusprince_shaffar : public CreatureScript
{
public:
boss_nexusprince_shaffar() : CreatureScript("boss_nexusprince_shaffar") { }
struct boss_nexusprince_shaffarAI : public BossAI
{
boss_nexusprince_shaffarAI(Creature* creature) : BossAI(creature, DATA_NEXUSPRINCE_SHAFFAR)
{
_hasTaunted = false;
}
void Reset() override
{
_Reset();
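            // summon the three initial Ethereal Beacons around Shaffar's home position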
float dist = 8.0f;
float posX, posY, posZ, angle;
me->GetHomePosition(posX, posY, posZ, angle);
me->SummonCreature(NPC_BEACON, posX - dist, posY - dist, posZ, angle, TEMPSUMMON_CORPSE_TIMED_DESPAWN, 7200000);
me->SummonCreature(NPC_BEACON, posX - dist, posY + dist, posZ, angle, TEMPSUMMON_CORPSE_TIMED_DESPAWN, 7200000);
me->SummonCreature(NPC_BEACON, posX + dist, posY, posZ, angle, TEMPSUMMON_CORPSE_TIMED_DESPAWN, 7200000);
}
void MoveInLineOfSight(Unit* who) override
{
if (!_hasTaunted && who->GetTypeId() == TYPEID_PLAYER && me->IsWithinDistInMap(who, 100.0f))
{
Talk(SAY_INTRO);
_hasTaunted = true;
}
}
void EnterCombat(Unit* /*who*/) override
{
Talk(SAY_AGGRO);
_EnterCombat();
events.ScheduleEvent(EVENT_BEACON, 10000);
events.ScheduleEvent(EVENT_FIREBALL, 8000);
events.ScheduleEvent(EVENT_FROSTBOLT, 4000);
events.ScheduleEvent(EVENT_FROST_NOVA, 15000);
}
void JustSummoned(Creature* summoned) override
{
if (summoned->GetEntry() == NPC_BEACON)
{
summoned->CastSpell(summoned, SPELL_ETHEREAL_BEACON_VISUAL, false);
if (Unit* target = SelectTarget(SELECT_TARGET_RANDOM, 0))
summoned->AI()->AttackStart(target);
}
summons.Summon(summoned);
}
void KilledUnit(Unit* victim) override
{
if (victim->GetTypeId() == TYPEID_PLAYER)
Talk(SAY_SLAY);
}
void JustDied(Unit* /*killer*/) override
{
Talk(SAY_DEAD);
_JustDied();
}
void ExecuteEvent(uint32 eventId) override
{
switch (eventId)
{
case EVENT_BLINK:
if (me->IsNonMeleeSpellCast(false))
me->InterruptNonMeleeSpells(true);
// expire movement, will prevent from running right back to victim after cast
// (but should MoveChase be used again at a certain time or should he not move?)
if (me->GetMotionMaster()->GetCurrentMovementGeneratorType() == CHASE_MOTION_TYPE)
me->GetMotionMaster()->MovementExpired();
DoCast(me, SPELL_BLINK);
break;
case EVENT_BEACON:
if (!urand(0, 3))
Talk(SAY_SUMMON);
DoCast(me, SPELL_ETHEREAL_BEACON, true);
events.ScheduleEvent(EVENT_BEACON, 10000);
break;
case EVENT_FIREBALL:
                    DoCastVictim(SPELL_FIREBALL);
events.ScheduleEvent(EVENT_FIREBALL, urand(4500, 6000));
break;
case EVENT_FROSTBOLT:
DoCastVictim(SPELL_FROSTBOLT);
events.ScheduleEvent(EVENT_FROSTBOLT, urand(4500, 6000));
break;
case EVENT_FROST_NOVA:
DoCast(me, SPELL_FROSTNOVA);
events.ScheduleEvent(EVENT_FROST_NOVA, urand(17500, 25000));
events.ScheduleEvent(EVENT_BLINK, 1500);
break;
default:
break;
}
}
private:
bool _hasTaunted;
};
CreatureAI* GetAI(Creature* creature) const override
{
return GetManaTombsAI<boss_nexusprince_shaffarAI>(creature);
}
};
enum EtherealBeacon
{
EVENT_APPRENTICE = 1,
EVENT_ARCANE_BOLT
};
class npc_ethereal_beacon : public CreatureScript
{
public:
npc_ethereal_beacon() : CreatureScript("npc_ethereal_beacon") { }
struct npc_ethereal_beaconAI : public ScriptedAI
{
npc_ethereal_beaconAI(Creature* creature) : ScriptedAI(creature) { }
void Reset() override
{
_events.Reset();
}
void EnterCombat(Unit* who) override
{
if (Creature* shaffar = me->FindNearestCreature(NPC_SHAFFAR, 100.0f))
if (!shaffar->IsInCombat())
shaffar->AI()->AttackStart(who);
_events.ScheduleEvent(EVENT_APPRENTICE, DUNGEON_MODE(20000, 10000));
_events.ScheduleEvent(EVENT_ARCANE_BOLT, 1000);
}
void JustSummoned(Creature* summoned) override
{
summoned->AI()->AttackStart(me->GetVictim());
}
void UpdateAI(uint32 diff) override
{
if (!UpdateVictim())
return;
_events.Update(diff);
if (me->HasUnitState(UNIT_STATE_CASTING))
return;
while (uint32 eventId = _events.ExecuteEvent())
{
switch (eventId)
{
case EVENT_APPRENTICE:
DoCast(me, SPELL_ETHEREAL_APPRENTICE, true);
me->DespawnOrUnsummon();
break;
case EVENT_ARCANE_BOLT:
DoCastVictim(SPELL_ARCANE_BOLT);
_events.ScheduleEvent(EVENT_ARCANE_BOLT, urand(2000, 4500));
break;
default:
break;
}
}
}
private:
EventMap _events;
};
CreatureAI* GetAI(Creature* creature) const override
{
return GetManaTombsAI<npc_ethereal_beaconAI>(creature);
}
};
enum EtherealApprentice
{
SPELL_ETHEREAL_APPRENTICE_FIREBOLT = 32369,
SPELL_ETHEREAL_APPRENTICE_FROSTBOLT = 32370,
EVENT_ETHEREAL_APPRENTICE_FIREBOLT = 1,
EVENT_ETHEREAL_APPRENTICE_FROSTBOLT
};
class npc_ethereal_apprentice : public CreatureScript
{
public:
npc_ethereal_apprentice() : CreatureScript("npc_ethereal_apprentice") { }
struct npc_ethereal_apprenticeAI : public ScriptedAI
{
npc_ethereal_apprenticeAI(Creature* creature) : ScriptedAI(creature) { }
void Reset() override
{
_events.Reset();
}
void EnterCombat(Unit* /*who*/) override
{
_events.ScheduleEvent(EVENT_ETHEREAL_APPRENTICE_FIREBOLT, 3000);
}
void UpdateAI(uint32 diff) override
{
if (!UpdateVictim())
return;
_events.Update(diff);
if (me->HasUnitState(UNIT_STATE_CASTING))
return;
while (uint32 eventId = _events.ExecuteEvent())
{
switch (eventId)
{
case EVENT_ETHEREAL_APPRENTICE_FIREBOLT:
DoCastVictim(SPELL_ETHEREAL_APPRENTICE_FIREBOLT, true);
_events.ScheduleEvent(EVENT_ETHEREAL_APPRENTICE_FROSTBOLT, 3000);
break;
case EVENT_ETHEREAL_APPRENTICE_FROSTBOLT:
DoCastVictim(SPELL_ETHEREAL_APPRENTICE_FROSTBOLT, true);
_events.ScheduleEvent(EVENT_ETHEREAL_APPRENTICE_FIREBOLT, 3000);
break;
default:
break;
}
}
}
private:
EventMap _events;
};
CreatureAI* GetAI(Creature* creature) const override
{
return GetManaTombsAI<npc_ethereal_apprenticeAI>(creature);
}
};
enum Yor
{
SPELL_DOUBLE_BREATH = 38361,
EVENT_DOUBLE_BREATH = 1
};
class npc_yor : public CreatureScript
{
public:
npc_yor() : CreatureScript("npc_yor") { }
struct npc_yorAI : public ScriptedAI
{
npc_yorAI(Creature* creature) : ScriptedAI(creature) { }
void Reset() override { }
void EnterCombat(Unit* /*who*/) override
{
_events.ScheduleEvent(EVENT_DOUBLE_BREATH, urand(6000,9000));
}
void UpdateAI(uint32 diff) override
{
if (!UpdateVictim())
return;
_events.Update(diff);
while (uint32 eventId = _events.ExecuteEvent())
{
switch (eventId)
{
case EVENT_DOUBLE_BREATH:
if (me->IsWithinDist(me->GetVictim(), ATTACK_DISTANCE))
DoCastVictim(SPELL_DOUBLE_BREATH);
_events.ScheduleEvent(EVENT_DOUBLE_BREATH, urand(6000,9000));
break;
default:
break;
}
}
DoMeleeAttackIfReady();
}
private:
EventMap _events;
};
CreatureAI* GetAI(Creature* creature) const override
{
return GetManaTombsAI<npc_yorAI>(creature);
}
};
void AddSC_boss_nexusprince_shaffar()
{
new boss_nexusprince_shaffar();
new npc_ethereal_beacon();
new npc_ethereal_apprentice();
new npc_yor();
}
| Golrag/TrinityCore | src/server/scripts/Outland/Auchindoun/ManaTombs/boss_nexusprince_shaffar.cpp | C++ | gpl-2.0 | 12,484 |
package com.codename1.ui.layouts.mig;
import com.codename1.ui.Display;
import java.util.ArrayList;
import java.util.HashMap;
/*
* License (BSD):
* ==============
*
* Copyright (c) 2004, Mikael Grev, MiG InfoCom AB. (miglayout (at) miginfocom (dot) com)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or other
* materials provided with the distribution.
* Neither the name of the MiG InfoCom AB nor the names of its contributors may be
* used to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
* OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
* OF SUCH DAMAGE.
*
* @version 1.0
* @author Mikael Grev, MiG InfoCom AB
* Date: 2006-sep-08
*/
/**
*/
public final class LinkHandler
{
public static final int X = 0;
public static final int Y = 1;
public static final int WIDTH = 2;
public static final int HEIGHT = 3;
public static final int X2 = 4;
public static final int Y2 = 5;
private static final ArrayList<Object> LAYOUTS = new ArrayList<Object>(4);
private static final ArrayList<HashMap<String, int[]>> VALUES = new ArrayList<HashMap<String, int[]>>(4);
private static final ArrayList<HashMap<String, int[]>> VALUES_TEMP = new ArrayList<HashMap<String, int[]>>(4);
private LinkHandler()
{
}
public synchronized static Integer getValue(Object layout, String key, int type)
{
Integer ret = null;
boolean cont = true;
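		// walk the registered layouts newest-first, pruning entries whose layout has been garbage collected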
for (int i = LAYOUTS.size() - 1; i >= 0; i--) {
Object l = Display.getInstance().extractHardRef(LAYOUTS.get(i));
if (ret == null && l == layout) {
int[] rect = VALUES_TEMP.get(i).get(key);
if (cont && rect != null && rect[type] != LayoutUtil.NOT_SET) {
ret = new Integer(rect[type]);
} else {
rect = VALUES.get(i).get(key);
ret = (rect != null && rect[type] != LayoutUtil.NOT_SET) ? new Integer(rect[type]) : null;
}
cont = false;
}
if (l == null) {
LAYOUTS.remove(i);
VALUES.remove(i);
VALUES_TEMP.remove(i);
}
}
return ret;
}
/** Sets a key that can be linked to from any component.
* @param layout The MigLayout instance
* @param key The key to link to. This is the same as the ID in a component constraint.
* @param x x
* @param y y
* @param width Width
* @param height Height
* @return If the value was changed
*/
public synchronized static boolean setBounds(Object layout, String key, int x, int y, int width, int height)
{
return setBounds(layout, key, x, y, width, height, false, false);
}
synchronized static boolean setBounds(Object layout, String key, int x, int y, int width, int height, boolean temporary, boolean incCur)
{
for (int i = LAYOUTS.size() - 1; i >= 0; i--) {
Object l = Display.getInstance().extractHardRef(LAYOUTS.get(i));
if (l == layout) {
HashMap<String, int[]> map = (temporary ? VALUES_TEMP : VALUES).get(i);
int[] old = map.get(key);
if (old == null || old[X] != x || old[Y] != y || old[WIDTH] != width || old[HEIGHT] != height) {
if (old == null || incCur == false) {
map.put(key, new int[] {x, y, width, height, x + width, y + height});
return true;
} else {
boolean changed = false;
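					// incCur: expand the stored rectangle so it covers both the old and the new bounds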
if (x != LayoutUtil.NOT_SET) {
if (old[X] == LayoutUtil.NOT_SET || x < old[X]) {
old[X] = x;
old[WIDTH] = old[X2] - x;
changed = true;
}
if (width != LayoutUtil.NOT_SET) {
int x2 = x + width;
if (old[X2] == LayoutUtil.NOT_SET || x2 > old[X2]) {
old[X2] = x2;
old[WIDTH] = x2 - old[X];
changed = true;
}
}
}
if (y != LayoutUtil.NOT_SET) {
if (old[Y] == LayoutUtil.NOT_SET || y < old[Y]) {
old[Y] = y;
old[HEIGHT] = old[Y2] - y;
changed = true;
}
if (height != LayoutUtil.NOT_SET) {
int y2 = y + height;
if (old[Y2] == LayoutUtil.NOT_SET || y2 > old[Y2]) {
old[Y2] = y2;
old[HEIGHT] = y2 - old[Y];
changed = true;
}
}
}
return changed;
}
}
return false;
}
}
LAYOUTS.add(Display.getInstance().createSoftWeakRef(layout));
int[] bounds = new int[] {x, y, width, height, x + width, y + height};
HashMap<String, int[]> values = new HashMap<String, int[]>(4);
if (temporary)
values.put(key, bounds);
VALUES_TEMP.add(values);
values = new HashMap<String, int[]>(4);
if (temporary == false)
values.put(key, bounds);
VALUES.add(values);
return true;
}
	/** This method clears any weak references right away instead of waiting for the GC. This might be advantageous
	 * if lots of layouts are created and disposed of quickly, to keep memory consumption down.
* @since 3.7.4
*/
public synchronized static void clearWeakReferencesNow()
{
LAYOUTS.clear();
}
public synchronized static boolean clearBounds(Object layout, String key)
{
for (int i = LAYOUTS.size() - 1; i >= 0; i--) {
Object l = Display.getInstance().extractHardRef(LAYOUTS.get(i));
if (l == layout)
return VALUES.get(i).remove(key) != null;
}
return false;
}
synchronized static void clearTemporaryBounds(Object layout)
{
for (int i = LAYOUTS.size() - 1; i >= 0; i--) {
Object l = Display.getInstance().extractHardRef(LAYOUTS.get(i));
if (l == layout) {
VALUES_TEMP.get(i).clear();
return;
}
}
}
}
| JrmyDev/CodenameOne | CodenameOne/src/com/codename1/ui/layouts/mig/LinkHandler.java | Java | gpl-2.0 | 6,705 |
require 'test/unit'
require 'soap/rpc/driver'
require 'soap/rpc/standaloneServer'
require 'soap/header/simplehandler'
module SOAP
module Header
class TestAuthHeader < Test::Unit::TestCase
Port = 17171
PortName = 'http://tempuri.org/authHeaderPort'
MyHeaderName = XSD::QName.new("http://tempuri.org/authHeader", "auth")
DummyHeaderName = XSD::QName.new("http://tempuri.org/authHeader", "dummy")
class AuthHeaderPortServer < SOAP::RPC::StandaloneServer
class AuthHeaderService
def self.create
new
end
def deposit(amt)
"deposit #{amt} OK"
end
def withdrawal(amt)
"withdrawal #{amt} OK"
end
end
def initialize(*arg)
super
add_rpc_servant(AuthHeaderService.new, PortName)
ServerAuthHeaderHandler.init
add_request_headerhandler(ServerAuthHeaderHandler)
end
class ServerAuthHeaderHandler < SOAP::Header::SimpleHandler
class << self
def create
new
end
def init
@users = {
'NaHi' => 'passwd',
'HiNa' => 'wspass'
}
@sessions = {}
end
def login(userid, passwd)
userid and passwd and @users[userid] == passwd
end
def auth(sessionid)
@sessions[sessionid][0]
end
def create_session(userid)
while true
key = create_sessionkey
break unless @sessions[key]
end
@sessions[key] = [userid]
key
end
def destroy_session(sessionkey)
@sessions.delete(sessionkey)
end
def sessions
@sessions
end
private
def create_sessionkey
Time.now.usec.to_s
end
end
def initialize
super(MyHeaderName)
@userid = @sessionid = nil
end
def on_simple_outbound
{ "sessionid" => @sessionid }
end
def on_simple_inbound(my_header, mu)
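        # accept either userid/passwd credentials or a previously issued session id, which is consumed and replaced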
auth = false
userid = my_header["userid"]
passwd = my_header["passwd"]
if self.class.login(userid, passwd)
auth = true
elsif sessionid = my_header["sessionid"]
if userid = self.class.auth(sessionid)
self.class.destroy_session(sessionid)
auth = true
end
end
raise RuntimeError.new("authentication failed") unless auth
@userid = userid
@sessionid = self.class.create_session(userid)
end
end
end
class ClientAuthHeaderHandler < SOAP::Header::SimpleHandler
def initialize(userid, passwd, mustunderstand)
super(MyHeaderName)
@sessionid = nil
@userid = userid
@passwd = passwd
@mustunderstand = mustunderstand
end
def on_simple_outbound
if @sessionid
{ "sessionid" => @sessionid }
else
{ "userid" => @userid, "passwd" => @passwd }
end
end
def on_simple_inbound(my_header, mustunderstand)
@sessionid = my_header["sessionid"]
end
def sessionid
@sessionid
end
end
class DummyHeaderHandler < SOAP::Header::SimpleHandler
def initialize(mustunderstand)
super(DummyHeaderName)
@mustunderstand = mustunderstand
end
def on_simple_outbound
{ XSD::QName.new("foo", "bar") => nil }
end
def on_simple_inbound(my_header, mustunderstand)
end
end
def setup
@endpoint = "http://localhost:#{Port}/"
setup_server
setup_client
end
def setup_server
@server = AuthHeaderPortServer.new(self.class.name, nil, '0.0.0.0', Port)
@server.level = Logger::Severity::ERROR
@t = Thread.new {
@server.start
}
end
def setup_client
@client = SOAP::RPC::Driver.new(@endpoint, PortName)
@client.wiredump_dev = STDERR if $DEBUG
@client.add_method('deposit', 'amt')
@client.add_method('withdrawal', 'amt')
end
def teardown
teardown_server
teardown_client
end
def teardown_server
@server.shutdown
@t.kill
@t.join
end
def teardown_client
@client.reset_stream
end
def test_success_no_mu
h = ClientAuthHeaderHandler.new('NaHi', 'passwd', false)
@client.headerhandler << h
do_transaction_check(h)
end
def test_success_mu
h = ClientAuthHeaderHandler.new('NaHi', 'passwd', true)
@client.headerhandler << h
do_transaction_check(h)
end
def test_no_mu
h = ClientAuthHeaderHandler.new('NaHi', 'passwd', true)
@client.headerhandler << h
@client.headerhandler << DummyHeaderHandler.new(false)
do_transaction_check(h)
end
def test_mu
h = ClientAuthHeaderHandler.new('NaHi', 'passwd', true)
@client.headerhandler << h
@client.headerhandler << (h2 = DummyHeaderHandler.new(true))
assert_raise(SOAP::UnhandledMustUnderstandHeaderError) do
assert_equal("deposit 150 OK", @client.deposit(150))
end
@client.headerhandler.delete(h2)
@client.headerhandler << (h2 = DummyHeaderHandler.new(false))
do_transaction_check(h)
end
def do_transaction_check(h)
assert_equal("deposit 150 OK", @client.deposit(150))
serversess = AuthHeaderPortServer::ServerAuthHeaderHandler.sessions[h.sessionid]
assert_equal("NaHi", serversess[0])
assert_equal("withdrawal 120 OK", @client.withdrawal(120))
serversess = AuthHeaderPortServer::ServerAuthHeaderHandler.sessions[h.sessionid]
assert_equal("NaHi", serversess[0])
end
def test_authfailure
h = ClientAuthHeaderHandler.new('NaHi', 'pa', false)
@client.headerhandler << h
assert_raises(RuntimeError) do
@client.deposit(150)
end
end
end
end
end
| jacques/connector | vendor/gems/soap4r-1.5.5.20061022/test/soap/header/test_authheader.rb | Ruby | gpl-2.0 | 5,284 |
#######################################################################
#
# Author: Malte Helmert (helmert@informatik.uni-freiburg.de)
# (C) Copyright 2003-2004 Malte Helmert
#
# This file is part of LAMA.
#
# LAMA is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the license, or (at your option) any later version.
#
# LAMA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
#
#######################################################################
import cStringIO
import textwrap
__all__ = ["print_nested_list"]
def tokenize_list(obj):
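    # flatten the nested list into a token stream, emitting "(" and ")" around each sublist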
if isinstance(obj, list):
yield "("
for item in obj:
for elem in tokenize_list(item):
yield elem
yield ")"
else:
yield obj
def wrap_lines(lines):
for line in lines:
indent = " " * (len(line) - len(line.lstrip()) + 4)
line = line.replace("-", "_") # textwrap breaks on "-", but not "_"
line = textwrap.fill(line, subsequent_indent=indent, break_long_words=False)
yield line.replace("_", "-")
def print_nested_list(nested_list):
stream = cStringIO.StringIO()
indent = 0
startofline = True
pendingspace = False
for token in tokenize_list(nested_list):
if token == "(":
if not startofline:
stream.write("\n")
stream.write("%s(" % (" " * indent))
indent += 2
startofline = False
pendingspace = False
elif token == ")":
indent -= 2
stream.write(")")
startofline = False
pendingspace = False
else:
if startofline:
stream.write(" " * indent)
if pendingspace:
stream.write(" ")
stream.write(token)
startofline = False
pendingspace = True
for line in wrap_lines(stream.getvalue().splitlines()):
print line
| PlanTool/plantool | wrappingPlanners/Deterministic/LAMA/seq-sat-lama/lama/translate/pddl/pretty_print.py | Python | gpl-2.0 | 2,178 |
<?php
/**
* @package com_zoo Component
* @file category.php
* @version 2.4.9 May 2011
* @author YOOtheme http://www.yootheme.com
* @copyright Copyright (C) 2007 - 2011 YOOtheme GmbH
* @license http://www.gnu.org/licenses/gpl-2.0.html GNU/GPLv2 only
*/
/*
Class: Category
Category related attributes and functions.
*/
class Category {
/*
Variable: id
Primary key.
*/
public $id;
/*
Variable: application_id
Related application id.
*/
public $application_id;
/*
Variable: name
Category name.
*/
public $name;
/*
Variable: alias
Category alias.
*/
public $alias;
/*
Variable: description
Category description.
*/
public $description;
/*
Variable: parent
Categories parent id.
*/
public $parent;
/*
Variable: ordering
Categories ordering.
*/
public $ordering;
/*
Variable: published
Category published state.
*/
public $published;
/*
Variable: params
Category params.
*/
public $params;
/*
Variable: item_ids
Related category item ids.
*/
public $item_ids;
/*
Variable: app
App instance.
*/
public $app;
/*
Variable: _parent
Related category parent object.
*/
protected $_parent;
/*
Variable: _children
Related category children objects.
*/
protected $_children = array();
/*
Variable: _items
Related category item objects.
*/
protected $_items = array();
/*
Variable: _item_count
Related category item count.
*/
public $_item_count;
/*
Variable: _total_item_count
Item count including subcategories.
*/
protected $_total_item_count = null;
public function __construct() {
// init vars
$app = App::getInstance('zoo');
// decorate data as object
$this->params = $app->parameter->create($this->params);
// set related item ids
$this->item_ids = isset($this->item_ids) ? explode(',', $this->item_ids) : array();
if (!empty($this->item_ids)) {
$this->item_ids = array_combine($this->item_ids, $this->item_ids);
}
}
/*
Function: getApplication
Get related application object.
Returns:
Application - application object
*/
public function getApplication() {
return $this->app->table->application->get($this->application_id);
}
/*
Function: hasChildren
Does this category have children.
Returns:
Bool
*/
public function hasChildren() {
return !empty($this->_children);
}
/*
Function: getChildren
Method to get category's children.
Parameters:
recursive - Recursivly retrieve childrens children.
Returns:
id - children
*/
public function getChildren($recursive = false) {
if ($recursive) {
$children = array();
foreach ($this->_children as $child) {
$children[$child->id] = $child;
$children += $child->getChildren(true);
}
return $children;
}
return $this->_children;
}
/*
Function: setChildren
Set children.
Returns:
Category
*/
public function setChildren($val) {
$this->_children = $val;
return $this;
}
/*
Function: addChildren
Add children.
Returns:
Category
*/
public function addChild($category) {
$this->_children[] = $category;
return $this;
}
/*
Function: removeChild
Remove a child.
Returns:
Category
*/
public function removeChild($child) {
unset($this->_children[$child->id]);
return $this;
}
/*
Function: getParent
Method to get category's parent.
Returns:
id - parent
*/
public function getParent() {
return $this->_parent;
}
/*
Function: setParent
Set parent.
Returns:
Category
*/
public function setParent($val) {
$this->_parent = $val;
return $this;
}
/*
Function: getPathway
Method to get category's pathway.
Returns:
Array - Array of parent categories
*/
public function getPathway() {
if ($this->_parent == null) {
return array();
}
$pathway = $this->_parent->getPathway();
$pathway[$this->id] = $this;
return $pathway;
}
/*
Function: isPublished
Get published state.
Returns:
-
*/
public function isPublished() {
return $this->published;
}
/*
Function: setPublished
Set category published state and fire event.
Parameters:
$val - State
$save - Autosave category before fire event
Returns:
Category
*/
public function setPublished($val, $save = false) {
if ($this->published != $val) {
// set state
$old_state = $this->state;
$this->published = $val;
// autosave category ?
if ($save) {
$this->app->table->category->save($this);
}
// fire event
$this->app->event->dispatcher->notify($this->app->event->create($this, 'category:stateChanged', compact('old_state')));
}
return $this;
}
/*
Function: getPath
Method to get the path to this category.
Returns:
Array - Category path
*/
public function getPath($path = array()) {
$path[] = $this->id;
if ($this->_parent != null) {
$path = $this->_parent->getPath($path);
}
return $path;
}
/*
Function: getItems
Method to get category's items.
Returns:
Array
*/
public function getItems($published = false, $user = null, $orderby = '') {
if (empty($this->_items)) {
$this->_items = $this->app->table->item->getFromCategory($this->application_id, $this->id, $published, $user, $orderby);
}
return $this->_items;
}
/*
Function: itemCount
Method to count category's items.
Returns:
Int - Number of items
*/
public function itemCount() {
if (!isset($this->_item_count)) {
$this->_item_count = count($this->item_ids);
}
return $this->_item_count;
}
/*
		Function: totalItemCount
			Method to count the category's published items, including all children's items.
Returns:
Int - Number of items
*/
public function totalItemCount() {
if (!isset($this->_total_item_count)) {
$this->_total_item_count = count($this->getItemIds(true));
}
return $this->_total_item_count;
}
/*
Function: getItemIds
Method to get related item ids.
Returns:
Array - Related item ids
*/
public function getItemIds($recursive = false) {
$item_ids = $this->item_ids;
if ($recursive) {
foreach($this->getChildren(true) as $child) {
$item_ids += $child->item_ids;
}
}
return $item_ids;
}
/*
Function: childrenHaveItems
Method to check if children have items.
Returns:
Bool
*/
public function childrenHaveItems() {
foreach ($this->getChildren(true) as $child) {
if ($child->itemCount()) {
return true;
}
}
return false;
}
/*
Function: getParams
Gets category params.
Parameters:
$for - Get params for a specific use, including overidden values.
Returns:
Object - AppParameter
*/
public function getParams($for = null) {
// get site params and inherit globals
if ($for == 'site') {
return $this->app->parameter->create()
->set('config.', $this->getApplication()->getParams()->get('global.config.'))
->set('template.', $this->getApplication()->getParams()->get('global.template.'))
->loadArray($this->params->getData());
}
return $this->params;
}
/*
Function: getImage
Get image resource info.
Parameters:
$name - the param name of the image
Returns:
Array - Image info
*/
public function getImage($name) {
$params = $this->getParams();
if ($image = $params->get($name)) {
return $this->app->html->_('zoo.image', $image, $params->get($name . '_width'), $params->get($name . '_height'));
}
return null;
}
/*
		Function: getText
Executes Content Plugins on text.
Parameters:
$text - the text
Returns:
text - string
*/
public function getText($text) {
return $this->app->zoo->triggerContentPlugins($text);
}
}
/*
Class: CategoryException
*/
class CategoryException extends AppException {} | berkeley-amsa/amsa | tmp/install_4e730b9d77cb3/admin/classes/category.php | PHP | gpl-2.0 | 8,168 |
/*
* Copyright (C) 2016 Team Kodi
* http://kodi.tv
*
* This Program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2, or (at your option)
* any later version.
*
* This Program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this Program; see the file COPYING. If not, see
* <http://www.gnu.org/licenses/>.
*
*/
#include "GUIDialogNewJoystick.h"
#include "ServiceBroker.h"
#include "guilib/GUIWindowManager.h"
#include "guilib/WindowIDs.h"
#include "messaging/helpers/DialogHelper.h"
#include "settings/Settings.h"
using namespace JOYSTICK;
CGUIDialogNewJoystick::CGUIDialogNewJoystick() :
CThread("NewJoystickDlg")
{
}
void CGUIDialogNewJoystick::ShowAsync()
{
bool bShow = true;
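  // only show the prompt if it is not already running, the user has not disabled it,
  // and the controller configuration dialog is not already open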
if (IsRunning())
bShow = false;
else if (!CServiceBroker::GetSettings().GetBool(CSettings::SETTING_INPUT_ASKNEWCONTROLLERS))
bShow = false;
else if (g_windowManager.IsWindowActive(WINDOW_DIALOG_GAME_CONTROLLERS, false))
bShow = false;
if (bShow)
Create();
}
void CGUIDialogNewJoystick::Process()
{
using namespace KODI::MESSAGING::HELPERS;
// "New controller detected"
// "A new controller has been detected. Configuration can be done at any time in "Settings -> System Settings -> Input". Would you like to configure it now?"
if (ShowYesNoDialogText(CVariant{ 35011 }, CVariant{ 35012 }) == DialogResponse::YES)
{
g_windowManager.ActivateWindow(WINDOW_DIALOG_GAME_CONTROLLERS);
}
else
{
CServiceBroker::GetSettings().SetBool(CSettings::SETTING_INPUT_ASKNEWCONTROLLERS, false);
}
}
| hackthis02/xbmc | xbmc/input/joysticks/dialogs/GUIDialogNewJoystick.cpp | C++ | gpl-2.0 | 1,952 |
#!/usr/bin/env python
""" turtle-example-suite:
xtx_lindenmayer_indian.py
Each morning women in Tamil Nadu, in southern
India, place designs, created by using rice
flour and known as kolam, on the thresholds of
their homes.
These can be described by Lindenmayer systems,
which can easily be implemented with turtle
graphics and Python.
Two examples are shown here:
(1) the snake kolam
(2) anklets of Krishna
Taken from Marcia Ascher: Mathematics
Elsewhere, An Exploration of Ideas Across
Cultures
"""
################################
# Mini Lindenmayer tool
###############################
from turtle import *
def replace( seq, replacementRules, n ):
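    # rewrite the sequence n times using the L-system replacement rules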
for i in range(n):
newseq = ""
for element in seq:
newseq = newseq + replacementRules.get(element,element)
seq = newseq
return seq
def draw( commands, rules ):
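    # execute each symbol: callables are invoked, strings are expanded recursively, unknown symbols are ignored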
for b in commands:
try:
rules[b]()
except TypeError:
try:
draw(rules[b], rules)
except:
pass
def main():
################################
# Example 1: Snake kolam
################################
def r():
right(45)
def l():
left(45)
def f():
forward(7.5)
snake_rules = {"-":r, "+":l, "f":f, "b":"f+f+f--f--f+f+f"}
snake_replacementRules = {"b": "b+f+b--f--b+f+b"}
snake_start = "b--f--b--f"
drawing = replace(snake_start, snake_replacementRules, 3)
reset()
speed(3)
tracer(1,0)
ht()
up()
backward(195)
down()
draw(drawing, snake_rules)
from time import sleep
sleep(3)
################################
# Example 2: Anklets of Krishna
################################
def A():
color("red")
circle(10,90)
def B():
from math import sqrt
color("black")
l = 5/sqrt(2)
forward(l)
circle(l, 270)
forward(l)
def F():
color("green")
forward(10)
krishna_rules = {"a":A, "b":B, "f":F}
krishna_replacementRules = {"a" : "afbfa", "b" : "afbfbfbfa" }
krishna_start = "fbfbfbfb"
reset()
speed(0)
tracer(3,0)
ht()
left(45)
drawing = replace(krishna_start, krishna_replacementRules, 3)
draw(drawing, krishna_rules)
tracer(1)
return "Done!"
if __name__=='__main__':
msg = main()
print msg
mainloop()
| teeple/pns_server | work/install/Python-2.7.4/Demo/turtle/tdemo_lindenmayer_indian.py | Python | gpl-2.0 | 2,432 |
/*
* Copyright (C) 2005-2018 Team Kodi
* This file is part of Kodi - https://kodi.tv
*
* SPDX-License-Identifier: GPL-2.0-or-later
* See LICENSES/README.md for more information.
*/
#include "platform/Filesystem.h"
#include "platform/win32/CharsetConverter.h"
#include <Windows.h>
namespace win = KODI::PLATFORM::WINDOWS;
namespace KODI
{
namespace PLATFORM
{
namespace FILESYSTEM
{
space_info space(const std::string& path, std::error_code& ec)
{
ec.clear();
space_info sp;
auto pathW = win::ToW(path);
ULARGE_INTEGER capacity;
ULARGE_INTEGER available;
ULARGE_INTEGER free;
auto result = GetDiskFreeSpaceExW(pathW.c_str(), &available, &capacity, &free);
if (result == FALSE)
{
ec.assign(GetLastError(), std::system_category());
sp.available = static_cast<uintmax_t>(-1);
sp.capacity = static_cast<uintmax_t>(-1);
sp.free = static_cast<uintmax_t>(-1);
return sp;
}
sp.available = static_cast<uintmax_t>(available.QuadPart);
sp.capacity = static_cast<uintmax_t>(capacity.QuadPart);
sp.free = static_cast<uintmax_t>(free.QuadPart);
return sp;
}
std::string temp_directory_path(std::error_code &ec)
{
wchar_t lpTempPathBuffer[MAX_PATH + 1];
if (!GetTempPathW(MAX_PATH, lpTempPathBuffer))
{
ec.assign(GetLastError(), std::system_category());
return std::string();
}
ec.clear();
return win::FromW(lpTempPathBuffer);
}
std::string create_temp_directory(std::error_code &ec)
{
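  // GetTempFileNameW creates a uniquely named file; delete it and create a directory of the same name instead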
wchar_t lpTempPathBuffer[MAX_PATH + 1];
std::wstring xbmcTempPath = win::ToW(temp_directory_path(ec));
if (ec)
return std::string();
if (!GetTempFileNameW(xbmcTempPath.c_str(), L"xbm", 0, lpTempPathBuffer))
{
ec.assign(GetLastError(), std::system_category());
return std::string();
}
DeleteFileW(lpTempPathBuffer);
if (!CreateDirectoryW(lpTempPathBuffer, nullptr))
{
ec.assign(GetLastError(), std::system_category());
return std::string();
}
ec.clear();
return win::FromW(lpTempPathBuffer);
}
std::string temp_file_path(const std::string&, std::error_code& ec)
{
wchar_t lpTempPathBuffer[MAX_PATH + 1];
std::wstring xbmcTempPath = win::ToW(create_temp_directory(ec));
if (ec)
return std::string();
if (!GetTempFileNameW(xbmcTempPath.c_str(), L"xbm", 0, lpTempPathBuffer))
{
ec.assign(GetLastError(), std::system_category());
return std::string();
}
DeleteFileW(lpTempPathBuffer);
ec.clear();
return win::FromW(lpTempPathBuffer);
}
}
}
}
| asavah/xbmc | xbmc/platform/win32/Filesystem.cpp | C++ | gpl-2.0 | 2,486 |
// Copyright 2008 Dolphin Emulator Project
// Licensed under GPLv2+
// Refer to the license.txt file included.
#include "VideoCommon/Fifo.h"
#include <atomic>
#include <cstring>
#include "Common/Assert.h"
#include "Common/Atomic.h"
#include "Common/BlockingLoop.h"
#include "Common/ChunkFile.h"
#include "Common/Event.h"
#include "Common/FPURoundMode.h"
#include "Common/MemoryUtil.h"
#include "Common/MsgHandler.h"
#include "Core/ConfigManager.h"
#include "Core/CoreTiming.h"
#include "Core/HW/Memmap.h"
#include "Core/Host.h"
#include "VideoCommon/AsyncRequests.h"
#include "VideoCommon/CPMemory.h"
#include "VideoCommon/CommandProcessor.h"
#include "VideoCommon/DataReader.h"
#include "VideoCommon/OpcodeDecoding.h"
#include "VideoCommon/VertexLoaderManager.h"
#include "VideoCommon/VertexManagerBase.h"
#include "VideoCommon/VideoBackendBase.h"
namespace Fifo
{
static constexpr u32 FIFO_SIZE = 2 * 1024 * 1024;
static constexpr int GPU_TIME_SLOT_SIZE = 1000;
static Common::BlockingLoop s_gpu_mainloop;
static Common::Flag s_emu_running_state;
// Most of this array is unlikely to be faulted in...
static u8 s_fifo_aux_data[FIFO_SIZE];
static u8* s_fifo_aux_write_ptr;
static u8* s_fifo_aux_read_ptr;
// This could be in SConfig, but it depends on multiple settings
// and can change at runtime.
static bool s_use_deterministic_gpu_thread;
static CoreTiming::EventType* s_event_sync_gpu;
// STATE_TO_SAVE
static u8* s_video_buffer;
static u8* s_video_buffer_read_ptr;
static std::atomic<u8*> s_video_buffer_write_ptr;
static std::atomic<u8*> s_video_buffer_seen_ptr;
static u8* s_video_buffer_pp_read_ptr;
// The read_ptr is always owned by the GPU thread. In normal mode, so is the
// write_ptr, despite it being atomic. In deterministic GPU thread mode,
// things get a bit more complicated:
// - The seen_ptr is written by the GPU thread, and points to what it's already
// processed as much of as possible - in the case of a partial command which
// caused it to stop, not the same as the read ptr. It's written by the GPU,
// under the lock, and updating the cond.
// - The write_ptr is written by the CPU thread after it copies data from the
// FIFO. Maybe someday it will be under the lock. For now, because RunGpuLoop
// polls, it's just atomic.
// - The pp_read_ptr is the CPU preprocessing version of the read_ptr.
static std::atomic<int> s_sync_ticks;
static bool s_syncing_suspended;
static Common::Event s_sync_wakeup_event;
void DoState(PointerWrap& p)
{
p.DoArray(s_video_buffer, FIFO_SIZE);
u8* write_ptr = s_video_buffer_write_ptr;
p.DoPointer(write_ptr, s_video_buffer);
s_video_buffer_write_ptr = write_ptr;
p.DoPointer(s_video_buffer_read_ptr, s_video_buffer);
if (p.mode == PointerWrap::MODE_READ && s_use_deterministic_gpu_thread)
{
// We're good and paused, right?
s_video_buffer_seen_ptr = s_video_buffer_pp_read_ptr = s_video_buffer_read_ptr;
}
p.Do(s_sync_ticks);
p.Do(s_syncing_suspended);
}
void PauseAndLock(bool doLock, bool unpauseOnUnlock)
{
if (doLock)
{
SyncGPU(SyncGPUReason::Other);
EmulatorState(false);
const SConfig& param = SConfig::GetInstance();
if (!param.bCPUThread || s_use_deterministic_gpu_thread)
return;
s_gpu_mainloop.WaitYield(std::chrono::milliseconds(100), Host_YieldToUI);
}
else
{
if (unpauseOnUnlock)
EmulatorState(true);
}
}
void Init()
{
// Padded so that SIMD overreads in the vertex loader are safe
s_video_buffer = static_cast<u8*>(Common::AllocateMemoryPages(FIFO_SIZE + 4));
ResetVideoBuffer();
if (SConfig::GetInstance().bCPUThread)
s_gpu_mainloop.Prepare();
s_sync_ticks.store(0);
}
void Shutdown()
{
if (s_gpu_mainloop.IsRunning())
PanicAlert("Fifo shutting down while active");
Common::FreeMemoryPages(s_video_buffer, FIFO_SIZE + 4);
s_video_buffer = nullptr;
s_video_buffer_write_ptr = nullptr;
s_video_buffer_pp_read_ptr = nullptr;
s_video_buffer_read_ptr = nullptr;
s_video_buffer_seen_ptr = nullptr;
s_fifo_aux_write_ptr = nullptr;
s_fifo_aux_read_ptr = nullptr;
}
// May be executed from any thread, even the graphics thread.
// Created to allow for self shutdown.
void ExitGpuLoop()
{
// This should break the wait loop in CPU thread
CommandProcessor::fifo.bFF_GPReadEnable = false;
FlushGpu();
// Terminate GPU thread loop
s_emu_running_state.Set();
s_gpu_mainloop.Stop(s_gpu_mainloop.kNonBlock);
}
void EmulatorState(bool running)
{
s_emu_running_state.Set(running);
if (running)
s_gpu_mainloop.Wakeup();
else
s_gpu_mainloop.AllowSleep();
}
void SyncGPU(SyncGPUReason reason, bool may_move_read_ptr)
{
if (s_use_deterministic_gpu_thread)
{
s_gpu_mainloop.Wait();
if (!s_gpu_mainloop.IsRunning())
return;
// Opportunistically reset FIFOs so we don't wrap around.
if (may_move_read_ptr && s_fifo_aux_write_ptr != s_fifo_aux_read_ptr)
PanicAlert("aux fifo not synced (%p, %p)", s_fifo_aux_write_ptr, s_fifo_aux_read_ptr);
memmove(s_fifo_aux_data, s_fifo_aux_read_ptr, s_fifo_aux_write_ptr - s_fifo_aux_read_ptr);
s_fifo_aux_write_ptr -= (s_fifo_aux_read_ptr - s_fifo_aux_data);
s_fifo_aux_read_ptr = s_fifo_aux_data;
if (may_move_read_ptr)
{
u8* write_ptr = s_video_buffer_write_ptr;
// what's left over in the buffer
size_t size = write_ptr - s_video_buffer_pp_read_ptr;
memmove(s_video_buffer, s_video_buffer_pp_read_ptr, size);
// This change always decreases the pointers. We write seen_ptr
// after write_ptr here, and read it before in RunGpuLoop, so
// 'write_ptr > seen_ptr' there cannot become spuriously true.
s_video_buffer_write_ptr = write_ptr = s_video_buffer + size;
s_video_buffer_pp_read_ptr = s_video_buffer;
s_video_buffer_read_ptr = s_video_buffer;
s_video_buffer_seen_ptr = write_ptr;
}
}
}
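// Appends data to the auxiliary FIFO buffer. If there is not enough space left,
// it first syncs with the GPU thread so that the aux buffer can be reset.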
void PushFifoAuxBuffer(const void* ptr, size_t size)
{
if (size > (size_t)(s_fifo_aux_data + FIFO_SIZE - s_fifo_aux_write_ptr))
{
SyncGPU(SyncGPUReason::AuxSpace, /* may_move_read_ptr */ false);
if (!s_gpu_mainloop.IsRunning())
{
// GPU is shutting down
return;
}
if (size > (size_t)(s_fifo_aux_data + FIFO_SIZE - s_fifo_aux_write_ptr))
{
// That will sync us up to the last 32 bytes, so this short region
// of FIFO would have to point to a 2MB display list or something.
PanicAlert("absurdly large aux buffer");
return;
}
}
memcpy(s_fifo_aux_write_ptr, ptr, size);
s_fifo_aux_write_ptr += size;
}
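// Returns a pointer to the next 'size' bytes previously stored with
// PushFifoAuxBuffer and advances the aux read pointer past them.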
void* PopFifoAuxBuffer(size_t size)
{
void* ret = s_fifo_aux_read_ptr;
s_fifo_aux_read_ptr += size;
return ret;
}
// Description: RunGpuLoop() sends data through this function.
static void ReadDataFromFifo(u32 readPtr)
{
size_t len = 32;
if (len > (size_t)(s_video_buffer + FIFO_SIZE - s_video_buffer_write_ptr))
{
size_t existing_len = s_video_buffer_write_ptr - s_video_buffer_read_ptr;
if (len > (size_t)(FIFO_SIZE - existing_len))
{
PanicAlert("FIFO out of bounds (existing %zu + new %zu > %u)", existing_len, len, FIFO_SIZE);
return;
}
memmove(s_video_buffer, s_video_buffer_read_ptr, existing_len);
s_video_buffer_write_ptr = s_video_buffer + existing_len;
s_video_buffer_read_ptr = s_video_buffer;
}
// Copy new video instructions to s_video_buffer for future use in rendering the new picture
Memory::CopyFromEmu(s_video_buffer_write_ptr, readPtr, len);
s_video_buffer_write_ptr += len;
}
// The deterministic_gpu_thread version.
static void ReadDataFromFifoOnCPU(u32 readPtr)
{
size_t len = 32;
u8* write_ptr = s_video_buffer_write_ptr;
if (len > (size_t)(s_video_buffer + FIFO_SIZE - write_ptr))
{
// We can't wrap around while the GPU is working on the data.
// This should be very rare due to the reset in SyncGPU.
SyncGPU(SyncGPUReason::Wraparound);
if (!s_gpu_mainloop.IsRunning())
{
// GPU is shutting down, so the next asserts may fail
return;
}
if (s_video_buffer_pp_read_ptr != s_video_buffer_read_ptr)
{
PanicAlert("desynced read pointers");
return;
}
write_ptr = s_video_buffer_write_ptr;
size_t existing_len = write_ptr - s_video_buffer_pp_read_ptr;
if (len > (size_t)(FIFO_SIZE - existing_len))
{
PanicAlert("FIFO out of bounds (existing %zu + new %zu > %u)", existing_len, len, FIFO_SIZE);
return;
}
}
Memory::CopyFromEmu(s_video_buffer_write_ptr, readPtr, len);
s_video_buffer_pp_read_ptr = OpcodeDecoder::Run<true>(
DataReader(s_video_buffer_pp_read_ptr, write_ptr + len), nullptr, false);
// This would have to be locked if the GPU thread didn't spin.
s_video_buffer_write_ptr = write_ptr + len;
}
void ResetVideoBuffer()
{
s_video_buffer_read_ptr = s_video_buffer;
s_video_buffer_write_ptr = s_video_buffer;
s_video_buffer_seen_ptr = s_video_buffer;
s_video_buffer_pp_read_ptr = s_video_buffer;
s_fifo_aux_write_ptr = s_fifo_aux_data;
s_fifo_aux_read_ptr = s_fifo_aux_data;
}
// Description: Main FIFO update loop
// Purpose: Keep the Core HW updated about the CPU-GPU distance
void RunGpuLoop()
{
AsyncRequests::GetInstance()->SetEnable(true);
AsyncRequests::GetInstance()->SetPassthrough(false);
s_gpu_mainloop.Run(
[] {
const SConfig& param = SConfig::GetInstance();
// Run events from the CPU thread.
AsyncRequests::GetInstance()->PullEvents();
// Do nothing while paused
if (!s_emu_running_state.IsSet())
return;
if (s_use_deterministic_gpu_thread)
{
// All the fifo/CP stuff is on the CPU. We just need to run the opcode decoder.
u8* seen_ptr = s_video_buffer_seen_ptr;
u8* write_ptr = s_video_buffer_write_ptr;
// See comment in SyncGPU
if (write_ptr > seen_ptr)
{
s_video_buffer_read_ptr =
OpcodeDecoder::Run(DataReader(s_video_buffer_read_ptr, write_ptr), nullptr, false);
s_video_buffer_seen_ptr = write_ptr;
}
}
else
{
CommandProcessor::SCPFifoStruct& fifo = CommandProcessor::fifo;
CommandProcessor::SetCPStatusFromGPU();
// check if we are able to run this buffer
while (!CommandProcessor::IsInterruptWaiting() && fifo.bFF_GPReadEnable &&
fifo.CPReadWriteDistance && !AtBreakpoint())
{
if (param.bSyncGPU && s_sync_ticks.load() < param.iSyncGpuMinDistance)
break;
u32 cyclesExecuted = 0;
u32 readPtr = fifo.CPReadPointer;
ReadDataFromFifo(readPtr);
if (readPtr == fifo.CPEnd)
readPtr = fifo.CPBase;
else
readPtr += 32;
ASSERT_MSG(COMMANDPROCESSOR, (s32)fifo.CPReadWriteDistance - 32 >= 0,
"Negative fifo.CPReadWriteDistance = %i in FIFO Loop !\nThat can produce "
"instability in the game. Please report it.",
fifo.CPReadWriteDistance - 32);
u8* write_ptr = s_video_buffer_write_ptr;
s_video_buffer_read_ptr = OpcodeDecoder::Run(
DataReader(s_video_buffer_read_ptr, write_ptr), &cyclesExecuted, false);
Common::AtomicStore(fifo.CPReadPointer, readPtr);
Common::AtomicAdd(fifo.CPReadWriteDistance, static_cast<u32>(-32));
if ((write_ptr - s_video_buffer_read_ptr) == 0)
Common::AtomicStore(fifo.SafeCPReadPointer, fifo.CPReadPointer);
CommandProcessor::SetCPStatusFromGPU();
if (param.bSyncGPU)
{
cyclesExecuted = (int)(cyclesExecuted / param.fSyncGpuOverclock);
int old = s_sync_ticks.fetch_sub(cyclesExecuted);
if (old >= param.iSyncGpuMaxDistance &&
old - (int)cyclesExecuted < param.iSyncGpuMaxDistance)
s_sync_wakeup_event.Set();
}
// This call is pretty important in DualCore mode and must be called in the FIFO Loop.
// If we don't, s_swapRequested or s_efbAccessRequested won't be set to false
// leading the CPU thread to wait in Video_BeginField or Video_AccessEFB thus slowing
// things down.
AsyncRequests::GetInstance()->PullEvents();
}
// fast skip remaining GPU time if fifo is empty
if (s_sync_ticks.load() > 0)
{
int old = s_sync_ticks.exchange(0);
if (old >= param.iSyncGpuMaxDistance)
s_sync_wakeup_event.Set();
}
// The fifo is empty and it's unlikely we will get any more work in the near future.
      // Make sure VertexManager finishes drawing any primitives it has stored in its buffer.
g_vertex_manager->Flush();
}
},
100);
AsyncRequests::GetInstance()->SetEnable(false);
AsyncRequests::GetInstance()->SetPassthrough(true);
}
void FlushGpu()
{
const SConfig& param = SConfig::GetInstance();
if (!param.bCPUThread || s_use_deterministic_gpu_thread)
return;
s_gpu_mainloop.Wait();
}
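// Allows the GPU thread's blocking loop to go back to sleep when it runs out of work.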
void GpuMaySleep()
{
s_gpu_mainloop.AllowSleep();
}
bool AtBreakpoint()
{
CommandProcessor::SCPFifoStruct& fifo = CommandProcessor::fifo;
return fifo.bFF_BPEnable && (fifo.CPReadPointer == fifo.CPBreakpoint);
}
void RunGpu()
{
const SConfig& param = SConfig::GetInstance();
// wake up GPU thread
if (param.bCPUThread && !s_use_deterministic_gpu_thread)
{
s_gpu_mainloop.Wakeup();
}
// if the sync GPU callback is suspended, wake it up.
if (!SConfig::GetInstance().bCPUThread || s_use_deterministic_gpu_thread ||
SConfig::GetInstance().bSyncGPU)
{
if (s_syncing_suspended)
{
s_syncing_suspended = false;
CoreTiming::ScheduleEvent(GPU_TIME_SLOT_SIZE, s_event_sync_gpu, GPU_TIME_SLOT_SIZE);
}
}
}
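// Runs the FIFO directly on the calling (CPU) thread, consuming at most the given
// number of emulated ticks. Returns -1 if the GPU is idle so the sync handler can
// be dropped, otherwise the number of cycles to wait before the next call.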
static int RunGpuOnCpu(int ticks)
{
CommandProcessor::SCPFifoStruct& fifo = CommandProcessor::fifo;
bool reset_simd_state = false;
int available_ticks = int(ticks * SConfig::GetInstance().fSyncGpuOverclock) + s_sync_ticks.load();
while (fifo.bFF_GPReadEnable && fifo.CPReadWriteDistance && !AtBreakpoint() &&
available_ticks >= 0)
{
if (s_use_deterministic_gpu_thread)
{
ReadDataFromFifoOnCPU(fifo.CPReadPointer);
s_gpu_mainloop.Wakeup();
}
else
{
if (!reset_simd_state)
{
FPURoundMode::SaveSIMDState();
FPURoundMode::LoadDefaultSIMDState();
reset_simd_state = true;
}
ReadDataFromFifo(fifo.CPReadPointer);
u32 cycles = 0;
s_video_buffer_read_ptr = OpcodeDecoder::Run(
DataReader(s_video_buffer_read_ptr, s_video_buffer_write_ptr), &cycles, false);
available_ticks -= cycles;
}
if (fifo.CPReadPointer == fifo.CPEnd)
fifo.CPReadPointer = fifo.CPBase;
else
fifo.CPReadPointer += 32;
fifo.CPReadWriteDistance -= 32;
}
CommandProcessor::SetCPStatusFromGPU();
if (reset_simd_state)
{
FPURoundMode::LoadSIMDState();
}
// Discard all available ticks as there is nothing to do any more.
s_sync_ticks.store(std::min(available_ticks, 0));
// If the GPU is idle, drop the handler.
if (available_ticks >= 0)
return -1;
// Always wait at least for GPU_TIME_SLOT_SIZE cycles.
return -available_ticks + GPU_TIME_SLOT_SIZE;
}
void UpdateWantDeterminism(bool want)
{
// We are paused (or not running at all yet), so
// it should be safe to change this.
const SConfig& param = SConfig::GetInstance();
bool gpu_thread = false;
switch (param.m_GPUDeterminismMode)
{
case GPUDeterminismMode::Auto:
gpu_thread = want;
break;
case GPUDeterminismMode::Disabled:
gpu_thread = false;
break;
case GPUDeterminismMode::FakeCompletion:
gpu_thread = true;
break;
}
gpu_thread = gpu_thread && param.bCPUThread;
if (s_use_deterministic_gpu_thread != gpu_thread)
{
s_use_deterministic_gpu_thread = gpu_thread;
if (gpu_thread)
{
// These haven't been updated in non-deterministic mode.
s_video_buffer_seen_ptr = s_video_buffer_pp_read_ptr = s_video_buffer_read_ptr;
CopyPreprocessCPStateFromMain();
VertexLoaderManager::MarkAllDirty();
}
}
}
bool UseDeterministicGPUThread()
{
return s_use_deterministic_gpu_thread;
}
/* This function checks the emulated CPU - GPU distance and may wake up the GPU,
* or block the CPU if required. It should be called by the CPU thread regularly.
 * @ticks The amount of emulated CPU time that has passed.
* @return A good time to call WaitForGpuThread() next.
*/
static int WaitForGpuThread(int ticks)
{
const SConfig& param = SConfig::GetInstance();
int old = s_sync_ticks.fetch_add(ticks);
int now = old + ticks;
// GPU is idle, so stop polling.
if (old >= 0 && s_gpu_mainloop.IsDone())
return -1;
// Wakeup GPU
if (old < param.iSyncGpuMinDistance && now >= param.iSyncGpuMinDistance)
RunGpu();
// If the GPU is still sleeping, wait for a longer time
if (now < param.iSyncGpuMinDistance)
return GPU_TIME_SLOT_SIZE + param.iSyncGpuMinDistance - now;
// Wait for GPU
if (now >= param.iSyncGpuMaxDistance)
s_sync_wakeup_event.Wait();
return GPU_TIME_SLOT_SIZE;
}
static void SyncGPUCallback(u64 ticks, s64 cyclesLate)
{
ticks += cyclesLate;
int next = -1;
if (!SConfig::GetInstance().bCPUThread || s_use_deterministic_gpu_thread)
{
next = RunGpuOnCpu((int)ticks);
}
else if (SConfig::GetInstance().bSyncGPU)
{
next = WaitForGpuThread((int)ticks);
}
s_syncing_suspended = next < 0;
if (!s_syncing_suspended)
CoreTiming::ScheduleEvent(next, s_event_sync_gpu, next);
}
// Initialize GPU - CPU thread syncing, this gives us a deterministic way to start the GPU thread.
void Prepare()
{
s_event_sync_gpu = CoreTiming::RegisterEvent("SyncGPUCallback", SyncGPUCallback);
s_syncing_suspended = true;
}
} // namespace Fifo
| LAGonauta/dolphin | Source/Core/VideoCommon/Fifo.cpp | C++ | gpl-2.0 | 18,046 |
<?php
/**
* Magento
*
* NOTICE OF LICENSE
*
* This source file is subject to the Open Software License (OSL 3.0)
* that is bundled with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://opensource.org/licenses/osl-3.0.php
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@magentocommerce.com so we can send you a copy immediately.
*
* DISCLAIMER
*
* Do not edit or add to this file if you wish to upgrade Magento to newer
* versions in the future. If you wish to customize Magento for your
* needs please refer to http://www.magentocommerce.com for more information.
*
* @category Mage
* @package Mage_Core
* @copyright Copyright (c) 2012 Magento Inc. (http://www.magentocommerce.com)
* @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0)
*/
/**
* Core Website model
*
* @method Mage_Core_Model_Resource_Website _getResource()
* @method Mage_Core_Model_Resource_Website getResource()
* @method Mage_Core_Model_Website setCode(string $value)
* @method string getName()
* @method Mage_Core_Model_Website setName(string $value)
* @method int getSortOrder()
* @method Mage_Core_Model_Website setSortOrder(int $value)
* @method Mage_Core_Model_Website setDefaultGroupId(int $value)
* @method int getIsDefault()
* @method Mage_Core_Model_Website setIsDefault(int $value)
*
* @category Mage
* @package Mage_Core
* @author Magento Core Team <core@magentocommerce.com>
*/
class Mage_Core_Model_Website extends Mage_Core_Model_Abstract
{
const ENTITY = 'core_website';
const CACHE_TAG = 'website';
protected $_cacheTag = true;
/**
* @var string
*/
protected $_eventPrefix = 'website';
/**
* @var string
*/
protected $_eventObject = 'website';
/**
* Cache configuration array
*
* @var array
*/
protected $_configCache = array();
/**
     * Website Group Collection array
*
* @var array
*/
protected $_groups;
/**
* Website group ids array
*
* @var array
*/
protected $_groupIds = array();
/**
* The number of groups in a website
*
* @var int
*/
protected $_groupsCount;
/**
* Website Store collection array
*
* @var array
*/
protected $_stores;
/**
* Website store ids array
*
* @var array
*/
protected $_storeIds = array();
/**
* Website store codes array
*
* @var array
*/
protected $_storeCodes = array();
/**
* The number of stores in a website
*
* @var int
*/
protected $_storesCount = 0;
/**
* Website default group
*
* @var Mage_Core_Model_Store_Group
*/
protected $_defaultGroup;
/**
* Website default store
*
* @var Mage_Core_Model_Store
*/
protected $_defaultStore;
/**
     * Whether the website can be deleted
*
* @var bool
*/
protected $_isCanDelete;
/**
* @var bool
*/
private $_isReadOnly = false;
/**
     * Initialize the resource model
*
*/
protected function _construct()
{
$this->_init('core/website');
}
/**
* Custom load
*
* @param int|string $id
* @param string $field
* @return Mage_Core_Model_Website
*/
public function load($id, $field = null)
{
if (!is_numeric($id) && is_null($field)) {
$this->_getResource()->load($this, $id, 'code');
return $this;
}
return parent::load($id, $field);
}
/**
* Load website configuration
*
* @param string $code
* @return Mage_Core_Model_Website
*/
public function loadConfig($code)
{
if (!Mage::getConfig()->getNode('websites')) {
return $this;
}
if (is_numeric($code)) {
foreach (Mage::getConfig()->getNode('websites')->children() as $websiteCode=>$website) {
if ((int)$website->system->website->id==$code) {
$code = $websiteCode;
break;
}
}
} else {
$website = Mage::getConfig()->getNode('websites/'.$code);
}
if (!empty($website)) {
$this->setCode($code);
$id = (int)$website->system->website->id;
$this->setId($id)->setStoreId($id);
}
return $this;
}
/**
* Get website config data
*
* @param string $path
* @return mixed
*/
public function getConfig($path) {
if (!isset($this->_configCache[$path])) {
$config = Mage::getConfig()->getNode('websites/'.$this->getCode().'/'.$path);
if (!$config) {
return false;
#throw Mage::exception('Mage_Core', Mage::helper('core')->__('Invalid website\'s configuration path: %s', $path));
}
if ($config->hasChildren()) {
$value = array();
foreach ($config->children() as $k=>$v) {
$value[$k] = $v;
}
} else {
$value = (string)$config;
}
$this->_configCache[$path] = $value;
}
return $this->_configCache[$path];
}
/**
* Load group collection and set internal data
*
*/
protected function _loadGroups()
{
$this->_groups = array();
$this->_groupsCount = 0;
foreach ($this->getGroupCollection() as $group) {
$this->_groups[$group->getId()] = $group;
$this->_groupIds[$group->getId()] = $group->getId();
if ($this->getDefaultGroupId() == $group->getId()) {
$this->_defaultGroup = $group;
}
$this->_groupsCount ++;
}
}
/**
* Set website groups
*
* @param array $groups
     * @return Mage_Core_Model_Website
     */
public function setGroups($groups)
{
$this->_groups = array();
$this->_groupsCount = 0;
foreach ($groups as $group) {
$this->_groups[$group->getId()] = $group;
$this->_groupIds[$group->getId()] = $group->getId();
if ($this->getDefaultGroupId() == $group->getId()) {
$this->_defaultGroup = $group;
}
$this->_groupsCount ++;
}
return $this;
}
/**
* Retrieve new (not loaded) Group collection object with website filter
*
* @return Mage_Core_Model_Mysql4_Store_Group_Collection
*/
public function getGroupCollection()
{
return Mage::getModel('core/store_group')
->getCollection()
->addWebsiteFilter($this->getId());
}
/**
* Retrieve website groups
*
* @return array
*/
public function getGroups()
{
if (is_null($this->_groups)) {
$this->_loadGroups();
}
return $this->_groups;
}
/**
* Retrieve website group ids
*
* @return array
*/
public function getGroupIds()
{
if (is_null($this->_groups)) {
$this->_loadGroups();
}
return $this->_groupIds;
}
/**
     * Retrieve the number of groups in a website
*
* @return int
*/
public function getGroupsCount()
{
if (is_null($this->_groups)) {
$this->_loadGroups();
}
return $this->_groupsCount;
}
/**
* Retrieve default group model
*
* @return Mage_Core_Model_Store_Group
*/
public function getDefaultGroup()
{
if (!$this->hasDefaultGroupId()) {
return false;
}
if (is_null($this->_groups)) {
$this->_loadGroups();
}
return $this->_defaultGroup;
}
/**
* Load store collection and set internal data
*
*/
protected function _loadStores()
{
$this->_stores = array();
$this->_storesCount = 0;
foreach ($this->getStoreCollection() as $store) {
$this->_stores[$store->getId()] = $store;
$this->_storeIds[$store->getId()] = $store->getId();
$this->_storeCodes[$store->getId()] = $store->getCode();
if ($this->getDefaultGroup() && $this->getDefaultGroup()->getDefaultStoreId() == $store->getId()) {
$this->_defaultStore = $store;
}
$this->_storesCount ++;
}
}
/**
* Set website stores
*
* @param array $stores
*/
public function setStores($stores)
{
$this->_stores = array();
$this->_storesCount = 0;
foreach ($stores as $store) {
$this->_stores[$store->getId()] = $store;
$this->_storeIds[$store->getId()] = $store->getId();
$this->_storeCodes[$store->getId()] = $store->getCode();
if ($this->getDefaultGroup() && $this->getDefaultGroup()->getDefaultStoreId() == $store->getId()) {
$this->_defaultStore = $store;
}
$this->_storesCount ++;
}
}
/**
* Retrieve new (not loaded) Store collection object with website filter
*
* @return Mage_Core_Model_Mysql4_Store_Collection
*/
public function getStoreCollection()
{
return Mage::getModel('core/store')
->getCollection()
->addWebsiteFilter($this->getId());
}
/**
     * Retrieve website store objects
*
* @return array
*/
public function getStores()
{
if (is_null($this->_stores)) {
$this->_loadStores();
}
return $this->_stores;
}
/**
* Retrieve website store ids
*
* @return array
*/
public function getStoreIds()
{
if (is_null($this->_stores)) {
$this->_loadStores();
}
return $this->_storeIds;
}
/**
* Retrieve website store codes
*
* @return array
*/
public function getStoreCodes()
{
if (is_null($this->_stores)) {
$this->_loadStores();
}
return $this->_storeCodes;
}
/**
     * Retrieve the number of stores in a website
*
* @return int
*/
public function getStoresCount()
{
if (is_null($this->_stores)) {
$this->_loadStores();
}
return $this->_storesCount;
}
/**
     * Check whether the website can be deleted
*
* @return bool
*/
public function isCanDelete()
{
if ($this->_isReadOnly || !$this->getId()) {
return false;
}
if (is_null($this->_isCanDelete)) {
$this->_isCanDelete = (Mage::getModel('core/website')->getCollection()->getSize() > 2)
&& !$this->getIsDefault();
}
return $this->_isCanDelete;
}
/**
* Retrieve unique website-group-store key for collection with groups and stores
*
* @return string
*/
public function getWebsiteGroupStore()
{
return join('-', array($this->getWebsiteId(), $this->getGroupId(), $this->getStoreId()));
}
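    /**
     * Retrieve the default group id
     *
     * @return int|string
     */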
public function getDefaultGroupId()
{
return $this->_getData('default_group_id');
}
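    /**
     * Retrieve the website code
     *
     * @return string
     */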
public function getCode()
{
return $this->_getData('code');
}
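    /**
     * Ensure the website can only be deleted from the admin scope
     *
     * @return Mage_Core_Model_Abstract
     */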
protected function _beforeDelete()
{
$this->_protectFromNonAdmin();
return parent::_beforeDelete();
}
/**
* rewrite in order to clear configuration cache
*
* @return Mage_Core_Model_Website
*/
protected function _afterDelete()
{
Mage::app()->clearWebsiteCache($this->getId());
parent::_afterDelete();
Mage::getConfig()->removeCache();
return $this;
}
/**
* Retrieve website base currency code
*
* @return string
*/
public function getBaseCurrencyCode()
{
if ($this->getConfig(Mage_Core_Model_Store::XML_PATH_PRICE_SCOPE)
== Mage_Core_Model_Store::PRICE_SCOPE_GLOBAL
) {
return Mage::app()->getBaseCurrencyCode();
} else {
return $this->getConfig(Mage_Directory_Model_Currency::XML_PATH_CURRENCY_BASE);
}
}
/**
* Retrieve website base currency
*
* @return Mage_Directory_Model_Currency
*/
public function getBaseCurrency()
{
$currency = $this->getData('base_currency');
if (is_null($currency)) {
$currency = Mage::getModel('directory/currency')->load($this->getBaseCurrencyCode());
$this->setData('base_currency', $currency);
}
return $currency;
}
/**
* Retrieve Default Website Store or null
*
* @return Mage_Core_Model_Store
*/
public function getDefaultStore()
{
// init stores if not loaded
$this->getStores();
return $this->_defaultStore;
}
/**
* Retrieve default stores select object
* Select fields website_id, store_id
*
     * @param bool $withDefault include/exclude default admin website
* @return Varien_Db_Select
*/
public function getDefaultStoresSelect($withDefault = false)
{
return $this->getResource()->getDefaultStoresSelect($withDefault);
}
/**
* Get/Set isReadOnly flag
*
* @param bool $value
* @return bool
*/
public function isReadOnly($value = null)
{
if (null !== $value) {
$this->_isReadOnly = (bool)$value;
}
return $this->_isReadOnly;
}
}
| keegan2149/magento | sites/default/app/code/core/Mage/Core/Model/Website.php | PHP | gpl-2.0 | 13,740 |
// Test that g++ complains about referring to a builtin type in a
// mem-initializer.
// Contributed by Kevin Buhr <buhr@stat.wisc.edu>
int r = 0;
struct foo { // ERROR - candidate
foo(int x) { r = 1; } // ERROR - candidate
};
struct bar : foo {
typedef int an_int;
bar() : bar::an_int(3) {} // ERROR - not a base
};
int
main() {
bar b;
return r;
}
| nslu2/Build-gcc-3.2.1 | gcc/testsuite/g++.old-deja/g++.robertl/eb69.C | C++ | gpl-2.0 | 373 |
<?php
/**
* Magento
*
* NOTICE OF LICENSE
*
* This source file is subject to the Open Software License (OSL 3.0)
* that is bundled with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://opensource.org/licenses/osl-3.0.php
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@magentocommerce.com so we can send you a copy immediately.
*
* DISCLAIMER
*
* Do not edit or add to this file if you wish to upgrade Magento to newer
* versions in the future. If you wish to customize Magento for your
* needs please refer to http://www.magentocommerce.com for more information.
*
* @category Mage
* @package Mage_Adminhtml
* @copyright Copyright (c) 2012 Magento Inc. (http://www.magentocommerce.com)
* @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0)
*/
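/**
 * Backend model for the design package configuration field
 *
 * Validates on save that the submitted package name is not empty and that the
 * design package actually exists.
 */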
class Mage_Adminhtml_Model_System_Config_Backend_Design_Package extends Mage_Core_Model_Config_Data
{
protected function _beforeSave()
{
$value = $this->getValue();
if (empty($value)) {
throw new Exception('package name is empty.');
}
if (!Mage::getDesign()->designPackageExists($value)) {
throw new Exception('package with this name does not exist and cannot be set.');
}
}
}
| keegan2149/magento | sites/default/app/code/core/Mage/Adminhtml/Model/System/Config/Backend/Design/Package.php | PHP | gpl-2.0 | 1,419 |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
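        # For every chain that does not yet have a chained Project, create a
        # Project row from the chain's most recent application (apps are ordered
        # by chain, then by descending id) with state=1.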
objs = orm.ProjectApplication.objects
apps = objs.filter(chain__chained_project=None).order_by(
'chain', '-id')
checked_chain = None
projs = []
for app in apps:
chain = app.chain
if chain.pk != checked_chain:
checked_chain = chain.pk
projs.append(orm.Project(id=chain, application=app, state=1))
orm.Project.objects.bulk_create(projs)
def backwards(self, orm):
"Write your backwards methods here."
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'im.additionalmail': {
'Meta': {'object_name': 'AdditionalMail'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.AstakosUser']"})
},
'im.approvalterms': {
'Meta': {'object_name': 'ApprovalTerms'},
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'im.astakosuser': {
'Meta': {'object_name': 'AstakosUser', '_ormbases': ['auth.User']},
'accepted_email': ('django.db.models.fields.EmailField', [], {'default': 'None', 'max_length': '75', 'null': 'True', 'blank': 'True'}),
'accepted_policy': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'activation_sent': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'affiliation': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'auth_token': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'auth_token_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'auth_token_expires': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'date_signed_terms': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deactivated_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deactivated_reason': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
'disturbed_quota': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True', 'blank': 'True'}),
'email_verified': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'has_credits': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'has_signed_terms': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'invitations': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'is_rejected': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_verified': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'level': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
'moderated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'moderated_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'moderated_data': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'policy': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['im.Resource']", 'null': 'True', 'through': "orm['im.AstakosUserQuota']", 'symmetrical': 'False'}),
'rejected_reason': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {}),
'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'primary_key': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '255', 'unique': 'True', 'null': 'True'}),
'verification_code': ('django.db.models.fields.CharField', [], {'max_length': '255', 'unique': 'True', 'null': 'True'}),
'verified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'im.astakosuserauthprovider': {
'Meta': {'unique_together': "(('identifier', 'module', 'user'),)", 'object_name': 'AstakosUserAuthProvider'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'affiliation': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'auth_backend': ('django.db.models.fields.CharField', [], {'default': "'astakos'", 'max_length': '255'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'info_data': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}),
'module': ('django.db.models.fields.CharField', [], {'default': "'local'", 'max_length': '255'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'auth_providers'", 'to': "orm['im.AstakosUser']"})
},
'im.astakosuserquota': {
'Meta': {'unique_together': "(('resource', 'user'),)", 'object_name': 'AstakosUserQuota'},
'capacity': ('snf_django.lib.db.fields.IntDecimalField', [], {'max_digits': '38', 'decimal_places': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'resource': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.Resource']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.AstakosUser']"})
},
'im.authproviderpolicyprofile': {
'Meta': {'object_name': 'AuthProviderPolicyProfile'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'authpolicy_profiles'", 'symmetrical': 'False', 'to': "orm['auth.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_exclusive': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'policy_add': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'policy_automoderate': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'policy_create': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'policy_limit': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
'policy_login': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'policy_remove': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'policy_required': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'policy_switch': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'priority': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'authpolicy_profiles'", 'symmetrical': 'False', 'to': "orm['im.AstakosUser']"})
},
'im.chain': {
'Meta': {'object_name': 'Chain'},
'chain': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'im.component': {
'Meta': {'object_name': 'Component'},
'auth_token': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'auth_token_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'auth_token_expires': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'})
},
'im.emailchange': {
'Meta': {'object_name': 'EmailChange'},
'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'requested_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'emailchanges'", 'unique': 'True', 'to': "orm['im.AstakosUser']"})
},
'im.endpoint': {
'Meta': {'object_name': 'Endpoint'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'service': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'endpoints'", 'to': "orm['im.Service']"})
},
'im.endpointdata': {
'Meta': {'unique_together': "(('endpoint', 'key'),)", 'object_name': 'EndpointData'},
'endpoint': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'data'", 'to': "orm['im.Endpoint']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '1024'})
},
'im.invitation': {
'Meta': {'object_name': 'Invitation'},
'code': ('django.db.models.fields.BigIntegerField', [], {'db_index': 'True'}),
'consumed': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inviter': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'invitations_sent'", 'null': 'True', 'to': "orm['im.AstakosUser']"}),
'is_consumed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'realname': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
'im.pendingthirdpartyuser': {
'Meta': {'unique_together': "(('provider', 'third_party_identifier'),)", 'object_name': 'PendingThirdPartyUser'},
'affiliation': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'info': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'third_party_identifier': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'im.project': {
'Meta': {'object_name': 'Project'},
'application': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'project'", 'unique': 'True', 'to': "orm['im.ProjectApplication']"}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deactivation_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'deactivation_reason': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'id': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'chained_project'", 'unique': 'True', 'primary_key': 'True', 'db_column': "'id'", 'to': "orm['im.Chain']"}),
'last_approval_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['im.AstakosUser']", 'through': "orm['im.ProjectMembership']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'state': ('django.db.models.fields.IntegerField', [], {'default': '1', 'db_index': 'True'})
},
'im.projectapplication': {
'Meta': {'unique_together': "(('chain', 'id'),)", 'object_name': 'ProjectApplication'},
'applicant': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'projects_applied'", 'to': "orm['im.AstakosUser']"}),
'chain': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'chained_apps'", 'db_column': "'chain'", 'to': "orm['im.Chain']"}),
'comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'end_date': ('django.db.models.fields.DateTimeField', [], {}),
'homepage': ('django.db.models.fields.URLField', [], {'max_length': '255', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'issue_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'limit_on_members_number': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'member_join_policy': ('django.db.models.fields.IntegerField', [], {}),
'member_leave_policy': ('django.db.models.fields.IntegerField', [], {}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'projects_owned'", 'to': "orm['im.AstakosUser']"}),
'precursor_application': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.ProjectApplication']", 'null': 'True', 'blank': 'True'}),
'resource_grants': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['im.Resource']", 'null': 'True', 'through': "orm['im.ProjectResourceGrant']", 'blank': 'True'}),
'response': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'response_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'start_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'})
},
'im.projectmembership': {
'Meta': {'unique_together': "(('person', 'project'),)", 'object_name': 'ProjectMembership'},
'acceptance_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'leave_request_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.AstakosUser']"}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.Project']"}),
'request_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'})
},
'im.projectmembershiphistory': {
'Meta': {'object_name': 'ProjectMembershipHistory'},
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'person': ('django.db.models.fields.BigIntegerField', [], {}),
'project': ('django.db.models.fields.BigIntegerField', [], {}),
'reason': ('django.db.models.fields.IntegerField', [], {}),
'serial': ('django.db.models.fields.BigIntegerField', [], {})
},
'im.projectresourcegrant': {
'Meta': {'unique_together': "(('resource', 'project_application'),)", 'object_name': 'ProjectResourceGrant'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'member_capacity': ('snf_django.lib.db.fields.IntDecimalField', [], {'default': '0', 'max_digits': '38', 'decimal_places': '0'}),
'project_application': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.ProjectApplication']", 'null': 'True'}),
'project_capacity': ('snf_django.lib.db.fields.IntDecimalField', [], {'null': 'True', 'max_digits': '38', 'decimal_places': '0'}),
'resource': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.Resource']"})
},
'im.resource': {
'Meta': {'object_name': 'Resource'},
'allow_in_projects': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'desc': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'service_origin': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'service_type': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'unit': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'uplimit': ('snf_django.lib.db.fields.IntDecimalField', [], {'default': '0', 'max_digits': '38', 'decimal_places': '0'})
},
'im.serial': {
'Meta': {'object_name': 'Serial'},
'serial': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'im.service': {
'Meta': {'object_name': 'Service'},
'component': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.Component']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'im.sessioncatalog': {
'Meta': {'object_name': 'SessionCatalog'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sessions'", 'null': 'True', 'to': "orm['im.AstakosUser']"})
},
'im.usersetting': {
'Meta': {'unique_together': "(('user', 'setting'),)", 'object_name': 'UserSetting'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'setting': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.AstakosUser']"}),
'value': ('django.db.models.fields.IntegerField', [], {})
}
}
complete_apps = ['im']
| grnet/synnefo | snf-astakos-app/astakos/im/migrations/old/0043_uninitialized_projects.py | Python | gpl-3.0 | 25,194 |
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: ec2
short_description: create, terminate, start or stop an instance in ec2
description:
- Creates or terminates ec2 instances.
- C(state=restarted) was added in 2.2
version_added: "0.9"
options:
key_name:
description:
- key pair to use on the instance
required: false
default: null
aliases: ['keypair']
id:
version_added: "1.1"
description:
- identifier for this instance or set of instances, so that the module will be idempotent with respect to EC2 instances. This identifier is valid for at least 24 hours after the termination of the instance, and should not be reused for another call later on. For details, see the description of client token at U(http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Run_Instance_Idempotency.html).
required: false
default: null
aliases: []
group:
description:
- security group (or list of groups) to use with the instance
required: false
default: null
aliases: [ 'groups' ]
group_id:
version_added: "1.1"
description:
- security group id (or list of ids) to use with the instance
required: false
default: null
aliases: []
region:
version_added: "1.2"
description:
- The AWS region to use. Must be specified if ec2_url is not used. If not specified then the value of the EC2_REGION environment variable, if any, is used. See U(http://docs.aws.amazon.com/general/latest/gr/rande.html#ec2_region)
required: false
default: null
aliases: [ 'aws_region', 'ec2_region' ]
zone:
version_added: "1.2"
description:
- AWS availability zone in which to launch the instance
required: false
default: null
aliases: [ 'aws_zone', 'ec2_zone' ]
instance_type:
description:
- instance type to use for the instance, see U(http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instance-types.html)
required: true
default: null
aliases: []
tenancy:
version_added: "1.9"
description:
- An instance with a tenancy of "dedicated" runs on single-tenant hardware and can only be launched into a VPC. Note that to use dedicated tenancy you MUST specify a vpc_subnet_id as well. Dedicated tenancy is not available for EC2 "micro" instances.
required: false
default: default
choices: [ "default", "dedicated" ]
aliases: []
spot_price:
version_added: "1.5"
description:
      - Maximum spot price to bid. If not set, a regular on-demand instance is requested. A spot request is made with this maximum bid. When it is filled, the instance is started.
required: false
default: null
aliases: []
spot_type:
version_added: "2.0"
description:
- Type of spot request; one of "one-time" or "persistent". Defaults to "one-time" if not supplied.
required: false
default: "one-time"
choices: [ "one-time", "persistent" ]
aliases: []
image:
description:
- I(ami) ID to use for the instance
required: true
default: null
aliases: []
kernel:
description:
- kernel I(eki) to use for the instance
required: false
default: null
aliases: []
ramdisk:
description:
- ramdisk I(eri) to use for the instance
required: false
default: null
aliases: []
wait:
description:
      - wait for the instance to be 'running' before returning. Does not wait for SSH; see the 'wait_for' example for details.
required: false
default: "no"
choices: [ "yes", "no" ]
aliases: []
wait_timeout:
description:
- how long before wait gives up, in seconds
default: 300
aliases: []
spot_wait_timeout:
version_added: "1.5"
description:
- how long to wait for the spot instance request to be fulfilled
default: 600
aliases: []
count:
description:
- number of instances to launch
required: False
default: 1
aliases: []
monitoring:
version_added: "1.1"
description:
- enable detailed monitoring (CloudWatch) for instance
required: false
default: null
choices: [ "yes", "no" ]
aliases: []
user_data:
version_added: "0.9"
description:
- opaque blob of data which is made available to the ec2 instance
required: false
default: null
aliases: []
instance_tags:
version_added: "1.0"
description:
- a hash/dictionary of tags to add to the new instance or for starting/stopping instance by tag; '{"key":"value"}' and '{"key":"value","key":"value"}'
required: false
default: null
aliases: []
placement_group:
version_added: "1.3"
description:
- placement group for the instance when using EC2 Clustered Compute
required: false
default: null
aliases: []
vpc_subnet_id:
version_added: "1.1"
description:
- the subnet ID in which to launch the instance (VPC)
required: false
default: null
aliases: []
assign_public_ip:
version_added: "1.5"
description:
- when provisioning within vpc, assign a public IP address. Boto library must be 2.13.0+
required: false
default: null
choices: [ "yes", "no" ]
aliases: []
private_ip:
version_added: "1.2"
description:
- the private ip address to assign the instance (from the vpc subnet)
required: false
default: null
aliases: []
instance_profile_name:
version_added: "1.3"
description:
- Name of the IAM instance profile to use. Boto library must be 2.5.0+
required: false
default: null
aliases: []
instance_ids:
version_added: "1.3"
description:
- "list of instance ids, currently used for states: absent, running, stopped"
required: false
default: null
aliases: ['instance_id']
source_dest_check:
version_added: "1.6"
description:
- Enable or Disable the Source/Destination checks (for NAT instances and Virtual Routers)
required: false
default: yes
choices: [ "yes", "no" ]
termination_protection:
version_added: "2.0"
description:
- Enable or Disable the Termination Protection
required: false
default: no
choices: [ "yes", "no" ]
instance_initiated_shutdown_behavior:
version_added: "2.2"
description:
- Set whether AWS will Stop or Terminate an instance on shutdown
required: false
default: 'stop'
choices: [ "stop", "terminate" ]
state:
version_added: "1.3"
description:
- create or terminate instances
required: false
default: 'present'
aliases: []
choices: ['present', 'absent', 'running', 'restarted', 'stopped']
volumes:
version_added: "1.5"
description:
- a list of hash/dictionaries of volumes to add to the new instance; '[{"key":"value", "key":"value"}]'; keys allowed are - device_name (str; required), delete_on_termination (bool; False), device_type (deprecated), ephemeral (str), encrypted (bool; False), snapshot (str), volume_type (str), iops (int) - device_type is deprecated use volume_type, iops must be set when volume_type='io1', ephemeral and snapshot are mutually exclusive.
required: false
default: null
aliases: []
ebs_optimized:
version_added: "1.6"
description:
- whether instance is using optimized EBS volumes, see U(http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSOptimized.html)
required: false
default: 'false'
exact_count:
version_added: "1.5"
description:
- An integer value which indicates how many instances that match the 'count_tag' parameter should be running. Instances are either created or terminated based on this value.
required: false
default: null
aliases: []
count_tag:
version_added: "1.5"
description:
- Used with 'exact_count' to determine how many nodes based on a specific tag criteria should be running. This can be expressed in multiple ways and is shown in the EXAMPLES section. For instance, one can request 25 servers that are tagged with "class=webserver". The specified tag must already exist or be passed in as the 'instance_tags' option.
required: false
default: null
aliases: []
network_interfaces:
version_added: "2.0"
description:
- A list of existing network interfaces to attach to the instance at launch. When specifying existing network interfaces, none of the assign_public_ip, private_ip, vpc_subnet_id, group, or group_id parameters may be used. (Those parameters are for creating a new network interface at launch.)
required: false
default: null
aliases: ['network_interface']
spot_launch_group:
version_added: "2.1"
description:
- Launch group for spot request, see U(http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/how-spot-instances-work.html#spot-launch-group)
required: false
default: null
author:
- "Tim Gerla (@tgerla)"
- "Lester Wade (@lwade)"
- "Seth Vidal"
extends_documentation_fragment: aws
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Basic provisioning example
- ec2:
key_name: mykey
instance_type: t2.micro
image: ami-123456
wait: yes
group: webserver
count: 3
vpc_subnet_id: subnet-29e63245
assign_public_ip: yes
# Advanced example with tagging and CloudWatch
- ec2:
key_name: mykey
group: databases
instance_type: t2.micro
image: ami-123456
wait: yes
wait_timeout: 500
count: 5
instance_tags:
db: postgres
monitoring: yes
vpc_subnet_id: subnet-29e63245
assign_public_ip: yes
# Single instance with additional IOPS volume from snapshot and volume delete on termination
- ec2:
key_name: mykey
group: webserver
instance_type: c3.medium
image: ami-123456
wait: yes
wait_timeout: 500
volumes:
- device_name: /dev/sdb
snapshot: snap-abcdef12
volume_type: io1
iops: 1000
volume_size: 100
delete_on_termination: true
monitoring: yes
vpc_subnet_id: subnet-29e63245
assign_public_ip: yes
# Single instance with ssd gp2 root volume
- ec2:
key_name: mykey
group: webserver
instance_type: c3.medium
image: ami-123456
wait: yes
wait_timeout: 500
volumes:
- device_name: /dev/xvda
volume_type: gp2
volume_size: 8
vpc_subnet_id: subnet-29e63245
assign_public_ip: yes
exact_count: 1
# Multiple groups example
- ec2:
key_name: mykey
group: ['databases', 'internal-services', 'sshable', 'and-so-forth']
instance_type: m1.large
image: ami-6e649707
wait: yes
wait_timeout: 500
count: 5
instance_tags:
db: postgres
monitoring: yes
vpc_subnet_id: subnet-29e63245
assign_public_ip: yes
# Multiple instances with additional volume from snapshot
- ec2:
key_name: mykey
group: webserver
instance_type: m1.large
image: ami-6e649707
wait: yes
wait_timeout: 500
count: 5
volumes:
- device_name: /dev/sdb
snapshot: snap-abcdef12
volume_size: 10
monitoring: yes
vpc_subnet_id: subnet-29e63245
assign_public_ip: yes
# Dedicated tenancy example
- local_action:
module: ec2
assign_public_ip: yes
group_id: sg-1dc53f72
key_name: mykey
image: ami-6e649707
instance_type: m1.small
tenancy: dedicated
vpc_subnet_id: subnet-29e63245
wait: yes
# Spot instance example
- ec2:
spot_price: 0.24
spot_wait_timeout: 600
keypair: mykey
group_id: sg-1dc53f72
instance_type: m1.small
image: ami-6e649707
wait: yes
vpc_subnet_id: subnet-29e63245
assign_public_ip: yes
spot_launch_group: report_generators
# Examples using pre-existing network interfaces
- ec2:
key_name: mykey
instance_type: t2.small
image: ami-f005ba11
network_interface: eni-deadbeef
- ec2:
key_name: mykey
instance_type: t2.small
image: ami-f005ba11
network_interfaces: ['eni-deadbeef', 'eni-5ca1ab1e']
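# Example: protect instances from accidental termination and stop (rather than
# terminate) on shutdown. Key, AMI and subnet values are placeholders, as above.
- ec2:
    key_name: mykey
    instance_type: t2.micro
    image: ami-123456
    wait: yes
    termination_protection: yes
    instance_initiated_shutdown_behavior: stop
    vpc_subnet_id: subnet-29e63245
    assign_public_ip: yes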
# Launch instances, run some tasks
# and then terminate them
- name: Create a sandbox instance
hosts: localhost
gather_facts: False
vars:
key_name: my_keypair
instance_type: m1.small
security_group: my_securitygroup
image: my_ami_id
region: us-east-1
tasks:
- name: Launch instance
ec2:
key_name: "{{ keypair }}"
group: "{{ security_group }}"
instance_type: "{{ instance_type }}"
image: "{{ image }}"
wait: true
region: "{{ region }}"
vpc_subnet_id: subnet-29e63245
assign_public_ip: yes
register: ec2
- name: Add new instance to host group
add_host:
hostname: "{{ item.public_ip }}"
groupname: launched
with_items: "{{ ec2.instances }}"
- name: Wait for SSH to come up
wait_for:
host: "{{ item.public_dns_name }}"
port: 22
delay: 60
timeout: 320
state: started
with_items: "{{ ec2.instances }}"
- name: Configure instance(s)
hosts: launched
become: True
gather_facts: True
roles:
- my_awesome_role
- my_awesome_test
- name: Terminate instances
hosts: localhost
connection: local
tasks:
- name: Terminate instances that were previously launched
ec2:
state: 'absent'
instance_ids: '{{ ec2.instance_ids }}'
# Start a few existing instances, run some tasks
# and stop the instances
- name: Start sandbox instances
hosts: localhost
gather_facts: false
connection: local
vars:
instance_ids:
- 'i-xxxxxx'
- 'i-xxxxxx'
- 'i-xxxxxx'
region: us-east-1
tasks:
- name: Start the sandbox instances
ec2:
instance_ids: '{{ instance_ids }}'
region: '{{ region }}'
state: running
wait: True
vpc_subnet_id: subnet-29e63245
assign_public_ip: yes
roles:
- do_neat_stuff
- do_more_neat_stuff
- name: Stop sandbox instances
hosts: localhost
gather_facts: false
connection: local
vars:
instance_ids:
- 'i-xxxxxx'
- 'i-xxxxxx'
- 'i-xxxxxx'
region: us-east-1
tasks:
- name: Stop the sandbox instances
ec2:
instance_ids: '{{ instance_ids }}'
region: '{{ region }}'
state: stopped
wait: True
vpc_subnet_id: subnet-29e63245
assign_public_ip: yes
#
# Start stopped instances specified by tag
#
- local_action:
module: ec2
instance_tags:
Name: ExtraPower
state: running
#
# Restart instances specified by tag
#
- local_action:
module: ec2
instance_tags:
Name: ExtraPower
state: restarted
#
# Enforce that 5 instances with a tag "foo" are running
# (Highly recommended!)
#
- ec2:
key_name: mykey
instance_type: c1.medium
image: ami-40603AD1
wait: yes
group: webserver
instance_tags:
foo: bar
exact_count: 5
count_tag: foo
vpc_subnet_id: subnet-29e63245
assign_public_ip: yes
#
# Enforce that 5 instances named "database" with a "dbtype" of "postgres" are running
#
- ec2:
key_name: mykey
instance_type: c1.medium
image: ami-40603AD1
wait: yes
group: webserver
instance_tags:
Name: database
dbtype: postgres
exact_count: 5
count_tag:
Name: database
dbtype: postgres
vpc_subnet_id: subnet-29e63245
assign_public_ip: yes
#
# count_tag complex argument examples
#
# instances with tag foo
count_tag:
foo:
# instances with tag foo=bar
count_tag:
foo: bar
# instances with tags foo=bar & baz
count_tag:
foo: bar
baz:
# instances with tags foo & bar & baz=bang
count_tag:
- foo
- bar
- baz: bang
'''
import time
import traceback
from ast import literal_eval
from ansible.module_utils.six import iteritems
from ansible.module_utils.six import get_function_code
try:
import boto.ec2
from boto.ec2.blockdevicemapping import BlockDeviceType, BlockDeviceMapping
from boto.exception import EC2ResponseError
from boto.vpc import VPCConnection
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
def find_running_instances_by_count_tag(module, ec2, count_tag, zone=None):
# get reservations for instances that match tag(s) and are running
reservations = get_reservations(module, ec2, tags=count_tag, state="running", zone=zone)
instances = []
for res in reservations:
if hasattr(res, 'instances'):
for inst in res.instances:
instances.append(inst)
return reservations, instances
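# Helper used when building tag filters below: recursively replaces None (or
# otherwise falsy) values in a dict with empty strings.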
def _set_none_to_blank(dictionary):
result = dictionary
for k in result:
if isinstance(result[k], dict):
result[k] = _set_none_to_blank(result[k])
elif not result[k]:
result[k] = ""
return result
def get_reservations(module, ec2, tags=None, state=None, zone=None):
# TODO: filters do not work with tags that have underscores
filters = dict()
if tags is not None:
if isinstance(tags, str):
try:
tags = literal_eval(tags)
except:
pass
# if string, we only care that a tag of that name exists
if isinstance(tags, str):
filters.update({"tag-key": tags})
# if list, append each item to filters
if isinstance(tags, list):
for x in tags:
if isinstance(x, dict):
x = _set_none_to_blank(x)
filters.update(dict(("tag:"+tn, tv) for (tn,tv) in iteritems(x)))
else:
filters.update({"tag-key": x})
# if dict, add the key and value to the filter
if isinstance(tags, dict):
tags = _set_none_to_blank(tags)
filters.update(dict(("tag:"+tn, tv) for (tn,tv) in iteritems(tags)))
if state:
# http://stackoverflow.com/questions/437511/what-are-the-valid-instancestates-for-the-amazon-ec2-api
filters.update({'instance-state-name': state})
if zone:
filters.update({'availability-zone': zone})
results = ec2.get_all_instances(filters=filters)
return results
def get_instance_info(inst):
"""
Retrieves instance information from an instance
ID and returns it as a dictionary
"""
instance_info = {'id': inst.id,
'ami_launch_index': inst.ami_launch_index,
'private_ip': inst.private_ip_address,
'private_dns_name': inst.private_dns_name,
'public_ip': inst.ip_address,
'dns_name': inst.dns_name,
'public_dns_name': inst.public_dns_name,
'state_code': inst.state_code,
'architecture': inst.architecture,
'image_id': inst.image_id,
'key_name': inst.key_name,
'placement': inst.placement,
'region': inst.placement[:-1],
'kernel': inst.kernel,
'ramdisk': inst.ramdisk,
'launch_time': inst.launch_time,
'instance_type': inst.instance_type,
'root_device_type': inst.root_device_type,
'root_device_name': inst.root_device_name,
'state': inst.state,
'hypervisor': inst.hypervisor,
'tags': inst.tags,
'groups': dict((group.id, group.name) for group in inst.groups),
}
try:
instance_info['virtualization_type'] = getattr(inst,'virtualization_type')
except AttributeError:
instance_info['virtualization_type'] = None
try:
instance_info['ebs_optimized'] = getattr(inst, 'ebs_optimized')
except AttributeError:
instance_info['ebs_optimized'] = False
try:
bdm_dict = {}
bdm = getattr(inst, 'block_device_mapping')
for device_name in bdm.keys():
bdm_dict[device_name] = {
'status': bdm[device_name].status,
'volume_id': bdm[device_name].volume_id,
'delete_on_termination': bdm[device_name].delete_on_termination
}
instance_info['block_device_mapping'] = bdm_dict
except AttributeError:
instance_info['block_device_mapping'] = False
try:
instance_info['tenancy'] = getattr(inst, 'placement_tenancy')
except AttributeError:
instance_info['tenancy'] = 'default'
return instance_info
def boto_supports_associate_public_ip_address(ec2):
"""
Check if Boto library has associate_public_ip_address in the NetworkInterfaceSpecification
class. Added in Boto 2.13.0
ec2: authenticated ec2 connection object
Returns:
True if Boto library accepts associate_public_ip_address argument, else false
"""
try:
network_interface = boto.ec2.networkinterface.NetworkInterfaceSpecification()
getattr(network_interface, "associate_public_ip_address")
return True
except AttributeError:
return False
def boto_supports_profile_name_arg(ec2):
"""
Check if Boto library has instance_profile_name argument. instance_profile_name has been added in Boto 2.5.0
ec2: authenticated ec2 connection object
Returns:
        True if Boto library accepts instance_profile_name argument, else false
"""
run_instances_method = getattr(ec2, 'run_instances')
return 'instance_profile_name' in get_function_code(run_instances_method).co_varnames
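# Builds a boto BlockDeviceType from one entry of the 'volumes' module parameter.
# A typical entry (illustrative values only) looks like:
#   {'device_name': '/dev/sdb', 'volume_type': 'gp2', 'volume_size': 10,
#    'delete_on_termination': True}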
def create_block_device(module, ec2, volume):
    # Not aware of a way to determine this programmatically
# http://aws.amazon.com/about-aws/whats-new/2013/10/09/ebs-provisioned-iops-maximum-iops-gb-ratio-increased-to-30-1/
MAX_IOPS_TO_SIZE_RATIO = 30
# device_type has been used historically to represent volume_type,
# however ec2_vol uses volume_type, as does the BlockDeviceType, so
# we add handling for either/or but not both
if all(key in volume for key in ['device_type','volume_type']):
module.fail_json(msg = 'device_type is a deprecated name for volume_type. Do not use both device_type and volume_type')
# get whichever one is set, or NoneType if neither are set
volume_type = volume.get('device_type') or volume.get('volume_type')
if 'snapshot' not in volume and 'ephemeral' not in volume:
if 'volume_size' not in volume:
module.fail_json(msg = 'Size must be specified when creating a new volume or modifying the root volume')
if 'snapshot' in volume:
if volume_type == 'io1' and 'iops' not in volume:
module.fail_json(msg = 'io1 volumes must have an iops value set')
if 'iops' in volume:
snapshot = ec2.get_all_snapshots(snapshot_ids=[volume['snapshot']])[0]
size = volume.get('volume_size', snapshot.volume_size)
if int(volume['iops']) > MAX_IOPS_TO_SIZE_RATIO * size:
module.fail_json(msg = 'IOPS must be at most %d times greater than size' % MAX_IOPS_TO_SIZE_RATIO)
if 'encrypted' in volume:
module.fail_json(msg = 'You can not set encryption when creating a volume from a snapshot')
if 'ephemeral' in volume:
if 'snapshot' in volume:
module.fail_json(msg = 'Cannot set both ephemeral and snapshot')
return BlockDeviceType(snapshot_id=volume.get('snapshot'),
ephemeral_name=volume.get('ephemeral'),
size=volume.get('volume_size'),
volume_type=volume_type,
delete_on_termination=volume.get('delete_on_termination', False),
iops=volume.get('iops'),
encrypted=volume.get('encrypted', None))
def boto_supports_param_in_spot_request(ec2, param):
"""
Check if Boto library has a <param> in its request_spot_instances() method. For example, the placement_group parameter wasn't added until 2.3.0.
ec2: authenticated ec2 connection object
Returns:
True if boto library has the named param as an argument on the request_spot_instances method, else False
"""
method = getattr(ec2, 'request_spot_instances')
return param in get_function_code(method).co_varnames
def await_spot_requests(module, ec2, spot_requests, count):
"""
Wait for a group of spot requests to be fulfilled, or fail.
module: Ansible module object
ec2: authenticated ec2 connection object
spot_requests: boto.ec2.spotinstancerequest.SpotInstanceRequest object returned by ec2.request_spot_instances
count: Total number of instances to be created by the spot requests
Returns:
list of instance ID's created by the spot request(s)
"""
spot_wait_timeout = int(module.params.get('spot_wait_timeout'))
wait_complete = time.time() + spot_wait_timeout
spot_req_inst_ids = dict()
while time.time() < wait_complete:
reqs = ec2.get_all_spot_instance_requests()
for sirb in spot_requests:
if sirb.id in spot_req_inst_ids:
continue
for sir in reqs:
if sir.id != sirb.id:
continue # this is not our spot instance
if sir.instance_id is not None:
spot_req_inst_ids[sirb.id] = sir.instance_id
elif sir.state == 'open':
continue # still waiting, nothing to do here
elif sir.state == 'active':
continue # Instance is created already, nothing to do here
elif sir.state == 'failed':
module.fail_json(msg="Spot instance request %s failed with status %s and fault %s:%s" % (
sir.id, sir.status.code, sir.fault.code, sir.fault.message))
elif sir.state == 'cancelled':
module.fail_json(msg="Spot instance request %s was cancelled before it could be fulfilled." % sir.id)
elif sir.state == 'closed':
# instance is terminating or marked for termination
                    # This may be intentional on the part of the operator, or it
                    # may have been terminated by AWS due to capacity, price, or
                    # group constraints. In this case, we'll fail the module if
                    # the reason for the state is anything other than termination
                    # by the user. Codes are documented at
# http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-bid-status.html
if sir.status.code == 'instance-terminated-by-user':
# do nothing, since the user likely did this on purpose
pass
else:
spot_msg = "Spot instance request %s was closed by AWS with the status %s and fault %s:%s"
module.fail_json(msg=spot_msg % (sir.id, sir.status.code, sir.fault.code, sir.fault.message))
if len(spot_req_inst_ids) < count:
time.sleep(5)
else:
return spot_req_inst_ids.values()
module.fail_json(msg = "wait for spot requests timeout on %s" % time.asctime())
def enforce_count(module, ec2, vpc):
exact_count = module.params.get('exact_count')
count_tag = module.params.get('count_tag')
zone = module.params.get('zone')
# fail here if the exact count was specified without filtering
    # on a tag, as this may lead to an undesired removal of instances
if exact_count and count_tag is None:
module.fail_json(msg="you must use the 'count_tag' option with exact_count")
reservations, instances = find_running_instances_by_count_tag(module, ec2, count_tag, zone)
changed = None
checkmode = False
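    # Note: check mode is not wired up in this code path; checkmode stays False,
    # so instances really are created or terminated below.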
instance_dict_array = []
changed_instance_ids = None
if len(instances) == exact_count:
changed = False
elif len(instances) < exact_count:
changed = True
to_create = exact_count - len(instances)
if not checkmode:
(instance_dict_array, changed_instance_ids, changed) \
= create_instances(module, ec2, vpc, override_count=to_create)
for inst in instance_dict_array:
instances.append(inst)
elif len(instances) > exact_count:
changed = True
to_remove = len(instances) - exact_count
if not checkmode:
all_instance_ids = sorted([ x.id for x in instances ])
remove_ids = all_instance_ids[0:to_remove]
instances = [ x for x in instances if x.id not in remove_ids]
(changed, instance_dict_array, changed_instance_ids) \
= terminate_instances(module, ec2, remove_ids)
terminated_list = []
for inst in instance_dict_array:
inst['state'] = "terminated"
terminated_list.append(inst)
instance_dict_array = terminated_list
# ensure all instances are dictionaries
all_instances = []
for inst in instances:
if not isinstance(inst, dict):
inst = get_instance_info(inst)
all_instances.append(inst)
return (all_instances, instance_dict_array, changed_instance_ids, changed)
def create_instances(module, ec2, vpc, override_count=None):
"""
Creates new instances
module : AnsibleModule object
ec2: authenticated ec2 connection object
Returns:
A list of dictionaries with instance information
about the instances that were launched
"""
key_name = module.params.get('key_name')
id = module.params.get('id')
group_name = module.params.get('group')
group_id = module.params.get('group_id')
zone = module.params.get('zone')
instance_type = module.params.get('instance_type')
tenancy = module.params.get('tenancy')
spot_price = module.params.get('spot_price')
spot_type = module.params.get('spot_type')
image = module.params.get('image')
if override_count:
count = override_count
else:
count = module.params.get('count')
monitoring = module.params.get('monitoring')
kernel = module.params.get('kernel')
ramdisk = module.params.get('ramdisk')
wait = module.params.get('wait')
wait_timeout = int(module.params.get('wait_timeout'))
spot_wait_timeout = int(module.params.get('spot_wait_timeout'))
placement_group = module.params.get('placement_group')
user_data = module.params.get('user_data')
instance_tags = module.params.get('instance_tags')
vpc_subnet_id = module.params.get('vpc_subnet_id')
assign_public_ip = module.boolean(module.params.get('assign_public_ip'))
private_ip = module.params.get('private_ip')
instance_profile_name = module.params.get('instance_profile_name')
volumes = module.params.get('volumes')
ebs_optimized = module.params.get('ebs_optimized')
exact_count = module.params.get('exact_count')
count_tag = module.params.get('count_tag')
source_dest_check = module.boolean(module.params.get('source_dest_check'))
termination_protection = module.boolean(module.params.get('termination_protection'))
network_interfaces = module.params.get('network_interfaces')
spot_launch_group = module.params.get('spot_launch_group')
instance_initiated_shutdown_behavior = module.params.get('instance_initiated_shutdown_behavior')
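    # The remainder of this function resolves security groups, honours idempotency
    # via the client-token 'id', builds the run_instances/request_spot_instances
    # call, waits for the instances and then applies post-launch attributes and tags.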
# group_id and group_name are exclusive of each other
if group_id and group_name:
module.fail_json(msg = str("Use only one type of parameter (group_name) or (group_id)"))
vpc_id = None
if vpc_subnet_id:
if not vpc:
module.fail_json(msg="region must be specified")
else:
vpc_id = vpc.get_all_subnets(subnet_ids=[vpc_subnet_id])[0].vpc_id
else:
vpc_id = None
try:
# Here we try to lookup the group id from the security group name - if group is set.
if group_name:
if vpc_id:
grp_details = ec2.get_all_security_groups(filters={'vpc_id': vpc_id})
else:
grp_details = ec2.get_all_security_groups()
if isinstance(group_name, basestring):
group_name = [group_name]
unmatched = set(group_name).difference(str(grp.name) for grp in grp_details)
if len(unmatched) > 0:
module.fail_json(msg="The following group names are not valid: %s" % ', '.join(unmatched))
group_id = [ str(grp.id) for grp in grp_details if str(grp.name) in group_name ]
# Now we try to lookup the group id testing if group exists.
elif group_id:
#wrap the group_id in a list if it's not one already
if isinstance(group_id, basestring):
group_id = [group_id]
grp_details = ec2.get_all_security_groups(group_ids=group_id)
group_name = [grp_item.name for grp_item in grp_details]
except boto.exception.NoAuthHandlerFound as e:
module.fail_json(msg = str(e))
    # Look up any instances that match our run id.
running_instances = []
count_remaining = int(count)
if id != None:
filter_dict = {'client-token':id, 'instance-state-name' : 'running'}
previous_reservations = ec2.get_all_instances(None, filter_dict)
for res in previous_reservations:
for prev_instance in res.instances:
running_instances.append(prev_instance)
count_remaining = count_remaining - len(running_instances)
# Both min_count and max_count equal count parameter. This means the launch request is explicit (we want count, or fail) in how many instances we want.
if count_remaining == 0:
changed = False
else:
changed = True
try:
params = {'image_id': image,
'key_name': key_name,
'monitoring_enabled': monitoring,
'placement': zone,
'instance_type': instance_type,
'kernel_id': kernel,
'ramdisk_id': ramdisk,
'user_data': user_data}
if ebs_optimized:
params['ebs_optimized'] = ebs_optimized
# 'tenancy' always has a default value, but it is not a valid parameter for spot instance request
if not spot_price:
params['tenancy'] = tenancy
if boto_supports_profile_name_arg(ec2):
params['instance_profile_name'] = instance_profile_name
else:
if instance_profile_name is not None:
module.fail_json(
msg="instance_profile_name parameter requires Boto version 2.5.0 or higher")
if assign_public_ip:
if not boto_supports_associate_public_ip_address(ec2):
module.fail_json(
msg="assign_public_ip parameter requires Boto version 2.13.0 or higher.")
elif not vpc_subnet_id:
module.fail_json(
msg="assign_public_ip only available with vpc_subnet_id")
else:
if private_ip:
interface = boto.ec2.networkinterface.NetworkInterfaceSpecification(
subnet_id=vpc_subnet_id,
private_ip_address=private_ip,
groups=group_id,
associate_public_ip_address=assign_public_ip)
else:
interface = boto.ec2.networkinterface.NetworkInterfaceSpecification(
subnet_id=vpc_subnet_id,
groups=group_id,
associate_public_ip_address=assign_public_ip)
interfaces = boto.ec2.networkinterface.NetworkInterfaceCollection(interface)
params['network_interfaces'] = interfaces
else:
if network_interfaces:
if isinstance(network_interfaces, basestring):
network_interfaces = [network_interfaces]
interfaces = []
for i, network_interface_id in enumerate(network_interfaces):
interface = boto.ec2.networkinterface.NetworkInterfaceSpecification(
network_interface_id=network_interface_id,
device_index=i)
interfaces.append(interface)
params['network_interfaces'] = \
boto.ec2.networkinterface.NetworkInterfaceCollection(*interfaces)
else:
params['subnet_id'] = vpc_subnet_id
if vpc_subnet_id:
params['security_group_ids'] = group_id
else:
params['security_groups'] = group_name
if volumes:
bdm = BlockDeviceMapping()
for volume in volumes:
if 'device_name' not in volume:
module.fail_json(msg = 'Device name must be set for volume')
# Minimum volume size is 1GB. We'll use volume size explicitly set to 0
# to be a signal not to create this volume
if 'volume_size' not in volume or int(volume['volume_size']) > 0:
bdm[volume['device_name']] = create_block_device(module, ec2, volume)
params['block_device_map'] = bdm
# check to see if we're using spot pricing first before starting instances
if not spot_price:
if assign_public_ip and private_ip:
params.update(dict(
min_count = count_remaining,
max_count = count_remaining,
client_token = id,
placement_group = placement_group,
))
else:
params.update(dict(
min_count = count_remaining,
max_count = count_remaining,
client_token = id,
placement_group = placement_group,
private_ip_address = private_ip,
))
# For ordinary (not spot) instances, we can select 'stop'
# (the default) or 'terminate' here.
params['instance_initiated_shutdown_behavior'] = instance_initiated_shutdown_behavior or 'stop'
res = ec2.run_instances(**params)
instids = [ i.id for i in res.instances ]
while True:
try:
ec2.get_all_instances(instids)
break
except boto.exception.EC2ResponseError as e:
if "<Code>InvalidInstanceID.NotFound</Code>" in str(e):
# there's a race between start and get an instance
continue
else:
module.fail_json(msg = str(e))
# The instances returned through ec2.run_instances above can be in
# terminated state due to idempotency. See commit 7f11c3d for a complete
# explanation.
terminated_instances = [
str(instance.id) for instance in res.instances if instance.state == 'terminated'
]
if terminated_instances:
module.fail_json(msg = "Instances with id(s) %s " % terminated_instances +
"were created previously but have since been terminated - " +
"use a (possibly different) 'instanceid' parameter")
else:
if private_ip:
module.fail_json(
msg='private_ip only available with on-demand (non-spot) instances')
if boto_supports_param_in_spot_request(ec2, 'placement_group'):
params['placement_group'] = placement_group
elif placement_group :
module.fail_json(
msg="placement_group parameter requires Boto version 2.3.0 or higher.")
# You can't tell spot instances to 'stop'; they will always be
# 'terminate'd. For convenience, we'll ignore the latter value.
if instance_initiated_shutdown_behavior and instance_initiated_shutdown_behavior != 'terminate':
module.fail_json(
msg="instance_initiated_shutdown_behavior=stop is not supported for spot instances.")
if spot_launch_group and isinstance(spot_launch_group, basestring):
params['launch_group'] = spot_launch_group
params.update(dict(
count = count_remaining,
type = spot_type,
))
res = ec2.request_spot_instances(spot_price, **params)
# Now we have to do the intermediate waiting
if wait:
instids = await_spot_requests(module, ec2, res, count)
except boto.exception.BotoServerError as e:
module.fail_json(msg = "Instance creation failed => %s: %s" % (e.error_code, e.error_message))
# wait here until the instances are up
num_running = 0
wait_timeout = time.time() + wait_timeout
while wait_timeout > time.time() and num_running < len(instids):
try:
res_list = ec2.get_all_instances(instids)
except boto.exception.BotoServerError as e:
if e.error_code == 'InvalidInstanceID.NotFound':
time.sleep(1)
continue
else:
raise
num_running = 0
for res in res_list:
num_running += len([ i for i in res.instances if i.state=='running' ])
if len(res_list) <= 0:
# got a bad response of some sort, possibly due to
# stale/cached data. Wait a second and then try again
time.sleep(1)
continue
if wait and num_running < len(instids):
time.sleep(5)
else:
break
if wait and wait_timeout <= time.time():
# waiting took too long
module.fail_json(msg = "wait for instances running timeout on %s" % time.asctime())
    # We do this after the loop ends so that we end up with one list
for res in res_list:
running_instances.extend(res.instances)
# Enabled by default by AWS
if source_dest_check is False:
for inst in res.instances:
inst.modify_attribute('sourceDestCheck', False)
# Disabled by default by AWS
if termination_protection is True:
for inst in res.instances:
inst.modify_attribute('disableApiTermination', True)
# Leave this as late as possible to try and avoid InvalidInstanceID.NotFound
if instance_tags:
try:
ec2.create_tags(instids, instance_tags)
except boto.exception.EC2ResponseError as e:
module.fail_json(msg = "Instance tagging failed => %s: %s" % (e.error_code, e.error_message))
instance_dict_array = []
created_instance_ids = []
for inst in running_instances:
inst.update()
d = get_instance_info(inst)
created_instance_ids.append(inst.id)
instance_dict_array.append(d)
return (instance_dict_array, created_instance_ids, changed)
def terminate_instances(module, ec2, instance_ids):
"""
Terminates a list of instances
module: Ansible module object
ec2: authenticated ec2 connection object
termination_list: a list of instances to terminate in the form of
[ {id: <inst-id>}, ..]
Returns a dictionary of instance information
about the instances terminated.
If the instance to be terminated is running
"changed" will be set to False.
"""
# Whether to wait for termination to complete before returning
wait = module.params.get('wait')
wait_timeout = int(module.params.get('wait_timeout'))
changed = False
instance_dict_array = []
if not isinstance(instance_ids, list) or len(instance_ids) < 1:
module.fail_json(msg='instance_ids should be a list of instances, aborting')
terminated_instance_ids = []
for res in ec2.get_all_instances(instance_ids):
for inst in res.instances:
if inst.state == 'running' or inst.state == 'stopped':
terminated_instance_ids.append(inst.id)
instance_dict_array.append(get_instance_info(inst))
try:
ec2.terminate_instances([inst.id])
except EC2ResponseError as e:
module.fail_json(msg='Unable to terminate instance {0}, error: {1}'.format(inst.id, e))
changed = True
# wait here until the instances are 'terminated'
if wait:
num_terminated = 0
wait_timeout = time.time() + wait_timeout
while wait_timeout > time.time() and num_terminated < len(terminated_instance_ids):
response = ec2.get_all_instances( \
instance_ids=terminated_instance_ids, \
filters={'instance-state-name':'terminated'})
try:
num_terminated = sum([len(res.instances) for res in response])
except Exception as e:
# got a bad response of some sort, possibly due to
# stale/cached data. Wait a second and then try again
time.sleep(1)
continue
if num_terminated < len(terminated_instance_ids):
time.sleep(5)
# waiting took too long
if wait_timeout < time.time() and num_terminated < len(terminated_instance_ids):
module.fail_json(msg = "wait for instance termination timeout on %s" % time.asctime())
    # Let's get the current state of the instances after terminating - issue600
instance_dict_array = []
for res in ec2.get_all_instances(instance_ids=terminated_instance_ids,\
filters={'instance-state-name':'terminated'}):
for inst in res.instances:
instance_dict_array.append(get_instance_info(inst))
return (changed, instance_dict_array, terminated_instance_ids)
def startstop_instances(module, ec2, instance_ids, state, instance_tags):
"""
Starts or stops a list of existing instances
module: Ansible module object
ec2: authenticated ec2 connection object
instance_ids: The list of instances to start in the form of
[ {id: <inst-id>}, ..]
instance_tags: A dict of tag keys and values in the form of
{key: value, ... }
state: Intended state ("running" or "stopped")
Returns a dictionary of instance information
about the instances started/stopped.
If the instance was not able to change state,
"changed" will be set to False.
Note that if instance_ids and instance_tags are both non-empty,
this method will process the intersection of the two
"""
wait = module.params.get('wait')
wait_timeout = int(module.params.get('wait_timeout'))
source_dest_check = module.params.get('source_dest_check')
termination_protection = module.params.get('termination_protection')
changed = False
instance_dict_array = []
if not isinstance(instance_ids, list) or len(instance_ids) < 1:
# Fail unless the user defined instance tags
if not instance_tags:
module.fail_json(msg='instance_ids should be a list of instances, aborting')
# To make an EC2 tag filter, we need to prepend 'tag:' to each key.
# An empty filter does no filtering, so it's safe to pass it to the
# get_all_instances method even if the user did not specify instance_tags
filters = {}
if instance_tags:
for key, value in instance_tags.items():
filters["tag:" + key] = value
# Check that our instances are not in the state we want to take
# Check (and eventually change) instances attributes and instances state
existing_instances_array = []
for res in ec2.get_all_instances(instance_ids, filters=filters):
for inst in res.instances:
# Check "source_dest_check" attribute
try:
if inst.vpc_id is not None and inst.get_attribute('sourceDestCheck')['sourceDestCheck'] != source_dest_check:
inst.modify_attribute('sourceDestCheck', source_dest_check)
changed = True
except boto.exception.EC2ResponseError as exc:
# instances with more than one Elastic Network Interface will
# fail, because they have the sourceDestCheck attribute defined
# per-interface
if exc.code == 'InvalidInstanceID':
for interface in inst.interfaces:
if interface.source_dest_check != source_dest_check:
ec2.modify_network_interface_attribute(interface.id, "sourceDestCheck", source_dest_check)
changed = True
else:
                    module.fail_json(msg='Failed to handle source_dest_check state for instance {0}, error: {1}'.format(inst.id, exc),
                                     exception=traceback.format_exc())
# Check "termination_protection" attribute
if (inst.get_attribute('disableApiTermination')['disableApiTermination'] != termination_protection
and termination_protection is not None):
inst.modify_attribute('disableApiTermination', termination_protection)
changed = True
# Check instance state
if inst.state != state:
instance_dict_array.append(get_instance_info(inst))
try:
if state == 'running':
inst.start()
else:
inst.stop()
except EC2ResponseError as e:
module.fail_json(msg='Unable to change state for instance {0}, error: {1}'.format(inst.id, e))
changed = True
existing_instances_array.append(inst.id)
instance_ids = list(set(existing_instances_array + (instance_ids or [])))
## Wait for all the instances to finish starting or stopping
wait_timeout = time.time() + wait_timeout
while wait and wait_timeout > time.time():
instance_dict_array = []
matched_instances = []
for res in ec2.get_all_instances(instance_ids):
for i in res.instances:
if i.state == state:
instance_dict_array.append(get_instance_info(i))
matched_instances.append(i)
if len(matched_instances) < len(instance_ids):
time.sleep(5)
else:
break
if wait and wait_timeout <= time.time():
# waiting took too long
module.fail_json(msg = "wait for instances running timeout on %s" % time.asctime())
return (changed, instance_dict_array, instance_ids)
def restart_instances(module, ec2, instance_ids, state, instance_tags):
"""
Restarts a list of existing instances
module: Ansible module object
ec2: authenticated ec2 connection object
instance_ids: The list of instances to start in the form of
[ {id: <inst-id>}, ..]
instance_tags: A dict of tag keys and values in the form of
{key: value, ... }
state: Intended state ("restarted")
Returns a dictionary of instance information
about the instances.
If the instance was not able to change state,
"changed" will be set to False.
Wait will not apply here as this is a OS level operation.
Note that if instance_ids and instance_tags are both non-empty,
this method will process the intersection of the two.
"""
source_dest_check = module.params.get('source_dest_check')
termination_protection = module.params.get('termination_protection')
changed = False
instance_dict_array = []
if not isinstance(instance_ids, list) or len(instance_ids) < 1:
# Fail unless the user defined instance tags
if not instance_tags:
module.fail_json(msg='instance_ids should be a list of instances, aborting')
# To make an EC2 tag filter, we need to prepend 'tag:' to each key.
# An empty filter does no filtering, so it's safe to pass it to the
# get_all_instances method even if the user did not specify instance_tags
filters = {}
if instance_tags:
for key, value in instance_tags.items():
filters["tag:" + key] = value
# Check that our instances are not in the state we want to take
# Check (and eventually change) instances attributes and instances state
for res in ec2.get_all_instances(instance_ids, filters=filters):
for inst in res.instances:
# Check "source_dest_check" attribute
try:
if inst.vpc_id is not None and inst.get_attribute('sourceDestCheck')['sourceDestCheck'] != source_dest_check:
inst.modify_attribute('sourceDestCheck', source_dest_check)
changed = True
except boto.exception.EC2ResponseError as exc:
# instances with more than one Elastic Network Interface will
# fail, because they have the sourceDestCheck attribute defined
# per-interface
if exc.code == 'InvalidInstanceID':
for interface in inst.interfaces:
if interface.source_dest_check != source_dest_check:
ec2.modify_network_interface_attribute(interface.id, "sourceDestCheck", source_dest_check)
changed = True
else:
                    module.fail_json(msg='Failed to handle source_dest_check state for instance {0}, error: {1}'.format(inst.id, exc),
                                     exception=traceback.format_exc())
# Check "termination_protection" attribute
if (inst.get_attribute('disableApiTermination')['disableApiTermination'] != termination_protection
and termination_protection is not None):
inst.modify_attribute('disableApiTermination', termination_protection)
changed = True
# Check instance state
if inst.state != state:
instance_dict_array.append(get_instance_info(inst))
try:
inst.reboot()
except EC2ResponseError as e:
module.fail_json(msg='Unable to change state for instance {0}, error: {1}'.format(inst.id, e))
changed = True
return (changed, instance_dict_array, instance_ids)
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
key_name = dict(aliases = ['keypair']),
id = dict(),
group = dict(type='list', aliases=['groups']),
group_id = dict(type='list'),
zone = dict(aliases=['aws_zone', 'ec2_zone']),
instance_type = dict(aliases=['type']),
spot_price = dict(),
spot_type = dict(default='one-time', choices=["one-time", "persistent"]),
spot_launch_group = dict(),
image = dict(),
kernel = dict(),
count = dict(type='int', default='1'),
monitoring = dict(type='bool', default=False),
ramdisk = dict(),
wait = dict(type='bool', default=False),
wait_timeout = dict(default=300),
spot_wait_timeout = dict(default=600),
placement_group = dict(),
user_data = dict(),
instance_tags = dict(type='dict'),
vpc_subnet_id = dict(),
assign_public_ip = dict(type='bool', default=False),
private_ip = dict(),
instance_profile_name = dict(),
instance_ids = dict(type='list', aliases=['instance_id']),
source_dest_check = dict(type='bool', default=True),
termination_protection = dict(type='bool', default=None),
state = dict(default='present', choices=['present', 'absent', 'running', 'restarted', 'stopped']),
instance_initiated_shutdown_behavior=dict(default=None, choices=['stop', 'terminate']),
exact_count = dict(type='int', default=None),
count_tag = dict(),
volumes = dict(type='list'),
ebs_optimized = dict(type='bool', default=False),
tenancy = dict(default='default'),
network_interfaces = dict(type='list', aliases=['network_interface'])
)
)
module = AnsibleModule(
argument_spec=argument_spec,
mutually_exclusive = [
['exact_count', 'count'],
['exact_count', 'state'],
['exact_count', 'instance_ids'],
['network_interfaces', 'assign_public_ip'],
['network_interfaces', 'group'],
['network_interfaces', 'group_id'],
['network_interfaces', 'private_ip'],
['network_interfaces', 'vpc_subnet_id'],
],
)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
ec2 = ec2_connect(module)
region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module)
if region:
try:
vpc = connect_to_aws(boto.vpc, region, **aws_connect_kwargs)
except boto.exception.NoAuthHandlerFound as e:
module.fail_json(msg = str(e))
else:
vpc = None
tagged_instances = []
state = module.params['state']
if state == 'absent':
instance_ids = module.params['instance_ids']
if not instance_ids:
module.fail_json(msg='instance_ids list is required for absent state')
(changed, instance_dict_array, new_instance_ids) = terminate_instances(module, ec2, instance_ids)
elif state in ('running', 'stopped'):
instance_ids = module.params.get('instance_ids')
instance_tags = module.params.get('instance_tags')
if not (isinstance(instance_ids, list) or isinstance(instance_tags, dict)):
module.fail_json(msg='running list needs to be a list of instances or set of tags to run: %s' % instance_ids)
(changed, instance_dict_array, new_instance_ids) = startstop_instances(module, ec2, instance_ids, state, instance_tags)
    elif state == 'restarted':
instance_ids = module.params.get('instance_ids')
instance_tags = module.params.get('instance_tags')
if not (isinstance(instance_ids, list) or isinstance(instance_tags, dict)):
module.fail_json(msg='running list needs to be a list of instances or set of tags to run: %s' % instance_ids)
(changed, instance_dict_array, new_instance_ids) = restart_instances(module, ec2, instance_ids, state, instance_tags)
elif state == 'present':
# Changed is always set to true when provisioning new instances
if not module.params.get('image'):
module.fail_json(msg='image parameter is required for new instance')
if module.params.get('exact_count') is None:
(instance_dict_array, new_instance_ids, changed) = create_instances(module, ec2, vpc)
else:
(tagged_instances, instance_dict_array, new_instance_ids, changed) = enforce_count(module, ec2, vpc)
module.exit_json(changed=changed, instance_ids=new_instance_ids, instances=instance_dict_array, tagged_instances=tagged_instances)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
main()
| wimnat/ansible-modules-core | cloud/amazon/ec2.py | Python | gpl-3.0 | 61,472 |
using System;
using System.Collections.ObjectModel;
using System.Globalization;
using System.Linq;
using MixERP.Net.Common;
using MixERP.Net.Core.Modules.Sales.Data.Data;
using MixERP.Net.DbFactory;
using MixERP.Net.Entities.Core;
using MixERP.Net.Entities.Transactions.Models;
using Npgsql;
namespace MixERP.Net.Core.Modules.Sales.Data.Transactions
{
internal static class GlTransaction
{
public static long Add(string catalog, string bookName, DateTime valueDate, int officeId, int userId,
long loginId, int costCenterId, string referenceNumber, string statementReference, StockMaster stockMaster,
Collection<StockDetail> details, Collection<Attachment> attachments, bool nonTaxable,
Collection<long> tranIds)
{
if (stockMaster == null)
{
return 0;
}
if (details == null)
{
return 0;
}
if (details.Count.Equals(0))
{
return 0;
}
string detail = StockMasterDetailHelper.CreateStockMasterDetailParameter(details);
string attachment = AttachmentHelper.CreateAttachmentModelParameter(attachments);
string ids = "NULL::bigint";
if (tranIds != null && tranIds.Count > 0)
{
ids = string.Join(",", tranIds);
}
string sql = string.Format(CultureInfo.InvariantCulture,
"SELECT * FROM transactions.post_sales(@BookName::national character varying(48), @OfficeId::integer, @UserId::integer, @LoginId::bigint, @ValueDate::date, @CostCenterId::integer, @ReferenceNumber::national character varying(24), @StatementReference::text, @IsCredit::boolean, @PaymentTermId::integer, @PartyCode::national character varying(12), @PriceTypeId::integer, @SalespersonId::integer, @ShipperId::integer, @ShippingAddressCode::national character varying(12), @StoreId::integer, @NonTaxable::boolean, ARRAY[{0}], ARRAY[{1}], ARRAY[{2}])",
detail, attachment, ids);
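            // 'detail' and 'attachment' are fragments built by the helper classes and formatted into
            // the ARRAY[...] arguments; the corresponding NpgsqlParameter objects are added further below.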
using (NpgsqlCommand command = new NpgsqlCommand(sql))
{
command.Parameters.AddWithValue("@BookName", bookName);
command.Parameters.AddWithValue("@OfficeId", officeId);
command.Parameters.AddWithValue("@UserId", userId);
command.Parameters.AddWithValue("@LoginId", loginId);
command.Parameters.AddWithValue("@ValueDate", valueDate);
command.Parameters.AddWithValue("@CostCenterId", costCenterId);
command.Parameters.AddWithValue("@ReferenceNumber", referenceNumber);
command.Parameters.AddWithValue("@StatementReference", statementReference);
command.Parameters.AddWithValue("@IsCredit", stockMaster.IsCredit);
if (stockMaster.PaymentTermId.Equals(0))
{
command.Parameters.AddWithValue("@PaymentTermId", DBNull.Value);
}
else
{
command.Parameters.AddWithValue("@PaymentTermId", stockMaster.PaymentTermId);
}
command.Parameters.AddWithValue("@PartyCode", stockMaster.PartyCode);
command.Parameters.AddWithValue("@PriceTypeId", stockMaster.PriceTypeId);
command.Parameters.AddWithValue("@SalespersonId", stockMaster.SalespersonId);
command.Parameters.AddWithValue("@ShipperId", stockMaster.ShipperId);
command.Parameters.AddWithValue("@ShippingAddressCode", stockMaster.ShippingAddressCode);
command.Parameters.AddWithValue("@StoreId", stockMaster.StoreId);
command.Parameters.AddWithValue("@NonTaxable", nonTaxable);
command.Parameters.AddRange(StockMasterDetailHelper.AddStockMasterDetailParameter(details).ToArray());
command.Parameters.AddRange(AttachmentHelper.AddAttachmentParameter(attachments).ToArray());
long tranId = Conversion.TryCastLong(DbOperation.GetScalarValue(catalog, command));
return tranId;
}
}
}
} | mixerp/mixerp | src/FrontEnd/Modules/Sales.Data/Transactions/GlTransaction.cs | C# | gpl-3.0 | 4,220 |
/**
* Copyright 2015 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
var when = require("when");
var clone = require("clone");
var typeRegistry = require("../registry");
var Log = require("../../log");
var redUtil = require("../../util");
var flowUtil = require("./util");
function Flow(global,flow) {
if (typeof flow === 'undefined') {
flow = global;
}
var activeNodes = {};
var subflowInstanceNodes = {};
var catchNodeMap = {};
var statusNodeMap = {};
this.start = function(diff) {
var node;
var newNode;
var id;
catchNodeMap = {};
statusNodeMap = {};
for (id in flow.configs) {
if (flow.configs.hasOwnProperty(id)) {
node = flow.configs[id];
if (!activeNodes[id]) {
newNode = createNode(node.type,node);
if (newNode) {
activeNodes[id] = newNode;
}
}
}
}
if (diff && diff.rewired) {
for (var j=0;j<diff.rewired.length;j++) {
var rewireNode = activeNodes[diff.rewired[j]];
if (rewireNode) {
rewireNode.updateWires(flow.nodes[rewireNode.id].wires);
}
}
}
for (id in flow.nodes) {
if (flow.nodes.hasOwnProperty(id)) {
node = flow.nodes[id];
if (!node.subflow) {
if (!activeNodes[id]) {
newNode = createNode(node.type,node);
if (newNode) {
activeNodes[id] = newNode;
}
}
} else {
if (!subflowInstanceNodes[id]) {
try {
var nodes = createSubflow(flow.subflows[node.subflow]||global.subflows[node.subflow],node,flow.subflows,global.subflows,activeNodes);
subflowInstanceNodes[id] = nodes.map(function(n) { return n.id});
for (var i=0;i<nodes.length;i++) {
if (nodes[i]) {
activeNodes[nodes[i].id] = nodes[i];
}
}
} catch(err) {
console.log(err.stack)
}
}
}
}
}
for (id in activeNodes) {
if (activeNodes.hasOwnProperty(id)) {
node = activeNodes[id];
if (node.type === "catch") {
catchNodeMap[node.z] = catchNodeMap[node.z] || [];
catchNodeMap[node.z].push(node);
} else if (node.type === "status") {
statusNodeMap[node.z] = statusNodeMap[node.z] || [];
statusNodeMap[node.z].push(node);
}
}
}
}
this.stop = function(stopList) {
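        // When stopList is supplied, only those nodes (plus the internal nodes of any
        // subflow instances in the list) are closed; otherwise all active nodes are closed.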
return when.promise(function(resolve) {
var i;
if (stopList) {
for (i=0;i<stopList.length;i++) {
if (subflowInstanceNodes[stopList[i]]) {
// The first in the list is the instance node we already
// know about
stopList = stopList.concat(subflowInstanceNodes[stopList[i]].slice(1))
}
}
} else {
stopList = Object.keys(activeNodes);
}
var promises = [];
for (i=0;i<stopList.length;i++) {
var node = activeNodes[stopList[i]];
if (node) {
delete activeNodes[stopList[i]];
if (subflowInstanceNodes[stopList[i]]) {
delete subflowInstanceNodes[stopList[i]];
}
try {
var p = node.close();
if (p) {
promises.push(p);
}
} catch(err) {
node.error(err);
}
}
}
when.settle(promises).then(function() {
resolve();
});
});
}
this.update = function(_global,_flow) {
global = _global;
flow = _flow;
}
this.getNode = function(id) {
return activeNodes[id];
}
this.getActiveNodes = function() {
return activeNodes;
}
this.handleStatus = function(node,statusMessage) {
var targetStatusNodes = null;
var reportingNode = node;
var handled = false;
while(reportingNode && !handled) {
targetStatusNodes = statusNodeMap[reportingNode.z];
if (targetStatusNodes) {
targetStatusNodes.forEach(function(targetStatusNode) {
if (targetStatusNode.scope && targetStatusNode.scope.indexOf(node.id) === -1) {
return;
}
var message = {
status: {
text: "",
source: {
id: node.id,
type: node.type,
name: node.name
}
}
};
if (statusMessage.text) {
message.status.text = statusMessage.text;
}
targetStatusNode.receive(message);
handled = true;
});
}
if (!handled) {
reportingNode = activeNodes[reportingNode.z];
}
}
}
this.handleError = function(node,logMessage,msg) {
var count = 1;
if (msg && msg.hasOwnProperty("error")) {
if (msg.error.hasOwnProperty("source")) {
if (msg.error.source.id === node.id) {
count = msg.error.source.count+1;
if (count === 10) {
node.warn(Log._("nodes.flow.error-loop"));
return;
}
}
}
}
var targetCatchNodes = null;
var throwingNode = node;
var handled = false;
while (throwingNode && !handled) {
targetCatchNodes = catchNodeMap[throwingNode.z];
if (targetCatchNodes) {
targetCatchNodes.forEach(function(targetCatchNode) {
if (targetCatchNode.scope && targetCatchNode.scope.indexOf(throwingNode.id) === -1) {
return;
}
var errorMessage;
if (msg) {
errorMessage = redUtil.cloneMessage(msg);
} else {
errorMessage = {};
}
if (errorMessage.hasOwnProperty("error")) {
errorMessage._error = errorMessage.error;
}
errorMessage.error = {
message: logMessage.toString(),
source: {
id: node.id,
type: node.type,
name: node.name,
count: count
}
};
targetCatchNode.receive(errorMessage);
handled = true;
});
}
if (!handled) {
throwingNode = activeNodes[throwingNode.z];
}
}
}
}
var EnvVarPropertyRE = /^\$\((\S+)\)$/;
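// Replaces any string property of the exact form $(VAR) with the value of
// process.env.VAR, recursing into arrays and nested objects; Buffers are skipped.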
function mapEnvVarProperties(obj,prop) {
if (Buffer.isBuffer(obj[prop])) {
return;
} else if (Array.isArray(obj[prop])) {
for (var i=0;i<obj[prop].length;i++) {
mapEnvVarProperties(obj[prop],i);
}
} else if (typeof obj[prop] === 'string') {
var m;
if ( (m = EnvVarPropertyRE.exec(obj[prop])) !== null) {
if (process.env.hasOwnProperty(m[1])) {
obj[prop] = process.env[m[1]];
}
}
} else {
for (var p in obj[prop]) {
if (obj[prop].hasOwnProperty) {
mapEnvVarProperties(obj[prop],p);
}
}
}
}
function createNode(type,config) {
var nn = null;
var nt = typeRegistry.get(type);
if (nt) {
var conf = clone(config);
delete conf.credentials;
for (var p in conf) {
if (conf.hasOwnProperty(p)) {
mapEnvVarProperties(conf,p);
}
}
try {
nn = new nt(conf);
}
catch (err) {
Log.log({
level: Log.ERROR,
id:conf.id,
type: type,
msg: err
});
}
} else {
Log.error(Log._("nodes.flow.unknown-type", {type:type}));
}
return nn;
}
function createSubflow(sf,sfn,subflows,globalSubflows,activeNodes) {
//console.log("CREATE SUBFLOW",sf.id,sfn.id);
var nodes = [];
var node_map = {};
var newNodes = [];
var node;
var wires;
var i,j,k;
var createNodeInSubflow = function(def) {
node = clone(def);
var nid = redUtil.generateId();
node_map[node.id] = node;
node._alias = node.id;
node.id = nid;
node.z = sfn.id;
newNodes.push(node);
}
    // Clone all of the subflow config node definitions and give them new IDs
for (i in sf.configs) {
if (sf.configs.hasOwnProperty(i)) {
createNodeInSubflow(sf.configs[i]);
}
}
// Clone all of the subflow node definitions and give them new IDs
for (i in sf.nodes) {
if (sf.nodes.hasOwnProperty(i)) {
createNodeInSubflow(sf.nodes[i]);
}
}
// Look for any catch/status nodes and update their scope ids
// Update all subflow interior wiring to reflect new node IDs
for (i=0;i<newNodes.length;i++) {
node = newNodes[i];
if (node.wires) {
var outputs = node.wires;
for (j=0;j<outputs.length;j++) {
wires = outputs[j];
for (k=0;k<wires.length;k++) {
outputs[j][k] = node_map[outputs[j][k]].id
}
}
if ((node.type === 'catch' || node.type === 'status') && node.scope) {
node.scope = node.scope.map(function(id) {
return node_map[id]?node_map[id].id:""
})
} else {
for (var prop in node) {
if (node.hasOwnProperty(prop) && prop !== '_alias') {
if (node_map[node[prop]]) {
//console.log("Mapped",node.type,node.id,prop,node_map[node[prop]].id);
node[prop] = node_map[node[prop]].id;
}
}
}
}
}
}
// Create a subflow node to accept inbound messages and route appropriately
var Node = require("../Node");
var subflowInstance = {
id: sfn.id,
type: sfn.type,
z: sfn.z,
name: sfn.name,
wires: []
}
if (sf.in) {
subflowInstance.wires = sf.in.map(function(n) { return n.wires.map(function(w) { return node_map[w.id].id;})})
subflowInstance._originalWires = clone(subflowInstance.wires);
}
var subflowNode = new Node(subflowInstance);
subflowNode.on("input", function(msg) { this.send(msg);});
subflowNode._updateWires = subflowNode.updateWires;
subflowNode.updateWires = function(newWires) {
// Wire the subflow outputs
if (sf.out) {
var node,wires,i,j;
// Restore the original wiring to the internal nodes
subflowInstance.wires = clone(subflowInstance._originalWires);
for (i=0;i<sf.out.length;i++) {
wires = sf.out[i].wires;
for (j=0;j<wires.length;j++) {
if (wires[j].id != sf.id) {
node = node_map[wires[j].id];
if (node._originalWires) {
node.wires = clone(node._originalWires);
}
}
}
}
var modifiedNodes = {};
var subflowInstanceModified = false;
for (i=0;i<sf.out.length;i++) {
wires = sf.out[i].wires;
for (j=0;j<wires.length;j++) {
if (wires[j].id === sf.id) {
subflowInstance.wires[wires[j].port] = subflowInstance.wires[wires[j].port].concat(newWires[i]);
subflowInstanceModified = true;
} else {
node = node_map[wires[j].id];
node.wires[wires[j].port] = node.wires[wires[j].port].concat(newWires[i]);
modifiedNodes[node.id] = node;
}
}
}
Object.keys(modifiedNodes).forEach(function(id) {
var node = modifiedNodes[id];
subflowNode.instanceNodes[id].updateWires(node.wires);
});
if (subflowInstanceModified) {
subflowNode._updateWires(subflowInstance.wires);
}
}
}
nodes.push(subflowNode);
// Wire the subflow outputs
if (sf.out) {
var modifiedNodes = {};
for (i=0;i<sf.out.length;i++) {
wires = sf.out[i].wires;
for (j=0;j<wires.length;j++) {
if (wires[j].id === sf.id) {
// A subflow input wired straight to a subflow output
subflowInstance.wires[wires[j].port] = subflowInstance.wires[wires[j].port].concat(sfn.wires[i])
subflowNode._updateWires(subflowInstance.wires);
} else {
node = node_map[wires[j].id];
modifiedNodes[node.id] = node;
if (!node._originalWires) {
node._originalWires = clone(node.wires);
}
node.wires[wires[j].port] = (node.wires[wires[j].port]||[]).concat(sfn.wires[i]);
}
}
}
}
// Instantiate the nodes
for (i=0;i<newNodes.length;i++) {
node = newNodes[i];
var type = node.type;
var m = /^subflow:(.+)$/.exec(type);
if (!m) {
var newNode = createNode(type,node);
if (newNode) {
activeNodes[node.id] = newNode;
nodes.push(newNode);
}
} else {
var subflowId = m[1];
nodes = nodes.concat(createSubflow(subflows[subflowId]||globalSubflows[subflowId],node,subflows,globalSubflows,activeNodes));
}
}
subflowNode.instanceNodes = {};
nodes.forEach(function(node) {
subflowNode.instanceNodes[node.id] = node;
});
return nodes;
}
module.exports = {
create: function(global,conf) {
return new Flow(global,conf);
}
}
| lemio/w-esp | w-esp-node-red/red/runtime/nodes/flows/Flow.js | JavaScript | gpl-3.0 | 16,004 |
/*
Copyright (c) 2003-2014, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
CKEDITOR.plugins.setLang( 'removeformat', 'en-gb', {
toolbar: 'Remove Format'
} );
| gmuro/dolibarr | htdocs/includes/ckeditor/ckeditor/_source/plugins/removeformat/lang/en-gb.js | JavaScript | gpl-3.0 | 227 |
/*
Copyright (c) 2003-2014, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
CKEDITOR.plugins.setLang( 'flash', 'ku', {
access: 'دەستپێگەیشتنی نووسراو',
accessAlways: 'هەمیشه',
accessNever: 'هەرگیز',
accessSameDomain: 'هەمان دۆمەین',
alignAbsBottom: 'له ژێرەوه',
alignAbsMiddle: 'لەناوەند',
alignBaseline: 'هێڵەبنەڕەت',
alignTextTop: 'دەق لەسەرەوه',
bgcolor: 'ڕەنگی پاشبنەما',
chkFull: 'ڕێپێدان بە پڕی شاشه',
chkLoop: 'گرێ',
chkMenu: 'چالاککردنی لیستەی فلاش',
chkPlay: 'پێکردنی یان لێدانی خۆکار',
flashvars: 'گۆڕاوەکان بۆ فلاش',
hSpace: 'بۆشایی ئاسۆیی',
properties: 'خاسیەتی فلاش',
propertiesTab: 'خاسیەت',
quality: 'جۆرایەتی',
qualityAutoHigh: 'بەرزی خۆکار',
qualityAutoLow: 'نزمی خۆکار',
qualityBest: 'باشترین',
qualityHigh: 'بەرزی',
qualityLow: 'نزم',
qualityMedium: 'مامناوەند',
scale: 'پێوانه',
scaleAll: 'نیشاندانی هەموو',
scaleFit: 'بەوردی بگونجێت',
scaleNoBorder: 'بێ پەراوێز',
title: 'خاسیەتی فلاش',
vSpace: 'بۆشایی ئەستونی',
validateHSpace: 'بۆشایی ئاسۆیی دەبێت ژمارە بێت.',
validateSrc: 'ناونیشانی بەستەر نابێت خاڵی بێت',
validateVSpace: 'بۆشایی ئەستونی دەبێت ژماره بێت.',
windowMode: 'شێوازی پەنجەره',
windowModeOpaque: 'ناڕوون',
windowModeTransparent: 'ڕۆشن',
windowModeWindow: 'پەنجەره'
} );
| gmuro/dolibarr | htdocs/includes/ckeditor/ckeditor/_source/plugins/flash/lang/ku.js | JavaScript | gpl-3.0 | 1,731 |
// Copyright 2010 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifdef ENABLE_GDB_JIT_INTERFACE
#include "src/v8.h"
#include "src/base/platform/platform.h"
#include "src/bootstrapper.h"
#include "src/compiler.h"
#include "src/frames-inl.h"
#include "src/frames.h"
#include "src/gdb-jit.h"
#include "src/global-handles.h"
#include "src/messages.h"
#include "src/natives.h"
#include "src/ostreams.h"
#include "src/scopes.h"
namespace v8 {
namespace internal {
#ifdef __APPLE__
#define __MACH_O
class MachO;
class MachOSection;
typedef MachO DebugObject;
typedef MachOSection DebugSection;
#else
#define __ELF
class ELF;
class ELFSection;
typedef ELF DebugObject;
typedef ELFSection DebugSection;
#endif
class Writer BASE_EMBEDDED {
public:
explicit Writer(DebugObject* debug_object)
: debug_object_(debug_object),
position_(0),
capacity_(1024),
buffer_(reinterpret_cast<byte*>(malloc(capacity_))) {
}
~Writer() {
free(buffer_);
}
uintptr_t position() const {
return position_;
}
template<typename T>
class Slot {
public:
Slot(Writer* w, uintptr_t offset) : w_(w), offset_(offset) { }
T* operator-> () {
return w_->RawSlotAt<T>(offset_);
}
void set(const T& value) {
*w_->RawSlotAt<T>(offset_) = value;
}
Slot<T> at(int i) {
return Slot<T>(w_, offset_ + sizeof(T) * i);
}
private:
Writer* w_;
uintptr_t offset_;
};
template<typename T>
void Write(const T& val) {
Ensure(position_ + sizeof(T));
*RawSlotAt<T>(position_) = val;
position_ += sizeof(T);
}
template<typename T>
Slot<T> SlotAt(uintptr_t offset) {
Ensure(offset + sizeof(T));
return Slot<T>(this, offset);
}
template<typename T>
Slot<T> CreateSlotHere() {
return CreateSlotsHere<T>(1);
}
template<typename T>
Slot<T> CreateSlotsHere(uint32_t count) {
uintptr_t slot_position = position_;
position_ += sizeof(T) * count;
Ensure(position_);
return SlotAt<T>(slot_position);
}
void Ensure(uintptr_t pos) {
if (capacity_ < pos) {
while (capacity_ < pos) capacity_ *= 2;
buffer_ = reinterpret_cast<byte*>(realloc(buffer_, capacity_));
}
}
DebugObject* debug_object() { return debug_object_; }
byte* buffer() { return buffer_; }
void Align(uintptr_t align) {
uintptr_t delta = position_ % align;
if (delta == 0) return;
uintptr_t padding = align - delta;
Ensure(position_ += padding);
DCHECK((position_ % align) == 0);
}
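  // Writes |value| as unsigned LEB128: 7 bits per byte, least significant
  // group first, with the high bit set on every byte except the last. For
  // example, 300 is emitted as the two bytes 0xAC 0x02.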
void WriteULEB128(uintptr_t value) {
do {
uint8_t byte = value & 0x7F;
value >>= 7;
if (value != 0) byte |= 0x80;
Write<uint8_t>(byte);
} while (value != 0);
}
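  // Writes |value| as signed LEB128: the same 7-bit groups, but emission
  // stops once the remaining bits are pure sign extension. For example,
  // -2 is emitted as the single byte 0x7E.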
void WriteSLEB128(intptr_t value) {
bool more = true;
while (more) {
int8_t byte = value & 0x7F;
bool byte_sign = byte & 0x40;
value >>= 7;
if ((value == 0 && !byte_sign) || (value == -1 && byte_sign)) {
more = false;
} else {
byte |= 0x80;
}
Write<int8_t>(byte);
}
}
void WriteString(const char* str) {
do {
Write<char>(*str);
} while (*str++);
}
private:
template<typename T> friend class Slot;
template<typename T>
T* RawSlotAt(uintptr_t offset) {
DCHECK(offset < capacity_ && offset + sizeof(T) <= capacity_);
return reinterpret_cast<T*>(&buffer_[offset]);
}
DebugObject* debug_object_;
uintptr_t position_;
uintptr_t capacity_;
byte* buffer_;
};
class ELFStringTable;
template<typename THeader>
class DebugSectionBase : public ZoneObject {
public:
virtual ~DebugSectionBase() { }
virtual void WriteBody(Writer::Slot<THeader> header, Writer* writer) {
uintptr_t start = writer->position();
if (WriteBodyInternal(writer)) {
uintptr_t end = writer->position();
header->offset = start;
#if defined(__MACH_O)
header->addr = 0;
#endif
header->size = end - start;
}
}
virtual bool WriteBodyInternal(Writer* writer) {
return false;
}
typedef THeader Header;
};
struct MachOSectionHeader {
char sectname[16];
char segname[16];
#if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
uint32_t addr;
uint32_t size;
#else
uint64_t addr;
uint64_t size;
#endif
uint32_t offset;
uint32_t align;
uint32_t reloff;
uint32_t nreloc;
uint32_t flags;
uint32_t reserved1;
uint32_t reserved2;
};
class MachOSection : public DebugSectionBase<MachOSectionHeader> {
public:
enum Type {
S_REGULAR = 0x0u,
S_ATTR_COALESCED = 0xbu,
S_ATTR_SOME_INSTRUCTIONS = 0x400u,
S_ATTR_DEBUG = 0x02000000u,
S_ATTR_PURE_INSTRUCTIONS = 0x80000000u
};
MachOSection(const char* name,
const char* segment,
uintptr_t align,
uint32_t flags)
: name_(name),
segment_(segment),
align_(align),
flags_(flags) {
if (align_ != 0) {
DCHECK(IsPowerOf2(align));
align_ = WhichPowerOf2(align_);
}
}
virtual ~MachOSection() { }
virtual void PopulateHeader(Writer::Slot<Header> header) {
header->addr = 0;
header->size = 0;
header->offset = 0;
header->align = align_;
header->reloff = 0;
header->nreloc = 0;
header->flags = flags_;
header->reserved1 = 0;
header->reserved2 = 0;
memset(header->sectname, 0, sizeof(header->sectname));
memset(header->segname, 0, sizeof(header->segname));
DCHECK(strlen(name_) < sizeof(header->sectname));
DCHECK(strlen(segment_) < sizeof(header->segname));
strncpy(header->sectname, name_, sizeof(header->sectname));
strncpy(header->segname, segment_, sizeof(header->segname));
}
private:
const char* name_;
const char* segment_;
uintptr_t align_;
uint32_t flags_;
};
struct ELFSectionHeader {
uint32_t name;
uint32_t type;
uintptr_t flags;
uintptr_t address;
uintptr_t offset;
uintptr_t size;
uint32_t link;
uint32_t info;
uintptr_t alignment;
uintptr_t entry_size;
};
#if defined(__ELF)
class ELFSection : public DebugSectionBase<ELFSectionHeader> {
public:
enum Type {
TYPE_NULL = 0,
TYPE_PROGBITS = 1,
TYPE_SYMTAB = 2,
TYPE_STRTAB = 3,
TYPE_RELA = 4,
TYPE_HASH = 5,
TYPE_DYNAMIC = 6,
TYPE_NOTE = 7,
TYPE_NOBITS = 8,
TYPE_REL = 9,
TYPE_SHLIB = 10,
TYPE_DYNSYM = 11,
TYPE_LOPROC = 0x70000000,
TYPE_X86_64_UNWIND = 0x70000001,
TYPE_HIPROC = 0x7fffffff,
TYPE_LOUSER = 0x80000000,
TYPE_HIUSER = 0xffffffff
};
enum Flags {
FLAG_WRITE = 1,
FLAG_ALLOC = 2,
FLAG_EXEC = 4
};
enum SpecialIndexes {
INDEX_ABSOLUTE = 0xfff1
};
ELFSection(const char* name, Type type, uintptr_t align)
: name_(name), type_(type), align_(align) { }
virtual ~ELFSection() { }
void PopulateHeader(Writer::Slot<Header> header, ELFStringTable* strtab);
virtual void WriteBody(Writer::Slot<Header> header, Writer* w) {
uintptr_t start = w->position();
if (WriteBodyInternal(w)) {
uintptr_t end = w->position();
header->offset = start;
header->size = end - start;
}
}
virtual bool WriteBodyInternal(Writer* w) {
return false;
}
uint16_t index() const { return index_; }
void set_index(uint16_t index) { index_ = index; }
protected:
virtual void PopulateHeader(Writer::Slot<Header> header) {
header->flags = 0;
header->address = 0;
header->offset = 0;
header->size = 0;
header->link = 0;
header->info = 0;
header->entry_size = 0;
}
private:
const char* name_;
Type type_;
uintptr_t align_;
uint16_t index_;
};
#endif // defined(__ELF)
#if defined(__MACH_O)
class MachOTextSection : public MachOSection {
public:
MachOTextSection(uintptr_t align,
uintptr_t addr,
uintptr_t size)
: MachOSection("__text",
"__TEXT",
align,
MachOSection::S_REGULAR |
MachOSection::S_ATTR_SOME_INSTRUCTIONS |
MachOSection::S_ATTR_PURE_INSTRUCTIONS),
addr_(addr),
size_(size) { }
protected:
virtual void PopulateHeader(Writer::Slot<Header> header) {
MachOSection::PopulateHeader(header);
header->addr = addr_;
header->size = size_;
}
private:
uintptr_t addr_;
uintptr_t size_;
};
#endif // defined(__MACH_O)
#if defined(__ELF)
class FullHeaderELFSection : public ELFSection {
public:
FullHeaderELFSection(const char* name,
Type type,
uintptr_t align,
uintptr_t addr,
uintptr_t offset,
uintptr_t size,
uintptr_t flags)
: ELFSection(name, type, align),
addr_(addr),
offset_(offset),
size_(size),
flags_(flags) { }
protected:
virtual void PopulateHeader(Writer::Slot<Header> header) {
ELFSection::PopulateHeader(header);
header->address = addr_;
header->offset = offset_;
header->size = size_;
header->flags = flags_;
}
private:
uintptr_t addr_;
uintptr_t offset_;
uintptr_t size_;
uintptr_t flags_;
};
class ELFStringTable : public ELFSection {
public:
explicit ELFStringTable(const char* name)
: ELFSection(name, TYPE_STRTAB, 1), writer_(NULL), offset_(0), size_(0) {
}
uintptr_t Add(const char* str) {
if (*str == '\0') return 0;
uintptr_t offset = size_;
WriteString(str);
return offset;
}
void AttachWriter(Writer* w) {
writer_ = w;
offset_ = writer_->position();
// First entry in the string table should be an empty string.
WriteString("");
}
void DetachWriter() {
writer_ = NULL;
}
virtual void WriteBody(Writer::Slot<Header> header, Writer* w) {
DCHECK(writer_ == NULL);
header->offset = offset_;
header->size = size_;
}
private:
void WriteString(const char* str) {
uintptr_t written = 0;
do {
writer_->Write(*str);
written++;
} while (*str++);
size_ += written;
}
Writer* writer_;
uintptr_t offset_;
uintptr_t size_;
};
void ELFSection::PopulateHeader(Writer::Slot<ELFSection::Header> header,
ELFStringTable* strtab) {
header->name = strtab->Add(name_);
header->type = type_;
header->alignment = align_;
PopulateHeader(header);
}
#endif // defined(__ELF)
#if defined(__MACH_O)
class MachO BASE_EMBEDDED {
public:
explicit MachO(Zone* zone) : zone_(zone), sections_(6, zone) { }
uint32_t AddSection(MachOSection* section) {
sections_.Add(section, zone_);
return sections_.length() - 1;
}
void Write(Writer* w, uintptr_t code_start, uintptr_t code_size) {
Writer::Slot<MachOHeader> header = WriteHeader(w);
uintptr_t load_command_start = w->position();
Writer::Slot<MachOSegmentCommand> cmd = WriteSegmentCommand(w,
code_start,
code_size);
WriteSections(w, cmd, header, load_command_start);
}
private:
struct MachOHeader {
uint32_t magic;
uint32_t cputype;
uint32_t cpusubtype;
uint32_t filetype;
uint32_t ncmds;
uint32_t sizeofcmds;
uint32_t flags;
#if V8_TARGET_ARCH_X64
uint32_t reserved;
#endif
};
struct MachOSegmentCommand {
uint32_t cmd;
uint32_t cmdsize;
char segname[16];
#if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
uint32_t vmaddr;
uint32_t vmsize;
uint32_t fileoff;
uint32_t filesize;
#else
uint64_t vmaddr;
uint64_t vmsize;
uint64_t fileoff;
uint64_t filesize;
#endif
uint32_t maxprot;
uint32_t initprot;
uint32_t nsects;
uint32_t flags;
};
enum MachOLoadCommandCmd {
LC_SEGMENT_32 = 0x00000001u,
LC_SEGMENT_64 = 0x00000019u
};
Writer::Slot<MachOHeader> WriteHeader(Writer* w) {
DCHECK(w->position() == 0);
Writer::Slot<MachOHeader> header = w->CreateSlotHere<MachOHeader>();
#if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
header->magic = 0xFEEDFACEu;
header->cputype = 7; // i386
header->cpusubtype = 3; // CPU_SUBTYPE_I386_ALL
#elif V8_TARGET_ARCH_X64
header->magic = 0xFEEDFACFu;
header->cputype = 7 | 0x01000000; // i386 | 64-bit ABI
header->cpusubtype = 3; // CPU_SUBTYPE_I386_ALL
header->reserved = 0;
#else
#error Unsupported target architecture.
#endif
header->filetype = 0x1; // MH_OBJECT
header->ncmds = 1;
header->sizeofcmds = 0;
header->flags = 0;
return header;
}
Writer::Slot<MachOSegmentCommand> WriteSegmentCommand(Writer* w,
uintptr_t code_start,
uintptr_t code_size) {
Writer::Slot<MachOSegmentCommand> cmd =
w->CreateSlotHere<MachOSegmentCommand>();
#if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
cmd->cmd = LC_SEGMENT_32;
#else
cmd->cmd = LC_SEGMENT_64;
#endif
cmd->vmaddr = code_start;
cmd->vmsize = code_size;
cmd->fileoff = 0;
cmd->filesize = 0;
cmd->maxprot = 7;
cmd->initprot = 7;
cmd->flags = 0;
cmd->nsects = sections_.length();
memset(cmd->segname, 0, 16);
cmd->cmdsize = sizeof(MachOSegmentCommand) + sizeof(MachOSection::Header) *
cmd->nsects;
return cmd;
}
void WriteSections(Writer* w,
Writer::Slot<MachOSegmentCommand> cmd,
Writer::Slot<MachOHeader> header,
uintptr_t load_command_start) {
Writer::Slot<MachOSection::Header> headers =
w->CreateSlotsHere<MachOSection::Header>(sections_.length());
cmd->fileoff = w->position();
header->sizeofcmds = w->position() - load_command_start;
for (int section = 0; section < sections_.length(); ++section) {
sections_[section]->PopulateHeader(headers.at(section));
sections_[section]->WriteBody(headers.at(section), w);
}
cmd->filesize = w->position() - (uintptr_t)cmd->fileoff;
}
Zone* zone_;
ZoneList<MachOSection*> sections_;
};
#endif // defined(__MACH_O)
#if defined(__ELF)
class ELF BASE_EMBEDDED {
public:
explicit ELF(Zone* zone) : zone_(zone), sections_(6, zone) {
sections_.Add(new(zone) ELFSection("", ELFSection::TYPE_NULL, 0), zone);
sections_.Add(new(zone) ELFStringTable(".shstrtab"), zone);
}
void Write(Writer* w) {
WriteHeader(w);
WriteSectionTable(w);
WriteSections(w);
}
ELFSection* SectionAt(uint32_t index) {
return sections_[index];
}
uint32_t AddSection(ELFSection* section) {
sections_.Add(section, zone_);
section->set_index(sections_.length() - 1);
return sections_.length() - 1;
}
private:
struct ELFHeader {
uint8_t ident[16];
uint16_t type;
uint16_t machine;
uint32_t version;
uintptr_t entry;
uintptr_t pht_offset;
uintptr_t sht_offset;
uint32_t flags;
uint16_t header_size;
uint16_t pht_entry_size;
uint16_t pht_entry_num;
uint16_t sht_entry_size;
uint16_t sht_entry_num;
uint16_t sht_strtab_index;
};
void WriteHeader(Writer* w) {
DCHECK(w->position() == 0);
Writer::Slot<ELFHeader> header = w->CreateSlotHere<ELFHeader>();
#if (V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_X87 || \
(V8_TARGET_ARCH_X64 && V8_TARGET_ARCH_32_BIT))
const uint8_t ident[16] =
{ 0x7f, 'E', 'L', 'F', 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0};
#elif V8_TARGET_ARCH_X64 && V8_TARGET_ARCH_64_BIT
const uint8_t ident[16] =
{ 0x7f, 'E', 'L', 'F', 2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0};
#else
#error Unsupported target architecture.
#endif
memcpy(header->ident, ident, 16);
header->type = 1;
#if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
header->machine = 3;
#elif V8_TARGET_ARCH_X64
// Processor identification value for x64 is 62 as defined in
// System V ABI, AMD64 Supplement
// http://www.x86-64.org/documentation/abi.pdf
header->machine = 62;
#elif V8_TARGET_ARCH_ARM
// Set to EM_ARM, defined as 40, in "ARM ELF File Format" at
// infocenter.arm.com/help/topic/com.arm.doc.dui0101a/DUI0101A_Elf.pdf
header->machine = 40;
#else
#error Unsupported target architecture.
#endif
header->version = 1;
header->entry = 0;
header->pht_offset = 0;
header->sht_offset = sizeof(ELFHeader); // Section table follows header.
header->flags = 0;
header->header_size = sizeof(ELFHeader);
header->pht_entry_size = 0;
header->pht_entry_num = 0;
header->sht_entry_size = sizeof(ELFSection::Header);
header->sht_entry_num = sections_.length();
header->sht_strtab_index = 1;
}
void WriteSectionTable(Writer* w) {
// Section headers table immediately follows file header.
DCHECK(w->position() == sizeof(ELFHeader));
Writer::Slot<ELFSection::Header> headers =
w->CreateSlotsHere<ELFSection::Header>(sections_.length());
// String table for section table is the first section.
ELFStringTable* strtab = static_cast<ELFStringTable*>(SectionAt(1));
strtab->AttachWriter(w);
for (int i = 0, length = sections_.length();
i < length;
i++) {
sections_[i]->PopulateHeader(headers.at(i), strtab);
}
strtab->DetachWriter();
}
int SectionHeaderPosition(uint32_t section_index) {
return sizeof(ELFHeader) + sizeof(ELFSection::Header) * section_index;
}
void WriteSections(Writer* w) {
Writer::Slot<ELFSection::Header> headers =
w->SlotAt<ELFSection::Header>(sizeof(ELFHeader));
for (int i = 0, length = sections_.length();
i < length;
i++) {
sections_[i]->WriteBody(headers.at(i), w);
}
}
Zone* zone_;
ZoneList<ELFSection*> sections_;
};
class ELFSymbol BASE_EMBEDDED {
public:
enum Type {
TYPE_NOTYPE = 0,
TYPE_OBJECT = 1,
TYPE_FUNC = 2,
TYPE_SECTION = 3,
TYPE_FILE = 4,
TYPE_LOPROC = 13,
TYPE_HIPROC = 15
};
enum Binding {
BIND_LOCAL = 0,
BIND_GLOBAL = 1,
BIND_WEAK = 2,
BIND_LOPROC = 13,
BIND_HIPROC = 15
};
ELFSymbol(const char* name,
uintptr_t value,
uintptr_t size,
Binding binding,
Type type,
uint16_t section)
: name(name),
value(value),
size(size),
info((binding << 4) | type),
other(0),
section(section) {
}
Binding binding() const {
return static_cast<Binding>(info >> 4);
}
#if (V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_X87 || \
(V8_TARGET_ARCH_X64 && V8_TARGET_ARCH_32_BIT))
struct SerializedLayout {
SerializedLayout(uint32_t name,
uintptr_t value,
uintptr_t size,
Binding binding,
Type type,
uint16_t section)
: name(name),
value(value),
size(size),
info((binding << 4) | type),
other(0),
section(section) {
}
uint32_t name;
uintptr_t value;
uintptr_t size;
uint8_t info;
uint8_t other;
uint16_t section;
};
#elif V8_TARGET_ARCH_X64 && V8_TARGET_ARCH_64_BIT
struct SerializedLayout {
SerializedLayout(uint32_t name,
uintptr_t value,
uintptr_t size,
Binding binding,
Type type,
uint16_t section)
: name(name),
info((binding << 4) | type),
other(0),
section(section),
value(value),
size(size) {
}
uint32_t name;
uint8_t info;
uint8_t other;
uint16_t section;
uintptr_t value;
uintptr_t size;
};
#endif
void Write(Writer::Slot<SerializedLayout> s, ELFStringTable* t) {
// Convert symbol names from strings to indexes in the string table.
s->name = t->Add(name);
s->value = value;
s->size = size;
s->info = info;
s->other = other;
s->section = section;
}
private:
const char* name;
uintptr_t value;
uintptr_t size;
uint8_t info;
uint8_t other;
uint16_t section;
};
class ELFSymbolTable : public ELFSection {
public:
ELFSymbolTable(const char* name, Zone* zone)
: ELFSection(name, TYPE_SYMTAB, sizeof(uintptr_t)),
locals_(1, zone),
globals_(1, zone) {
}
virtual void WriteBody(Writer::Slot<Header> header, Writer* w) {
w->Align(header->alignment);
int total_symbols = locals_.length() + globals_.length() + 1;
header->offset = w->position();
Writer::Slot<ELFSymbol::SerializedLayout> symbols =
w->CreateSlotsHere<ELFSymbol::SerializedLayout>(total_symbols);
header->size = w->position() - header->offset;
// String table for this symbol table should follow it in the section table.
ELFStringTable* strtab =
static_cast<ELFStringTable*>(w->debug_object()->SectionAt(index() + 1));
strtab->AttachWriter(w);
symbols.at(0).set(ELFSymbol::SerializedLayout(0,
0,
0,
ELFSymbol::BIND_LOCAL,
ELFSymbol::TYPE_NOTYPE,
0));
WriteSymbolsList(&locals_, symbols.at(1), strtab);
WriteSymbolsList(&globals_, symbols.at(locals_.length() + 1), strtab);
strtab->DetachWriter();
}
void Add(const ELFSymbol& symbol, Zone* zone) {
if (symbol.binding() == ELFSymbol::BIND_LOCAL) {
locals_.Add(symbol, zone);
} else {
globals_.Add(symbol, zone);
}
}
protected:
virtual void PopulateHeader(Writer::Slot<Header> header) {
ELFSection::PopulateHeader(header);
    // We are assuming that the string table will follow the symbol table.
header->link = index() + 1;
header->info = locals_.length() + 1;
header->entry_size = sizeof(ELFSymbol::SerializedLayout);
}
private:
void WriteSymbolsList(const ZoneList<ELFSymbol>* src,
Writer::Slot<ELFSymbol::SerializedLayout> dst,
ELFStringTable* strtab) {
for (int i = 0, len = src->length();
i < len;
i++) {
src->at(i).Write(dst.at(i), strtab);
}
}
ZoneList<ELFSymbol> locals_;
ZoneList<ELFSymbol> globals_;
};
#endif // defined(__ELF)
class LineInfo : public Malloced {
public:
LineInfo() : pc_info_(10) {}
void SetPosition(intptr_t pc, int pos, bool is_statement) {
AddPCInfo(PCInfo(pc, pos, is_statement));
}
struct PCInfo {
PCInfo(intptr_t pc, int pos, bool is_statement)
: pc_(pc), pos_(pos), is_statement_(is_statement) {}
intptr_t pc_;
int pos_;
bool is_statement_;
};
List<PCInfo>* pc_info() { return &pc_info_; }
private:
void AddPCInfo(const PCInfo& pc_info) { pc_info_.Add(pc_info); }
List<PCInfo> pc_info_;
};
class CodeDescription BASE_EMBEDDED {
public:
#if V8_TARGET_ARCH_X64
enum StackState {
POST_RBP_PUSH,
POST_RBP_SET,
POST_RBP_POP,
STACK_STATE_MAX
};
#endif
CodeDescription(const char* name, Code* code, Handle<Script> script,
LineInfo* lineinfo, GDBJITInterface::CodeTag tag,
CompilationInfo* info)
: name_(name),
code_(code),
script_(script),
lineinfo_(lineinfo),
tag_(tag),
info_(info) {}
const char* name() const {
return name_;
}
LineInfo* lineinfo() const { return lineinfo_; }
GDBJITInterface::CodeTag tag() const {
return tag_;
}
CompilationInfo* info() const {
return info_;
}
bool IsInfoAvailable() const {
return info_ != NULL;
}
uintptr_t CodeStart() const {
return reinterpret_cast<uintptr_t>(code_->instruction_start());
}
uintptr_t CodeEnd() const {
return reinterpret_cast<uintptr_t>(code_->instruction_end());
}
uintptr_t CodeSize() const {
return CodeEnd() - CodeStart();
}
bool IsLineInfoAvailable() {
return !script_.is_null() &&
script_->source()->IsString() &&
script_->HasValidSource() &&
script_->name()->IsString() &&
lineinfo_ != NULL;
}
#if V8_TARGET_ARCH_X64
uintptr_t GetStackStateStartAddress(StackState state) const {
DCHECK(state < STACK_STATE_MAX);
return stack_state_start_addresses_[state];
}
void SetStackStateStartAddress(StackState state, uintptr_t addr) {
DCHECK(state < STACK_STATE_MAX);
stack_state_start_addresses_[state] = addr;
}
#endif
SmartArrayPointer<char> GetFilename() {
return String::cast(script_->name())->ToCString();
}
int GetScriptLineNumber(int pos) {
return script_->GetLineNumber(pos) + 1;
}
private:
const char* name_;
Code* code_;
Handle<Script> script_;
LineInfo* lineinfo_;
GDBJITInterface::CodeTag tag_;
CompilationInfo* info_;
#if V8_TARGET_ARCH_X64
uintptr_t stack_state_start_addresses_[STACK_STATE_MAX];
#endif
};
#if defined(__ELF)
static void CreateSymbolsTable(CodeDescription* desc,
Zone* zone,
ELF* elf,
int text_section_index) {
ELFSymbolTable* symtab = new(zone) ELFSymbolTable(".symtab", zone);
ELFStringTable* strtab = new(zone) ELFStringTable(".strtab");
// Symbol table should be followed by the linked string table.
elf->AddSection(symtab);
elf->AddSection(strtab);
symtab->Add(ELFSymbol("V8 Code",
0,
0,
ELFSymbol::BIND_LOCAL,
ELFSymbol::TYPE_FILE,
ELFSection::INDEX_ABSOLUTE),
zone);
symtab->Add(ELFSymbol(desc->name(),
0,
desc->CodeSize(),
ELFSymbol::BIND_GLOBAL,
ELFSymbol::TYPE_FUNC,
text_section_index),
zone);
}
#endif // defined(__ELF)
class DebugInfoSection : public DebugSection {
public:
explicit DebugInfoSection(CodeDescription* desc)
#if defined(__ELF)
: ELFSection(".debug_info", TYPE_PROGBITS, 1),
#else
: MachOSection("__debug_info",
"__DWARF",
1,
MachOSection::S_REGULAR | MachOSection::S_ATTR_DEBUG),
#endif
desc_(desc) { }
// DWARF2 standard
enum DWARF2LocationOp {
DW_OP_reg0 = 0x50,
DW_OP_reg1 = 0x51,
DW_OP_reg2 = 0x52,
DW_OP_reg3 = 0x53,
DW_OP_reg4 = 0x54,
DW_OP_reg5 = 0x55,
DW_OP_reg6 = 0x56,
DW_OP_reg7 = 0x57,
DW_OP_fbreg = 0x91 // 1 param: SLEB128 offset
};
enum DWARF2Encoding {
DW_ATE_ADDRESS = 0x1,
DW_ATE_SIGNED = 0x5
};
bool WriteBodyInternal(Writer* w) {
uintptr_t cu_start = w->position();
Writer::Slot<uint32_t> size = w->CreateSlotHere<uint32_t>();
uintptr_t start = w->position();
w->Write<uint16_t>(2); // DWARF version.
w->Write<uint32_t>(0); // Abbreviation table offset.
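    // Address size in bytes.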
w->Write<uint8_t>(sizeof(intptr_t));
w->WriteULEB128(1); // Abbreviation code.
w->WriteString(desc_->GetFilename().get());
w->Write<intptr_t>(desc_->CodeStart());
w->Write<intptr_t>(desc_->CodeStart() + desc_->CodeSize());
w->Write<uint32_t>(0);
uint32_t ty_offset = static_cast<uint32_t>(w->position() - cu_start);
w->WriteULEB128(3);
w->Write<uint8_t>(kPointerSize);
w->WriteString("v8value");
if (desc_->IsInfoAvailable()) {
Scope* scope = desc_->info()->scope();
w->WriteULEB128(2);
w->WriteString(desc_->name());
w->Write<intptr_t>(desc_->CodeStart());
w->Write<intptr_t>(desc_->CodeStart() + desc_->CodeSize());
Writer::Slot<uint32_t> fb_block_size = w->CreateSlotHere<uint32_t>();
uintptr_t fb_block_start = w->position();
#if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
w->Write<uint8_t>(DW_OP_reg5); // The frame pointer's here on ia32
#elif V8_TARGET_ARCH_X64
w->Write<uint8_t>(DW_OP_reg6); // and here on x64.
#elif V8_TARGET_ARCH_ARM
UNIMPLEMENTED();
#elif V8_TARGET_ARCH_MIPS
UNIMPLEMENTED();
#elif V8_TARGET_ARCH_MIPS64
UNIMPLEMENTED();
#else
#error Unsupported target architecture.
#endif
fb_block_size.set(static_cast<uint32_t>(w->position() - fb_block_start));
int params = scope->num_parameters();
int slots = scope->num_stack_slots();
int context_slots = scope->ContextLocalCount();
// The real slot ID is internal_slots + context_slot_id.
int internal_slots = Context::MIN_CONTEXT_SLOTS;
int locals = scope->StackLocalCount();
int current_abbreviation = 4;
for (int param = 0; param < params; ++param) {
w->WriteULEB128(current_abbreviation++);
w->WriteString(
scope->parameter(param)->name()->ToCString(DISALLOW_NULLS).get());
w->Write<uint32_t>(ty_offset);
Writer::Slot<uint32_t> block_size = w->CreateSlotHere<uint32_t>();
uintptr_t block_start = w->position();
w->Write<uint8_t>(DW_OP_fbreg);
w->WriteSLEB128(
JavaScriptFrameConstants::kLastParameterOffset +
kPointerSize * (params - param - 1));
block_size.set(static_cast<uint32_t>(w->position() - block_start));
}
EmbeddedVector<char, 256> buffer;
StringBuilder builder(buffer.start(), buffer.length());
for (int slot = 0; slot < slots; ++slot) {
w->WriteULEB128(current_abbreviation++);
builder.Reset();
builder.AddFormatted("slot%d", slot);
w->WriteString(builder.Finalize());
}
// See contexts.h for more information.
DCHECK(Context::MIN_CONTEXT_SLOTS == 4);
DCHECK(Context::CLOSURE_INDEX == 0);
DCHECK(Context::PREVIOUS_INDEX == 1);
DCHECK(Context::EXTENSION_INDEX == 2);
DCHECK(Context::GLOBAL_OBJECT_INDEX == 3);
w->WriteULEB128(current_abbreviation++);
w->WriteString(".closure");
w->WriteULEB128(current_abbreviation++);
w->WriteString(".previous");
w->WriteULEB128(current_abbreviation++);
w->WriteString(".extension");
w->WriteULEB128(current_abbreviation++);
w->WriteString(".global");
for (int context_slot = 0;
context_slot < context_slots;
++context_slot) {
w->WriteULEB128(current_abbreviation++);
builder.Reset();
builder.AddFormatted("context_slot%d", context_slot + internal_slots);
w->WriteString(builder.Finalize());
}
ZoneList<Variable*> stack_locals(locals, scope->zone());
ZoneList<Variable*> context_locals(context_slots, scope->zone());
scope->CollectStackAndContextLocals(&stack_locals, &context_locals);
for (int local = 0; local < locals; ++local) {
w->WriteULEB128(current_abbreviation++);
w->WriteString(
stack_locals[local]->name()->ToCString(DISALLOW_NULLS).get());
w->Write<uint32_t>(ty_offset);
Writer::Slot<uint32_t> block_size = w->CreateSlotHere<uint32_t>();
uintptr_t block_start = w->position();
w->Write<uint8_t>(DW_OP_fbreg);
w->WriteSLEB128(
JavaScriptFrameConstants::kLocal0Offset -
kPointerSize * local);
block_size.set(static_cast<uint32_t>(w->position() - block_start));
}
{
w->WriteULEB128(current_abbreviation++);
w->WriteString("__function");
w->Write<uint32_t>(ty_offset);
Writer::Slot<uint32_t> block_size = w->CreateSlotHere<uint32_t>();
uintptr_t block_start = w->position();
w->Write<uint8_t>(DW_OP_fbreg);
w->WriteSLEB128(JavaScriptFrameConstants::kFunctionOffset);
block_size.set(static_cast<uint32_t>(w->position() - block_start));
}
{
w->WriteULEB128(current_abbreviation++);
w->WriteString("__context");
w->Write<uint32_t>(ty_offset);
Writer::Slot<uint32_t> block_size = w->CreateSlotHere<uint32_t>();
uintptr_t block_start = w->position();
w->Write<uint8_t>(DW_OP_fbreg);
w->WriteSLEB128(StandardFrameConstants::kContextOffset);
block_size.set(static_cast<uint32_t>(w->position() - block_start));
}
w->WriteULEB128(0); // Terminate the sub program.
}
w->WriteULEB128(0); // Terminate the compile unit.
size.set(static_cast<uint32_t>(w->position() - start));
return true;
}
private:
CodeDescription* desc_;
};
class DebugAbbrevSection : public DebugSection {
public:
explicit DebugAbbrevSection(CodeDescription* desc)
#ifdef __ELF
: ELFSection(".debug_abbrev", TYPE_PROGBITS, 1),
#else
: MachOSection("__debug_abbrev",
"__DWARF",
1,
MachOSection::S_REGULAR | MachOSection::S_ATTR_DEBUG),
#endif
desc_(desc) { }
// DWARF2 standard, figure 14.
enum DWARF2Tags {
DW_TAG_FORMAL_PARAMETER = 0x05,
DW_TAG_POINTER_TYPE = 0xf,
DW_TAG_COMPILE_UNIT = 0x11,
DW_TAG_STRUCTURE_TYPE = 0x13,
DW_TAG_BASE_TYPE = 0x24,
DW_TAG_SUBPROGRAM = 0x2e,
DW_TAG_VARIABLE = 0x34
};
// DWARF2 standard, figure 16.
enum DWARF2ChildrenDetermination {
DW_CHILDREN_NO = 0,
DW_CHILDREN_YES = 1
};
  // DWARF2 standard, figure 17.
enum DWARF2Attribute {
DW_AT_LOCATION = 0x2,
DW_AT_NAME = 0x3,
DW_AT_BYTE_SIZE = 0xb,
DW_AT_STMT_LIST = 0x10,
DW_AT_LOW_PC = 0x11,
DW_AT_HIGH_PC = 0x12,
DW_AT_ENCODING = 0x3e,
DW_AT_FRAME_BASE = 0x40,
DW_AT_TYPE = 0x49
};
// DWARF2 standard, figure 19.
enum DWARF2AttributeForm {
DW_FORM_ADDR = 0x1,
DW_FORM_BLOCK4 = 0x4,
DW_FORM_STRING = 0x8,
DW_FORM_DATA4 = 0x6,
DW_FORM_BLOCK = 0x9,
DW_FORM_DATA1 = 0xb,
DW_FORM_FLAG = 0xc,
DW_FORM_REF4 = 0x13
};
void WriteVariableAbbreviation(Writer* w,
int abbreviation_code,
bool has_value,
bool is_parameter) {
w->WriteULEB128(abbreviation_code);
w->WriteULEB128(is_parameter ? DW_TAG_FORMAL_PARAMETER : DW_TAG_VARIABLE);
w->Write<uint8_t>(DW_CHILDREN_NO);
w->WriteULEB128(DW_AT_NAME);
w->WriteULEB128(DW_FORM_STRING);
if (has_value) {
w->WriteULEB128(DW_AT_TYPE);
w->WriteULEB128(DW_FORM_REF4);
w->WriteULEB128(DW_AT_LOCATION);
w->WriteULEB128(DW_FORM_BLOCK4);
}
w->WriteULEB128(0);
w->WriteULEB128(0);
}
bool WriteBodyInternal(Writer* w) {
int current_abbreviation = 1;
bool extra_info = desc_->IsInfoAvailable();
DCHECK(desc_->IsLineInfoAvailable());
w->WriteULEB128(current_abbreviation++);
w->WriteULEB128(DW_TAG_COMPILE_UNIT);
w->Write<uint8_t>(extra_info ? DW_CHILDREN_YES : DW_CHILDREN_NO);
w->WriteULEB128(DW_AT_NAME);
w->WriteULEB128(DW_FORM_STRING);
w->WriteULEB128(DW_AT_LOW_PC);
w->WriteULEB128(DW_FORM_ADDR);
w->WriteULEB128(DW_AT_HIGH_PC);
w->WriteULEB128(DW_FORM_ADDR);
w->WriteULEB128(DW_AT_STMT_LIST);
w->WriteULEB128(DW_FORM_DATA4);
w->WriteULEB128(0);
w->WriteULEB128(0);
if (extra_info) {
Scope* scope = desc_->info()->scope();
int params = scope->num_parameters();
int slots = scope->num_stack_slots();
int context_slots = scope->ContextLocalCount();
// The real slot ID is internal_slots + context_slot_id.
int internal_slots = Context::MIN_CONTEXT_SLOTS;
int locals = scope->StackLocalCount();
// Total children is params + slots + context_slots + internal_slots +
// locals + 2 (__function and __context).
// The extra duplication below seems to be necessary to keep
// gdb from getting upset on OSX.
w->WriteULEB128(current_abbreviation++); // Abbreviation code.
w->WriteULEB128(DW_TAG_SUBPROGRAM);
w->Write<uint8_t>(DW_CHILDREN_YES);
w->WriteULEB128(DW_AT_NAME);
w->WriteULEB128(DW_FORM_STRING);
w->WriteULEB128(DW_AT_LOW_PC);
w->WriteULEB128(DW_FORM_ADDR);
w->WriteULEB128(DW_AT_HIGH_PC);
w->WriteULEB128(DW_FORM_ADDR);
w->WriteULEB128(DW_AT_FRAME_BASE);
w->WriteULEB128(DW_FORM_BLOCK4);
w->WriteULEB128(0);
w->WriteULEB128(0);
w->WriteULEB128(current_abbreviation++);
w->WriteULEB128(DW_TAG_STRUCTURE_TYPE);
w->Write<uint8_t>(DW_CHILDREN_NO);
w->WriteULEB128(DW_AT_BYTE_SIZE);
w->WriteULEB128(DW_FORM_DATA1);
w->WriteULEB128(DW_AT_NAME);
w->WriteULEB128(DW_FORM_STRING);
w->WriteULEB128(0);
w->WriteULEB128(0);
for (int param = 0; param < params; ++param) {
WriteVariableAbbreviation(w, current_abbreviation++, true, true);
}
for (int slot = 0; slot < slots; ++slot) {
WriteVariableAbbreviation(w, current_abbreviation++, false, false);
}
for (int internal_slot = 0;
internal_slot < internal_slots;
++internal_slot) {
WriteVariableAbbreviation(w, current_abbreviation++, false, false);
}
for (int context_slot = 0;
context_slot < context_slots;
++context_slot) {
WriteVariableAbbreviation(w, current_abbreviation++, false, false);
}
for (int local = 0; local < locals; ++local) {
WriteVariableAbbreviation(w, current_abbreviation++, true, false);
}
// The function.
WriteVariableAbbreviation(w, current_abbreviation++, true, false);
// The context.
WriteVariableAbbreviation(w, current_abbreviation++, true, false);
w->WriteULEB128(0); // Terminate the sibling list.
}
w->WriteULEB128(0); // Terminate the table.
return true;
}
private:
CodeDescription* desc_;
};
class DebugLineSection : public DebugSection {
public:
explicit DebugLineSection(CodeDescription* desc)
#ifdef __ELF
: ELFSection(".debug_line", TYPE_PROGBITS, 1),
#else
: MachOSection("__debug_line",
"__DWARF",
1,
MachOSection::S_REGULAR | MachOSection::S_ATTR_DEBUG),
#endif
desc_(desc) { }
// DWARF2 standard, figure 34.
enum DWARF2Opcodes {
DW_LNS_COPY = 1,
DW_LNS_ADVANCE_PC = 2,
DW_LNS_ADVANCE_LINE = 3,
DW_LNS_SET_FILE = 4,
DW_LNS_SET_COLUMN = 5,
DW_LNS_NEGATE_STMT = 6
};
// DWARF2 standard, figure 35.
enum DWARF2ExtendedOpcode {
DW_LNE_END_SEQUENCE = 1,
DW_LNE_SET_ADDRESS = 2,
DW_LNE_DEFINE_FILE = 3
};
bool WriteBodyInternal(Writer* w) {
// Write prologue.
Writer::Slot<uint32_t> total_length = w->CreateSlotHere<uint32_t>();
uintptr_t start = w->position();
// Used for special opcodes
const int8_t line_base = 1;
const uint8_t line_range = 7;
const int8_t max_line_incr = (line_base + line_range - 1);
const uint8_t opcode_base = DW_LNS_NEGATE_STMT + 1;
w->Write<uint16_t>(2); // Field version.
Writer::Slot<uint32_t> prologue_length = w->CreateSlotHere<uint32_t>();
uintptr_t prologue_start = w->position();
w->Write<uint8_t>(1); // Field minimum_instruction_length.
w->Write<uint8_t>(1); // Field default_is_stmt.
w->Write<int8_t>(line_base); // Field line_base.
w->Write<uint8_t>(line_range); // Field line_range.
w->Write<uint8_t>(opcode_base); // Field opcode_base.
w->Write<uint8_t>(0); // DW_LNS_COPY operands count.
w->Write<uint8_t>(1); // DW_LNS_ADVANCE_PC operands count.
w->Write<uint8_t>(1); // DW_LNS_ADVANCE_LINE operands count.
w->Write<uint8_t>(1); // DW_LNS_SET_FILE operands count.
w->Write<uint8_t>(1); // DW_LNS_SET_COLUMN operands count.
w->Write<uint8_t>(0); // DW_LNS_NEGATE_STMT operands count.
w->Write<uint8_t>(0); // Empty include_directories sequence.
w->WriteString(desc_->GetFilename().get()); // File name.
w->WriteULEB128(0); // Current directory.
w->WriteULEB128(0); // Unknown modification time.
w->WriteULEB128(0); // Unknown file size.
w->Write<uint8_t>(0);
prologue_length.set(static_cast<uint32_t>(w->position() - prologue_start));
WriteExtendedOpcode(w, DW_LNE_SET_ADDRESS, sizeof(intptr_t));
w->Write<intptr_t>(desc_->CodeStart());
w->Write<uint8_t>(DW_LNS_COPY);
intptr_t pc = 0;
intptr_t line = 1;
bool is_statement = true;
List<LineInfo::PCInfo>* pc_info = desc_->lineinfo()->pc_info();
pc_info->Sort(&ComparePCInfo);
int pc_info_length = pc_info->length();
for (int i = 0; i < pc_info_length; i++) {
LineInfo::PCInfo* info = &pc_info->at(i);
DCHECK(info->pc_ >= pc);
// Reduce bloating in the debug line table by removing duplicate line
// entries (per DWARF2 standard).
intptr_t new_line = desc_->GetScriptLineNumber(info->pos_);
if (new_line == line) {
continue;
}
// Mark statement boundaries. For a better debugging experience, mark
// the last pc address in the function as a statement (e.g. "}"), so that
// a user can see the result of the last line executed in the function,
// should control reach the end.
if ((i+1) == pc_info_length) {
if (!is_statement) {
w->Write<uint8_t>(DW_LNS_NEGATE_STMT);
}
} else if (is_statement != info->is_statement_) {
w->Write<uint8_t>(DW_LNS_NEGATE_STMT);
is_statement = !is_statement;
}
// Generate special opcodes, if possible. This results in more compact
// debug line tables. See the DWARF 2.0 standard to learn more about
// special opcodes.
uintptr_t pc_diff = info->pc_ - pc;
intptr_t line_diff = new_line - line;
// Compute special opcode (see DWARF 2.0 standard)
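      // With line_base = 1, line_range = 7 and opcode_base = 7, a step of
      // pc_diff = 2 and line_diff = 3 yields (3 - 1) + 7 * 2 + 7 = 23.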
intptr_t special_opcode = (line_diff - line_base) +
(line_range * pc_diff) + opcode_base;
// If special_opcode is less than or equal to 255, it can be used as a
// special opcode. If line_diff is larger than the max line increment
// allowed for a special opcode, or if line_diff is less than the minimum
// line that can be added to the line register (i.e. line_base), then
// special_opcode can't be used.
if ((special_opcode >= opcode_base) && (special_opcode <= 255) &&
(line_diff <= max_line_incr) && (line_diff >= line_base)) {
w->Write<uint8_t>(special_opcode);
} else {
w->Write<uint8_t>(DW_LNS_ADVANCE_PC);
w->WriteSLEB128(pc_diff);
w->Write<uint8_t>(DW_LNS_ADVANCE_LINE);
w->WriteSLEB128(line_diff);
w->Write<uint8_t>(DW_LNS_COPY);
}
// Increment the pc and line operands.
pc += pc_diff;
line += line_diff;
}
// Advance the pc to the end of the routine, since the end sequence opcode
// requires this.
w->Write<uint8_t>(DW_LNS_ADVANCE_PC);
w->WriteSLEB128(desc_->CodeSize() - pc);
WriteExtendedOpcode(w, DW_LNE_END_SEQUENCE, 0);
total_length.set(static_cast<uint32_t>(w->position() - start));
return true;
}
private:
void WriteExtendedOpcode(Writer* w,
DWARF2ExtendedOpcode op,
size_t operands_size) {
w->Write<uint8_t>(0);
w->WriteULEB128(operands_size + 1);
w->Write<uint8_t>(op);
}
static int ComparePCInfo(const LineInfo::PCInfo* a,
const LineInfo::PCInfo* b) {
if (a->pc_ == b->pc_) {
if (a->is_statement_ != b->is_statement_) {
return b->is_statement_ ? +1 : -1;
}
return 0;
} else if (a->pc_ > b->pc_) {
return +1;
} else {
return -1;
}
}
CodeDescription* desc_;
};
#if V8_TARGET_ARCH_X64
class UnwindInfoSection : public DebugSection {
public:
explicit UnwindInfoSection(CodeDescription* desc);
virtual bool WriteBodyInternal(Writer* w);
int WriteCIE(Writer* w);
void WriteFDE(Writer* w, int);
void WriteFDEStateOnEntry(Writer* w);
void WriteFDEStateAfterRBPPush(Writer* w);
void WriteFDEStateAfterRBPSet(Writer* w);
void WriteFDEStateAfterRBPPop(Writer* w);
void WriteLength(Writer* w,
Writer::Slot<uint32_t>* length_slot,
int initial_position);
private:
CodeDescription* desc_;
// DWARF3 Specification, Table 7.23
enum CFIInstructions {
DW_CFA_ADVANCE_LOC = 0x40,
DW_CFA_OFFSET = 0x80,
DW_CFA_RESTORE = 0xC0,
DW_CFA_NOP = 0x00,
DW_CFA_SET_LOC = 0x01,
DW_CFA_ADVANCE_LOC1 = 0x02,
DW_CFA_ADVANCE_LOC2 = 0x03,
DW_CFA_ADVANCE_LOC4 = 0x04,
DW_CFA_OFFSET_EXTENDED = 0x05,
DW_CFA_RESTORE_EXTENDED = 0x06,
DW_CFA_UNDEFINED = 0x07,
DW_CFA_SAME_VALUE = 0x08,
DW_CFA_REGISTER = 0x09,
DW_CFA_REMEMBER_STATE = 0x0A,
DW_CFA_RESTORE_STATE = 0x0B,
DW_CFA_DEF_CFA = 0x0C,
DW_CFA_DEF_CFA_REGISTER = 0x0D,
DW_CFA_DEF_CFA_OFFSET = 0x0E,
DW_CFA_DEF_CFA_EXPRESSION = 0x0F,
DW_CFA_EXPRESSION = 0x10,
DW_CFA_OFFSET_EXTENDED_SF = 0x11,
DW_CFA_DEF_CFA_SF = 0x12,
DW_CFA_DEF_CFA_OFFSET_SF = 0x13,
DW_CFA_VAL_OFFSET = 0x14,
DW_CFA_VAL_OFFSET_SF = 0x15,
DW_CFA_VAL_EXPRESSION = 0x16
};
// System V ABI, AMD64 Supplement, Version 0.99.5, Figure 3.36
enum RegisterMapping {
// Only the relevant ones have been added to reduce clutter.
AMD64_RBP = 6,
AMD64_RSP = 7,
AMD64_RA = 16
};
enum CFIConstants {
CIE_ID = 0,
CIE_VERSION = 1,
CODE_ALIGN_FACTOR = 1,
DATA_ALIGN_FACTOR = 1,
RETURN_ADDRESS_REGISTER = AMD64_RA
};
};
void UnwindInfoSection::WriteLength(Writer* w,
Writer::Slot<uint32_t>* length_slot,
int initial_position) {
uint32_t align = (w->position() - initial_position) % kPointerSize;
if (align != 0) {
for (uint32_t i = 0; i < (kPointerSize - align); i++) {
w->Write<uint8_t>(DW_CFA_NOP);
}
}
DCHECK((w->position() - initial_position) % kPointerSize == 0);
length_slot->set(w->position() - initial_position);
}
UnwindInfoSection::UnwindInfoSection(CodeDescription* desc)
#ifdef __ELF
: ELFSection(".eh_frame", TYPE_X86_64_UNWIND, 1),
#else
: MachOSection("__eh_frame", "__TEXT", sizeof(uintptr_t),
MachOSection::S_REGULAR),
#endif
desc_(desc) { }
int UnwindInfoSection::WriteCIE(Writer* w) {
Writer::Slot<uint32_t> cie_length_slot = w->CreateSlotHere<uint32_t>();
uint32_t cie_position = w->position();
// Write out the CIE header. Currently no 'common instructions' are
// emitted onto the CIE; every FDE has its own set of instructions.
w->Write<uint32_t>(CIE_ID);
w->Write<uint8_t>(CIE_VERSION);
w->Write<uint8_t>(0); // Null augmentation string.
w->WriteSLEB128(CODE_ALIGN_FACTOR);
w->WriteSLEB128(DATA_ALIGN_FACTOR);
w->Write<uint8_t>(RETURN_ADDRESS_REGISTER);
WriteLength(w, &cie_length_slot, cie_position);
return cie_position;
}
void UnwindInfoSection::WriteFDE(Writer* w, int cie_position) {
// The only FDE for this function. The CFA is the current RBP.
Writer::Slot<uint32_t> fde_length_slot = w->CreateSlotHere<uint32_t>();
int fde_position = w->position();
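  // CIE pointer: the offset from this field back to the start of the CIE
  // record written above.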
w->Write<int32_t>(fde_position - cie_position + 4);
w->Write<uintptr_t>(desc_->CodeStart());
w->Write<uintptr_t>(desc_->CodeSize());
WriteFDEStateOnEntry(w);
WriteFDEStateAfterRBPPush(w);
WriteFDEStateAfterRBPSet(w);
WriteFDEStateAfterRBPPop(w);
WriteLength(w, &fde_length_slot, fde_position);
}
void UnwindInfoSection::WriteFDEStateOnEntry(Writer* w) {
  // The first state, just after control has been transferred to the
  // function.
// RBP for this function will be the value of RSP after pushing the RBP
// for the previous function. The previous RBP has not been pushed yet.
w->Write<uint8_t>(DW_CFA_DEF_CFA_SF);
w->WriteULEB128(AMD64_RSP);
w->WriteSLEB128(-kPointerSize);
// The RA is stored at location CFA + kCallerPCOffset. This is an invariant,
// and hence omitted from the next states.
w->Write<uint8_t>(DW_CFA_OFFSET_EXTENDED);
w->WriteULEB128(AMD64_RA);
w->WriteSLEB128(StandardFrameConstants::kCallerPCOffset);
// The RBP of the previous function is still in RBP.
w->Write<uint8_t>(DW_CFA_SAME_VALUE);
w->WriteULEB128(AMD64_RBP);
// Last location described by this entry.
w->Write<uint8_t>(DW_CFA_SET_LOC);
w->Write<uint64_t>(
desc_->GetStackStateStartAddress(CodeDescription::POST_RBP_PUSH));
}
void UnwindInfoSection::WriteFDEStateAfterRBPPush(Writer* w) {
// The second state, just after RBP has been pushed.
// RBP / CFA for this function is now the current RSP, so just set the
// offset from the previous rule (from -8) to 0.
w->Write<uint8_t>(DW_CFA_DEF_CFA_OFFSET);
w->WriteULEB128(0);
// The previous RBP is stored at CFA + kCallerFPOffset. This is an invariant
// in this and the next state, and hence omitted in the next state.
w->Write<uint8_t>(DW_CFA_OFFSET_EXTENDED);
w->WriteULEB128(AMD64_RBP);
w->WriteSLEB128(StandardFrameConstants::kCallerFPOffset);
// Last location described by this entry.
w->Write<uint8_t>(DW_CFA_SET_LOC);
w->Write<uint64_t>(
desc_->GetStackStateStartAddress(CodeDescription::POST_RBP_SET));
}
void UnwindInfoSection::WriteFDEStateAfterRBPSet(Writer* w) {
// The third state, after the RBP has been set.
// The CFA can now directly be set to RBP.
w->Write<uint8_t>(DW_CFA_DEF_CFA);
w->WriteULEB128(AMD64_RBP);
w->WriteULEB128(0);
// Last location described by this entry.
w->Write<uint8_t>(DW_CFA_SET_LOC);
w->Write<uint64_t>(
desc_->GetStackStateStartAddress(CodeDescription::POST_RBP_POP));
}
void UnwindInfoSection::WriteFDEStateAfterRBPPop(Writer* w) {
// The fourth (final) state. The RBP has been popped (just before issuing a
// return).
  // The CFA is now calculated in the same way as in the first state.
w->Write<uint8_t>(DW_CFA_DEF_CFA_SF);
w->WriteULEB128(AMD64_RSP);
w->WriteSLEB128(-kPointerSize);
  // The previous RBP is still described as saved at CFA + kCallerFPOffset.
w->Write<uint8_t>(DW_CFA_OFFSET_EXTENDED);
w->WriteULEB128(AMD64_RBP);
w->WriteSLEB128(StandardFrameConstants::kCallerFPOffset);
// Last location described by this entry.
w->Write<uint8_t>(DW_CFA_SET_LOC);
w->Write<uint64_t>(desc_->CodeEnd());
}
bool UnwindInfoSection::WriteBodyInternal(Writer* w) {
uint32_t cie_position = WriteCIE(w);
WriteFDE(w, cie_position);
return true;
}
#endif // V8_TARGET_ARCH_X64
static void CreateDWARFSections(CodeDescription* desc,
Zone* zone,
DebugObject* obj) {
if (desc->IsLineInfoAvailable()) {
obj->AddSection(new(zone) DebugInfoSection(desc));
obj->AddSection(new(zone) DebugAbbrevSection(desc));
obj->AddSection(new(zone) DebugLineSection(desc));
}
#if V8_TARGET_ARCH_X64
obj->AddSection(new(zone) UnwindInfoSection(desc));
#endif
}
// -------------------------------------------------------------------
// Binary GDB JIT Interface as described in
// http://sourceware.org/gdb/onlinedocs/gdb/Declarations.html
extern "C" {
typedef enum {
JIT_NOACTION = 0,
JIT_REGISTER_FN,
JIT_UNREGISTER_FN
} JITAction;
struct JITCodeEntry {
JITCodeEntry* next_;
JITCodeEntry* prev_;
Address symfile_addr_;
uint64_t symfile_size_;
};
struct JITDescriptor {
uint32_t version_;
uint32_t action_flag_;
JITCodeEntry* relevant_entry_;
JITCodeEntry* first_entry_;
};
  // GDB will place a breakpoint in this function.
  // To prevent GCC from inlining or removing it we add the noinline attribute
  // and an inline assembler statement inside.
void __attribute__((noinline)) __jit_debug_register_code() {
__asm__("");
}
  // GDB will inspect the contents of this descriptor.
  // Static initialization is necessary to prevent GDB from seeing an
  // uninitialized descriptor.
JITDescriptor __jit_debug_descriptor = { 1, 0, 0, 0 };
#ifdef OBJECT_PRINT
void __gdb_print_v8_object(Object* object) {
OFStream os(stdout);
object->Print(os);
os << flush;
}
#endif
}
static JITCodeEntry* CreateCodeEntry(Address symfile_addr,
uintptr_t symfile_size) {
JITCodeEntry* entry = static_cast<JITCodeEntry*>(
malloc(sizeof(JITCodeEntry) + symfile_size));
entry->symfile_addr_ = reinterpret_cast<Address>(entry + 1);
entry->symfile_size_ = symfile_size;
MemCopy(entry->symfile_addr_, symfile_addr, symfile_size);
entry->prev_ = entry->next_ = NULL;
return entry;
}
static void DestroyCodeEntry(JITCodeEntry* entry) {
free(entry);
}
static void RegisterCodeEntry(JITCodeEntry* entry,
bool dump_if_enabled,
const char* name_hint) {
#if defined(DEBUG) && !V8_OS_WIN
static int file_num = 0;
if (FLAG_gdbjit_dump && dump_if_enabled) {
static const int kMaxFileNameSize = 64;
static const char* kElfFilePrefix = "/tmp/elfdump";
static const char* kObjFileExt = ".o";
char file_name[64];
SNPrintF(Vector<char>(file_name, kMaxFileNameSize),
"%s%s%d%s",
kElfFilePrefix,
(name_hint != NULL) ? name_hint : "",
file_num++,
kObjFileExt);
WriteBytes(file_name, entry->symfile_addr_, entry->symfile_size_);
}
#endif
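  // Link the new entry at the head of the descriptor's doubly-linked list
  // and notify GDB through the registration hook it sets a breakpoint on.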
entry->next_ = __jit_debug_descriptor.first_entry_;
if (entry->next_ != NULL) entry->next_->prev_ = entry;
__jit_debug_descriptor.first_entry_ =
__jit_debug_descriptor.relevant_entry_ = entry;
__jit_debug_descriptor.action_flag_ = JIT_REGISTER_FN;
__jit_debug_register_code();
}
static void UnregisterCodeEntry(JITCodeEntry* entry) {
if (entry->prev_ != NULL) {
entry->prev_->next_ = entry->next_;
} else {
__jit_debug_descriptor.first_entry_ = entry->next_;
}
if (entry->next_ != NULL) {
entry->next_->prev_ = entry->prev_;
}
__jit_debug_descriptor.relevant_entry_ = entry;
__jit_debug_descriptor.action_flag_ = JIT_UNREGISTER_FN;
__jit_debug_register_code();
}
static JITCodeEntry* CreateELFObject(CodeDescription* desc, Isolate* isolate) {
#ifdef __MACH_O
Zone zone(isolate);
MachO mach_o(&zone);
Writer w(&mach_o);
mach_o.AddSection(new(&zone) MachOTextSection(kCodeAlignment,
desc->CodeStart(),
desc->CodeSize()));
CreateDWARFSections(desc, &zone, &mach_o);
mach_o.Write(&w, desc->CodeStart(), desc->CodeSize());
#else
Zone zone(isolate);
ELF elf(&zone);
Writer w(&elf);
int text_section_index = elf.AddSection(
new(&zone) FullHeaderELFSection(
".text",
ELFSection::TYPE_NOBITS,
kCodeAlignment,
desc->CodeStart(),
0,
desc->CodeSize(),
ELFSection::FLAG_ALLOC | ELFSection::FLAG_EXEC));
CreateSymbolsTable(desc, &zone, &elf, text_section_index);
CreateDWARFSections(desc, &zone, &elf);
elf.Write(&w);
#endif
return CreateCodeEntry(w.buffer(), w.position());
}
static bool SameCodeObjects(void* key1, void* key2) {
return key1 == key2;
}
static HashMap* GetEntries() {
static HashMap* entries = NULL;
if (entries == NULL) {
entries = new HashMap(&SameCodeObjects);
}
return entries;
}
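// Multiplicative hash of the code object's start address; the low alignment
// bits carry no information, so they are shifted out first.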
static uint32_t HashForCodeObject(Code* code) {
static const uintptr_t kGoldenRatio = 2654435761u;
uintptr_t hash = reinterpret_cast<uintptr_t>(code->address());
return static_cast<uint32_t>((hash >> kCodeAlignmentBits) * kGoldenRatio);
}
static const intptr_t kLineInfoTag = 0x1;
static bool IsLineInfoTagged(void* ptr) {
return 0 != (reinterpret_cast<intptr_t>(ptr) & kLineInfoTag);
}
static void* TagLineInfo(LineInfo* ptr) {
return reinterpret_cast<void*>(
reinterpret_cast<intptr_t>(ptr) | kLineInfoTag);
}
static LineInfo* UntagLineInfo(void* ptr) {
return reinterpret_cast<LineInfo*>(reinterpret_cast<intptr_t>(ptr) &
~kLineInfoTag);
}
void GDBJITInterface::AddCode(Handle<Name> name,
Handle<Script> script,
Handle<Code> code,
CompilationInfo* info) {
if (!FLAG_gdbjit) return;
Script::InitLineEnds(script);
if (!name.is_null() && name->IsString()) {
SmartArrayPointer<char> name_cstring =
Handle<String>::cast(name)->ToCString(DISALLOW_NULLS);
AddCode(name_cstring.get(), *code, GDBJITInterface::FUNCTION, *script,
info);
} else {
AddCode("", *code, GDBJITInterface::FUNCTION, *script, info);
}
}
static void AddUnwindInfo(CodeDescription* desc) {
#if V8_TARGET_ARCH_X64
if (desc->tag() == GDBJITInterface::FUNCTION) {
    // To avoid propagating unwinding information through the
    // compilation pipeline we use an approximation.
// For most use cases this should not affect usability.
static const int kFramePointerPushOffset = 1;
static const int kFramePointerSetOffset = 4;
static const int kFramePointerPopOffset = -3;
uintptr_t frame_pointer_push_address =
desc->CodeStart() + kFramePointerPushOffset;
uintptr_t frame_pointer_set_address =
desc->CodeStart() + kFramePointerSetOffset;
uintptr_t frame_pointer_pop_address =
desc->CodeEnd() + kFramePointerPopOffset;
desc->SetStackStateStartAddress(CodeDescription::POST_RBP_PUSH,
frame_pointer_push_address);
desc->SetStackStateStartAddress(CodeDescription::POST_RBP_SET,
frame_pointer_set_address);
desc->SetStackStateStartAddress(CodeDescription::POST_RBP_POP,
frame_pointer_pop_address);
} else {
desc->SetStackStateStartAddress(CodeDescription::POST_RBP_PUSH,
desc->CodeStart());
desc->SetStackStateStartAddress(CodeDescription::POST_RBP_SET,
desc->CodeStart());
desc->SetStackStateStartAddress(CodeDescription::POST_RBP_POP,
desc->CodeEnd());
}
#endif // V8_TARGET_ARCH_X64
}
static base::LazyMutex mutex = LAZY_MUTEX_INITIALIZER;
void GDBJITInterface::AddCode(const char* name,
Code* code,
GDBJITInterface::CodeTag tag,
Script* script,
CompilationInfo* info) {
base::LockGuard<base::Mutex> lock_guard(mutex.Pointer());
DisallowHeapAllocation no_gc;
HashMap::Entry* e = GetEntries()->Lookup(code, HashForCodeObject(code), true);
if (e->value != NULL && !IsLineInfoTagged(e->value)) return;
LineInfo* lineinfo = UntagLineInfo(e->value);
CodeDescription code_desc(name,
code,
script != NULL ? Handle<Script>(script)
: Handle<Script>(),
lineinfo,
tag,
info);
if (!FLAG_gdbjit_full && !code_desc.IsLineInfoAvailable()) {
delete lineinfo;
GetEntries()->Remove(code, HashForCodeObject(code));
return;
}
AddUnwindInfo(&code_desc);
Isolate* isolate = code->GetIsolate();
JITCodeEntry* entry = CreateELFObject(&code_desc, isolate);
DCHECK(!IsLineInfoTagged(entry));
delete lineinfo;
e->value = entry;
const char* name_hint = NULL;
bool should_dump = false;
if (FLAG_gdbjit_dump) {
if (strlen(FLAG_gdbjit_dump_filter) == 0) {
name_hint = name;
should_dump = true;
} else if (name != NULL) {
name_hint = strstr(name, FLAG_gdbjit_dump_filter);
should_dump = (name_hint != NULL);
}
}
RegisterCodeEntry(entry, should_dump, name_hint);
}
void GDBJITInterface::RemoveCode(Code* code) {
if (!FLAG_gdbjit) return;
base::LockGuard<base::Mutex> lock_guard(mutex.Pointer());
HashMap::Entry* e = GetEntries()->Lookup(code,
HashForCodeObject(code),
false);
if (e == NULL) return;
if (IsLineInfoTagged(e->value)) {
delete UntagLineInfo(e->value);
} else {
JITCodeEntry* entry = static_cast<JITCodeEntry*>(e->value);
UnregisterCodeEntry(entry);
DestroyCodeEntry(entry);
}
e->value = NULL;
GetEntries()->Remove(code, HashForCodeObject(code));
}
void GDBJITInterface::RemoveCodeRange(Address start, Address end) {
HashMap* entries = GetEntries();
Zone zone(Isolate::Current());
ZoneList<Code*> dead_codes(1, &zone);
for (HashMap::Entry* e = entries->Start(); e != NULL; e = entries->Next(e)) {
Code* code = reinterpret_cast<Code*>(e->key);
if (code->address() >= start && code->address() < end) {
dead_codes.Add(code, &zone);
}
}
for (int i = 0; i < dead_codes.length(); i++) {
RemoveCode(dead_codes.at(i));
}
}
static void RegisterDetailedLineInfo(Code* code, LineInfo* line_info) {
base::LockGuard<base::Mutex> lock_guard(mutex.Pointer());
DCHECK(!IsLineInfoTagged(line_info));
HashMap::Entry* e = GetEntries()->Lookup(code, HashForCodeObject(code), true);
DCHECK(e->value == NULL);
e->value = TagLineInfo(line_info);
}
void GDBJITInterface::EventHandler(const v8::JitCodeEvent* event) {
if (!FLAG_gdbjit) return;
switch (event->type) {
case v8::JitCodeEvent::CODE_ADDED: {
Code* code = Code::GetCodeFromTargetAddress(
reinterpret_cast<Address>(event->code_start));
if (code->kind() == Code::OPTIMIZED_FUNCTION ||
code->kind() == Code::FUNCTION) {
break;
}
EmbeddedVector<char, 256> buffer;
StringBuilder builder(buffer.start(), buffer.length());
builder.AddSubstring(event->name.str, static_cast<int>(event->name.len));
AddCode(builder.Finalize(), code, NON_FUNCTION, NULL, NULL);
break;
}
case v8::JitCodeEvent::CODE_MOVED:
break;
case v8::JitCodeEvent::CODE_REMOVED: {
Code* code = Code::GetCodeFromTargetAddress(
reinterpret_cast<Address>(event->code_start));
RemoveCode(code);
break;
}
case v8::JitCodeEvent::CODE_ADD_LINE_POS_INFO: {
LineInfo* line_info = reinterpret_cast<LineInfo*>(event->user_data);
line_info->SetPosition(static_cast<intptr_t>(event->line_info.offset),
static_cast<int>(event->line_info.pos),
event->line_info.position_type ==
v8::JitCodeEvent::STATEMENT_POSITION);
break;
}
case v8::JitCodeEvent::CODE_START_LINE_INFO_RECORDING: {
v8::JitCodeEvent* mutable_event = const_cast<v8::JitCodeEvent*>(event);
mutable_event->user_data = new LineInfo();
break;
}
case v8::JitCodeEvent::CODE_END_LINE_INFO_RECORDING: {
LineInfo* line_info = reinterpret_cast<LineInfo*>(event->user_data);
Code* code = Code::GetCodeFromTargetAddress(
reinterpret_cast<Address>(event->code_start));
RegisterDetailedLineInfo(code, line_info);
break;
}
}
}
} } // namespace v8::internal
#endif
| xushiwei/fibjs | vender/src/v8/src/gdb-jit.cc | C++ | gpl-3.0 | 63,323 |
#--
# Copyright (c) 2004-2010 David Heinemeier Hansson
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#++
activesupport_path = File.expand_path('../../../activesupport/lib', __FILE__)
$:.unshift(activesupport_path) if File.directory?(activesupport_path) && !$:.include?(activesupport_path)
activemodel_path = File.expand_path('../../../activemodel/lib', __FILE__)
$:.unshift(activemodel_path) if File.directory?(activemodel_path) && !$:.include?(activemodel_path)
require 'active_support'
require 'active_support/i18n'
require 'active_model'
require 'arel'
require 'active_record/version'
module ActiveRecord
extend ActiveSupport::Autoload
eager_autoload do
autoload :ActiveRecordError, 'active_record/errors'
autoload :ConnectionNotEstablished, 'active_record/errors'
autoload :Aggregations
autoload :AssociationPreload
autoload :Associations
autoload :AttributeMethods
autoload :AutosaveAssociation
autoload :Relation
autoload_under 'relation' do
autoload :QueryMethods
autoload :FinderMethods
autoload :Calculations
autoload :PredicateBuilder
autoload :SpawnMethods
autoload :Batches
end
autoload :Base
autoload :Callbacks
autoload :CounterCache
autoload :DynamicFinderMatch
autoload :DynamicScopeMatch
autoload :Migration
autoload :Migrator, 'active_record/migration'
autoload :NamedScope
autoload :NestedAttributes
autoload :Observer
autoload :Persistence
autoload :QueryCache
autoload :Reflection
autoload :Schema
autoload :SchemaDumper
autoload :Serialization
autoload :SessionStore
autoload :Timestamp
autoload :Transactions
autoload :Validations
end
module AttributeMethods
extend ActiveSupport::Autoload
eager_autoload do
autoload :BeforeTypeCast
autoload :Dirty
autoload :PrimaryKey
autoload :Query
autoload :Read
autoload :TimeZoneConversion
autoload :Write
end
end
module Locking
extend ActiveSupport::Autoload
eager_autoload do
autoload :Optimistic
autoload :Pessimistic
end
end
module ConnectionAdapters
extend ActiveSupport::Autoload
eager_autoload do
autoload :AbstractAdapter
autoload :ConnectionManagement, "active_record/connection_adapters/abstract/connection_pool"
end
end
autoload :TestCase
autoload :TestFixtures, 'active_record/fixtures'
end
ActiveSupport.on_load(:active_record) do
Arel::Table.engine = self
end
I18n.load_path << File.dirname(__FILE__) + '/active_record/locale/en.yml'
| mzemel/kpsu.org | vendor/gems/ruby/1.8/gems/activerecord-3.0.3/lib/active_record.rb | Ruby | gpl-3.0 | 3,626 |
package org.thoughtcrime.securesms.push;
import android.content.Context;
import org.thoughtcrime.securesms.BuildConfig;
import org.thoughtcrime.securesms.util.TextSecurePreferences;
import org.whispersystems.signalservice.api.SignalServiceAccountManager;
public class TextSecureCommunicationFactory {
public static SignalServiceAccountManager createManager(Context context) {
return new SignalServiceAccountManager(BuildConfig.TEXTSECURE_URL,
new TextSecurePushTrustStore(context),
TextSecurePreferences.getLocalNumber(context),
TextSecurePreferences.getPushServerPassword(context),
BuildConfig.USER_AGENT);
}
public static SignalServiceAccountManager createManager(Context context, String number, String password) {
return new SignalServiceAccountManager(BuildConfig.TEXTSECURE_URL, new TextSecurePushTrustStore(context),
number, password, BuildConfig.USER_AGENT);
}
}
| IBobko/signal | src/org/thoughtcrime/securesms/push/TextSecureCommunicationFactory.java | Java | gpl-3.0 | 1,107 |
<?php
return array(
'about_asset_depreciations' => 'Over afschrijving van materiaal',
'about_depreciations' => 'U kan de materiaalafschrijving instellen om materiaal af te schrijven op basis van lineaire afschrijving.',
'asset_depreciations' => 'Materiaalafschrijvingen',
'create' => 'Afschrijving aanmaken',
'depreciation_name' => 'Afschrijvingsnaam',
'number_of_months' => 'Aantal maanden',
'update' => 'Afschrijving bijwerken',
);
| madd15/snipe-it | resources/lang/nl/admin/depreciations/general.php | PHP | agpl-3.0 | 528 |
# Copyright (C) 2006-2013 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
#! /usr/bin/env python
import sys, os
import essentia, essentia.standard, essentia.streaming
from essentia.streaming import *
from numpy import argmax, log10, mean, tanh
dynamicFrameSize = 88200
dynamicHopSize = 44100
analysisSampleRate = 44100.0
# expects the audio source to already be equal-loudness filtered
class LevelExtractor(essentia.streaming.CompositeBase):
#"""describes the dynamics of an audio signal"""
def __init__(self, frameSize=dynamicFrameSize, hopSize=dynamicHopSize):
super(LevelExtractor, self).__init__()
fc = FrameCutter(frameSize=frameSize,
hopSize=hopSize,
startFromZero=True,
silentFrames='noise')
dy = Loudness()
fc.frame >> dy.signal
# define inputs:
self.inputs['signal'] = fc.signal
# define outputs:
self.outputs['loudness'] = dy.loudness
def squeezeRange(x, x1, x2):
return 0.5 + 0.5 * tanh(-1.0 + 2.0 * (x - x1) / (x2 - x1))
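# A worked example of this squeeze mapping (sketch, not part of the original code):
# with the x1 = -5.0, x2 = -2.0 range used in levelAverage below, an average level
# of -5 dB maps to 0.5 + 0.5*tanh(-1) ~= 0.12, the midpoint -3.5 dB maps to 0.5,
# and -2 dB maps to 0.5 + 0.5*tanh(1) ~= 0.88; values outside [x1, x2] saturate
# smoothly towards 0 and 1.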
def levelAverage(pool, namespace=''):
epsilon = 1e-4
threshold = 1e-4 # -80dB
if namespace: namespace += '.lowlevel.'
else: namespace = 'lowlevel.'
loudness = pool[namespace + 'loudness']
pool.remove(namespace + 'loudness')
maxValue = loudness[argmax(loudness)]
if maxValue <= epsilon: maxValue = epsilon
# normalization of the maximum:
def f(x):
x /= float(maxValue)
if x <= threshold : return threshold
return x
    loudness = [f(x) for x in loudness]  # list comprehension so the numpy mean() below also works on Python 3
# average level:
levelAverage = 10.0*log10(mean(loudness))
# Re-scaling and range-control
    # This yields numbers between
    # 0 for signals with a large dynamic variance and thus a low dynamic average
    # and 1 for signals with little dynamic range and thus
    # a dynamic average close to the maximum
x1 = -5.0
x2 = -2.0
levelAverageSqueezed = squeezeRange(levelAverage, x1, x2)
pool.set(namespace + 'average_loudness', levelAverageSqueezed)
usage = 'level.py [options] <inputfilename> <outputfilename>'
def parse_args():
import numpy
essentia_version = '%s\n'\
'python version: %s\n'\
'numpy version: %s' % (essentia.__version__, # full version
sys.version.split()[0], # python major version
numpy.__version__) # numpy version
from optparse import OptionParser
parser = OptionParser(usage=usage, version=essentia_version)
parser.add_option("-c","--cpp", action="store_true", dest="generate_cpp",
help="generate cpp code from CompositeBase algorithm")
parser.add_option("-d", "--dot", action="store_true", dest="generate_dot",
help="generate dot and cpp code from CompositeBase algorithm")
(options, args) = parser.parse_args()
return options, args
if __name__ == '__main__':
opts, args = parse_args()
if len(args) != 2:
cmd = './'+os.path.basename(sys.argv[0])+ ' -h'
os.system(cmd)
sys.exit(1)
if opts.generate_dot:
essentia.translate(LevelExtractor, 'streaming_extractorlevel', dot_graph=True)
elif opts.generate_cpp:
essentia.translate(LevelExtractor, 'streaming_extractorlevel', dot_graph=False)
# find out replay gain:
loader = EqloudLoader(filename=args[0],
sampleRate=analysisSampleRate,
downmix='mix')
rgain = ReplayGain(applyEqloud=False)
pool = essentia.Pool()
loader.audio >> rgain.signal
rgain.replayGain >> (pool, 'replay_gain')
essentia.run(loader)
# get average level:
loader = EqloudLoader(filename=args[0],
replayGain=pool['replay_gain'],
sampleRate=analysisSampleRate,
downmix='mix')
levelExtractor = LevelExtractor()
loader.audio >> levelExtractor.signal
levelExtractor.loudness >> (pool, 'lowlevel.loudness')
essentia.run(loader)
levelAverage(pool)
essentia.standard.YamlOutput(filename=args[1])(pool)
| GiantSteps/essentia | src/examples/python/streaming_extractor/level.py | Python | agpl-3.0 | 4,852 |
/*
* BioJava development code
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. If you do not have a copy,
* see:
*
* http://www.gnu.org/copyleft/lesser.html
*
* Copyright for this code is held jointly by the individual
* authors. These should be listed in @author doc comments.
*
* For more information on the BioJava project and its aims,
* or to join the biojava-l mailing list, visit the home page
* at:
*
* http://www.biojava.org/
*
*/
package org.biojava.nbio.structure.io.mmcif.model;
/** A bean for the PDBX_NONPOLY_SCHEME category, which provides residue level nomenclature
* mapping for non-polymer entities.
* @author Andreas Prlic
* @since 1.7
*/
public class PdbxNonPolyScheme {
String asym_id;
String entity_id;
String seq_id;
String mon_id;
String ndb_seq_num;
	String pdb_seq_num;
	String auth_seq_num;
String pdb_mon_id;
String auth_mon_id;
String pdb_strand_id;
String pdb_ins_code;
public String getAsym_id() {
return asym_id;
}
public void setAsym_id(String asym_id) {
this.asym_id = asym_id;
}
public String getEntity_id() {
return entity_id;
}
public void setEntity_id(String entity_id) {
this.entity_id = entity_id;
}
public String getSeq_id() {
return seq_id;
}
public void setSeq_id(String seq_id) {
this.seq_id = seq_id;
}
public String getMon_id() {
return mon_id;
}
public void setMon_id(String mon_id) {
this.mon_id = mon_id;
}
public String getNdb_seq_num() {
return ndb_seq_num;
}
public void setNdb_seq_num(String ndb_seq_num) {
this.ndb_seq_num = ndb_seq_num;
}
public String getPdb_seq_num() {
return pdb_seq_num;
}
public void setPdb_seq_num(String pdb_seq_num) {
this.pdb_seq_num = pdb_seq_num;
}
public String getAuth_seq_num() {
return auth_seq_num;
}
public void setAuth_seq_num(String auth_seq_num) {
this.auth_seq_num = auth_seq_num;
}
public String getPdb_mon_id() {
return pdb_mon_id;
}
public void setPdb_mon_id(String pdb_mon_id) {
this.pdb_mon_id = pdb_mon_id;
}
public String getAuth_mon_id() {
return auth_mon_id;
}
public void setAuth_mon_id(String auth_mon_id) {
this.auth_mon_id = auth_mon_id;
}
public String getPdb_strand_id() {
return pdb_strand_id;
}
public void setPdb_strand_id(String pdb_strand_id) {
this.pdb_strand_id = pdb_strand_id;
}
public String getPdb_ins_code() {
return pdb_ins_code;
}
public void setPdb_ins_code(String pdb_ins_code) {
this.pdb_ins_code = pdb_ins_code;
}
}
| fionakim/biojava | biojava-structure/src/main/java/org/biojava/nbio/structure/io/mmcif/model/PdbxNonPolyScheme.java | Java | lgpl-2.1 | 2,607 |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2008, Red Hat Middleware LLC, and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.wildfly.iiop.openjdk.rmi.ir;
import org.omg.CORBA.IDLTypeOperations;
/**
* Interface of local IDL types.
*
* @author <a href="mailto:osh@sparre.dk">Ole Husgaard</a>
* @version $Revision: 81018 $
*/
interface LocalIDLType extends IDLTypeOperations, LocalIRObject {
}
| xasx/wildfly | iiop-openjdk/src/main/java/org/wildfly/iiop/openjdk/rmi/ir/LocalIDLType.java | Java | lgpl-2.1 | 1,319 |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.formatting;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* Allows a custom language plugin to build a formatting model for a file in the language, or
* for a portion of a file.
* A formatting model defines how a file is broken into non-whitespace blocks and different
* types of whitespace (alignment, indents and wraps) between them.
* <p>For certain aspects of the custom formatting to work properly, it is recommended to use TokenType.WHITE_SPACE
* as the language's whitespace tokens. See {@link com.intellij.lang.ParserDefinition}
*
 * @apiNote in case you are getting a {@link StackOverflowError} with your builder, most likely you haven't implemented any model building
 * methods. Please implement {@link #createModel(FormattingContext)}
* @see com.intellij.lang.LanguageFormatting
* @see FormattingModelProvider#createFormattingModelForPsiFile(PsiFile, Block, CodeStyleSettings)
*/
public interface FormattingModelBuilder {
/**
* Requests building the formatting model for a section of the file containing
* the specified PSI element and its children.
*
* @return the formatting model for the file.
* @see FormattingContext
*/
default @NotNull FormattingModel createModel(@NotNull FormattingContext formattingContext) {
return createModel(formattingContext.getPsiElement(),
formattingContext.getFormattingRange(),
formattingContext.getCodeStyleSettings(),
formattingContext.getFormattingMode());
}
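  // A minimal implementation sketch (illustration only, not part of this interface;
  // MyFormattingModelBuilder and MyRootBlock are assumed names for a plugin's own classes):
  //
  //   public final class MyFormattingModelBuilder implements FormattingModelBuilder {
  //     @Override
  //     public @NotNull FormattingModel createModel(@NotNull FormattingContext formattingContext) {
  //       PsiFile file = formattingContext.getPsiElement().getContainingFile();
  //       Block rootBlock = new MyRootBlock(file.getNode());
  //       return FormattingModelProvider.createFormattingModelForPsiFile(
  //         file, rootBlock, formattingContext.getCodeStyleSettings());
  //     }
  //   }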
/**
* Returns the TextRange which should be processed by the formatter in order to detect proper indent options.
*
* @param file the file in which the line break is inserted.
* @param offset the line break offset.
   * @param elementAtOffset the element at {@code offset}
* @return the range to reformat, or null if the default range should be used
*/
default @Nullable TextRange getRangeAffectingIndent(PsiFile file, int offset, ASTNode elementAtOffset) {
return null;
}
/**
* @deprecated use {@link #createModel(FormattingContext)}
*/
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2021.1")
default @NotNull FormattingModel createModel(final @NotNull PsiElement element,
final @NotNull TextRange range,
final @NotNull CodeStyleSettings settings,
final @NotNull FormattingMode mode) {
return createModel(element, settings, mode); // just for compatibility with old implementations
}
/**
* @deprecated use {@link #createModel(FormattingContext)}
*/
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2021.1")
default @NotNull FormattingModel createModel(final @NotNull PsiElement element,
final @NotNull CodeStyleSettings settings,
@NotNull FormattingMode mode) {
return createModel(element, settings);
}
/**
* @deprecated use {@link #createModel(FormattingContext)}
*/
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2021.1")
default @NotNull FormattingModel createModel(final PsiElement element, final CodeStyleSettings settings) {
return createModel(FormattingContext.create(element, settings));
}
}
| siosio/intellij-community | platform/code-style-api/src/com/intellij/formatting/FormattingModelBuilder.java | Java | apache-2.0 | 3,839 |
/*
* Copyright 2014 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apiman.manager.api.beans.clients;
/**
* The various client statuses.
*
* @author eric.wittmann@redhat.com
*/
public enum ClientStatus {
Created, Ready, Registered, Retired
}
| jasonchaffee/apiman | manager/api/beans/src/main/java/io/apiman/manager/api/beans/clients/ClientStatus.java | Java | apache-2.0 | 790 |
/*
* Copyright 2015-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.ui.table;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.onosproject.ui.JsonUtils;
import org.onosproject.ui.RequestHandler;
import static org.onosproject.ui.table.TableModel.sortDir;
/**
* Message handler specifically for table views.
*/
public abstract class TableRequestHandler extends RequestHandler {
private static final String FIRST_COL = "firstCol";
private static final String FIRST_DIR = "firstDir";
private static final String SECOND_COL = "secondCol";
private static final String SECOND_DIR = "secondDir";
private static final String ASC = "asc";
private static final String ANNOTS = "annots";
private static final String NO_ROWS_MSG_KEY = "no_rows_msg";
private final String respType;
private final String nodeName;
/**
* Constructs a table request handler for a specific table view. When
* table requests come in, the handler will generate the appropriate
     * table rows, sort them according to the request sort parameters, and
* send back the response to the client.
*
* @param reqType type of the request event
* @param respType type of the response event
* @param nodeName name of JSON node holding row data
*/
public TableRequestHandler(String reqType, String respType, String nodeName) {
super(reqType);
this.respType = respType;
this.nodeName = nodeName;
}
@Override
public void process(long sid, ObjectNode payload) {
TableModel tm = createTableModel();
populateTable(tm, payload);
String firstCol = JsonUtils.string(payload, FIRST_COL, defaultColumnId());
String firstDir = JsonUtils.string(payload, FIRST_DIR, ASC);
String secondCol = JsonUtils.string(payload, SECOND_COL, null);
String secondDir = JsonUtils.string(payload, SECOND_DIR, null);
tm.sort(firstCol, sortDir(firstDir), secondCol, sortDir(secondDir));
addTableConfigAnnotations(tm, payload);
ObjectNode rootNode = MAPPER.createObjectNode();
rootNode.set(nodeName, TableUtils.generateRowArrayNode(tm));
rootNode.set(ANNOTS, TableUtils.generateAnnotObjectNode(tm));
sendMessage(respType, 0, rootNode);
}
/**
* Creates the table model (devoid of data) using {@link #getColumnIds()}
* to initialize it, ready to be populated.
* <p>
* This default implementation returns a table model with default
* formatters and comparators for all columns.
*
* @return an empty table model
*/
protected TableModel createTableModel() {
return new TableModel(getColumnIds());
}
/**
* Adds table configuration specific annotations to table model.
*
* @param tm a table model
* @param payload the event payload from the client
*/
protected void addTableConfigAnnotations(TableModel tm, ObjectNode payload) {
tm.addAnnotation(NO_ROWS_MSG_KEY, noRowsMessage(payload));
}
/**
* Returns the default column ID to be used when one is not supplied in
* the payload as the column on which to sort.
* <p>
* This default implementation returns "id".
*
* @return default sort column identifier
*/
protected String defaultColumnId() {
return "id";
}
/**
* Subclasses should return the array of column IDs with which
* to initialize their table model.
*
* @return the column IDs
*/
protected abstract String[] getColumnIds();
/**
* Subclasses should return the message to display in the table when there
* are no rows to display. For example, a host table might return
* "No hosts found".
*
* @param payload request payload
* @return the message
*/
protected abstract String noRowsMessage(ObjectNode payload);
/**
* Subclasses should populate the table model by adding
* {@link TableModel.Row rows}.
* <pre>
* tm.addRow()
* .cell(COL_ONE, ...)
* .cell(COL_TWO, ...)
* ... ;
* </pre>
* The request payload is provided in case there are request filtering
* parameters (other than sort column and sort direction) that are required
* to generate the appropriate data.
*
* @param tm the table model
* @param payload request payload
*/
protected abstract void populateTable(TableModel tm, ObjectNode payload);
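    // A hedged subclass sketch (DeviceTableHandler, the event names and the columns
    // below are assumed for illustration and are not part of this class):
    //
    //   private static final class DeviceTableHandler extends TableRequestHandler {
    //       DeviceTableHandler() {
    //           super("deviceDataRequest", "deviceDataResponse", "devices");
    //       }
    //       @Override
    //       protected String[] getColumnIds() {
    //           return new String[] { "id", "name" };
    //       }
    //       @Override
    //       protected String noRowsMessage(ObjectNode payload) {
    //           return "No devices found";
    //       }
    //       @Override
    //       protected void populateTable(TableModel tm, ObjectNode payload) {
    //           tm.addRow().cell("id", "dev-1").cell("name", "example");
    //       }
    //   }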
}
 | VinodKumarS-Huawei/ietf96yang | core/api/src/main/java/org/onosproject/ui/table/TableRequestHandler.java | Java | apache-2.0 | 5,111
/*
* Copyright (c) WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.entitlement.ui;
/**
* Policy editor related constants
*/
public class PolicyEditorConstants {
public static final String ATTRIBUTE_SEPARATOR = ",";
public static final String TARGET_ELEMENT = "Target";
public static final String ANY_OF_ELEMENT = "AnyOf";
public static final String ALL_OF_ELEMENT = "AllOf";
public static final String COMBINE_FUNCTION_AND = "AND";
public static final String COMBINE_FUNCTION_OR = "OR";
public static final String COMBINE_FUNCTION_END = "END";
public static final String MATCH_ELEMENT = "Match";
public static final String MATCH_ID = "MatchId";
public static final String ATTRIBUTE_ID = "AttributeId";
public static final String CATEGORY = "Category";
public static final String DATA_TYPE = "DataType";
public static final String ISSUER = "Issuer";
public static final String SOA_CATEGORY_USER = "Subject";
public static final String SOA_CATEGORY_SUBJECT = "Subject";
public static final String SOA_CATEGORY_RESOURCE = "Resource";
public static final String SOA_CATEGORY_ACTION = "Action";
public static final String SOA_CATEGORY_ENVIRONMENT = "Environment";
public static final String MUST_BE_PRESENT = "MustBePresent";
public static final String ATTRIBUTE_DESIGNATOR = "AttributeDesignator";
public static final class PreFunctions {
public static final String PRE_FUNCTION_IS = "is";
public static final String PRE_FUNCTION_IS_NOT = "is-not";
public static final String PRE_FUNCTION_ARE = "are";
public static final String PRE_FUNCTION_ARE_NOT = "are-not";
public static final String CAN_DO = "can";
public static final String CAN_NOT_DO = "can not";
}
public static final class TargetPreFunctions {
public static final String PRE_FUNCTION_IS = "is";
}
public static final class TargetFunctions {
public static final String FUNCTION_EQUAL = "equal";
}
public static final String RULE_EFFECT_PERMIT = "Permit";
public static final String RULE_EFFECT_DENY = "Deny";
public static final class DataType {
public static final String DAY_TIME_DURATION = "http://www.w3.org/2001/XMLSchema#dayTimeDuration";
public static final String YEAR_MONTH_DURATION = "http://www.w3.org/2001/XMLSchema#yearMonthDuration";
public static final String STRING = "http://www.w3.org/2001/XMLSchema#string";
public static final String TIME = "http://www.w3.org/2001/XMLSchema#time";
public static final String IP_ADDRESS = "urn:oasis:names:tc:xacml:2.0:data-type:ipAddress";
public static final String DATE_TIME = "http://www.w3.org/2001/XMLSchema#dateTime";
public static final String DATE = "http://www.w3.org/2001/XMLSchema#date";
public static final String DOUBLE = "http://www.w3.org/2001/XMLSchema#double";
public static final String INT = "http://www.w3.org/2001/XMLSchema#integer";
}
public static final class CombiningAlog {
public static final String DENY_OVERRIDE_ID = "deny-overrides";
public static final String PERMIT_OVERRIDE_ID = "permit-overrides";
public static final String FIRST_APPLICABLE_ID = "first-applicable";
public static final String ORDER_PERMIT_OVERRIDE_ID = "ordered-permit-overrides";
public static final String ORDER_DENY_OVERRIDE_ID = "ordered-deny-overrides";
public static final String DENY_UNLESS_PERMIT_ID = "deny-unless-permit";
public static final String PERMIT_UNLESS_DENY_ID = "permit-unless-deny";
public static final String ONLY_ONE_APPLICABLE_ID = "only-one-applicable";
}
public static final String RULE_ALGORITHM_IDENTIFIER_1 = "urn:oasis:names:tc:xacml:1.0:" +
"rule-combining-algorithm:";
public static final String RULE_ALGORITHM_IDENTIFIER_3 = "urn:oasis:names:tc:xacml:3.0:" +
"rule-combining-algorithm:";
public static final String POLICY_ALGORITHM_IDENTIFIER_1 = "urn:oasis:names:tc:xacml:1.0:" +
"policy-combining-algorithm:";
public static final String POLICY_ALGORITHM_IDENTIFIER_3 = "urn:oasis:names:tc:xacml:3.0:" +
"policy-combining-algorithm:";
public static final String POLICY_EDITOR_SEPARATOR = "|";
public static final int POLICY_EDITOR_ROW_DATA = 7;
public static final String DYNAMIC_SELECTOR_CATEGORY = "Category";
public static final String DYNAMIC_SELECTOR_FUNCTION = "Function";
    public static final String SUBJECT_ID_DEFAULT = "urn:oasis:names:tc:xacml:1.0:subject:subject-id";
    public static final String SUBJECT_ID_ROLE = "http://wso2.org/claims/role";
public static final String RESOURCE_ID_DEFAULT = "urn:oasis:names:tc:xacml:1.0:resource:resource-id";
public static final String ACTION_ID_DEFAULT = "urn:oasis:names:tc:xacml:1.0:action:action-id";
public static final String ENVIRONMENT_ID_DEFAULT = "urn:oasis:names:tc:xacml:1.0:environment:environment-id";
public static final String RESOURCE_CATEGORY_URI = "urn:oasis:names:tc:xacml:3.0:" +
"attribute-category:resource";
public static final String SUBJECT_CATEGORY_URI = "urn:oasis:names:tc:xacml:1.0:" +
"subject-category:access-subject";
public static final String ACTION_CATEGORY_URI = "urn:oasis:names:tc:xacml:3.0:" +
"attribute-category:action";
public static final String ENVIRONMENT_CATEGORY_URI = "urn:oasis:names:tc:xacml:3.0:" +
"attribute-category:environment";
public static final String ENVIRONMENT_CURRENT_DATE = "urn:oasis:names:tc:xacml:1.0:environment:current-date";
public static final String ENVIRONMENT_CURRENT_TIME = "urn:oasis:names:tc:xacml:1.0:environment:current-time";
public static final String ENVIRONMENT_CURRENT_DATETIME = "urn:oasis:names:tc:xacml:1.0:environment:current-dateTime";
public static final String SOA_POLICY_EDITOR = "SOA";
public static class FunctionIdentifier {
public static final String ANY = "*";
public static final String EQUAL_RANGE = "[";
public static final String EQUAL_RANGE_CLOSE = "]";
public static final String RANGE = "(";
public static final String RANGE_CLOSE = ")";
public static final String GREATER = ">";
public static final String GREATER_EQUAL = ">=";
public static final String LESS = "<";
public static final String LESS_EQUAL = "<=";
public static final String REGEX = "{";
public static final String AND = "&";
public static final String OR = "|";
}
public static final class AttributeId {
public static final String ENV_DOMAIN = "Domain";
public static final String ENV_DATE = "Date";
public static final String ENV_DATE_TIME = "DateTime";
public static final String ENV_IP = "IP";
public static final String ENV_TIME = "Time";
public static final String USER_AGE = "Age";
}
}
| SupunS/carbon-identity | components/identity/org.wso2.carbon.identity.entitlement.ui/src/main/java/org/wso2/carbon/identity/entitlement/ui/PolicyEditorConstants.java | Java | apache-2.0 | 8,025 |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.psi.impl;
import com.intellij.lang.ASTNode;
import com.intellij.navigation.ItemPresentation;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiReference;
import com.intellij.psi.StubBasedPsiElement;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.search.SearchScope;
import com.intellij.psi.stubs.IStubElementType;
import com.intellij.psi.stubs.StubElement;
import com.intellij.psi.util.CachedValueProvider;
import com.intellij.psi.util.CachedValuesManager;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.QualifiedName;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.PlatformIcons;
import com.jetbrains.python.PyElementTypes;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.PyTokenTypes;
import com.jetbrains.python.codeInsight.controlflow.ControlFlowCache;
import com.jetbrains.python.codeInsight.controlflow.ScopeOwner;
import com.jetbrains.python.codeInsight.dataflow.scope.ScopeUtil;
import com.jetbrains.python.documentation.docstrings.DocStringUtil;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.resolve.PyResolveContext;
import com.jetbrains.python.psi.resolve.QualifiedNameFinder;
import com.jetbrains.python.psi.stubs.PyClassStub;
import com.jetbrains.python.psi.stubs.PyFunctionStub;
import com.jetbrains.python.psi.stubs.PyTargetExpressionStub;
import com.jetbrains.python.psi.types.*;
import com.jetbrains.python.sdk.PythonSdkType;
import icons.PythonIcons;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.*;
import static com.intellij.openapi.util.text.StringUtil.notNullize;
import static com.jetbrains.python.psi.PyFunction.Modifier.CLASSMETHOD;
import static com.jetbrains.python.psi.PyFunction.Modifier.STATICMETHOD;
import static com.jetbrains.python.psi.impl.PyCallExpressionHelper.interpretAsModifierWrappingCall;
/**
* Implements PyFunction.
*/
public class PyFunctionImpl extends PyBaseElementImpl<PyFunctionStub> implements PyFunction {
public PyFunctionImpl(ASTNode astNode) {
super(astNode);
}
public PyFunctionImpl(final PyFunctionStub stub) {
this(stub, PyElementTypes.FUNCTION_DECLARATION);
}
public PyFunctionImpl(PyFunctionStub stub, IStubElementType nodeType) {
super(stub, nodeType);
}
private class CachedStructuredDocStringProvider implements CachedValueProvider<StructuredDocString> {
@Nullable
@Override
public Result<StructuredDocString> compute() {
final PyFunctionImpl f = PyFunctionImpl.this;
return Result.create(DocStringUtil.getStructuredDocString(f), f);
}
}
private CachedStructuredDocStringProvider myCachedStructuredDocStringProvider = new CachedStructuredDocStringProvider();
@Nullable
@Override
public String getName() {
final PyFunctionStub stub = getStub();
if (stub != null) {
return stub.getName();
}
ASTNode node = getNameNode();
return node != null ? node.getText() : null;
}
public PsiElement getNameIdentifier() {
final ASTNode nameNode = getNameNode();
return nameNode != null ? nameNode.getPsi() : null;
}
public PsiElement setName(@NotNull String name) throws IncorrectOperationException {
final ASTNode nameElement = PyUtil.createNewName(this, name);
final ASTNode nameNode = getNameNode();
if (nameNode != null) {
getNode().replaceChild(nameNode, nameElement);
}
return this;
}
@Override
public Icon getIcon(int flags) {
if (isValid()) {
final Property property = getProperty();
if (property != null) {
if (property.getGetter().valueOrNull() == this) {
return PythonIcons.Python.PropertyGetter;
}
if (property.getSetter().valueOrNull() == this) {
return PythonIcons.Python.PropertySetter;
}
if (property.getDeleter().valueOrNull() == this) {
return PythonIcons.Python.PropertyDeleter;
}
return PlatformIcons.PROPERTY_ICON;
}
if (getContainingClass() != null) {
return PlatformIcons.METHOD_ICON;
}
}
return PythonIcons.Python.Function;
}
@Nullable
public ASTNode getNameNode() {
return getNode().findChildByType(PyTokenTypes.IDENTIFIER);
}
@NotNull
public PyParameterList getParameterList() {
return getRequiredStubOrPsiChild(PyElementTypes.PARAMETER_LIST);
}
@Override
@NotNull
public PyStatementList getStatementList() {
final PyStatementList statementList = childToPsi(PyElementTypes.STATEMENT_LIST);
assert statementList != null : "Statement list missing for function " + getText();
return statementList;
}
public PyClass getContainingClass() {
final PyFunctionStub stub = getStub();
if (stub != null) {
final StubElement parentStub = stub.getParentStub();
if (parentStub instanceof PyClassStub) {
return ((PyClassStub)parentStub).getPsi();
}
return null;
}
final PsiElement parent = PsiTreeUtil.getParentOfType(this, StubBasedPsiElement.class);
if (parent instanceof PyClass) {
return (PyClass)parent;
}
return null;
}
@Nullable
public PyDecoratorList getDecoratorList() {
return getStubOrPsiChild(PyElementTypes.DECORATOR_LIST); // PsiTreeUtil.getChildOfType(this, PyDecoratorList.class);
}
@Nullable
@Override
public PyType getReturnType(@NotNull TypeEvalContext context, @NotNull TypeEvalContext.Key key) {
final PyType type = getReturnType(context);
return isAsync() ? createCoroutineType(type) : type;
}
@Nullable
private PyType getReturnType(@NotNull TypeEvalContext context) {
for (PyTypeProvider typeProvider : Extensions.getExtensions(PyTypeProvider.EP_NAME)) {
final Ref<PyType> returnTypeRef = typeProvider.getReturnType(this, context);
if (returnTypeRef != null) {
final PyType returnType = returnTypeRef.get();
if (returnType != null) {
returnType.assertValid(typeProvider.toString());
}
return returnType;
}
}
final PyType docStringType = getReturnTypeFromDocString();
if (docStringType != null) {
docStringType.assertValid("from docstring");
return docStringType;
}
if (context.allowReturnTypes(this)) {
final Ref<? extends PyType> yieldTypeRef = getYieldStatementType(context);
if (yieldTypeRef != null) {
return yieldTypeRef.get();
}
return getReturnStatementType(context);
}
return null;
}
@Nullable
@Override
public PyType getCallType(@NotNull TypeEvalContext context, @NotNull PyCallSiteExpression callSite) {
for (PyTypeProvider typeProvider : Extensions.getExtensions(PyTypeProvider.EP_NAME)) {
final PyType type = typeProvider.getCallType(this, callSite, context);
if (type != null) {
type.assertValid(typeProvider.toString());
return type;
}
}
final PyExpression receiver = PyTypeChecker.getReceiver(callSite, this);
final List<PyExpression> arguments = PyTypeChecker.getArguments(callSite, this);
final List<PyParameter> parameters = PyUtil.getParameters(this, context);
final PyResolveContext resolveContext = PyResolveContext.noImplicits().withTypeEvalContext(context);
final List<PyParameter> explicitParameters = PyTypeChecker.filterExplicitParameters(parameters, this, callSite, resolveContext);
final Map<PyExpression, PyNamedParameter> mapping = PyCallExpressionHelper.mapArguments(arguments, explicitParameters);
return getCallType(receiver, mapping, context);
}
@Nullable
@Override
public PyType getCallType(@Nullable PyExpression receiver,
@NotNull Map<PyExpression, PyNamedParameter> parameters,
@NotNull TypeEvalContext context) {
return analyzeCallType(context.getReturnType(this), receiver, parameters, context);
}
@Nullable
private PyType analyzeCallType(@Nullable PyType type,
@Nullable PyExpression receiver,
@NotNull Map<PyExpression, PyNamedParameter> parameters,
@NotNull TypeEvalContext context) {
if (PyTypeChecker.hasGenerics(type, context)) {
final Map<PyGenericType, PyType> substitutions = PyTypeChecker.unifyGenericCall(receiver, parameters, context);
if (substitutions != null) {
type = PyTypeChecker.substitute(type, substitutions, context);
}
else {
type = null;
}
}
if (receiver != null) {
type = replaceSelf(type, receiver, context);
}
if (type != null && isDynamicallyEvaluated(parameters.values(), context)) {
type = PyUnionType.createWeakType(type);
}
return type;
}
@Override
public ItemPresentation getPresentation() {
return new PyElementPresentation(this) {
@Nullable
@Override
public String getPresentableText() {
return notNullize(getName(), PyNames.UNNAMED_ELEMENT) + getParameterList().getPresentableText(true);
}
@Nullable
@Override
public String getLocationString() {
final PyClass containingClass = getContainingClass();
if (containingClass != null) {
return "(" + containingClass.getName() + " in " + getPackageForFile(getContainingFile()) + ")";
}
return super.getLocationString();
}
};
}
@Nullable
private PyType replaceSelf(@Nullable PyType returnType, @Nullable PyExpression receiver, @NotNull TypeEvalContext context) {
if (receiver != null) {
// TODO: Currently we substitute only simple subclass types, but we could handle union and collection types as well
if (returnType instanceof PyClassType) {
final PyClassType returnClassType = (PyClassType)returnType;
if (returnClassType.getPyClass() == getContainingClass()) {
final PyType receiverType = context.getType(receiver);
if (receiverType instanceof PyClassType && PyTypeChecker.match(returnType, receiverType, context)) {
return returnClassType.isDefinition() ? receiverType : ((PyClassType)receiverType).toInstance();
}
}
}
}
return returnType;
}
private static boolean isDynamicallyEvaluated(@NotNull Collection<PyNamedParameter> parameters, @NotNull TypeEvalContext context) {
for (PyNamedParameter parameter : parameters) {
final PyType type = context.getType(parameter);
if (type instanceof PyDynamicallyEvaluatedType) {
return true;
}
}
return false;
}
@Nullable
private Ref<? extends PyType> getYieldStatementType(@NotNull final TypeEvalContext context) {
Ref<PyType> elementType = null;
final PyBuiltinCache cache = PyBuiltinCache.getInstance(this);
final PyStatementList statements = getStatementList();
final Set<PyType> types = new LinkedHashSet<PyType>();
statements.accept(new PyRecursiveElementVisitor() {
@Override
public void visitPyYieldExpression(PyYieldExpression node) {
final PyType type = context.getType(node);
if (node.isDelegating() && type instanceof PyCollectionType) {
final PyCollectionType collectionType = (PyCollectionType)type;
// TODO: Select the parameter types that matches T in Iterable[T]
final List<PyType> elementTypes = collectionType.getElementTypes(context);
types.add(elementTypes.isEmpty() ? null : elementTypes.get(0));
}
else {
types.add(type);
}
}
@Override
public void visitPyFunction(PyFunction node) {
// Ignore nested functions
}
});
final int n = types.size();
if (n == 1) {
elementType = Ref.create(types.iterator().next());
}
else if (n > 0) {
elementType = Ref.create(PyUnionType.union(types));
}
if (elementType != null) {
final PyClass generator = cache.getClass(PyNames.FAKE_GENERATOR);
if (generator != null) {
final List<PyType> parameters = Arrays.asList(elementType.get(), null, getReturnStatementType(context));
return Ref.create(new PyCollectionTypeImpl(generator, false, parameters));
}
}
if (!types.isEmpty()) {
return Ref.create(null);
}
return null;
}
@Nullable
public PyType getReturnStatementType(TypeEvalContext typeEvalContext) {
final ReturnVisitor visitor = new ReturnVisitor(this, typeEvalContext);
final PyStatementList statements = getStatementList();
statements.accept(visitor);
if (isGeneratedStub() && !visitor.myHasReturns) {
if (PyNames.INIT.equals(getName())) {
return PyNoneType.INSTANCE;
}
return null;
}
return visitor.result();
}
@Nullable
private PyType createCoroutineType(@Nullable PyType returnType) {
final PyBuiltinCache cache = PyBuiltinCache.getInstance(this);
if (returnType instanceof PyClassLikeType && PyNames.FAKE_COROUTINE.equals(((PyClassLikeType)returnType).getClassQName())) {
return returnType;
}
final PyClass generator = cache.getClass(PyNames.FAKE_COROUTINE);
return generator != null ? new PyCollectionTypeImpl(generator, false, Collections.singletonList(returnType)) : null;
}
public PyFunction asMethod() {
if (getContainingClass() != null) {
return this;
}
else {
return null;
}
}
@Nullable
@Override
public PyType getReturnTypeFromDocString() {
final String typeName = extractReturnType();
return typeName != null ? PyTypeParser.getTypeByName(this, typeName) : null;
}
@Nullable
@Override
public String getDeprecationMessage() {
PyFunctionStub stub = getStub();
if (stub != null) {
return stub.getDeprecationMessage();
}
return extractDeprecationMessage();
}
@Nullable
public String extractDeprecationMessage() {
PyStatementList statementList = getStatementList();
return extractDeprecationMessage(Arrays.asList(statementList.getStatements()));
}
@Override
public PyType getType(@NotNull TypeEvalContext context, @NotNull TypeEvalContext.Key key) {
for (PyTypeProvider provider : Extensions.getExtensions(PyTypeProvider.EP_NAME)) {
final PyType type = provider.getCallableType(this, context);
if (type != null) {
return type;
}
}
final PyFunctionTypeImpl type = new PyFunctionTypeImpl(this);
if (PyKnownDecoratorUtil.hasUnknownDecorator(this, context) && getProperty() == null) {
return PyUnionType.createWeakType(type);
}
return type;
}
@Nullable
public static String extractDeprecationMessage(List<PyStatement> statements) {
for (PyStatement statement : statements) {
if (statement instanceof PyExpressionStatement) {
PyExpressionStatement expressionStatement = (PyExpressionStatement)statement;
if (expressionStatement.getExpression() instanceof PyCallExpression) {
PyCallExpression callExpression = (PyCallExpression)expressionStatement.getExpression();
if (callExpression.isCalleeText(PyNames.WARN)) {
PyReferenceExpression warningClass = callExpression.getArgument(1, PyReferenceExpression.class);
if (warningClass != null && (PyNames.DEPRECATION_WARNING.equals(warningClass.getReferencedName()) ||
PyNames.PENDING_DEPRECATION_WARNING.equals(warningClass.getReferencedName()))) {
return PyPsiUtils.strValue(callExpression.getArguments()[0]);
}
}
}
}
}
return null;
}
@Override
public String getDocStringValue() {
final PyFunctionStub stub = getStub();
if (stub != null) {
return stub.getDocString();
}
return DocStringUtil.getDocStringValue(this);
}
@Nullable
@Override
public StructuredDocString getStructuredDocString() {
return CachedValuesManager.getCachedValue(this, myCachedStructuredDocStringProvider);
}
private boolean isGeneratedStub() {
VirtualFile vFile = getContainingFile().getVirtualFile();
if (vFile != null) {
vFile = vFile.getParent();
if (vFile != null) {
vFile = vFile.getParent();
if (vFile != null && vFile.getName().equals(PythonSdkType.SKELETON_DIR_NAME)) {
return true;
}
}
}
return false;
}
@Nullable
private String extractReturnType() {
final String ARROW = "->";
final StructuredDocString structuredDocString = getStructuredDocString();
if (structuredDocString != null) {
return structuredDocString.getReturnType();
}
final String docString = getDocStringValue();
if (docString != null && docString.contains(ARROW)) {
final List<String> lines = StringUtil.split(docString, "\n");
while (lines.size() > 0 && lines.get(0).trim().length() == 0) {
lines.remove(0);
}
if (lines.size() > 1 && lines.get(1).trim().length() == 0) {
String firstLine = lines.get(0);
int pos = firstLine.lastIndexOf(ARROW);
if (pos >= 0) {
return firstLine.substring(pos + 2).trim();
}
}
}
return null;
}
private static class ReturnVisitor extends PyRecursiveElementVisitor {
private final PyFunction myFunction;
private final TypeEvalContext myContext;
private PyType myResult = null;
private boolean myHasReturns = false;
private boolean myHasRaises = false;
public ReturnVisitor(PyFunction function, final TypeEvalContext context) {
myFunction = function;
myContext = context;
}
@Override
public void visitPyReturnStatement(PyReturnStatement node) {
if (PsiTreeUtil.getParentOfType(node, ScopeOwner.class, true) == myFunction) {
final PyExpression expr = node.getExpression();
PyType returnType;
returnType = expr == null ? PyNoneType.INSTANCE : myContext.getType(expr);
if (!myHasReturns) {
myResult = returnType;
myHasReturns = true;
}
else {
myResult = PyUnionType.union(myResult, returnType);
}
}
}
@Override
public void visitPyRaiseStatement(PyRaiseStatement node) {
myHasRaises = true;
}
@Nullable
PyType result() {
return myHasReturns || myHasRaises ? myResult : PyNoneType.INSTANCE;
}
}
@Override
protected void acceptPyVisitor(PyElementVisitor pyVisitor) {
pyVisitor.visitPyFunction(this);
}
public int getTextOffset() {
final ASTNode name = getNameNode();
return name != null ? name.getStartOffset() : super.getTextOffset();
}
public PyStringLiteralExpression getDocStringExpression() {
final PyStatementList stmtList = getStatementList();
return DocStringUtil.findDocStringExpression(stmtList);
}
@NotNull
public Iterable<PyElement> iterateNames() {
return Collections.<PyElement>singleton(this);
}
public PyElement getElementNamed(final String the_name) {
return the_name.equals(getName()) ? this : null;
}
public boolean mustResolveOutside() {
return false;
}
@Override
public String toString() {
return super.toString() + "('" + getName() + "')";
}
public void subtreeChanged() {
super.subtreeChanged();
ControlFlowCache.clear(this);
}
public Property getProperty() {
final PyClass containingClass = getContainingClass();
if (containingClass != null) {
return containingClass.findPropertyByCallable(this);
}
return null;
}
@Override
public PyAnnotation getAnnotation() {
return getStubOrPsiChild(PyElementTypes.ANNOTATION);
}
@NotNull
@Override
public SearchScope getUseScope() {
final ScopeOwner scopeOwner = ScopeUtil.getScopeOwner(this);
if (scopeOwner instanceof PyFunction) {
return new LocalSearchScope(scopeOwner);
}
return super.getUseScope();
}
/**
   * Looks for the two standard decorators (classmethod/staticmethod) on a function, or a wrapping assignment that closely follows it.
*
* @return a flag describing what was detected.
*/
@Nullable
public Modifier getModifier() {
String deconame = getClassOrStaticMethodDecorator();
if (PyNames.CLASSMETHOD.equals(deconame)) {
return CLASSMETHOD;
}
else if (PyNames.STATICMETHOD.equals(deconame)) {
return STATICMETHOD;
}
// implicit staticmethod __new__
PyClass cls = getContainingClass();
if (cls != null && PyNames.NEW.equals(getName()) && cls.isNewStyleClass(null)) {
return STATICMETHOD;
}
//
if (getStub() != null) {
return getWrappersFromStub();
}
String func_name = getName();
if (func_name != null) {
PyAssignmentStatement assignment = PsiTreeUtil.getNextSiblingOfType(this, PyAssignmentStatement.class);
if (assignment != null) {
for (Pair<PyExpression, PyExpression> pair : assignment.getTargetsToValuesMapping()) {
PyExpression value = pair.getSecond();
if (value instanceof PyCallExpression) {
PyExpression target = pair.getFirst();
if (target instanceof PyTargetExpression && func_name.equals(target.getName())) {
Pair<String, PyFunction> interpreted = interpretAsModifierWrappingCall((PyCallExpression)value, this);
if (interpreted != null) {
PyFunction original = interpreted.getSecond();
if (original == this) {
String wrapper_name = interpreted.getFirst();
if (PyNames.CLASSMETHOD.equals(wrapper_name)) {
return CLASSMETHOD;
}
else if (PyNames.STATICMETHOD.equals(wrapper_name)) {
return STATICMETHOD;
}
}
}
}
}
}
}
}
return null;
}
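  // Illustration (hypothetical Python source) of the wrapping-assignment case that
  // getModifier() above recognizes when no decorator syntax is used:
  //
  //   def foo(cls):
  //       pass
  //   foo = classmethod(foo)
  //
  // interpretAsModifierWrappingCall() maps the assignment back to this function and
  // getModifier() then reports CLASSMETHOD.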
@Override
public boolean isAsync() {
final PyFunctionStub stub = getStub();
if (stub != null) {
return stub.isAsync();
}
return getNode().findChildByType(PyTokenTypes.ASYNC_KEYWORD) != null;
}
@Nullable
private Modifier getWrappersFromStub() {
final StubElement parentStub = getStub().getParentStub();
final List childrenStubs = parentStub.getChildrenStubs();
int index = childrenStubs.indexOf(getStub());
if (index >= 0 && index < childrenStubs.size() - 1) {
StubElement nextStub = (StubElement)childrenStubs.get(index + 1);
if (nextStub instanceof PyTargetExpressionStub) {
final PyTargetExpressionStub targetExpressionStub = (PyTargetExpressionStub)nextStub;
if (targetExpressionStub.getInitializerType() == PyTargetExpressionStub.InitializerType.CallExpression) {
final QualifiedName qualifiedName = targetExpressionStub.getInitializer();
if (QualifiedName.fromComponents(PyNames.CLASSMETHOD).equals(qualifiedName)) {
return CLASSMETHOD;
}
if (QualifiedName.fromComponents(PyNames.STATICMETHOD).equals(qualifiedName)) {
return STATICMETHOD;
}
}
}
}
return null;
}
/**
   * When a function is decorated with many decorators, finds the deepest built-in decorator:
* <pre>
* @foo
* @classmethod <b># <-- that's it</b>
* @bar
* def moo(cls):
* pass
* </pre>
*
* @return name of the built-in decorator, or null (even if there are non-built-in decorators).
*/
@Nullable
private String getClassOrStaticMethodDecorator() {
PyDecoratorList decolist = getDecoratorList();
if (decolist != null) {
PyDecorator[] decos = decolist.getDecorators();
if (decos.length > 0) {
for (int i = decos.length - 1; i >= 0; i -= 1) {
PyDecorator deco = decos[i];
String deconame = deco.getName();
if (PyNames.CLASSMETHOD.equals(deconame) || PyNames.STATICMETHOD.equals(deconame)) {
return deconame;
}
for (PyKnownDecoratorProvider provider : PyUtil.KnownDecoratorProviderHolder.KNOWN_DECORATOR_PROVIDERS) {
String name = provider.toKnownDecorator(deconame);
if (name != null) {
return name;
}
}
}
}
}
return null;
}
@Nullable
@Override
public String getQualifiedName() {
return QualifiedNameFinder.getQualifiedName(this);
}
@NotNull
@Override
public List<PyAssignmentStatement> findAttributes() {
final List<PyAssignmentStatement> result = new ArrayList<PyAssignmentStatement>();
for (final PyAssignmentStatement statement : new PsiQuery(this).siblings(PyAssignmentStatement.class).getElements()) {
for (final PyQualifiedExpression targetExpression : new PsiQuery(statement.getTargets()).filter(PyQualifiedExpression.class)
.getElements()) {
final PyExpression qualifier = targetExpression.getQualifier();
if (qualifier == null) {
continue;
}
final PsiReference qualifierReference = qualifier.getReference();
if (qualifierReference == null) {
continue;
}
if (qualifierReference.isReferenceTo(this)) {
result.add(statement);
}
}
}
return result;
}
@NotNull
@Override
public ProtectionLevel getProtectionLevel() {
final int underscoreLevels = PyUtil.getInitialUnderscores(getName());
for (final ProtectionLevel level : ProtectionLevel.values()) {
if (level.getUnderscoreLevel() == underscoreLevels) {
return level;
}
}
return ProtectionLevel.PRIVATE;
}
}
| ivan-fedorov/intellij-community | python/src/com/jetbrains/python/psi/impl/PyFunctionImpl.java | Java | apache-2.0 | 26,495 |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.common.settings;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.cluster.LocalNodeMasterListener;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.hash.MessageDigests;
import java.nio.charset.StandardCharsets;
import java.security.NoSuchAlgorithmException;
import java.security.spec.InvalidKeySpecException;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;
/**
* Used to publish secure setting hashes in the cluster state and to validate those hashes against the local values of those same settings.
* This is colloquially referred to as the secure setting consistency check. It will publish and verify hashes only for the collection
* of settings passed in the constructor. The settings have to have the {@link Setting.Property#Consistent} property.
*/
public final class ConsistentSettingsService {
private static final Logger logger = LogManager.getLogger(ConsistentSettingsService.class);
private final Settings settings;
private final ClusterService clusterService;
private final Collection<Setting<?>> secureSettingsCollection;
private final SecretKeyFactory pbkdf2KeyFactory;
public ConsistentSettingsService(Settings settings, ClusterService clusterService, Collection<Setting<?>> secureSettingsCollection) {
this.settings = settings;
this.clusterService = clusterService;
this.secureSettingsCollection = secureSettingsCollection;
// this is used to compute the PBKDF2 hash (the published one)
try {
this.pbkdf2KeyFactory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA512");
} catch (NoSuchAlgorithmException e) {
throw new RuntimeException("The \"PBKDF2WithHmacSHA512\" algorithm is required for consistent secure settings' hashes", e);
}
}
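    // Usage sketch (assumed wiring, not part of this class): a plugin holding a
    // Collection<Setting<?>> of Setting.Property.Consistent settings would typically do
    //
    //   ConsistentSettingsService service =
    //       new ConsistentSettingsService(settings, clusterService, consistentSecureSettings);
    //   clusterService.addLocalNodeMasterListener(service.newHashPublisher());
    //   boolean consistent = service.areAllConsistent();
    //
    // where "consistentSecureSettings" is the assumed collection of consistent settings.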
/**
* Returns a {@link LocalNodeMasterListener} that will publish hashes of all the settings passed in the constructor. These hashes are
* published by the master node only. Note that this is not designed for {@link SecureSettings} implementations that are mutable.
*/
public LocalNodeMasterListener newHashPublisher() {
// eagerly compute hashes to be published
final Map<String, String> computedHashesOfConsistentSettings = computeHashesOfConsistentSecureSettings();
return new HashesPublisher(computedHashesOfConsistentSettings, clusterService);
}
/**
* Verifies that the hashes of consistent secure settings in the latest {@code ClusterState} verify for the values of those same
* settings on the local node. The settings to be checked are passed in the constructor. Also, validates that a missing local
* value is also missing in the published set, and vice-versa.
*/
public boolean areAllConsistent() {
final ClusterState state = clusterService.state();
final Map<String, String> publishedHashesOfConsistentSettings = state.metadata().hashesOfConsistentSettings();
final Set<String> publishedSettingKeysToVerify = new HashSet<>();
publishedSettingKeysToVerify.addAll(publishedHashesOfConsistentSettings.keySet());
final AtomicBoolean allConsistent = new AtomicBoolean(true);
forEachConcreteSecureSettingDo(concreteSecureSetting -> {
final String publishedSaltAndHash = publishedHashesOfConsistentSettings.get(concreteSecureSetting.getKey());
final byte[] localHash = concreteSecureSetting.getSecretDigest(settings);
if (publishedSaltAndHash == null && localHash == null) {
// consistency of missing
logger.debug(
"no published hash for the consistent secure setting [{}] but it also does NOT exist on the local node",
concreteSecureSetting.getKey()
);
} else if (publishedSaltAndHash == null && localHash != null) {
// setting missing on master but present locally
logger.warn(
"no published hash for the consistent secure setting [{}] but it exists on the local node",
concreteSecureSetting.getKey()
);
if (state.nodes().isLocalNodeElectedMaster()) {
throw new IllegalStateException(
"Master node cannot validate consistent setting. No published hash for ["
+ concreteSecureSetting.getKey()
+ "] but setting exists."
);
}
allConsistent.set(false);
} else if (publishedSaltAndHash != null && localHash == null) {
// setting missing locally but present on master
logger.warn(
"the consistent secure setting [{}] does not exist on the local node but there is a published hash for it",
concreteSecureSetting.getKey()
);
allConsistent.set(false);
} else {
assert publishedSaltAndHash != null;
assert localHash != null;
final String[] parts = publishedSaltAndHash.split(":");
if (parts == null || parts.length != 2) {
throw new IllegalArgumentException(
"published hash ["
+ publishedSaltAndHash
+ " ] for secure setting ["
+ concreteSecureSetting.getKey()
+ "] is invalid"
);
}
final String publishedSalt = parts[0];
final String publishedHash = parts[1];
final byte[] computedSaltedHashBytes = computeSaltedPBKDF2Hash(localHash, publishedSalt.getBytes(StandardCharsets.UTF_8));
final String computedSaltedHash = new String(Base64.getEncoder().encode(computedSaltedHashBytes), StandardCharsets.UTF_8);
if (false == publishedHash.equals(computedSaltedHash)) {
logger.warn(
"the published hash [{}] of the consistent secure setting [{}] differs from the locally computed one [{}]",
publishedHash,
concreteSecureSetting.getKey(),
computedSaltedHash
);
if (state.nodes().isLocalNodeElectedMaster()) {
throw new IllegalStateException(
"Master node cannot validate consistent setting. The published hash ["
+ publishedHash
+ "] of the consistent secure setting ["
+ concreteSecureSetting.getKey()
+ "] differs from the locally computed one ["
+ computedSaltedHash
+ "]."
);
}
allConsistent.set(false);
}
}
publishedSettingKeysToVerify.remove(concreteSecureSetting.getKey());
});
// another case of settings missing locally, when group settings have not expanded to all the keys published
for (String publishedSettingKey : publishedSettingKeysToVerify) {
for (Setting<?> setting : secureSettingsCollection) {
if (setting.match(publishedSettingKey)) {
// setting missing locally but present on master
logger.warn(
"the consistent secure setting [{}] does not exist on the local node but there is a published hash for it",
publishedSettingKey
);
allConsistent.set(false);
}
}
}
return allConsistent.get();
}
/**
* Iterate over the passed in secure settings, expanding {@link Setting.AffixSetting} to concrete settings, in the scope of the local
* settings.
*/
private void forEachConcreteSecureSettingDo(Consumer<SecureSetting<?>> secureSettingConsumer) {
for (Setting<?> setting : secureSettingsCollection) {
assert setting.isConsistent() : "[" + setting.getKey() + "] is not a consistent setting";
if (setting instanceof Setting.AffixSetting<?>) {
((Setting.AffixSetting<?>) setting).getAllConcreteSettings(settings).forEach(concreteSetting -> {
assert concreteSetting instanceof SecureSetting<?> : "[" + concreteSetting.getKey() + "] is not a secure setting";
secureSettingConsumer.accept((SecureSetting<?>) concreteSetting);
});
} else if (setting instanceof SecureSetting<?>) {
secureSettingConsumer.accept((SecureSetting<?>) setting);
} else {
assert false : "Unrecognized consistent secure setting [" + setting.getKey() + "]";
}
}
}
private Map<String, String> computeHashesOfConsistentSecureSettings() {
final Map<String, String> hashesBySettingKey = new HashMap<>();
forEachConcreteSecureSettingDo(concreteSecureSetting -> {
final byte[] localHash = concreteSecureSetting.getSecretDigest(settings);
if (localHash != null) {
final String salt = UUIDs.randomBase64UUID();
final byte[] publicHash = computeSaltedPBKDF2Hash(localHash, salt.getBytes(StandardCharsets.UTF_8));
final String encodedPublicHash = new String(Base64.getEncoder().encode(publicHash), StandardCharsets.UTF_8);
hashesBySettingKey.put(concreteSecureSetting.getKey(), salt + ":" + encodedPublicHash);
}
});
return hashesBySettingKey;
}
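    /**
     * Computes a PBKDF2 hash (5000 iterations, 512-bit key) of the hex encoding of the given bytes, using the given salt.
     */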
private byte[] computeSaltedPBKDF2Hash(byte[] bytes, byte[] salt) {
final int iterations = 5000;
final int keyLength = 512;
char[] value = null;
try {
value = MessageDigests.toHexCharArray(bytes);
final PBEKeySpec spec = new PBEKeySpec(value, salt, iterations, keyLength);
final SecretKey key = pbkdf2KeyFactory.generateSecret(spec);
return key.getEncoded();
} catch (InvalidKeySpecException e) {
throw new RuntimeException("Unexpected exception when computing PBKDF2 hash", e);
} finally {
if (value != null) {
Arrays.fill(value, '0');
}
}
}
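    /**
     * Publishes the locally computed hashes of the consistent secure settings into the cluster state metadata
     * whenever the local node is elected master.
     */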
static final class HashesPublisher implements LocalNodeMasterListener {
// eagerly compute hashes to be published
final Map<String, String> computedHashesOfConsistentSettings;
final ClusterService clusterService;
HashesPublisher(Map<String, String> computedHashesOfConsistentSettings, ClusterService clusterService) {
this.computedHashesOfConsistentSettings = Map.copyOf(computedHashesOfConsistentSettings);
this.clusterService = clusterService;
}
@Override
public void onMaster() {
clusterService.submitStateUpdateTask("publish-secure-settings-hashes", new ClusterStateUpdateTask(Priority.URGENT) {
@Override
public ClusterState execute(ClusterState currentState) {
final Map<String, String> publishedHashesOfConsistentSettings = currentState.metadata().hashesOfConsistentSettings();
if (computedHashesOfConsistentSettings.equals(publishedHashesOfConsistentSettings)) {
logger.debug("Nothing to publish. What is already published matches this node's view.");
return currentState;
} else {
return ClusterState.builder(currentState)
.metadata(
Metadata.builder(currentState.metadata()).hashesOfConsistentSettings(computedHashesOfConsistentSettings)
)
.build();
}
}
@Override
public void onFailure(String source, Exception e) {
logger.error("unable to publish secure settings hashes", e);
}
});
}
@Override
public void offMaster() {
logger.trace("I am no longer master, nothing to do");
}
}
}
| GlenRSmith/elasticsearch | server/src/main/java/org/elasticsearch/common/settings/ConsistentSettingsService.java | Java | apache-2.0 | 13,526 |
// Copyright (c) Microsoft Open Technologies, Inc. All rights reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using DocumentFormat.OpenXml.Validation;
using System.Diagnostics;
using System.Xml;
namespace DocumentFormat.OpenXml.Internal.SemanticValidation
{
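    // Part-level semantic constraint: the relationship id referenced by the given attribute must exist among the part's relationships.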
internal class RelationshipExistConstraint : SemanticConstraint
{
private byte _rIdAttribute;
public RelationshipExistConstraint(byte rIdAttribute)
: base(SemanticValidationLevel.Part)
{
_rIdAttribute = rIdAttribute;
}
public override ValidationErrorInfo Validate(ValidationContext context)
{
OpenXmlSimpleType attributeValue = context.Element.Attributes[_rIdAttribute];
            // if the attribute is omitted, semantic validation will do nothing
if (attributeValue == null || string.IsNullOrEmpty(attributeValue.InnerText))
{
return null;
}
if (context.Part.PackagePart.RelationshipExists(attributeValue.InnerText))
{
return null;
}
else
{
string errorDescription = string.Format(System.Globalization.CultureInfo.CurrentUICulture, ValidationResources.Sem_InvalidRelationshipId,
attributeValue, GetAttributeQualifiedName(context.Element, _rIdAttribute));
return new ValidationErrorInfo()
{
Id = "Sem_InvalidRelationshipId",
ErrorType = ValidationErrorType.Semantic,
Node = context.Element,
Description = errorDescription
};
}
}
}
} | JesseQin/Open-XML-SDK | src/ofapi/Validation/SemanticValidation/SemanticConstraint/RelationshipExistConstraint.cs | C# | apache-2.0 | 1,905 |
// RUN: %clang_cc1 -fsyntax-only -verify %s
// Errors
export class foo { }; // expected-error {{expected template}}
template x; // expected-error {{C++ requires a type specifier for all declarations}} \
// expected-error {{does not refer}}
export template x; // expected-error {{expected '<' after 'template'}}
export template<class T> class x0; // expected-warning {{exported templates are unsupported}}
template < ; // expected-error {{expected template parameter}} \
// expected-error{{expected ',' or '>' in template-parameter-list}} \
// expected-warning {{declaration does not declare anything}}
template <int +> struct x1; // expected-error {{expected ',' or '>' in template-parameter-list}}
// verifies that we only walk to the ',' & still produce errors on the rest of the template parameters
template <int +, T> struct x2; // expected-error {{expected ',' or '>' in template-parameter-list}} \
expected-error {{expected unqualified-id}}
template<template<int+>> struct x3; // expected-error {{expected ',' or '>' in template-parameter-list}} \
expected-error {{template template parameter requires 'class' after the parameter list}}
template <template X> struct Err1; // expected-error {{expected '<' after 'template'}} \
// expected-error{{extraneous}}
template <template <typename> > struct Err2; // expected-error {{template template parameter requires 'class' after the parameter list}}
template <template <typename> Foo> struct Err3; // expected-error {{template template parameter requires 'class' after the parameter list}}
// Template function declarations
template <typename T> void foo();
template <typename T, typename U> void foo();
// Template function definitions.
template <typename T> void foo() { }
// Template class (forward) declarations
template <typename T> struct A;
template <typename T, typename U> struct b;
template <typename> struct C;
template <typename, typename> struct D;
// Forward declarations with default parameters?
template <typename T = int> class X1;
template <typename = int> class X2;
// Forward declarations w/template template parameters
template <template <typename> class T> class TTP1;
template <template <typename> class> class TTP2;
template <template <typename> class T = foo> class TTP3; // expected-error{{must be a class template}}
template <template <typename> class = foo> class TTP3; // expected-error{{must be a class template}}
template <template <typename X, typename Y> class T> class TTP5;
// Forward declarations with non-type params
template <int> class NTP0;
template <int N> class NTP1;
template <int N = 5> class NTP2;
template <int = 10> class NTP3;
template <unsigned int N = 12u> class NTP4;
template <unsigned int = 12u> class NTP5;
template <unsigned = 15u> class NTP6;
template <typename T, T Obj> class NTP7;
// Template class declarations
template <typename T> struct A { };
template <typename T, typename U> struct B { };
// Template parameter shadowing
template<typename T, // expected-note{{template parameter is declared here}}
typename T> // expected-error{{declaration of 'T' shadows template parameter}}
void shadow1();
template<typename T> // expected-note{{template parameter is declared here}}
void shadow2(int T); // expected-error{{declaration of 'T' shadows template parameter}}
template<typename T> // expected-note{{template parameter is declared here}}
class T { // expected-error{{declaration of 'T' shadows template parameter}}
};
template<int Size> // expected-note{{template parameter is declared here}}
void shadow3(int Size); // expected-error{{declaration of 'Size' shadows template parameter}}
// <rdar://problem/6952203>
template<typename T> // expected-note{{here}}
struct shadow4 {
int T; // expected-error{{shadows}}
};
template<typename T> // expected-note{{here}}
struct shadow5 {
int T(int, float); // expected-error{{shadows}}
};
// Non-type template parameters in scope
template<int Size>
void f(int& i) {
i = Size;
Size = i; // expected-error{{expression is not assignable}}
}
template<typename T>
const T& min(const T&, const T&);
void f2() {
int x;
A< typeof(x>1) > a;
}
// PR3844
template <> struct S<int> { }; // expected-error{{explicit specialization of non-template struct 'S'}}
namespace PR6184 {
namespace N {
template <typename T>
void bar(typename T::x);
}
template <typename T>
void N::bar(typename T::x) { }
}
| jeltz/rust-debian-package | src/llvm/tools/clang/test/Parser/cxx-template-decl.cpp | C++ | apache-2.0 | 4,541 |
# Copyright (c) 2014 VMware, Inc.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import mock
from oslo_vmware import vim_util
from nova import exception
from nova import test
from nova.tests.unit.virt.vmwareapi import fake
from nova.tests.unit.virt.vmwareapi import stubs
from nova.virt.vmwareapi import driver
from nova.virt.vmwareapi import network_util
from nova.virt.vmwareapi import vm_util
ResultSet = collections.namedtuple('ResultSet', ['objects'])
ObjectContent = collections.namedtuple('ObjectContent', ['obj', 'propSet'])
DynamicProperty = collections.namedtuple('DynamicProperty', ['name', 'val'])
class GetNetworkWithTheNameTestCase(test.NoDBTestCase):
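    """Tests for network_util.get_network_with_the_name."""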
def setUp(self):
super(GetNetworkWithTheNameTestCase, self).setUp()
fake.reset()
self.stub_out('nova.virt.vmwareapi.driver.VMwareAPISession.vim',
stubs.fake_vim_prop)
self.stub_out('nova.virt.vmwareapi.driver.'
'VMwareAPISession.is_vim_object',
stubs.fake_is_vim_object)
self._session = driver.VMwareAPISession()
def _build_cluster_networks(self, networks):
"""Returns a set of results for a cluster network lookup.
This is an example:
(ObjectContent){
obj =
(obj){
value = "domain-c7"
_type = "ClusterComputeResource"
}
propSet[] =
(DynamicProperty){
name = "network"
val =
(ArrayOfManagedObjectReference){
ManagedObjectReference[] =
(ManagedObjectReference){
value = "network-54"
_type = "Network"
},
(ManagedObjectReference){
value = "dvportgroup-14"
_type = "DistributedVirtualPortgroup"
},
}
},
}]
"""
objects = []
obj = ObjectContent(obj=vim_util.get_moref("domain-c7",
"ClusterComputeResource"),
propSet=[])
value = fake.DataObject()
value.ManagedObjectReference = []
for network in networks:
value.ManagedObjectReference.append(network)
obj.propSet.append(
DynamicProperty(name='network',
val=value))
objects.append(obj)
return ResultSet(objects=objects)
def test_get_network_no_match(self):
net_morefs = [vim_util.get_moref("dvportgroup-135",
"DistributedVirtualPortgroup"),
vim_util.get_moref("dvportgroup-136",
"DistributedVirtualPortgroup")]
networks = self._build_cluster_networks(net_morefs)
self._continue_retrieval_called = False
def mock_call_method(module, method, *args, **kwargs):
if method == 'get_object_properties':
return networks
if method == 'get_object_property':
result = fake.DataObject()
result.name = 'no-match'
return result
if method == 'continue_retrieval':
self._continue_retrieval_called = True
with mock.patch.object(self._session, '_call_method',
mock_call_method):
res = network_util.get_network_with_the_name(self._session,
'fake_net',
'fake_cluster')
self.assertTrue(self._continue_retrieval_called)
self.assertIsNone(res)
def _get_network_dvs_match(self, name, token=False):
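        """Assert a DVS port group matching the given name is found.
        When token is True the first retrieval pass returns a non-matching
        name, so the match only succeeds after continue_retrieval is called.
        """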
net_morefs = [vim_util.get_moref("dvportgroup-135",
"DistributedVirtualPortgroup")]
networks = self._build_cluster_networks(net_morefs)
def mock_call_method(module, method, *args, **kwargs):
if method == 'get_object_properties':
return networks
if method == 'get_object_property':
result = fake.DataObject()
if not token or self._continue_retrieval_called:
result.name = name
else:
result.name = 'fake_name'
result.key = 'fake_key'
result.distributedVirtualSwitch = 'fake_dvs'
return result
if method == 'continue_retrieval':
if token:
self._continue_retrieval_called = True
return networks
if method == 'cancel_retrieval':
self._cancel_retrieval_called = True
with mock.patch.object(self._session, '_call_method',
mock_call_method):
res = network_util.get_network_with_the_name(self._session,
'fake_net',
'fake_cluster')
self.assertIsNotNone(res)
def test_get_network_dvs_exact_match(self):
self._cancel_retrieval_called = False
self._get_network_dvs_match('fake_net')
self.assertTrue(self._cancel_retrieval_called)
def test_get_network_dvs_match(self):
self._cancel_retrieval_called = False
self._get_network_dvs_match('dvs_7-virtualwire-7-fake_net')
self.assertTrue(self._cancel_retrieval_called)
def test_get_network_dvs_match_with_token(self):
self._continue_retrieval_called = False
self._cancel_retrieval_called = False
self._get_network_dvs_match('dvs_7-virtualwire-7-fake_net',
token=True)
self.assertTrue(self._continue_retrieval_called)
self.assertTrue(self._cancel_retrieval_called)
def test_get_network_network_match(self):
net_morefs = [vim_util.get_moref("network-54", "Network")]
networks = self._build_cluster_networks(net_morefs)
def mock_call_method(module, method, *args, **kwargs):
if method == 'get_object_properties':
return networks
if method == 'get_object_property':
return 'fake_net'
with mock.patch.object(self._session, '_call_method',
mock_call_method):
res = network_util.get_network_with_the_name(self._session,
'fake_net',
'fake_cluster')
self.assertIsNotNone(res)
class GetVlanIdAndVswitchForPortgroupTestCase(test.NoDBTestCase):
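    """Tests for network_util.get_vlanid_and_vswitch_for_portgroup."""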
@mock.patch.object(vm_util, 'get_host_ref')
def test_no_port_groups(self, mock_get_host_ref):
session = mock.Mock()
session._call_method.return_value = None
self.assertRaises(
exception.NovaException,
network_util.get_vlanid_and_vswitch_for_portgroup,
session,
'port_group_name',
'fake_cluster'
)
@mock.patch.object(vm_util, 'get_host_ref')
def test_valid_port_group(self, mock_get_host_ref):
session = mock.Mock()
session._call_method.return_value = self._fake_port_groups()
vlanid, vswitch = network_util.get_vlanid_and_vswitch_for_portgroup(
session,
'port_group_name',
'fake_cluster'
)
self.assertEqual(vlanid, 100)
self.assertEqual(vswitch, 'vswitch_name')
@mock.patch.object(vm_util, 'get_host_ref')
def test_unknown_port_group(self, mock_get_host_ref):
session = mock.Mock()
session._call_method.return_value = self._fake_port_groups()
vlanid, vswitch = network_util.get_vlanid_and_vswitch_for_portgroup(
session,
'unknown_port_group',
'fake_cluster'
)
self.assertIsNone(vlanid)
self.assertIsNone(vswitch)
def _fake_port_groups(self):
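        """Return a fake response with a single 'port_group_name' group, VLAN 100, on 'vswitch_name'."""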
port_group_spec = fake.DataObject()
port_group_spec.name = 'port_group_name'
port_group_spec.vlanId = 100
port_group = fake.DataObject()
port_group.vswitch = 'vswitch_name'
port_group.spec = port_group_spec
response = fake.DataObject()
response.HostPortGroup = [port_group]
return response
| zhimin711/nova | nova/tests/unit/virt/vmwareapi/test_network_util.py | Python | apache-2.0 | 9,191 |
require 'fog/libvirt'
require 'fog/compute'
require 'fog/libvirt/models/compute/util/util'
require 'fog/libvirt/models/compute/util/uri'
module Fog
module Compute
class Libvirt < Fog::Service
requires :libvirt_uri
recognizes :libvirt_username, :libvirt_password
recognizes :libvirt_ip_command
model_path 'fog/libvirt/models/compute'
model :server
collection :servers
model :network
collection :networks
model :interface
collection :interfaces
model :volume
collection :volumes
model :pool
collection :pools
model :node
collection :nodes
model :nic
collection :nics
request_path 'fog/libvirt/requests/compute'
request :list_domains
request :create_domain
request :define_domain
request :vm_action
request :list_pools
request :list_pool_volumes
request :define_pool
request :pool_action
request :list_volumes
request :volume_action
request :create_volume
request :list_networks
request :destroy_network
request :list_interfaces
request :destroy_interface
request :get_node_info
request :update_display
module Shared
include Fog::Compute::LibvirtUtil
end
class Mock
include Shared
def initialize(options={})
# libvirt is part of the gem => ruby-libvirt
require 'libvirt'
end
private
def client
return @client if defined?(@client)
end
        # read the mock xml file for the given request
def read_xml(file_name)
file_path = File.join(File.dirname(__FILE__),"requests","compute","mock_files",file_name)
File.read(file_path)
end
end
class Real
include Shared
attr_reader :client
attr_reader :uri
attr_reader :ip_command
def initialize(options={})
@uri = ::Fog::Compute::LibvirtUtil::URI.new(enhance_uri(options[:libvirt_uri]))
@ip_command = options[:libvirt_ip_command]
# libvirt is part of the gem => ruby-libvirt
begin
require 'libvirt'
rescue LoadError => e
retry if require('rubygems')
raise e.message
end
begin
if options[:libvirt_username] and options[:libvirt_password]
@client = ::Libvirt::open_auth(uri.uri, [::Libvirt::CRED_AUTHNAME, ::Libvirt::CRED_PASSPHRASE]) do |cred|
case cred['type']
when ::Libvirt::CRED_AUTHNAME
options[:libvirt_username]
when ::Libvirt::CRED_PASSPHRASE
options[:libvirt_password]
end
end
else
@client = ::Libvirt::open(uri.uri)
end
rescue ::Libvirt::ConnectionError
raise Fog::Errors::Error.new("Error making a connection to libvirt URI #{uri.uri}:\n#{$!}")
end
end
def terminate
@client.close if @client and !@client.closed?
end
def enhance_uri(uri)
require 'cgi'
append=""
# on macosx, chances are we are using libvirt through homebrew
          # the client will default to a socket location based on its own location (/opt)
# we conveniently point it to /var/run/libvirt/libvirt-sock
# if no socket option has been specified explicitly
if RUBY_PLATFORM =~ /darwin/
querystring=::URI.parse(uri).query
if querystring.nil?
append="?socket=/var/run/libvirt/libvirt-sock"
else
if !::CGI.parse(querystring).has_key?("socket")
append="&socket=/var/run/libvirt/libvirt-sock"
end
end
end
uri+append
end
end
end
end
end
| luna1x/chef-server | vendor/ruby/1.9.1/gems/fog-1.15.0/lib/fog/libvirt/compute.rb | Ruby | apache-2.0 | 3,975 |
#
# Author:: AJ Christensen (<aj@opscode.com>)
# Cookbook Name:: database
# Recipe:: snapshot
#
# Copyright 2009-2010, Opscode, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include_recipe "aws"
include_recipe "xfs"
%w{ebs_vol_dev db_role app_environment username password aws_access_key_id aws_secret_access_key snapshots_to_keep volume_id}.each do |key|
Chef::Application.fatal!("Required db_snapshot configuration #{key} not found.", -47) unless node.db_snapshot.has_key? key
end
connection_info = {:host => 'localhost', :username => node.db_snapshot.username, :password => node.db_snapshot.password}
mysql_database "locking tables for #{node.db_snapshot.app_environment}" do
connection connection_info
sql "flush tables with read lock"
action :query
end
execute "xfs freeze" do
command "xfs_freeze -f #{node.db_snapshot.ebs_vol_dev}"
end
aws_ebs_volume "#{node.db_snapshot.db_role.first}_#{node.db_snapshot.app_environment}" do
aws_access_key node.db_snapshot.aws_access_key_id
aws_secret_access_key node.db_snapshot.aws_secret_access_key
size 50
device node.db_snapshot.ebs_vol_dev
snapshots_to_keep node.db_snapshot.snapshots_to_keep
action :snapshot
volume_id node.db_snapshot.volume_id
ignore_failure true # if this fails, continue to unfreeze and unlock
end
execute "xfs unfreeze" do
command "xfs_freeze -u #{node.db_snapshot.ebs_vol_dev}"
end
mysql_database "unflushing tables for #{node.db_snapshot.app_environment}" do
connection connection_info
sql "unlock tables"
action :query
end
aws_ebs_volume "#{node.db_snapshot.db_role.first}_#{node.db_snapshot.app_environment}" do
action :prune
end
| dagolden/opscode-cookbooks | database/recipes/snapshot.rb | Ruby | apache-2.0 | 2,156 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.flink.translation.wrappers.streaming.stableinput;
import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkNotNull;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import org.apache.flink.api.common.state.ListState;
/** A non-keyed implementation of a {@link BufferingElementsHandler}. */
public class NonKeyedBufferingElementsHandler<T> implements BufferingElementsHandler {
static <T> NonKeyedBufferingElementsHandler<T> create(ListState<BufferedElement> elementState) {
return new NonKeyedBufferingElementsHandler<>(elementState);
}
private final ListState<BufferedElement> elementState;
private NonKeyedBufferingElementsHandler(ListState<BufferedElement> elementState) {
this.elementState = checkNotNull(elementState);
}
@Override
public Stream<BufferedElement> getElements() {
try {
return StreamSupport.stream(elementState.get().spliterator(), false);
} catch (Exception e) {
throw new RuntimeException("Failed to retrieve buffered element from state backend.", e);
}
}
@Override
public void buffer(BufferedElement element) {
try {
elementState.add(element);
} catch (Exception e) {
throw new RuntimeException("Failed to buffer element in state backend.", e);
}
}
@Override
public void clear() {
elementState.clear();
}
}
| lukecwik/incubator-beam | runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/stableinput/NonKeyedBufferingElementsHandler.java | Java | apache-2.0 | 2,224 |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.job.entries.simpleeval;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.regex.Pattern;
import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.row.value.ValueMetaString;
import org.pentaho.di.core.util.StringUtil;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.job.entry.JobEntryBase;
import org.pentaho.di.job.entry.JobEntryInterface;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.metastore.api.IMetaStore;
import org.w3c.dom.Node;
/**
* This defines a 'simple evaluation' job entry.
*
* @author Samatar Hassan
* @since 01-01-2009
*/
public class JobEntrySimpleEval extends JobEntryBase implements Cloneable, JobEntryInterface {
private static Class<?> PKG = JobEntrySimpleEval.class; // for i18n purposes, needed by Translator2!!
public static final String[] valueTypeDesc = new String[] {
BaseMessages.getString( PKG, "JobSimpleEval.EvalPreviousField.Label" ),
BaseMessages.getString( PKG, "JobSimpleEval.EvalVariable.Label" ),
};
public static final String[] valueTypeCode = new String[] { "field", "variable" };
public static final int VALUE_TYPE_FIELD = 0;
public static final int VALUE_TYPE_VARIABLE = 1;
public int valuetype;
public static final String[] successConditionDesc = new String[] {
BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenEqual.Label" ),
BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenDifferent.Label" ),
BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenContains.Label" ),
BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenNotContains.Label" ),
BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenStartWith.Label" ),
BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenNotStartWith.Label" ),
BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenEndWith.Label" ),
BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenNotEndWith.Label" ),
BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenRegExp.Label" ),
BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenInList.Label" ),
BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenNotInList.Label" ) };
public static final String[] successConditionCode = new String[] {
"equal", "different", "contains", "notcontains", "startswith", "notstatwith", "endswith", "notendwith",
"regexp", "inlist", "notinlist" };
public static final int SUCCESS_CONDITION_EQUAL = 0;
public static final int SUCCESS_CONDITION_DIFFERENT = 1;
public static final int SUCCESS_CONDITION_CONTAINS = 2;
public static final int SUCCESS_CONDITION_NOT_CONTAINS = 3;
public static final int SUCCESS_CONDITION_START_WITH = 4;
public static final int SUCCESS_CONDITION_NOT_START_WITH = 5;
public static final int SUCCESS_CONDITION_END_WITH = 6;
public static final int SUCCESS_CONDITION_NOT_END_WITH = 7;
public static final int SUCCESS_CONDITION_REGEX = 8;
public static final int SUCCESS_CONDITION_IN_LIST = 9;
public static final int SUCCESS_CONDITION_NOT_IN_LIST = 10;
public int successcondition;
public static final String[] fieldTypeDesc = new String[] {
BaseMessages.getString( PKG, "JobSimpleEval.FieldTypeString.Label" ),
BaseMessages.getString( PKG, "JobSimpleEval.FieldTypeNumber.Label" ),
BaseMessages.getString( PKG, "JobSimpleEval.FieldTypeDateTime.Label" ),
BaseMessages.getString( PKG, "JobSimpleEval.FieldTypeBoolean.Label" ),
};
public static final String[] fieldTypeCode = new String[] { "string", "number", "datetime", "boolean" };
public static final int FIELD_TYPE_STRING = 0;
public static final int FIELD_TYPE_NUMBER = 1;
public static final int FIELD_TYPE_DATE_TIME = 2;
public static final int FIELD_TYPE_BOOLEAN = 3;
public int fieldtype;
public static final String[] successNumberConditionDesc = new String[] {
BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenEqual.Label" ),
BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenDifferent.Label" ),
BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenSmallThan.Label" ),
BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenSmallOrEqualThan.Label" ),
BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenGreaterThan.Label" ),
BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenGreaterOrEqualThan.Label" ),
BaseMessages.getString( PKG, "JobSimpleEval.SuccessBetween.Label" ),
BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenInList.Label" ),
BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenNotInList.Label" ), };
public static final String[] successNumberConditionCode = new String[] {
"equal", "different", "smaller", "smallequal", "greater", "greaterequal", "between", "inlist", "notinlist" };
public static final int SUCCESS_NUMBER_CONDITION_EQUAL = 0;
public static final int SUCCESS_NUMBER_CONDITION_DIFFERENT = 1;
public static final int SUCCESS_NUMBER_CONDITION_SMALLER = 2;
public static final int SUCCESS_NUMBER_CONDITION_SMALLER_EQUAL = 3;
public static final int SUCCESS_NUMBER_CONDITION_GREATER = 4;
public static final int SUCCESS_NUMBER_CONDITION_GREATER_EQUAL = 5;
public static final int SUCCESS_NUMBER_CONDITION_BETWEEN = 6;
public static final int SUCCESS_NUMBER_CONDITION_IN_LIST = 7;
public static final int SUCCESS_NUMBER_CONDITION_NOT_IN_LIST = 8;
public int successnumbercondition;
public static final String[] successBooleanConditionDesc = new String[] {
BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenTrue.Label" ),
BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenFalse.Label" )
};
public static final String[] successBooleanConditionCode = new String[] { "true", "false" };
public static final int SUCCESS_BOOLEAN_CONDITION_TRUE = 0;
public static final int SUCCESS_BOOLEAN_CONDITION_FALSE = 1;
public int successbooleancondition;
private String fieldname;
private String variablename;
private String mask;
private String comparevalue;
private String minvalue;
private String maxvalue;
private boolean successwhenvarset;
public JobEntrySimpleEval( String n ) {
super( n, "" );
valuetype = VALUE_TYPE_FIELD;
successcondition = SUCCESS_CONDITION_EQUAL;
successnumbercondition = SUCCESS_NUMBER_CONDITION_EQUAL;
successbooleancondition = SUCCESS_BOOLEAN_CONDITION_FALSE;
minvalue = null;
maxvalue = null;
comparevalue = null;
fieldname = null;
variablename = null;
fieldtype = FIELD_TYPE_STRING;
mask = null;
successwhenvarset = false;
}
public JobEntrySimpleEval() {
this( "" );
}
@Override
public Object clone() {
JobEntrySimpleEval je = (JobEntrySimpleEval) super.clone();
return je;
}
private static String getValueTypeCode( int i ) {
if ( i < 0 || i >= valueTypeCode.length ) {
return valueTypeCode[0];
}
return valueTypeCode[i];
}
private static String getFieldTypeCode( int i ) {
if ( i < 0 || i >= fieldTypeCode.length ) {
return fieldTypeCode[0];
}
return fieldTypeCode[i];
}
private static String getSuccessConditionCode( int i ) {
if ( i < 0 || i >= successConditionCode.length ) {
return successConditionCode[0];
}
return successConditionCode[i];
}
public static String getSuccessNumberConditionCode( int i ) {
if ( i < 0 || i >= successNumberConditionCode.length ) {
return successNumberConditionCode[0];
}
return successNumberConditionCode[i];
}
private static String getSuccessBooleanConditionCode( int i ) {
if ( i < 0 || i >= successBooleanConditionCode.length ) {
return successBooleanConditionCode[0];
}
return successBooleanConditionCode[i];
}
@Override
public String getXML() {
StringBuilder retval = new StringBuilder( 300 );
retval.append( super.getXML() );
retval.append( " " ).append( XMLHandler.addTagValue( "valuetype", getValueTypeCode( valuetype ) ) );
retval.append( " " ).append( XMLHandler.addTagValue( "fieldname", fieldname ) );
retval.append( " " ).append( XMLHandler.addTagValue( "variablename", variablename ) );
retval.append( " " ).append( XMLHandler.addTagValue( "fieldtype", getFieldTypeCode( fieldtype ) ) );
retval.append( " " ).append( XMLHandler.addTagValue( "mask", mask ) );
retval.append( " " ).append( XMLHandler.addTagValue( "comparevalue", comparevalue ) );
retval.append( " " ).append( XMLHandler.addTagValue( "minvalue", minvalue ) );
retval.append( " " ).append( XMLHandler.addTagValue( "maxvalue", maxvalue ) );
retval.append( " " ).append(
XMLHandler.addTagValue( "successcondition", getSuccessConditionCode( successcondition ) ) );
retval
.append( " " ).append(
XMLHandler.addTagValue(
"successnumbercondition", getSuccessNumberConditionCode( successnumbercondition ) ) );
retval.append( " " ).append(
XMLHandler.addTagValue(
"successbooleancondition", getSuccessBooleanConditionCode( successbooleancondition ) ) );
retval.append( " " ).append( XMLHandler.addTagValue( "successwhenvarset", successwhenvarset ) );
return retval.toString();
}
private static int getValueTypeByCode( String tt ) {
if ( tt == null ) {
return 0;
}
for ( int i = 0; i < valueTypeCode.length; i++ ) {
if ( valueTypeCode[i].equalsIgnoreCase( tt ) ) {
return i;
}
}
return 0;
}
private static int getSuccessNumberByCode( String tt ) {
if ( tt == null ) {
return 0;
}
for ( int i = 0; i < successNumberConditionCode.length; i++ ) {
if ( successNumberConditionCode[i].equalsIgnoreCase( tt ) ) {
return i;
}
}
return 0;
}
private static int getSuccessBooleanByCode( String tt ) {
if ( tt == null ) {
return 0;
}
for ( int i = 0; i < successBooleanConditionCode.length; i++ ) {
if ( successBooleanConditionCode[i].equalsIgnoreCase( tt ) ) {
return i;
}
}
return 0;
}
private static int getFieldTypeByCode( String tt ) {
if ( tt == null ) {
return 0;
}
for ( int i = 0; i < fieldTypeCode.length; i++ ) {
if ( fieldTypeCode[i].equalsIgnoreCase( tt ) ) {
return i;
}
}
return 0;
}
private static int getSuccessConditionByCode( String tt ) {
if ( tt == null ) {
return 0;
}
for ( int i = 0; i < successConditionCode.length; i++ ) {
if ( successConditionCode[i].equalsIgnoreCase( tt ) ) {
return i;
}
}
return 0;
}
public void setSuccessWhenVarSet( boolean successwhenvarset ) {
this.successwhenvarset = successwhenvarset;
}
public boolean isSuccessWhenVarSet() {
return this.successwhenvarset;
}
public static int getSuccessNumberConditionByCode( String tt ) {
if ( tt == null ) {
return 0;
}
for ( int i = 0; i < successNumberConditionCode.length; i++ ) {
if ( successNumberConditionCode[i].equalsIgnoreCase( tt ) ) {
return i;
}
}
return 0;
}
private static int getSuccessBooleanConditionByCode( String tt ) {
if ( tt == null ) {
return 0;
}
for ( int i = 0; i < successBooleanConditionCode.length; i++ ) {
if ( successBooleanConditionCode[i].equalsIgnoreCase( tt ) ) {
return i;
}
}
return 0;
}
@Override
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
Repository rep, IMetaStore metaStore ) throws KettleXMLException {
try {
super.loadXML( entrynode, databases, slaveServers );
valuetype = getValueTypeByCode( Const.NVL( XMLHandler.getTagValue( entrynode, "valuetype" ), "" ) );
fieldname = XMLHandler.getTagValue( entrynode, "fieldname" );
fieldtype = getFieldTypeByCode( Const.NVL( XMLHandler.getTagValue( entrynode, "fieldtype" ), "" ) );
variablename = XMLHandler.getTagValue( entrynode, "variablename" );
mask = XMLHandler.getTagValue( entrynode, "mask" );
comparevalue = XMLHandler.getTagValue( entrynode, "comparevalue" );
minvalue = XMLHandler.getTagValue( entrynode, "minvalue" );
maxvalue = XMLHandler.getTagValue( entrynode, "maxvalue" );
successcondition =
getSuccessConditionByCode( Const.NVL( XMLHandler.getTagValue( entrynode, "successcondition" ), "" ) );
successnumbercondition =
getSuccessNumberConditionByCode( Const.NVL(
XMLHandler.getTagValue( entrynode, "successnumbercondition" ), "" ) );
successbooleancondition =
getSuccessBooleanConditionByCode( Const.NVL( XMLHandler.getTagValue(
entrynode, "successbooleancondition" ), "" ) );
successwhenvarset = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "successwhenvarset" ) );
} catch ( KettleXMLException xe ) {
throw new KettleXMLException(
BaseMessages.getString( PKG, "JobEntrySimple.Error.Exception.UnableLoadXML" ), xe );
}
}
@Override
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
List<SlaveServer> slaveServers ) throws KettleException {
try {
valuetype = getValueTypeByCode( Const.NVL( rep.getJobEntryAttributeString( id_jobentry, "valuetype" ), "" ) );
fieldname = rep.getJobEntryAttributeString( id_jobentry, "fieldname" );
variablename = rep.getJobEntryAttributeString( id_jobentry, "variablename" );
fieldtype = getFieldTypeByCode( Const.NVL( rep.getJobEntryAttributeString( id_jobentry, "fieldtype" ), "" ) );
mask = rep.getJobEntryAttributeString( id_jobentry, "mask" );
comparevalue = rep.getJobEntryAttributeString( id_jobentry, "comparevalue" );
minvalue = rep.getJobEntryAttributeString( id_jobentry, "minvalue" );
maxvalue = rep.getJobEntryAttributeString( id_jobentry, "maxvalue" );
successcondition =
getSuccessConditionByCode( Const.NVL(
rep.getJobEntryAttributeString( id_jobentry, "successcondition" ), "" ) );
successnumbercondition =
getSuccessNumberConditionByCode( Const.NVL( rep.getJobEntryAttributeString(
id_jobentry, "successnumbercondition" ), "" ) );
successbooleancondition =
getSuccessBooleanConditionByCode( Const.NVL( rep.getJobEntryAttributeString(
id_jobentry, "successbooleancondition" ), "" ) );
successwhenvarset = rep.getJobEntryAttributeBoolean( id_jobentry, "successwhenvarset" );
} catch ( KettleException dbe ) {
throw new KettleException( BaseMessages.getString( PKG, "JobEntrySimple.Error.Exception.UnableLoadRep" )
+ id_jobentry, dbe );
}
}
@Override
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
try {
rep.saveJobEntryAttribute( id_job, getObjectId(), "valuetype", getValueTypeCode( valuetype ) );
rep.saveJobEntryAttribute( id_job, getObjectId(), "fieldname", fieldname );
rep.saveJobEntryAttribute( id_job, getObjectId(), "variablename", variablename );
rep.saveJobEntryAttribute( id_job, getObjectId(), "fieldtype", getFieldTypeCode( fieldtype ) );
rep.saveJobEntryAttribute( id_job, getObjectId(), "mask", mask );
rep.saveJobEntryAttribute( id_job, getObjectId(), "comparevalue", comparevalue );
rep.saveJobEntryAttribute( id_job, getObjectId(), "minvalue", minvalue );
rep.saveJobEntryAttribute( id_job, getObjectId(), "maxvalue", maxvalue );
rep.saveJobEntryAttribute(
id_job, getObjectId(), "successcondition", getSuccessConditionCode( successcondition ) );
rep
.saveJobEntryAttribute(
id_job, getObjectId(), "successnumbercondition",
getSuccessNumberConditionCode( successnumbercondition ) );
rep.saveJobEntryAttribute(
id_job, getObjectId(), "successbooleancondition",
getSuccessBooleanConditionCode( successbooleancondition ) );
rep.saveJobEntryAttribute( id_job, getObjectId(), "successwhenvarset", successwhenvarset );
} catch ( KettleDatabaseException dbe ) {
throw new KettleException( BaseMessages.getString( PKG, "JobEntrySimple.Error.Exception.UnableSaveRep" )
+ id_job, dbe );
}
}
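  /**
   * Evaluates the configured field or variable against the selected success condition and stores the outcome in the
   * result; per PDI-6943 the entry reports no errors regardless of the evaluation outcome.
   */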
@Override
public Result execute( Result previousResult, int nr ) throws KettleException {
Result result = previousResult;
result.setNrErrors( 1 );
result.setResult( false );
String sourcevalue = null;
switch ( valuetype ) {
case VALUE_TYPE_FIELD:
List<RowMetaAndData> rows = result.getRows();
RowMetaAndData resultRow = null;
if ( isDetailed() ) {
logDetailed( BaseMessages.getString( PKG, "JobEntrySimpleEval.Log.ArgFromPrevious.Found", ( rows != null
? rows.size() : 0 )
+ "" ) );
}
if ( rows.size() == 0 ) {
rows = null;
logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.NoRows" ) );
return result;
}
// get first row
resultRow = rows.get( 0 );
String realfieldname = environmentSubstitute( fieldname );
int indexOfField = -1;
indexOfField = resultRow.getRowMeta().indexOfValue( realfieldname );
if ( indexOfField == -1 ) {
logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.FieldNotExist", realfieldname ) );
resultRow = null;
rows = null;
return result;
}
sourcevalue = resultRow.getString( indexOfField, null );
if ( sourcevalue == null ) {
sourcevalue = "";
}
resultRow = null;
rows = null;
break;
case VALUE_TYPE_VARIABLE:
if ( Utils.isEmpty( variablename ) ) {
logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.VariableMissing" ) );
return result;
}
if ( isSuccessWhenVarSet() ) {
// return variable name
// remove specifications if needed
String variableName = StringUtil.getVariableName( Const.NVL( getVariableName(), "" ) );
// Get value, if the variable is not set, Null will be returned
String value = getVariable( variableName );
if ( value != null ) {
if ( isDetailed() ) {
logDetailed( BaseMessages.getString( PKG, "JobEntrySimpleEval.VariableSet", variableName ) );
}
result.setResult( true );
result.setNrErrors( 0 );
return result;
} else {
if ( isDetailed() ) {
logDetailed( BaseMessages.getString( PKG, "JobEntrySimpleEval.VariableNotSet", variableName ) );
}
// PDI-6943: this job entry does not set errors upon evaluation, independently of the outcome of the check
result.setNrErrors( 0 );
return result;
}
}
sourcevalue = environmentSubstitute( getVariableWithSpec() );
break;
default:
break;
}
if ( isDetailed() ) {
logDetailed( BaseMessages.getString( PKG, "JobSimpleEval.Log.ValueToevaluate", sourcevalue ) );
}
boolean success = false;
String realCompareValue = environmentSubstitute( comparevalue );
if ( realCompareValue == null ) {
realCompareValue = "";
}
String realMinValue = environmentSubstitute( minvalue );
String realMaxValue = environmentSubstitute( maxvalue );
switch ( fieldtype ) {
case FIELD_TYPE_STRING:
switch ( successcondition ) {
case SUCCESS_CONDITION_EQUAL: // equal
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
success = ( sourcevalue.equals( realCompareValue ) );
if ( valuetype == VALUE_TYPE_VARIABLE && !success ) {
            // make the empty value evaluate to true when compared to an unset variable
if ( Utils.isEmpty( realCompareValue ) ) {
String variableName = StringUtil.getVariableName( variablename );
if ( System.getProperty( variableName ) == null ) {
success = true;
}
}
}
break;
case SUCCESS_CONDITION_DIFFERENT: // different
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
success = ( !sourcevalue.equals( realCompareValue ) );
break;
case SUCCESS_CONDITION_CONTAINS: // contains
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
success = ( sourcevalue.contains( realCompareValue ) );
break;
case SUCCESS_CONDITION_NOT_CONTAINS: // not contains
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
success = ( !sourcevalue.contains( realCompareValue ) );
break;
case SUCCESS_CONDITION_START_WITH: // starts with
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
success = ( sourcevalue.startsWith( realCompareValue ) );
break;
case SUCCESS_CONDITION_NOT_START_WITH: // not start with
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
success = ( !sourcevalue.startsWith( realCompareValue ) );
break;
case SUCCESS_CONDITION_END_WITH: // ends with
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
success = ( sourcevalue.endsWith( realCompareValue ) );
break;
case SUCCESS_CONDITION_NOT_END_WITH: // not ends with
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
success = ( !sourcevalue.endsWith( realCompareValue ) );
break;
case SUCCESS_CONDITION_REGEX: // regexp
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
success = ( Pattern.compile( realCompareValue ).matcher( sourcevalue ).matches() );
break;
case SUCCESS_CONDITION_IN_LIST: // in list
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
realCompareValue = Const.NVL( realCompareValue, "" );
String[] parts = realCompareValue.split( "," );
for ( int i = 0; i < parts.length && !success; i++ ) {
success = ( sourcevalue.equals( parts[i].trim() ) );
}
break;
case SUCCESS_CONDITION_NOT_IN_LIST: // not in list
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
realCompareValue = Const.NVL( realCompareValue, "" );
parts = realCompareValue.split( "," );
success = true;
for ( int i = 0; i < parts.length && success; i++ ) {
success = !( sourcevalue.equals( parts[i].trim() ) );
}
break;
default:
break;
}
break;
case FIELD_TYPE_NUMBER:
double valuenumber;
try {
valuenumber = Double.parseDouble( sourcevalue );
} catch ( Exception e ) {
logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.UnparsableNumber", sourcevalue, e
.getMessage() ) );
return result;
}
double valuecompare;
switch ( successnumbercondition ) {
case SUCCESS_NUMBER_CONDITION_EQUAL: // equal
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
try {
valuecompare = Double.parseDouble( realCompareValue );
} catch ( Exception e ) {
logError( BaseMessages.getString(
PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realCompareValue, e.getMessage() ) );
return result;
}
success = ( valuenumber == valuecompare );
break;
case SUCCESS_NUMBER_CONDITION_DIFFERENT: // different
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
try {
valuecompare = Double.parseDouble( realCompareValue );
} catch ( Exception e ) {
logError( BaseMessages.getString(
PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realCompareValue, e.getMessage() ) );
return result;
}
success = ( valuenumber != valuecompare );
break;
case SUCCESS_NUMBER_CONDITION_SMALLER: // smaller
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
try {
valuecompare = Double.parseDouble( realCompareValue );
} catch ( Exception e ) {
logError( BaseMessages.getString(
PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realCompareValue, e.getMessage() ) );
return result;
}
success = ( valuenumber < valuecompare );
break;
case SUCCESS_NUMBER_CONDITION_SMALLER_EQUAL: // smaller or equal
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
try {
valuecompare = Double.parseDouble( realCompareValue );
} catch ( Exception e ) {
logError( BaseMessages.getString(
PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realCompareValue, e.getMessage() ) );
return result;
}
success = ( valuenumber <= valuecompare );
break;
case SUCCESS_NUMBER_CONDITION_GREATER: // greater
try {
valuecompare = Double.parseDouble( realCompareValue );
} catch ( Exception e ) {
logError( BaseMessages.getString(
PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realCompareValue, e.getMessage() ) );
return result;
}
success = ( valuenumber > valuecompare );
break;
case SUCCESS_NUMBER_CONDITION_GREATER_EQUAL: // greater or equal
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
try {
valuecompare = Double.parseDouble( realCompareValue );
} catch ( Exception e ) {
logError( BaseMessages.getString(
PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realCompareValue, e.getMessage() ) );
return result;
}
success = ( valuenumber >= valuecompare );
break;
case SUCCESS_NUMBER_CONDITION_BETWEEN: // between min and max
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValues", realMinValue, realMaxValue ) );
}
double valuemin;
try {
valuemin = Double.parseDouble( realMinValue );
} catch ( Exception e ) {
logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realMinValue, e
.getMessage() ) );
return result;
}
double valuemax;
try {
valuemax = Double.parseDouble( realMaxValue );
} catch ( Exception e ) {
logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realMaxValue, e
.getMessage() ) );
return result;
}
if ( valuemin >= valuemax ) {
logError( BaseMessages.getString(
PKG, "JobEntrySimpleEval.Error.IncorrectNumbers", realMinValue, realMaxValue ) );
return result;
}
success = ( valuenumber >= valuemin && valuenumber <= valuemax );
break;
case SUCCESS_NUMBER_CONDITION_IN_LIST: // in list
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
String[] parts = realCompareValue.split( "," );
for ( int i = 0; i < parts.length && !success; i++ ) {
try {
valuecompare = Double.parseDouble( parts[i] );
} catch ( Exception e ) {
logError( toString(), BaseMessages.getString(
PKG, "JobEntrySimpleEval.Error.UnparsableNumber", parts[i], e.getMessage() ) );
return result;
}
success = ( valuenumber == valuecompare );
}
break;
case SUCCESS_NUMBER_CONDITION_NOT_IN_LIST: // not in list
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
realCompareValue = Const.NVL( realCompareValue, "" );
parts = realCompareValue.split( "," );
success = true;
for ( int i = 0; i < parts.length && success; i++ ) {
try {
valuecompare = Double.parseDouble( parts[i] );
} catch ( Exception e ) {
logError( toString(), BaseMessages.getString(
PKG, "JobEntrySimpleEval.Error.UnparsableNumber", parts[i], e.getMessage() ) );
return result;
}
success = ( valuenumber != valuecompare );
}
break;
default:
break;
}
break;
case FIELD_TYPE_DATE_TIME:
String realMask = environmentSubstitute( mask );
SimpleDateFormat df = new SimpleDateFormat();
if ( !Utils.isEmpty( realMask ) ) {
df.applyPattern( realMask );
}
Date datevalue = null;
try {
datevalue = convertToDate( sourcevalue, realMask, df );
} catch ( Exception e ) {
logError( e.getMessage() );
return result;
}
Date datecompare;
switch ( successnumbercondition ) {
case SUCCESS_NUMBER_CONDITION_EQUAL: // equal
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
try {
datecompare = convertToDate( realCompareValue, realMask, df );
} catch ( Exception e ) {
logError( e.getMessage() );
return result;
}
success = ( datevalue.equals( datecompare ) );
break;
case SUCCESS_NUMBER_CONDITION_DIFFERENT: // different
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
try {
datecompare = convertToDate( realCompareValue, realMask, df );
} catch ( Exception e ) {
logError( e.getMessage() );
return result;
}
success = ( !datevalue.equals( datecompare ) );
break;
case SUCCESS_NUMBER_CONDITION_SMALLER: // smaller
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
try {
datecompare = convertToDate( realCompareValue, realMask, df );
} catch ( Exception e ) {
logError( e.getMessage() );
return result;
}
success = ( datevalue.before( datecompare ) );
break;
case SUCCESS_NUMBER_CONDITION_SMALLER_EQUAL: // smaller or equal
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
try {
datecompare = convertToDate( realCompareValue, realMask, df );
} catch ( Exception e ) {
logError( e.getMessage() );
return result;
}
success = ( datevalue.before( datecompare ) || datevalue.equals( datecompare ) );
break;
case SUCCESS_NUMBER_CONDITION_GREATER: // greater
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
try {
datecompare = convertToDate( realCompareValue, realMask, df );
} catch ( Exception e ) {
logError( e.getMessage() );
return result;
}
success = ( datevalue.after( datecompare ) );
break;
case SUCCESS_NUMBER_CONDITION_GREATER_EQUAL: // greater or equal
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
try {
datecompare = convertToDate( realCompareValue, realMask, df );
} catch ( Exception e ) {
logError( e.getMessage() );
return result;
}
success = ( datevalue.after( datecompare ) || datevalue.equals( datecompare ) );
break;
case SUCCESS_NUMBER_CONDITION_BETWEEN: // between min and max
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValues", realMinValue, realMaxValue ) );
}
Date datemin;
try {
datemin = convertToDate( realMinValue, realMask, df );
} catch ( Exception e ) {
logError( e.getMessage() );
return result;
}
Date datemax;
try {
datemax = convertToDate( realMaxValue, realMask, df );
} catch ( Exception e ) {
logError( e.getMessage() );
return result;
}
if ( datemin.after( datemax ) || datemin.equals( datemax ) ) {
logError( BaseMessages.getString(
PKG, "JobEntrySimpleEval.Error.IncorrectDates", realMinValue, realMaxValue ) );
return result;
}
success =
( ( datevalue.after( datemin )
|| datevalue.equals( datemin ) ) && ( datevalue.before( datemax )
|| datevalue.equals( datemax ) ) );
break;
case SUCCESS_NUMBER_CONDITION_IN_LIST: // in list
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
String[] parts = realCompareValue.split( "," );
for ( int i = 0; i < parts.length && !success; i++ ) {
try {
              datecompare = convertToDate( parts[i].trim(), realMask, df );
} catch ( Exception e ) {
logError( toString(), e.getMessage() );
return result;
}
success = ( datevalue.equals( datecompare ) );
}
break;
case SUCCESS_NUMBER_CONDITION_NOT_IN_LIST: // not in list
if ( isDebug() ) {
logDebug( BaseMessages.getString(
PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
}
realCompareValue = Const.NVL( realCompareValue, "" );
parts = realCompareValue.split( "," );
success = true;
for ( int i = 0; i < parts.length && success; i++ ) {
try {
              datecompare = convertToDate( parts[i].trim(), realMask, df );
} catch ( Exception e ) {
logError( toString(), e.getMessage() );
return result;
}
success = ( !datevalue.equals( datecompare ) );
}
break;
default:
break;
}
df = null;
break;
case FIELD_TYPE_BOOLEAN:
boolean valuebool;
try {
valuebool = ValueMetaString.convertStringToBoolean( sourcevalue );
} catch ( Exception e ) {
logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.UnparsableBoolean", sourcevalue, e
.getMessage() ) );
return result;
}
switch ( successbooleancondition ) {
case SUCCESS_BOOLEAN_CONDITION_FALSE: // false
success = ( !valuebool );
break;
case SUCCESS_BOOLEAN_CONDITION_TRUE: // true
success = ( valuebool );
break;
default:
break;
}
break;
default:
break;
}
result.setResult( success );
// PDI-6943: this job entry does not set errors upon evaluation, independently of the outcome of the check
result.setNrErrors( 0 );
return result;
}
/*
* Returns variable with specifications
*/
private String getVariableWithSpec() {
String variable = getVariableName();
if ( ( !variable.contains( StringUtil.UNIX_OPEN ) && !variable.contains( StringUtil.WINDOWS_OPEN ) && !variable
.contains( StringUtil.HEX_OPEN ) )
&& ( ( !variable.contains( StringUtil.UNIX_CLOSE ) && !variable.contains( StringUtil.WINDOWS_CLOSE ) && !variable
.contains( StringUtil.HEX_CLOSE ) ) ) ) {
// Add specifications to variable
variable = StringUtil.UNIX_OPEN + variable + StringUtil.UNIX_CLOSE;
if ( isDetailed() ) {
logDetailed( BaseMessages.getString( PKG, "JobEntrySimpleEval.CheckingVariable", variable ) );
}
}
return variable;
}
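  /**
   * Parses the given string with the supplied date format, throwing a KettleException when it cannot be parsed.
   */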
private Date convertToDate( String valueString, String mask, SimpleDateFormat df ) throws KettleException {
Date datevalue = null;
try {
datevalue = df.parse( valueString );
} catch ( Exception e ) {
throw new KettleException( BaseMessages.getString(
PKG, "JobEntrySimpleEval.Error.UnparsableDate", valueString ) );
}
return datevalue;
}
public static String getValueTypeDesc( int i ) {
if ( i < 0 || i >= valueTypeDesc.length ) {
return valueTypeDesc[0];
}
return valueTypeDesc[i];
}
public static String getFieldTypeDesc( int i ) {
if ( i < 0 || i >= fieldTypeDesc.length ) {
return fieldTypeDesc[0];
}
return fieldTypeDesc[i];
}
public static String getSuccessConditionDesc( int i ) {
if ( i < 0 || i >= successConditionDesc.length ) {
return successConditionDesc[0];
}
return successConditionDesc[i];
}
public static String getSuccessNumberConditionDesc( int i ) {
if ( i < 0 || i >= successNumberConditionDesc.length ) {
return successNumberConditionDesc[0];
}
return successNumberConditionDesc[i];
}
public static String getSuccessBooleanConditionDesc( int i ) {
if ( i < 0 || i >= successBooleanConditionDesc.length ) {
return successBooleanConditionDesc[0];
}
return successBooleanConditionDesc[i];
}
public static int getValueTypeByDesc( String tt ) {
if ( tt == null ) {
return 0;
}
for ( int i = 0; i < valueTypeDesc.length; i++ ) {
if ( valueTypeDesc[i].equalsIgnoreCase( tt ) ) {
return i;
}
}
// If this fails, try to match using the code.
return getValueTypeByCode( tt );
}
public static int getFieldTypeByDesc( String tt ) {
if ( tt == null ) {
return 0;
}
for ( int i = 0; i < fieldTypeDesc.length; i++ ) {
if ( fieldTypeDesc[i].equalsIgnoreCase( tt ) ) {
return i;
}
}
// If this fails, try to match using the code.
return getFieldTypeByCode( tt );
}
public static int getSuccessConditionByDesc( String tt ) {
if ( tt == null ) {
return 0;
}
for ( int i = 0; i < successConditionDesc.length; i++ ) {
if ( successConditionDesc[i].equalsIgnoreCase( tt ) ) {
return i;
}
}
// If this fails, try to match using the code.
return getSuccessConditionByCode( tt );
}
public static int getSuccessNumberConditionByDesc( String tt ) {
if ( tt == null ) {
return 0;
}
for ( int i = 0; i < successNumberConditionDesc.length; i++ ) {
if ( successNumberConditionDesc[i].equalsIgnoreCase( tt ) ) {
return i;
}
}
// If this fails, try to match using the code.
return getSuccessNumberByCode( tt );
}
public static int getSuccessBooleanConditionByDesc( String tt ) {
if ( tt == null ) {
return 0;
}
for ( int i = 0; i < successBooleanConditionDesc.length; i++ ) {
if ( successBooleanConditionDesc[i].equalsIgnoreCase( tt ) ) {
return i;
}
}
// If this fails, try to match using the code.
return getSuccessBooleanByCode( tt );
}
public void setMinValue( String minvalue ) {
this.minvalue = minvalue;
}
public String getMinValue() {
return minvalue;
}
public void setCompareValue( String comparevalue ) {
this.comparevalue = comparevalue;
}
public String getMask() {
return mask;
}
public void setMask( String mask ) {
this.mask = mask;
}
public String getFieldName() {
return fieldname;
}
public void setFieldName( String fieldname ) {
this.fieldname = fieldname;
}
public String getVariableName() {
return variablename;
}
public void setVariableName( String variablename ) {
this.variablename = variablename;
}
public String getCompareValue() {
return comparevalue;
}
public void setMaxValue( String maxvalue ) {
this.maxvalue = maxvalue;
}
public String getMaxValue() {
return maxvalue;
}
@Override
public boolean evaluates() {
return true;
}
}
| nicoben/pentaho-kettle | engine/src/org/pentaho/di/job/entries/simpleeval/JobEntrySimpleEval.java | Java | apache-2.0 | 46,419 |
#!/usr/bin/env python
"""
@package ion.agents.platform.rsn.simulator.oms_values
@file ion/agents/platform/rsn/simulator/oms_values.py
@author Carlos Rueda
@brief Platform attribute value generators for the RSN OMS simulator.
"""
__author__ = 'Carlos Rueda'
__license__ = 'Apache 2.0'
import time
import ntplib
import math
# for reporting purposes, time begins a few seconds before the current time
_START_TIME = ntplib.system_to_ntp_time(time.time() - 30)
# maximum value array size for a single generation call
_MAX_RESULT_SIZE = 1000
# next value for generators created by _create_simple_generator
_next_value = 990000
def _create_simple_generator(gen_period):
"""
Returns a simple generator that reports incremental values every given
time period.
@param gen_period discretize the time axis by this period in secs
@retval A function to be called with parameters (from_time, to_time) where
from_time and to_time are the lower and upper limits (both
inclusive) of desired time window (NTP).
"""
def _gen(from_time, to_time):
global _next_value
if from_time < _START_TIME:
from_time = _START_TIME
# t: initial abscissa coordinate within the time window
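        # (start a couple of periods before the window, align to a multiple of
        # gen_period, then step forward until t falls inside the window)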
l_from_time = long(from_time - 2*gen_period)
t = float((l_from_time / gen_period) * gen_period)
while t < from_time:
t += gen_period
values = []
while t <= to_time:
val = _next_value
_next_value += 1
timestamp = t
values.append((val, timestamp))
t += gen_period
if len(values) == _MAX_RESULT_SIZE:
break
return values
return _gen
def _create_sine_generator(sine_period, gen_period, min_val, max_val):
"""
Returns a sine stream fluctuating between min_val and max_val.
@param sine_period Sine period in secs
@param gen_period discretize the time axis by this period in secs
@param min_val min value
@param max_val max value
@retval A function to be called with parameters (from_time, to_time) where
from_time and to_time are the lower and upper limits (both
inclusive) of desired time window (NTP).
"""
twopi = 2 * math.pi
def _gen(from_time, to_time):
if from_time < _START_TIME:
from_time = _START_TIME
# t: initial abscissa coordinate within the time window
l_from_time = long(from_time - 2*gen_period)
t = float((l_from_time / gen_period) * gen_period)
while t < from_time:
t += gen_period
range2 = (max_val - min_val) / 2
values = []
while t <= to_time:
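            # map sin(2*pi*t/sine_period), which lies in [-1, 1], onto [min_val, max_val]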
s = math.sin(t / sine_period * twopi)
val = s * range2 + (max_val + min_val) / 2
timestamp = t
values.append((val, timestamp))
t += gen_period
if len(values) == _MAX_RESULT_SIZE:
break
return values
return _gen
# generators per platform-ID/attribute-name:
_plat_attr_generators = {
# we used to have a couple here, but now none for the moment.
# An example would be:
# ('LJ01D', 'input_voltage'): _create_sine_generator(sine_period=30,
# gen_period=2.5,
# min_val=-500,
# max_val=+500),
}
# generators per attribute name:
_attribute_generators = {
'input_voltage':
_create_sine_generator(sine_period=30,
gen_period=2.5,
min_val=-500,
max_val=+500),
'input_bus_current':
_create_sine_generator(sine_period=50,
gen_period=5,
min_val=-300,
max_val=+300),
'MVPC_temperature':
_create_sine_generator(sine_period=20,
gen_period=4,
min_val=-200,
max_val=+200),
'MVPC_pressure_1':
_create_sine_generator(sine_period=20,
gen_period=4,
min_val=-100,
max_val=+100),
}
_default_generator = _create_simple_generator(gen_period=5)
def generate_values(platform_id, attr_id, from_time, to_time):
"""
Generates synthetic values within a given time window (both ends are
inclusive). Times are NTP.
@param platform_id Platform ID
@param attr_id Attribute ID. Only the name part is considered. See OOIION-1551.
@param from_time lower limit of desired time window
@param to_time upper limit of desired time window
"""
# get the attribute name from the given ID:
separator = attr_id.rfind('|')
attr_name = attr_id[:separator] if separator >= 0 else attr_id
# try by platform/attribute:
if (platform_id, attr_name) in _plat_attr_generators:
gen = _plat_attr_generators[(platform_id, attr_name)]
# else: try by the attribute only:
elif attr_name in _attribute_generators:
gen = _attribute_generators[attr_name]
else:
gen = _default_generator
return gen(from_time, to_time)
if __name__ == "__main__": # pragma: no cover
# do not restrict the absolute from_time for this demo program:
_START_TIME = 0
import sys
if len(sys.argv) < 5:
print("""
USAGE:
oms_values.py platform_id attr_id delta_from delta_to
Generates values in window [curr_time + delta_from, curr_time + delta_to]
Example:
oms_values.py Node1A input_voltage -35 0
""")
exit()
cur_time = ntplib.system_to_ntp_time(time.time())
platform_id = sys.argv[1]
attr_id = sys.argv[2]
delta_from = float(sys.argv[3])
delta_to = float(sys.argv[4])
from_time = cur_time + delta_from
to_time = cur_time + delta_to
values = generate_values(platform_id, attr_id, from_time, to_time)
print("Generated %d values in time window [%s, %s]:" % (
len(values), from_time, to_time))
for n, (val, t) in enumerate(values):
print("\t%2d: %5.2f -> %+4.3f" % (n, t, val))
"""
$ bin/python ion/agents/platform/rsn/simulator/oms_values.py Node1A other_attr -35 0
Generated 7 values in time window [3561992754.4, 3561992789.4]:
0: 3561992755.00 -> +990000.000
1: 3561992760.00 -> +990001.000
2: 3561992765.00 -> +990002.000
3: 3561992770.00 -> +990003.000
4: 3561992775.00 -> +990004.000
5: 3561992780.00 -> +990005.000
6: 3561992785.00 -> +990006.000
$ bin/python ion/agents/platform/rsn/simulator/oms_values.py Node1A input_voltage -35 0
Generated 7 values in time window [3561992757.86, 3561992792.86]:
0: 3561992760.00 -> -0.000
1: 3561992765.00 -> +433.013
2: 3561992770.00 -> +433.013
3: 3561992775.00 -> +0.000
4: 3561992780.00 -> -433.013
5: 3561992785.00 -> -433.013
6: 3561992790.00 -> -0.000
"""
| janeen666/mi-instrument | mi/platform/rsn/simulator/oms_values.py | Python | bsd-2-clause | 7,128 |
cask "operadriver" do
version "96.0.4664.45"
sha256 "fe712310d8577056442bf7146cde2b1db69181873ff3cb2311335b784829cac6"
url "https://github.com/operasoftware/operachromiumdriver/releases/download/v.#{version}/operadriver_mac64.zip"
name "OperaChromiumDriver"
desc "Driver for Chromium-based Opera releases"
homepage "https://github.com/operasoftware/operachromiumdriver"
livecheck do
url :url
regex(/^v?\.?(\d+(?:\.\d+)+)$/i)
end
binary "operadriver_mac64/operadriver"
end
| malob/homebrew-cask | Casks/operadriver.rb | Ruby | bsd-2-clause | 501 |
cask 'wifispoof' do
version '3.0.2'
sha256 'ee0b4e0941f20f4cd71b7f6fa4f56da695cd1d6e1c4e49daec3a460463bd9946'
# sweetpproductions.com/products was verified as official when first introduced to the cask
url "https://sweetpproductions.com/products/wifispoof#{version.major}/WiFiSpoof#{version.major}.dmg"
appcast 'https://sweetpproductions.com/products/wifispoof3/appcast.xml',
checkpoint: 'e4a7cf391172f201bbd706624b22df970cb05e7b095b05a45713744c66e3b58a'
name 'WiFiSpoof'
homepage 'https://wifispoof.com/'
auto_updates true
app 'WiFiSpoof.app'
end
| jiashuw/homebrew-cask | Casks/wifispoof.rb | Ruby | bsd-2-clause | 579 |
cask 'arduino' do
version '1.8.7'
sha256 'bc5fae3e0b54f000d335d93f2e6da66fc8549def015e3b136d34a10e171c1501'
url "https://downloads.arduino.cc/arduino-#{version}-macosx.zip"
appcast 'https://www.arduino.cc/en/Main/ReleaseNotes'
name 'Arduino'
homepage 'https://www.arduino.cc/'
app 'Arduino.app'
binary "#{appdir}/Arduino.app/Contents/Java/arduino-builder"
caveats do
depends_on_java
end
end
| bosr/homebrew-cask | Casks/arduino.rb | Ruby | bsd-2-clause | 418 |
// RUN: %clang_cc1 -fsyntax-only -verify %s
// expected-no-diagnostics
namespace DeduceVsMember {
template<typename T>
struct X {
template<typename U>
int &operator==(const U& other) const;
};
template<typename T, typename U>
float &operator==(const T&, const X<U>&);
void test(X<int> xi, X<float> xf) {
float& ir = (xi == xf);
}
}
namespace OrderWithStaticMember {
struct A {
template<class T> int g(T**, int=0) { return 0; }
template<class T> static int g(T*) { return 1; }
};
void f() {
A a;
int **p;
a.g(p);
}
}
| santoshn/softboundcets-34 | softboundcets-llvm-clang34/tools/clang/test/CXX/temp/temp.decls/temp.fct/temp.func.order/p3.cpp | C++ | bsd-3-clause | 574 |
//--------------------------------------------------------------------------
//
// Environment:
// This software is part of the EvtGen package developed jointly
// for the BaBar and CLEO collaborations. If you use all or part
// of it, please give an appropriate acknowledgement.
//
// Copyright Information: See EvtGen/COPYRIGHT
// Copyright (C) 1998 Caltech, UCSB
//
// Module: EvtGen/EvtVector3R.hh
//
// Description: Class to describe real 3 vectors
//
// Modification history:
//
// RYD Sept. 5, 1997 Module created
//
//------------------------------------------------------------------------
#ifndef EVTVECTOR3R_HH
#define EVTVECTOR3R_HH
#include <iosfwd>
class EvtVector3R {
friend EvtVector3R rotateEuler(const EvtVector3R& v,
double phi,double theta,double ksi);
inline friend EvtVector3R operator*(double c,const EvtVector3R& v2);
inline friend double operator*(const EvtVector3R& v1,const EvtVector3R& v2);
inline friend EvtVector3R operator+(const EvtVector3R& v1,const EvtVector3R& v2);
inline friend EvtVector3R operator-(const EvtVector3R& v1,const EvtVector3R& v2);
inline friend EvtVector3R operator*(const EvtVector3R& v1,double c);
inline friend EvtVector3R operator/(const EvtVector3R& v1,double c);
friend EvtVector3R cross(const EvtVector3R& v1,const EvtVector3R& v2);
public:
EvtVector3R();
EvtVector3R(double x,double y ,double z);
virtual ~EvtVector3R();
inline EvtVector3R& operator*=(const double c);
inline EvtVector3R& operator/=(const double c);
inline EvtVector3R& operator+=(const EvtVector3R& v2);
inline EvtVector3R& operator-=(const EvtVector3R& v2);
inline void set(int i,double d);
inline void set(double x,double y ,double z);
void applyRotateEuler(double phi,double theta,double ksi);
inline double get(int i) const;
friend std::ostream& operator<<(std::ostream& s,const EvtVector3R& v);
double dot(const EvtVector3R& v2);
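  // Euclidean magnitude of the vector.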
double d3mag() const;
private:
double v[3];
};
inline EvtVector3R& EvtVector3R::operator*=(const double c){
v[0]*=c;
v[1]*=c;
v[2]*=c;
return *this;
}
inline EvtVector3R& EvtVector3R::operator/=(const double c){
v[0]/=c;
v[1]/=c;
v[2]/=c;
return *this;
}
inline EvtVector3R& EvtVector3R::operator+=(const EvtVector3R& v2){
v[0]+=v2.v[0];
v[1]+=v2.v[1];
v[2]+=v2.v[2];
return *this;
}
inline EvtVector3R& EvtVector3R::operator-=(const EvtVector3R& v2){
v[0]-=v2.v[0];
v[1]-=v2.v[1];
v[2]-=v2.v[2];
return *this;
}
inline EvtVector3R operator*(double c,const EvtVector3R& v2){
return EvtVector3R(v2)*=c;
}
inline EvtVector3R operator*(const EvtVector3R& v1,double c){
return EvtVector3R(v1)*=c;
}
inline EvtVector3R operator/(const EvtVector3R& v1,double c){
return EvtVector3R(v1)/=c;
}
inline double operator*(const EvtVector3R& v1,const EvtVector3R& v2){
return v1.v[0]*v2.v[0]+v1.v[1]*v2.v[1]+v1.v[2]*v2.v[2];
}
inline EvtVector3R operator+(const EvtVector3R& v1,const EvtVector3R& v2) {
return EvtVector3R(v1)+=v2;
}
inline EvtVector3R operator-(const EvtVector3R& v1,const EvtVector3R& v2) {
return EvtVector3R(v1)-=v2;
}
inline double EvtVector3R::get(int i) const {
return v[i];
}
inline void EvtVector3R::set(int i,double d){
v[i]=d;
}
inline void EvtVector3R::set(double x,double y, double z){
v[0]=x;
v[1]=y;
v[2]=z;
}
#endif
| miranov25/AliRoot | TEvtGen/EvtGen/EvtGenBase/EvtVector3R.hh | C++ | bsd-3-clause | 3,395 |
from django.db.backends.ddl_references import Statement, Table
from django.db.models import F, Q
from django.db.models.constraints import BaseConstraint
from django.db.models.sql import Query
__all__ = ['ExclusionConstraint']
class ExclusionConstraint(BaseConstraint):
template = 'CONSTRAINT %(name)s EXCLUDE USING %(index_type)s (%(expressions)s)%(where)s'
def __init__(self, *, name, expressions, index_type=None, condition=None):
if index_type and index_type.lower() not in {'gist', 'spgist'}:
raise ValueError(
'Exclusion constraints only support GiST or SP-GiST indexes.'
)
if not expressions:
raise ValueError(
'At least one expression is required to define an exclusion '
'constraint.'
)
if not all(
isinstance(expr, (list, tuple)) and len(expr) == 2
for expr in expressions
):
raise ValueError('The expressions must be a list of 2-tuples.')
if not isinstance(condition, (type(None), Q)):
raise ValueError(
'ExclusionConstraint.condition must be a Q instance.'
)
self.expressions = expressions
self.index_type = index_type or 'GIST'
self.condition = condition
super().__init__(name=name)
def _get_expression_sql(self, compiler, connection, query):
expressions = []
for expression, operator in self.expressions:
if isinstance(expression, str):
expression = F(expression)
expression = expression.resolve_expression(query=query)
sql, params = expression.as_sql(compiler, connection)
expressions.append('%s WITH %s' % (sql % params, operator))
return expressions
def _get_condition_sql(self, compiler, schema_editor, query):
if self.condition is None:
return None
where = query.build_where(self.condition)
sql, params = where.as_sql(compiler, schema_editor.connection)
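        # Inline the quoted parameter values, since constraint DDL cannot use query placeholders.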
return sql % tuple(schema_editor.quote_value(p) for p in params)
def constraint_sql(self, model, schema_editor):
query = Query(model, alias_cols=False)
compiler = query.get_compiler(connection=schema_editor.connection)
expressions = self._get_expression_sql(compiler, schema_editor.connection, query)
condition = self._get_condition_sql(compiler, schema_editor, query)
return self.template % {
'name': schema_editor.quote_name(self.name),
'index_type': self.index_type,
'expressions': ', '.join(expressions),
'where': ' WHERE (%s)' % condition if condition else '',
}
def create_sql(self, model, schema_editor):
return Statement(
'ALTER TABLE %(table)s ADD %(constraint)s',
table=Table(model._meta.db_table, schema_editor.quote_name),
constraint=self.constraint_sql(model, schema_editor),
)
def remove_sql(self, model, schema_editor):
return schema_editor._delete_constraint_sql(
schema_editor.sql_delete_check,
model,
schema_editor.quote_name(self.name),
)
def deconstruct(self):
path, args, kwargs = super().deconstruct()
kwargs['expressions'] = self.expressions
if self.condition is not None:
kwargs['condition'] = self.condition
if self.index_type.lower() != 'gist':
kwargs['index_type'] = self.index_type
return path, args, kwargs
def __eq__(self, other):
if isinstance(other, self.__class__):
return (
self.name == other.name and
self.index_type == other.index_type and
self.expressions == other.expressions and
self.condition == other.condition
)
return super().__eq__(other)
def __repr__(self):
return '<%s: index_type=%s, expressions=%s%s>' % (
self.__class__.__qualname__,
self.index_type,
self.expressions,
'' if self.condition is None else ', condition=%s' % self.condition,
)
| kaedroho/django | django/contrib/postgres/constraints.py | Python | bsd-3-clause | 4,221 |
<?php
// Copyright 2004-present Facebook. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* An abstraction allowing the driver to manipulate the browser's window
*/
class WebDriverWindow {
protected $executor;
public function __construct($executor) {
$this->executor = $executor;
}
/**
* Get the position of the current window, relative to the upper left corner
* of the screen.
*
   * @return WebDriverPoint The current window position.
*/
public function getPosition() {
$position = $this->executor->execute(
DriverCommand::GET_WINDOW_POSITION,
array(':windowHandle' => 'current')
);
return new WebDriverPoint(
$position['x'],
$position['y']
);
}
/**
* Get the size of the current window. This will return the outer window
* dimension, not just the view port.
*
   * @return WebDriverDimension The current window size.
*/
public function getSize() {
$size = $this->executor->execute(
DriverCommand::GET_WINDOW_SIZE,
array(':windowHandle' => 'current')
);
return new WebDriverDimension(
$size['width'],
$size['height']
);
}
/**
* Maximizes the current window if it is not already maximized
*
* @return WebDriverWindow The instance.
*/
public function maximize() {
$this->executor->execute(
DriverCommand::MAXIMIZE_WINDOW,
array(':windowHandle' => 'current')
);
return $this;
}
/**
* Set the size of the current window. This will change the outer window
* dimension, not just the view port.
*
* @param WebDriverDimension $size
* @return WebDriverWindow The instance.
*/
public function setSize(WebDriverDimension $size) {
$params = array(
'width' => $size->getWidth(),
'height' => $size->getHeight(),
':windowHandle' => 'current',
);
$this->executor->execute(DriverCommand::SET_WINDOW_SIZE, $params);
return $this;
}
/**
* Set the position of the current window. This is relative to the upper left
* corner of the screen.
*
* @param WebDriverPoint $position
* @return WebDriverWindow The instance.
*/
public function setPosition(WebDriverPoint $position) {
$params = array(
'x' => $position->getX(),
'y' => $position->getY(),
':windowHandle' => 'current',
);
$this->executor->execute(DriverCommand::SET_WINDOW_POSITION, $params);
return $this;
}
/**
* Get the current browser orientation.
*
* @return string Either LANDSCAPE|PORTRAIT
*/
public function getScreenOrientation() {
return $this->executor->execute(DriverCommand::GET_SCREEN_ORIENTATION);
}
/**
   * Set the browser orientation. The orientation should be either
   * LANDSCAPE or PORTRAIT.
*
* @param string $orientation
* @return WebDriverWindow The instance.
* @throws IndexOutOfBoundsException
*/
public function setScreenOrientation($orientation) {
$orientation = strtoupper($orientation);
if (!in_array($orientation, array('PORTRAIT', 'LANDSCAPE'))) {
throw new IndexOutOfBoundsException(
"Orientation must be either PORTRAIT, or LANDSCAPE"
);
}
$this->executor->execute(
DriverCommand::SET_SCREEN_ORIENTATION,
array('orientation' => $orientation)
);
return $this;
}
}
| hamidgoharjoo/test | vendor/facebook/webdriver/lib/WebDriverWindow.php | PHP | bsd-3-clause | 3,841 |
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/web_modal/modal_dialog_host.h"
namespace web_modal {
ModalDialogHostObserver::~ModalDialogHostObserver() {
}
ModalDialogHost::~ModalDialogHost() {
}
bool ModalDialogHost::ShouldActivateDialog() const {
return true;
}
} // namespace web_modal
| scheib/chromium | components/web_modal/modal_dialog_host.cc | C++ | bsd-3-clause | 437 |
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Copyright 2007 Google Inc. All Rights Reserved.
/**
* @fileoverview A color palette with a button for adding additional colors
* manually.
*
*/
goog.provide('goog.ui.CustomColorPalette');
goog.require('goog.color');
goog.require('goog.dom');
goog.require('goog.ui.ColorPalette');
/**
* A custom color palette is a grid of color swatches and a button that allows
* the user to add additional colors to the palette
*
* @param {Array.<string>} initColors Array of initial colors to populate the
* palette with.
* @param {goog.ui.PaletteRenderer} opt_renderer Renderer used to render or
* decorate the palette; defaults to {@link goog.ui.PaletteRenderer}.
* @param {goog.dom.DomHelper} opt_domHelper Optional DOM helper, used for
* document interaction.
* @constructor
* @extends {goog.ui.ColorPalette}
*/
goog.ui.CustomColorPalette = function(initColors, opt_renderer, opt_domHelper) {
goog.ui.ColorPalette.call(this, initColors, opt_renderer, opt_domHelper);
this.setSupportedState(goog.ui.Component.State.OPENED, true);
};
goog.inherits(goog.ui.CustomColorPalette, goog.ui.ColorPalette);
/**
* Returns an array of DOM nodes for each color, and an additional cell with a
* '+'.
* @return {Array.<Node>} Array of div elements.
* @private
*/
goog.ui.CustomColorPalette.prototype.createColorNodes_ = function() {
/** @desc Hover caption for the button that allows the user to add a color. */
var MSG_CLOSURE_CUSTOM_COLOR_BUTTON = goog.getMsg('Add a color');
var nl = goog.ui.CustomColorPalette.superClass_.createColorNodes_.call(this);
nl.push(goog.dom.createDom('div', {
'class': goog.getCssName('goog-palette-customcolor'),
'title': MSG_CLOSURE_CUSTOM_COLOR_BUTTON
}, '+'));
return nl;
};
/**
* @inheritDoc
* @param {goog.events.Event} e Mouse or key event that triggered the action.
* @return {boolean} True if the action was allowed to proceed, false otherwise.
*/
goog.ui.CustomColorPalette.prototype.performActionInternal = function(e) {
var item = /** @type {Element} */ (this.getHighlightedItem());
if (item) {
if (goog.dom.classes.has(
item, goog.getCssName('goog-palette-customcolor'))) {
// User activated the special "add custom color" swatch.
this.promptForCustomColor();
} else {
// User activated a normal color swatch.
this.setSelectedItem(item);
return this.dispatchEvent(goog.ui.Component.EventType.ACTION);
}
}
return false;
};
/**
* Prompts the user to enter a custom color. Currently uses a window.prompt
* but could be updated to use a dialog box with a WheelColorPalette.
*/
goog.ui.CustomColorPalette.prototype.promptForCustomColor = function() {
/** @desc Default custom color dialog. */
var MSG_CLOSURE_CUSTOM_COLOR_PROMPT = goog.getMsg(
'Input custom color, i.e. pink, #F00, #D015FF or rgb(100, 50, 25)');
// A CustomColorPalette is considered "open" while the color selection prompt
// is open. Enabling state transition events for the OPENED state and
// listening for OPEN events allows clients to save the selection before
// it is destroyed (see e.g. bug 1064701).
var response = null;
this.setOpen(true);
if (this.isOpen()) {
// The OPEN event wasn't canceled; prompt for custom color.
response = window.prompt(MSG_CLOSURE_CUSTOM_COLOR_PROMPT, '#FFFFFF');
this.setOpen(false);
}
if (!response) {
// The user hit cancel
return;
}
var color;
/** @preserveTry */
try {
color = goog.color.parse(response).hex;
} catch (er) {
/** @desc Alert message sent when the input string is not a valid color. */
var MSG_CLOSURE_CUSTOM_COLOR_INVALID_INPUT = goog.getMsg(
'ERROR: "{$color}" is not a valid color.', {'color': response});
alert(MSG_CLOSURE_CUSTOM_COLOR_INVALID_INPUT);
return;
}
// TODO: This is relatively inefficient. Consider adding
// functionality to palette to add individual items after render time.
var colors = this.getColors();
  colors.push(color);
this.setColors(colors);
// Set the selected color to the new color and notify listeners of the action.
this.setSelectedColor(color);
this.dispatchEvent(goog.ui.Component.EventType.ACTION);
};
| yesudeep/puppy | tools/google-closure-library/closure/goog/ui/customcolorpalette.js | JavaScript | mit | 4,792 |
/**
* Utility to register editors and common namespace for keeping reference to all editor classes
*/
import Handsontable from './browser';
import {toUpperCaseFirst} from './helpers/string';
export {registerEditor, getEditor, hasEditor, getEditorConstructor};
var
registeredEditorNames = {},
registeredEditorClasses = new WeakMap();
// support for older versions of Handsontable
Handsontable.editors = Handsontable.editors || {};
Handsontable.editors.registerEditor = registerEditor;
Handsontable.editors.getEditor = getEditor;
function RegisteredEditor(editorClass) {
var Clazz, instances;
instances = {};
Clazz = editorClass;
this.getConstructor = function() {
return editorClass;
};
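  // Returns a cached editor instance for the given Handsontable instance,
  // creating it on first use (instances are keyed by the hot instance guid).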
this.getInstance = function(hotInstance) {
if (!(hotInstance.guid in instances)) {
instances[hotInstance.guid] = new Clazz(hotInstance);
}
return instances[hotInstance.guid];
};
}
/**
* Registers editor under given name
* @param {String} editorName
* @param {Function} editorClass
*/
function registerEditor(editorName, editorClass) {
var editor = new RegisteredEditor(editorClass);
if (typeof editorName === 'string') {
registeredEditorNames[editorName] = editor;
Handsontable.editors[toUpperCaseFirst(editorName) + 'Editor'] = editorClass;
}
registeredEditorClasses.set(editorClass, editor);
}
/**
* Returns instance (singleton) of editor class
*
* @param {String} editorName
* @param {Object} hotInstance
* @returns {Function} editorClass
*/
function getEditor(editorName, hotInstance) {
var editor;
if (typeof editorName == 'function') {
if (!(registeredEditorClasses.get(editorName))) {
registerEditor(null, editorName);
}
editor = registeredEditorClasses.get(editorName);
} else if (typeof editorName == 'string') {
editor = registeredEditorNames[editorName];
} else {
throw Error('Only strings and functions can be passed as "editor" parameter ');
}
if (!editor) {
throw Error('No editor registered under name "' + editorName + '"');
}
return editor.getInstance(hotInstance);
}
/**
* Get editor constructor class
*
* @param {String} editorName
* @returns {Function}
*/
function getEditorConstructor(editorName) {
var editor;
if (typeof editorName == 'string') {
editor = registeredEditorNames[editorName];
} else {
throw Error('Only strings and functions can be passed as "editor" parameter ');
}
if (!editor) {
throw Error('No editor registered under name "' + editorName + '"');
}
return editor.getConstructor();
}
/**
* @param editorName
* @returns {Boolean}
*/
function hasEditor(editorName) {
return registeredEditorNames[editorName] ? true : false;
}
| Growmies/handsontable | src/editors.js | JavaScript | mit | 2,723 |
"use strict";
var index_1 = require("../../models/types/index");
function createReferenceType(context, symbol, includeParent) {
var checker = context.checker;
var id = context.getSymbolID(symbol);
var name = checker.symbolToString(symbol);
if (includeParent && symbol.parent) {
name = checker.symbolToString(symbol.parent) + '.' + name;
}
return new index_1.ReferenceType(name, id);
}
exports.createReferenceType = createReferenceType;
//# sourceMappingURL=reference.js.map | glamb/TCMS-Frontend | node_modules/typedoc/lib/converter/factories/reference.js | JavaScript | mit | 505 |
<?php
/*
* This file is part of the Sylius package.
*
* (c) Paweł Jędrzejewski
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
declare(strict_types=1);
namespace Sylius\Behat\Service;
use Sylius\Behat\Service\Setter\CookieSetterInterface;
use Sylius\Component\User\Model\UserInterface;
use Symfony\Component\HttpFoundation\Session\SessionInterface;
use Symfony\Component\Security\Core\Authentication\Token\TokenInterface;
use Symfony\Component\Security\Core\Authentication\Token\UsernamePasswordToken;
use Symfony\Component\Security\Core\Exception\TokenNotFoundException;
/**
* @author Arkadiusz Krakowiak <arkadiusz.krakowiak@lakion.com>
* @author Kamil Kokot <kamil@kokot.me>
*/
final class SecurityService implements SecurityServiceInterface
{
/**
* @var SessionInterface
*/
private $session;
/**
* @var CookieSetterInterface
*/
private $cookieSetter;
/**
* @var string
*/
private $sessionTokenVariable;
/**
* @param SessionInterface $session
* @param CookieSetterInterface $cookieSetter
* @param string $firewallContextName
*/
public function __construct(SessionInterface $session, CookieSetterInterface $cookieSetter, $firewallContextName)
{
$this->session = $session;
$this->cookieSetter = $cookieSetter;
$this->sessionTokenVariable = sprintf('_security_%s', $firewallContextName);
}
/**
* {@inheritdoc}
*/
public function logIn(UserInterface $user)
{
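        // The third constructor argument is the provider key; it only needs to be a
        // non-empty string here, and passing the user roles marks the token as authenticated.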
$token = new UsernamePasswordToken($user, $user->getPassword(), 'randomstringbutnotnull', $user->getRoles());
$this->setToken($token);
}
public function logOut()
{
$this->session->set($this->sessionTokenVariable, null);
$this->session->save();
$this->cookieSetter->setCookie($this->session->getName(), $this->session->getId());
}
/**
* {@inheritdoc}
*/
public function getCurrentToken()
{
$serializedToken = $this->session->get($this->sessionTokenVariable);
if (null === $serializedToken) {
throw new TokenNotFoundException();
}
return unserialize($serializedToken);
}
/**
* {@inheritdoc}
*/
public function restoreToken(TokenInterface $token)
{
$this->setToken($token);
}
/**
* @param TokenInterface $token
*/
private function setToken(TokenInterface $token)
{
$serializedToken = serialize($token);
$this->session->set($this->sessionTokenVariable, $serializedToken);
$this->session->save();
$this->cookieSetter->setCookie($this->session->getName(), $this->session->getId());
}
}
| rainlike/justshop | vendor/sylius/sylius/src/Sylius/Behat/Service/SecurityService.php | PHP | mit | 2,802 |