package org.openas2.app;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.filefilter.PrefixFileFilter;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.openas2.ComponentNotFoundException;
import org.openas2.TestPartner;
import org.openas2.TestResource;
import org.openas2.partner.Partnership;
import org.openas2.partner.PartnershipFactory;
import org.openas2.util.DateUtil;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.openas2.TestUtils.waitForFile;
public class OpenAS2ServerTest {
private static final TestResource RESOURCE = TestResource.forClass(OpenAS2ServerTest.class);
private static TestPartner partnerA;
private static TestPartner partnerB;
private static OpenAS2Server serverA;
private static OpenAS2Server serverB;
private final int msgCnt = 2;
private static ExecutorService executorService;
@Rule
public TemporaryFolder tmp = new TemporaryFolder();
@BeforeClass
public static void startServers() throws Exception {
//System.setProperty("org.openas2.logging.defaultlog", "TRACE");
System.setProperty("org.apache.commons.logging.Log", "org.openas2.logging.Log");
try {
partnerA = new TestPartner("OpenAS2A");
partnerB = new TestPartner("OpenAS2B");
serverA = new OpenAS2Server.Builder().run(RESOURCE.get(partnerA.getName(), "config", "config.xml").getAbsolutePath());
partnerA.setServer(serverA);
serverB = new OpenAS2Server.Builder().run(RESOURCE.get(partnerB.getName(), "config", "config.xml").getAbsolutePath());
partnerB.setServer(serverB);
enhancePartners();
executorService = Executors.newFixedThreadPool(20);
} catch (FileNotFoundException e) {
System.err.println("Failed to retrieve resource for test: " + e.getMessage());
e.printStackTrace();
throw new Exception(e);
} catch (Throwable e) {
// Surface any other startup failure so the test class fails fast.
e.printStackTrace();
throw new Exception(e);
}
}
@Test
public void shouldSendMessages() throws Exception {
try {
int amountOfMessages = msgCnt;
List<Callable<TestMessage>> callers = new ArrayList<Callable<TestMessage>>(amountOfMessages);
// prepare messages
for (int i = 0; i < amountOfMessages; i++) {
callers.add(new Callable<TestMessage>() {
@Override
public TestMessage call() throws Exception {
return sendMessage(partnerA, partnerB);
}
});
}
// send and verify all messages in parallel
for (Future<TestMessage> result : executorService.invokeAll(callers)) {
verifyMessageDelivery(result.get());
}
} catch (Throwable e) {
// Aid debugging JUnit test failures
System.out.println("ERROR OCCURRED: " + ExceptionUtils.getStackTrace(e));
throw new Exception(e);
}
}
@Test
public void shouldSendMessagesAsync() throws Exception {
int amountOfMessages = msgCnt;
List<Callable<TestMessage>> callers = new ArrayList<Callable<TestMessage>>(amountOfMessages);
// prepare messages
for (int i = 0; i < amountOfMessages; i++) {
callers.add(new Callable<TestMessage>() {
@Override
public TestMessage call() throws Exception {
return sendMessage(partnerB, partnerA);
}
});
}
// send and verify all messages in parallel
for (Future<TestMessage> result : executorService.invokeAll(callers)) {
verifyMessageDelivery(result.get());
}
}
@AfterClass
public static void tearDown() throws Exception {
//executorService.awaitTermination(100, TimeUnit.SECONDS);
executorService.shutdown();
partnerA.getServer().shutdown();
partnerB.getServer().shutdown();
}
private TestMessage sendMessage(TestPartner fromPartner, TestPartner toPartner) throws IOException {
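// Write a file with a random name and body, then drop it into the sending partner's outbox so the server picks it up for AS2 transfer.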
String outgoingMsgFileName = RandomStringUtils.randomAlphanumeric(10) + ".txt";
String outgoingMsgBody = RandomStringUtils.randomAlphanumeric(1024);
File outgoingMsg = tmp.newFile(outgoingMsgFileName);
FileUtils.write(outgoingMsg, outgoingMsgBody, "UTF-8");
FileUtils.copyFileToDirectory(outgoingMsg, fromPartner.getOutbox());
return new TestMessage(outgoingMsgFileName, outgoingMsgBody, fromPartner, toPartner);
}
private void verifyMessageDelivery(TestMessage testMessage) throws IOException {
// wait till delivery occurs
File deliveredMsg = waitForFile(testMessage.toPartner.getInbox(), new PrefixFileFilter(testMessage.fileName), 20, TimeUnit.SECONDS);
{
String deliveredMsgBody = FileUtils.readFileToString(deliveredMsg, "UTF-8");
assertThat("Verify content of delivered message", deliveredMsgBody, is(testMessage.body));
}
{
File deliveryConfirmationMDN = waitForFile(testMessage.fromPartner.getRxdMDNs(), new PrefixFileFilter(testMessage.fileName), 10, TimeUnit.SECONDS);
assertThat("Verify MDN was received by " + testMessage.fromPartner.getName(), deliveryConfirmationMDN.exists(), is(true));
}
{
File deliveryConfirmationMDN = waitForFile(testMessage.toPartner.getSentMDNs(), new PrefixFileFilter(testMessage.fileName), 10, TimeUnit.SECONDS);
assertThat("Verify MDN was stored by " + testMessage.toPartner.getName(), deliveryConfirmationMDN.exists(), is(true));
}
}
/**
* Add additional attributes to partner objects.
*
* @throws ComponentNotFoundException
* @throws FileNotFoundException
*/
// TODO: Should try to extract more of them from config to help make tests less brittle
private static void enhancePartners() throws ComponentNotFoundException, FileNotFoundException {
PartnershipFactory pf = serverA.getSession().getPartnershipFactory();
Map<String, Object> partners = pf.getPartners();
for (Map.Entry<String, Object> pair : partners.entrySet()) {
if (pair.getKey().equals(partnerB.getName())) {
Map<String, String> partner = (Map<String, String>) pair.getValue();
partnerB.setAs2Id(partner.get(Partnership.PID_AS2));
} else if (pair.getKey().equals(partnerA.getName())) {
Map<String, String> partner = (Map<String, String>) pair.getValue();
partnerA.setAs2Id(partner.get(Partnership.PID_AS2));
}
}
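// The partnership data directories below follow the "<senderAS2Id>-<receiverAS2Id>" naming convention.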
String partnershipFolderAtoB = partnerA.getAs2Id() + "-" + partnerB.getAs2Id();
String partnershipFolderBtoA = partnerB.getAs2Id() + "-" + partnerA.getAs2Id();
partnerA.setHome(RESOURCE.get(partnerA.getName()));
partnerA.setOutbox(FileUtils.getFile(partnerA.getHome(), "data", "to" + partnerB.getName()));
partnerA.setInbox(FileUtils.getFile(partnerA.getHome(), "data", partnershipFolderBtoA, "inbox"));
partnerA.setSentMDNs(FileUtils.getFile(partnerA.getHome(), "data", partnershipFolderBtoA, "mdn", DateUtil.formatDate("yyyy-MM-dd")));
partnerA.setRxdMDNs(FileUtils.getFile(partnerA.getHome(), "data", partnershipFolderAtoB, "mdn", DateUtil.formatDate("yyyy-MM-dd")));
partnerB.setHome(RESOURCE.get(partnerB.getName()));
partnerB.setOutbox(FileUtils.getFile(partnerB.getHome(), "data", "to" + partnerA.getName()));
partnerB.setInbox(FileUtils.getFile(partnerB.getHome(), "data", partnershipFolderAtoB, "inbox"));
partnerB.setSentMDNs(FileUtils.getFile(partnerB.getHome(), "data", partnershipFolderAtoB, "mdn", DateUtil.formatDate("yyyy-MM-dd")));
partnerB.setRxdMDNs(FileUtils.getFile(partnerB.getHome(), "data", partnershipFolderBtoA, "mdn", DateUtil.formatDate("yyyy-MM-dd")));
}
@SuppressWarnings("unused")
private static void getPartnership() throws Exception {
// Set Partner B to request ASYNC MDN
PartnershipFactory pf = serverA.getSession().getPartnershipFactory();
Partnership p = new Partnership();
Partnership asyncPartnership = pf.getPartnership(p, false);
if (asyncPartnership != null) {
asyncPartnership.setAttribute(Partnership.PA_AS2_RECEIPT_OPTION, "http://localhost:20081");
} else {
throw new Exception("Could not set partnership to ~ASYNC mode");
}
}
private static class TestMessage {
private final String fileName;
private final String body;
private final TestPartner fromPartner, toPartner;
private TestMessage(String fileName, String body, TestPartner fromPartner, TestPartner toPartner) {
this.fileName = fileName;
this.body = body;
this.fromPartner = fromPartner;
this.toPartner = toPartner;
}
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.usecases;
import javax.jms.Connection;
import javax.jms.Destination;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.MessageListener;
import javax.jms.MessageProducer;
import javax.jms.Session;
import javax.jms.TextMessage;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.concurrent.TimeUnit;
import org.apache.activemq.ActiveMQConnection;
import org.apache.activemq.ActiveMQConnectionFactory;
import org.apache.activemq.RedeliveryPolicy;
import org.apache.activemq.broker.BrokerService;
import org.apache.activemq.util.Wait;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class NonBlockingConsumerRedeliveryTest {
private static final Logger LOG = LoggerFactory.getLogger(NonBlockingConsumerRedeliveryTest.class);
private final String destinationName = "Destination";
private final int MSG_COUNT = 100;
private BrokerService broker;
private String connectionUri;
private ActiveMQConnectionFactory connectionFactory;
@Test
public void testMessageDeleiveredWhenNonBlockingEnabled() throws Exception {
final LinkedHashSet<Message> received = new LinkedHashSet<>();
final LinkedHashSet<Message> beforeRollback = new LinkedHashSet<>();
final LinkedHashSet<Message> afterRollback = new LinkedHashSet<>();
Connection connection = connectionFactory.createConnection();
Session session = connection.createSession(true, Session.AUTO_ACKNOWLEDGE);
Destination destination = session.createQueue(destinationName);
MessageConsumer consumer = session.createConsumer(destination);
consumer.setMessageListener(new MessageListener() {
@Override
public void onMessage(Message message) {
received.add(message);
}
});
sendMessages();
session.commit();
connection.start();
assertTrue("Pre-Rollback expects to receive: " + MSG_COUNT + " messages.", Wait.waitFor(new Wait.Condition() {
@Override
public boolean isSatisified() throws Exception {
LOG.info("Consumer has received " + received.size() + " messages.");
return received.size() == MSG_COUNT;
}
}));
beforeRollback.addAll(received);
received.clear();
session.rollback();
assertTrue("Post-Rollback expects to receive: " + MSG_COUNT + " messages.", Wait.waitFor(new Wait.Condition() {
@Override
public boolean isSatisified() throws Exception {
LOG.info("Consumer has received " + received.size() + " messages since rollback.");
return received.size() == MSG_COUNT;
}
}));
afterRollback.addAll(received);
received.clear();
assertEquals(beforeRollback.size(), afterRollback.size());
assertEquals(beforeRollback, afterRollback);
session.commit();
}
@Test
public void testMessageDeleiveredInCorrectOrder() throws Exception {
final LinkedHashSet<Message> received = new LinkedHashSet<>();
final LinkedHashSet<Message> beforeRollback = new LinkedHashSet<>();
final LinkedHashSet<Message> afterRollback = new LinkedHashSet<>();
Connection connection = connectionFactory.createConnection();
Session session = connection.createSession(true, Session.AUTO_ACKNOWLEDGE);
Destination destination = session.createQueue(destinationName);
MessageConsumer consumer = session.createConsumer(destination);
consumer.setMessageListener(new MessageListener() {
@Override
public void onMessage(Message message) {
received.add(message);
}
});
sendMessages();
session.commit();
connection.start();
assertTrue("Pre-Rollback expects to receive: " + MSG_COUNT + " messages.", Wait.waitFor(new Wait.Condition() {
@Override
public boolean isSatisified() throws Exception {
LOG.info("Consumer has received " + received.size() + " messages.");
return received.size() == MSG_COUNT;
}
}));
beforeRollback.addAll(received);
received.clear();
session.rollback();
assertTrue("Post-Rollback expects to receive: " + MSG_COUNT + " messages.", Wait.waitFor(new Wait.Condition() {
@Override
public boolean isSatisified() throws Exception {
LOG.info("Consumer has received " + received.size() + " messages since rollback.");
return received.size() == MSG_COUNT;
}
}));
afterRollback.addAll(received);
received.clear();
assertEquals(beforeRollback.size(), afterRollback.size());
assertEquals(beforeRollback, afterRollback);
Iterator<Message> after = afterRollback.iterator();
Iterator<Message> before = beforeRollback.iterator();
while (before.hasNext() && after.hasNext()) {
TextMessage original = (TextMessage) before.next();
TextMessage rolledBack = (TextMessage) after.next();
int originalInt = Integer.parseInt(original.getText());
int rolledbackInt = Integer.parseInt(rolledBack.getText());
assertEquals(originalInt, rolledbackInt);
}
session.commit();
}
@Test
public void testMessageDeleiveryDoesntStop() throws Exception {
final LinkedHashSet<Message> received = new LinkedHashSet<>();
final LinkedHashSet<Message> beforeRollback = new LinkedHashSet<>();
final LinkedHashSet<Message> afterRollback = new LinkedHashSet<>();
Connection connection = connectionFactory.createConnection();
Session session = connection.createSession(true, Session.AUTO_ACKNOWLEDGE);
Destination destination = session.createQueue(destinationName);
MessageConsumer consumer = session.createConsumer(destination);
consumer.setMessageListener(new MessageListener() {
@Override
public void onMessage(Message message) {
received.add(message);
}
});
sendMessages();
connection.start();
assertTrue("Pre-Rollback expects to receive: " + MSG_COUNT + " messages.", Wait.waitFor(new Wait.Condition() {
@Override
public boolean isSatisified() throws Exception {
LOG.info("Consumer has received " + received.size() + " messages.");
return received.size() == MSG_COUNT;
}
}));
beforeRollback.addAll(received);
received.clear();
session.rollback();
sendMessages();
assertTrue("Post-Rollback expects to receive: " + MSG_COUNT + " messages.", Wait.waitFor(new Wait.Condition() {
@Override
public boolean isSatisified() throws Exception {
LOG.info("Consumer has received " + received.size() + " messages since rollback.");
return received.size() == MSG_COUNT * 2;
}
}));
afterRollback.addAll(received);
received.clear();
assertEquals(beforeRollback.size() * 2, afterRollback.size());
session.commit();
}
@Test
public void testNonBlockingMessageDeleiveryIsDelayed() throws Exception {
final LinkedHashSet<Message> received = new LinkedHashSet<>();
ActiveMQConnection connection = (ActiveMQConnection) connectionFactory.createConnection();
connection.getRedeliveryPolicy().setInitialRedeliveryDelay(TimeUnit.SECONDS.toMillis(6));
Session session = connection.createSession(true, Session.AUTO_ACKNOWLEDGE);
Destination destination = session.createQueue(destinationName);
MessageConsumer consumer = session.createConsumer(destination);
consumer.setMessageListener(new MessageListener() {
@Override
public void onMessage(Message message) {
received.add(message);
}
});
sendMessages();
connection.start();
assertTrue("Pre-Rollback expects to receive: " + MSG_COUNT + " messages.", Wait.waitFor(new Wait.Condition() {
@Override
public boolean isSatisified() throws Exception {
LOG.info("Consumer has received " + received.size() + " messages.");
return received.size() == MSG_COUNT;
}
}));
received.clear();
session.rollback();
assertFalse("Delayed redelivery test not expecting any messages yet.", Wait.waitFor(new Wait.Condition() {
@Override
public boolean isSatisified() throws Exception {
return received.size() > 0;
}
}, TimeUnit.SECONDS.toMillis(4)));
session.commit();
session.close();
}
@Test
public void testNonBlockingMessageDeleiveryWithRollbacks() throws Exception {
final LinkedHashSet<Message> received = new LinkedHashSet<>();
ActiveMQConnection connection = (ActiveMQConnection) connectionFactory.createConnection();
final Session session = connection.createSession(true, Session.AUTO_ACKNOWLEDGE);
final Destination destination = session.createQueue(destinationName);
final MessageConsumer consumer = session.createConsumer(destination);
consumer.setMessageListener(new MessageListener() {
@Override
public void onMessage(Message message) {
received.add(message);
}
});
sendMessages();
connection.start();
assertTrue("Pre-Rollback expects to receive: " + MSG_COUNT + " messages.", Wait.waitFor(new Wait.Condition() {
@Override
public boolean isSatisified() throws Exception {
LOG.info("Consumer has received " + received.size() + " messages.");
return received.size() == MSG_COUNT;
}
}));
received.clear();
consumer.setMessageListener(new MessageListener() {
int count = 0;
@Override
public void onMessage(Message message) {
if (++count > 10) {
try {
session.rollback();
LOG.info("Rolling back session.");
count = 0;
} catch (JMSException e) {
LOG.warn("Caught an unexpected exception: " + e.getMessage());
}
} else {
received.add(message);
try {
session.commit();
} catch (JMSException e) {
LOG.warn("Caught an unexpected exception: " + e.getMessage());
}
}
}
});
session.rollback();
assertTrue("Post-Rollback expects to receive: " + MSG_COUNT + " messages.", Wait.waitFor(new Wait.Condition() {
@Override
public boolean isSatisified() throws Exception {
LOG.info("Consumer has received " + received.size() + " messages since rollback.");
return received.size() == MSG_COUNT;
}
}));
assertEquals(MSG_COUNT, received.size());
session.commit();
}
@Test
public void testNonBlockingMessageDeleiveryWithAllRolledBack() throws Exception {
final LinkedHashSet<Message> received = new LinkedHashSet<>();
final LinkedHashSet<Message> dlqed = new LinkedHashSet<>();
ActiveMQConnection connection = (ActiveMQConnection) connectionFactory.createConnection();
connection.getRedeliveryPolicy().setMaximumRedeliveries(5);
final Session session = connection.createSession(true, Session.AUTO_ACKNOWLEDGE);
final Destination destination = session.createQueue(destinationName);
final Destination dlq = session.createQueue("ActiveMQ.DLQ");
final MessageConsumer consumer = session.createConsumer(destination);
final MessageConsumer dlqConsumer = session.createConsumer(dlq);
dlqConsumer.setMessageListener(new MessageListener() {
@Override
public void onMessage(Message message) {
dlqed.add(message);
}
});
consumer.setMessageListener(new MessageListener() {
@Override
public void onMessage(Message message) {
received.add(message);
}
});
sendMessages();
connection.start();
assertTrue("Pre-Rollback expects to receive: " + MSG_COUNT + " messages.", Wait.waitFor(new Wait.Condition() {
@Override
public boolean isSatisified() throws Exception {
LOG.info("Consumer has received " + received.size() + " messages.");
return received.size() == MSG_COUNT;
}
}));
session.rollback();
consumer.setMessageListener(new MessageListener() {
@Override
public void onMessage(Message message) {
try {
session.rollback();
} catch (JMSException e) {
LOG.warn("Caught an unexpected exception: " + e.getMessage());
}
}
});
assertTrue("Post-Rollback expects to DLQ: " + MSG_COUNT + " messages.", Wait.waitFor(new Wait.Condition() {
@Override
public boolean isSatisified() throws Exception {
LOG.info("Consumer has received " + dlqed.size() + " messages in DLQ.");
return dlqed.size() == MSG_COUNT;
}
}));
session.commit();
}
private void sendMessages() throws Exception {
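// Publish MSG_COUNT text messages whose bodies are the sequence numbers "0", "1", ... on a separate, non-transacted session.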
Connection connection = connectionFactory.createConnection();
Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
Destination destination = session.createQueue(destinationName);
MessageProducer producer = session.createProducer(destination);
for (int i = 0; i < MSG_COUNT; ++i) {
producer.send(session.createTextMessage("" + i));
}
}
@Before
public void startBroker() throws Exception {
broker = new BrokerService();
broker.setDeleteAllMessagesOnStartup(true);
broker.setPersistent(false);
broker.setUseJmx(false);
broker.addConnector("tcp://0.0.0.0:0");
broker.start();
broker.waitUntilStarted();
connectionUri = broker.getTransportConnectors().get(0).getPublishableConnectString();
connectionFactory = new ActiveMQConnectionFactory(connectionUri);
connectionFactory.setNonBlockingRedelivery(true);
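// Redeliver rolled-back messages after a fixed 2 second delay, with no exponential back-off and no cap on redelivery attempts (-1 means unlimited).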
RedeliveryPolicy policy = connectionFactory.getRedeliveryPolicy();
policy.setInitialRedeliveryDelay(TimeUnit.SECONDS.toMillis(2));
policy.setBackOffMultiplier(-1);
policy.setRedeliveryDelay(TimeUnit.SECONDS.toMillis(2));
policy.setMaximumRedeliveryDelay(-1);
policy.setUseExponentialBackOff(false);
policy.setMaximumRedeliveries(-1);
}
@After
public void stopBroker() throws Exception {
broker.stop();
broker.waitUntilStopped();
}
}
// -*- mode: java; c-basic-offset: 2; -*-
// Copyright 2009-2011 Google, All Rights reserved
// Copyright 2011-2012 MIT, All rights reserved
// Released under the MIT License https://raw.github.com/mit-cml/app-inventor/master/mitlicense.txt
package com.google.appinventor.components.runtime;
import com.google.appinventor.components.annotations.DesignerComponent;
import com.google.appinventor.components.annotations.DesignerProperty;
import com.google.appinventor.components.annotations.PropertyCategory;
import com.google.appinventor.components.annotations.SimpleEvent;
import com.google.appinventor.components.annotations.SimpleFunction;
import com.google.appinventor.components.annotations.SimpleObject;
import com.google.appinventor.components.annotations.SimpleProperty;
import com.google.appinventor.components.annotations.UsesPermissions;
import com.google.appinventor.components.common.ComponentCategory;
import com.google.appinventor.components.common.PropertyTypeConstants;
import com.google.appinventor.components.common.YaVersion;
import com.google.appinventor.components.runtime.util.ErrorMessages;
import android.content.Context;
import android.location.Address;
import android.location.Criteria;
import android.location.Geocoder;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.location.LocationProvider;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* Sensor that can provide information on longitude, latitude, and altitude.
*
*/
@DesignerComponent(version = YaVersion.LOCATIONSENSOR_COMPONENT_VERSION,
description = "Non-visible component providing location information, " +
"including longitude, latitude, altitude (if supported by the device), " +
"and address. This can also perform \"geocoding\", converting a given " +
"address (not necessarily the current one) to a latitude (with the " +
"<code>LatitudeFromAddress</code> method) and a longitude (with the " +
"<code>LongitudeFromAddress</code> method).</p>\n" +
"<p>In order to function, the component must have its " +
"<code>Enabled</code> property set to True, and the device must have " +
"location sensing enabled through either wireless networks or GPS " +
"satellites (if outside).",
category = ComponentCategory.SENSORS,
nonVisible = true,
iconName = "images/locationSensor.png")
@SimpleObject
@UsesPermissions(permissionNames =
"android.permission.ACCESS_FINE_LOCATION," +
"android.permission.ACCESS_COARSE_LOCATION," +
"android.permission.ACCESS_MOCK_LOCATION," +
"android.permission.ACCESS_LOCATION_EXTRA_COMMANDS")
public class LocationSensor extends AndroidNonvisibleComponent
implements Component, OnStopListener, OnResumeListener, Deleteable {
/**
* Class that listens for changes in location, raises appropriate events,
* and provides properties.
*
*/
private class MyLocationListener implements LocationListener {
@Override
// This sets fields longitude, latitude, altitude, hasLocationData, and
// hasAltitude, then calls LocationSensor.LocationChanged(), all in the
// enclosing class LocationSensor.
public void onLocationChanged(Location location) {
lastLocation = location;
longitude = location.getLongitude();
latitude = location.getLatitude();
// If the current location doesn't have altitude information, the prior
// altitude reading is retained.
if (location.hasAltitude()) {
hasAltitude = true;
altitude = location.getAltitude();
}
hasLocationData = true;
LocationChanged(latitude, longitude, altitude);
}
@Override
public void onProviderDisabled(String provider) {
StatusChanged(provider, "Disabled");
stopListening();
if (enabled) {
RefreshProvider();
}
}
@Override
public void onProviderEnabled(String provider) {
StatusChanged(provider, "Enabled");
RefreshProvider();
}
@Override
public void onStatusChanged(String provider, int status, Bundle extras) {
switch (status) {
// Ignore TEMPORARILY_UNAVAILABLE, because service usually returns quickly.
case LocationProvider.TEMPORARILY_UNAVAILABLE:
StatusChanged(provider, "TEMPORARILY_UNAVAILABLE");
break;
case LocationProvider.OUT_OF_SERVICE:
// If the provider we were listening to is no longer available,
// find another.
StatusChanged(provider, "OUT_OF_SERVICE");
if (provider.equals(providerName)) {
stopListening();
RefreshProvider();
}
break;
case LocationProvider.AVAILABLE:
// If another provider becomes available and is one we hadn't known
// about, see if it is better than the one we're currently using.
StatusChanged(provider, "AVAILABLE");
if (!provider.equals(providerName) &&
!allProviders.contains(provider)) {
RefreshProvider();
}
break;
}
}
}
/**
* Constant returned by {@link #Longitude()}, {@link #Latitude()}, and
* {@link #Altitude()} if no value could be obtained for them. The client
* can find this out directly by calling {@link #HasLongitudeLatitude()} or
* {@link #HasAltitude()}.
*/
public static final int UNKNOWN_VALUE = 0;
// These variables contain information related to the LocationProvider.
private final Criteria locationCriteria;
private final Handler handler;
private final LocationManager locationManager;
private boolean providerLocked = false; // if true we can't change providerName
private String providerName;
// Invariant: providerLocked => providerName is non-empty
private int timeInterval;
private int distanceInterval;
private MyLocationListener myLocationListener;
private LocationProvider locationProvider;
private boolean listening = false;
// Invariant: listening <=> a myLocationListener is registered with locationManager
// Invariant: !listening <=> locationProvider == null
//This holds all the providers available when we last chose providerName.
//The reported best provider is first, possibly duplicated.
private List<String> allProviders;
// These location-related values are set in MyLocationListener.onLocationChanged().
private Location lastLocation;
private double longitude = UNKNOWN_VALUE;
private double latitude = UNKNOWN_VALUE;
private double altitude = UNKNOWN_VALUE;
private boolean hasLocationData = false;
private boolean hasAltitude = false;
// This is used in reverse geocoding.
private Geocoder geocoder;
// User-settable properties
private boolean enabled = true; // the default value is true
/**
* Creates a new LocationSensor component.
*
* @param container ignored (because this is a non-visible component)
*/
public LocationSensor(ComponentContainer container) {
super(container.$form());
handler = new Handler();
// Set up listener
form.registerForOnResume(this);
form.registerForOnStop(this);
// Initialize sensor properties (60 seconds; 5 meters)
timeInterval = 60000;
distanceInterval = 5;
// Initialize location-related fields
Context context = container.$context();
geocoder = new Geocoder(context);
locationManager = (LocationManager) context.getSystemService(Context.LOCATION_SERVICE);
locationCriteria = new Criteria();
myLocationListener = new MyLocationListener();
allProviders = new ArrayList<String>();
}
// Events
/**
* Indicates that a new location has been detected.
*/
@SimpleEvent
public void LocationChanged(double latitude, double longitude, double altitude) {
if (enabled) {
EventDispatcher.dispatchEvent(this, "LocationChanged", latitude, longitude, altitude);
}
}
/**
* Indicates that the status of the provider has changed.
*/
@SimpleEvent
public void StatusChanged(String provider, String status) {
if (enabled) {
EventDispatcher.dispatchEvent(this, "StatusChanged", provider, status);
}
}
// Properties
/**
* Indicates the source of the location information. If there is no provider, the
* string "NO PROVIDER" is returned. This is useful primarily for debugging.
*/
@SimpleProperty(category = PropertyCategory.BEHAVIOR)
public String ProviderName() {
if (providerName == null) {
return "NO PROVIDER";
} else {
return providerName;
}
}
/**
* Change the location provider.
* If the blocks program changes the name, try to change the provider.
* Whatever happens now, the provider and the reported name may be switched to
* Android's preferred provider later. This is primarily for debugging.
*/
@SimpleProperty
public void ProviderName(String providerName) {
this.providerName = providerName;
if (!empty(providerName) && startProvider(providerName)) {
return;
} else {
RefreshProvider();
}
}
@SimpleProperty(category = PropertyCategory.BEHAVIOR)
public boolean ProviderLocked() {
return providerLocked;
}
/**
* Indicates whether the sensor should allow the developer to
* manually change the provider (GPS, GSM, Wifi, etc.)
* from which location updates are received.
*/
@SimpleProperty
public void ProviderLocked(boolean lock) {
providerLocked = lock;
}
@DesignerProperty(editorType = PropertyTypeConstants.PROPERTY_TYPE_SENSOR_TIME_INTERVAL,
defaultValue = "60000")
@SimpleProperty
public void TimeInterval(int interval) {
// make sure that the provided value is a valid one.
// choose 1000000 milliseconds to be the upper limit
if (interval < 0 || interval > 1000000)
return;
timeInterval = interval;
// restart listening for location updates, using the new time interval
if (enabled) {
RefreshProvider();
}
}
@SimpleProperty(
description = "Determines the minimum time interval, in milliseconds, that the sensor will try " +
"to use for sending out location updates. However, location updates will only be received " +
"when the location of the phone actually changes, and use of the specified time interval " +
"is not guaranteed. For example, if 1000 is used as the time interval, location updates will " +
"never be fired sooner than 1000ms, but they may be fired anytime after.",
category = PropertyCategory.BEHAVIOR)
public int TimeInterval() {
return timeInterval;
}
@DesignerProperty(editorType = PropertyTypeConstants.PROPERTY_TYPE_SENSOR_DIST_INTERVAL,
defaultValue = "5")
@SimpleProperty
public void DistanceInterval(int interval) {
// make sure that the provided value is a valid one.
// choose 1000 meters to be the upper limit
if (interval < 0 || interval > 1000)
return;
distanceInterval = interval;
// restart listening for location updates, using the new distance interval
if (enabled) {
RefreshProvider();
}
}
@SimpleProperty(
description = "Determines the minimum distance interval, in meters, that the sensor will try " +
"to use for sending out location updates. For example, if this is set to 5, then the sensor will " +
"fire a LocationChanged event only after 5 meters have been traversed. However, the sensor does " +
"not guarantee that an update will be received at exactly the distance interval. It may take more " +
"than 5 meters to fire an event, for instance.",
category = PropertyCategory.BEHAVIOR)
public int DistanceInterval() {
return distanceInterval;
}
/**
* Indicates whether longitude and latitude information is available. (It is
* always the case that either both or neither are.)
*/
@SimpleProperty(category = PropertyCategory.BEHAVIOR)
public boolean HasLongitudeLatitude() {
return hasLocationData && enabled;
}
/**
* Indicates whether altitude information is available.
*/
@SimpleProperty(category = PropertyCategory.BEHAVIOR)
public boolean HasAltitude() {
return hasAltitude && enabled;
}
/**
* Indicates whether information about location accuracy is available.
*/
@SimpleProperty(category = PropertyCategory.BEHAVIOR)
public boolean HasAccuracy() {
return Accuracy() != UNKNOWN_VALUE && enabled;
}
/**
* The most recent available longitude value. If no value is available,
* 0 will be returned.
*/
@SimpleProperty(category = PropertyCategory.BEHAVIOR)
public double Longitude() {
return longitude;
}
/**
* The most recently available latitude value. If no value is available,
* 0 will be returned.
*/
@SimpleProperty(category = PropertyCategory.BEHAVIOR)
public double Latitude() {
return latitude;
}
/**
* The most recently available altitude value, in meters. If no value is
* available, 0 will be returned.
*/
@SimpleProperty(category = PropertyCategory.BEHAVIOR)
public double Altitude() {
return altitude;
}
/**
* The most recent measure of accuracy, in meters. If no value is available,
* 0 will be returned.
*/
@SimpleProperty(category = PropertyCategory.BEHAVIOR)
public double Accuracy() {
if (lastLocation != null && lastLocation.hasAccuracy()) {
return lastLocation.getAccuracy();
} else if (locationProvider != null) {
return locationProvider.getAccuracy();
} else {
return UNKNOWN_VALUE;
}
}
/**
* Indicates whether the user has specified that the sensor should
* listen for location changes and raise the corresponding events.
*/
@SimpleProperty(category = PropertyCategory.BEHAVIOR)
public boolean Enabled() {
return enabled;
}
/**
* Indicates whether the sensor should listen for location changes
* and raise the corresponding events.
*/
@DesignerProperty(editorType = PropertyTypeConstants.PROPERTY_TYPE_BOOLEAN,
defaultValue = "True")
@SimpleProperty
public void Enabled(boolean enabled) {
this.enabled = enabled;
if (!enabled) {
stopListening();
} else {
RefreshProvider();
}
}
/**
* Provides a textual representation of the current address or
* "No address available".
*/
@SimpleProperty(category = PropertyCategory.BEHAVIOR)
public String CurrentAddress() {
if (hasLocationData &&
latitude <= 90 && latitude >= -90 &&
longitude <= 180 && longitude >= -180) {
try {
List<Address> addresses = geocoder.getFromLocation(latitude, longitude, 1);
if (addresses != null && addresses.size() == 1) {
Address address = addresses.get(0);
if (address != null) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i <= address.getMaxAddressLineIndex(); i++) {
sb.append(address.getAddressLine(i));
sb.append("\n");
}
return sb.toString();
}
}
} catch (Exception e) {
// getFromLocation can throw an IOException or an IllegalArgumentException
// a bad result can give an indexOutOfBoundsException
// are there others?
if (e instanceof IllegalArgumentException
|| e instanceof IOException
|| e instanceof IndexOutOfBoundsException ) {
Log.e("LocationSensor", "Exception thrown by getting current address " + e.getMessage());
} else {
// what other exceptions can happen here?
Log.e("LocationSensor",
"Unexpected exception thrown by getting current address " + e.getMessage());
}
}
}
return "No address available";
}
/**
* Derives Latitude from Address
*
* @param locationName human-readable address
*
* @return latitude in degrees, 0 if not found.
*/
@SimpleFunction(description = "Derives latitude of given address")
public double LatitudeFromAddress(String locationName) {
try {
List<Address> addressObjs = geocoder.getFromLocationName(locationName, 1);
Log.i("LocationSensor", "latitude addressObjs size is " + addressObjs.size() + " for " + locationName);
if ( (addressObjs == null) || (addressObjs.size() == 0) ){
throw new IOException("");
}
return addressObjs.get(0).getLatitude();
} catch (IOException e) {
form.dispatchErrorOccurredEvent(this, "LatitudeFromAddress",
ErrorMessages.ERROR_LOCATION_SENSOR_LATITUDE_NOT_FOUND, locationName);
return 0;
}
}
/**
* Derives Longitude from Address
* @param locationName human-readable address
*
* @return longitude in degrees, 0 if not found.
*/
@SimpleFunction(description = "Derives longitude of given address")
public double LongitudeFromAddress(String locationName) {
try {
List<Address> addressObjs = geocoder.getFromLocationName(locationName, 1);
Log.i("LocationSensor", "longitude addressObjs size is " + addressObjs.size() + " for " + locationName);
if ( (addressObjs == null) || (addressObjs.size() == 0) ){
throw new IOException("");
}
return addressObjs.get(0).getLongitude();
} catch (IOException e) {
form.dispatchErrorOccurredEvent(this, "LongitudeFromAddress",
ErrorMessages.ERROR_LOCATION_SENSOR_LONGITUDE_NOT_FOUND, locationName);
return 0;
}
}
@SimpleProperty(category = PropertyCategory.BEHAVIOR)
public List<String> AvailableProviders () {
return allProviders;
}
// Methods to stop and start listening to LocationProviders
/**
* Refresh provider attempts to choose and start the best provider unless
* someone has set and locked the provider. Currently, blocks programmers
* cannot do that because the relevant methods are not declared as properties.
*
*/
// @SimpleFunction(description = "Find and start listening to a location provider.")
public void RefreshProvider() {
stopListening(); // In case another provider is active.
if (providerLocked && !empty(providerName)) {
listening = startProvider(providerName);
return;
}
allProviders = locationManager.getProviders(true); // Typically it's ("network" "gps")
String bProviderName = locationManager.getBestProvider(locationCriteria, true);
if (bProviderName != null && !bProviderName.equals(allProviders.get(0))) {
allProviders.add(0, bProviderName);
}
// We'll now try the best first and stop as soon as one successfully starts.
for (String providerN : allProviders) {
listening = startProvider(providerN);
if (listening) {
if (!providerLocked) {
providerName = providerN;
}
return;
}
}
}
/* Start listening to ProviderName.
* Return true iff successful.
*/
private boolean startProvider(String providerName) {
this.providerName = providerName;
LocationProvider tLocationProvider = locationManager.getProvider(providerName);
if (tLocationProvider == null) {
Log.d("LocationSensor", "getProvider(" + providerName + ") returned null");
return false;
}
stopListening();
locationProvider = tLocationProvider;
locationManager.requestLocationUpdates(providerName, timeInterval,
distanceInterval, myLocationListener);
listening = true;
return true;
}
/**
* This unregisters {@link #myLocationListener} as a listener to location
* updates. It is safe to call this even if no listener had been registered,
* in which case it has no effect. This also sets the value of
* {@link #locationProvider} to {@code null} and sets {@link #listening}
* to {@code false}.
*/
private void stopListening() {
if (listening) {
locationManager.removeUpdates(myLocationListener);
locationProvider = null;
listening = false;
}
}
// OnResumeListener implementation
@Override
public void onResume() {
if (enabled) {
RefreshProvider();
}
}
// OnStopListener implementation
@Override
public void onStop() {
stopListening();
}
// Deleteable implementation
@Override
public void onDelete() {
stopListening();
}
private boolean empty(String s) {
return s == null || s.length() == 0;
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.uam;
import java.io.IOException;
import java.util.EnumSet;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.api.ApplicationMasterProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest;
import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest;
import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest;
import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse;
import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.NMToken;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.client.AMRMClientUtils;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.security.AMRMTokenIdentifier;
import org.apache.hadoop.yarn.server.AMHeartbeatRequestHandler;
import org.apache.hadoop.yarn.server.AMRMClientRelayer;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.AsyncCallback;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
/**
* UnmanagedApplicationManager is used to register an unmanaged application and
* negotiate for resources from the resource manager. An unmanaged AM is an AM
* that is not launched and managed by the RM. Allocate calls are handled
* asynchronously using {@link AsyncCallback}.
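*
* <p>A rough usage sketch (illustrative only; the request construction and the
* callback body are placeholders, not part of this class, and the lambda assumes
* {@link AsyncCallback} is a single-method interface):
* <pre>{@code
* UnmanagedApplicationManager uam = new UnmanagedApplicationManager(
*     conf, appId, "default", "user", "suffix", false, "rm1");
* Token<AMRMTokenIdentifier> token = uam.launchUAM();
* uam.registerApplicationMaster(
*     RegisterApplicationMasterRequest.newInstance("host", -1, ""));
* uam.allocateAsync(allocateRequest, response -> {
*   // handle the AllocateResponse
* });
* uam.finishApplicationMaster(
*     FinishApplicationMasterRequest.newInstance(
*         FinalApplicationStatus.SUCCEEDED, "", ""));
* }</pre>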
*/
@Public
@Unstable
public class UnmanagedApplicationManager {
private static final Logger LOG =
LoggerFactory.getLogger(UnmanagedApplicationManager.class);
private static final long AM_STATE_WAIT_TIMEOUT_MS = 10000;
public static final String APP_NAME = "UnmanagedAM";
private static final String DEFAULT_QUEUE_CONFIG = "uam.default.queue.name";
private AMHeartbeatRequestHandler heartbeatHandler;
private AMRMClientRelayer rmProxyRelayer;
private ApplicationId applicationId;
private String submitter;
private String appNameSuffix;
private Configuration conf;
private String queueName;
private UserGroupInformation userUgi;
private RegisterApplicationMasterRequest registerRequest;
private ApplicationClientProtocol rmClient;
private long asyncApiPollIntervalMillis;
private RecordFactory recordFactory;
private boolean keepContainersAcrossApplicationAttempts;
/*
* This flag indicates that launchUAM/reAttachUAM has been called (and is
* perhaps still blocked in initializeUnmanagedAM below because of an RM
* connection/failover issue). The flag is set before making the blocking
* call to the RM.
*/
private boolean connectionInitiated;
/**
* Constructor.
*
* @param conf configuration
* @param appId application Id to use for this UAM
* @param queueName the queue of the UAM
* @param submitter user name of the app
* @param appNameSuffix the app name suffix to use
* @param rmName name of the YarnRM
* @param keepContainersAcrossApplicationAttempts keep container flag for UAM
* recovery. See {@link ApplicationSubmissionContext
* #setKeepContainersAcrossApplicationAttempts(boolean)}
*/
public UnmanagedApplicationManager(Configuration conf, ApplicationId appId,
String queueName, String submitter, String appNameSuffix,
boolean keepContainersAcrossApplicationAttempts, String rmName) {
Preconditions.checkNotNull(conf, "Configuration cannot be null");
Preconditions.checkNotNull(appId, "ApplicationId cannot be null");
Preconditions.checkNotNull(submitter, "App submitter cannot be null");
this.conf = conf;
this.applicationId = appId;
this.queueName = queueName;
this.submitter = submitter;
this.appNameSuffix = appNameSuffix;
this.userUgi = null;
// Relayer's rmClient will be set after the RM connection is created
this.rmProxyRelayer =
new AMRMClientRelayer(null, this.applicationId, rmName);
this.heartbeatHandler = createAMHeartbeatRequestHandler(this.conf,
this.applicationId, this.rmProxyRelayer);
this.connectionInitiated = false;
this.registerRequest = null;
this.recordFactory = RecordFactoryProvider.getRecordFactory(conf);
this.asyncApiPollIntervalMillis = conf.getLong(
YarnConfiguration.
YARN_CLIENT_APPLICATION_CLIENT_PROTOCOL_POLL_INTERVAL_MS,
YarnConfiguration.
DEFAULT_YARN_CLIENT_APPLICATION_CLIENT_PROTOCOL_POLL_INTERVAL_MS);
this.keepContainersAcrossApplicationAttempts =
keepContainersAcrossApplicationAttempts;
}
@VisibleForTesting
protected AMHeartbeatRequestHandler createAMHeartbeatRequestHandler(
Configuration config, ApplicationId appId,
AMRMClientRelayer relayer) {
return new AMHeartbeatRequestHandler(config, appId, relayer);
}
/**
* Launch a new UAM in the resource manager.
*
* @return the AMRM token identifier of the launched UAM
* @throws YarnException if fails
* @throws IOException if fails
*/
public Token<AMRMTokenIdentifier> launchUAM()
throws YarnException, IOException {
this.connectionInitiated = true;
// Blocking call to RM
Token<AMRMTokenIdentifier> amrmToken =
initializeUnmanagedAM(this.applicationId);
// Creates the UAM connection
createUAMProxy(amrmToken);
return amrmToken;
}
/**
* Re-attach to an existing UAM in the resource manager.
*
* @param amrmToken the UAM token
* @throws IOException if re-attach fails
* @throws YarnException if re-attach fails
*/
public void reAttachUAM(Token<AMRMTokenIdentifier> amrmToken)
throws IOException, YarnException {
this.connectionInitiated = true;
// Creates the UAM connection
createUAMProxy(amrmToken);
}
protected void createUAMProxy(Token<AMRMTokenIdentifier> amrmToken)
throws IOException {
this.userUgi = UserGroupInformation.createProxyUser(
this.applicationId.toString(), UserGroupInformation.getCurrentUser());
this.rmProxyRelayer.setRMClient(createRMProxy(
ApplicationMasterProtocol.class, this.conf, this.userUgi, amrmToken));
this.heartbeatHandler.setUGI(this.userUgi);
}
/**
* Registers this {@link UnmanagedApplicationManager} with the resource
* manager.
*
* @param request RegisterApplicationMasterRequest
* @return register response
* @throws YarnException if register fails
* @throws IOException if register fails
*/
public RegisterApplicationMasterResponse registerApplicationMaster(
RegisterApplicationMasterRequest request)
throws YarnException, IOException {
// Save the register request for re-register later
this.registerRequest = request;
LOG.info("Registering the Unmanaged application master {}",
this.applicationId);
RegisterApplicationMasterResponse response =
this.rmProxyRelayer.registerApplicationMaster(this.registerRequest);
this.heartbeatHandler.resetLastResponseId();
for (Container container : response.getContainersFromPreviousAttempts()) {
LOG.debug("RegisterUAM returned existing running container {}",
container.getId());
}
for (NMToken nmToken : response.getNMTokensFromPreviousAttempts()) {
LOG.debug("RegisterUAM returned existing NM token for node {}",
nmToken.getNodeId());
}
LOG.info(
"RegisterUAM returned {} existing running container and {} NM tokens",
response.getContainersFromPreviousAttempts().size(),
response.getNMTokensFromPreviousAttempts().size());
// Only start the heartbeat thread once registration has succeeded
this.heartbeatHandler.setDaemon(true);
this.heartbeatHandler.start();
return response;
}
/**
* Unregisters from the resource manager and stops the request handler thread.
*
* @param request the finishApplicationMaster request
* @return the response
* @throws YarnException if finishAM call fails
* @throws IOException if finishAM call fails
*/
public FinishApplicationMasterResponse finishApplicationMaster(
FinishApplicationMasterRequest request)
throws YarnException, IOException {
if (this.userUgi == null) {
if (this.connectionInitiated) {
// This is possible if the async launchUAM is still
// blocked and retrying. Return a dummy response in this case.
LOG.warn("Unmanaged AM still not successfully launched/registered yet."
+ " Stopping the UAM heartbeat thread anyways.");
return FinishApplicationMasterResponse.newInstance(false);
} else {
throw new YarnException("finishApplicationMaster should not "
+ "be called before createAndRegister");
}
}
FinishApplicationMasterResponse response =
this.rmProxyRelayer.finishApplicationMaster(request);
if (response.getIsUnregistered()) {
shutDownConnections();
}
return response;
}
/**
* Force kill the UAM.
*
* @return kill response
* @throws IOException if fails to create rmProxy
* @throws YarnException if force kill fails
*/
public KillApplicationResponse forceKillApplication()
throws IOException, YarnException {
shutDownConnections();
KillApplicationRequest request =
KillApplicationRequest.newInstance(this.applicationId);
if (this.rmClient == null) {
this.rmClient = createRMProxy(ApplicationClientProtocol.class, this.conf,
UserGroupInformation.createRemoteUser(this.submitter), null);
}
return this.rmClient.forceKillApplication(request);
}
/**
* Sends the specified heart beat request to the resource manager and invokes
* the callback asynchronously with the response.
*
* @param request the allocate request
* @param callback the callback method for the request
* @throws YarnException if registerAM is not called yet
*/
public void allocateAsync(AllocateRequest request,
AsyncCallback<AllocateResponse> callback) throws YarnException {
this.heartbeatHandler.allocateAsync(request, callback);
// Two possible cases why the UAM is not successfully registered yet:
// 1. launchUAM is not called at all. Should throw here.
// 2. launchUAM is called but hasn't successfully returned.
//
// In case 2, we have already saved the allocate request above, so if the
// registration succeeds later, no request is lost.
if (this.userUgi == null) {
if (this.connectionInitiated) {
LOG.info("Unmanaged AM still not successfully launched/registered yet."
+ " Saving the allocate request and send later.");
} else {
throw new YarnException(
"AllocateAsync should not be called before launchUAM");
}
}
}
/**
* Shutdown this UAM client, without killing the UAM in the YarnRM side.
*/
public void shutDownConnections() {
this.heartbeatHandler.shutdown();
this.rmProxyRelayer.shutdown();
}
/**
* Returns the application id of the UAM.
*
* @return application id of the UAM
*/
public ApplicationId getAppId() {
return this.applicationId;
}
/**
* Returns the rmProxy relayer of this UAM.
*
* @return rmProxy relayer of the UAM
*/
public AMRMClientRelayer getAMRMClientRelayer() {
return this.rmProxyRelayer;
}
/**
* Returns RM proxy for the specified protocol type. Unit test cases can
* override this method and return mock proxy instances.
*
* @param protocol protocol of the proxy
* @param config configuration
* @param user ugi for the proxy connection
* @param token token for the connection
* @param <T> type of the proxy
* @return the proxy instance
* @throws IOException if fails to create the proxy
*/
protected <T> T createRMProxy(Class<T> protocol, Configuration config,
UserGroupInformation user, Token<AMRMTokenIdentifier> token)
throws IOException {
return AMRMClientUtils.createRMProxy(config, protocol, user, token);
}
/**
* Launch and initialize an unmanaged AM. First, it creates a new application
* on the RM and negotiates a new attempt id. Then it waits for the RM
* application attempt state to reach YarnApplicationAttemptState.LAUNCHED
* after which it returns the AM-RM token.
*
* @param appId application id
* @return the UAM token
* @throws IOException if initialize fails
* @throws YarnException if initialize fails
*/
protected Token<AMRMTokenIdentifier> initializeUnmanagedAM(
ApplicationId appId) throws IOException, YarnException {
try {
UserGroupInformation appSubmitter =
UserGroupInformation.createRemoteUser(this.submitter);
this.rmClient = createRMProxy(ApplicationClientProtocol.class, this.conf,
appSubmitter, null);
// Submit the application
submitUnmanagedApp(appId);
// Monitor the application attempt to wait for launch state
monitorCurrentAppAttempt(appId,
EnumSet.of(YarnApplicationState.ACCEPTED,
YarnApplicationState.RUNNING, YarnApplicationState.KILLED,
YarnApplicationState.FAILED, YarnApplicationState.FINISHED),
YarnApplicationAttemptState.LAUNCHED);
return getUAMToken();
} finally {
this.rmClient = null;
}
}
private void submitUnmanagedApp(ApplicationId appId)
throws YarnException, IOException {
SubmitApplicationRequest submitRequest =
this.recordFactory.newRecordInstance(SubmitApplicationRequest.class);
ApplicationSubmissionContext context = this.recordFactory
.newRecordInstance(ApplicationSubmissionContext.class);
context.setApplicationId(appId);
context.setApplicationName(APP_NAME + "-" + appNameSuffix);
if (StringUtils.isBlank(this.queueName)) {
context.setQueue(this.conf.get(DEFAULT_QUEUE_CONFIG,
YarnConfiguration.DEFAULT_QUEUE_NAME));
} else {
context.setQueue(this.queueName);
}
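// Minimal placeholder container spec and resource: the AM is unmanaged, so the RM never actually launches this container.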
ContainerLaunchContext amContainer =
this.recordFactory.newRecordInstance(ContainerLaunchContext.class);
Resource resource = BuilderUtils.newResource(1024, 1);
context.setResource(resource);
context.setAMContainerSpec(amContainer);
submitRequest.setApplicationSubmissionContext(context);
context.setUnmanagedAM(true);
context.setKeepContainersAcrossApplicationAttempts(
this.keepContainersAcrossApplicationAttempts);
LOG.info("Submitting unmanaged application {}", appId);
this.rmClient.submitApplication(submitRequest);
}
/**
* Monitor the submitted application and attempt until it reaches certain
* states.
*
* @param appId Application Id of application to be monitored
* @param appStates acceptable application state
* @param attemptState acceptable application attempt state
* @return the application report
* @throws YarnException if getApplicationReport fails
* @throws IOException if getApplicationReport fails
*/
private ApplicationAttemptReport monitorCurrentAppAttempt(ApplicationId appId,
Set<YarnApplicationState> appStates,
YarnApplicationAttemptState attemptState)
throws YarnException, IOException {
long startTime = System.currentTimeMillis();
ApplicationAttemptId appAttemptId = null;
while (true) {
if (appAttemptId == null) {
// Get application report for the appId we are interested in
ApplicationReport report = getApplicationReport(appId);
YarnApplicationState state = report.getYarnApplicationState();
if (appStates.contains(state)) {
if (state != YarnApplicationState.ACCEPTED) {
throw new YarnRuntimeException(
"Received non-accepted application state: " + state + " for "
+ appId + ". This is likely because this is not the first "
+ "app attempt in home sub-cluster, and AMRMProxy HA "
+ "(yarn.nodemanager.amrmproxy.ha.enable) is not enabled.");
}
appAttemptId =
getApplicationReport(appId).getCurrentApplicationAttemptId();
} else {
LOG.info("Current application state of {} is {}, will retry later.",
appId, state);
}
}
if (appAttemptId != null) {
GetApplicationAttemptReportRequest req = this.recordFactory
.newRecordInstance(GetApplicationAttemptReportRequest.class);
req.setApplicationAttemptId(appAttemptId);
ApplicationAttemptReport attemptReport = this.rmClient
.getApplicationAttemptReport(req).getApplicationAttemptReport();
if (attemptState
.equals(attemptReport.getYarnApplicationAttemptState())) {
return attemptReport;
}
LOG.info("Current attempt state of " + appAttemptId + " is "
+ attemptReport.getYarnApplicationAttemptState()
+ ", waiting for current attempt to reach " + attemptState);
}
try {
Thread.sleep(this.asyncApiPollIntervalMillis);
} catch (InterruptedException e) {
LOG.warn("Interrupted while waiting for current attempt of " + appId
+ " to reach " + attemptState);
}
if (System.currentTimeMillis() - startTime > AM_STATE_WAIT_TIMEOUT_MS) {
throw new RuntimeException("Timeout for waiting current attempt of "
+ appId + " to reach " + attemptState);
}
}
}
/**
* Gets the amrmToken of the unmanaged AM.
*
* @return the amrmToken of the unmanaged AM.
* @throws IOException if getApplicationReport fails
* @throws YarnException if getApplicationReport fails
*/
protected Token<AMRMTokenIdentifier> getUAMToken()
throws IOException, YarnException {
Token<AMRMTokenIdentifier> token = null;
org.apache.hadoop.yarn.api.records.Token amrmToken =
getApplicationReport(this.applicationId).getAMRMToken();
if (amrmToken != null) {
token = ConverterUtils.convertFromYarn(amrmToken, (Text) null);
} else {
LOG.warn(
"AMRMToken not found in the application report for application: {}",
this.applicationId);
}
return token;
}
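  /**
   * Fetch the {@link ApplicationReport} for the given application from the RM.
   *
   * @param appId application id to look up
   * @return the application report
   * @throws YarnException if getApplicationReport fails
   * @throws IOException if getApplicationReport fails
   */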
private ApplicationReport getApplicationReport(ApplicationId appId)
throws YarnException, IOException {
GetApplicationReportRequest request =
this.recordFactory.newRecordInstance(GetApplicationReportRequest.class);
request.setApplicationId(appId);
return this.rmClient.getApplicationReport(request).getApplicationReport();
}
@VisibleForTesting
public int getRequestQueueSize() {
return this.heartbeatHandler.getRequestQueueSize();
}
@VisibleForTesting
protected void drainHeartbeatThread() {
this.heartbeatHandler.drainHeartbeatThread();
}
@VisibleForTesting
protected boolean isHeartbeatThreadAlive() {
return this.heartbeatHandler.isAlive();
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.resource;
import java.util.concurrent.Callable;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteSpring;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.util.typedef.G;
import org.apache.ignite.lang.IgniteCallable;
import org.apache.ignite.resources.SpringResource;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.junit.Test;
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
import org.springframework.beans.factory.NoUniqueBeanDefinitionException;
import org.springframework.context.support.ClassPathXmlApplicationContext;
/**
* Tests for injected resource.
*/
@SuppressWarnings("unused")
public class GridSpringResourceInjectionSelfTest extends GridCommonAbstractTest {
/** Bean name. */
private static final String DUMMY_BEAN = "dummyResourceBean";
/** Test grid with Spring context. */
private static Ignite grid;
/** {@inheritDoc} */
@Override public void beforeTestsStarted() throws Exception {
grid = IgniteSpring.start(new ClassPathXmlApplicationContext(
"/org/apache/ignite/internal/processors/resource/spring-resource.xml"));
}
/**
* @throws Exception If failed.
*/
@Test
public void testClosureFieldByResourceName() throws Exception {
grid.compute().call(new IgniteCallable<Object>() {
@SpringResource(resourceName = DUMMY_BEAN)
private transient DummyResourceBean dummyRsrcBean;
@Override public Object call() throws Exception {
assertNotNull(dummyRsrcBean);
return null;
}
});
}
/**
* @throws Exception If failed.
*/
@Test
public void testClosureFieldByResourceClass() throws Exception {
grid.compute().call(new IgniteCallable<Object>() {
@SpringResource(resourceClass = DummyResourceBean.class)
private transient DummyResourceBean dummyRsrcBean;
@Override public Object call() throws Exception {
assertNotNull(dummyRsrcBean);
return null;
}
});
}
/**
* @throws Exception If failed.
*/
@Test
public void testClosureFieldByResourceClassWithMultipleBeans() throws Exception {
IgniteConfiguration anotherCfg = new IgniteConfiguration();
anotherCfg.setIgniteInstanceName("anotherGrid");
Ignite anotherGrid = IgniteSpring.start(anotherCfg, new ClassPathXmlApplicationContext(
"/org/apache/ignite/internal/processors/resource/spring-resource-with-duplicate-beans.xml"));
        try {
            assertError(new IgniteCallable<Object>() {
                @SpringResource(resourceClass = DummyResourceBean.class)
                private transient DummyResourceBean dummyRsrcBean;
                @Override public Object call() throws Exception {
                    assertNotNull(dummyRsrcBean);
                    return null;
                }
            }, anotherGrid, NoUniqueBeanDefinitionException.class, "No qualifying bean of type " +
                "'org.apache.ignite.internal.processors.resource.GridSpringResourceInjectionSelfTest$DummyResourceBean'" +
                " available: expected single matching bean but found 2:");
        }
        finally {
            G.stop("anotherGrid", false);
        }
}
/**
* Resource injection with non-existing resource name.
*/
@Test
public void testClosureFieldWithWrongResourceName() {
assertError(new IgniteCallable<Object>() {
@SpringResource(resourceName = "nonExistentResource")
private transient DummyResourceBean dummyRsrcBean;
@Override public Object call() throws Exception {
assertNull(dummyRsrcBean);
return null;
}
}, grid, NoSuchBeanDefinitionException.class, "No bean named 'nonExistentResource' available");
}
/**
* Resource injection with non-existing resource class.
*/
@Test
public void testClosureFieldWithWrongResourceClass() {
assertError(new IgniteCallable<Object>() {
@SpringResource(resourceClass = AnotherDummyResourceBean.class)
private transient AnotherDummyResourceBean dummyRsrcBean;
@Override public Object call() throws Exception {
assertNull(dummyRsrcBean);
return null;
}
}, grid, NoSuchBeanDefinitionException.class, "No qualifying bean of type 'org.apache.ignite.internal.processors.resource." +
"GridSpringResourceInjectionSelfTest$AnotherDummyResourceBean' available");
}
/**
* Resource injection with both resource and class set (ambiguity).
*/
@Test
public void testClosureFieldByResourceClassAndName() {
assertError(new IgniteCallable<Object>() {
@SpringResource(resourceClass = DummyResourceBean.class, resourceName = DUMMY_BEAN)
private transient DummyResourceBean dummyRsrcBean;
@Override public Object call() throws Exception {
assertNull(dummyRsrcBean);
return null;
}
}, grid, IgniteException.class, "Either bean name or its class must be specified in @SpringResource, but not both");
}
/**
     * Resource injection with neither resource name nor class set.
*/
@Test
public void testClosureFieldWithNoParams() {
assertError(new IgniteCallable<Object>() {
@SpringResource
private transient DummyResourceBean dummyRsrcBean;
@Override public Object call() throws Exception {
assertNull(dummyRsrcBean);
return null;
}
}, grid, IgniteException.class, "Either bean name or its class must be specified in @SpringResource, but not both");
}
/**
* @throws Exception If failed.
*/
@Test
public void testClosureMethodWithResourceName() throws Exception {
grid.compute().call(new IgniteCallable<Object>() {
private DummyResourceBean dummyRsrcBean;
@SpringResource(resourceName = DUMMY_BEAN)
private void setDummyResourceBean(DummyResourceBean dummyRsrcBean) {
assertNotNull(dummyRsrcBean);
this.dummyRsrcBean = dummyRsrcBean;
}
@Override public Object call() throws Exception {
assertNotNull(dummyRsrcBean);
return null;
}
});
}
/**
* @throws Exception If failed.
*/
@Test
public void testClosureMethodWithResourceClass() throws Exception {
grid.compute().call(new IgniteCallable<Object>() {
private DummyResourceBean dummyRsrcBean;
@SpringResource(resourceClass = DummyResourceBean.class)
private void setDummyResourceBean(DummyResourceBean dummyRsrcBean) {
assertNotNull(dummyRsrcBean);
this.dummyRsrcBean = dummyRsrcBean;
}
@Override public Object call() throws Exception {
assertNotNull(dummyRsrcBean);
return null;
}
});
}
/**
* @throws Exception If failed.
*/
@Test
public void testClosureMethodWithResourceClassWithMultipleBeans() throws Exception {
IgniteConfiguration anotherCfg = new IgniteConfiguration();
anotherCfg.setIgniteInstanceName("anotherGrid");
Ignite anotherGrid = IgniteSpring.start(anotherCfg, new ClassPathXmlApplicationContext(
"/org/apache/ignite/internal/processors/resource/spring-resource-with-duplicate-beans.xml"));
try {
assertError(new IgniteCallable<Object>() {
private DummyResourceBean dummyRsrcBean;
@SpringResource(resourceClass = DummyResourceBean.class)
private void setDummyResourceBean(DummyResourceBean dummyRsrcBean) {
assertNotNull(dummyRsrcBean);
this.dummyRsrcBean = dummyRsrcBean;
}
@Override public Object call() throws Exception {
assertNotNull(dummyRsrcBean);
return null;
}
}, anotherGrid, NoUniqueBeanDefinitionException.class, "No qualifying bean of type " +
"'org.apache.ignite.internal.processors.resource.GridSpringResourceInjectionSelfTest$DummyResourceBean'" +
" available: expected single matching bean but found 2:");
}
finally {
G.stop("anotherGrid", false);
}
}
/**
* Resource injection with non-existing resource name.
*/
@Test
public void testClosureMethodWithWrongResourceName() {
assertError(new IgniteCallable<Object>() {
private DummyResourceBean dummyRsrcBean;
@SpringResource(resourceName = "nonExistentResource")
private void setDummyResourceBean(DummyResourceBean dummyRsrcBean) {
// No-op.
}
@Override public Object call() throws Exception {
assertNull(dummyRsrcBean);
return null;
}
}, grid, NoSuchBeanDefinitionException.class, "No bean named 'nonExistentResource' available");
}
/**
* Resource injection with non-existing resource class.
*/
@Test
public void testClosureMethodWithWrongResourceClass() {
assertError(new IgniteCallable<Object>() {
private AnotherDummyResourceBean dummyRsrcBean;
@SpringResource(resourceClass = AnotherDummyResourceBean.class)
private void setDummyResourceBean(AnotherDummyResourceBean dummyRsrcBean) {
// No-op.
}
@Override public Object call() throws Exception {
assertNull(dummyRsrcBean);
return null;
}
}, grid, NoSuchBeanDefinitionException.class,"No qualifying bean of type 'org.apache.ignite.internal.processors.resource" +
".GridSpringResourceInjectionSelfTest$AnotherDummyResourceBean' available");
}
/**
* Resource injection with both resource and class set (ambiguity).
*/
@Test
public void testClosureMethodByResourceClassAndName() {
assertError(new IgniteCallable<Object>() {
@SpringResource(resourceClass = DummyResourceBean.class, resourceName = DUMMY_BEAN)
private transient DummyResourceBean dummyRsrcBean;
@Override public Object call() throws Exception {
assertNull(dummyRsrcBean);
return null;
}
}, grid, IgniteException.class, "Either bean name or its class must be specified in @SpringResource, but not both");
}
/**
* Resource injection with no params.
*/
@Test
public void testClosureMethodWithNoParams() {
assertError(new IgniteCallable<Object>() {
@SpringResource
private transient DummyResourceBean dummyRsrcBean;
@Override public Object call() throws Exception {
assertNull(dummyRsrcBean);
return null;
}
}, grid, IgniteException.class, "Either bean name or its class must be specified in @SpringResource, but not both");
}
/**
* @param job {@link IgniteCallable} to be run.
* @param grid Node to run the job on.
* @param expE Expected exception type.
* @param expEMsg Expected exception message.
*/
private void assertError(final IgniteCallable<?> job, final Ignite grid, Class<? extends Throwable> expE,
String expEMsg) {
GridTestUtils.assertThrowsAnyCause(log, new Callable<Object>() {
@Override public Object call() throws Exception {
grid.compute(grid.cluster().forLocal()).call(job);
return null;
}
}, expE, expEMsg);
}
/**
* Dummy resource bean.
*/
public static class DummyResourceBean {
/**
*
*/
public DummyResourceBean() {
// No-op.
}
}
/**
* Another dummy resource bean.
*/
private static class AnotherDummyResourceBean {
/**
*
*/
public AnotherDummyResourceBean() {
// No-op.
}
}
}
|
|
/*
* Zed Attack Proxy (ZAP) and its related class files.
*
* ZAP is an HTTP/HTTPS proxy for assessing web application security.
*
* Copyright 2016 The ZAP Development Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zaproxy.zap.extension.api;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.emptyString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.jupiter.api.Assertions.assertThrows;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.FileConfiguration;
import org.junit.jupiter.api.Test;
import org.zaproxy.zap.utils.ZapXmlConfiguration;
/** Unit test for {@link OptionsParamApi}. */
class OptionsParamApiUnitTest {
private static final String API_ENABLED_KEY = "api.enabled";
private static final String API_SECURE_KEY = "api.secure";
private static final String API_KEY_KEY = "api.key";
private static final String API_DISABLEKEY_KEY = "api.disablekey";
private static final String API_INCERRORDETAILS_KEY = "api.incerrordetails";
private static final String API_AUTOFILLKEY_KEY = "api.autofillkey";
private static final String API_ENABLEJSONP_KEY = "api.enablejsonp";
private static final String API_NO_KEY_FOR_SAFE_OPS = "api.nokeyforsafeops";
private static final String API_REPORT_PERM_ERRORS = "api.reportpermerrors";
@Test
void shouldNotHaveConfigByDefault() {
// Given / When
OptionsParamApi param = new OptionsParamApi();
// Then
assertThat(param.getConfig(), is(equalTo(null)));
}
@Test
void shouldHaveEnabledStateByDefault() {
// Given / When
OptionsParamApi param = new OptionsParamApi();
// Then
assertThat(param.isEnabled(), is(equalTo(true)));
}
@Test
void shouldFailToSetEnabledStateWithoutConfig() {
// Given
OptionsParamApi param = new OptionsParamApi();
// When / Then
assertThrows(NullPointerException.class, () -> param.setEnabled(true));
}
@Test
void shouldSetEnabledStateWithConfig() {
// Given
OptionsParamApi param = createOptionsParamApiWithConfig();
// When
param.setEnabled(false);
// Then
assertThat(param.isEnabled(), is(equalTo(false)));
assertThat(param.getConfig().getBoolean(API_ENABLED_KEY), is(equalTo(false)));
}
@Test
void shouldHaveSecureOnlyDisabledByDefault() {
// Given / When
OptionsParamApi param = new OptionsParamApi();
// Then
assertThat(param.isSecureOnly(), is(equalTo(false)));
}
@Test
void shouldFailToSetSecureOnlyWithoutConfig() {
// Given
OptionsParamApi param = new OptionsParamApi();
// When / Then
assertThrows(NullPointerException.class, () -> param.setSecureOnly(true));
}
@Test
void shouldSetSecureOnlyWithConfig() {
// Given
OptionsParamApi param = createOptionsParamApiWithConfig();
// When
param.setSecureOnly(true);
// Then
assertThat(param.isSecureOnly(), is(equalTo(true)));
assertThat(param.getConfig().getBoolean(API_SECURE_KEY), is(equalTo(true)));
}
@Test
void shouldHaveKeyEnabledByDefault() {
// Given / When
OptionsParamApi param = new OptionsParamApi();
// Then
assertThat(param.isDisableKey(), is(equalTo(false)));
}
@Test
void shouldFailToSetDisableKeyWithoutConfig() {
// Given
OptionsParamApi param = new OptionsParamApi();
// When / Then
assertThrows(NullPointerException.class, () -> param.setDisableKey(true));
}
@Test
void shouldSetDisableKeyWithConfig() {
// Given
OptionsParamApi param = createOptionsParamApiWithConfig();
// When
param.setDisableKey(true);
// Then
        assertThat(param.isDisableKey(), is(equalTo(true)));
assertThat(param.getConfig().getBoolean(API_DISABLEKEY_KEY), is(equalTo(true)));
}
@Test
void shouldHaveIncErrorDetailsDisabledByDefault() {
// Given / When
OptionsParamApi param = new OptionsParamApi();
// Then
assertThat(param.isIncErrorDetails(), is(equalTo(false)));
}
@Test
void shouldFailToSetIncErrorDetailsWithoutConfig() {
// Given
OptionsParamApi param = new OptionsParamApi();
// When / Then
assertThrows(NullPointerException.class, () -> param.setIncErrorDetails(true));
}
@Test
void shouldSetIncErrorDetailsWithConfig() {
// Given
OptionsParamApi param = createOptionsParamApiWithConfig();
// When
param.setIncErrorDetails(true);
// Then
assertThat(param.isIncErrorDetails(), is(equalTo(true)));
assertThat(param.getConfig().getBoolean(API_INCERRORDETAILS_KEY), is(equalTo(true)));
}
@Test
void shouldHaveAutofillKeyDisabledByDefault() {
// Given / When
OptionsParamApi param = new OptionsParamApi();
// Then
assertThat(param.isAutofillKey(), is(equalTo(false)));
}
@Test
void shouldFailToSetAutofillKeyWithoutConfig() {
// Given
OptionsParamApi param = new OptionsParamApi();
// When / Then
assertThrows(NullPointerException.class, () -> param.setAutofillKey(true));
}
@Test
void shouldSetAutofillKeyWithConfig() {
// Given
OptionsParamApi param = createOptionsParamApiWithConfig();
// When
param.setAutofillKey(true);
// Then
assertThat(param.isAutofillKey(), is(equalTo(true)));
assertThat(param.getConfig().getBoolean(API_AUTOFILLKEY_KEY), is(equalTo(true)));
}
@Test
void shouldHaveEnableJSONPDisabledByDefault() {
// Given / When
OptionsParamApi param = new OptionsParamApi();
// Then
assertThat(param.isEnableJSONP(), is(equalTo(false)));
}
@Test
void shouldFailToSetEnableJSONPWithoutConfig() {
// Given
OptionsParamApi param = new OptionsParamApi();
// When / Then
assertThrows(NullPointerException.class, () -> param.setEnableJSONP(true));
}
@Test
void shouldSetEnableJSONPWithConfig() {
// Given
OptionsParamApi param = createOptionsParamApiWithConfig();
// When
param.setEnableJSONP(true);
// Then
assertThat(param.isEnableJSONP(), is(equalTo(true)));
assertThat(param.getConfig().getBoolean(API_ENABLEJSONP_KEY), is(equalTo(true)));
}
@Test
void shouldHaveReportPermErrorsDisabledByDefault() {
// Given / When
OptionsParamApi param = new OptionsParamApi();
// Then
assertThat(param.isReportPermErrors(), is(equalTo(false)));
}
@Test
void shouldFailToSetReportPermErrorsWithoutConfig() {
// Given
OptionsParamApi param = new OptionsParamApi();
// When / Then
assertThrows(NullPointerException.class, () -> param.setReportPermErrors(true));
}
@Test
void shouldSetReportPermErrorsWithConfig() {
// Given
OptionsParamApi param = createOptionsParamApiWithConfig();
// When
param.setReportPermErrors(true);
// Then
assertThat(param.isReportPermErrors(), is(equalTo(true)));
assertThat(param.getConfig().getBoolean(API_REPORT_PERM_ERRORS), is(equalTo(true)));
}
@Test
void shouldHaveNonceTimeToLiveInSecsSetTo5MinsByDefault() {
// Given / When
OptionsParamApi param = new OptionsParamApi();
// Then
assertThat(param.getNonceTimeToLiveInSecs(), is(equalTo(5 * 60)));
}
@Test
void shouldHaveNoKeyForViewsOrSafeOthersDisabledByDefault() {
// Given / When
OptionsParamApi param = new OptionsParamApi();
// Then
assertThat(param.isNoKeyForSafeOps(), is(equalTo(false)));
}
@Test
void shouldFailToSetNoKeyForViewsOrSafeOthersWithoutConfig() {
// Given
OptionsParamApi param = new OptionsParamApi();
// When / Then
assertThrows(NullPointerException.class, () -> param.setNoKeyForSafeOps(true));
}
@Test
void shouldSetNoKeyForViewsOrSafeOthersWithConfig() {
// Given
OptionsParamApi param = createOptionsParamApiWithConfig();
// When
param.setNoKeyForSafeOps(true);
// Then
assertThat(param.isNoKeyForSafeOps(), is(equalTo(true)));
assertThat(param.getConfig().getBoolean(API_NO_KEY_FOR_SAFE_OPS), is(equalTo(true)));
}
@Test
void shouldHaveEmptyRealKeyByDefault() {
// Given / When
OptionsParamApi param = new OptionsParamApi();
// Then
assertThat(param.getRealKey(), is(emptyString()));
}
@Test
void shouldHaveGeneratedKeyByDefault() {
// Given / When
OptionsParamApi param = new OptionsParamApi();
// Then
assertThat(param.getKey(), is(not(equalTo(""))));
}
@Test
void shouldFailToSetKeyWithoutConfig() {
// Given
OptionsParamApi param = new OptionsParamApi();
// When / Then
assertThrows(NullPointerException.class, () -> param.setKey(""));
}
@Test
void shouldSetKeyWithConfig() {
// Given
OptionsParamApi param = createOptionsParamApiWithConfig();
String apiKey = "Key";
// When
param.setKey(apiKey);
// Then
assertThat(param.getKey(), is(equalTo(apiKey)));
assertThat(param.getConfig().getString(API_KEY_KEY), is(equalTo(apiKey)));
}
@Test
void shouldSaveGeneratedKeyWithConfig() {
// Given
OptionsParamApi param = new OptionsParamApi();
Configuration conf = new Configuration();
param.load(conf);
param.setKey(null);
// When
String key = param.getKey();
// Then
assertThat(key, is(not(equalTo(""))));
assertThat(conf.getString(API_KEY_KEY), is(equalTo(key)));
assertThat(conf.isSaved(), is(equalTo(true)));
}
@Test
void shouldReturnEmptyKeyIfKeyDisabled() {
// Given
OptionsParamApi param = createOptionsParamApiWithConfig();
param.setDisableKey(true);
param.setKey("Key");
// When
String key = param.getKey();
// Then
assertThat(key, is(equalTo("")));
assertThat(param.getRealKey(), is(equalTo("Key")));
}
@Test
void shouldParseLoadedFileConfiguration() {
// Given
OptionsParamApi param = new OptionsParamApi();
FileConfiguration config = createTestConfig();
// When
param.load(config);
// Then
assertThat(param.isEnabled(), is(equalTo(false)));
assertThat(param.isSecureOnly(), is(equalTo(true)));
assertThat(param.isDisableKey(), is(equalTo(true)));
assertThat(param.isIncErrorDetails(), is(equalTo(true)));
assertThat(param.isAutofillKey(), is(equalTo(true)));
assertThat(param.isEnableJSONP(), is(equalTo(true)));
assertThat(param.getRealKey(), is(equalTo("ApiKey")));
}
@Test
void shouldBeCloneableByDefault() {
// Given
OptionsParamApi param = new OptionsParamApi();
// When
OptionsParamApi clone = param.clone();
// Then
assertThat(clone, is(notNullValue()));
assertThat(param.isEnabled(), is(equalTo(true)));
assertThat(param.isSecureOnly(), is(equalTo(false)));
assertThat(param.isDisableKey(), is(equalTo(false)));
assertThat(param.isIncErrorDetails(), is(equalTo(false)));
assertThat(param.isAutofillKey(), is(equalTo(false)));
assertThat(param.isEnableJSONP(), is(equalTo(false)));
assertThat(param.getRealKey(), is(equalTo("")));
}
@Test
void shouldHaveLoadedConfigsAfterCloning() {
// Given
OptionsParamApi param = new OptionsParamApi();
FileConfiguration config = createTestConfig();
param.load(config);
// When
OptionsParamApi clone = param.clone();
// Then
assertThat(clone, is(notNullValue()));
assertThat(param.isEnabled(), is(equalTo(false)));
assertThat(param.isSecureOnly(), is(equalTo(true)));
assertThat(param.isDisableKey(), is(equalTo(true)));
assertThat(param.isIncErrorDetails(), is(equalTo(true)));
assertThat(param.isAutofillKey(), is(equalTo(true)));
assertThat(param.isEnableJSONP(), is(equalTo(true)));
assertThat(param.getRealKey(), is(equalTo("ApiKey")));
}
@Test
void shouldUseDefaultValuesWhenLoadingFileConfigurationWithInvalidValues() {
// Given
OptionsParamApi param = new OptionsParamApi();
FileConfiguration config = createTestConfigWithInvalidValues();
// When
param.load(config);
// Then
assertThat(param.isEnabled(), is(equalTo(true)));
assertThat(param.isSecureOnly(), is(equalTo(false)));
assertThat(param.isDisableKey(), is(equalTo(false)));
assertThat(param.isIncErrorDetails(), is(equalTo(false)));
assertThat(param.isAutofillKey(), is(equalTo(false)));
assertThat(param.isEnableJSONP(), is(equalTo(false)));
assertThat(param.getRealKey(), is(equalTo("")));
}
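    /** Creates an {@code OptionsParamApi} already loaded with an empty {@link ZapXmlConfiguration}. */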
private static OptionsParamApi createOptionsParamApiWithConfig() {
OptionsParamApi param = new OptionsParamApi();
param.load(new ZapXmlConfiguration());
return param;
}
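    /** Builds a configuration that sets the API key and all boolean API options, used by the load and clone tests. */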
private static FileConfiguration createTestConfig() {
ZapXmlConfiguration config = new ZapXmlConfiguration();
config.setProperty(API_ENABLED_KEY, "false");
config.setProperty(API_SECURE_KEY, "true");
config.setProperty(API_KEY_KEY, "ApiKey");
config.setProperty(API_DISABLEKEY_KEY, "true");
config.setProperty(API_INCERRORDETAILS_KEY, "true");
config.setProperty(API_AUTOFILLKEY_KEY, "true");
config.setProperty(API_ENABLEJSONP_KEY, "true");
config.setProperty(API_NO_KEY_FOR_SAFE_OPS, "true");
config.setProperty(API_REPORT_PERM_ERRORS, "true");
return config;
}
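    /** Builds a configuration whose API options hold non-boolean values, to exercise the fallback to defaults. */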
private static FileConfiguration createTestConfigWithInvalidValues() {
ZapXmlConfiguration config = new ZapXmlConfiguration();
config.setProperty(API_ENABLED_KEY, "Not Boolean");
config.setProperty(API_SECURE_KEY, "Not Boolean");
config.setProperty(API_DISABLEKEY_KEY, "Not Boolean");
config.setProperty(API_INCERRORDETAILS_KEY, "Not Boolean");
config.setProperty(API_AUTOFILLKEY_KEY, "Not Boolean");
config.setProperty(API_ENABLEJSONP_KEY, "Not Boolean");
config.setProperty(API_NO_KEY_FOR_SAFE_OPS, "Not Boolean");
config.setProperty(API_REPORT_PERM_ERRORS, "Not Boolean");
return config;
}
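    /** {@link ZapXmlConfiguration} stub that records calls to {@code save()} instead of writing to disk. */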
private static class Configuration extends ZapXmlConfiguration {
private static final long serialVersionUID = 3822957830178644758L;
private boolean saved;
@Override
public void save() throws ConfigurationException {
saved = true;
}
boolean isSaved() {
return saved;
}
}
}
|
|
package com.antsoft.ant.codecontext;
import java.util.*;
import java.io.*;
import javax.swing.text.Document;
import com.antsoft.ant.compiler.DepsData;
/*
@author Kim, SungHoon.
*/
public class Parser {
private static Hashtable table_symbol;
private static Hashtable table_import;
private static int token;
private static int dummyCount = 0;
private static int staticCount = 0;
private static int depth = 0;
private static String currentClassName = null;
private static String packageName = null;
public static boolean hasMain = false;
private static boolean isConstructor = false;
private static String mainClass = null;
private static boolean depsFlag = false;
private static String fname = null;
private static Vector classes = null;
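    // Enables dependency tracking for the given source file name; while set,
    // parsing also feeds type, import and package information into DepsData.deps.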
public static void setDepsFlag(String f) {
depsFlag = true;
fname = f;
//System.out.println(" filename => "+f);
}
private static Lexer lexer = null;
public Parser(File file) {
//Lexer.setData(file);
lexer = new Lexer(file);
init();
}
private static void init() {
table_symbol = new Hashtable(); // for symbol table.
table_import = new Hashtable(); // for import list table.
dummyCount = 0;
depth = 0;
staticCount = 0;
packageName = "dummypack";
hasMain = false;
depsFlag = false;
fname = null;
classes = new Vector(1,1);
}
public Parser(InputStream in) {
//Lexer.setData(in);
lexer = new Lexer(in);
init();
}
public Parser(Document doc) {
//Lexer.setData(doc);
lexer = new Lexer(doc);
init();
}
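    // Parses a possibly dot-separated qualified name (e.g. java.util.List) and,
    // when dependency tracking is on, records it as a type reference.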
private static final String name() {
StringBuffer buf = new StringBuffer("");
boolean loop = false;
do {
if (token == Sym.ID) {
buf.append(Lexer.getValue());
token = Lexer.nextToken();
if (token == Sym.DOT) {
buf.append(".");
token = Lexer.nextToken();
loop = true;
}
else loop = false;
}
else loop = false;
} while (loop);
// System.out.println("name =---> "+buf.toString());
String result = buf.toString();
if (depsFlag) DepsData.deps.addTypeReference(result);
buf = null;
return result;
}
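    // Parses a primitive or reference type name, including any trailing []
    // pairs for array types.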
private static final String type() {
StringBuffer buf = new StringBuffer("");
switch (token) {
case Sym.BOOLEAN :
case Sym.INT :
case Sym.CHAR :
case Sym.BYTE :
case Sym.SHORT :
case Sym.LONG :
case Sym.FLOAT :
case Sym.DOUBLE :
case Sym.VOID :
buf.append(Lexer.getValue());
token = Lexer.nextToken();
break;
case Sym.ID :
buf.append(name());
break;
}
while (token == Sym.LS) {
buf.append("[");
if ( (token = Lexer.nextToken()) == Sym.RS) {
buf.append("]");
token = Lexer.nextToken();
}
else break;
}
String result = buf.toString();
buf = null;
return result;
}
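    // Parses a parameter list up to the closing ')'. Each parameter is entered
    // into the given symbol table; the returned string is the "(type,...)"
    // signature, while params collects "type name" pairs.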
private static final String formalParameter(Hashtable table,StringBuffer params) {
// System.out.println(" formal parameter");
boolean perfect = true;
int count=0;
StringBuffer buf = new StringBuffer();
while (token != Sym.RP) {
SymbolTableEntry entry = new SymbolTableEntry();
entry.setStartLine(Lexer.getLineCount());
entry.setMemberSort(SymbolTableEntry.FIELD);
entry.setDepth(depth + 1);
if (token == Sym.MODIFIER2) token = Lexer.nextToken();
String temp = type();
if (temp != null) {
buf.append(temp);
params.append(temp);
entry.setType(temp);
}
else {
perfect = false;
break;
}
String key = null;
if (token == Sym.ID) {
key = Lexer.getValue();
table.put(key,entry);
}
else {
perfect = false;
break;
}
token = Lexer.nextToken();
while (token == Sym.LS) {
token = Lexer.nextToken();
buf.append("[]");
params.append("[]");
token = Lexer.nextToken();
}
params.append(" "+key);
if (token == Sym.COMMA) {
buf.append(",");
params.append(",");
token = Lexer.nextToken();
}
}
if (perfect) {
token = Lexer.nextToken();
return "("+buf.toString()+")";
}
else {
while (token != Sym.RS && token != Sym.EOF) token = Lexer.nextToken();
params = null;
return "()";
}
}
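    // Parses a nested class/interface header (name, extends, implements) and
    // then its body as a new, deeper scope.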
private static final String innerDeclareStatement(SymbolTableEntry entry) {
String key = "";
if (token == Sym.ID) {
key = Lexer.getValue();
//System.out.println(" inner class is "+key);
entry.setType(key);
token = Lexer.nextToken();
entry.setDepth(depth);
}
if (token == Sym.EXTENDS) {
token = Lexer.nextToken();
entry.setSuperClass(name());
}
else entry.setSuperClass("java.lang.Object");
if (token == Sym.IMPLEMENTS) {
token = Lexer.nextToken();
while (true) {
entry.addImplementsInterface(name());
if (token == Sym.COMMA) token = Lexer.nextToken();
else break;
}
}
while (token != Sym.LB && token != Sym.EOF) token = Lexer.nextToken();
if (token == Sym.LB) {
depth++;
token = Lexer.nextToken();
entry.table = new Hashtable();
blockStatement(entry,true);
}
//token = Lexer.nextToken();
return key;
}
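    // Parses a constructor declaration: parameter list, optional throws clause,
    // then the body block.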
private static final String constructorStatement(SymbolTableEntry entry) {
// System.out.println(" constructor declare statement");
entry.table = new Hashtable();
StringBuffer buf = new StringBuffer("");
String param = formalParameter(entry.table,buf);
if (!buf.toString().equals("")) {
entry.addParameter(buf.toString());
}
if (token == Sym.THROWS) {
token = Lexer.nextToken();
boolean loop = false;
do {
name();
if (token == Sym.COMMA) {
loop = true;
token = Lexer.nextToken();
}
else loop = false;
} while (loop);
}
if (token == Sym.LB) {
depth++;
token = Lexer.nextToken();
blockStatement(entry,false);
}
return param;
}
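    // Parses the rest of a member after its name: either a method (parameters,
    // optional throws clause, body) or a field declaration, collecting any
    // additional comma-separated declarators.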
private static final String nonConstructorStatement(SymbolTableEntry entry) {
String param = "";
if (token == Sym.LP) {
entry.table = new Hashtable();
entry.setMemberSort(SymbolTableEntry.METHOD);
token = Lexer.nextToken();
StringBuffer buf = new StringBuffer("");
param = formalParameter(entry.table,buf);
if (!buf.toString().equals("")) {
entry.addParameter(buf.toString());
}
if (token == Sym.THROWS) {
token = Lexer.nextToken();
boolean loop = false;
do {
name();
if (token == Sym.COMMA) {
loop = true;
token = Lexer.nextToken();
}
else loop = false;
} while (loop);
}
if (token == Sym.LB) {
depth++;
token = Lexer.nextToken();
blockStatement(entry,false);
}
else if (token == Sym.SEMIC) token = Lexer.nextToken();
}
else {
entry.setMemberSort(SymbolTableEntry.FIELD);
entry.setEndLine(Lexer.getLineCount());
StringBuffer buf = new StringBuffer("");
while (token != Sym.SEMIC && token != Sym.EOF) {
while (token == Sym.LS) {
token = Lexer.nextToken();
if (token == Sym.RS) {
token = Lexer.nextToken();
buf.append("[]");
}
}
if (token == Sym.COMMA) {
token = Lexer.nextToken();
if (token == Sym.ID) {
String id = Lexer.getValue();
if (depsFlag) DepsData.deps.addTypeReference(id);
buf.append(","+id);
token = Lexer.nextToken();
}
}
else if (token == Sym.LB) {
entry.table = new Hashtable();
depth++;
token = Lexer.nextToken();
blockStatement(entry,false);
}
else token = Lexer.nextToken();
}
token = Lexer.nextToken();
param = buf.toString();
}
return param;
}
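    // Parses one class member. A '(' right after the first identifier means a
    // constructor; otherwise it is a method or field. Flags hasMain when a
    // public void main(String[]) method is found.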
private static final String memberDeclareStatement(SymbolTableEntry entry) {
String key = "";
entry.setType(type());
entry.setDepth(depth);
if (token == Sym.LP) {
key = entry.getType();
entry.setType(null);
entry.setMemberSort(SymbolTableEntry.CONSTRUCTOR);
token = Lexer.nextToken();
key += constructorStatement(entry);
//entry.setEndLine(Lexer.getLineCount());
}
else if (token == Sym.ID) {
key = Lexer.getValue();
token = Lexer.nextToken();
key += nonConstructorStatement(entry);
//entry.setEndLine(Lexer.getLineCount());
if (entry.getMemberSort() == SymbolTableEntry.METHOD
&& key.equals("main(String[])") && entry.getAccessType().equals("public")
&& entry.getType().equals("void")) {
hasMain = true;
mainClass = currentClassName;
}
}
else token = Lexer.nextToken();
return key;
}
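    // Parses the members of a class or interface body: modifiers, nested types,
    // initializer blocks, fields, methods and constructors, entering each into
    // the enclosing entry's symbol table.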
private static final void declareStatement(SymbolTableEntry origin) {
Hashtable table = origin.table;
while (token != Sym.RB && token != Sym.EOF) {
SymbolTableEntry entry = new SymbolTableEntry();
entry.setStartLine(Lexer.getLineCount());
String key = null;
while (token == Sym.MODIFIER || token == Sym.MODIFIER2) {
if (token == Sym.MODIFIER) entry.setAccessType(Lexer.getValue());
else if (Lexer.getValue().equals("static")) entry.setStatic(true);
token = Lexer.nextToken();
}
if (token == Sym.INTERFACE || token == Sym.CLASS) {
if (token == Sym.CLASS) entry.setMemberSort(SymbolTableEntry.CLASS);
else entry.setMemberSort(SymbolTableEntry.INTERFACE);
token = Lexer.nextToken();
key = innerDeclareStatement(entry);
                //if (entry.table == null) System.out.println(" missing symbol table for inner class");
if (token == Sym.RB) entry.setReallyEnd(true);
entry.setEndLine(Lexer.getLineCount());
//System.out.println(" end lien is "+entry.getType());
token = Lexer.nextToken();
}
else if (token == Sym.LB) {
entry.setDepth(depth);
depth++;
key = "static"+staticCount++;
entry.table = new Hashtable();
token = Lexer.nextToken();
blockStatement(entry,false);
//entry.setEndLine(Lexer.getLineCount());
entry.setMemberSort(SymbolTableEntry.OTHERS);
}
else {
key = memberDeclareStatement(entry);
if (entry.getMemberSort() == SymbolTableEntry.FIELD) {
StringTokenizer tokenizer = new StringTokenizer(key,",");
while (tokenizer!=null&&tokenizer.hasMoreElements()) {
String one = tokenizer.nextToken();
entry.table = null;
SymbolTableEntry e1 = new SymbolTableEntry(entry);
int p = one.indexOf("[]");
if (p == -1) table.put(one,e1);
else {
e1.setType(e1.getType()+one.substring(p,one.length()));
table.put(one.substring(0,p),e1);
}
}
continue;
}
}
if (key !=null) {
if (entry.getMemberSort() != SymbolTableEntry.CONSTRUCTOR) {
//if (entry.getMemberSort() == SymbolTableEntry.CLASS)
//System.out.println(" testing => "+entry);
table.put(key,entry);
}
else {
if (key.indexOf(currentClassName) != -1) {
table.put(key,entry);
}
}
}
}
depth--;
}
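    // Parses an ordinary (method or initializer) block: records local variable
    // declarations, type references for dependency tracking, and nested blocks
    // as "dummyN" entries.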
private static final void normalStatement(SymbolTableEntry origin) {
//System.out.println(" block start ");
Hashtable table = origin.table;
while (token != Sym.RB && token != Sym.EOF) {
SymbolTableEntry entry;
String type = type();
if (type !=null && token == Sym.ID) {
entry = new SymbolTableEntry();
entry.setStartLine(Lexer.getLineCount());
entry.setEndLine(Lexer.getLineCount());
entry.setDepth(depth);
entry.setType(type);
entry.setMemberSort(SymbolTableEntry.FIELD);
String key = Lexer.getValue();
table.put(key,entry);
}
//else if (type != null && token != Sym.ID) {
//if (depsFlag) DepsData.deps.addMemberReference(type);
//token = Lexer.nextToken();
//}
else if (token == Sym.ID) {
if (depsFlag) DepsData.deps.addTypeReference(Lexer.getValue());
token = Lexer.nextToken();
}
else if (token == Sym.LB) {
if (depsFlag && type != null) {
if (type.indexOf('.') != -1) DepsData.deps.addTypeReference(type);
}
depth++;
String key = "dummy" + dummyCount++;
entry = new SymbolTableEntry();
entry.setStartLine(Lexer.getLineCount());
entry.setDepth(depth);
entry.table = new Hashtable();
token = Lexer.nextToken();
//System.out.println(" value => "+Lexer.getValue());
normalStatement(entry);
entry.setMemberSort(SymbolTableEntry.OTHERS);
//entry.setEndLine(Lexer.getLineCount());
table.put(key,entry);
}
else if (token != Sym.RB) {
if (depsFlag && type != null) {
if (type.indexOf('.') != -1) DepsData.deps.addTypeReference(type);
}
token = Lexer.nextToken();
}
}
origin.setEndLine(Lexer.getLineCount());
//System.out.println(" origin type ==> "+origin.getType());
depth--;
token = Lexer.nextToken();
//System.out.println(" block end ");
}
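    // Dispatches a '{...}' body: class/interface bodies go to declareStatement,
    // ordinary blocks to normalStatement.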
private static final void blockStatement(SymbolTableEntry entry,boolean isInClass) {
if (isInClass) declareStatement(entry);
else normalStatement(entry);
}
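    // Parses a top-level class or interface: modifiers, name, extends and
    // implements clauses, then the body; the resulting entry is stored in
    // table_symbol.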
private static final void classBlock() {
SymbolTableEntry entry = new SymbolTableEntry();
String key = null;
entry.setStartLine(Lexer.getLineCount());
entry.setDepth(depth);
while (token == Sym.MODIFIER || token == Sym.MODIFIER2) {
if (token == Sym.MODIFIER) entry.setAccessType(Lexer.getValue());
token = Lexer.nextToken();
}
while (token != Sym.CLASS && token != Sym.INTERFACE && token != Sym.EOF) token = Lexer.nextToken();
if (token == Sym.EOF) return;
if (token == Sym.CLASS) entry.setMemberSort(SymbolTableEntry.CLASS);
else entry.setMemberSort(SymbolTableEntry.INTERFACE);
token = Lexer.nextToken();
while (token != Sym.ID && token != Sym.EOF) token = Lexer.nextToken();
if (token == Sym.EOF) return;
key = Lexer.getValue();
currentClassName = key;
entry.setType(key);
classes.addElement(key);
if (depsFlag) DepsData.deps.startDefinition(key);
token = Lexer.nextToken();
if (token == Sym.EXTENDS) {
token = Lexer.nextToken();
entry.setSuperClass(name());
}
else {
if (packageName.equals("java.lang") && key.equals("Object"))
entry.setSuperClass(null);
else entry.setSuperClass("java.lang.Object");
}
if (token == Sym.IMPLEMENTS) {
token = Lexer.nextToken();
while (true) {
entry.addImplementsInterface(name());
if (token == Sym.COMMA) token = Lexer.nextToken();
else break;
}
}
while (token != Sym.LB && token != Sym.EOF) token = Lexer.nextToken();
token = Lexer.nextToken();
entry.table = new Hashtable();
depth++;
blockStatement(entry,true);
entry.setEndLine(Lexer.getLineCount());
if (token == Sym.RB) entry.setReallyEnd(true);
table_symbol.put(key,entry);
token = Lexer.nextToken();
}
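    // Parses a single import statement (read up to end of line) and records it
    // in table_import.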
private static final void importStatement() {
if (token == Sym.IMPORT) {
Lexer.newLine = true;
SymbolTableEntry entry = new SymbolTableEntry();
entry.setMemberSort(SymbolTableEntry.IMPORT);
entry.setStartLine(Lexer.getLineCount());
StringBuffer buf = new StringBuffer();
token = Lexer.nextToken();
while ( (token != Sym.EOL) && (token != Sym.EOF) ) {
if (token == Sym.ID) buf.append(Lexer.getValue());
else if (token == Sym.DOT) buf.append(".");
else if (token == Sym.MUL) buf.append("*");
token = Lexer.nextToken();
}
String iname = buf.toString();
if (depsFlag) DepsData.deps.addImport(iname);
table_import.put(iname,entry);
entry = null;
}
Lexer.newLine = false;
token = Lexer.nextToken();
}
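    // Parses the package declaration (read up to end of line) and records the
    // package name.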
private static final void packageStatement() {
if (token == Sym.PACKAGE) {
Lexer.newLine = true;
SymbolTableEntry entry = new SymbolTableEntry();
entry.setMemberSort(SymbolTableEntry.PACKAGE);
entry.setStartLine(Lexer.getLineCount());
StringBuffer buf = new StringBuffer();
token = Lexer.nextToken();
while ( (token != Sym.EOL) && (token != Sym.EOF) ) {
if (token == Sym.ID) buf.append(Lexer.getValue());
else if (token == Sym.DOT) buf.append(".");
token = Lexer.nextToken();
}
packageName = buf.toString();
if (depsFlag) DepsData.deps.startPackage(packageName);
table_import.put(packageName,entry);
entry = null;
}
Lexer.newLine = false;
token = Lexer.nextToken();
}
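    // Entry point: parses the optional package declaration, the imports, and
    // then all top-level type declarations until end of file.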
public static final void doParse() {
if (depsFlag) {
//System.out.println(" f name => "+fname);
DepsData.deps.startFile(fname);
}
token = Lexer.nextToken();
if (token == Sym.EOF) return;
if (depsFlag) DepsData.deps.startPackage("dummypack");
if (token == Sym.PACKAGE) packageStatement();
while (token == Sym.IMPORT) importStatement();
while (token != Sym.EOF) classBlock();
//show(table_symbol);
}
////////////////////////////////////////////////////////////////////////////////////////
    // Results of parsing are retrieved by other objects through the accessors below.
public static final Hashtable getTableImport() {
return table_import;
}
public static final Hashtable getTableSymbol() {
return table_symbol;
}
public static final String getPackageName() {
return packageName;
}
public static final String getMainClass() {
return mainClass;
}
public static final Vector getClasses() {
return classes;
}
/*
public static void main(String[] argv) {
File f = new File("Lexer.java");
Parser.setData(f);
Parser.doParse();
System.out.println("result ");
Hashtable table = Parser.getTableSymbol();
//Show show = new Show();
//show.show(table);
show(table);
}
static int count = 0;
public static void show(Hashtable t) {
//System.out.println("count is "+count++);
Enumeration key = t.keys();
while (key != null && key.hasMoreElements()) {
String ttt = (String)key.nextElement();
SymbolTableEntry e = (SymbolTableEntry)t.get(ttt);
System.out.println("key => "+ttt);
if (e!=null) {
System.out.println("entry => "+e.toString());
if (e.table != null) show(e.table);
}
}
}
*/
}
|
|
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package net.opengis.gml.impl;
import net.opengis.gml.AbstractTopoPrimitiveType;
import net.opengis.gml.GmlPackage;
import net.opengis.gml.TopoPrimitiveMemberType;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.EObjectImpl;
import org.eclipse.emf.ecore.util.BasicFeatureMap;
import org.eclipse.emf.ecore.util.FeatureMap;
import org.eclipse.emf.ecore.util.InternalEList;
import org.w3._1999.xlink.ActuateType;
import org.w3._1999.xlink.ShowType;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Topo Primitive Member Type</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link net.opengis.gml.impl.TopoPrimitiveMemberTypeImpl#getTopoPrimitiveGroup <em>Topo Primitive Group</em>}</li>
* <li>{@link net.opengis.gml.impl.TopoPrimitiveMemberTypeImpl#getTopoPrimitive <em>Topo Primitive</em>}</li>
* <li>{@link net.opengis.gml.impl.TopoPrimitiveMemberTypeImpl#getActuate <em>Actuate</em>}</li>
* <li>{@link net.opengis.gml.impl.TopoPrimitiveMemberTypeImpl#getArcrole <em>Arcrole</em>}</li>
* <li>{@link net.opengis.gml.impl.TopoPrimitiveMemberTypeImpl#getHref <em>Href</em>}</li>
* <li>{@link net.opengis.gml.impl.TopoPrimitiveMemberTypeImpl#getRemoteSchema <em>Remote Schema</em>}</li>
* <li>{@link net.opengis.gml.impl.TopoPrimitiveMemberTypeImpl#getRole <em>Role</em>}</li>
* <li>{@link net.opengis.gml.impl.TopoPrimitiveMemberTypeImpl#getShow <em>Show</em>}</li>
* <li>{@link net.opengis.gml.impl.TopoPrimitiveMemberTypeImpl#getTitle <em>Title</em>}</li>
* <li>{@link net.opengis.gml.impl.TopoPrimitiveMemberTypeImpl#getType <em>Type</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class TopoPrimitiveMemberTypeImpl extends EObjectImpl implements TopoPrimitiveMemberType {
/**
* The cached value of the '{@link #getTopoPrimitiveGroup() <em>Topo Primitive Group</em>}' attribute list.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getTopoPrimitiveGroup()
* @generated
* @ordered
*/
protected FeatureMap topoPrimitiveGroup;
/**
* The default value of the '{@link #getActuate() <em>Actuate</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getActuate()
* @generated
* @ordered
*/
protected static final ActuateType ACTUATE_EDEFAULT = ActuateType.ON_LOAD;
/**
* The cached value of the '{@link #getActuate() <em>Actuate</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getActuate()
* @generated
* @ordered
*/
protected ActuateType actuate = ACTUATE_EDEFAULT;
/**
* This is true if the Actuate attribute has been set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
protected boolean actuateESet;
/**
* The default value of the '{@link #getArcrole() <em>Arcrole</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getArcrole()
* @generated
* @ordered
*/
protected static final String ARCROLE_EDEFAULT = null;
/**
* The cached value of the '{@link #getArcrole() <em>Arcrole</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getArcrole()
* @generated
* @ordered
*/
protected String arcrole = ARCROLE_EDEFAULT;
/**
* The default value of the '{@link #getHref() <em>Href</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getHref()
* @generated
* @ordered
*/
protected static final String HREF_EDEFAULT = null;
/**
* The cached value of the '{@link #getHref() <em>Href</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getHref()
* @generated
* @ordered
*/
protected String href = HREF_EDEFAULT;
/**
* The default value of the '{@link #getRemoteSchema() <em>Remote Schema</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getRemoteSchema()
* @generated
* @ordered
*/
protected static final String REMOTE_SCHEMA_EDEFAULT = null;
/**
* The cached value of the '{@link #getRemoteSchema() <em>Remote Schema</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getRemoteSchema()
* @generated
* @ordered
*/
protected String remoteSchema = REMOTE_SCHEMA_EDEFAULT;
/**
* The default value of the '{@link #getRole() <em>Role</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getRole()
* @generated
* @ordered
*/
protected static final String ROLE_EDEFAULT = null;
/**
* The cached value of the '{@link #getRole() <em>Role</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getRole()
* @generated
* @ordered
*/
protected String role = ROLE_EDEFAULT;
/**
* The default value of the '{@link #getShow() <em>Show</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getShow()
* @generated
* @ordered
*/
protected static final ShowType SHOW_EDEFAULT = ShowType.NEW;
/**
* The cached value of the '{@link #getShow() <em>Show</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getShow()
* @generated
* @ordered
*/
protected ShowType show = SHOW_EDEFAULT;
/**
* This is true if the Show attribute has been set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
protected boolean showESet;
/**
* The default value of the '{@link #getTitle() <em>Title</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getTitle()
* @generated
* @ordered
*/
protected static final String TITLE_EDEFAULT = null;
/**
* The cached value of the '{@link #getTitle() <em>Title</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getTitle()
* @generated
* @ordered
*/
protected String title = TITLE_EDEFAULT;
/**
* The default value of the '{@link #getType() <em>Type</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getType()
* @generated
* @ordered
*/
protected static final String TYPE_EDEFAULT = "simple";
/**
* The cached value of the '{@link #getType() <em>Type</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getType()
* @generated
* @ordered
*/
protected String type = TYPE_EDEFAULT;
/**
* This is true if the Type attribute has been set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
protected boolean typeESet;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected TopoPrimitiveMemberTypeImpl() {
super();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
protected EClass eStaticClass() {
return GmlPackage.eINSTANCE.getTopoPrimitiveMemberType();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public FeatureMap getTopoPrimitiveGroup() {
if (topoPrimitiveGroup == null) {
topoPrimitiveGroup = new BasicFeatureMap(this, GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__TOPO_PRIMITIVE_GROUP);
}
return topoPrimitiveGroup;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public AbstractTopoPrimitiveType getTopoPrimitive() {
return (AbstractTopoPrimitiveType)getTopoPrimitiveGroup().get(GmlPackage.eINSTANCE.getTopoPrimitiveMemberType_TopoPrimitive(), true);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public NotificationChain basicSetTopoPrimitive(AbstractTopoPrimitiveType newTopoPrimitive, NotificationChain msgs) {
return ((FeatureMap.Internal)getTopoPrimitiveGroup()).basicAdd(GmlPackage.eINSTANCE.getTopoPrimitiveMemberType_TopoPrimitive(), newTopoPrimitive, msgs);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public ActuateType getActuate() {
return actuate;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setActuate(ActuateType newActuate) {
ActuateType oldActuate = actuate;
actuate = newActuate == null ? ACTUATE_EDEFAULT : newActuate;
boolean oldActuateESet = actuateESet;
actuateESet = true;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__ACTUATE, oldActuate, actuate, !oldActuateESet));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void unsetActuate() {
ActuateType oldActuate = actuate;
boolean oldActuateESet = actuateESet;
actuate = ACTUATE_EDEFAULT;
actuateESet = false;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.UNSET, GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__ACTUATE, oldActuate, ACTUATE_EDEFAULT, oldActuateESet));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public boolean isSetActuate() {
return actuateESet;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public String getArcrole() {
return arcrole;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setArcrole(String newArcrole) {
String oldArcrole = arcrole;
arcrole = newArcrole;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__ARCROLE, oldArcrole, arcrole));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public String getHref() {
return href;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setHref(String newHref) {
String oldHref = href;
href = newHref;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__HREF, oldHref, href));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public String getRemoteSchema() {
return remoteSchema;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setRemoteSchema(String newRemoteSchema) {
String oldRemoteSchema = remoteSchema;
remoteSchema = newRemoteSchema;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__REMOTE_SCHEMA, oldRemoteSchema, remoteSchema));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public String getRole() {
return role;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setRole(String newRole) {
String oldRole = role;
role = newRole;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__ROLE, oldRole, role));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public ShowType getShow() {
return show;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setShow(ShowType newShow) {
ShowType oldShow = show;
show = newShow == null ? SHOW_EDEFAULT : newShow;
boolean oldShowESet = showESet;
showESet = true;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__SHOW, oldShow, show, !oldShowESet));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void unsetShow() {
ShowType oldShow = show;
boolean oldShowESet = showESet;
show = SHOW_EDEFAULT;
showESet = false;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.UNSET, GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__SHOW, oldShow, SHOW_EDEFAULT, oldShowESet));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public boolean isSetShow() {
return showESet;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public String getTitle() {
return title;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setTitle(String newTitle) {
String oldTitle = title;
title = newTitle;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__TITLE, oldTitle, title));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public String getType() {
return type;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setType(String newType) {
String oldType = type;
type = newType;
boolean oldTypeESet = typeESet;
typeESet = true;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__TYPE, oldType, type, !oldTypeESet));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void unsetType() {
String oldType = type;
boolean oldTypeESet = typeESet;
type = TYPE_EDEFAULT;
typeESet = false;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.UNSET, GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__TYPE, oldType, TYPE_EDEFAULT, oldTypeESet));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public boolean isSetType() {
return typeESet;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
switch (featureID) {
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__TOPO_PRIMITIVE_GROUP:
return ((InternalEList<?>)getTopoPrimitiveGroup()).basicRemove(otherEnd, msgs);
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__TOPO_PRIMITIVE:
return basicSetTopoPrimitive(null, msgs);
}
return super.eInverseRemove(otherEnd, featureID, msgs);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
switch (featureID) {
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__TOPO_PRIMITIVE_GROUP:
if (coreType) return getTopoPrimitiveGroup();
return ((FeatureMap.Internal)getTopoPrimitiveGroup()).getWrapper();
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__TOPO_PRIMITIVE:
return getTopoPrimitive();
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__ACTUATE:
return getActuate();
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__ARCROLE:
return getArcrole();
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__HREF:
return getHref();
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__REMOTE_SCHEMA:
return getRemoteSchema();
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__ROLE:
return getRole();
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__SHOW:
return getShow();
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__TITLE:
return getTitle();
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__TYPE:
return getType();
}
return super.eGet(featureID, resolve, coreType);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void eSet(int featureID, Object newValue) {
switch (featureID) {
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__TOPO_PRIMITIVE_GROUP:
((FeatureMap.Internal)getTopoPrimitiveGroup()).set(newValue);
return;
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__ACTUATE:
setActuate((ActuateType)newValue);
return;
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__ARCROLE:
setArcrole((String)newValue);
return;
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__HREF:
setHref((String)newValue);
return;
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__REMOTE_SCHEMA:
setRemoteSchema((String)newValue);
return;
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__ROLE:
setRole((String)newValue);
return;
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__SHOW:
setShow((ShowType)newValue);
return;
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__TITLE:
setTitle((String)newValue);
return;
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__TYPE:
setType((String)newValue);
return;
}
super.eSet(featureID, newValue);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void eUnset(int featureID) {
switch (featureID) {
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__TOPO_PRIMITIVE_GROUP:
getTopoPrimitiveGroup().clear();
return;
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__ACTUATE:
unsetActuate();
return;
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__ARCROLE:
setArcrole(ARCROLE_EDEFAULT);
return;
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__HREF:
setHref(HREF_EDEFAULT);
return;
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__REMOTE_SCHEMA:
setRemoteSchema(REMOTE_SCHEMA_EDEFAULT);
return;
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__ROLE:
setRole(ROLE_EDEFAULT);
return;
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__SHOW:
unsetShow();
return;
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__TITLE:
setTitle(TITLE_EDEFAULT);
return;
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__TYPE:
unsetType();
return;
}
super.eUnset(featureID);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public boolean eIsSet(int featureID) {
switch (featureID) {
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__TOPO_PRIMITIVE_GROUP:
return topoPrimitiveGroup != null && !topoPrimitiveGroup.isEmpty();
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__TOPO_PRIMITIVE:
return getTopoPrimitive() != null;
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__ACTUATE:
return isSetActuate();
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__ARCROLE:
return ARCROLE_EDEFAULT == null ? arcrole != null : !ARCROLE_EDEFAULT.equals(arcrole);
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__HREF:
return HREF_EDEFAULT == null ? href != null : !HREF_EDEFAULT.equals(href);
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__REMOTE_SCHEMA:
return REMOTE_SCHEMA_EDEFAULT == null ? remoteSchema != null : !REMOTE_SCHEMA_EDEFAULT.equals(remoteSchema);
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__ROLE:
return ROLE_EDEFAULT == null ? role != null : !ROLE_EDEFAULT.equals(role);
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__SHOW:
return isSetShow();
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__TITLE:
return TITLE_EDEFAULT == null ? title != null : !TITLE_EDEFAULT.equals(title);
case GmlPackage.TOPO_PRIMITIVE_MEMBER_TYPE__TYPE:
return isSetType();
}
return super.eIsSet(featureID);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public String toString() {
if (eIsProxy()) return super.toString();
StringBuffer result = new StringBuffer(super.toString());
result.append(" (topoPrimitiveGroup: ");
result.append(topoPrimitiveGroup);
result.append(", actuate: ");
if (actuateESet) result.append(actuate); else result.append("<unset>");
result.append(", arcrole: ");
result.append(arcrole);
result.append(", href: ");
result.append(href);
result.append(", remoteSchema: ");
result.append(remoteSchema);
result.append(", role: ");
result.append(role);
result.append(", show: ");
if (showESet) result.append(show); else result.append("<unset>");
result.append(", title: ");
result.append(title);
result.append(", type: ");
if (typeESet) result.append(type); else result.append("<unset>");
result.append(')');
return result.toString();
}
} //TopoPrimitiveMemberTypeImpl
|
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.transform.persistence;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.AliasMetadata;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.IndexTemplateMetadata;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.xpack.core.common.notifications.AbstractAuditMessage;
import org.elasticsearch.xpack.core.transform.TransformField;
import org.elasticsearch.xpack.core.transform.transforms.DestConfig;
import org.elasticsearch.xpack.core.transform.transforms.SourceConfig;
import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint;
import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats;
import org.elasticsearch.xpack.core.transform.transforms.TransformProgress;
import org.elasticsearch.xpack.core.transform.transforms.TransformState;
import org.elasticsearch.xpack.core.transform.transforms.TransformStoredDoc;
import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants;
import java.io.IOException;
import java.util.Collections;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME;
import static org.elasticsearch.xpack.core.ClientHelper.TRANSFORM_ORIGIN;
import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;
import static org.elasticsearch.xpack.core.transform.TransformField.TRANSFORM_ID;
public final class TransformInternalIndex {
/* Changelog of internal index versions
*
* Please list changes and increase the version in {@link TransformInternalIndexConstants} if you are the first in this release cycle
*
* version 1 (7.2): initial
* version 2 (7.4): cleanup, add config::version, config::create_time, checkpoint::timestamp, checkpoint::time_upper_bound,
* progress::docs_processed, progress::docs_indexed,
* stats::exponential_avg_checkpoint_duration_ms, stats::exponential_avg_documents_indexed,
* stats::exponential_avg_documents_processed
* version 3 (7.5): rename to .transform-internal-xxx
* version 4 (7.6): state::should_stop_at_checkpoint
* checkpoint::checkpoint
* version 5 (7.7): stats::processing_time_in_ms, stats::processing_total
*/
// constants for mappings
public static final String DYNAMIC = "dynamic";
public static final String PROPERTIES = "properties";
public static final String TYPE = "type";
public static final String ENABLED = "enabled";
public static final String DATE = "date";
public static final String TEXT = "text";
public static final String FIELDS = "fields";
public static final String RAW = "raw";
public static final String IGNORE_ABOVE = "ignore_above";
// data types
public static final String FLOAT = "float";
public static final String DOUBLE = "double";
public static final String LONG = "long";
public static final String KEYWORD = "keyword";
public static final String BOOLEAN = "boolean";
public static IndexTemplateMetadata getIndexTemplateMetadata() throws IOException {
IndexTemplateMetadata transformTemplate = IndexTemplateMetadata.builder(TransformInternalIndexConstants.LATEST_INDEX_VERSIONED_NAME)
.patterns(Collections.singletonList(TransformInternalIndexConstants.LATEST_INDEX_VERSIONED_NAME))
.version(Version.CURRENT.id)
.settings(
Settings.builder()
// the configurations are expected to be small
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-1")
)
.putMapping(MapperService.SINGLE_MAPPING_NAME, Strings.toString(mappings()))
.build();
return transformTemplate;
}
public static IndexTemplateMetadata getAuditIndexTemplateMetadata() throws IOException {
IndexTemplateMetadata transformTemplate = IndexTemplateMetadata.builder(TransformInternalIndexConstants.AUDIT_INDEX)
.patterns(Collections.singletonList(TransformInternalIndexConstants.AUDIT_INDEX_PREFIX + "*"))
.version(Version.CURRENT.id)
.settings(
Settings.builder()
// the audits are expected to be small
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-1")
.put(IndexMetadata.SETTING_INDEX_HIDDEN, true)
)
.putMapping(MapperService.SINGLE_MAPPING_NAME, Strings.toString(auditMappings()))
.putAlias(AliasMetadata.builder(TransformInternalIndexConstants.AUDIT_INDEX_READ_ALIAS).isHidden(true))
.build();
return transformTemplate;
}
private static XContentBuilder auditMappings() throws IOException {
XContentBuilder builder = jsonBuilder().startObject();
builder.startObject(SINGLE_MAPPING_NAME);
addMetaInformation(builder);
builder.field(DYNAMIC, "false");
builder
.startObject(PROPERTIES)
.startObject(TRANSFORM_ID)
.field(TYPE, KEYWORD)
.endObject()
.startObject(AbstractAuditMessage.LEVEL.getPreferredName())
.field(TYPE, KEYWORD)
.endObject()
.startObject(AbstractAuditMessage.MESSAGE.getPreferredName())
.field(TYPE, TEXT)
.startObject(FIELDS)
.startObject(RAW)
.field(TYPE, KEYWORD)
.field(IGNORE_ABOVE, 1024)
.endObject()
.endObject()
.endObject()
.startObject(AbstractAuditMessage.TIMESTAMP.getPreferredName())
.field(TYPE, DATE)
.endObject()
.startObject(AbstractAuditMessage.NODE_NAME.getPreferredName())
.field(TYPE, KEYWORD)
.endObject()
.endObject()
.endObject()
.endObject();
return builder;
}
public static XContentBuilder mappings() throws IOException {
XContentBuilder builder = jsonBuilder();
return mappings(builder);
}
public static XContentBuilder mappings(XContentBuilder builder) throws IOException {
builder.startObject();
builder.startObject(MapperService.SINGLE_MAPPING_NAME);
addMetaInformation(builder);
// do not allow anything outside of the defined schema
builder.field(DYNAMIC, "false");
// the schema definitions
builder.startObject(PROPERTIES);
// overall doc type
builder.startObject(TransformField.INDEX_DOC_TYPE.getPreferredName()).field(TYPE, KEYWORD).endObject();
// add the schema for transform configurations
addTransformsConfigMappings(builder);
// add the schema for transform stats
addTransformStoredDocMappings(builder);
// add the schema for checkpoints
addTransformCheckpointMappings(builder);
// end type
builder.endObject();
// end properties
builder.endObject();
// end mapping
builder.endObject();
return builder;
}
private static XContentBuilder addTransformStoredDocMappings(XContentBuilder builder) throws IOException {
return builder
.startObject(TransformStoredDoc.STATE_FIELD.getPreferredName())
.startObject(PROPERTIES)
.startObject(TransformState.TASK_STATE.getPreferredName())
.field(TYPE, KEYWORD)
.endObject()
.startObject(TransformState.INDEXER_STATE.getPreferredName())
.field(TYPE, KEYWORD)
.endObject()
.startObject(TransformState.SHOULD_STOP_AT_NEXT_CHECKPOINT.getPreferredName())
.field(TYPE, BOOLEAN)
.endObject()
.startObject(TransformState.CURRENT_POSITION.getPreferredName())
.field(ENABLED, false)
.endObject()
.startObject(TransformState.CHECKPOINT.getPreferredName())
.field(TYPE, LONG)
.endObject()
.startObject(TransformState.REASON.getPreferredName())
.field(TYPE, KEYWORD)
.endObject()
.startObject(TransformState.PROGRESS.getPreferredName())
.startObject(PROPERTIES)
.startObject(TransformProgress.TOTAL_DOCS.getPreferredName())
.field(TYPE, LONG)
.endObject()
.startObject(TransformProgress.DOCS_REMAINING.getPreferredName())
.field(TYPE, LONG)
.endObject()
.startObject(TransformProgress.PERCENT_COMPLETE)
.field(TYPE, FLOAT)
.endObject()
.startObject(TransformProgress.DOCS_INDEXED.getPreferredName())
.field(TYPE, LONG)
.endObject()
.startObject(TransformProgress.DOCS_PROCESSED.getPreferredName())
.field(TYPE, LONG)
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.startObject(TransformField.STATS_FIELD.getPreferredName())
.startObject(PROPERTIES)
.startObject(TransformIndexerStats.NUM_PAGES.getPreferredName())
.field(TYPE, LONG)
.endObject()
.startObject(TransformIndexerStats.NUM_INPUT_DOCUMENTS.getPreferredName())
.field(TYPE, LONG)
.endObject()
.startObject(TransformIndexerStats.NUM_OUTPUT_DOCUMENTS.getPreferredName())
.field(TYPE, LONG)
.endObject()
.startObject(TransformIndexerStats.NUM_INVOCATIONS.getPreferredName())
.field(TYPE, LONG)
.endObject()
.startObject(TransformIndexerStats.INDEX_TIME_IN_MS.getPreferredName())
.field(TYPE, LONG)
.endObject()
.startObject(TransformIndexerStats.SEARCH_TIME_IN_MS.getPreferredName())
.field(TYPE, LONG)
.endObject()
.startObject(TransformIndexerStats.PROCESSING_TIME_IN_MS.getPreferredName())
.field(TYPE, LONG)
.endObject()
.startObject(TransformIndexerStats.INDEX_TOTAL.getPreferredName())
.field(TYPE, LONG)
.endObject()
.startObject(TransformIndexerStats.SEARCH_TOTAL.getPreferredName())
.field(TYPE, LONG)
.endObject()
.startObject(TransformIndexerStats.PROCESSING_TOTAL.getPreferredName())
.field(TYPE, LONG)
.endObject()
.startObject(TransformIndexerStats.SEARCH_FAILURES.getPreferredName())
.field(TYPE, LONG)
.endObject()
.startObject(TransformIndexerStats.INDEX_FAILURES.getPreferredName())
.field(TYPE, LONG)
.endObject()
.startObject(TransformIndexerStats.EXPONENTIAL_AVG_CHECKPOINT_DURATION_MS.getPreferredName())
.field(TYPE, DOUBLE)
.endObject()
.startObject(TransformIndexerStats.EXPONENTIAL_AVG_DOCUMENTS_INDEXED.getPreferredName())
.field(TYPE, DOUBLE)
.endObject()
.startObject(TransformIndexerStats.EXPONENTIAL_AVG_DOCUMENTS_PROCESSED.getPreferredName())
.field(TYPE, DOUBLE)
.endObject()
.endObject()
.endObject();
}
public static XContentBuilder addTransformsConfigMappings(XContentBuilder builder) throws IOException {
return builder
.startObject(TransformField.ID.getPreferredName())
.field(TYPE, KEYWORD)
.endObject()
.startObject(TransformField.SOURCE.getPreferredName())
.startObject(PROPERTIES)
.startObject(SourceConfig.INDEX.getPreferredName())
.field(TYPE, KEYWORD)
.endObject()
.startObject(SourceConfig.QUERY.getPreferredName())
.field(ENABLED, false)
.endObject()
.endObject()
.endObject()
.startObject(TransformField.DESTINATION.getPreferredName())
.startObject(PROPERTIES)
.startObject(DestConfig.INDEX.getPreferredName())
.field(TYPE, KEYWORD)
.endObject()
.endObject()
.endObject()
.startObject(TransformField.DESCRIPTION.getPreferredName())
.field(TYPE, TEXT)
.endObject()
.startObject(TransformField.VERSION.getPreferredName())
.field(TYPE, KEYWORD)
.endObject()
.startObject(TransformField.CREATE_TIME.getPreferredName())
.field(TYPE, DATE)
.endObject();
}
private static XContentBuilder addTransformCheckpointMappings(XContentBuilder builder) throws IOException {
return builder
.startObject(TransformField.TIMESTAMP_MILLIS.getPreferredName())
.field(TYPE, DATE)
.endObject()
.startObject(TransformField.TIME_UPPER_BOUND_MILLIS.getPreferredName())
.field(TYPE, DATE)
.endObject()
.startObject(TransformCheckpoint.CHECKPOINT.getPreferredName())
.field(TYPE, LONG)
.endObject();
}
/**
* Inserts "_meta" containing useful information like the version into the mapping
* template.
*
* @param builder The builder for the mappings
* @throws IOException On write error
*/
private static XContentBuilder addMetaInformation(XContentBuilder builder) throws IOException {
return builder.startObject("_meta").field("version", Version.CURRENT).endObject();
}
/**
* This method should be called before any document is indexed that relies on the
* existence of the latest index templates to create the internal and audit index.
* The reason is that the standard template upgrader only runs when the master node
* is upgraded to the newer version. If data nodes are upgraded before master
* nodes and transforms get assigned to those data nodes, then without this check
* the data nodes would index documents into the internal index before the necessary
* index template is present, which would result in an index with completely
* dynamic mappings being created (which is very bad).
*/
public static void installLatestIndexTemplatesIfRequired(ClusterService clusterService, Client client, ActionListener<Void> listener) {
installLatestVersionedIndexTemplateIfRequired(
clusterService,
client,
ActionListener.wrap(r -> { installLatestAuditIndexTemplateIfRequired(clusterService, client, listener); }, listener::onFailure)
);
}
protected static boolean haveLatestVersionedIndexTemplate(ClusterState state) {
return state.getMetadata().getTemplates().containsKey(TransformInternalIndexConstants.LATEST_INDEX_VERSIONED_NAME);
}
protected static boolean haveLatestAuditIndexTemplate(ClusterState state) {
return state.getMetadata().getTemplates().containsKey(TransformInternalIndexConstants.AUDIT_INDEX);
}
protected static void installLatestVersionedIndexTemplateIfRequired(
ClusterService clusterService,
Client client,
ActionListener<Void> listener
) {
// The check for existence of the template is against local cluster state, so very cheap
if (haveLatestVersionedIndexTemplate(clusterService.state())) {
listener.onResponse(null);
return;
}
// Installing the template involves communication with the master node, so it's more expensive but much rarer
try {
IndexTemplateMetadata indexTemplateMetadata = getIndexTemplateMetadata();
BytesReference jsonMappings = indexTemplateMetadata.mappings().uncompressed();
PutIndexTemplateRequest request = new PutIndexTemplateRequest(TransformInternalIndexConstants.LATEST_INDEX_VERSIONED_NAME)
.patterns(indexTemplateMetadata.patterns())
.version(indexTemplateMetadata.version())
.settings(indexTemplateMetadata.settings())
.mapping(XContentHelper.convertToMap(jsonMappings, true, XContentType.JSON).v2());
ActionListener<AcknowledgedResponse> innerListener = ActionListener.wrap(r -> listener.onResponse(null), listener::onFailure);
executeAsyncWithOrigin(
client.threadPool().getThreadContext(),
TRANSFORM_ORIGIN,
request,
innerListener,
client.admin().indices()::putTemplate
);
} catch (IOException e) {
listener.onFailure(e);
}
}
protected static void installLatestAuditIndexTemplateIfRequired(
ClusterService clusterService,
Client client,
ActionListener<Void> listener
) {
// The check for existence of the template is against local cluster state, so very cheap
if (haveLatestAuditIndexTemplate(clusterService.state())) {
listener.onResponse(null);
return;
}
// Installing the template involves communication with the master node, so it's more expensive but much rarer
try {
IndexTemplateMetadata indexTemplateMetadata = getAuditIndexTemplateMetadata();
BytesReference jsonMappings = indexTemplateMetadata.mappings().uncompressed();
PutIndexTemplateRequest request = new PutIndexTemplateRequest(TransformInternalIndexConstants.AUDIT_INDEX).patterns(
indexTemplateMetadata.patterns()
)
.version(indexTemplateMetadata.version())
.settings(indexTemplateMetadata.settings())
.mapping(XContentHelper.convertToMap(jsonMappings, true, XContentType.JSON).v2());
ActionListener<AcknowledgedResponse> innerListener = ActionListener.wrap(r -> listener.onResponse(null), listener::onFailure);
executeAsyncWithOrigin(
client.threadPool().getThreadContext(),
TRANSFORM_ORIGIN,
request,
innerListener,
client.admin().indices()::putTemplate
);
} catch (IOException e) {
listener.onFailure(e);
}
}
private TransformInternalIndex() {}
}
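// Illustrative usage sketch (not part of the original class above): shows how a caller
// might make sure the transform index templates exist before indexing any document.
// It only uses APIs already referenced in this file (installLatestIndexTemplatesIfRequired,
// ActionListener.wrap, Strings.toString, mappings()); the class and method names here are
// invented for the example, not Elasticsearch's actual call site.
final class TransformInternalIndexUsageExample {
    static void ensureTemplatesThenIndex(ClusterService clusterService, Client client) throws IOException {
        // Render the internal-index mappings as JSON, e.g. for logging or debugging.
        String mappingsJson = Strings.toString(TransformInternalIndex.mappings());
        assert mappingsJson.isEmpty() == false;
        // Install the versioned and audit templates if the local cluster state lacks them,
        // then continue with whatever indexing work depends on those templates.
        TransformInternalIndex.installLatestIndexTemplatesIfRequired(
            clusterService,
            client,
            ActionListener.wrap(r -> { /* safe to index transform documents now */ }, e -> { /* handle failure */ })
        );
    }
    private TransformInternalIndexUsageExample() {}
}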
|
|
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// THIS IS MODIFIED COPY OF THE "L" PLATFORM CLASS. BE CAREFUL ABOUT EDITS.
// THIS CODE SHOULD FOLLOW ANDROID STYLE.
package org.uribeacon.advertise.compat;
import android.os.Parcel;
import android.os.Parcelable;
/**
* The {@link AdvertiseSettings} provide a way to adjust advertising preferences for each
* Bluetooth LE advertisement instance. Use {@link AdvertiseSettings.Builder} to create an
* instance of this class.
*/
public final class AdvertiseSettings implements Parcelable {
/**
* Perform Bluetooth LE advertising in low power mode. This is the default and preferred
* advertising mode as it consumes the least power.
*/
public static final int ADVERTISE_MODE_LOW_POWER = 0;
/**
* Perform Bluetooth LE advertising in balanced power mode. This is balanced between advertising
* frequency and power consumption.
*/
public static final int ADVERTISE_MODE_BALANCED = 1;
/**
* Perform Bluetooth LE advertising in low latency, high power mode. This has the highest power
* consumption and should not be used for continuous background advertising.
*/
public static final int ADVERTISE_MODE_LOW_LATENCY = 2;
/**
* Advertise using the lowest transmission (TX) power level. Low transmission power can be used
* to restrict the visibility range of advertising packets.
*/
public static final int ADVERTISE_TX_POWER_ULTRA_LOW = 0;
/**
* Advertise using low TX power level.
*/
public static final int ADVERTISE_TX_POWER_LOW = 1;
/**
* Advertise using medium TX power level.
*/
public static final int ADVERTISE_TX_POWER_MEDIUM = 2;
/**
* Advertise using high TX power level. This corresponds to largest visibility range of the
* advertising packet.
*/
public static final int ADVERTISE_TX_POWER_HIGH = 3;
/**
* The maximum limited advertisement duration as specified by the Bluetooth SIG
*/
private static final int LIMITED_ADVERTISING_MAX_MILLIS = 180 * 1000;
private final int mAdvertiseMode;
private final int mAdvertiseTxPowerLevel;
private final int mAdvertiseTimeoutMillis;
private final boolean mAdvertiseConnectable;
private AdvertiseSettings(int advertiseMode, int advertiseTxPowerLevel,
boolean advertiseConnectable, int advertiseTimeout) {
mAdvertiseMode = advertiseMode;
mAdvertiseTxPowerLevel = advertiseTxPowerLevel;
mAdvertiseConnectable = advertiseConnectable;
mAdvertiseTimeoutMillis = advertiseTimeout;
}
private AdvertiseSettings(Parcel in) {
mAdvertiseMode = in.readInt();
mAdvertiseTxPowerLevel = in.readInt();
mAdvertiseConnectable = in.readInt() != 0;
mAdvertiseTimeoutMillis = in.readInt();
}
/**
* Returns the advertise mode.
*/
public int getMode() {
return mAdvertiseMode;
}
/**
* Returns the TX power level for advertising.
*/
public int getTxPowerLevel() {
return mAdvertiseTxPowerLevel;
}
/**
* Returns whether the advertisement will indicate connectable.
*/
public boolean isConnectable() {
return mAdvertiseConnectable;
}
/**
* Returns the advertising time limit in milliseconds.
*/
public int getTimeout() {
return mAdvertiseTimeoutMillis;
}
@Override
public String toString() {
return "Settings [mAdvertiseMode=" + mAdvertiseMode
+ ", mAdvertiseTxPowerLevel=" + mAdvertiseTxPowerLevel
+ ", mAdvertiseConnectable=" + mAdvertiseConnectable
+ ", mAdvertiseTimeoutMillis=" + mAdvertiseTimeoutMillis + "]";
}
@Override
public int describeContents() {
return 0;
}
@Override
public void writeToParcel(Parcel dest, int flags) {
dest.writeInt(mAdvertiseMode);
dest.writeInt(mAdvertiseTxPowerLevel);
dest.writeInt(mAdvertiseConnectable ? 1 : 0);
dest.writeInt(mAdvertiseTimeoutMillis);
}
public static final Parcelable.Creator<AdvertiseSettings> CREATOR =
new Creator<AdvertiseSettings>() {
@Override
public AdvertiseSettings[] newArray(int size) {
return new AdvertiseSettings[size];
}
@Override
public AdvertiseSettings createFromParcel(Parcel in) {
return new AdvertiseSettings(in);
}
};
/**
* Builder class for {@link AdvertiseSettings}.
*/
public static final class Builder {
private int mMode = ADVERTISE_MODE_LOW_POWER;
private int mTxPowerLevel = ADVERTISE_TX_POWER_MEDIUM;
private int mTimeoutMillis = 0;
private boolean mConnectable = true;
/**
* Set advertise mode to control the advertising power and latency.
*
* @param advertiseMode Bluetooth LE Advertising mode, can only be one of
* {@link AdvertiseSettings#ADVERTISE_MODE_LOW_POWER},
* {@link AdvertiseSettings#ADVERTISE_MODE_BALANCED}, or
* {@link AdvertiseSettings#ADVERTISE_MODE_LOW_LATENCY}.
* @throws IllegalArgumentException If the advertiseMode is invalid.
*/
public Builder setAdvertiseMode(int advertiseMode) {
if (advertiseMode < ADVERTISE_MODE_LOW_POWER
|| advertiseMode > ADVERTISE_MODE_LOW_LATENCY) {
throw new IllegalArgumentException("unknown mode " + advertiseMode);
}
mMode = advertiseMode;
return this;
}
/**
* Set advertise TX power level to control the transmission power level for the advertising.
*
* @param txPowerLevel Transmission power of Bluetooth LE Advertising, can only be one of
* {@link AdvertiseSettings#ADVERTISE_TX_POWER_ULTRA_LOW},
* {@link AdvertiseSettings#ADVERTISE_TX_POWER_LOW},
* {@link AdvertiseSettings#ADVERTISE_TX_POWER_MEDIUM} or
* {@link AdvertiseSettings#ADVERTISE_TX_POWER_HIGH}.
* @throws IllegalArgumentException If the {@code txPowerLevel} is invalid.
*/
public Builder setTxPowerLevel(int txPowerLevel) {
if (txPowerLevel < ADVERTISE_TX_POWER_ULTRA_LOW
|| txPowerLevel > ADVERTISE_TX_POWER_HIGH) {
throw new IllegalArgumentException("unknown tx power level " + txPowerLevel);
}
mTxPowerLevel = txPowerLevel;
return this;
}
/**
* Set whether the advertisement type should be connectable or non-connectable.
*
* @param connectable Controls whether the advertisement type will be connectable (true)
* or non-connectable (false).
*/
public Builder setConnectable(boolean connectable) {
mConnectable = connectable;
return this;
}
/**
* Limit advertising to a given amount of time.
* @param timeoutMillis Advertising time limit. May not exceed 180000 milliseconds.
* A value of 0 will disable the time limit.
* @throws IllegalArgumentException If the provided timeout is over 180000 ms.
*/
public Builder setTimeout(int timeoutMillis) {
if (timeoutMillis < 0 || timeoutMillis > LIMITED_ADVERTISING_MAX_MILLIS) {
throw new IllegalArgumentException("timeoutMillis invalid (must be 0-"
+ LIMITED_ADVERTISING_MAX_MILLIS + " milliseconds)");
}
mTimeoutMillis = timeoutMillis;
return this;
}
/**
* Build the {@link AdvertiseSettings} object.
*/
public AdvertiseSettings build() {
return new AdvertiseSettings(mMode, mTxPowerLevel, mConnectable, mTimeoutMillis);
}
}
}
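// Illustrative sketch (not part of the platform copy above): builds a typical
// AdvertiseSettings instance using only the Builder API defined in this file.
// The class name and the chosen values are arbitrary example choices.
final class AdvertiseSettingsExample {
    static AdvertiseSettings lowLatencyForOneMinute() {
        return new AdvertiseSettings.Builder()
                .setAdvertiseMode(AdvertiseSettings.ADVERTISE_MODE_LOW_LATENCY)
                .setTxPowerLevel(AdvertiseSettings.ADVERTISE_TX_POWER_HIGH)
                .setConnectable(false)
                .setTimeout(60 * 1000) // 60 seconds, well under the 180000 ms limit enforced by setTimeout()
                .build();
    }
    private AdvertiseSettingsExample() {}
}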
|
|
/*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.http.impl.client.cache;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import org.apache.http.Header;
import org.apache.http.HeaderElement;
import org.apache.http.HttpRequest;
import org.apache.http.HttpVersion;
import org.apache.http.ProtocolException;
import org.apache.http.client.cache.HeaderConstants;
import org.apache.http.client.cache.HttpCacheEntry;
import org.apache.http.impl.cookie.DateUtils;
import org.apache.http.message.BasicHeader;
import org.apache.http.message.BasicHttpRequest;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class TestConditionalRequestBuilder {
private ConditionalRequestBuilder impl;
private HttpRequest request;
private HttpCacheEntry entry;
@Before
public void setUp() throws Exception {
impl = new ConditionalRequestBuilder();
request = new BasicHttpRequest("GET", "/", HttpVersion.HTTP_1_1);
entry = HttpTestUtils.makeCacheEntry();
}
@Test
public void testBuildConditionalRequestWithLastModified() throws ProtocolException {
String theMethod = "GET";
String theUri = "/theuri";
String lastModified = "this is my last modified date";
HttpRequest request = new BasicHttpRequest(theMethod, theUri);
request.addHeader("Accept-Encoding", "gzip");
Header[] headers = new Header[] {
new BasicHeader("Date", DateUtils.formatDate(new Date())),
new BasicHeader("Last-Modified", lastModified) };
HttpCacheEntry cacheEntry = HttpTestUtils.makeCacheEntry(headers);
HttpRequest newRequest = impl.buildConditionalRequest(request, cacheEntry);
Assert.assertNotSame(request, newRequest);
Assert.assertEquals(theMethod, newRequest.getRequestLine().getMethod());
Assert.assertEquals(theUri, newRequest.getRequestLine().getUri());
Assert.assertEquals(request.getRequestLine().getProtocolVersion(), newRequest
.getRequestLine().getProtocolVersion());
Assert.assertEquals(2, newRequest.getAllHeaders().length);
Assert.assertEquals("Accept-Encoding", newRequest.getAllHeaders()[0].getName());
Assert.assertEquals("gzip", newRequest.getAllHeaders()[0].getValue());
Assert.assertEquals("If-Modified-Since", newRequest.getAllHeaders()[1].getName());
Assert.assertEquals(lastModified, newRequest.getAllHeaders()[1].getValue());
}
@Test
public void testConditionalRequestForEntryWithLastModifiedAndEtagIncludesBothAsValidators()
throws Exception {
Date now = new Date();
Date tenSecondsAgo = new Date(now.getTime() - 10 * 1000L);
Date twentySecondsAgo = new Date(now.getTime() - 20 * 1000L);
final String lmDate = DateUtils.formatDate(twentySecondsAgo);
final String etag = "\"etag\"";
Header[] headers = {
new BasicHeader("Date", DateUtils.formatDate(tenSecondsAgo)),
new BasicHeader("Last-Modified", lmDate),
new BasicHeader("ETag", etag)
};
HttpRequest request = new BasicHttpRequest("GET", "/", HttpVersion.HTTP_1_1);
HttpCacheEntry entry = HttpTestUtils.makeCacheEntry(headers);
HttpRequest result = impl.buildConditionalRequest(request, entry);
Assert.assertEquals(lmDate,
result.getFirstHeader("If-Modified-Since").getValue());
Assert.assertEquals(etag,
result.getFirstHeader("If-None-Match").getValue());
}
@Test
public void testBuildConditionalRequestWithETag() throws ProtocolException {
String theMethod = "GET";
String theUri = "/theuri";
String theETag = "this is my eTag";
HttpRequest request = new BasicHttpRequest(theMethod, theUri);
request.addHeader("Accept-Encoding", "gzip");
Header[] headers = new Header[] {
new BasicHeader("Date", DateUtils.formatDate(new Date())),
new BasicHeader("Last-Modified", DateUtils.formatDate(new Date())),
new BasicHeader("ETag", theETag) };
HttpCacheEntry cacheEntry = HttpTestUtils.makeCacheEntry(headers);
HttpRequest newRequest = impl.buildConditionalRequest(request, cacheEntry);
Assert.assertNotSame(request, newRequest);
Assert.assertEquals(theMethod, newRequest.getRequestLine().getMethod());
Assert.assertEquals(theUri, newRequest.getRequestLine().getUri());
Assert.assertEquals(request.getRequestLine().getProtocolVersion(), newRequest
.getRequestLine().getProtocolVersion());
Assert.assertEquals(3, newRequest.getAllHeaders().length);
Assert.assertEquals("Accept-Encoding", newRequest.getAllHeaders()[0].getName());
Assert.assertEquals("gzip", newRequest.getAllHeaders()[0].getValue());
Assert.assertEquals("If-None-Match", newRequest.getAllHeaders()[1].getName());
Assert.assertEquals(theETag, newRequest.getAllHeaders()[1].getValue());
}
@Test
public void testCacheEntryWithMustRevalidateDoesEndToEndRevalidation() throws Exception {
HttpRequest request = new BasicHttpRequest("GET","/",HttpVersion.HTTP_1_1);
Date now = new Date();
Date elevenSecondsAgo = new Date(now.getTime() - 11 * 1000L);
Date tenSecondsAgo = new Date(now.getTime() - 10 * 1000L);
Date nineSecondsAgo = new Date(now.getTime() - 9 * 1000L);
Header[] cacheEntryHeaders = new Header[] {
new BasicHeader("Date", DateUtils.formatDate(tenSecondsAgo)),
new BasicHeader("ETag", "\"etag\""),
new BasicHeader("Cache-Control","max-age=5, must-revalidate") };
HttpCacheEntry cacheEntry = HttpTestUtils.makeCacheEntry(elevenSecondsAgo, nineSecondsAgo, cacheEntryHeaders);
HttpRequest result = impl.buildConditionalRequest(request, cacheEntry);
boolean foundMaxAge0 = false;
for(Header h : result.getHeaders("Cache-Control")) {
for(HeaderElement elt : h.getElements()) {
if ("max-age".equalsIgnoreCase(elt.getName())
&& "0".equals(elt.getValue())) {
foundMaxAge0 = true;
}
}
}
Assert.assertTrue(foundMaxAge0);
}
@Test
public void testCacheEntryWithProxyRevalidateDoesEndToEndRevalidation() throws Exception {
HttpRequest request = new BasicHttpRequest("GET","/",HttpVersion.HTTP_1_1);
Date now = new Date();
Date elevenSecondsAgo = new Date(now.getTime() - 11 * 1000L);
Date tenSecondsAgo = new Date(now.getTime() - 10 * 1000L);
Date nineSecondsAgo = new Date(now.getTime() - 9 * 1000L);
Header[] cacheEntryHeaders = new Header[] {
new BasicHeader("Date", DateUtils.formatDate(tenSecondsAgo)),
new BasicHeader("ETag", "\"etag\""),
new BasicHeader("Cache-Control","max-age=5, proxy-revalidate") };
HttpCacheEntry cacheEntry = HttpTestUtils.makeCacheEntry(elevenSecondsAgo, nineSecondsAgo, cacheEntryHeaders);
HttpRequest result = impl.buildConditionalRequest(request, cacheEntry);
boolean foundMaxAge0 = false;
for(Header h : result.getHeaders("Cache-Control")) {
for(HeaderElement elt : h.getElements()) {
if ("max-age".equalsIgnoreCase(elt.getName())
&& "0".equals(elt.getValue())) {
foundMaxAge0 = true;
}
}
}
Assert.assertTrue(foundMaxAge0);
}
@Test
public void testBuildUnconditionalRequestUsesGETMethod()
throws Exception {
HttpRequest result = impl.buildUnconditionalRequest(request, entry);
Assert.assertEquals("GET", result.getRequestLine().getMethod());
}
@Test
public void testBuildUnconditionalRequestUsesRequestUri()
throws Exception {
final String uri = "/theURI";
request = new BasicHttpRequest("GET", uri, HttpVersion.HTTP_1_1);
HttpRequest result = impl.buildUnconditionalRequest(request, entry);
Assert.assertEquals(uri, result.getRequestLine().getUri());
}
@Test
public void testBuildUnconditionalRequestUsesHTTP_1_1()
throws Exception {
HttpRequest result = impl.buildUnconditionalRequest(request, entry);
Assert.assertEquals(HttpVersion.HTTP_1_1, result.getProtocolVersion());
}
@Test
public void testBuildUnconditionalRequestAddsCacheControlNoCache()
throws Exception {
HttpRequest result = impl.buildUnconditionalRequest(request, entry);
boolean ccNoCacheFound = false;
for(Header h : result.getHeaders("Cache-Control")) {
for(HeaderElement elt : h.getElements()) {
if ("no-cache".equals(elt.getName())) {
ccNoCacheFound = true;
}
}
}
Assert.assertTrue(ccNoCacheFound);
}
@Test
public void testBuildUnconditionalRequestAddsPragmaNoCache()
throws Exception {
HttpRequest result = impl.buildUnconditionalRequest(request, entry);
boolean ccNoCacheFound = false;
for(Header h : result.getHeaders("Pragma")) {
for(HeaderElement elt : h.getElements()) {
if ("no-cache".equals(elt.getName())) {
ccNoCacheFound = true;
}
}
}
Assert.assertTrue(ccNoCacheFound);
}
@Test
public void testBuildUnconditionalRequestDoesNotUseIfRange()
throws Exception {
request.addHeader("If-Range","\"etag\"");
HttpRequest result = impl.buildUnconditionalRequest(request, entry);
Assert.assertNull(result.getFirstHeader("If-Range"));
}
@Test
public void testBuildUnconditionalRequestDoesNotUseIfMatch()
throws Exception {
request.addHeader("If-Match","\"etag\"");
HttpRequest result = impl.buildUnconditionalRequest(request, entry);
Assert.assertNull(result.getFirstHeader("If-Match"));
}
@Test
public void testBuildUnconditionalRequestDoesNotUseIfNoneMatch()
throws Exception {
request.addHeader("If-None-Match","\"etag\"");
HttpRequest result = impl.buildUnconditionalRequest(request, entry);
Assert.assertNull(result.getFirstHeader("If-None-Match"));
}
@Test
public void testBuildUnconditionalRequestDoesNotUseIfUnmodifiedSince()
throws Exception {
request.addHeader("If-Unmodified-Since", DateUtils.formatDate(new Date()));
HttpRequest result = impl.buildUnconditionalRequest(request, entry);
Assert.assertNull(result.getFirstHeader("If-Unmodified-Since"));
}
@Test
public void testBuildUnconditionalRequestDoesNotUseIfModifiedSince()
throws Exception {
request.addHeader("If-Modified-Since", DateUtils.formatDate(new Date()));
HttpRequest result = impl.buildUnconditionalRequest(request, entry);
Assert.assertNull(result.getFirstHeader("If-Modified-Since"));
}
@Test
public void testBuildUnconditionalRequestCarriesOtherRequestHeaders()
throws Exception {
request.addHeader("User-Agent","MyBrowser/1.0");
HttpRequest result = impl.buildUnconditionalRequest(request, entry);
Assert.assertEquals("MyBrowser/1.0",
result.getFirstHeader("User-Agent").getValue());
}
@Test
public void testBuildConditionalRequestFromVariants() throws Exception {
String etag1 = "\"123\"";
String etag2 = "\"456\"";
String etag3 = "\"789\"";
Map<String,Variant> variantEntries = new HashMap<String,Variant>();
variantEntries.put(etag1, new Variant("A","B",HttpTestUtils.makeCacheEntry(new Header[] { new BasicHeader("ETag", etag1) })));
variantEntries.put(etag2, new Variant("C","D",HttpTestUtils.makeCacheEntry(new Header[] { new BasicHeader("ETag", etag2) })));
variantEntries.put(etag3, new Variant("E","F",HttpTestUtils.makeCacheEntry(new Header[] { new BasicHeader("ETag", etag3) })));
HttpRequest conditional = impl.buildConditionalRequestFromVariants(request, variantEntries);
// Verify the If-None-Match header contains each ETag exactly once: check for existence, then strip the ETags and separators and confirm nothing is left over.
String ifNoneMatch = conditional.getFirstHeader(HeaderConstants.IF_NONE_MATCH).getValue();
Assert.assertTrue(ifNoneMatch.contains(etag1));
Assert.assertTrue(ifNoneMatch.contains(etag2));
Assert.assertTrue(ifNoneMatch.contains(etag3));
ifNoneMatch = ifNoneMatch.replace(etag1, "");
ifNoneMatch = ifNoneMatch.replace(etag2, "");
ifNoneMatch = ifNoneMatch.replace(etag3, "");
ifNoneMatch = ifNoneMatch.replace(",","");
ifNoneMatch = ifNoneMatch.replace(" ", "");
Assert.assertEquals("", ifNoneMatch);
}
}
|
|
package com.cluster.employeeproject.employee.managedbean;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.RequestScoped;
import javax.faces.context.FacesContext;
import javax.faces.event.ValueChangeEvent;
import org.richfaces.event.FileUploadEvent;
import org.richfaces.model.UploadedFile;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import com.cluster.employeeproject.common.exception.CommonUtil;
import com.cluster.employeeproject.common.exception.EmployeeManagementApplicationException;
import com.cluster.employeeproject.employee.businessservice.EmployeeBS;
import com.cluster.employeeproject.entity.Employee;
import com.cluster.employeeproject.web.employee.mapper.EmployeeMapper;
import com.cluster.employeeproject.web.employee.to.EmployeeTO;
/*@ManagedBean(name="employeeBean")
@RequestScoped*/
@Component
@Scope("request")
public class EmployeeBean
{
@Autowired
EmployeeBS employeeBS;
EmployeeTO employeeTO;
private UploadedFile files ;
private byte[] image;
private boolean updating;
@PostConstruct
public void init()
{
employeeTO=(EmployeeTO) FacesContext.getCurrentInstance().getExternalContext().getSessionMap().get("employeeTO");
// FacesContext.getCurrentInstance().getExternalContext().getSessionMap().remove("employeeTO");
System.out.println("employee in inti() "+employeeTO);
if(employeeTO!=null)
{
System.out.println("employeebean update");
updating=true;
}
if(employeeTO==null)
{
System.out.println("---------");
employeeTO=new EmployeeTO();
}
}
private static Map<String,String> countries;
private static Map<String,String> states;
private static Map<String,String> cities;
static
{
countries=new HashMap<String, String>();
countries.put("India","10");
countries.put("America","20");
countries.put("SriLanka","30");
countries.put("Pakistan","40");
ApplicationMap.init();
}
public Map<String, String> getCountries() {
return countries;
}
public void setCountries(Map<String, String> countries) {
EmployeeBean.countries = countries;
}
public Map<String, String> getStates() {
return states;
}
public void setStates(Map<String, String> states) {
EmployeeBean.states = states;
}
public EmployeeTO getEmployeeTO() {
return employeeTO;
}
public void setEmployeeTO(EmployeeTO employeeTO) {
this.employeeTO = employeeTO;
}
public Map<String, String> getCities() {
return cities;
}
public void setCities(Map<String, String> cities) {
EmployeeBean.cities = cities;
}
public UploadedFile getFiles() {
return files;
}
public void setFiles(UploadedFile files) {
this.files = files;
}
public void valueChangedCountry(ValueChangeEvent event)
{
System.out.println("inside valueChangedCountry ");
String newCountry=event.getNewValue().toString();
System.out.println("Country "+newCountry);
if(newCountry.equalsIgnoreCase("10"))
{
states=new HashMap<String, String>();
states.put("Assam","10");
states.put("Gujarat","20");
states.put("Delhi","30");
states.put("Kerala","40");
states.put("Karnataka","50");
states.put("Maharashtra","60");
}
if(newCountry.equalsIgnoreCase("20"))
{
states=new HashMap<String, String>();
states.put("NewYork","NewYork");
states.put("Califonia","Califonia");
states.put("Washington","Washinaton");
}
if(newCountry.equalsIgnoreCase("40"))
{
states=new HashMap<String, String>();
states.put("Karachi","Karachi");
states.put("Lahore","Lahore");
states.put("Multan","Multan");
}
if(newCountry.equalsIgnoreCase("30"))
{
states=new HashMap<String, String>();
states.put("Colombo","Colombo");
states.put("Jaffan","Jaffan");
}
}
public byte[] getImage() {
return image;
}
public void setImage(byte[] image) {
this.image = image;
}
public boolean isUpdating() {
return updating;
}
public void setUpdating(boolean updating) {
this.updating = updating;
}
public void valueChangedState(ValueChangeEvent event)
{
System.out.println("inside state valuechage event");
String newState=event.getNewValue().toString();
String value= getkey(getStates(), newState);
System.out.println("selected state is " +value);
/*if(newState.equalsIgnoreCase("60"))
{
Map<String, Object> appMap=FacesContext.getCurrentInstance().getExternalContext().getApplicationMap();
setCities((Map<String, String>) appMap.get("maharashtra"));
}
if(newState.equalsIgnoreCase("20"))
{
Map<String, Object> appMap=FacesContext.getCurrentInstance().getExternalContext().getApplicationMap();
setCities((Map<String, String>) appMap.get("gujrat"));
}
if(newState.equalsIgnoreCase("50"))
{
Map<String, Object> appMap=FacesContext.getCurrentInstance().getExternalContext().getApplicationMap();
setCities((Map<String, String>) appMap.get("karanataka"));
}*/
Map<String, Object> appMap=FacesContext.getCurrentInstance().getExternalContext().getApplicationMap();
setCities((Map<String, String>) appMap.get(value));
}
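// Reverse lookup helper: returns the first key in the given map whose value equals
// newState (e.g. the state name for the selected state id), or null if nothing matches.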
private String getkey(Map<String, String> states2, String newState)
{
Iterator<String> map = states2.keySet().iterator();
while (map.hasNext())
{
String key = map.next();
if(states2.get(key).equals(newState))
{
return key;
}
}
return null;
}
public void paint(OutputStream stream, Object object) throws IOException
{
// stream.write(getFiles().getData());
stream.write(image);
stream.close();
}
public void listener(FileUploadEvent event) throws Exception
{
System.out.println("Inside file upload event");
files= event.getUploadedFile();
System.out.println("image file"+files);
employeeTO.setImage(files.getData());
System.out.println("file.getData() "+files.getData());
image=files.getData();
/*FileOutputStream fileOutputStream=new FileOutputStream("/image/imagesTO.jpg");
fileOutputStream.write(files.getData());*/
System.out.println("success");
}
public Date getTimeStamp(){
return new Date();
}
public String save()
{
String strReturn=null;
System.out.println("save method of EmployeeBean");
Employee employee= EmployeeMapper.getEntityFromTO(employeeTO);
System.out.println("after employee mapper");
// EmployeeDelegate employeeDelegate=new EmployeeDelegate();
try {
employeeBS.save(employee, 10);
System.out.println("after employee delegate.save");
strReturn="success";
} catch (EmployeeManagementApplicationException e)
{
CommonUtil.errorSet(e);
e.printStackTrace();
}
catch (ClassNotFoundException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IllegalArgumentException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
System.out.println("employeeId"+employee.getEmployeeId());
return "loginsuccess";
}
public String update()
{
String strReturn=null;
Employee employee= EmployeeMapper.getEntityFromTO(employeeTO);
// EmployeeDelegate employeeDelegate=new EmployeeDelegate();
try
{
employeeBS.update(employee);
} catch (IllegalArgumentException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
System.out.println("employeeId"+employee.getEmployeeId());
FacesContext.getCurrentInstance().getExternalContext().getSessionMap().remove("employeeTO");
System.out.println("end of update() employeeTO "+employeeTO);
if(updating)
{
strReturn="loginsuccess";
}
return strReturn;
}
@PreDestroy
public void destroy()
{
employeeTO=null;
}
}
|
|
/*
* Copyright 2000-2005 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.javascript;
import java.util.List;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import com.intellij.lang.javascript.psi.JSElementFactory;
import com.intellij.lang.javascript.psi.JSFile;
import com.intellij.lang.javascript.psi.JSNamedElement;
import com.intellij.lang.xml.XMLLanguage;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiComment;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiLanguageInjectionHost;
import com.intellij.psi.PsiManager;
import com.intellij.psi.PsiReference;
import com.intellij.psi.PsiWhiteSpace;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil;
import com.intellij.psi.util.PsiTreeUtil;
import consulo.javascript.lang.JavaScriptLanguage;
/**
* Created by IntelliJ IDEA.
* User: Maxim.Mossienko
* Date: Feb 27, 2008
* Time: 7:45:08 PM
*/
public class JSDebuggerSupportUtils
{
@Nullable
public static TextRange getExpressionAtOffset(@Nonnull Project project, @Nonnull Document document, final int offset)
{
PsiFile file = PsiDocumentManager.getInstance(project).getPsiFile(document);
if(file == null)
{
return null;
}
int injectionOffsetCorrection = 0;
PsiReference ref = file.findReferenceAt(offset);
if(ref == null)
{
final PsiElement at = file.findElementAt(offset);
TextRange rangeForNamedElement = getRangeForNamedElement(at, 0);
if(rangeForNamedElement != null)
{
return rangeForNamedElement;
}
final PsiLanguageInjectionHost psiLanguageInjectionHost = PsiTreeUtil.getParentOfType(at, PsiLanguageInjectionHost.class);
if(psiLanguageInjectionHost != null)
{
final Ref<PsiReference> result = new Ref<PsiReference>();
final Ref<PsiElement> eltInInjected = new Ref<PsiElement>();
final int[] injectedOffset = new int[1];
InjectedLanguageUtil.enumerate(psiLanguageInjectionHost, new PsiLanguageInjectionHost.InjectedPsiVisitor()
{
@Override
public void visit(@Nonnull final PsiFile injectedPsi, @Nonnull final List<PsiLanguageInjectionHost.Shred> places)
{
final PsiLanguageInjectionHost.Shred shred = places.get(0);
final int injectedStart = shred.getRangeInsideHost().getStartOffset() + shred.getHost().getTextOffset();
final int offsetInInjected = offset - injectedStart;
result.set(injectedPsi.findReferenceAt(offsetInInjected));
eltInInjected.set(injectedPsi.findElementAt(offsetInInjected));
injectedOffset[0] = injectedStart;
}
});
ref = result.get();
if(ref == null)
{
rangeForNamedElement = getRangeForNamedElement(eltInInjected.get(), injectedOffset[0]);
if(rangeForNamedElement != null)
{
return rangeForNamedElement;
}
}
else
{
injectionOffsetCorrection = injectedOffset[0];
}
}
if(ref == null)
{
return null;
}
}
final PsiElement element = ref.getElement();
if(!element.getLanguage().isKindOf(JavaScriptLanguage.INSTANCE))
{
return null;
}
return element.getTextRange().shiftRight(injectionOffsetCorrection);
}
private static TextRange getRangeForNamedElement(final PsiElement at, int offset)
{
final PsiElement parent = at != null ? at.getParent() : null;
if(parent instanceof JSNamedElement)
{
final PsiElement node = ((JSNamedElement) parent).getNameIdentifier();
if(node != null)
{
return node.getTextRange().shiftRight(offset);
}
}
return null;
}
public static Document createDocument(final String text, final Project project, @Nullable VirtualFile contextVirtualFile, int contextOffset)
{
PsiElement context = null;
if(contextVirtualFile != null)
{
context = getContextElement(contextVirtualFile, contextOffset, project);
}
JSFile file = JSElementFactory.createExpressionCodeFragment(project, text, context, true);
return PsiDocumentManager.getInstance(project).getDocument(file);
}
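// Finds the PSI element to use as the evaluation context for the given file offset:
// the offset is clamped into the document, then the scan moves forward on the same line
// past whitespace and comments; if the resulting element belongs to XML, it is re-resolved
// inside the injected language fragment (e.g. JavaScript embedded in markup).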
@Nullable
public static PsiElement getContextElement(VirtualFile virtualFile, int offset, final @Nonnull Project project)
{
Document document = FileDocumentManager.getInstance().getDocument(virtualFile);
PsiFile file = PsiManager.getInstance(project).findFile(virtualFile);
if(file == null || document == null)
{
return null;
}
if(offset < 0)
{
offset = 0;
}
if(offset > document.getTextLength())
{
offset = document.getTextLength();
}
int startOffset = offset;
int lineEndOffset = document.getLineEndOffset(document.getLineNumber(offset));
PsiElement result = null;
do
{
PsiElement element = file.findElementAt(offset);
if(!(element instanceof PsiWhiteSpace) && !(element instanceof PsiComment))
{
result = element;
break;
}
offset = element.getTextRange().getEndOffset() + 1;
}
while(offset < lineEndOffset);
if(result == null)
{
result = file.findElementAt(startOffset);
}
if(result != null && result.getLanguage() == XMLLanguage.INSTANCE)
{
PsiLanguageInjectionHost parent = PsiTreeUtil.getParentOfType(result, PsiLanguageInjectionHost.class);
if(parent != null)
{
final int finalOffset = offset;
final Ref<PsiElement> resultInInjected = new Ref<PsiElement>();
InjectedLanguageUtil.enumerate(parent, new PsiLanguageInjectionHost.InjectedPsiVisitor()
{
@Override
public void visit(@Nonnull final PsiFile injectedPsi, @Nonnull final List<PsiLanguageInjectionHost.Shred> places)
{
final PsiLanguageInjectionHost.Shred shred = places.get(0);
final int injectedStart = shred.getRangeInsideHost().getStartOffset() + shred.getHost().getTextOffset();
final int offsetInInjected = finalOffset - injectedStart;
resultInInjected.set(injectedPsi.findElementAt(offsetInInjected));
}
});
result = resultInInjected.get();
}
}
return result;
}
}
|
|
package com.octopod.util.minecraft.chat;
import org.json.simple.JSONValue;
import java.util.*;
/**
* Last Updated: 2.15.2014
* A utility class that provides tools relating to Minecraft's chat and chat libraries.
* @author Octopod
*/
public class Chat
{
/**
* Map of character widths that differ from the default of 6 pixels, which most characters have.
*/
final private static Map<Character, Integer> widths = new HashMap<>();
static
{
widths.put('*', 5);
widths.put('>', 5);
widths.put('<', 5);
widths.put(',', 2);
widths.put('!', 2);
widths.put('{', 5);
widths.put('}', 5);
widths.put(')', 5);
widths.put('(', 5);
widths.put('\u00a7', 0); //section sign; Minecraft's color code symbol.
widths.put('[', 4);
widths.put(']', 4);
widths.put(':', 2);
widths.put('\'', 3);
widths.put('|', 2);
widths.put('.', 2);
widths.put('\u2019', 2); //filler character; Reverse quotation mark.
widths.put('`', 3); //old filler character; Width change since 1.7
widths.put(' ', 4);
widths.put('f', 5);
widths.put('k', 5);
widths.put('I', 4);
widths.put('t', 4);
widths.put('l', 3);
widths.put('i', 2);
}
/**
* Returns the width of the given character, according to Minecraft's default chat font (in pixels).
* Most characters are 6 pixels wide.
*
* @param character The character to measure.
* @return The width of the character, in pixels.
*/
static public int width(char character)
{
if(widths.containsKey(character))
return widths.get(character);
return 6;
}
/**
* Returns the width of the given text, taking colour and format codes into account.
* Note that bolded characters are 1 pixel wider than normal.
* @param text The text to measure.
* @return The width of the text, in pixels.
*/
static public int width(String text)
{
int width = 0;
boolean isCode = false;
boolean bolded = false;
for(char character:text.toCharArray())
{
if(character == '\u00a7')
{
isCode = true;
}
else
{
if(isCode)
{
if(bolded && ChatColor.fromChar(character) != null)
{
bolded = false;
}
else if(!bolded)
{
bolded = (character == 'l' || character == 'L');
}
isCode = false;
}
else
{
width += width(character);
if(bolded) width += 1;
}
}
}
return width;
}
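// Example (derived from the width tables above): width("Hi") = 6 + 2 = 8 pixels, while the
// bolded form width("\u00a7lHi") = (6 + 1) + (2 + 1) = 10 pixels, since the "\u00a7l" format
// code itself is zero-width and bold adds one pixel per visible character.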
public static void send(ChatReciever target, String json)
{
target.sendJSONMessage(json);
}
public static String colorize(String message)
{
return colorize(message, '&');
}
public static String colorize(String message, char replace)
{
return message.replace(replace, '\u00A7');
}
/**
* Converts ChatElement objects to a Minecraft legacy chat string.
* Note that hover and click events cannot be represented in legacy chat and are dropped.
* @param elements The ChatElement objects to convert
* @return The legacy chat string.
*/
public static String toLegacyString(ChatElement... elements)
{
StringBuilder sb = new StringBuilder();
for(ChatElement e: elements)
{
sb.append(toLegacyString(e));
}
return sb.toString();
}
public static String toLegacyString(ChatElement element)
{
StringBuilder sb = new StringBuilder();
if(element.getColor() != null)
{
sb.append(element.getColor());
}
for(ChatFormat format: element.getFormats())
{
sb.append(format);
}
if(!element.getText().equals(""))
{
sb.append(element.getText());
}
for(ChatElement extra: element.getExtraElements())
{
if(extra.getColor() != null)
{
sb.append(extra.getColor());
}
for(ChatFormat format: extra.getFormats())
{
sb.append(format);
}
if(!extra.getText().equals(""))
{
sb.append(extra.getText());
}
}
return sb.toString();
}
public static ChatElement fromLegacy(String message) {return fromLegacy(message, '\u00A7');}
/**
* Converts a Minecraft legacy chat string to a ChatElement.
* @param message The legacy chat string to convert
* @param colorCode The character used as the colour-code prefix (usually '\u00A7' or '&')
* @return A new ChatElement.
*/
public static ChatElement fromLegacy(String message, char colorCode) {
ChatElement cb = new ChatElement();
StringBuilder text = new StringBuilder();
boolean nextIsColorCode = false;
ChatColor lastColor = ChatColor.WHITE;
List<ChatFormat> formats = new ArrayList<>();
for(char c: message.toCharArray()) {
if(c == colorCode) {
nextIsColorCode = true;
continue;
}
if(nextIsColorCode) {
nextIsColorCode = false;
ChatColor color = ChatColor.fromChar(c);
ChatFormat format = ChatFormat.fromChar(c);
if(color != null && format == null) { //This is a color
//Push new element
if(!text.toString().equals("")) {
cb.append(text.toString()).color(lastColor).format(formats.toArray(new ChatFormat[formats.size()]));
}
//Reset variables
text = new StringBuilder();
lastColor = color;
formats = new ArrayList<>();
} else if (color == null && format != null) { //This is a format
formats.add(format);
}
continue;
}
text.append(c);
}
cb.append(text.toString()).color(lastColor).format(formats.toArray(new ChatFormat[formats.size()]));
return cb;
}
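/*
 * Round-trip sketch (illustrative, not part of the original class): fromLegacy() splits a
 * legacy string on colour codes into ChatElement pieces, collecting format codes and applying
 * them to the piece they occur in, while toLegacyString() flattens the result back into a
 * legacy string (hover/click events, which legacy chat cannot express, are dropped).
 *
 *   ChatElement element = fromLegacy("\u00a76Gold \u00a7cRed", '\u00a7');
 *   String legacy = toLegacyString(element);
 */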
public static ChatElement join(ChatElement builder, ChatElement glue) {return join(builder, glue, glue);}
public static ChatElement join(ChatElement builder, ChatElement glue, ChatElement lastGlue)
{
ChatElement newBuilder = new ChatElement();
List<ChatElement> elements = builder.getExtraElements();
if(elements.size() > 0) {
newBuilder.append(elements.get(0));
for(int i = 1; i < elements.size(); i++) {
if(i == (elements.size() - 1)) {
newBuilder.append(lastGlue);
} else {
newBuilder.append(glue);
}
newBuilder.append(elements.get(i));
}
}
return newBuilder;
}
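/*
 * Example sketch (not in the original source): join() rebuilds an element by inserting glue
 * between the parent's extra elements, using a different glue before the last one, which is
 * handy for "a, b and c" style lists.
 *
 *   ChatElement list = join(parent, new ChatElement(", "), new ChatElement(" and "));
 *   // "parent" here is any ChatElement whose extra elements are the items to join
 */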
/*
@SuppressWarnings("deprecation")
public static String itemtoJSON(ItemStack item) {
Map<String, Object> toJSONString = new HashMap<String, Object>();
Map<String, Object> meta = new HashMap<String, Object>();
Map<String, Object> display = new HashMap<String, Object>();
toJSONString.put("id", item.getTypeId());
toJSONString.put("Damage", (int)item.getData().getData());
toJSONString.put("Count", item.getAmount());
try{
display.put("Name", item.getItemMeta().getDisplayName());
meta.put("display", display);
} catch (NullPointerException e) {}
toJSONString.put("tag", meta);
return JSONValue.toJSONString(toJSONString);
}
*/
private static interface BlockRenderer <T>
{
public T render(String left, String text, String right);
}
private static BlockRenderer<String> BLOCK_RENDERER_STRING = new BlockRenderer<String>()
{
@Override
public String render(String left, String text, String right)
{
return left + text + right;
}
};
private static BlockRenderer<ChatElement> BLOCK_RENDERER_CHAT = new BlockRenderer<ChatElement>()
{
@Override
public ChatElement render(String left, String text, String right)
{
return new ChatElement().
appendif(!left.equals(""), left).
append(text).
appendif(!right.equals(""), right);
}
};
private static interface FillerRenderer <T>
{
public T render(String filler);
}
private static FillerRenderer<String> FILLER_RENDERER_STRING = new FillerRenderer<String>()
{
@Override
public String render(String filler)
{
return filler;
}
};
private static FillerRenderer<ChatElement> FILLER_RENDERER_CHAT = new FillerRenderer<ChatElement>()
{
@Override
public ChatElement render(String filler)
{
return new ChatElement(filler);
}
};
/**
* Creates a block of text of a given width. Useful for aligning text into columns on multiple lines.
* @param text The string to insert
* @param toWidth The width to fit the text to in pixels. (Will cut the text if toWidth is shorter than it)
* @param alignment Which way to align the text within the block (LEFT, RIGHT, CENTER or CENTER_CEILING).
* @param fillerChar The primary character to use for filling. Usually a space.
* @param precise Whether or not to use filler characters to perfectly match the width (this will create artifacts in the filler)
* @param renderer The interface that this method will use to build the return object.
* @return The text fitted to toWidth.
*/
static private <T> T block(String text, int toWidth, ChatAlignment alignment, char fillerChar, boolean precise, BlockRenderer<T> renderer)
{
String cutText = cut(text, toWidth)[0] + ChatFormat.RESET;
//The total width (in pixels) needed to fill
final int totalFillerWidth = toWidth - width(cutText);
int lFillerWidth, rFillerWidth;
switch(alignment) {
case LEFT:
default:
lFillerWidth = 0;
rFillerWidth = totalFillerWidth;
break;
case RIGHT:
lFillerWidth = totalFillerWidth;
rFillerWidth = 0;
break;
case CENTER: //Cuts the total width to fill in half
lFillerWidth = (int)Math.floor(totalFillerWidth / 2.0);
rFillerWidth = (int)Math.ceil(totalFillerWidth / 2.0);
break;
case CENTER_CEILING:
lFillerWidth = (int)Math.ceil(totalFillerWidth / 2.0);
rFillerWidth = (int)Math.floor(totalFillerWidth / 2.0);
break;
}
return renderer.render(filler(lFillerWidth, precise, fillerChar, FILLER_RENDERER_STRING), cutText, filler(rFillerWidth, precise, fillerChar, FILLER_RENDERER_STRING));
}
static public String blockString(String text, int toWidth, ChatAlignment alignment)
{
return blockString(text, toWidth, alignment, ' ', true);
}
static public String blockString(String text, int toWidth, ChatAlignment alignment, char fillerChar, boolean precise)
{
return block(text, toWidth, alignment, fillerChar, precise, BLOCK_RENDERER_STRING);
}
static public ChatElement block(String text, int toWidth, ChatAlignment alignment)
{
return block(text, toWidth, alignment, ' ', true);
}
static public ChatElement block(String text, int toWidth, ChatAlignment alignment, char fillerChar, boolean precise)
{
return block(text, toWidth, alignment, fillerChar, precise, BLOCK_RENDERER_CHAT);
}
static public ChatElement block(ChatElement element, int toWidth, ChatAlignment alignment)
{
return block(element, toWidth, alignment, ' ', true);
}
static public ChatElement block(ChatElement element, int toWidth, ChatAlignment alignment, char fillerChar, boolean precise)
{
return block(toLegacyString(element), toWidth, alignment, fillerChar, precise, BLOCK_RENDERER_CHAT);
}
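/*
 * Illustrative sketch (not part of the original class): block() pads or cuts a string to a
 * fixed pixel width, so rows built from equal-width blocks line up into columns in chat.
 * The 90px column width below is an arbitrary example value.
 *
 *   ChatElement row = new ChatElement();
 *   row.append(block("Name", 90, ChatAlignment.LEFT));
 *   row.append(block("Score", 90, ChatAlignment.RIGHT));
 */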
final static ChatColor FILLER_COLOR = ChatColor.DARK_GRAY;
public final static String FILLER_2PX_RAW = FILLER_COLOR + "\u2019";
public final static ChatElement FILLER_2PX = new ChatElement(FILLER_2PX_RAW);
/**
* Creates a filler for use in Minecraft's chat. It's a lower-level function used to align text.
* @param width The width of the filler (in pixels)
* @param precise Whether or not to use filler characters to perfectly match the width (this will create artifacts in the filler)
* @param customFiller The character to use primarily for the filler (should be a space most of the time)
* @param renderer The interface that this method will use to build the return object.
* @return The filler, rendered by the given renderer.
*/
static public <T> T filler(int width, boolean precise, char customFiller, FillerRenderer<T> renderer)
{
if(width < 0) throw new IllegalArgumentException("Filler width cannot be less than 0!");
if(width == 0) return renderer.render("");
if(width == 1) throw new IllegalArgumentException("A filler cannot be a pixel wide");
if(width == 2) return renderer.render(FILLER_2PX_RAW);
final int customFillerWidth = width(customFiller);
StringBuilder filler = new StringBuilder();
while(width > customFillerWidth + 1){
filler.append(customFiller);
width -= customFillerWidth;
}
switch(width){
case 6:
if(customFillerWidth == 6) {filler.append(customFiller); break;}
case 5:
if(customFillerWidth == 5) {filler.append(customFiller); break;}
// Use a bolded space (4px + 1px)
filler.append(ChatFormat.BOLD).append(' ').append(ChatFormat.RESET);
break;
case 4:
if(customFillerWidth == 4) {filler.append(customFiller); break;}
// Use a space (4px)
filler.append(" ");
break;
case 3:
if(customFillerWidth == 3) {filler.append(customFiller); break;}
if(!precise) break;
// Use the bolded 2px filler (2px + 1px)
filler.append(FILLER_COLOR).append(ChatFormat.BOLD).append(FILLER_2PX_RAW).append(ChatFormat.RESET);
break;
case 2:
if(customFillerWidth == 2) {filler.append(customFiller); break;}
if(!precise) break;
// Use the 2px filler
filler.append(FILLER_COLOR).append(FILLER_2PX_RAW).append(ChatFormat.RESET);
break;
}
return renderer.render(filler.toString());
}
static public ChatElement filler(int width)
{
return filler(width, true, ' ');
}
static public ChatElement filler(int width, boolean precise, char emptyFiller)
{
return filler(width, precise, emptyFiller, FILLER_RENDERER_CHAT);
}
static public String[] cut(String text, int width)
{
return cut(text, width, 0);
}
/**
* Truncates the text so that the first part returned fits within the given width.
* The text is returned unmodified in the first element if width is wider than the text itself.
* TODO: Make this function return a list of strings instead of just the first one
* @param text The text to cut.
* @param width The maximum width of the first part, in pixels.
* @param wrap How many characters to look back for a space to break on (0 disables word wrapping).
* @return A two-element array: the cut text and the remainder.
*/
static public String[] cut(String text, int width, int wrap)
{
int start = 0;
int end = text.length();
while(width(text.substring(start, end)) > width)
{
end--;
if(wrap > 0 && width(text.substring(start, end)) <= width)
{
int lookbehind = 0; //Amount of characters looked at behind the end index
int temp_end = end; //Temporary end marker
while(lookbehind < wrap && text.charAt(temp_end - 1) != ' ')
{
temp_end--;
if(temp_end <= 0) break;
lookbehind++;
if(text.charAt(temp_end - 1) == ' ')
{
end = temp_end;
break;
}
}
}
}
return new String[]{text.substring(start, end), text.substring(end)};
}
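/*
 * Example sketch (not in the original source): cut() returns the part that fits and the
 * remainder, so repeated calls on the remainder can word-wrap a long line. With wrap > 0 it
 * prefers to break on a space found within that many characters of the cut point.
 *
 *   String[] parts = cut("a fairly long chat message", 40, 8);
 *   // parts[0] = the prefix fitting in 40px (preferring a space break), parts[1] = the rest
 */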
public static String toJSONString(ChatElement... elements)
{
Map<Object, Object> json = new HashMap<>();
json.put("text", "");
json.put("extra", Arrays.asList(elements));
return JSONValue.toJSONString(json);
}
public static String toJSONString(List<ChatElement> elements)
{
Map<Object, Object> json = new HashMap<>();
json.put("text", "");
json.put("extra", elements);
return JSONValue.toJSONString(json);
}
@SuppressWarnings({ "unchecked", "rawtypes" })
public static String toJSONString(ChatElement element)
{
String text = element.getText();
boolean translate = element.getTranslate();
List<String> with = element.getTranslateWith();
Map<String, Object> json = new HashMap();
if(translate) {
json.put("translate", text);
if(with.size() > 0)
json.put("with", with);
} else {
json.put("text", text);
}
if(element.isPlain())
{
return JSONValue.toJSONString(json);
}
if(element.getClick() != null)
{
Map click = new HashMap();
click.put("action", element.getClick().name().toLowerCase());
click.put("value", element.getClickValue());
json.put("clickEvent", click);
}
if(element.getHover() != null)
{
Map hover = new HashMap();
hover.put("action", element.getHover().name().toLowerCase());
hover.put("value", element.getHoverValue());
json.put("hoverEvent", hover);
}
for(ChatFormat format: element.getFormats())
{
json.put(format.name().toLowerCase(), true);
}
if(element.getColor() != null)
{
json.put("color", element.getColor().name().toLowerCase());
}
if(element.getExtraElements().size() > 0)
{
json.put("extra", element.getSimpleExtraElements());
}
return JSONValue.toJSONString(json);
}
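/*
 * Usage sketch (illustrative, not part of the original class): toJSONString(ChatElement...)
 * and toJSONString(List) wrap the elements in a root {"text":"","extra":[...]} object, while
 * the single-element overload serialises that element directly; send() then hands the JSON
 * to a ChatReciever.
 *
 *   send(target, toJSONString(fromLegacy("\u00a76Hello"))); // target: any ChatReciever
 */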
}
|
|
/*
* Copyright 2007 Sun Microsystems, Inc.
*
* This file is part of jVoiceBridge.
*
* jVoiceBridge is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation and distributed hereunder
* to you.
*
* jVoiceBridge is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* Sun designates this particular file as subject to the "Classpath"
* exception as provided by Sun in the License file that accompanied this
* code.
*/
package com.sun.voip;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Vector;
public class SdpInfo {
private String sdp;
private String remoteHost;
private int remotePort;
private byte telephoneEventPayload;
private boolean preferredMediaSpecified;
private String userName;
private String callId;
private String conferenceId;
private boolean isDistributedBridge;
private int synchronizationSource;
private InetSocketAddress rtcpAddress;
private Vector supportedMedia;
private MediaInfo mediaInfo;
private MediaInfo transmitMediaInfo;
private boolean transmitMediaInfoOk;
public SdpInfo(String remoteHost, int remotePort,
byte telephoneEventPayload, Vector supportedMedia,
MediaInfo mediaInfo, boolean preferredMediaSpecified) {
this.remoteHost = remoteHost;
this.remotePort = remotePort;
this.telephoneEventPayload = telephoneEventPayload;
this.supportedMedia = supportedMedia;
this.mediaInfo = mediaInfo;
this.preferredMediaSpecified = preferredMediaSpecified;
}
public void setSdp(String sdp) {
this.sdp = sdp;
}
public String getSdp() {
return sdp;
}
public void setRemoteHost(String remoteHost) {
this.remoteHost = remoteHost;
}
public String getRemoteHost() {
return remoteHost;
}
public void setRemotePort(int remotePort) {
this.remotePort = remotePort;
}
public int getRemotePort() {
return remotePort;
}
public byte getTelephoneEventPayload() {
return telephoneEventPayload;
}
public void setUserName(String userName) {
this.userName= userName;
}
public String getUserName() {
return userName;
}
public void setCallId(String callId) {
this.callId = callId;
}
public String getCallId() {
return callId;
}
public void setConferenceId(String conferenceId) {
this.conferenceId = conferenceId;
}
public String getConferenceId() {
return conferenceId;
}
public void setDistributedBridge() {
isDistributedBridge = true;
}
public boolean isDistributedBridge() {
return isDistributedBridge;
}
public void setRtcpAddress(InetSocketAddress rtcpAddress) {
this.rtcpAddress = rtcpAddress;
}
public InetSocketAddress getRtcpAddress() {
return rtcpAddress;
}
public void setMediaInfo(MediaInfo mediaInfo) {
this.mediaInfo = mediaInfo;
}
public MediaInfo getMediaInfo() {
return mediaInfo;
}
public void setTransmitMediaInfoOk(boolean transmitMediaInfoOk) {
this.transmitMediaInfoOk = transmitMediaInfoOk;
}
public boolean getTransmitMediaInfoOk() {
return transmitMediaInfoOk;
}
public void setTransmitMediaInfo(MediaInfo transmitMediaInfo) {
this.transmitMediaInfo = transmitMediaInfo;
}
public MediaInfo getTransmitMediaInfo() {
if (transmitMediaInfo == null || mediaInfo.getPayload() ==
RtpPacket.PCMU_PAYLOAD) {
return mediaInfo;
}
int transmitSampleRate = transmitMediaInfo.getSampleRate();
if (transmitSampleRate > mediaInfo.getSampleRate()) {
transmitSampleRate = mediaInfo.getSampleRate();
}
int transmitChannels = transmitMediaInfo.getChannels();
if (transmitChannels > mediaInfo.getChannels()) {
transmitChannels = mediaInfo.getChannels();
}
try {
transmitMediaInfo = MediaInfo.findMediaInfo(
transmitMediaInfo.getEncoding(),
transmitSampleRate, transmitChannels);
} catch (IOException e) {
Logger.println(e.getMessage());
Logger.println("Using transmit media info " + transmitMediaInfo);
}
return transmitMediaInfo;
}
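    /*
     * Illustrative note (not part of the original class): getTransmitMediaInfo() never returns
     * media "better" than what was negotiated for receive; the transmit sample rate and channel
     * count are capped at the values in mediaInfo, and PCMU (or a missing transmit preference)
     * falls straight through to mediaInfo. For example, a 48000 Hz stereo transmit preference
     * against a 16000 Hz mono mediaInfo resolves to 16000 Hz mono in the transmit encoding.
     */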
public boolean preferredMediaSpecified() {
return preferredMediaSpecified;
}
public void setSynchronizationSource(int synchronizationSource) {
Logger.println("Setting sync to " + synchronizationSource);
this.synchronizationSource = synchronizationSource;
}
public int getSynchronizationSource() {
return synchronizationSource;
}
public boolean isSupported(MediaInfo mediaInfo) {
try {
getMediaInfo(mediaInfo.getSampleRate(), mediaInfo.getChannels(),
mediaInfo.getEncoding());
} catch (IOException e) {
return false;
}
return true;
}
public MediaInfo getMediaInfo(int sampleRate, int channels, int encoding)
throws IOException {
if (supportedMedia != null) {
for (int i = 0; i < supportedMedia.size(); i++) {
MediaInfo mediaInfo = (MediaInfo) supportedMedia.elementAt(i);
if (mediaInfo.getSampleRate() == sampleRate &&
mediaInfo.getChannels() == channels &&
mediaInfo.getEncoding() == encoding) {
return mediaInfo;
}
}
}
throw new IOException("No Suitable media for "
+ encoding + "/" + sampleRate + "/" + channels);
}
public MediaInfo findBestMediaInfo(Vector otherSupportedMedia, MediaInfo otherMediaPreference) throws IOException {
MediaInfo best = null;
for (int i = 0; i < otherSupportedMedia.size(); i++) {
MediaInfo m = (MediaInfo) otherSupportedMedia.elementAt(i);
if (!isSupported(m)) {
continue;
}
if (otherMediaPreference != null) {
if (m.getSampleRate() > otherMediaPreference.getSampleRate() ||
m.getChannels() > otherMediaPreference.getChannels()) {
continue;
}
}
if (best == null || isBetter(m, best)) {
best = m;
}
}
if (best == null) {
throw new IOException("No supported Media!");
}
return best;
}
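    /*
     * Illustrative sketch (not part of the original class): findBestMediaInfo() walks the far
     * end's supported list, keeps only entries this side also supports (and that do not exceed
     * the far end's stated preference), then picks the "best" by sample rate and channel count.
     *
     *   MediaInfo negotiated = sdpInfo.findBestMediaInfo(remoteSupported, remotePreference);
     *   // remoteSupported: Vector of MediaInfo from the remote SDP; remotePreference may be null
     */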
private boolean isBetter(MediaInfo m1, MediaInfo m2) {
if (m1.getSampleRate() > m2.getSampleRate() &&
m1.getChannels() >= m2.getChannels()) {
return true;
}
if (m1.getSampleRate() == m2.getSampleRate() &&
m1.getChannels() > m2.getChannels()) {
return true;
}
return false;
}
}
|
|
package org.hive2hive.core.network.data;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import java.io.IOException;
import java.security.InvalidKeyException;
import java.security.KeyPair;
import java.security.SignatureException;
import java.util.List;
import java.util.Random;
import net.tomp2p.futures.FutureGet;
import net.tomp2p.futures.FuturePut;
import net.tomp2p.peers.Number160;
import net.tomp2p.storage.Data;
import org.hive2hive.core.H2HJUnitTest;
import org.hive2hive.core.H2HTestData;
import org.hive2hive.core.exceptions.NoPeerConnectionException;
import org.hive2hive.core.network.NetworkManager;
import org.hive2hive.core.network.NetworkTestUtil;
import org.hive2hive.core.network.data.parameters.Parameters;
import org.hive2hive.core.security.EncryptionUtil;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
/**
* @author Seppi
*/
public class DataManagerTest extends H2HJUnitTest {
private static List<NetworkManager> network;
private static final int networkSize = 3;
private static Random random = new Random();
@BeforeClass
public static void initTest() throws Exception {
testClass = DataManagerTest.class;
beforeClass();
network = NetworkTestUtil.createNetwork(networkSize);
}
@Test
public void testPutGet() throws Exception {
String data = NetworkTestUtil.randomString();
Parameters parameters = new Parameters().setLocationKey(NetworkTestUtil.randomString())
.setContentKey(NetworkTestUtil.randomString()).setData(new H2HTestData(data));
NetworkManager node = network.get(random.nextInt(networkSize));
FuturePut future = node.getDataManager().putUnblocked(parameters);
future.awaitUninterruptibly();
FutureGet futureGet = node.getDataManager().getUnblocked(parameters);
futureGet.awaitUninterruptibly();
String result = (String) ((H2HTestData) futureGet.getData().object()).getTestString();
assertEquals(data, result);
}
@Test
public void testPutGetFromOtherNode() throws Exception {
String data = NetworkTestUtil.randomString();
Parameters parameters = new Parameters().setLocationKey(NetworkTestUtil.randomString())
.setContentKey(NetworkTestUtil.randomString()).setData(new H2HTestData(data));
NetworkManager nodeA = network.get(random.nextInt(networkSize / 2));
NetworkManager nodeB = network.get(random.nextInt(networkSize / 2) + networkSize / 2);
FuturePut future = nodeA.getDataManager().putUnblocked(parameters);
future.awaitUninterruptibly();
FutureGet futureGet = nodeB.getDataManager().getUnblocked(parameters);
futureGet.awaitUninterruptibly();
String result = ((H2HTestData) futureGet.getData().object()).getTestString();
assertEquals(data, result);
}
@Test
public void testPutOneLocationKeyMultipleContentKeys() throws Exception {
String locationKey = NetworkTestUtil.randomString();
NetworkManager node = network.get(random.nextInt(networkSize));
String data1 = NetworkTestUtil.randomString();
Parameters parameters1 = new Parameters().setLocationKey(locationKey)
.setContentKey(NetworkTestUtil.randomString()).setData(new H2HTestData(data1));
FuturePut future1 = node.getDataManager().putUnblocked(parameters1);
future1.awaitUninterruptibly();
String data2 = NetworkTestUtil.randomString();
Parameters parameters2 = new Parameters().setLocationKey(locationKey)
.setContentKey(NetworkTestUtil.randomString()).setData(new H2HTestData(data2));
FuturePut future2 = node.getDataManager().putUnblocked(parameters2);
future2.awaitUninterruptibly();
String data3 = NetworkTestUtil.randomString();
Parameters parameters3 = new Parameters().setLocationKey(locationKey)
.setContentKey(NetworkTestUtil.randomString()).setData(new H2HTestData(data3));
FuturePut future3 = node.getDataManager().putUnblocked(parameters3);
future3.awaitUninterruptibly();
FutureGet get1 = node.getDataManager().getUnblocked(parameters1);
get1.awaitUninterruptibly();
String result1 = (String) ((H2HTestData) get1.getData().object()).getTestString();
assertEquals(data1, result1);
FutureGet get2 = node.getDataManager().getUnblocked(parameters2);
get2.awaitUninterruptibly();
String result2 = (String) ((H2HTestData) get2.getData().object()).getTestString();
assertEquals(data2, result2);
FutureGet get3 = node.getDataManager().getUnblocked(parameters3);
get3.awaitUninterruptibly();
String result3 = (String) ((H2HTestData) get3.getData().object()).getTestString();
assertEquals(data3, result3);
}
@Test
public void testPutOneLocationKeyMultipleContentKeysGlobalGetFromOtherNodes() throws Exception {
String locationKey = NetworkTestUtil.randomString();
String data1 = NetworkTestUtil.randomString();
Parameters parameters1 = new Parameters().setLocationKey(locationKey)
.setContentKey(NetworkTestUtil.randomString()).setData(new H2HTestData(data1));
FuturePut future1 = network.get(random.nextInt(networkSize)).getDataManager()
.putUnblocked(parameters1);
future1.awaitUninterruptibly();
String data2 = NetworkTestUtil.randomString();
Parameters parameters2 = new Parameters().setLocationKey(locationKey)
.setContentKey(NetworkTestUtil.randomString()).setData(new H2HTestData(data2));
FuturePut future2 = network.get(random.nextInt(networkSize)).getDataManager()
.putUnblocked(parameters2);
future2.awaitUninterruptibly();
String data3 = NetworkTestUtil.randomString();
Parameters parameters3 = new Parameters().setLocationKey(locationKey)
.setContentKey(NetworkTestUtil.randomString()).setData(new H2HTestData(data3));
FuturePut future3 = network.get(random.nextInt(networkSize)).getDataManager()
.putUnblocked(parameters3);
future3.awaitUninterruptibly();
FutureGet get1 = network.get(random.nextInt(networkSize)).getDataManager().getUnblocked(parameters1);
get1.awaitUninterruptibly();
String result1 = (String) ((H2HTestData) get1.getData().object()).getTestString();
assertEquals(data1, result1);
FutureGet get2 = network.get(random.nextInt(networkSize)).getDataManager().getUnblocked(parameters2);
get2.awaitUninterruptibly();
String result2 = (String) ((H2HTestData) get2.getData().object()).getTestString();
assertEquals(data2, result2);
FutureGet get3 = network.get(random.nextInt(networkSize)).getDataManager().getUnblocked(parameters3);
get3.awaitUninterruptibly();
String result3 = (String) ((H2HTestData) get3.getData().object()).getTestString();
assertEquals(data3, result3);
}
@Test
public void testRemovalOneContentKey() throws NoPeerConnectionException {
NetworkManager nodeA = network.get(random.nextInt(networkSize / 2));
NetworkManager nodeB = network.get(random.nextInt(networkSize / 2) + networkSize / 2);
String locationKey = nodeB.getNodeId();
H2HTestData data = new H2HTestData(NetworkTestUtil.randomString());
Parameters parameters = new Parameters().setLocationKey(locationKey).setDomainKey("domain key")
.setContentKey(NetworkTestUtil.randomString()).setData(data);
// put a content
nodeA.getDataManager().putUnblocked(parameters).awaitUninterruptibly();
// test that it is there
FutureGet futureGet = nodeB.getDataManager().getUnblocked(parameters);
futureGet.awaitUninterruptibly();
assertNotNull(futureGet.getData());
// delete it
nodeA.getDataManager().removeUnblocked(parameters).awaitUninterruptibly();
// check that it is gone
futureGet = nodeB.getDataManager().getUnblocked(parameters);
futureGet.awaitUninterruptibly();
assertNull(futureGet.getData());
}
@Test
public void testRemovalMultipleContentKey() throws ClassNotFoundException, IOException,
NoPeerConnectionException {
NetworkManager nodeA = network.get(random.nextInt(networkSize / 2));
NetworkManager nodeB = network.get(random.nextInt(networkSize / 2) + networkSize / 2);
String locationKey = nodeB.getNodeId();
String contentKey1 = NetworkTestUtil.randomString();
String testString1 = NetworkTestUtil.randomString();
Parameters parameters1 = new Parameters().setLocationKey(locationKey).setContentKey(contentKey1)
.setData(new H2HTestData(testString1));
String contentKey2 = NetworkTestUtil.randomString();
String testString2 = NetworkTestUtil.randomString();
Parameters parameters2 = new Parameters().setLocationKey(locationKey).setContentKey(contentKey2)
.setData(new H2HTestData(testString2));
String contentKey3 = NetworkTestUtil.randomString();
String testString3 = NetworkTestUtil.randomString();
Parameters parameters3 = new Parameters().setLocationKey(locationKey).setContentKey(contentKey3)
.setData(new H2HTestData(testString3));
// insert them
FuturePut put1 = nodeA.getDataManager().putUnblocked(parameters1);
put1.awaitUninterruptibly();
FuturePut put2 = nodeA.getDataManager().putUnblocked(parameters2);
put2.awaitUninterruptibly();
FuturePut put3 = nodeA.getDataManager().putUnblocked(parameters3);
put3.awaitUninterruptibly();
// check that they are all stored
FutureGet futureGet = nodeB.getDataManager().getUnblocked(parameters1);
futureGet.awaitUninterruptibly();
assertEquals(testString1, ((H2HTestData) futureGet.getData().object()).getTestString());
futureGet = nodeB.getDataManager().getUnblocked(parameters2);
futureGet.awaitUninterruptibly();
assertEquals(testString2, ((H2HTestData) futureGet.getData().object()).getTestString());
futureGet = nodeB.getDataManager().getUnblocked(parameters3);
futureGet.awaitUninterruptibly();
assertEquals(testString3, ((H2HTestData) futureGet.getData().object()).getTestString());
// remove 2nd one and check that 1st and 3rd are still there
nodeA.getDataManager().removeUnblocked(parameters2).awaitUninterruptibly();
futureGet = nodeB.getDataManager().getUnblocked(parameters1);
futureGet.awaitUninterruptibly();
assertEquals(testString1, ((H2HTestData) futureGet.getData().object()).getTestString());
futureGet = nodeB.getDataManager().getUnblocked(parameters2);
futureGet.awaitUninterruptibly();
assertNull(futureGet.getData());
futureGet = nodeB.getDataManager().getUnblocked(parameters3);
futureGet.awaitUninterruptibly();
assertEquals(testString3, ((H2HTestData) futureGet.getData().object()).getTestString());
// remove 3rd one as well and check that they are gone as well
nodeA.getDataManager().removeUnblocked(parameters1).awaitUninterruptibly();
nodeA.getDataManager().removeUnblocked(parameters3).awaitUninterruptibly();
futureGet = nodeB.getDataManager().getUnblocked(parameters1);
futureGet.awaitUninterruptibly();
assertNull(futureGet.getData());
futureGet = nodeB.getDataManager().getUnblocked(parameters2);
futureGet.awaitUninterruptibly();
assertNull(futureGet.getData());
futureGet = nodeB.getDataManager().getUnblocked(parameters3);
futureGet.awaitUninterruptibly();
assertNull(futureGet.getData());
}
@Test
public void testChangeProtectionKeySingleVersionKey() throws NoPeerConnectionException, IOException,
InvalidKeyException, SignatureException {
KeyPair keypairOld = EncryptionUtil.generateRSAKeyPair();
KeyPair keypairNew = EncryptionUtil.generateRSAKeyPair();
H2HTestData data = new H2HTestData(NetworkTestUtil.randomString());
data.generateVersionKey();
data.setBasedOnKey(Number160.ZERO);
Parameters parameters = new Parameters().setLocationKey(NetworkTestUtil.randomString())
.setContentKey(NetworkTestUtil.randomString()).setVersionKey(data.getVersionKey())
.setData(data).setProtectionKeys(keypairOld).setNewProtectionKeys(keypairNew)
.setTTL(data.getTimeToLive()).setHashFlag(true);
NetworkManager node = network.get(random.nextInt(networkSize));
// put some initial data
FuturePut putFuture1 = node.getDataManager().putUnblocked(parameters);
putFuture1.awaitUninterruptibly();
Assert.assertTrue(putFuture1.isSuccess());
// parameters without the data object itself
parameters = new Parameters().setLocationKey(parameters.getLocationKey())
.setContentKey(parameters.getContentKey()).setVersionKey(data.getVersionKey())
.setProtectionKeys(keypairOld).setNewProtectionKeys(keypairNew)
.setTTL(data.getTimeToLive());
// change content protection key
FuturePut changeFuture = node.getDataManager().changeProtectionKeyUnblocked(parameters);
changeFuture.awaitUninterruptibly();
Assert.assertTrue(changeFuture.isSuccess());
// verify if content protection key has been changed
Data resData = node.getDataManager().getUnblocked(parameters).awaitUninterruptibly().getData();
Assert.assertEquals(keypairNew.getPublic(), resData.publicKey());
}
@Test
@Ignore
public void testChangeProtectionKeyMultipleVersionKeys() throws NoPeerConnectionException, IOException,
InvalidKeyException, SignatureException {
// TODO test case for changing entries with same location, domain and content key, but different
// version keys
}
@AfterClass
public static void cleanAfterClass() {
NetworkTestUtil.shutdownNetwork(network);
afterClass();
}
}
|
|
/**
* Copyright (c) 2013, Sana
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the Sana nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL Sana BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.sana.android.content;
import android.content.UriMatcher;
import android.net.Uri;
import android.text.TextUtils;
import org.sana.android.provider.AmbulanceDrivers;
import org.sana.android.provider.Concepts;
import org.sana.android.provider.Counties;
import org.sana.android.provider.Districts;
import org.sana.android.provider.EncounterTasks;
import org.sana.android.provider.Encounters;
import org.sana.android.provider.Events;
import org.sana.android.provider.Instructions;
import org.sana.android.provider.Locations;
import org.sana.android.provider.Models;
import org.sana.android.provider.Notifications;
import org.sana.android.provider.ObservationTasks;
import org.sana.android.provider.Observations;
import org.sana.android.provider.Observers;
import org.sana.android.provider.Parishes;
import org.sana.android.provider.Procedures;
import org.sana.android.provider.Subcounties;
import org.sana.android.provider.Subjects;
import org.sana.android.provider.VHTs;
import org.sana.util.UUIDUtil;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
/**
* A container of Uri descriptor values and related constants. The static
* methods of this class are primarily intended for consistent, application-wide
* Uri matching.
*
* @author Sana Development
*
*/
public final class Uris {
private static final String SCHEME_CONTENT = "content";
public static final int NO_MATCH = -1;
public static final int MATCH_ALL = 0;
public static final int MATCH_TYPE = 1;
public static final int MATCH_CONTENT = 2;
public static final int MATCH_PACKAGE = 4;
public static final int ITEMS = 1;
public static final int ITEM_ID = 2;
public static final int ITEM_UUID = 4;
public static final int ITEM_RELATED = 8;
public static final int ITEM_FILE = 16;
public static final int TYPE_WIDTH = 8;
public static final int TYPE_SHIFT = 0;
private static final int TYPE_MASK = (1 << TYPE_WIDTH) -1;
public static final int CONTENT_WIDTH = 16;
public static final int CONTENT_SHIFT = TYPE_WIDTH;
private static final int CONTENT_MASK = ((1 << CONTENT_WIDTH) - 1) << CONTENT_SHIFT;
public static final int PACKAGE_WIDTH = 4;
public static final int PACKAGE_SHIFT = CONTENT_WIDTH + CONTENT_SHIFT;
private static final int PACKAGE_MASK = ((1 << PACKAGE_WIDTH) - 1) << PACKAGE_SHIFT;
public static final int DESCRIPTOR_WIDTH = PACKAGE_WIDTH + CONTENT_WIDTH + TYPE_WIDTH;
public static final int NULL = UriMatcher.NO_MATCH;
//--------------------------------------------------------------------------
// Application, i.e. Package codes
//--------------------------------------------------------------------------
public static final String PACKAGE_AUTHORITY = "org.sana";
public static final int PACKAGE_DIR = 0x000000001;
/*
public static final Uri INTENT_URI = buildContentUri(PACKAGE_AUTHORITY, "intent");
/** Uri which identifies the application settings *
public static final Uri SETTINGS_URI = buildContentUri(PACKAGE_AUTHORITY, "settings");
/** Uri which identifies session activity *
public static final Uri SESSION_URI =buildContentUri(PACKAGE_AUTHORITY, "session");
public static final int PACKAGE = 1;
public static final int SESSION = 2 << (DESCRIPTOR_WIDTH - PACKAGE_SHIFT);
public static final int SETTINGS = 4 << (DESCRIPTOR_WIDTH - PACKAGE_SHIFT);
public static final int PACKAGE_DIR = PACKAGE | ITEMS;
public static final int SESSION_DIR = SESSION | ITEMS;
public static final int SETTINGS_DIR = SETTINGS | ITEMS;
public static final int SESSION_ITEM = SESSION | ITEM_ID;
public static final int SETTINGS_ITEM = SETTINGS | ITEM_ID;
public static final int SESSION_UUID = SESSION | ITEM_UUID;
public static final int SETTINGS_UUID = SETTINGS | ITEM_UUID;
static interface Sessions{
static final String CONTENT_TYPE = "vnd.android.cursor.dir/org.sana.session";
static final String CONTENT_ITEM_TYPE = "vnd.android.cursor.item/org.sana.session";
}
static interface Settings{
static final String CONTENT_TYPE = "vnd.android.cursor.dir/org.sana.setting";
static final String CONTENT_ITEM_TYPE = "vnd.android.cursor.item/org.sana.setting";
}
*/
//--------------------------------------------------------------------------
// Model codes
//
// Note: codes are bit flags packed into a signed 32-bit int, so the scheme supports
// roughly < 32 distinct data types in the system
//--------------------------------------------------------------------------
public static final int CONCEPT = 1 << CONTENT_SHIFT;
public static final int ENCOUNTER = 2 << CONTENT_SHIFT;
public static final int EVENT = 4 << CONTENT_SHIFT;
public static final int INSTRUCTION = 8 << CONTENT_SHIFT;
public static final int NOTIFICATION = 6 << CONTENT_SHIFT;
public static final int OBSERVATION = 32 << CONTENT_SHIFT;
public static final int OBSERVER = 64 << CONTENT_SHIFT;
public static final int PROCEDURE = 128 << CONTENT_SHIFT;
public static final int RELATIONSHIP = 256 << CONTENT_SHIFT;
public static final int SUBJECT = 512 << CONTENT_SHIFT;
public static final int ENCOUNTER_TASK = 1024 << CONTENT_SHIFT;
public static final int OBSERVATION_TASK = 2048 << CONTENT_SHIFT;
public static final int AMBULANCE_DRIVER = 4096 << CONTENT_SHIFT;
public static final int VHT = 8192 << CONTENT_SHIFT;
public static final int LOCATION = 16384 << CONTENT_SHIFT;
public static final int PARISH = 16385 << CONTENT_SHIFT;
public static final int SUBCOUNTY = 16386 << CONTENT_SHIFT;
public static final int COUNTY = 16387 << CONTENT_SHIFT;
public static final int DISTRICT = 16388 << CONTENT_SHIFT;
// dir match codes OBJECT | ITEMS
public static final int CONCEPT_DIR = CONCEPT | ITEMS;
public static final int ENCOUNTER_DIR = ENCOUNTER | ITEMS;
public static final int EVENT_DIR = EVENT | ITEMS;
public static final int INSTRUCTION_DIR = INSTRUCTION | ITEMS;
public static final int NOTIFICATION_DIR = NOTIFICATION | ITEMS;
public static final int OBSERVATION_DIR = OBSERVATION | ITEMS;
public static final int OBSERVER_DIR = OBSERVER | ITEMS;
public static final int PROCEDURE_DIR = PROCEDURE | ITEMS;
public static final int RELATIONSHIP_DIR = RELATIONSHIP | ITEMS;
public static final int SUBJECT_DIR = SUBJECT | ITEMS;
public static final int ENCOUNTER_TASK_DIR = ENCOUNTER_TASK | ITEMS;
public static final int OBSERVATION_TASK_DIR = OBSERVATION_TASK | ITEMS;
public static final int AMBULANCE_DRIVER_DIR = AMBULANCE_DRIVER | ITEMS;
public static final int VHT_DIR = VHT | ITEMS;
public static final int LOCATION_DIR = LOCATION | ITEMS;
public static final int PARISH_DIR = PARISH | ITEMS;
public static final int SUBCOUNTY_DIR = SUBCOUNTY | ITEMS;
public static final int COUNTY_DIR = COUNTY | ITEMS;
public static final int DISTRICT_DIR = DISTRICT | ITEMS;
// item match codes OBJECT | ITEM_ID
public static final int CONCEPT_ITEM = CONCEPT | ITEM_ID;
public static final int ENCOUNTER_ITEM = ENCOUNTER | ITEM_ID;
public static final int EVENT_ITEM = EVENT | ITEM_ID;
public static final int INSTRUCTION_ITEM = INSTRUCTION | ITEM_ID;
public static final int NOTIFICATION_ITEM = NOTIFICATION | ITEM_ID;
public static final int OBSERVATION_ITEM = OBSERVATION | ITEM_ID;
public static final int OBSERVER_ITEM = OBSERVER | ITEM_ID;
public static final int PROCEDURE_ITEM = PROCEDURE | ITEM_ID;
public static final int RELATIONSHIP_ITEM = RELATIONSHIP | ITEM_ID;
public static final int SUBJECT_ITEM = SUBJECT | ITEM_ID;
public static final int ENCOUNTER_TASK_ITEM = ENCOUNTER_TASK | ITEM_ID;
public static final int OBSERVATION_TASK_ITEM = OBSERVATION_TASK | ITEM_ID;
public static final int AMBULANCE_DRIVER_ITEM= AMBULANCE_DRIVER | ITEM_ID;
public static final int VHT_ITEM = VHT | ITEM_ID;
public static final int LOCATION_ITEM = LOCATION | ITEM_ID;
public static final int PARISH_ITEM = PARISH | ITEM_ID;
public static final int SUBCOUNTY_ITEM = SUBCOUNTY | ITEM_ID;
public static final int COUNTY_ITEM = COUNTY | ITEM_ID;
public static final int DISTRICT_ITEM = DISTRICT | ITEM_ID;
// item match codes OBJECT | ITEM_UUID
public static final int CONCEPT_UUID = CONCEPT | ITEM_UUID;
public static final int ENCOUNTER_UUID = ENCOUNTER | ITEM_UUID;
public static final int EVENT_UUID = EVENT | ITEM_UUID;
public static final int INSTRUCTION_UUID = INSTRUCTION | ITEM_UUID;
public static final int NOTIFICATION_UUID = NOTIFICATION | ITEM_UUID;
public static final int OBSERVATION_UUID = OBSERVATION | ITEM_UUID;
public static final int OBSERVER_UUID = OBSERVER | ITEM_UUID;
public static final int PROCEDURE_UUID = PROCEDURE | ITEM_UUID;
public static final int RELATIONSHIP_UUID = RELATIONSHIP | ITEM_UUID;
public static final int SUBJECT_UUID = SUBJECT | ITEM_UUID;
public static final int ENCOUNTER_TASK_UUID = ENCOUNTER_TASK | ITEM_UUID;
public static final int OBSERVATION_TASK_UUID = OBSERVATION_TASK | ITEM_UUID;
public static final int AMBULANCE_DRIVER_UUID= AMBULANCE_DRIVER | ITEM_UUID;
public static final int VHT_UUID = VHT | ITEM_UUID;
public static final int LOCATION_UUID = LOCATION | ITEM_UUID;
public static final int PARISH_UUID = PARISH | ITEM_UUID;
public static final int SUBCOUNTY_UUID = SUBCOUNTY | ITEM_UUID;
public static final int COUNTY_UUID = COUNTY | ITEM_UUID;
public static final int DISTRICT_UUID = DISTRICT | ITEM_UUID;
// Matcher for mapping the Uri to code mappings
private static final UriMatcher mMatcher = new UriMatcher(UriMatcher.NO_MATCH);
static{
/*
// admin mappings
mMatcher.addURI(PACKAGE_AUTHORITY, "/", PACKAGE_DIR);
mMatcher.addURI(PACKAGE_AUTHORITY, "session/", SESSION_DIR);
mMatcher.addURI(PACKAGE_AUTHORITY, "session/#", SESSION_ITEM);
mMatcher.addURI(PACKAGE_AUTHORITY, "session/*", SESSION_UUID);
mMatcher.addURI(PACKAGE_AUTHORITY, "settings/", SETTINGS_DIR);
mMatcher.addURI(PACKAGE_AUTHORITY, "settings/#", SETTINGS_ITEM);
mMatcher.addURI(PACKAGE_AUTHORITY, "settings/*", SETTINGS_UUID);
*/
mMatcher.addURI(Models.AUTHORITY, "/", PACKAGE_DIR);
mMatcher.addURI(Models.AUTHORITY, "core/concept/", CONCEPT_DIR);
mMatcher.addURI(Models.AUTHORITY, "core/concept/#", CONCEPT_ITEM);
mMatcher.addURI(Models.AUTHORITY, "core/concept/*", CONCEPT_UUID);
mMatcher.addURI(Models.AUTHORITY, "core/encounter/", ENCOUNTER_DIR);
mMatcher.addURI(Models.AUTHORITY, "core/encounter/#", ENCOUNTER_ITEM);
mMatcher.addURI(Models.AUTHORITY, "core/encounter/*", ENCOUNTER_UUID);
mMatcher.addURI(Models.AUTHORITY, "core/event/", EVENT_DIR);
mMatcher.addURI(Models.AUTHORITY, "core/event/#", EVENT_ITEM);
mMatcher.addURI(Models.AUTHORITY, "core/event/*", EVENT_UUID);
mMatcher.addURI(Models.AUTHORITY, "core/instruction/", INSTRUCTION_DIR);
mMatcher.addURI(Models.AUTHORITY, "core/instruction/#", INSTRUCTION_ITEM);
mMatcher.addURI(Models.AUTHORITY, "core/instruction/*", INSTRUCTION_UUID);
mMatcher.addURI(Models.AUTHORITY, "core/notification/", NOTIFICATION_DIR);
mMatcher.addURI(Models.AUTHORITY, "core/notification/#", NOTIFICATION_ITEM);
mMatcher.addURI(Models.AUTHORITY, "core/notification/*", NOTIFICATION_UUID);
mMatcher.addURI(Models.AUTHORITY, "core/observation/", OBSERVATION_DIR);
mMatcher.addURI(Models.AUTHORITY, "core/observation/#", OBSERVATION_ITEM);
mMatcher.addURI(Models.AUTHORITY, "core/observation/*", OBSERVATION_UUID);
mMatcher.addURI(Models.AUTHORITY, "core/observer/", OBSERVER_DIR);
mMatcher.addURI(Models.AUTHORITY, "core/observer/#", OBSERVER_ITEM);
mMatcher.addURI(Models.AUTHORITY, "core/observer/*", OBSERVER_UUID);
mMatcher.addURI(Models.AUTHORITY, "core/procedure/", PROCEDURE_DIR);
mMatcher.addURI(Models.AUTHORITY, "core/procedure/#", PROCEDURE_ITEM);
mMatcher.addURI(Models.AUTHORITY, "core/procedure/*", PROCEDURE_UUID);
mMatcher.addURI(Models.AUTHORITY, "core/subject/", SUBJECT_DIR);
mMatcher.addURI(Models.AUTHORITY, "core/subject/#", SUBJECT_ITEM);
mMatcher.addURI(Models.AUTHORITY, "core/subject/*", SUBJECT_UUID);
mMatcher.addURI(Models.AUTHORITY, "core/patient/", SUBJECT_DIR);
mMatcher.addURI(Models.AUTHORITY, "core/patient/#", SUBJECT_ITEM);
mMatcher.addURI(Models.AUTHORITY, "core/patient/*", SUBJECT_UUID);
mMatcher.addURI(Models.AUTHORITY, "tasks/encounter/", ENCOUNTER_TASK_DIR);
mMatcher.addURI(Models.AUTHORITY, "tasks/encounter/#", ENCOUNTER_TASK_ITEM);
mMatcher.addURI(Models.AUTHORITY, "tasks/encounter/*", ENCOUNTER_TASK_UUID);
mMatcher.addURI(Models.AUTHORITY, "tasks/observation/", OBSERVATION_TASK_DIR);
mMatcher.addURI(Models.AUTHORITY, "tasks/observation/#", OBSERVATION_TASK_ITEM);
mMatcher.addURI(Models.AUTHORITY, "tasks/observation/*", OBSERVATION_TASK_UUID);
// Add ambulance driver
mMatcher.addURI(Models.AUTHORITY, "core/ambulancedriver/", AMBULANCE_DRIVER_DIR);
mMatcher.addURI(Models.AUTHORITY, "core/ambulancedriver/#", AMBULANCE_DRIVER_ITEM);
mMatcher.addURI(Models.AUTHORITY, "core/ambulancedriver/*", AMBULANCE_DRIVER_UUID);
//Add VHT
mMatcher.addURI(Models.AUTHORITY, "core/vht/", VHT_DIR);
mMatcher.addURI(Models.AUTHORITY, "core/vht/#", VHT_ITEM);
mMatcher.addURI(Models.AUTHORITY, "core/vht/*", VHT_UUID);
// Add Location
mMatcher.addURI(Models.AUTHORITY, "core/location/", LOCATION_DIR);
mMatcher.addURI(Models.AUTHORITY, "core/location/#", LOCATION_ITEM);
mMatcher.addURI(Models.AUTHORITY, "core/location/*", LOCATION_UUID);
// Add Locality identifiers
mMatcher.addURI(Models.AUTHORITY, "core/parish/", PARISH_DIR);
mMatcher.addURI(Models.AUTHORITY, "core/parish/#", PARISH_ITEM);
mMatcher.addURI(Models.AUTHORITY, "core/parish/*", PARISH_UUID);
mMatcher.addURI(Models.AUTHORITY, "core/subcounty/", SUBCOUNTY_DIR);
mMatcher.addURI(Models.AUTHORITY, "core/subcounty/#", SUBCOUNTY_ITEM);
mMatcher.addURI(Models.AUTHORITY, "core/subcounty/*", SUBCOUNTY_UUID);
mMatcher.addURI(Models.AUTHORITY, "core/county/", COUNTY_DIR);
mMatcher.addURI(Models.AUTHORITY, "core/county/#", COUNTY_ITEM);
mMatcher.addURI(Models.AUTHORITY, "core/county/*", COUNTY_UUID);
mMatcher.addURI(Models.AUTHORITY, "core/district/", DISTRICT_DIR);
mMatcher.addURI(Models.AUTHORITY, "core/district/#", DISTRICT_ITEM);
mMatcher.addURI(Models.AUTHORITY, "core/district/*", DISTRICT_UUID);
}
/**
* Returns an int value describing the content and type represented by the
* Uri
*
* @param uri The Uri to check.
* @return a value greater than 0 if the Uri was recognized or the value of
* {@link android.content.UriMatcher#NO_MATCH UriMatcher.NO_MATCH}.
* @throws IllegalArgumentException if the descriptor can not be determined
* or the UUID provided as a path segment is invalid.
*/
public static int getDescriptor(Uri uri){
int result = mMatcher.match((uri !=null)? uri: Uri.EMPTY);
if(result > -1){
if(((result & TYPE_MASK) >> TYPE_SHIFT) == Uris.ITEM_UUID){
String uuid = uri.getLastPathSegment();
if(!UUIDUtil.isValid(uuid))
throw new IllegalArgumentException("Invalid uuid format");
}
}
return result;
}
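/*
 * Illustrative note (not part of the original class): a descriptor packs the match into bit
 * fields, with the low TYPE_WIDTH bits holding the type (ITEMS, ITEM_ID or ITEM_UUID), the
 * next CONTENT_WIDTH bits holding the model code, and the top bits reserved for package codes.
 *
 *   int d = getDescriptor(Uri.parse("content://" + Models.AUTHORITY + "/core/procedure/1"));
 *   // d == PROCEDURE_ITEM; (d & 0xFF) == ITEM_ID; masking with the content bits recovers PROCEDURE
 */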
/**
* Returns the content object class descriptor for the Uri.
*
* @param uri
* @return
* @throws IllegalArgumentException if the descriptor can not be determined.
*/
public static int getContentDescriptor(Uri uri){
int d = getDescriptor(uri) & CONTENT_MASK;
return (d > -1)? d: UriMatcher.NO_MATCH;
}
/**
* The content type descriptor. If matched, it will return one of
* ITEMS, ITEM_ID, or ITEM_UUID
*
* @param uri
* @return
* @throws IllegalArgumentException if the descriptor can not be determined.
*/
public static int getTypeDescriptor(Uri uri){
int d = getDescriptor(uri) & TYPE_MASK;
return (d > -1)? d: UriMatcher.NO_MATCH;
}
/**
* Builds a hierarchical Uri.
*
* @param scheme
* @param authority
* @param path
* @return
*/
public static Uri buildUri(String scheme, String authority, String path){
Uri uri = Uri.parse(scheme + "://");
Uri.Builder builder = uri.buildUpon();
builder.scheme(scheme).authority(authority);
for(String s:path.split("/")){
if(!TextUtils.isEmpty(s)){
builder.appendPath(s);
}
}
return builder.build();
}
/**
* Builds a hierarchical content style Uri.
*
* @param authority The authority String
* @param path
* @return
*/
public static Uri buildContentUri(String authority, String path){
return buildUri(SCHEME_CONTENT,authority, path );
}
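/*
 * Example sketch (not in the original source): buildContentUri() produces a hierarchical
 * "content" Uri from an authority and a slash-separated path, appending each non-empty
 * segment in turn.
 *
 *   Uri u = buildContentUri(Models.AUTHORITY, "core/procedure");
 *   // -> content://<Models.AUTHORITY>/core/procedure
 */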
/**
* Parses the last path segment. This method performs no validation on the
* format.
*
* @param uri
* @return
*/
public static String parseUUID(Uri uri){
return uri.getLastPathSegment();
}
/**
* Appends a uuid String as the last path segment. This method performs no
* validation on the format.
*
* @param uri
* @param uuid
* @return
*/
public static Uri withAppendedUuid(Uri uri, String uuid){
// verify that the Uri is valid
if(Uris.isEmpty(uri))
throw new NullPointerException("Empty uri. Can not append UUID");
Uri result = Uri.parse(uri.toString() + "/" + uuid);
return result;
}
/**
* Will return the mime type represented by the Uri. For item or uuid
* matches-i.e. path matching {@literal value/#} and {@literal value/*}
* respectively, it will return a type beginning with
* "vnd.android.cursor.item", otherwise, a type beginning with
* "vnd.android.cursor.dir" will be returned.
*
* @param uri
* @return The mime type for the Uri
* @throws IllegalArgumentException if the mime type can not be determined.
*/
public static String getType(Uri uri) {
switch (getDescriptor(uri)) {
case CONCEPT_DIR:
return Concepts.CONTENT_TYPE;
case CONCEPT_UUID:
case CONCEPT_ITEM:
return Concepts.CONTENT_ITEM_TYPE;
case ENCOUNTER_DIR:
return Encounters.CONTENT_TYPE;
case ENCOUNTER_UUID:
case ENCOUNTER_ITEM:
return Encounters.CONTENT_ITEM_TYPE;
case EVENT_DIR:
return Events.CONTENT_TYPE;
case EVENT_UUID:
case EVENT_ITEM:
return Events.CONTENT_ITEM_TYPE;
case INSTRUCTION_DIR:
return Instructions.CONTENT_TYPE;
case INSTRUCTION_UUID:
case INSTRUCTION_ITEM:
return Instructions.CONTENT_ITEM_TYPE;
case NOTIFICATION_DIR:
return Notifications.CONTENT_TYPE;
case NOTIFICATION_UUID:
case NOTIFICATION_ITEM:
return Notifications.CONTENT_ITEM_TYPE;
case OBSERVATION_DIR:
return Observations.CONTENT_TYPE;
case OBSERVATION_UUID:
case OBSERVATION_ITEM:
return Observations.CONTENT_ITEM_TYPE;
case OBSERVER_DIR:
return Observers.CONTENT_TYPE;
case OBSERVER_UUID:
case OBSERVER_ITEM:
return Observers.CONTENT_ITEM_TYPE;
case PROCEDURE_DIR:
return Procedures.CONTENT_TYPE;
case PROCEDURE_UUID:
case PROCEDURE_ITEM:
return Procedures.CONTENT_ITEM_TYPE;
case SUBJECT_DIR:
return Subjects.CONTENT_TYPE;
case SUBJECT_UUID:
case SUBJECT_ITEM:
return Subjects.CONTENT_ITEM_TYPE;
case ENCOUNTER_TASK_DIR:
return EncounterTasks.CONTENT_TYPE;
case ENCOUNTER_TASK_UUID:
case ENCOUNTER_TASK_ITEM:
return EncounterTasks.CONTENT_ITEM_TYPE;
case OBSERVATION_TASK_DIR:
return ObservationTasks.CONTENT_TYPE;
case OBSERVATION_TASK_UUID:
case OBSERVATION_TASK_ITEM:
return ObservationTasks.CONTENT_ITEM_TYPE;
case AMBULANCE_DRIVER_DIR:
return AmbulanceDrivers.CONTENT_TYPE;
case AMBULANCE_DRIVER_ITEM:
case AMBULANCE_DRIVER_UUID:
return AmbulanceDrivers.CONTENT_ITEM_TYPE;
case VHT_DIR:
return VHTs.CONTENT_TYPE;
case VHT_ITEM:
case VHT_UUID:
return VHTs.CONTENT_ITEM_TYPE;
// Locality sub-items
case LOCATION_DIR:
return Locations.CONTENT_TYPE;
case LOCATION_ITEM:
case LOCATION_UUID:
return Locations.CONTENT_ITEM_TYPE;
case PARISH_DIR:
return Parishes.CONTENT_TYPE;
case PARISH_ITEM:
case PARISH_UUID:
return Parishes.CONTENT_ITEM_TYPE;
case SUBCOUNTY_DIR:
return Subcounties.CONTENT_TYPE;
case SUBCOUNTY_ITEM:
case SUBCOUNTY_UUID:
return Subcounties.CONTENT_ITEM_TYPE;
case COUNTY_DIR:
return Counties.CONTENT_TYPE;
case COUNTY_ITEM:
case COUNTY_UUID:
return Counties.CONTENT_ITEM_TYPE;
case DISTRICT_DIR:
return Districts.CONTENT_TYPE;
case DISTRICT_ITEM:
case DISTRICT_UUID:
return Districts.CONTENT_ITEM_TYPE;
case PACKAGE_DIR:
return "application/vnd.android.package-archive";
default:
throw new IllegalArgumentException("Invalid uri. No match::"+uri);
}
}
/**
* Returns whether a Uri is a content directory type. The mime types for
* directory uris should typically start with "vnd.android.cursor.dir".
*
* @param uri the Uri to check
* @return true if the Uri may refer to multiple objects
* @throws IllegalArgumentException if the type can not be determined.
*/
public static boolean isDirType(Uri uri){
return (getTypeDescriptor(uri) & TYPE_MASK) == ITEMS;
}
/**
* Returns true if the Uri refers to a single item type. The mime types for
* single item uris should start with "vnd.android.cursor.item".
*
* @param uri the Uri to check
* @return true if the Uri refers to a single object
* @throws IllegalArgumentException if the type can not be determined.
*/
public static boolean isItemType(Uri uri){
int val = getTypeDescriptor(uri) & TYPE_MASK;
return (val == ITEM_ID) || (val == ITEM_UUID);
}
public static boolean isPackage(Uri uri){
return (uri.getScheme().equals("package"));
}
/**
* Returns true if the Uri is null or equal to Uri.EMPTY
* @param uri
* @return
*/
public static boolean isEmpty(Uri uri){
return uri == null || uri.equals(Uri.EMPTY);
}
public static boolean isTask(Uri uri){
int d = getContentDescriptor(uri);
if(d == UriMatcher.NO_MATCH) return false;
return (d == ENCOUNTER_TASK) || (d == OBSERVATION_TASK);
}
public static final boolean filterEquals(Uri from, Uri to, int flags){
boolean result = false;
if(flags == MATCH_ALL){
result = (getDescriptor(from) == getDescriptor(to));
} else {
// TODO implement this
}
return result;
}
public static final boolean filterEquals(Uri from, Uri to){
return filterEquals(from,to, MATCH_ALL);
}
/**
* Copy constructor utility.
*
* @param uri The uri to copy.
* @return A copy of the input parameter or Uri.EMPTY;
*/
public static Uri copyInstance(Uri uri){
return (uri == null)? Uri.EMPTY: Uri.parse(uri.toString());
}
public static Uri iriToUri(Uri iri){
Uri result = Uri.EMPTY;
return result;
}
/**
* Utility function to return a Uri whose path can be used for web based
* services. POST and PUT methods will remove the trailing ID or UUID if
* present.
* @param iri
* @param method
* @return
*/
public static Uri iriToUri(Uri iri, String method){
Uri result = Uri.EMPTY;
return result;
}
/**
* Converts Android "content" style resource identifiers to URIs to use with the
* new MDS REST API. Only works for those objects whose path components in the new
* REST API are consistent with MDS.
*
* @param uri The internal resource identifier to convert.
* @param scheme The scheme to use for the conversion
* @param host The mds host
* @param port The mds port to talk to.
* @param rootPath Additional root path to prepend to the Uri path
* @return
* @throws MalformedURLException
* @throws URISyntaxException
*/
public static URI iriToURI(Uri uri, String scheme, String host, int port, String rootPath)
throws MalformedURLException, URISyntaxException
{
String path = String.format("%s%s", rootPath, uri.getPath());
if(!path.endsWith("/"))
path = path + "/";
path = path.replace("//", "/");
String query = uri.getEncodedQuery();
URL url = null;
if(!TextUtils.isEmpty(query)){
path = String.format("%s?%s", path,query);
}
url = new URL(scheme, host, port, path);
URI u = url.toURI();
return u;
}
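/*
 * Usage sketch (illustrative, not part of the original class): iriToURI() rewrites an internal
 * "content" style identifier into an HTTP(S) URI against an MDS host, preserving the query
 * string and ensuring a trailing slash on the path. Host, port and root path below are example
 * values only.
 *
 *   URI remote = iriToURI(Uri.parse("content://" + Models.AUTHORITY + "/core/procedure/"),
 *           "https", "demo.sana.example", 443, "/mds");
 *   // -> https://demo.sana.example:443/mds/core/procedure/
 */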
/**
* Converts Android "content" style resource identifiers to Uris which
* can be used with the MDS REST API
* @param uri The internal resource identifier to convert.
* @param scheme The scheme to use for the conversion
* @param authority The mds host and port
* @return
*/
public static Uri iriToUri(String scheme, String authority, String rootPath, Uri uri)
{
String query = uri.getEncodedQuery();
Uri.Builder builder = new Uri.Builder();
builder.encodedAuthority(authority)
.scheme(scheme);
if(!TextUtils.isEmpty(rootPath)){
if (rootPath.startsWith("/")) {
rootPath = rootPath.substring(1);
}
builder.encodedPath(rootPath);
}
builder.appendEncodedPath(normalizePath(uri).getEncodedPath());
if(!TextUtils.isEmpty(query)){
builder.encodedQuery(query);
}
return builder.build();
}
public static Uri iriToUri(String scheme, String authority, Uri uri){
return iriToUri(scheme,authority,null,uri);
}
public static Uri normalizePath(Uri uri){
if(!uri.getPath().endsWith("/")){
return uri.buildUpon().appendPath("").build();
} else {
return uri;
}
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.quotas;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest;
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class QuotaSettingsFactory {
static class QuotaGlobalsSettingsBypass extends QuotaSettings {
private final boolean bypassGlobals;
QuotaGlobalsSettingsBypass(final String userName, final TableName tableName,
final String namespace, final boolean bypassGlobals) {
super(userName, tableName, namespace);
this.bypassGlobals = bypassGlobals;
}
@Override
public QuotaType getQuotaType() {
return QuotaType.GLOBAL_BYPASS;
}
@Override
protected void setupSetQuotaRequest(SetQuotaRequest.Builder builder) {
builder.setBypassGlobals(bypassGlobals);
}
@Override
public String toString() {
return "GLOBAL_BYPASS => " + bypassGlobals;
}
}
/* ==========================================================================
* QuotaSettings from the Quotas object
*/
static List<QuotaSettings> fromUserQuotas(final String userName, final Quotas quotas) {
return fromQuotas(userName, null, null, quotas);
}
static List<QuotaSettings> fromUserQuotas(final String userName, final TableName tableName,
final Quotas quotas) {
return fromQuotas(userName, tableName, null, quotas);
}
static List<QuotaSettings> fromUserQuotas(final String userName, final String namespace,
final Quotas quotas) {
return fromQuotas(userName, null, namespace, quotas);
}
static List<QuotaSettings> fromTableQuotas(final TableName tableName, final Quotas quotas) {
return fromQuotas(null, tableName, null, quotas);
}
static List<QuotaSettings> fromNamespaceQuotas(final String namespace, final Quotas quotas) {
return fromQuotas(null, null, namespace, quotas);
}
private static List<QuotaSettings> fromQuotas(final String userName, final TableName tableName,
final String namespace, final Quotas quotas) {
List<QuotaSettings> settings = new ArrayList<QuotaSettings>();
if (quotas.hasThrottle()) {
settings.addAll(fromThrottle(userName, tableName, namespace, quotas.getThrottle()));
}
    if (quotas.getBypassGlobals()) {
settings.add(new QuotaGlobalsSettingsBypass(userName, tableName, namespace, true));
}
return settings;
}
private static List<QuotaSettings> fromThrottle(final String userName, final TableName tableName,
final String namespace, final QuotaProtos.Throttle throttle) {
List<QuotaSettings> settings = new ArrayList<QuotaSettings>();
if (throttle.hasReqNum()) {
settings.add(ThrottleSettings.fromTimedQuota(userName, tableName, namespace,
ThrottleType.REQUEST_NUMBER, throttle.getReqNum()));
}
if (throttle.hasReqSize()) {
settings.add(ThrottleSettings.fromTimedQuota(userName, tableName, namespace,
ThrottleType.REQUEST_SIZE, throttle.getReqSize()));
}
return settings;
}
/* ==========================================================================
* RPC Throttle
*/
/**
* Throttle the specified user.
*
* @param userName the user to throttle
* @param type the type of throttling
* @param limit the allowed number of request/data per timeUnit
* @param timeUnit the limit time unit
* @return the quota settings
*/
public static QuotaSettings throttleUser(final String userName, final ThrottleType type,
final long limit, final TimeUnit timeUnit) {
return throttle(userName, null, null, type, limit, timeUnit);
}
/**
* Throttle the specified user on the specified table.
*
* @param userName the user to throttle
* @param tableName the table to throttle
* @param type the type of throttling
* @param limit the allowed number of request/data per timeUnit
* @param timeUnit the limit time unit
* @return the quota settings
*/
public static QuotaSettings throttleUser(final String userName, final TableName tableName,
final ThrottleType type, final long limit, final TimeUnit timeUnit) {
return throttle(userName, tableName, null, type, limit, timeUnit);
}
/**
* Throttle the specified user on the specified namespace.
*
* @param userName the user to throttle
* @param namespace the namespace to throttle
* @param type the type of throttling
* @param limit the allowed number of request/data per timeUnit
* @param timeUnit the limit time unit
* @return the quota settings
*/
public static QuotaSettings throttleUser(final String userName, final String namespace,
final ThrottleType type, final long limit, final TimeUnit timeUnit) {
return throttle(userName, null, namespace, type, limit, timeUnit);
}
/**
* Remove the throttling for the specified user.
*
* @param userName the user
* @return the quota settings
*/
public static QuotaSettings unthrottleUser(final String userName) {
return throttle(userName, null, null, null, 0, null);
}
/**
* Remove the throttling for the specified user on the specified table.
*
* @param userName the user
* @param tableName the table
* @return the quota settings
*/
public static QuotaSettings unthrottleUser(final String userName, final TableName tableName) {
return throttle(userName, tableName, null, null, 0, null);
}
/**
* Remove the throttling for the specified user on the specified namespace.
*
* @param userName the user
* @param namespace the namespace
* @return the quota settings
*/
public static QuotaSettings unthrottleUser(final String userName, final String namespace) {
return throttle(userName, null, namespace, null, 0, null);
}
/**
* Throttle the specified table.
*
* @param tableName the table to throttle
* @param type the type of throttling
* @param limit the allowed number of request/data per timeUnit
* @param timeUnit the limit time unit
* @return the quota settings
*/
public static QuotaSettings throttleTable(final TableName tableName, final ThrottleType type,
final long limit, final TimeUnit timeUnit) {
return throttle(null, tableName, null, type, limit, timeUnit);
}
/**
* Remove the throttling for the specified table.
*
* @param tableName the table
* @return the quota settings
*/
public static QuotaSettings unthrottleTable(final TableName tableName) {
return throttle(null, tableName, null, null, 0, null);
}
/**
* Throttle the specified namespace.
*
* @param namespace the namespace to throttle
* @param type the type of throttling
* @param limit the allowed number of request/data per timeUnit
* @param timeUnit the limit time unit
* @return the quota settings
*/
public static QuotaSettings throttleNamespace(final String namespace, final ThrottleType type,
final long limit, final TimeUnit timeUnit) {
return throttle(null, null, namespace, type, limit, timeUnit);
}
/**
* Remove the throttling for the specified namespace.
*
* @param namespace the namespace
* @return the quota settings
*/
public static QuotaSettings unthrottleNamespace(final String namespace) {
return throttle(null, null, namespace, null, 0, null);
}
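  /*
   * Minimal usage sketch (not part of this class): settings built by the factory methods above
   * are normally applied through Admin#setQuota. The connection wiring and the concrete limits
   * below are assumptions for illustration only.
   *
   *   try (Connection conn = ConnectionFactory.createConnection(conf);
   *        Admin admin = conn.getAdmin()) {
   *     // limit user "bob" to 100 requests per second on table "t1"
   *     admin.setQuota(QuotaSettingsFactory.throttleUser(
   *         "bob", TableName.valueOf("t1"), ThrottleType.REQUEST_NUMBER, 100, TimeUnit.SECONDS));
   *     // later, remove that throttle again
   *     admin.setQuota(QuotaSettingsFactory.unthrottleUser("bob", TableName.valueOf("t1")));
   *   }
   */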
/* Throttle helper */
private static QuotaSettings throttle(final String userName, final TableName tableName,
final String namespace, final ThrottleType type, final long limit,
final TimeUnit timeUnit) {
QuotaProtos.ThrottleRequest.Builder builder = QuotaProtos.ThrottleRequest.newBuilder();
if (type != null) {
builder.setType(ProtobufUtil.toProtoThrottleType(type));
}
if (timeUnit != null) {
builder.setTimedQuota(ProtobufUtil.toTimedQuota(limit, timeUnit, QuotaScope.MACHINE));
}
return new ThrottleSettings(userName, tableName, namespace, builder.build());
}
/* ==========================================================================
* Global Settings
*/
/**
* Set the "bypass global settings" for the specified user
*
* @param userName the user to throttle
* @param bypassGlobals true if the global settings should be bypassed
* @return the quota settings
*/
public static QuotaSettings bypassGlobals(final String userName, final boolean bypassGlobals) {
return new QuotaGlobalsSettingsBypass(userName, null, null, bypassGlobals);
}
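  /*
   * Usage sketch (the admin handle is assumed, as above): exempt a privileged user from the
   * globally configured quotas.
   *
   *   admin.setQuota(QuotaSettingsFactory.bypassGlobals("superuser", true));
   */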
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
import static org.elasticsearch.index.mapper.MapperBuilders.booleanField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
/**
* A field mapper for boolean fields.
*/
public class BooleanFieldMapper extends FieldMapper {
public static final String CONTENT_TYPE = "boolean";
public static class Defaults {
public static final MappedFieldType FIELD_TYPE = new BooleanFieldType();
static {
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
FIELD_TYPE.setTokenized(false);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.freeze();
}
}
public static class Values {
public final static BytesRef TRUE = new BytesRef("T");
public final static BytesRef FALSE = new BytesRef("F");
}
public static class Builder extends FieldMapper.Builder<Builder, BooleanFieldMapper> {
public Builder(String name) {
super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
this.builder = this;
}
@Override
public Builder tokenized(boolean tokenized) {
if (tokenized) {
throw new IllegalArgumentException("bool field can't be tokenized");
}
return super.tokenized(tokenized);
}
@Override
public BooleanFieldMapper build(BuilderContext context) {
setupFieldType(context);
return new BooleanFieldMapper(name, fieldType, defaultFieldType,
context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
}
}
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
BooleanFieldMapper.Builder builder = booleanField(name);
parseField(builder, name, node, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
if (propNode == null) {
throw new MapperParsingException("Property [null_value] cannot be null.");
}
builder.nullValue(nodeBooleanValue(propNode));
iterator.remove();
}
}
return builder;
}
}
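    // Mapping sketch accepted by the TypeParser above (the field name is an assumed example):
    //
    //   {
    //     "properties": {
    //       "published": { "type": "boolean", "null_value": false }
    //     }
    //   }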
public static final class BooleanFieldType extends MappedFieldType {
public BooleanFieldType() {}
protected BooleanFieldType(BooleanFieldType ref) {
super(ref);
}
@Override
public MappedFieldType clone() {
return new BooleanFieldType(this);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public Boolean nullValue() {
return (Boolean)super.nullValue();
}
@Override
public BytesRef indexedValueForSearch(Object value) {
if (value == null) {
return Values.FALSE;
}
if (value instanceof Boolean) {
return ((Boolean) value) ? Values.TRUE : Values.FALSE;
}
String sValue;
if (value instanceof BytesRef) {
sValue = ((BytesRef) value).utf8ToString();
} else {
sValue = value.toString();
}
if (sValue.length() == 0) {
return Values.FALSE;
}
if (sValue.length() == 1 && sValue.charAt(0) == 'F') {
return Values.FALSE;
}
if (Booleans.parseBoolean(sValue, false)) {
return Values.TRUE;
}
return Values.FALSE;
}
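        // Coercion summary for indexedValueForSearch above: null, empty strings, "F" and any
        // string that does not parse as true map to Values.FALSE; Boolean.TRUE and "true"-like
        // strings map to Values.TRUE (boolean values are indexed as the terms "T" / "F").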
@Override
public Boolean value(Object value) {
if (value == null) {
return Boolean.FALSE;
}
String sValue = value.toString();
if (sValue.length() == 0) {
return Boolean.FALSE;
}
if (sValue.length() == 1 && sValue.charAt(0) == 'F') {
return Boolean.FALSE;
}
if (Booleans.parseBoolean(sValue, false)) {
return Boolean.TRUE;
}
return Boolean.FALSE;
}
@Override
public Object valueForSearch(Object value) {
return value(value);
}
@Override
public boolean useTermQueryWithQueryString() {
return true;
}
}
protected BooleanFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
}
@Override
public BooleanFieldType fieldType() {
return (BooleanFieldType) super.fieldType();
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored() && !fieldType().hasDocValues()) {
return;
}
Boolean value = context.parseExternalValue(Boolean.class);
if (value == null) {
XContentParser.Token token = context.parser().currentToken();
if (token == XContentParser.Token.VALUE_NULL) {
if (fieldType().nullValue() != null) {
value = fieldType().nullValue();
}
} else {
value = context.parser().booleanValue();
}
}
if (value == null) {
return;
}
fields.add(new Field(fieldType().names().indexName(), value ? "T" : "F", fieldType()));
if (fieldType().hasDocValues()) {
fields.add(new SortedNumericDocValuesField(fieldType().names().indexName(), value ? 1 : 0));
}
}
@Override
protected String contentType() {
return CONTENT_TYPE;
}
@Override
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);
if (includeDefaults || fieldType().nullValue() != null) {
builder.field("null_value", fieldType().nullValue());
}
}
}
|
|
/*
* The MIT License (MIT)
*
* Copyright (c) 2014 Alexander Polden
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
package com.tehforce.sofa.parser;
import java.util.ArrayList;
import java.util.HashMap;
import org.antlr.v4.runtime.tree.AbstractParseTreeVisitor;
import org.antlr.v4.runtime.tree.ParseTree;
import com.tehforce.sofa.logic.Character;
import com.tehforce.sofa.logic.CharacterClass;
import com.tehforce.sofa.logic.Team;
import com.tehforce.sofa.logic.Token;
import com.tehforce.sofa.parser.SofaLangParser.Alignment_enemyContext;
import com.tehforce.sofa.parser.SofaLangParser.Alignment_friendlyContext;
import com.tehforce.sofa.parser.SofaLangParser.BooleanBuiltin_aliveContext;
import com.tehforce.sofa.parser.SofaLangParser.BooleanBuiltin_inRangeContext;
import com.tehforce.sofa.parser.SofaLangParser.BooleanBuiltin_woundedContext;
import com.tehforce.sofa.parser.SofaLangParser.Bop_eqContext;
import com.tehforce.sofa.parser.SofaLangParser.Bop_geContext;
import com.tehforce.sofa.parser.SofaLangParser.Bop_gtContext;
import com.tehforce.sofa.parser.SofaLangParser.Bop_leContext;
import com.tehforce.sofa.parser.SofaLangParser.Bop_ltContext;
import com.tehforce.sofa.parser.SofaLangParser.Bop_neContext;
import com.tehforce.sofa.parser.SofaLangParser.ClassName_AnyContext;
import com.tehforce.sofa.parser.SofaLangParser.ClassName_HealerContext;
import com.tehforce.sofa.parser.SofaLangParser.ClassName_RangerContext;
import com.tehforce.sofa.parser.SofaLangParser.ClassName_WarriorContext;
import com.tehforce.sofa.parser.SofaLangParser.ClassentryContext;
import com.tehforce.sofa.parser.SofaLangParser.Comparison_numberBuiltinContext;
import com.tehforce.sofa.parser.SofaLangParser.Direction_downContext;
import com.tehforce.sofa.parser.SofaLangParser.Direction_leftContext;
import com.tehforce.sofa.parser.SofaLangParser.Direction_rightContext;
import com.tehforce.sofa.parser.SofaLangParser.Direction_upContext;
import com.tehforce.sofa.parser.SofaLangParser.DirectionalBultin_directionContext;
import com.tehforce.sofa.parser.SofaLangParser.DirectionalBultin_directionToContext;
import com.tehforce.sofa.parser.SofaLangParser.DirectionalBultin_oppositeDirectionToContext;
import com.tehforce.sofa.parser.SofaLangParser.Expr_ComparisonContext;
import com.tehforce.sofa.parser.SofaLangParser.Expr_OtherwiseContext;
import com.tehforce.sofa.parser.SofaLangParser.Expr_booleanBuiltinContext;
import com.tehforce.sofa.parser.SofaLangParser.LogicContext;
import com.tehforce.sofa.parser.SofaLangParser.NumberBuiltin_distanceToContext;
import com.tehforce.sofa.parser.SofaLangParser.NumberBuiltin_healthContext;
import com.tehforce.sofa.parser.SofaLangParser.NumberBuiltin_maxHealthContext;
import com.tehforce.sofa.parser.SofaLangParser.NumberBuiltin_numberContext;
import com.tehforce.sofa.parser.SofaLangParser.NumberBuiltin_rangeContext;
import com.tehforce.sofa.parser.SofaLangParser.ProgContext;
import com.tehforce.sofa.parser.SofaLangParser.RootContext;
import com.tehforce.sofa.parser.SofaLangParser.TargetBuiltin_closestContext;
import com.tehforce.sofa.parser.SofaLangParser.TargetBuiltin_farthestContext;
import com.tehforce.sofa.parser.SofaLangParser.TargetBuiltin_targetContext;
import com.tehforce.sofa.parser.SofaLangParser.TargetContext;
import com.tehforce.sofa.parser.SofaLangParser.TeamContext;
import com.tehforce.sofa.parser.SofaLangParser.Token_attackContext;
import com.tehforce.sofa.parser.SofaLangParser.Token_defendContext;
import com.tehforce.sofa.parser.SofaLangParser.Token_healContext;
import com.tehforce.sofa.parser.SofaLangParser.Token_moveContext;
import com.tehforce.sofa.parser.SofaLangParser.Token_roamContext;
/**
 * Evaluator walks the AST produced by ANTLR4. On each request it re-evaluates the entire
 * tree, taking the current state of the simulation into account, and returns the resulting
 * set of tokens per character.
*
* @author mockillo
*
*/
public class Evaluator extends AbstractParseTreeVisitor<Value> implements
SofaLangVisitor<Value> {
private Team friendly, enemy;
private HashMap<Character, Token[]> tokens;
private Character current;
private ParseTree tree;
private ArrayList<Token[]> tokendump;
public Evaluator(Team friendly, Team enemy, ParseTree tree) {
this.friendly = friendly;
this.enemy = enemy;
this.tree = tree;
tokendump = new ArrayList<Token[]>();
}
public Token[] getToken(Character c) {
return tokens.get(c);
}
protected HashMap<Character, Token[]> getAllTokens() {
return tokens;
}
public void updateTokens() {
visit(tree);
}
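    // Minimal usage sketch; how Team, Character and the ANTLR parse tree are constructed is
    // outside this class, so the names below are assumptions for illustration only:
    //
    //   Evaluator evaluator = new Evaluator(friendlyTeam, enemyTeam, parseTree);
    //   evaluator.updateTokens();               // re-evaluates the whole tree
    //   Token[] orders = evaluator.getToken(friendlyTeam.getWarrior());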
@Override
public Value visitAlignment_enemy(Alignment_enemyContext ctx) {
return new Value(enemy);
}
@Override
public Value visitAlignment_friendly(Alignment_friendlyContext ctx) {
return new Value(friendly);
}
@Override
public Value visitBooleanBuiltin_alive(BooleanBuiltin_aliveContext ctx) {
if (ctx.getChildCount() > 3) {
Character c = visit(ctx.getChild(0)).asCharacter();
return new Value(c.alive(), true);
} else {
return new Value(current.alive(), true);
}
}
@Override
public Value visitBooleanBuiltin_inRange(BooleanBuiltin_inRangeContext ctx) {
if (ctx.getChildCount() > 4) {
Character c = visit(ctx.getChild(0)).asCharacter();
Character t = visit(ctx.getChild(4)).asCharacter();
return new Value(c.inRange(t), true);
} else {
Character t = visit(ctx.getChild(2)).asCharacter();
return new Value(current.inRange(t), true);
}
}
@Override
public Value visitBooleanBuiltin_wounded(BooleanBuiltin_woundedContext ctx) {
if (ctx.getChildCount() > 3) {
Character c = visit(ctx.getChild(0)).asCharacter();
return new Value(c.wounded(), true);
} else {
return new Value(current.wounded(), true);
}
}
/**
* eq = 0;
*/
@Override
public Value visitBop_eq(Bop_eqContext ctx) {
return new Value(0);
}
/**
* ge = 1;
*/
@Override
public Value visitBop_ge(Bop_geContext ctx) {
return new Value(1);
}
/**
* gt = 5;
*/
@Override
public Value visitBop_gt(Bop_gtContext ctx) {
return new Value(5);
}
/**
* le = 2;
*/
@Override
public Value visitBop_le(Bop_leContext ctx) {
return new Value(2);
}
/**
* lt = 4;
*/
@Override
public Value visitBop_lt(Bop_ltContext ctx) {
return new Value(4);
}
/**
* ne = 3;
*/
@Override
public Value visitBop_ne(Bop_neContext ctx) {
return new Value(3);
}
@Override
public Value visitClassentry(ClassentryContext ctx) {
switch (visit(ctx.getChild(0)).asCharacterClass()) {
case WARRIOR:
current = friendly.getWarrior();
break;
case HEALER:
current = friendly.getHealer();
break;
case RANGER:
current = friendly.getRanger();
break;
case ANY:
System.out.println("Invalid class entry in source file!");
break;
}
for (int i = 1; i < ctx.getChildCount(); i++) {
Value v = visit(ctx.getChild(i));
if (v == null)
continue;
if (v.isNull())
continue;
if (v.isBoolean() && v.asBoolean()) {
tokens.put(current, tokendump.get(tokendump.size() - 1));
return new Value(true, true);
}
}
return new Value(false, true);
}
@Override
public Value visitClassName_Any(ClassName_AnyContext ctx) {
return new Value(CharacterClass.ANY);
}
@Override
public Value visitClassName_Healer(ClassName_HealerContext ctx) {
return new Value(CharacterClass.HEALER);
}
@Override
public Value visitClassName_Ranger(ClassName_RangerContext ctx) {
return new Value(CharacterClass.RANGER);
}
@Override
public Value visitClassName_Warrior(ClassName_WarriorContext ctx) {
return new Value(CharacterClass.WARRIOR);
}
@Override
public Value visitComparison_numberBuiltin(
Comparison_numberBuiltinContext ctx) {
Value v = null;
float f = visit(ctx.getChild(0)).asFloat();
float s = visit(ctx.getChild(2)).asFloat();
int bop = visit(ctx.getChild(1)).asInteger();
switch (bop) {
case 0:
v = new Value((Math.abs(f - s) < .0000001), true); break;
case 1:
v = new Value((f >= s), true); break;
case 2:
v = new Value((f <= s), true); break;
case 3:
v = new Value((f != s), true); break;
case 4:
v = new Value((f < s), true); break;
default:
v = new Value((f > s), true); break;
}
return v;
}
@Override
public Value visitDirection_down(Direction_downContext ctx) {
return new Value("Down");
}
@Override
public Value visitDirection_left(Direction_leftContext ctx) {
return new Value("Left");
}
@Override
public Value visitDirection_right(Direction_rightContext ctx) {
return new Value("Right");
}
@Override
public Value visitDirection_up(Direction_upContext ctx) {
return new Value("Up");
}
@Override
public Value visitDirectionalBultin_direction(
DirectionalBultin_directionContext ctx) {
return visit(ctx.getChild(0));
}
@Override
public Value visitDirectionalBultin_directionTo(
DirectionalBultin_directionToContext ctx) {
if (ctx.getChildCount() > 4) {
Character c = visit(ctx.getChild(0)).asCharacter();
Character t = visit(ctx.getChild(4)).asCharacter();
return new Value(c.direction(t));
} else {
Character t = visit(ctx.getChild(2)).asCharacter();
return new Value(current.direction(t));
}
}
@Override
public Value visitDirectionalBultin_oppositeDirectionTo(
DirectionalBultin_oppositeDirectionToContext ctx) {
if (ctx.getChildCount() > 4) {
Character c = visit(ctx.getChild(0)).asCharacter();
Character t = visit(ctx.getChild(4)).asCharacter();
return new Value(c.oppositeDirection(t));
} else {
Character t = visit(ctx.getChild(2)).asCharacter();
return new Value(current.oppositeDirection(t));
}
}
@Override
public Value visitExpr_booleanBuiltin(Expr_booleanBuiltinContext ctx) {
return visit(ctx.getChild(0));
}
@Override
public Value visitExpr_Comparison(Expr_ComparisonContext ctx) {
return visit(ctx.getChild(0));
}
@Override
public Value visitExpr_Otherwise(Expr_OtherwiseContext ctx) {
return new Value(true, true);
}
@Override
public Value visitLogic(LogicContext ctx) {
ArrayList<Boolean> exprBooleans = new ArrayList<Boolean>();
ArrayList<Token> exprTokens = new ArrayList<Token>();
for (int i = 0; i < ctx.getChildCount(); i++) {
Value v = visit(ctx.getChild(i));
if (v == null)
continue;
if (v.isNull())
continue;
if (v.isBoolean())
exprBooleans.add(v.asBoolean());
else
exprTokens.add(v.asToken());
}
Token[] myTokens = new Token[exprTokens.size()];
exprTokens.toArray(myTokens);
tokendump.add(myTokens);
boolean passed = true;
for (Boolean b : exprBooleans) {
if (!b)
passed = false;
}
if (passed)
return new Value(true, true);
else
return new Value(false, true);
}
@Override
public Value visitNumberBuiltin_distanceTo(
NumberBuiltin_distanceToContext ctx) {
if (ctx.getChildCount() > 4) {
Character c = visit(ctx.getChild(0)).asCharacter();
Character t = visit(ctx.getChild(4)).asCharacter();
return new Value(c.distance(t));
} else {
Character t = visit(ctx.getChild(2)).asCharacter();
return new Value(current.distance(t));
}
}
@Override
public Value visitNumberBuiltin_health(NumberBuiltin_healthContext ctx) {
if (ctx.getChildCount() > 3) {
Character c = visit(ctx.getChild(0)).asCharacter();
return new Value(c.getCurrentHealth());
} else {
return new Value(current.getCurrentHealth());
}
}
@Override
public Value visitNumberBuiltin_maxHealth(NumberBuiltin_maxHealthContext ctx) {
if (ctx.getChildCount() > 3) {
Character c = visit(ctx.getChild(0)).asCharacter();
return new Value(c.getMaxHealth());
} else {
return new Value(current.getMaxHealth());
}
}
@Override
public Value visitNumberBuiltin_number(NumberBuiltin_numberContext ctx) {
return new Value(ctx.getChild(0).getText());
}
@Override
public Value visitNumberBuiltin_range(NumberBuiltin_rangeContext ctx) {
if (ctx.getChildCount() > 3) {
Character c = visit(ctx.getChild(0)).asCharacter();
return new Value(c.range());
} else {
return new Value(current.range());
}
}
@Override
public Value visitProg(ProgContext ctx) {
return visitChildren(ctx);
}
@Override
public Value visitRoot(RootContext ctx) {
return visitChildren(ctx);
}
@Override
public Value visitTarget(TargetContext ctx) {
Team team = visit(ctx.getChild(0)).asTeam();
Value c = null;
switch (visit(ctx.getChild(2)).asCharacterClass()) {
case WARRIOR:
c = new Value(team.getWarrior());
break;
case HEALER:
c = new Value(team.getHealer());
break;
case RANGER:
c = new Value(team.getRanger());
break;
case ANY:
c = new Value(team.getAny());
break;
}
return c;
}
@Override
public Value visitTargetBuiltin_closest(TargetBuiltin_closestContext ctx) {
if (ctx.getChildCount() > 4) {
Character c = visit(ctx.getChild(0)).asCharacter();
Team t = visit(ctx.getChild(4)).asTeam();
return new Value(c.closest(t));
} else {
Team t = visit(ctx.getChild(2)).asTeam();
return new Value(current.closest(t));
}
}
@Override
public Value visitTargetBuiltin_farthest(TargetBuiltin_farthestContext ctx) {
if (ctx.getChildCount() > 4) {
Character c = visit(ctx.getChild(0)).asCharacter();
Team t = visit(ctx.getChild(4)).asTeam();
return new Value(c.farthest(t));
} else {
Team t = visit(ctx.getChild(2)).asTeam();
return new Value(current.farthest(t));
}
}
@Override
public Value visitTargetBuiltin_target(TargetBuiltin_targetContext ctx) {
return visit(ctx.getChild(0));
}
@Override
public Value visitTeam(TeamContext ctx) {
tokens = new HashMap<Character, Token[]>();
return visitChildren(ctx);
}
@Override
public Value visitToken_attack(Token_attackContext ctx) {
Token t = new Token("Attack", visit(ctx.getChild(1)).asCharacter());
return new Value(t);
}
@Override
public Value visitToken_defend(Token_defendContext ctx) {
Token t = new Token("Defend");
return new Value(t);
}
@Override
public Value visitToken_heal(Token_healContext ctx) {
Token t = new Token("Heal", visit(ctx.getChild(1)).asCharacter());
return new Value(t);
}
@Override
public Value visitToken_move(Token_moveContext ctx) {
Token t = new Token("Move", visit(ctx.getChild(1)).asVector());
return new Value(t);
}
@Override
public Value visitToken_roam(Token_roamContext ctx) {
Token t = new Token("Roam");
return new Value(t);
}
}
|
|
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.night_mode;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.AdditionalAnswers.answerVoid;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.chromium.base.ApplicationState.HAS_RUNNING_ACTIVITIES;
import static org.chromium.base.ApplicationState.HAS_STOPPED_ACTIVITIES;
import static org.chromium.chrome.browser.preferences.ChromePreferenceKeys.UI_THEME_SETTING;
import android.os.Build;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.stubbing.VoidAnswer1;
import org.robolectric.annotation.Config;
import org.chromium.base.test.BaseRobolectricTestRunner;
import org.chromium.base.test.util.DisableIf;
import org.chromium.chrome.browser.preferences.SharedPreferencesManager;
/**
* Unit tests for {@link GlobalNightModeStateController}.
*/
@RunWith(BaseRobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class GlobalNightModeStateControllerTest {
@Mock
private NightModeStateProvider.Observer mObserver;
private GlobalNightModeStateController mGlobalNightModeStateController;
@Mock
private SystemNightModeMonitor mSystemNightModeMonitor;
private SystemNightModeMonitor.Observer mSystemNightModeObserver;
@Mock
private PowerSavingModeMonitor mPowerSavingMonitor;
private Runnable mPowerModeObserver;
@Before
public void setUp() {
MockitoAnnotations.initMocks(this);
captureObservers();
mGlobalNightModeStateController =
new GlobalNightModeStateController(mSystemNightModeMonitor, mPowerSavingMonitor,
SharedPreferencesManager.getInstance());
mGlobalNightModeStateController.onApplicationStateChange(HAS_RUNNING_ACTIVITIES);
// Night mode is disabled by default.
assertFalse(GlobalNightModeStateProviderHolder.getInstance().isInNightMode());
}
private void captureObservers() {
// We need to mock removeObserver as well as addObserver, so can't use ArgumentCaptor.
doAnswer(answerVoid((VoidAnswer1<SystemNightModeMonitor.Observer>)
observer -> mSystemNightModeObserver = observer))
.when(mSystemNightModeMonitor).addObserver(any());
doAnswer(answerVoid((VoidAnswer1<SystemNightModeMonitor.Observer>)
observer -> mSystemNightModeObserver = null))
.when(mSystemNightModeMonitor).removeObserver(any());
doAnswer(answerVoid((VoidAnswer1<Runnable>)
observer -> mPowerModeObserver = observer))
.when(mPowerSavingMonitor).addObserver(any());
doAnswer(answerVoid((VoidAnswer1<Runnable>)
observer -> mPowerModeObserver = null))
.when(mPowerSavingMonitor).removeObserver(any());
}
@After
public void tearDown() {
SharedPreferencesManager.getInstance().removeKey(UI_THEME_SETTING);
}
@Test
public void testUpdateNightMode_PowerSaveMode_DefaultsToSystem() {
// Set preference to system default and verify that the night mode isn't enabled.
SharedPreferencesManager.getInstance().writeInt(UI_THEME_SETTING, ThemeType.SYSTEM_DEFAULT);
assertFalse(mGlobalNightModeStateController.isInNightMode());
// Enable power save mode and verify night mode is enabled.
setIsPowerSaveMode(true);
assertTrue(mGlobalNightModeStateController.isInNightMode());
// Disable power save mode and verify night mode is disabled.
setIsPowerSaveMode(false);
assertFalse(mGlobalNightModeStateController.isInNightMode());
}
@Test
@DisableIf.Build(sdk_is_greater_than = Build.VERSION_CODES.P)
public void testUpdateNightMode_PowerSaveMode_DefaultsToLight() {
// Enable power save mode and verify night mode is not enabled.
setIsPowerSaveMode(true);
assertFalse(mGlobalNightModeStateController.isInNightMode());
}
@Test
public void testUpdateNightMode_SystemNightMode_DefaultsToSystem() {
// Set preference to system default and verify that the night mode isn't enabled.
SharedPreferencesManager.getInstance().writeInt(UI_THEME_SETTING, ThemeType.SYSTEM_DEFAULT);
assertFalse(mGlobalNightModeStateController.isInNightMode());
// Enable system night mode and verify night mode is enabled.
setSystemNightMode(true);
assertTrue(mGlobalNightModeStateController.isInNightMode());
// Disable system night mode and verify night mode is disabled.
setSystemNightMode(false);
assertFalse(mGlobalNightModeStateController.isInNightMode());
}
@Test
@DisableIf.Build(sdk_is_greater_than = Build.VERSION_CODES.P)
public void testUpdateNightMode_SystemNightMode_DefaultsToLight() {
// Enable system night mode and verify night mode is not enabled.
setSystemNightMode(true);
assertFalse(mGlobalNightModeStateController.isInNightMode());
}
@Test
public void testUpdateNightMode_Preference() {
// Set preference to dark theme and verify night mode is enabled.
SharedPreferencesManager.getInstance().writeInt(UI_THEME_SETTING, ThemeType.DARK);
assertTrue(mGlobalNightModeStateController.isInNightMode());
// Set preference to light theme and verify night mode is disabled.
SharedPreferencesManager.getInstance().writeInt(UI_THEME_SETTING, ThemeType.LIGHT);
assertFalse(mGlobalNightModeStateController.isInNightMode());
// Regardless of power save mode and system night mode, night mode is disabled with light
// theme preference.
setIsPowerSaveMode(true);
assertFalse(mGlobalNightModeStateController.isInNightMode());
setSystemNightMode(true);
assertFalse(mGlobalNightModeStateController.isInNightMode());
}
@Test
public void testStopAndRestart() {
// Simulate stopping listening to night mode state changes. Verify that the night mode state
// does not change.
mGlobalNightModeStateController.onApplicationStateChange(HAS_STOPPED_ACTIVITIES);
setIsPowerSaveMode(true);
assertFalse(mGlobalNightModeStateController.isInNightMode());
setSystemNightMode(true);
assertFalse(mGlobalNightModeStateController.isInNightMode());
SharedPreferencesManager.getInstance().writeInt(UI_THEME_SETTING, ThemeType.DARK);
assertFalse(mGlobalNightModeStateController.isInNightMode());
// Simulate starting to listen to night mode state changes again. Verify that:
// 1. Night mode state is updated after #start().
// 2. Night mode state is updated on power save mode, system night mode, or preference
// changes.
mGlobalNightModeStateController.onApplicationStateChange(HAS_RUNNING_ACTIVITIES);
assertTrue(mGlobalNightModeStateController.isInNightMode());
SharedPreferencesManager.getInstance().writeInt(UI_THEME_SETTING, ThemeType.SYSTEM_DEFAULT);
assertTrue(mGlobalNightModeStateController.isInNightMode());
setIsPowerSaveMode(false);
assertTrue(mGlobalNightModeStateController.isInNightMode());
setSystemNightMode(false);
assertFalse(mGlobalNightModeStateController.isInNightMode());
}
@Test
public void testObserver() {
mGlobalNightModeStateController.addObserver(mObserver);
// Verify that observer is called on night mode state changed from false to true.
SharedPreferencesManager.getInstance().writeInt(UI_THEME_SETTING, ThemeType.DARK);
assertTrue(mGlobalNightModeStateController.isInNightMode());
verify(mObserver, times(1)).onNightModeStateChanged();
// Verify that observer is not called when night mode state is not changed.
setIsPowerSaveMode(true);
assertTrue(mGlobalNightModeStateController.isInNightMode());
verify(mObserver, times(1)).onNightModeStateChanged();
// Verify that observer is not called when night mode state is not changed.
setIsPowerSaveMode(false);
assertTrue(mGlobalNightModeStateController.isInNightMode());
verify(mObserver, times(1)).onNightModeStateChanged();
// Verify that observer is called when set to light theme.
SharedPreferencesManager.getInstance().writeInt(UI_THEME_SETTING, ThemeType.LIGHT);
assertFalse(mGlobalNightModeStateController.isInNightMode());
verify(mObserver, times(2)).onNightModeStateChanged();
// Verify that observer is not called after it is removed.
mGlobalNightModeStateController.removeObserver(mObserver);
SharedPreferencesManager.getInstance().writeInt(UI_THEME_SETTING, ThemeType.DARK);
assertTrue(mGlobalNightModeStateController.isInNightMode());
verify(mObserver, times(2)).onNightModeStateChanged();
}
/**
* Simulates setting power save mode, and notifies the change.
* @param isPowerSaveMode Whether power save mode is enabled or not.
*/
private void setIsPowerSaveMode(boolean isPowerSaveMode) {
when(mPowerSavingMonitor.powerSavingIsOn()).thenReturn(isPowerSaveMode);
if (mPowerModeObserver != null) {
mPowerModeObserver.run();
}
}
/**
* Simulates setting system night mode, and notifies the change.
* @param isSystemNightModeOn Whether system night mode is enabled or not.
*/
private void setSystemNightMode(boolean isSystemNightModeOn) {
when(mSystemNightModeMonitor.isSystemNightModeOn()).thenReturn(isSystemNightModeOn);
if (mSystemNightModeObserver != null) {
mSystemNightModeObserver.onSystemNightModeChanged();
}
}
}
|
|
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.selenium.json;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.core.IsEqual.equalTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.openqa.selenium.json.Json.MAP_TYPE;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.reflect.TypeToken;
import com.google.gson.JsonNull;
import com.google.gson.JsonObject;
import org.junit.Test;
import org.openqa.selenium.Capabilities;
import org.openqa.selenium.Cookie;
import org.openqa.selenium.ImmutableCapabilities;
import org.openqa.selenium.MutableCapabilities;
import org.openqa.selenium.Platform;
import org.openqa.selenium.Proxy;
import org.openqa.selenium.logging.LogType;
import org.openqa.selenium.logging.LoggingPreferences;
import org.openqa.selenium.remote.CapabilityType;
import org.openqa.selenium.remote.Command;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.openqa.selenium.remote.DriverCommand;
import org.openqa.selenium.remote.ErrorCodes;
import org.openqa.selenium.remote.Response;
import org.openqa.selenium.remote.SessionId;
import java.io.StringReader;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
public class JsonTest {
@Test
public void canReadBooleans() {
assertTrue(new Json().toType("true", Boolean.class));
assertFalse(new Json().toType("false", Boolean.class));
}
@Test
public void canReadANumber() {
assertEquals(Long.valueOf(42), new Json().toType("42", Number.class));
assertEquals(Integer.valueOf(42), new Json().toType("42", Integer.class));
assertEquals(Double.valueOf(42), new Json().toType("42", Double.class));
}
@Test
public void canRoundTripNumbers() {
Map<String, Object> original = ImmutableMap.of(
"options", ImmutableMap.of("args", ImmutableList.of(1L, "hello")));
Json json = new Json();
String converted = json.toJson(original);
Object remade = json.toType(converted, MAP_TYPE);
assertEquals(original, remade);
}
@Test
public void roundTripAFirefoxOptions() {
Map<String, Object> caps = ImmutableMap.of(
"moz:firefoxOptions", ImmutableMap.of(
"prefs", ImmutableMap.of("foo.bar", 1)));
String json = new Json().toJson(caps);
assertFalse(json, json.contains("1.0"));
try (JsonInput input = new Json().newInput(new StringReader(json))) {
json = new Json().toJson(input.read(Json.MAP_TYPE));
assertFalse(json, json.contains("1.0"));
}
}
@Test
public void shouldCoerceAListOfCapabilitiesIntoSomethingMutable() {
// This is needed since Grid expects each of the capabilities to be mutable
List<Capabilities> expected = ImmutableList.of(
new ImmutableCapabilities("cheese", "brie"),
new ImmutableCapabilities("peas", 42L));
Json json = new Json();
String raw = json.toJson(expected);
List<Capabilities> seen = json.toType(raw, new TypeToken<List<Capabilities>>(){}.getType());
assertEquals(expected, seen);
assertTrue(seen.get(0) instanceof MutableCapabilities);
}
@Test
public void shouldUseBeanSettersToPopulateFields() {
Map<String, String> map = ImmutableMap.of("name", "fishy");
Json json = new Json();
String raw = json.toJson(map);
BeanWithSetter seen = json.toType(raw, BeanWithSetter.class);
assertEquals("fishy", seen.theName);
}
public static class BeanWithSetter {
String theName;
public void setName(String name) {
theName = name;
}
}
@Test
public void shouldAllowUserToPopulateFieldsDirectly() {
Map<String, String> map = ImmutableMap.of("theName", "fishy");
Json json = new Json();
String raw = json.toJson(map);
BeanWithSetter seen = json.toType(raw, BeanWithSetter.class, PropertySetting.BY_FIELD);
assertEquals("fishy", seen.theName);
}
@Test
public void testCanConstructASimpleString() {
String text = new Json().toType("cheese", String.class);
assertThat(text, is("cheese"));
}
@SuppressWarnings("unchecked")
@Test
public void testCanPopulateAMap() {
JsonObject toConvert = new JsonObject();
toConvert.addProperty("cheese", "brie");
toConvert.addProperty("foodstuff", "cheese");
Map<String, String> map = new Json().toType(toConvert.toString(), Map.class);
assertThat(map.size(), is(2));
assertThat(map, hasEntry("cheese", "brie"));
assertThat(map, hasEntry("foodstuff", "cheese"));
}
@Test
public void testCanPopulateAMapThatContainsNull() {
JsonObject toConvert = new JsonObject();
toConvert.add("foo", JsonNull.INSTANCE);
Map<?,?> converted = new Json().toType(toConvert.toString(), Map.class);
assertEquals(1, converted.size());
assertTrue(converted.containsKey("foo"));
assertNull(converted.get("foo"));
}
@Test
public void testCanPopulateASimpleBean() {
JsonObject toConvert = new JsonObject();
toConvert.addProperty("value", "time");
SimpleBean bean = new Json().toType(toConvert.toString(), SimpleBean.class);
assertThat(bean.getValue(), is("time"));
}
@Test
public void testWillSilentlyDiscardUnusedFieldsWhenPopulatingABean() {
JsonObject toConvert = new JsonObject();
toConvert.addProperty("value", "time");
toConvert.addProperty("frob", "telephone");
SimpleBean bean = new Json().toType(toConvert.toString(), SimpleBean.class);
assertThat(bean.getValue(), is("time"));
}
@Test
public void testShouldSetPrimitiveValuesToo() {
JsonObject toConvert = new JsonObject();
toConvert.addProperty("magicNumber", 3);
Map<?,?> map = new Json().toType(toConvert.toString(), Map.class);
assertEquals(3L, map.get("magicNumber"));
}
@Test
public void testShouldPopulateFieldsOnNestedBeans() {
JsonObject toConvert = new JsonObject();
toConvert.addProperty("name", "frank");
JsonObject child = new JsonObject();
child.addProperty("value", "lots");
toConvert.add("bean", child);
ContainingBean bean = new Json().toType(toConvert.toString(), ContainingBean.class);
assertThat(bean.getName(), is("frank"));
assertThat(bean.getBean().getValue(), is("lots"));
}
@Test
public void testShouldProperlyFillInACapabilitiesObject() {
DesiredCapabilities capabilities =
new DesiredCapabilities("browser", CapabilityType.VERSION, Platform.ANY);
capabilities.setJavascriptEnabled(true);
String text = new BeanToJsonConverter().convert(capabilities);
Capabilities readCapabilities = new Json().toType(text, DesiredCapabilities.class);
assertEquals(capabilities, readCapabilities);
}
@Test
public void testShouldUseAMapToRepresentComplexObjects() {
JsonObject toModel = new JsonObject();
toModel.addProperty("thing", "hairy");
toModel.addProperty("hairy", "true");
Map<?,?> modelled = (Map<?,?>) new Json().toType(toModel.toString(), Object.class);
assertEquals(2, modelled.size());
}
@Test
public void testShouldConvertAResponseWithAnElementInIt() {
String json =
"{\"value\":{\"value\":\"\",\"text\":\"\",\"selected\":false,\"enabled\":true,\"id\":\"three\"},\"context\":\"con\",\"sessionId\":\"sess\"}";
Response converted = new Json().toType(json, Response.class);
Map<?,?> value = (Map<?,?>) converted.getValue();
assertEquals("three", value.get("id"));
}
@Test
public void testShouldBeAbleToCopeWithStringsThatLookLikeBooleans() {
String json =
"{\"value\":\"false\",\"context\":\"foo\",\"sessionId\":\"1210083863107\"}";
try {
new Json().toType(json, Response.class);
} catch (Exception e) {
e.printStackTrace();
fail("This should have worked");
}
}
@Test
public void testShouldBeAbleToSetAnObjectToABoolean() {
String json =
"{\"value\":true,\"context\":\"foo\",\"sessionId\":\"1210084658750\"}";
Response response = new Json().toType(json, Response.class);
assertThat(response.getValue(), is(true));
}
@Test
public void testCanHandleValueBeingAnArray() {
String[] value = {"Cheese", "Peas"};
Response response = new Response();
response.setSessionId("bar");
response.setValue(value);
response.setStatus(1512);
String json = new BeanToJsonConverter().convert(response);
Response converted = new Json().toType(json, Response.class);
assertEquals("bar", response.getSessionId());
assertEquals(2, ((List<?>) converted.getValue()).size());
assertEquals(1512, response.getStatus().intValue());
}
@Test
public void testShouldConvertObjectsInArraysToMaps() {
Date date = new Date();
Cookie cookie = new Cookie("foo", "bar", "localhost", "/rooted", date, true, true);
String rawJson = new BeanToJsonConverter().convert(Collections.singletonList(cookie));
List<?> list = new Json().toType(rawJson, List.class);
Object first = list.get(0);
assertTrue(first instanceof Map);
Map<?,?> map = (Map<?,?>) first;
assertMapEntry(map, "name", "foo");
assertMapEntry(map, "value", "bar");
assertMapEntry(map, "domain", "localhost");
assertMapEntry(map, "path", "/rooted");
assertMapEntry(map, "secure", true);
assertMapEntry(map, "httpOnly", true);
assertMapEntry(map, "expiry", TimeUnit.MILLISECONDS.toSeconds(date.getTime()));
}
private void assertMapEntry(Map<?,?> map, String key, Object expected) {
assertTrue("Missing key: " + key, map.containsKey(key));
assertEquals("Wrong value for key: " + key + ": " + map.get(key).getClass().getName(),
expected, map.get(key));
}
@Test
public void testShouldConvertAnArrayBackIntoAnArray() {
Exception e = new Exception();
String converted = new BeanToJsonConverter().convert(e);
Map<?,?> reconstructed = new Json().toType(converted, Map.class);
List<?> trace = (List<?>) reconstructed.get("stackTrace");
assertTrue(trace.get(0) instanceof Map);
}
@Test
public void testShouldBeAbleToReconsituteASessionId() {
String json = new BeanToJsonConverter().convert(new SessionId("id"));
SessionId sessionId = new Json().toType(json, SessionId.class);
assertEquals("id", sessionId.toString());
}
@Test
public void testShouldBeAbleToConvertACommand() {
SessionId sessionId = new SessionId("session id");
Command original = new Command(
sessionId,
DriverCommand.NEW_SESSION,
ImmutableMap.of("food", "cheese"));
String raw = new BeanToJsonConverter().convert(original);
Command converted = new Json().toType(raw, Command.class);
assertEquals(sessionId.toString(), converted.getSessionId().toString());
assertEquals(original.getName(), converted.getName());
assertEquals(1, converted.getParameters().keySet().size());
assertEquals("cheese", converted.getParameters().get("food"));
}
@Test
public void testShouldConvertCapabilitiesToAMapAndIncludeCustomValues() {
Capabilities caps = new ImmutableCapabilities("furrfu", "fishy");
String raw = new BeanToJsonConverter().convert(caps);
Capabilities converted = new Json().toType(raw, Capabilities.class);
assertEquals("fishy", converted.getCapability("furrfu"));
}
@Test
public void testShouldParseCapabilitiesWithLoggingPreferences() {
JsonObject prefs = new JsonObject();
prefs.addProperty("browser", "WARNING");
prefs.addProperty("client", "DEBUG");
prefs.addProperty("driver", "ALL");
prefs.addProperty("server", "OFF");
JsonObject caps = new JsonObject();
caps.add(CapabilityType.LOGGING_PREFS, prefs);
Capabilities converted = new Json().toType(caps.toString(), Capabilities.class);
LoggingPreferences lp =
(LoggingPreferences) converted.getCapability(CapabilityType.LOGGING_PREFS);
assertNotNull(lp);
assertEquals(Level.WARNING, lp.getLevel(LogType.BROWSER));
assertEquals(Level.FINE, lp.getLevel(LogType.CLIENT));
assertEquals(Level.ALL, lp.getLevel(LogType.DRIVER));
assertEquals(Level.OFF, lp.getLevel(LogType.SERVER));
}
@Test
public void testShouldNotParseQuotedJsonObjectsAsActualJsonObjects() {
JsonObject inner = new JsonObject();
inner.addProperty("color", "green");
inner.addProperty("number", 123);
JsonObject outer = new JsonObject();
outer.addProperty("inner", inner.toString());
String jsonStr = outer.toString();
Object convertedOuter = new Json().toType(jsonStr, Map.class);
assertThat(convertedOuter, instanceOf(Map.class));
Object convertedInner = ((Map<?,?>) convertedOuter).get("inner");
assertNotNull(convertedInner);
assertThat(convertedInner, instanceOf(String.class));
assertThat(convertedInner.toString(), equalTo(inner.toString()));
}
@Test
public void shouldBeAbleToConvertASelenium3CommandToASelenium2Command() {
SessionId expectedId = new SessionId("thisisakey");
JsonObject rawJson = new JsonObject();
// In selenium 2, the sessionId is an object. In selenium 3, it's a straight string.
rawJson.addProperty("sessionId", expectedId.toString());
rawJson.addProperty("name", "some command");
rawJson.add("parameters", new JsonObject());
String stringified = rawJson.toString();
Command converted = new Json().toType(stringified, Command.class);
assertEquals(expectedId, converted.getSessionId());
}
@Test
public void testShouldCallFromJsonMethodIfPresent() {
JsonAware res = new Json().toType("converted", JsonAware.class);
assertEquals("\"converted\"", res.convertedValue);
}
// Test for issue 8187
@Test
public void testDecodingResponseWithNumbersInValueObject() {
Response response = new Json().toType(
"{\"status\":0,\"value\":{\"width\":96,\"height\":46.19140625}}",
Response.class);
@SuppressWarnings("unchecked")
Map<String, Number> value = (Map<String, Number>) response.getValue();
assertEquals(96, value.get("width").intValue());
assertEquals(46, value.get("height").intValue());
assertEquals(46.19140625, value.get("height").doubleValue(), 0.00001);
}
@Test
public void testShouldRecognizeNumericStatus() {
Response response = new Json().toType(
"{\"status\":0,\"value\":\"cheese\"}",
Response.class);
assertEquals(0, response.getStatus().intValue());
assertEquals(new ErrorCodes().toState(0), response.getState());
String value = (String) response.getValue();
assertEquals("cheese", value);
}
@Test
public void testShouldRecognizeStringStatus() {
Response response = new Json().toType(
"{\"status\":\"success\",\"value\":\"cheese\"}",
Response.class);
assertEquals(0, response.getStatus().intValue());
assertEquals(new ErrorCodes().toState(0), response.getState());
String value = (String) response.getValue();
assertEquals("cheese", value);
}
@Test
public void testShouldConvertInvalidSelectorError() {
Response response = new Json().toType(
"{\"state\":\"invalid selector\",\"message\":\"invalid xpath selector\"}",
Response.class);
assertEquals(32, response.getStatus().intValue());
assertEquals(new ErrorCodes().toState(32), response.getState());
}
@Test
public void testShouldRecognizeStringState() {
Response response = new Json()
.toType(
"{\"state\":\"success\",\"value\":\"cheese\"}",
Response.class);
assertEquals("success", response.getState());
assertEquals(0, response.getStatus().intValue());
String value = (String) response.getValue();
assertEquals("cheese", value);
}
@Test
public void testNoStatusShouldBeNullInResponseObject() {
Response response = new Json().toType("{\"value\":\"cheese\"}", Response.class);
assertNull(response.getStatus());
}
@Test
public void canConvertAnEnumWithALowerCaseValue() {
Proxy.ProxyType type = new Json().toType("pac", Proxy.ProxyType.class);
assertEquals(Proxy.ProxyType.PAC, type);
}
public static class SimpleBean {
private String value;
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
}
public static class ContainingBean {
private String name;
private SimpleBean bean;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public SimpleBean getBean() {
return bean;
}
public void setBean(SimpleBean bean) {
this.bean = bean;
}
}
public static class JsonAware {
private String convertedValue;
public JsonAware(String convertedValue) {
this.convertedValue = convertedValue;
}
public static JsonAware fromJson(String json) {
return new JsonAware(json);
}
}
}
|
|
/*
* Copyright 2017 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.domain;
import com.thoughtworks.go.config.StageConfig;
import com.thoughtworks.go.config.elastic.ClusterProfile;
import com.thoughtworks.go.config.elastic.ElasticProfile;
import com.thoughtworks.go.util.command.EnvironmentVariableContext;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
public class DefaultJobPlan implements JobPlan {
private long jobId;
private long pipelineId;
private JobIdentifier identifier;
private Resources resources;
private List<ArtifactPlan> artifactPlans;
private List<ArtifactPropertiesGenerator> generators;
private String agentUuid;
private EnvironmentVariables variables;
private EnvironmentVariables triggerVariables;
private ElasticProfile elasticProfile;
private ClusterProfile clusterProfile;
private boolean fetchMaterials = StageConfig.DEFAULT_FETCH_MATERIALS;
private boolean cleanWorkingDir = StageConfig.DEFAULT_CLEAN_WORKING_DIR;
//FOR IBATIS
protected DefaultJobPlan() {
}
public DefaultJobPlan(Resources resources, List<ArtifactPlan> artifactPlans, List<ArtifactPropertiesGenerator> generators, long jobId,
JobIdentifier identifier, String agentUuid, EnvironmentVariables variables,
EnvironmentVariables triggerTimeVariables, ElasticProfile elasticProfile, ClusterProfile clusterProfile) {
this.jobId = jobId;
this.identifier = identifier;
this.resources = resources;
this.artifactPlans = artifactPlans;
this.generators = generators;
this.agentUuid = agentUuid;
this.variables = variables;
this.triggerVariables = triggerTimeVariables;
this.elasticProfile = elasticProfile;
this.clusterProfile = clusterProfile;
}
public String getPipelineName() {
return identifier.getPipelineName();
}
public String getStageName() {
return identifier.getStageName();
}
public String getName() {
return identifier.getBuildName();
}
public long getJobId() {
return jobId;
}
public JobIdentifier getIdentifier() {
return identifier;
}
public List<ArtifactPropertiesGenerator> getPropertyGenerators() {
return generators;
}
public List<ArtifactPlan> getArtifactPlans() {
return artifactPlans;
}
//USED BY IBatis - do NOT add to the interface
public List<ArtifactPropertiesGenerator> getGenerators() {
return generators;
}
public Resources getResources() {
return resources;
}
public void setGenerators(List<ArtifactPropertiesGenerator> generators) {
this.generators = new ArrayList<>(generators);
}
public void setJobId(long jobId) {
this.jobId = jobId;
}
public void setIdentifier(JobIdentifier identifier) {
this.identifier = identifier;
}
public void setArtifactPlans(List<ArtifactPlan> artifactPlans) {
this.artifactPlans = artifactPlans;
}
public void setResources(List<Resource> resources) {
this.resources = new Resources(resources);
}
public String toString() {
return "[JobPlan " + "identifier=" + identifier + "resources=" + resources + " artifactConfigs=" + artifactPlans +
" generators=" + generators + "]";
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
DefaultJobPlan plan = (DefaultJobPlan) o;
if (jobId != plan.jobId) {
return false;
}
if (generators != null ? !generators.equals(plan.generators) : plan.generators != null) {
return false;
}
if (identifier != null ? !identifier.equals(plan.identifier) : plan.identifier != null) {
return false;
}
if (artifactPlans != null ? !artifactPlans.equals(plan.artifactPlans) : plan.artifactPlans != null) {
return false;
}
if (resources != null ? !resources.equals(plan.resources) : plan.resources != null) {
return false;
}
return true;
}
public int hashCode() {
int result;
result = (resources != null ? resources.hashCode() : 0);
result = 31 * result + (artifactPlans != null ? artifactPlans.hashCode() : 0);
result = 31 * result + (generators != null ? generators.hashCode() : 0);
result = 31 * result + (int) (jobId ^ (jobId >>> 32));
result = 31 * result + (identifier != null ? identifier.hashCode() : 0);
return result;
}
public String getAgentUuid() {
return agentUuid;
}
public EnvironmentVariables getVariables() {
return variables;
}
public void setVariables(EnvironmentVariables variables) {
this.variables = new EnvironmentVariables(variables);
}
public long getPipelineId() {
return pipelineId;
}
public void setTriggerVariables(EnvironmentVariables environmentVariables) {
triggerVariables = new EnvironmentVariables(environmentVariables);
}
public boolean shouldFetchMaterials() {
return fetchMaterials;
}
public void applyTo(EnvironmentVariableContext variableContext) {
variables.addTo(variableContext);
triggerVariables.addToIfExists(variableContext);
}
public void setAgentUuid(String agentUuid) {
this.agentUuid = agentUuid;
}
public void setFetchMaterials(boolean fetchMaterials) {
this.fetchMaterials = fetchMaterials;
}
public void setCleanWorkingDir(boolean cleanWorkingDir) {
this.cleanWorkingDir = cleanWorkingDir;
}
public boolean shouldCleanWorkingDir() {
return cleanWorkingDir;
}
public ElasticProfile getElasticProfile() {
return elasticProfile;
}
public ClusterProfile getClusterProfile() {
return clusterProfile;
}
@Override
public boolean requiresElasticAgent() {
return elasticProfile != null;
}
public void setElasticProfile(ElasticProfile elasticProfile) {
this.elasticProfile = new ElasticProfile(elasticProfile.getId(), elasticProfile.getClusterProfileId(), elasticProfile);
}
@Override
public List<ArtifactPlan> getArtifactPlansOfType(final ArtifactPlanType artifactPlanType) {
return getArtifactPlans().stream().filter(artifactPlan -> artifactPlan.getArtifactPlanType() == artifactPlanType).collect(Collectors.toList());
}
@Override
public void setClusterProfile(ClusterProfile clusterProfile) {
if (clusterProfile != null) {
this.clusterProfile = new ClusterProfile(clusterProfile.getId(), clusterProfile.getPluginId(), clusterProfile);
}
}
@Override
public boolean assignedToAgent() {
return agentUuid == null;
}
}
|
|
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.engine.test.api.event;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.List;
import org.flowable.bpmn.model.BoundaryEvent;
import org.flowable.bpmn.model.MessageEventDefinition;
import org.flowable.bpmn.model.SignalEventDefinition;
import org.flowable.bpmn.model.TimerEventDefinition;
import org.flowable.common.engine.api.delegate.event.FlowableEngineEventType;
import org.flowable.common.engine.api.delegate.event.FlowableEvent;
import org.flowable.engine.delegate.event.FlowableActivityCancelledEvent;
import org.flowable.engine.delegate.event.FlowableActivityEvent;
import org.flowable.engine.delegate.event.FlowableCancelledEvent;
import org.flowable.engine.delegate.event.FlowableErrorEvent;
import org.flowable.engine.delegate.event.FlowableMessageEvent;
import org.flowable.engine.delegate.event.FlowableSignalEvent;
import org.flowable.engine.delegate.event.impl.FlowableActivityEventImpl;
import org.flowable.engine.delegate.event.impl.FlowableSignalEventImpl;
import org.flowable.engine.event.EventLogEntry;
import org.flowable.engine.impl.event.logger.EventLogger;
import org.flowable.engine.impl.test.PluggableFlowableTestCase;
import org.flowable.engine.runtime.Execution;
import org.flowable.engine.runtime.ProcessInstance;
import org.flowable.engine.test.Deployment;
import org.flowable.job.api.Job;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
* Test case for all {@link FlowableEvent}s related to activities.
*
* @author Joram Barrez
* @author Tijs Rademakers
*/
public class ActivityEventsTest extends PluggableFlowableTestCase {
private TestFlowableActivityEventListener listener;
protected EventLogger databaseEventLogger;
@BeforeEach
protected void setUp() throws Exception {
listener = new TestFlowableActivityEventListener(true);
processEngineConfiguration.getEventDispatcher().addEventListener(listener);
// Database event logger setup
databaseEventLogger = new EventLogger(processEngineConfiguration.getClock(), processEngineConfiguration.getObjectMapper());
runtimeService.addEventListener(databaseEventLogger);
}
@AfterEach
protected void tearDown() throws Exception {
if (listener != null) {
listener.clearEventsReceived();
processEngineConfiguration.getEventDispatcher().removeEventListener(listener);
}
// Remove entries
for (EventLogEntry eventLogEntry : managementService.getEventLogEntries(null, null)) {
managementService.deleteEventLogEntry(eventLogEntry.getLogNumber());
}
// Database event logger teardown
runtimeService.removeEventListener(databaseEventLogger);
}
/**
* Test starting and completed events for activities. Since these events are dispatched in the core of the PVM, not every individual activity type is tested. Rather, we test the main types (tasks,
* gateways, events, subprocesses).
*/
@Test
@Deployment
public void testActivityEvents() throws Exception {
// We're interested in the raw events, alter the listener to keep those as well
listener.setIgnoreRawActivityEvents(false);
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("activityProcess");
assertThat(processInstance).isNotNull();
assertThat(listener.getEventsReceived()).hasSize(3);
// Start-event activity started
FlowableActivityEvent activityEvent = (FlowableActivityEvent) listener.getEventsReceived().get(0);
assertThat(activityEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_STARTED);
assertThat(activityEvent.getActivityId()).isEqualTo("theStart");
assertThat(processInstance.getId()).isNotEqualTo(activityEvent.getExecutionId());
assertThat(activityEvent.getProcessInstanceId()).isEqualTo(processInstance.getProcessInstanceId());
assertThat(activityEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
// Start-event finished
activityEvent = (FlowableActivityEvent) listener.getEventsReceived().get(1);
assertThat(activityEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_COMPLETED);
assertThat(activityEvent.getActivityId()).isEqualTo("theStart");
assertThat(processInstance.getId()).isNotEqualTo(activityEvent.getExecutionId());
assertThat(activityEvent.getProcessInstanceId()).isEqualTo(processInstance.getProcessInstanceId());
assertThat(activityEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
// Usertask started
activityEvent = (FlowableActivityEvent) listener.getEventsReceived().get(2);
assertThat(activityEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_STARTED);
assertThat(activityEvent.getActivityId()).isEqualTo("shipOrder");
assertThat(processInstance.getId()).isNotEqualTo(activityEvent.getExecutionId());
assertThat(activityEvent.getProcessInstanceId()).isEqualTo(processInstance.getProcessInstanceId());
assertThat(activityEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
// Complete usertask
listener.clearEventsReceived();
org.flowable.task.api.Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
assertThat(task).isNotNull();
taskService.complete(task.getId());
// Subprocess execution is created
Execution execution = runtimeService.createExecutionQuery().parentId(processInstance.getId()).singleResult();
assertThat(execution).isNotNull();
assertThat(listener.getEventsReceived()).hasSize(5);
activityEvent = (FlowableActivityEvent) listener.getEventsReceived().get(0);
assertThat(activityEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_COMPLETED);
assertThat(activityEvent.getActivityId()).isEqualTo("shipOrder");
assertThat(processInstance.getId()).isNotEqualTo(activityEvent.getExecutionId());
assertThat(activityEvent.getProcessInstanceId()).isEqualTo(processInstance.getProcessInstanceId());
assertThat(activityEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
activityEvent = (FlowableActivityEvent) listener.getEventsReceived().get(1);
assertThat(activityEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_STARTED);
assertThat(activityEvent.getActivityId()).isEqualTo("subProcess");
assertThat(activityEvent.getExecutionId()).isEqualTo(execution.getId());
assertThat(activityEvent.getProcessInstanceId()).isEqualTo(processInstance.getProcessInstanceId());
assertThat(activityEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
activityEvent = (FlowableActivityEvent) listener.getEventsReceived().get(2);
assertThat(activityEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_STARTED);
assertThat(activityEvent.getActivityId()).isEqualTo("subProcessStart");
assertThat(execution.getId()).isNotEqualTo(activityEvent.getExecutionId());
assertThat(activityEvent.getProcessInstanceId()).isEqualTo(processInstance.getProcessInstanceId());
assertThat(activityEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
activityEvent = (FlowableActivityEvent) listener.getEventsReceived().get(3);
assertThat(activityEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_COMPLETED);
assertThat(activityEvent.getActivityId()).isEqualTo("subProcessStart");
assertThat(execution.getId()).isNotEqualTo(activityEvent.getExecutionId());
assertThat(activityEvent.getProcessInstanceId()).isEqualTo(processInstance.getProcessInstanceId());
assertThat(activityEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
activityEvent = (FlowableActivityEvent) listener.getEventsReceived().get(4);
assertThat(activityEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_STARTED);
assertThat(activityEvent.getActivityId()).isEqualTo("subTask");
assertThat(execution.getId()).isNotEqualTo(activityEvent.getExecutionId());
assertThat(activityEvent.getProcessInstanceId()).isEqualTo(processInstance.getProcessInstanceId());
assertThat(activityEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
listener.clearEventsReceived();
// Check gateway and intermediate throw event
org.flowable.task.api.Task subTask = taskService.createTaskQuery().processInstanceId(processInstance.getProcessInstanceId()).singleResult();
assertThat(subTask).isNotNull();
taskService.complete(subTask.getId());
assertThat(listener.getEventsReceived()).hasSize(10);
activityEvent = (FlowableActivityEvent) listener.getEventsReceived().get(0);
assertThat(activityEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_COMPLETED);
assertThat(activityEvent.getActivityId()).isEqualTo("subTask");
activityEvent = (FlowableActivityEvent) listener.getEventsReceived().get(1);
assertThat(activityEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_STARTED);
assertThat(activityEvent.getActivityId()).isEqualTo("gateway");
activityEvent = (FlowableActivityEvent) listener.getEventsReceived().get(2);
assertThat(activityEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_COMPLETED);
assertThat(activityEvent.getActivityId()).isEqualTo("gateway");
activityEvent = (FlowableActivityEvent) listener.getEventsReceived().get(3);
assertThat(activityEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_STARTED);
assertThat(activityEvent.getActivityId()).isEqualTo("throwMessageEvent");
activityEvent = (FlowableActivityEvent) listener.getEventsReceived().get(4);
assertThat(activityEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_COMPLETED);
assertThat(activityEvent.getActivityId()).isEqualTo("throwMessageEvent");
activityEvent = (FlowableActivityEvent) listener.getEventsReceived().get(5);
assertThat(activityEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_STARTED);
assertThat(activityEvent.getActivityId()).isEqualTo("endSubProcess");
activityEvent = (FlowableActivityEvent) listener.getEventsReceived().get(6);
assertThat(activityEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_COMPLETED);
assertThat(activityEvent.getActivityId()).isEqualTo("endSubProcess");
activityEvent = (FlowableActivityEvent) listener.getEventsReceived().get(7);
assertThat(activityEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_COMPLETED);
assertThat(activityEvent.getActivityId()).isEqualTo("subProcess");
activityEvent = (FlowableActivityEvent) listener.getEventsReceived().get(8);
assertThat(activityEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_STARTED);
assertThat(activityEvent.getActivityId()).isEqualTo("theEnd");
activityEvent = (FlowableActivityEvent) listener.getEventsReceived().get(9);
assertThat(activityEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_COMPLETED);
assertThat(activityEvent.getActivityId()).isEqualTo("theEnd");
}
/**
* Test events related to signalling
*/
@Test
@Deployment
public void testActivitySignalEvents() throws Exception {
// Two paths are active in the process, one receive-task and one intermediate catching signal-event
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("signalProcess");
assertThat(processInstance).isNotNull();
// Check regular signal through API
Execution executionWithSignal = runtimeService.createExecutionQuery().activityId("receivePayment").singleResult();
assertThat(executionWithSignal).isNotNull();
Execution signalExecution = runtimeService.createExecutionQuery().signalEventSubscriptionName("alert").singleResult();
assertThat(signalExecution).isNotNull();
runtimeService.trigger(executionWithSignal.getId());
assertThat(listener.getEventsReceived()).hasSize(2);
assertThat(listener.getEventsReceived().get(0)).isInstanceOf(FlowableSignalEvent.class);
FlowableSignalEvent signalEvent = (FlowableSignalEvent) listener.getEventsReceived().get(0);
assertThat(signalEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_SIGNAL_WAITING);
assertThat(signalEvent.getActivityId()).isEqualTo("shipOrder");
assertThat(signalEvent.getExecutionId()).isEqualTo(signalExecution.getId());
assertThat(signalEvent.getProcessInstanceId()).isEqualTo(signalExecution.getProcessInstanceId());
assertThat(signalEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(signalEvent.getSignalName()).isEqualTo("alert");
assertThat(signalEvent.getSignalData()).isNull();
assertThat(listener.getEventsReceived().get(1)).isInstanceOf(FlowableSignalEvent.class);
signalEvent = (FlowableSignalEvent) listener.getEventsReceived().get(1);
assertThat(signalEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_SIGNALED);
assertThat(signalEvent.getActivityId()).isEqualTo("receivePayment");
assertThat(signalEvent.getExecutionId()).isEqualTo(executionWithSignal.getId());
assertThat(signalEvent.getProcessInstanceId()).isEqualTo(executionWithSignal.getProcessInstanceId());
assertThat(signalEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(signalEvent.getSignalName()).isNull();
assertThat(signalEvent.getSignalData()).isNull();
listener.clearEventsReceived();
// Check signal using event, and pass in additional payload
Execution executionWithSignalEvent = runtimeService.createExecutionQuery().activityId("shipOrder").singleResult();
runtimeService.signalEventReceived("alert", executionWithSignalEvent.getId(), Collections.singletonMap("test", (Object) "test"));
assertThat(listener.getEventsReceived()).hasSize(1);
assertThat(listener.getEventsReceived().get(0)).isInstanceOf(FlowableSignalEvent.class);
signalEvent = (FlowableSignalEvent) listener.getEventsReceived().get(0);
assertThat(signalEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_SIGNALED);
assertThat(signalEvent.getActivityId()).isEqualTo("shipOrder");
assertThat(signalEvent.getExecutionId()).isEqualTo(executionWithSignalEvent.getId());
assertThat(signalEvent.getProcessInstanceId()).isEqualTo(executionWithSignalEvent.getProcessInstanceId());
assertThat(signalEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(signalEvent.getSignalName()).isEqualTo("alert");
assertThat(signalEvent.getSignalData()).isNotNull();
listener.clearEventsReceived();
assertDatabaseEventPresent(FlowableEngineEventType.ACTIVITY_SIGNALED);
}
/**
* Test to verify if signals coming from an intermediate throw-event trigger the right events to be dispatched.
*/
@Test
@Deployment
public void testActivitySignalEventsWithinProcess() throws Exception {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("signalProcess");
assertThat(processInstance).isNotNull();
org.flowable.task.api.Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
assertThat(task).isNotNull();
Execution executionWithSignalEvent = runtimeService.createExecutionQuery().activityId("shipOrder").singleResult();
taskService.complete(task.getId());
assertThat(listener.getEventsReceived()).hasSize(2);
assertThat(listener.getEventsReceived().get(0)).isInstanceOf(FlowableSignalEvent.class);
FlowableSignalEvent signalEvent = (FlowableSignalEvent) listener.getEventsReceived().get(0);
assertThat(signalEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_SIGNAL_WAITING);
assertThat(signalEvent.getActivityId()).isEqualTo("shipOrder");
assertThat(signalEvent.getExecutionId()).isEqualTo(executionWithSignalEvent.getId());
assertThat(signalEvent.getProcessInstanceId()).isEqualTo(executionWithSignalEvent.getProcessInstanceId());
assertThat(signalEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(signalEvent.getSignalName()).isEqualTo("alert");
assertThat(signalEvent.getSignalData()).isNull();
assertThat(listener.getEventsReceived().get(1)).isInstanceOf(FlowableSignalEvent.class);
signalEvent = (FlowableSignalEvent) listener.getEventsReceived().get(1);
assertThat(signalEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_SIGNALED);
assertThat(signalEvent.getActivityId()).isEqualTo("shipOrder");
assertThat(signalEvent.getExecutionId()).isEqualTo(executionWithSignalEvent.getId());
assertThat(signalEvent.getProcessInstanceId()).isEqualTo(executionWithSignalEvent.getProcessInstanceId());
assertThat(signalEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(signalEvent.getSignalName()).isEqualTo("alert");
assertThat(signalEvent.getSignalData()).isNull();
}
/**
* Test events related to message events, called from the API.
*/
@Test
@Deployment
public void testActivityMessageEvents() throws Exception {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("messageProcess");
assertThat(processInstance).isNotNull();
Execution executionWithMessage = runtimeService.createExecutionQuery().activityId("shipOrder").singleResult();
assertThat(executionWithMessage).isNotNull();
runtimeService.messageEventReceived("messageName", executionWithMessage.getId());
assertThat(listener.getEventsReceived()).hasSize(2);
// First, a message waiting event is expected
assertThat(listener.getEventsReceived().get(0)).isInstanceOf(FlowableMessageEvent.class);
FlowableMessageEvent messageEvent = (FlowableMessageEvent) listener.getEventsReceived().get(0);
assertThat(messageEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_MESSAGE_WAITING);
assertThat(messageEvent.getActivityId()).isEqualTo("shipOrder");
assertThat(messageEvent.getExecutionId()).isEqualTo(executionWithMessage.getId());
assertThat(messageEvent.getProcessInstanceId()).isEqualTo(executionWithMessage.getProcessInstanceId());
assertThat(messageEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(messageEvent.getMessageName()).isEqualTo("messageName");
assertThat(messageEvent.getMessageData()).isNull();
// Second, a message received event is expected
assertThat(listener.getEventsReceived().get(1)).isInstanceOf(FlowableMessageEvent.class);
messageEvent = (FlowableMessageEvent) listener.getEventsReceived().get(1);
assertThat(messageEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_MESSAGE_RECEIVED);
assertThat(messageEvent.getActivityId()).isEqualTo("shipOrder");
assertThat(messageEvent.getExecutionId()).isEqualTo(executionWithMessage.getId());
assertThat(messageEvent.getProcessInstanceId()).isEqualTo(executionWithMessage.getProcessInstanceId());
assertThat(messageEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(messageEvent.getMessageName()).isEqualTo("messageName");
assertThat(messageEvent.getMessageData()).isNull();
assertDatabaseEventPresent(FlowableEngineEventType.ACTIVITY_MESSAGE_RECEIVED);
}
/**
* Test events related to message events, called from the API, targeting an event-subprocess.
*/
@Test
@Deployment
public void testActivityMessageEventsInEventSubprocess() throws Exception {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("messageProcess");
assertThat(processInstance).isNotNull();
Execution executionWithMessage = runtimeService.createExecutionQuery().messageEventSubscriptionName("messageName").singleResult();
assertThat(executionWithMessage).isNotNull();
runtimeService.messageEventReceived("messageName", executionWithMessage.getId());
// Only message events should be present; no signal event, since the event-subprocess is
// not signaled but executed instead
assertThat(listener.getEventsReceived()).hasSize(2);
// A message waiting event is expected
assertThat(listener.getEventsReceived().get(0)).isInstanceOf(FlowableMessageEvent.class);
FlowableMessageEvent messageEvent = (FlowableMessageEvent) listener.getEventsReceived().get(0);
assertThat(messageEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_MESSAGE_WAITING);
assertThat(messageEvent.getActivityId()).isEqualTo("catchMessage");
assertThat(messageEvent.getExecutionId()).isEqualTo(executionWithMessage.getId());
assertThat(messageEvent.getProcessInstanceId()).isEqualTo(executionWithMessage.getProcessInstanceId());
assertThat(messageEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(messageEvent.getMessageName()).isEqualTo("messageName");
assertThat(messageEvent.getMessageData()).isNull();
// A message received event is expected
assertThat(listener.getEventsReceived().get(1)).isInstanceOf(FlowableMessageEvent.class);
messageEvent = (FlowableMessageEvent) listener.getEventsReceived().get(1);
assertThat(messageEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_MESSAGE_RECEIVED);
assertThat(messageEvent.getActivityId()).isEqualTo("catchMessage");
assertThat(messageEvent.getExecutionId()).isEqualTo(executionWithMessage.getId());
assertThat(messageEvent.getProcessInstanceId()).isEqualTo(executionWithMessage.getProcessInstanceId());
assertThat(messageEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(messageEvent.getMessageName()).isEqualTo("messageName");
assertThat(messageEvent.getMessageData()).isNull();
assertDatabaseEventPresent(FlowableEngineEventType.ACTIVITY_MESSAGE_WAITING);
assertDatabaseEventPresent(FlowableEngineEventType.ACTIVITY_MESSAGE_RECEIVED);
}
/**
* Test events related to message events, called from the API, targeting an event-subprocess.
*/
@Test
@Deployment(resources = "org/flowable/engine/test/api/event/ActivityEventsTest.testActivityMessageEventsInEventSubprocess.bpmn20.xml")
public void testActivityMessageEventsInEventSubprocessForCancel() throws Exception {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("messageProcess");
assertThat(processInstance).isNotNull();
Execution executionWithMessage = runtimeService.createExecutionQuery().messageEventSubscriptionName("messageName").singleResult();
assertThat(executionWithMessage).isNotNull();
org.flowable.task.api.Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
assertThat(task.getName()).isEqualTo("Wait");
taskService.complete(task.getId());
// Only message events should be present; no signal event, since the event-subprocess is
// not signaled but executed instead
assertThat(listener.getEventsReceived()).hasSize(2);
// A message waiting event is expected
assertThat(listener.getEventsReceived().get(0)).isInstanceOf(FlowableMessageEvent.class);
FlowableMessageEvent messageEvent = (FlowableMessageEvent) listener.getEventsReceived().get(0);
assertThat(messageEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_MESSAGE_WAITING);
assertThat(messageEvent.getActivityId()).isEqualTo("catchMessage");
assertThat(messageEvent.getExecutionId()).isEqualTo(executionWithMessage.getId());
assertThat(messageEvent.getProcessInstanceId()).isEqualTo(executionWithMessage.getProcessInstanceId());
assertThat(messageEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(messageEvent.getMessageName()).isEqualTo("messageName");
assertThat(messageEvent.getMessageData()).isNull();
// A message received event is expected
assertThat(listener.getEventsReceived().get(1)).isInstanceOf(FlowableMessageEvent.class);
messageEvent = (FlowableMessageEvent) listener.getEventsReceived().get(1);
assertThat(messageEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_MESSAGE_CANCELLED);
assertThat(messageEvent.getActivityId()).isEqualTo("catchMessage");
assertThat(messageEvent.getExecutionId()).isEqualTo(executionWithMessage.getId());
assertThat(messageEvent.getProcessInstanceId()).isEqualTo(executionWithMessage.getProcessInstanceId());
assertThat(messageEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(messageEvent.getMessageName()).isEqualTo("messageName");
assertThat(messageEvent.getMessageData()).isNull();
assertDatabaseEventPresent(FlowableEngineEventType.ACTIVITY_MESSAGE_WAITING);
}
/**
* Test events related to compensation events.
*/
@Test
@Deployment
public void testActivityCompensationEvents() throws Exception {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("compensationProcess");
assertThat(processInstance).isNotNull();
org.flowable.task.api.Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
assertThat(task).isNotNull();
// Complete task, next a compensation event will be thrown
taskService.complete(task.getId());
assertThat(listener.getEventsReceived()).hasSize(1);
// A compensate-event is expected
assertThat(listener.getEventsReceived().get(0)).isInstanceOf(FlowableActivityEvent.class);
FlowableActivityEvent activityEvent = (FlowableActivityEvent) listener.getEventsReceived().get(0);
assertThat(activityEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_COMPENSATE);
assertThat(activityEvent.getActivityId()).isEqualTo("usertask");
// A new execution is created for the compensation event; this should be visible in the event
assertThat(processInstance.getId()).isNotEqualTo(activityEvent.getExecutionId());
assertThat(activityEvent.getProcessInstanceId()).isEqualTo(processInstance.getProcessInstanceId());
assertThat(activityEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
// Check if the process is still alive
processInstance = runtimeService.createProcessInstanceQuery().processInstanceId(processInstance.getId()).singleResult();
assertThat(processInstance).isNotNull();
assertDatabaseEventPresent(FlowableEngineEventType.ACTIVITY_COMPENSATE);
}
/**
* Test events related to error-events
*/
@Test
@Deployment
public void testActivityErrorEvents() throws Exception {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("errorProcess");
assertThat(processInstance).isNotNull();
// Error-handling should have ended the process
ProcessInstance afterErrorInstance = runtimeService.createProcessInstanceQuery().processInstanceId(processInstance.getId()).singleResult();
assertThat(afterErrorInstance).isNull();
FlowableErrorEvent errorEvent = null;
for (FlowableEvent event : listener.getEventsReceived()) {
if (event instanceof FlowableErrorEvent) {
if (errorEvent == null) {
errorEvent = (FlowableErrorEvent) event;
} else {
fail("Only one ActivityErrorEvent expected");
}
}
}
assertThat(errorEvent).isNotNull();
assertThat(errorEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_ERROR_RECEIVED);
assertThat(errorEvent.getActivityId()).isEqualTo("catchError");
assertThat(errorEvent.getErrorId()).isEqualTo("myError");
assertThat(errorEvent.getErrorCode()).isEqualTo("123");
assertThat(errorEvent.getProcessInstanceId()).isEqualTo(processInstance.getId());
assertThat(errorEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(processInstance.getId()).isNotEqualTo(errorEvent.getExecutionId());
}
/**
* Test events related to error-events, thrown from within process-execution (e.g. a service task).
*/
@Test
@Deployment
public void testActivityErrorEventsFromBPMNError() throws Exception {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("errorProcess");
assertThat(processInstance).isNotNull();
// Error-handling should have ended the process
ProcessInstance afterErrorInstance = runtimeService.createProcessInstanceQuery().processInstanceId(processInstance.getId()).singleResult();
assertThat(afterErrorInstance).isNull();
FlowableErrorEvent errorEvent = null;
for (FlowableEvent event : listener.getEventsReceived()) {
if (event instanceof FlowableErrorEvent) {
if (errorEvent == null) {
errorEvent = (FlowableErrorEvent) event;
} else {
fail("Only one ActivityErrorEvent expected");
}
}
}
assertThat(errorEvent).isNotNull();
assertThat(errorEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_ERROR_RECEIVED);
assertThat(errorEvent.getActivityId()).isEqualTo("catchError");
assertThat(errorEvent.getErrorId()).isEqualTo("23");
assertThat(errorEvent.getErrorCode()).isEqualTo("23");
assertThat(errorEvent.getProcessInstanceId()).isEqualTo(processInstance.getId());
assertThat(errorEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(processInstance.getId()).isNotEqualTo(errorEvent.getExecutionId());
}
@Test
@Deployment(resources = "org/flowable/engine/test/api/event/JobEventsTest.testJobEntityEvents.bpmn20.xml")
public void testActivityTimeOutEvent() {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("testJobEvents");
Job theJob = managementService.createTimerJobQuery().processInstanceId(processInstance.getId()).singleResult();
assertThat(theJob).isNotNull();
// Force timer to fire
Calendar tomorrow = Calendar.getInstance();
tomorrow.add(Calendar.DAY_OF_YEAR, 1);
processEngineConfiguration.getClock().setCurrentTime(tomorrow.getTime());
waitForJobExecutorToProcessAllJobs(2000, 1000);
// Check timeout has been dispatched
assertThat(listener.getEventsReceived()).hasSize(1);
FlowableEvent activitiEvent = listener.getEventsReceived().get(0);
assertThat(activitiEvent.getType()).as("ACTIVITY_CANCELLED event expected")
.isEqualTo(FlowableEngineEventType.ACTIVITY_CANCELLED);
FlowableActivityCancelledEvent cancelledEvent = (FlowableActivityCancelledEvent) activitiEvent;
assertThat(cancelledEvent.getCause()).as("Boundary timer is the cause of the cancellation")
.isInstanceOf(BoundaryEvent.class);
BoundaryEvent boundaryEvent = (BoundaryEvent) cancelledEvent.getCause();
assertThat(boundaryEvent.getEventDefinitions().get(0)).as("Boundary timer is the cause of the cancellation")
.isInstanceOf(TimerEventDefinition.class);
}
@Test
@Deployment(resources = "org/flowable/engine/test/bpmn/event/timer/BoundaryTimerEventTest.testTimerOnNestingOfSubprocesses.bpmn20.xml")
public void testActivityTimeOutEventInSubProcess() {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("timerOnNestedSubprocesses");
Job theJob = managementService.createTimerJobQuery().processInstanceId(processInstance.getId()).singleResult();
assertThat(theJob).isNotNull();
// Force timer to fire
Calendar timeToFire = Calendar.getInstance();
timeToFire.add(Calendar.HOUR, 2);
timeToFire.add(Calendar.SECOND, 5);
processEngineConfiguration.getClock().setCurrentTime(timeToFire.getTime());
waitForJobExecutorToProcessAllJobs(2000, 200);
// Check timeout-events have been dispatched
assertThat(listener.getEventsReceived()).hasSize(4);
List<String> eventIdList = new ArrayList<>();
for (FlowableEvent event : listener.getEventsReceived()) {
assertThat(event.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_CANCELLED);
assertThat(((FlowableActivityCancelledEvent) event).getCause()).as("Boundary timer is the cause of the cancellation")
.isInstanceOf(BoundaryEvent.class);
assertThat(((BoundaryEvent) ((FlowableActivityCancelledEvent) event).getCause()).getEventDefinitions().get(0))
.as("Boundary timer is the cause of the cancellation")
.isInstanceOf(TimerEventDefinition.class);
eventIdList.add(((FlowableActivityEventImpl) event).getActivityId());
}
assertThat(eventIdList.indexOf("innerTask1")).isGreaterThanOrEqualTo(0);
assertThat(eventIdList.indexOf("innerTask2")).isGreaterThanOrEqualTo(0);
assertThat(eventIdList.indexOf("subprocess")).isGreaterThanOrEqualTo(0);
assertThat(eventIdList.indexOf("innerSubprocess")).isGreaterThanOrEqualTo(0);
}
@Test
@Deployment
public void testActivityTimeOutEventInCallActivity() {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("timerOnCallActivity");
Job theJob = managementService.createTimerJobQuery().processInstanceId(processInstance.getId()).singleResult();
assertThat(theJob).isNotNull();
// Force timer to fire
Calendar timeToFire = Calendar.getInstance();
timeToFire.add(Calendar.HOUR, 2);
timeToFire.add(Calendar.MINUTE, 5);
processEngineConfiguration.getClock().setCurrentTime(timeToFire.getTime());
waitForJobExecutorToProcessAllJobs(7000, 500);
// Check timeout-events have been dispatched
assertThat(listener.getEventsReceived()).hasSize(4);
List<String> eventIdList = new ArrayList<>();
for (FlowableEvent event : listener.getEventsReceived()) {
assertThat(event.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_CANCELLED);
assertThat(((FlowableCancelledEvent) event).getCause()).as("Boundary timer is the cause of the cancellation")
.isInstanceOf(BoundaryEvent.class);
BoundaryEvent boundaryEvent = (BoundaryEvent) ((FlowableCancelledEvent) event).getCause();
assertThat(boundaryEvent.getEventDefinitions().get(0)).as("Boundary timer is the cause of the cancellation")
.isInstanceOf(TimerEventDefinition.class);
eventIdList.add(((FlowableActivityEventImpl) event).getActivityId());
}
assertThat(eventIdList)
.containsOnly("innerTask1", "innerTask2", "callActivity", "innerSubprocess");
}
/**
* Test events related to message events, called from the API.
*/
@Test
@Deployment
public void testActivityMessageBoundaryEventsOnUserTask() throws Exception {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("messageOnUserTaskProcess");
assertThat(processInstance).isNotNull();
Execution executionWithMessage = runtimeService.createExecutionQuery().messageEventSubscriptionName("message_1").singleResult();
assertThat(executionWithMessage).isNotNull();
runtimeService.messageEventReceived("message_1", executionWithMessage.getId());
assertThat(listener.getEventsReceived()).hasSize(3);
// First, a message waiting event is expected
assertThat(listener.getEventsReceived().get(0)).isInstanceOf(FlowableMessageEvent.class);
FlowableMessageEvent messageEvent = (FlowableMessageEvent) listener.getEventsReceived().get(0);
assertThat(messageEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_MESSAGE_WAITING);
assertThat(messageEvent.getActivityId()).isEqualTo("boundaryMessageEventCatching");
assertThat(messageEvent.getExecutionId()).isEqualTo(executionWithMessage.getId());
assertThat(messageEvent.getProcessInstanceId()).isEqualTo(executionWithMessage.getProcessInstanceId());
assertThat(messageEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(messageEvent.getMessageName()).isEqualTo("message_1");
assertThat(messageEvent.getMessageData()).isNull();
// Second, a message received event is expected
assertThat(listener.getEventsReceived().get(1)).isInstanceOf(FlowableMessageEvent.class);
messageEvent = (FlowableMessageEvent) listener.getEventsReceived().get(1);
assertThat(messageEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_MESSAGE_RECEIVED);
assertThat(messageEvent.getActivityId()).isEqualTo("boundaryMessageEventCatching");
assertThat(messageEvent.getExecutionId()).isEqualTo(executionWithMessage.getId());
assertThat(messageEvent.getProcessInstanceId()).isEqualTo(executionWithMessage.getProcessInstanceId());
assertThat(messageEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(messageEvent.getMessageName()).isEqualTo("message_1");
assertThat(messageEvent.getMessageData()).isNull();
// Next, an activity-cancelled event is expected for the user task, as a result of the message
assertThat(listener.getEventsReceived().get(2)).isInstanceOf(FlowableActivityCancelledEvent.class);
FlowableActivityCancelledEvent signalEvent = (FlowableActivityCancelledEvent) listener.getEventsReceived().get(2);
assertThat(signalEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_CANCELLED);
assertThat(signalEvent.getActivityId()).isEqualTo("cloudformtask1");
assertThat(signalEvent.getProcessInstanceId()).isEqualTo(executionWithMessage.getProcessInstanceId());
assertThat(signalEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(signalEvent.getCause()).isNotNull();
assertThat(signalEvent.getCause()).isInstanceOf(BoundaryEvent.class);
BoundaryEvent cause = (BoundaryEvent) signalEvent.getCause();
assertThat(((MessageEventDefinition) cause.getEventDefinitions().get(0)).getMessageRef()).isEqualTo("message_1");
assertDatabaseEventPresent(FlowableEngineEventType.ACTIVITY_MESSAGE_WAITING);
assertDatabaseEventPresent(FlowableEngineEventType.ACTIVITY_MESSAGE_RECEIVED);
}
/**
* Test events related to message events, called from the API.
*/
@Test
@Deployment(resources = "org/flowable/engine/test/api/event/ActivityEventsTest.testActivityMessageBoundaryEventsOnUserTask.bpmn20.xml")
public void testActivityMessageBoundaryEventsOnUserTaskForCancel() throws Exception {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("messageOnUserTaskProcess");
assertThat(processInstance).isNotNull();
Execution executionWithMessage = runtimeService.createExecutionQuery().messageEventSubscriptionName("message_1").singleResult();
assertThat(executionWithMessage).isNotNull();
org.flowable.task.api.Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
assertThat(task.getName()).isEqualTo("User Task");
taskService.complete(task.getId());
assertThat(listener.getEventsReceived()).hasSize(2);
// First, a message waiting event is expected
assertThat(listener.getEventsReceived().get(0)).isInstanceOf(FlowableMessageEvent.class);
FlowableMessageEvent messageEvent = (FlowableMessageEvent) listener.getEventsReceived().get(0);
assertThat(messageEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_MESSAGE_WAITING);
assertThat(messageEvent.getActivityId()).isEqualTo("boundaryMessageEventCatching");
assertThat(messageEvent.getExecutionId()).isEqualTo(executionWithMessage.getId());
assertThat(messageEvent.getProcessInstanceId()).isEqualTo(executionWithMessage.getProcessInstanceId());
assertThat(messageEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(messageEvent.getMessageName()).isEqualTo("message_1");
assertThat(messageEvent.getMessageData()).isNull();
// Second, a message received event is expected
assertThat(listener.getEventsReceived().get(1)).isInstanceOf(FlowableMessageEvent.class);
messageEvent = (FlowableMessageEvent) listener.getEventsReceived().get(1);
assertThat(messageEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_MESSAGE_CANCELLED);
assertThat(messageEvent.getActivityId()).isEqualTo("boundaryMessageEventCatching");
assertThat(messageEvent.getExecutionId()).isEqualTo(executionWithMessage.getId());
assertThat(messageEvent.getProcessInstanceId()).isEqualTo(executionWithMessage.getProcessInstanceId());
assertThat(messageEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(messageEvent.getMessageName()).isEqualTo("message_1");
assertThat(messageEvent.getMessageData()).isNull();
assertDatabaseEventPresent(FlowableEngineEventType.ACTIVITY_MESSAGE_WAITING);
}
/**
* Test events related to message events, called from the API.
*/
@Test
@Deployment
public void testActivityMessageBoundaryEventsOnSubProcess() throws Exception {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("messageOnSubProcess");
assertThat(processInstance).isNotNull();
Execution executionWithMessage = runtimeService.createExecutionQuery().activityId("boundaryMessageEventCatching").singleResult();
assertThat(executionWithMessage).isNotNull();
runtimeService.messageEventReceived("message_1", executionWithMessage.getId());
assertThat(listener.getEventsReceived()).hasSize(4);
// First, a message waiting event is expected
assertThat(listener.getEventsReceived().get(0)).isInstanceOf(FlowableMessageEvent.class);
FlowableMessageEvent messageEvent = (FlowableMessageEvent) listener.getEventsReceived().get(0);
assertThat(messageEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_MESSAGE_WAITING);
assertThat(messageEvent.getActivityId()).isEqualTo("boundaryMessageEventCatching");
assertThat(messageEvent.getExecutionId()).isEqualTo(executionWithMessage.getId());
assertThat(messageEvent.getProcessInstanceId()).isEqualTo(executionWithMessage.getProcessInstanceId());
assertThat(messageEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(messageEvent.getMessageName()).isEqualTo("message_1");
assertThat(messageEvent.getMessageData()).isNull();
// Second, a message received event is expected
assertThat(listener.getEventsReceived().get(1)).isInstanceOf(FlowableMessageEvent.class);
messageEvent = (FlowableMessageEvent) listener.getEventsReceived().get(1);
assertThat(messageEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_MESSAGE_RECEIVED);
assertThat(messageEvent.getActivityId()).isEqualTo("boundaryMessageEventCatching");
assertThat(messageEvent.getExecutionId()).isEqualTo(executionWithMessage.getId());
assertThat(messageEvent.getProcessInstanceId()).isEqualTo(executionWithMessage.getProcessInstanceId());
assertThat(messageEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(messageEvent.getMessageName()).isEqualTo("message_1");
assertThat(messageEvent.getMessageData()).isNull();
// Next, activity-cancelled events are expected for the user task and the enclosing sub-process, as a result of the message
assertThat(listener.getEventsReceived().get(2)).isInstanceOf(FlowableActivityCancelledEvent.class);
FlowableActivityCancelledEvent cancelEvent = (FlowableActivityCancelledEvent) listener.getEventsReceived().get(2);
assertThat(cancelEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_CANCELLED);
assertThat(cancelEvent.getActivityId()).isEqualTo("cloudformtask1");
assertThat(cancelEvent.getProcessInstanceId()).isEqualTo(executionWithMessage.getProcessInstanceId());
assertThat(cancelEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(cancelEvent.getCause()).isNotNull();
assertThat(cancelEvent.getCause()).isInstanceOf(BoundaryEvent.class);
BoundaryEvent cause = (BoundaryEvent) cancelEvent.getCause();
assertThat(((MessageEventDefinition) cause.getEventDefinitions().get(0)).getMessageRef()).isEqualTo("message_1");
assertThat(listener.getEventsReceived().get(3)).isInstanceOf(FlowableActivityCancelledEvent.class);
cancelEvent = (FlowableActivityCancelledEvent) listener.getEventsReceived().get(3);
assertThat(cancelEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_CANCELLED);
assertThat(cancelEvent.getActivityId()).isEqualTo("subProcess");
assertThat(cancelEvent.getProcessInstanceId()).isEqualTo(executionWithMessage.getProcessInstanceId());
assertThat(cancelEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(cancelEvent.getCause()).isNotNull();
assertThat(cancelEvent.getCause()).isInstanceOf(BoundaryEvent.class);
cause = (BoundaryEvent) cancelEvent.getCause();
assertThat(((MessageEventDefinition) cause.getEventDefinitions().get(0)).getMessageRef()).isEqualTo("message_1");
assertDatabaseEventPresent(FlowableEngineEventType.ACTIVITY_MESSAGE_WAITING);
assertDatabaseEventPresent(FlowableEngineEventType.ACTIVITY_MESSAGE_RECEIVED);
}
/**
* Test events related to message events, called from the API.
*/
@Test
@Deployment(resources = "org/flowable/engine/test/api/event/ActivityEventsTest.testActivityMessageBoundaryEventsOnSubProcess.bpmn20.xml")
public void testActivityMessageBoundaryEventsOnSubProcessForCancel() throws Exception {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("messageOnSubProcess");
assertThat(processInstance).isNotNull();
Execution executionWithMessage = runtimeService.createExecutionQuery().activityId("boundaryMessageEventCatching").singleResult();
assertThat(executionWithMessage).isNotNull();
org.flowable.task.api.Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
taskService.complete(task.getId());
assertThat(listener.getEventsReceived()).hasSize(2);
// First, a message waiting event is expected
assertThat(listener.getEventsReceived().get(0)).isInstanceOf(FlowableMessageEvent.class);
FlowableMessageEvent messageEvent = (FlowableMessageEvent) listener.getEventsReceived().get(0);
assertThat(messageEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_MESSAGE_WAITING);
assertThat(messageEvent.getActivityId()).isEqualTo("boundaryMessageEventCatching");
assertThat(messageEvent.getExecutionId()).isEqualTo(executionWithMessage.getId());
assertThat(messageEvent.getProcessInstanceId()).isEqualTo(executionWithMessage.getProcessInstanceId());
assertThat(messageEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(messageEvent.getMessageName()).isEqualTo("message_1");
assertThat(messageEvent.getMessageData()).isNull();
// Second, a message received event is expected
assertThat(listener.getEventsReceived().get(1)).isInstanceOf(FlowableMessageEvent.class);
messageEvent = (FlowableMessageEvent) listener.getEventsReceived().get(1);
assertThat(messageEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_MESSAGE_CANCELLED);
assertThat(messageEvent.getActivityId()).isEqualTo("boundaryMessageEventCatching");
assertThat(messageEvent.getExecutionId()).isEqualTo(executionWithMessage.getId());
assertThat(messageEvent.getProcessInstanceId()).isEqualTo(executionWithMessage.getProcessInstanceId());
assertThat(messageEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(messageEvent.getMessageName()).isEqualTo("message_1");
assertThat(messageEvent.getMessageData()).isNull();
assertDatabaseEventPresent(FlowableEngineEventType.ACTIVITY_MESSAGE_WAITING);
}
@Test
@Deployment
public void testActivitySignalBoundaryEventsOnSubProcess() throws Exception {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("signalOnSubProcess");
assertThat(processInstance).isNotNull();
Execution executionWithSignal = runtimeService.createExecutionQuery().activityId("userTaskInsideProcess").singleResult();
assertThat(executionWithSignal).isNotNull();
runtimeService.signalEventReceived("signalName");
assertThat(listener.getEventsReceived()).hasSize(4);
assertThat(listener.getEventsReceived().get(0)).isInstanceOf(FlowableSignalEventImpl.class);
FlowableSignalEventImpl signalEvent = (FlowableSignalEventImpl) listener.getEventsReceived().get(0);
assertThat(signalEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_SIGNAL_WAITING);
assertThat(signalEvent.getActivityId()).isEqualTo("boundarySignalEventCatching");
assertThat(signalEvent.getProcessInstanceId()).isEqualTo(executionWithSignal.getProcessInstanceId());
assertThat(signalEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(listener.getEventsReceived().get(1)).isInstanceOf(FlowableSignalEventImpl.class);
signalEvent = (FlowableSignalEventImpl) listener.getEventsReceived().get(1);
assertThat(signalEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_SIGNALED);
assertThat(signalEvent.getActivityId()).isEqualTo("boundarySignalEventCatching");
assertThat(signalEvent.getProcessInstanceId()).isEqualTo(executionWithSignal.getProcessInstanceId());
assertThat(signalEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(listener.getEventsReceived().get(2)).isInstanceOf(FlowableActivityCancelledEvent.class);
FlowableActivityCancelledEvent cancelEvent = (FlowableActivityCancelledEvent) listener.getEventsReceived().get(2);
assertThat(cancelEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_CANCELLED);
assertThat(cancelEvent.getActivityId()).isEqualTo("userTaskInsideProcess");
assertThat(cancelEvent.getExecutionId()).isEqualTo(executionWithSignal.getId());
assertThat(cancelEvent.getProcessInstanceId()).isEqualTo(executionWithSignal.getProcessInstanceId());
assertThat(cancelEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(cancelEvent.getCause()).isNotNull();
assertThat(cancelEvent.getCause()).isInstanceOf(BoundaryEvent.class);
BoundaryEvent cause = (BoundaryEvent) cancelEvent.getCause();
assertThat(cause.getEventDefinitions().get(0)).isInstanceOf(SignalEventDefinition.class);
SignalEventDefinition signalEventDefinition = ((SignalEventDefinition) cause.getEventDefinitions().get(0));
assertThat(signalEventDefinition.getSignalRef()).isEqualTo("signal");
assertThat(repositoryService.getBpmnModel(cancelEvent.getProcessDefinitionId()).getSignal(signalEventDefinition.getSignalRef()).getName())
.isEqualTo("signalName");
assertThat(listener.getEventsReceived().get(3)).isInstanceOf(FlowableActivityCancelledEvent.class);
cancelEvent = (FlowableActivityCancelledEvent) listener.getEventsReceived().get(3);
assertThat(cancelEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_CANCELLED);
assertThat(cancelEvent.getActivityId()).isEqualTo("subProcess");
assertThat(cancelEvent.getProcessInstanceId()).isEqualTo(executionWithSignal.getProcessInstanceId());
assertThat(cancelEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(cancelEvent.getCause()).isNotNull();
assertThat(cancelEvent.getCause()).isInstanceOf(BoundaryEvent.class);
cause = (BoundaryEvent) cancelEvent.getCause();
assertThat(cause.getEventDefinitions().get(0)).isInstanceOf(SignalEventDefinition.class);
signalEventDefinition = ((SignalEventDefinition) cause.getEventDefinitions().get(0));
assertThat(signalEventDefinition.getSignalRef()).isEqualTo("signal");
assertThat(repositoryService.getBpmnModel(cancelEvent.getProcessDefinitionId()).getSignal(signalEventDefinition.getSignalRef()).getName())
.isEqualTo("signalName");
}
@Test
@Deployment
public void testActivitySignalBoundaryEventsOnUserTask() throws Exception {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("signalOnUserTask");
assertThat(processInstance).isNotNull();
Execution executionWithSignal = runtimeService.createExecutionQuery().activityId("userTask").singleResult();
assertThat(executionWithSignal).isNotNull();
runtimeService.signalEventReceived("signalName");
// Two signal events are expected (waiting and signaled), followed by the cancellation of the user task as a result of the signal
assertThat(listener.getEventsReceived()).hasSize(3);
assertThat(listener.getEventsReceived().get(0)).isInstanceOf(FlowableSignalEventImpl.class);
FlowableSignalEventImpl signalEvent = (FlowableSignalEventImpl) listener.getEventsReceived().get(0);
assertThat(signalEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_SIGNAL_WAITING);
assertThat(signalEvent.getActivityId()).isEqualTo("boundarySignalEventCatching");
assertThat(signalEvent.getProcessInstanceId()).isEqualTo(executionWithSignal.getProcessInstanceId());
assertThat(signalEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(listener.getEventsReceived().get(1)).isInstanceOf(FlowableSignalEventImpl.class);
signalEvent = (FlowableSignalEventImpl) listener.getEventsReceived().get(1);
assertThat(signalEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_SIGNALED);
assertThat(signalEvent.getActivityId()).isEqualTo("boundarySignalEventCatching");
assertThat(signalEvent.getProcessInstanceId()).isEqualTo(executionWithSignal.getProcessInstanceId());
assertThat(signalEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(listener.getEventsReceived().get(2)).isInstanceOf(FlowableActivityCancelledEvent.class);
FlowableActivityCancelledEvent cancelEvent = (FlowableActivityCancelledEvent) listener.getEventsReceived().get(2);
assertThat(cancelEvent.getType()).isEqualTo(FlowableEngineEventType.ACTIVITY_CANCELLED);
assertThat(cancelEvent.getActivityId()).isEqualTo("userTask");
assertThat(cancelEvent.getProcessInstanceId()).isEqualTo(executionWithSignal.getProcessInstanceId());
assertThat(cancelEvent.getProcessDefinitionId()).isEqualTo(processInstance.getProcessDefinitionId());
assertThat(cancelEvent.getCause()).isNotNull();
assertThat(cancelEvent.getCause()).isInstanceOf(BoundaryEvent.class);
BoundaryEvent cause = (BoundaryEvent) cancelEvent.getCause();
assertThat(cause.getEventDefinitions().get(0)).isInstanceOf(SignalEventDefinition.class);
SignalEventDefinition signalEventDefinition = ((SignalEventDefinition) cause.getEventDefinitions().get(0));
assertThat(signalEventDefinition.getSignalRef()).isEqualTo("signal");
assertThat(repositoryService.getBpmnModel(cancelEvent.getProcessDefinitionId()).getSignal(signalEventDefinition.getSignalRef()).getName())
.isEqualTo("signalName");
}
protected void assertDatabaseEventPresent(FlowableEngineEventType eventType) {
String eventTypeString = eventType.name();
List<EventLogEntry> eventLogEntries = managementService.getEventLogEntries(0L, 100000L);
boolean found = false;
for (EventLogEntry entry : eventLogEntries) {
if (entry.getType().equals(eventTypeString)) {
found = true;
}
}
assertThat(found).isTrue();
}
}
|
|
/*
* JBoss, Home of Professional Open Source
* Copyright 2009, JBoss Inc., and individual contributors as indicated
* by the @authors tag.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.vfs;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.util.Random;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* A provider for temporary physical files and directories.
*
* @author <a href="mailto:david.lloyd@redhat.com">David M. Lloyd</a>
* @author <a href="mailto:ales.justin@jboss.org">Ales Justin</a>
*/
public final class TempFileProvider implements Closeable {
private static final String JBOSS_TMP_DIR_PROPERTY = "jboss.server.temp.dir";
private static final String JVM_TMP_DIR_PROPERTY = "java.io.tmpdir";
private static final File TMP_ROOT;
private static final int RETRIES = 10;
private final AtomicBoolean open = new AtomicBoolean(true);
static {
String configTmpDir = System.getProperty(JBOSS_TMP_DIR_PROPERTY);
if (configTmpDir == null) { configTmpDir = System.getProperty(JVM_TMP_DIR_PROPERTY); }
try {
TMP_ROOT = new File(configTmpDir, "vfs");
TMP_ROOT.mkdirs();
} catch (Exception e) {
throw VFSMessages.MESSAGES.cantSetupTempFileProvider(e);
}
}
/**
* Create a temporary file provider for a given type.
* <p/>
     * This is the same as calling {@link #create(String, ScheduledExecutorService, boolean) create(providerType, executor, false)}.
*
* @param providerType the provider type string (used as a prefix in the temp file dir name)
* @param executor the executor
* @return the new provider
* @throws IOException if an I/O error occurs
*/
public static TempFileProvider create(String providerType, ScheduledExecutorService executor) throws IOException {
return create(providerType, executor, false);
}
/**
* Create a temporary file provider for a given type.
*
* @param providerType The provider type string (used as a prefix in the temp file dir name)
* @param executor Executor which will be used to manage temp file provider tasks (like cleaning up/deleting the temp files when needed)
     * @param cleanExisting If true, this method will <i>try</i> to delete the existing temp content (if any) for the <code>providerType</code>. The deletion attempt
     *                      is made in the background and this method does not wait for it to complete; it returns immediately with a usable {@link TempFileProvider}. Note that
     *                      <code>cleanExisting</code> is only a hint: the method may not always be able to delete the existing content.
* @return The new provider
* @throws IOException if an I/O error occurs
*/
public static TempFileProvider create(final String providerType, final ScheduledExecutorService executor, final boolean cleanExisting) throws IOException {
if (cleanExisting) {
try {
// The "clean existing" logic is as follows:
// 1) Rename the root directory "foo" corresponding to the provider type to "bar"
                // 2) Submit a task to delete "bar" and its contents, in a background thread, to the passed executor.
// 3) Create a "foo" root directory for the provider type and return that TempFileProvider (while at the same time the background task is in progress)
// This ensures that the "foo" root directory for the providerType is empty and the older content is being cleaned up in the background (without affecting the current processing),
// thus simulating a "cleanup existing content"
final File possiblyExistingProviderRoot = new File(TMP_ROOT, providerType);
if (possiblyExistingProviderRoot.exists()) {
// rename it so that it can be deleted as a separate (background) task
final File toBeDeletedProviderRoot = new File(TMP_ROOT, createTempName(providerType + "-to-be-deleted-", ""));
final boolean renamed = possiblyExistingProviderRoot.renameTo(toBeDeletedProviderRoot);
if (!renamed) {
throw new IOException("Failed to rename " + possiblyExistingProviderRoot.getAbsolutePath() + " to " + toBeDeletedProviderRoot.getAbsolutePath());
} else {
// delete in the background
executor.submit(new DeleteTask(toBeDeletedProviderRoot, executor));
}
}
} catch (Throwable t) {
// just log a message if existing contents couldn't be deleted
VFSLogger.ROOT_LOGGER.failedToCleanExistingContentForTempFileProvider(providerType);
// log the cause of the failure
VFSLogger.ROOT_LOGGER.debug("Failed to clean existing content for temp file provider of type " + providerType, t);
}
}
// now create and return the TempFileProvider for the providerType
final File providerRoot = new File(TMP_ROOT, providerType);
return new TempFileProvider(createTempDir(providerType, "", providerRoot), executor);
}
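    // Illustrative usage sketch (not part of the original class): one way a caller might create a
    // provider, allocate a temp dir and dispose of everything afterwards. The "deployment" provider
    // type, the pool size and the archive name are arbitrary assumptions made for this example.
    @SuppressWarnings("unused")
    private static void exampleUsage() throws IOException {
        final ScheduledExecutorService executor = java.util.concurrent.Executors.newScheduledThreadPool(1);
        // cleanExisting = true: leftover "deployment" content is renamed and deleted in the background
        final TempFileProvider provider = TempFileProvider.create("deployment", executor, true);
        final TempDir tempDir = provider.createTempDir("my-archive.war");
        try {
            // ... unpack or stage files under tempDir here ...
        } finally {
            provider.close();    // deletes the provider root; failed deletions are retried by DeleteTask
            executor.shutdown();
        }
    }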
private final File providerRoot;
private final ScheduledExecutorService executor;
File getProviderRoot() {
return providerRoot;
}
private TempFileProvider(File providerRoot, ScheduledExecutorService executor) {
this.providerRoot = providerRoot;
this.executor = executor;
}
/**
* Create a temp directory, into which temporary files may be placed.
*
* @param originalName the original file name
* @return the temp directory
* @throws IOException for any error
*/
public TempDir createTempDir(String originalName) throws IOException {
if (!open.get()) {
throw VFSMessages.MESSAGES.tempFileProviderClosed();
}
final String name = createTempName(originalName + "-", "");
final File f = new File(providerRoot, name);
for (int i = 0; i < RETRIES; i++) {
if (f.mkdirs()) {
return new TempDir(this, f);
}
}
        throw VFSMessages.MESSAGES.couldNotCreateDirectory(originalName, RETRIES);
}
private static final Random rng = new Random();
private static File createTempDir(String prefix, String suffix, File root) throws IOException {
for (int i = 0; i < RETRIES; i++) {
final File f = new File(root, createTempName(prefix, suffix));
if (f.mkdirs()) {
                if (f.isDirectory() && f.getParent() != null) {
f.delete();
}
return f;
}
}
throw VFSMessages.MESSAGES.couldNotCreateDirectoryForRoot(
root,
prefix,
suffix,
RETRIES);
}
static String createTempName(String prefix, String suffix) {
return prefix + Long.toHexString(rng.nextLong()) + suffix;
}
/**
* Close this provider and delete any temp files associated with it.
*/
public void close() throws IOException {
if (open.getAndSet(false)) {
delete(this.providerRoot);
}
}
protected void finalize() {
VFSUtils.safeClose(this);
}
/**
* Deletes any temp files associated with this provider
*
* @throws IOException
*/
void delete(final File root) throws IOException {
new DeleteTask(root, executor).run();
}
static final class DeleteTask implements Runnable {
private final File root;
private ScheduledExecutorService retryExecutor;
public DeleteTask(final File root, final ScheduledExecutorService retryExecutor) {
this.root = root;
this.retryExecutor = retryExecutor;
}
public void run() {
            if (!VFSUtils.recursiveDelete(root)) {
if (retryExecutor != null) {
VFSLogger.ROOT_LOGGER.tracef("Failed to delete root (%s), retrying in 30sec.", root);
retryExecutor.schedule(this, 30L, TimeUnit.SECONDS);
} else {
VFSLogger.ROOT_LOGGER.tracef("Failed to delete root (%s).", root);
}
}
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.entries;
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
import java.util.concurrent.atomic.AtomicLongFieldUpdater;
import org.apache.geode.cache.EntryEvent;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
import org.apache.geode.internal.cache.InternalRegion;
import org.apache.geode.internal.cache.RegionEntryContext;
import org.apache.geode.internal.cache.Token;
import org.apache.geode.internal.cache.versions.VersionSource;
import org.apache.geode.internal.cache.versions.VersionStamp;
import org.apache.geode.internal.cache.versions.VersionTag;
import org.apache.geode.internal.offheap.OffHeapRegionEntryHelper;
import org.apache.geode.internal.offheap.annotations.Released;
import org.apache.geode.internal.offheap.annotations.Retained;
import org.apache.geode.internal.offheap.annotations.Unretained;
import org.apache.geode.internal.util.concurrent.CustomEntryConcurrentHashMap.HashEntry;
/*
* macros whose definition changes this class:
*
* disk: DISK lru: LRU stats: STATS versioned: VERSIONED offheap: OFFHEAP
*
* One of the following key macros must be defined:
*
* key object: KEY_OBJECT key int: KEY_INT key long: KEY_LONG key uuid: KEY_UUID key string1:
* KEY_STRING1 key string2: KEY_STRING2
*/
/**
* Do not modify this class. It was generated. Instead modify LeafRegionEntry.cpp and then run
* ./dev-tools/generateRegionEntryClasses.sh (it must be run from the top level directory).
*/
public class VersionedThinRegionEntryOffHeapStringKey1 extends VersionedThinRegionEntryOffHeap {
// --------------------------------------- common fields ----------------------------------------
private static final AtomicLongFieldUpdater<VersionedThinRegionEntryOffHeapStringKey1> LAST_MODIFIED_UPDATER =
AtomicLongFieldUpdater.newUpdater(VersionedThinRegionEntryOffHeapStringKey1.class,
"lastModified");
protected int hash;
private HashEntry<Object, Object> nextEntry;
@SuppressWarnings("unused")
private volatile long lastModified;
// --------------------------------------- offheap fields ---------------------------------------
/**
* All access done using OFF_HEAP_ADDRESS_UPDATER so it is used even though the compiler can not
* tell it is.
*/
@SuppressWarnings("unused")
@Retained
@Released
private volatile long offHeapAddress;
/**
   * I needed to add this because I wanted clear() to be able to call setValue, which normally can only
   * be called while the region entry is synced. But if I sync in that code it causes a lock ordering
   * deadlock with the disk regions because they also get a read/write lock in clear. Some hardware
   * platforms do not support CAS on a long. If gemfire is run on one of those the AtomicLongFieldUpdater
   * does a sync on the RegionEntry and we will once again be deadlocked. I don't know if we support any
   * of the hardware platforms that do not have a 64bit CAS. If we do then we can expect deadlocks on
   * disk regions.
*/
private static final AtomicLongFieldUpdater<VersionedThinRegionEntryOffHeapStringKey1> OFF_HEAP_ADDRESS_UPDATER =
AtomicLongFieldUpdater.newUpdater(VersionedThinRegionEntryOffHeapStringKey1.class,
"offHeapAddress");
// ------------------------------------- versioned fields ---------------------------------------
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
private VersionSource memberId;
private short entryVersionLowBytes;
private short regionVersionHighBytes;
private int regionVersionLowBytes;
private byte entryVersionHighByte;
private byte distributedSystemId;
// --------------------------------------- key fields -------------------------------------------
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
private final long bits1;
public VersionedThinRegionEntryOffHeapStringKey1(final RegionEntryContext context,
final String key, @Retained final Object value, final boolean byteEncode) {
super(context, value);
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
// caller has already confirmed that key.length <= MAX_INLINE_STRING_KEY
long tempBits1 = 0L;
if (byteEncode) {
for (int i = key.length() - 1; i >= 0; i--) {
// Note: we know each byte is <= 0x7f so the "& 0xff" is not needed. But I added it in to
// keep findbugs happy.
tempBits1 |= (byte) key.charAt(i) & 0xff;
tempBits1 <<= 8;
}
tempBits1 |= 1 << 6;
} else {
for (int i = key.length() - 1; i >= 0; i--) {
tempBits1 |= key.charAt(i);
tempBits1 <<= 16;
}
}
tempBits1 |= key.length();
bits1 = tempBits1;
}
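  // Worked example (added for illustration; not part of the generated source): with char encoding
  // (byteEncode == false) the two-character key "ab" is packed as
  //   bits1 = ((('b' << 16) | 'a') << 16) | 2 = 0x6200610002
  // i.e. the low 6 bits hold the key length (2) and getKey() below recovers the characters by
  // shifting 16 bits at a time.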
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
@Override
public Token getValueAsToken() {
return OffHeapRegionEntryHelper.getValueAsToken(this);
}
@Override
protected Object getValueField() {
return OffHeapRegionEntryHelper._getValue(this);
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
@Override
@Unretained
protected void setValueField(@Unretained final Object value) {
OffHeapRegionEntryHelper.setValue(this, value);
}
@Override
@Retained
public Object getValueRetain(final RegionEntryContext context, final boolean decompress) {
return OffHeapRegionEntryHelper._getValueRetain(this, decompress, context);
}
@Override
public long getAddress() {
return OFF_HEAP_ADDRESS_UPDATER.get(this);
}
@Override
public boolean setAddress(final long expectedAddress, long newAddress) {
return OFF_HEAP_ADDRESS_UPDATER.compareAndSet(this, expectedAddress, newAddress);
}
@Override
@Released
public void release() {
OffHeapRegionEntryHelper.releaseEntry(this);
}
@Override
public void returnToPool() {
// never implemented
}
@Override
protected long getLastModifiedField() {
return LAST_MODIFIED_UPDATER.get(this);
}
@Override
protected boolean compareAndSetLastModifiedField(final long expectedValue, final long newValue) {
return LAST_MODIFIED_UPDATER.compareAndSet(this, expectedValue, newValue);
}
@Override
public int getEntryHash() {
return hash;
}
@Override
protected void setEntryHash(final int hash) {
this.hash = hash;
}
@Override
public HashEntry<Object, Object> getNextEntry() {
return nextEntry;
}
@Override
public void setNextEntry(final HashEntry<Object, Object> nextEntry) {
this.nextEntry = nextEntry;
}
// -------------------------------------- versioned code ----------------------------------------
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
@Override
public int getEntryVersion() {
return ((entryVersionHighByte << 16) & 0xFF0000) | (entryVersionLowBytes & 0xFFFF);
}
@Override
public long getRegionVersion() {
return (((long) regionVersionHighBytes) << 32) | (regionVersionLowBytes & 0x00000000FFFFFFFFL);
}
@Override
public long getVersionTimeStamp() {
return getLastModified();
}
@Override
public void setVersionTimeStamp(final long timeStamp) {
setLastModified(timeStamp);
}
@Override
public VersionSource getMemberID() {
return memberId;
}
@Override
public int getDistributedSystemId() {
return distributedSystemId;
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
@Override
public void setVersions(final VersionTag versionTag) {
memberId = versionTag.getMemberID();
int eVersion = versionTag.getEntryVersion();
entryVersionLowBytes = (short) (eVersion & 0xffff);
entryVersionHighByte = (byte) ((eVersion & 0xff0000) >> 16);
regionVersionHighBytes = versionTag.getRegionVersionHighBytes();
regionVersionLowBytes = versionTag.getRegionVersionLowBytes();
if (!versionTag.isGatewayTag()
&& distributedSystemId == versionTag.getDistributedSystemId()) {
if (getVersionTimeStamp() <= versionTag.getVersionTimeStamp()) {
setVersionTimeStamp(versionTag.getVersionTimeStamp());
} else {
versionTag.setVersionTimeStamp(getVersionTimeStamp());
}
} else {
setVersionTimeStamp(versionTag.getVersionTimeStamp());
}
distributedSystemId = (byte) (versionTag.getDistributedSystemId() & 0xff);
}
@Override
public void setMemberID(final VersionSource memberId) {
this.memberId = memberId;
}
@Override
public VersionStamp getVersionStamp() {
return this;
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
@Override
public VersionTag asVersionTag() {
VersionTag tag = VersionTag.create(memberId);
tag.setEntryVersion(getEntryVersion());
tag.setRegionVersion(regionVersionHighBytes, regionVersionLowBytes);
tag.setVersionTimeStamp(getVersionTimeStamp());
tag.setDistributedSystemId(distributedSystemId);
return tag;
}
@Override
public void processVersionTag(final InternalRegion region, final VersionTag versionTag,
final boolean isTombstoneFromGII, final boolean hasDelta, final VersionSource versionSource,
final InternalDistributedMember sender, final boolean checkForConflicts) {
basicProcessVersionTag(region, versionTag, isTombstoneFromGII, hasDelta, versionSource, sender,
checkForConflicts);
}
@Override
public void processVersionTag(final EntryEvent cacheEvent) {
// this keeps IDE happy. without it the sender chain becomes confused while browsing this code
super.processVersionTag(cacheEvent);
}
/** get rvv internal high byte. Used by region entries for transferring to storage */
@Override
public short getRegionVersionHighBytes() {
return regionVersionHighBytes;
}
/** get rvv internal low bytes. Used by region entries for transferring to storage */
@Override
public int getRegionVersionLowBytes() {
return regionVersionLowBytes;
}
// ----------------------------------------- key code -------------------------------------------
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
private int getKeyLength() {
return (int) (bits1 & 0x003fL);
}
private int getEncoding() {
// 0 means encoded as char
// 1 means encoded as bytes that are all <= 0x7f;
return (int) (bits1 >> 6) & 0x03;
}
@Override
public Object getKey() {
int keyLength = getKeyLength();
char[] chars = new char[keyLength];
long tempBits1 = bits1;
if (getEncoding() == 1) {
for (int i = 0; i < keyLength; i++) {
tempBits1 >>= 8;
chars[i] = (char) (tempBits1 & 0x00ff);
}
} else {
for (int i = 0; i < keyLength; i++) {
tempBits1 >>= 16;
chars[i] = (char) (tempBits1 & 0x00FFff);
}
}
return new String(chars);
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
@Override
public boolean isKeyEqual(final Object key) {
if (key instanceof String) {
String stringKey = (String) key;
int keyLength = getKeyLength();
if (stringKey.length() == keyLength) {
long tempBits1 = bits1;
if (getEncoding() == 1) {
for (int i = 0; i < keyLength; i++) {
tempBits1 >>= 8;
char character = (char) (tempBits1 & 0x00ff);
if (stringKey.charAt(i) != character) {
return false;
}
}
} else {
for (int i = 0; i < keyLength; i++) {
tempBits1 >>= 16;
char character = (char) (tempBits1 & 0x00FFff);
if (stringKey.charAt(i) != character) {
return false;
}
}
}
return true;
}
}
return false;
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
}
|
|
import java.util.concurrent.atomic.*;
import java.util.*;
class AdaptiveArrayHashSet implements ISet
{
class HNode
{
// points to old HNode
public HNode old;
// bucket array
public AtomicReferenceArray<WFArrayFSet> buckets;
// store the size [for convenience]
public final int size;
// constructor
public HNode(HNode o, int s)
{
old = o;
size = s;
buckets = new AtomicReferenceArray<WFArrayFSet>(size);
}
}
class HelpRecord
{
int curTid;
long lastPhase;
long nextCheck;
private HelpRecord()
{
curTid = -1;
reset();
}
private void reset()
{
curTid = (curTid + 1) % SetBench.THREAD_NUM;
lastPhase = A.get(curTid).priority;
nextCheck = HELPING_DELAY;
}
}
public final static int MIN_BUCKET_NUM = 1;
public final static int MAX_BUCKET_NUM = 1 << 16;
private static final int MAX_TRIAL = 256;
private static final int HELPING_DELAY = 128;
// points to the current hash table
volatile HNode head;
volatile long counter;
private AtomicReferenceArray<WFArrayOp> A;
private HelpRecord [] helpRecords;
// field updaters
private static AtomicReferenceFieldUpdater<AdaptiveArrayHashSet, HNode> headUpdater
= AtomicReferenceFieldUpdater.newUpdater(AdaptiveArrayHashSet.class, HNode.class, "head");
private static AtomicLongFieldUpdater<AdaptiveArrayHashSet> counterUpdater
= AtomicLongFieldUpdater.newUpdater(AdaptiveArrayHashSet.class, "counter");
public AdaptiveArrayHashSet()
{
head = new HNode(null, MIN_BUCKET_NUM);
head.buckets.set(0, new WFArrayFSet());
counter = 0;
A = new AtomicReferenceArray<WFArrayOp>(SetBench.THREAD_NUM);
helpRecords = new HelpRecord[SetBench.THREAD_NUM];
for (int i = 0; i < A.length(); i++) {
WFArrayOp n = new WFArrayOp(-1, -1);
n.priority = Long.MAX_VALUE;
A.set(i, n);
helpRecords[i] = new HelpRecord();
}
}
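    // Illustrative usage (added for this document, not in the original benchmark code): callers are
    // expected to pass their own thread index in [0, SetBench.THREAD_NUM); tid = 0 is an arbitrary choice.
    static void exampleUsage()
    {
        int tid = 0;
        AdaptiveArrayHashSet set = new AdaptiveArrayHashSet();
        set.insert(42, tid);                // may grow the bucket array when the target bucket reports congestion
        boolean present = set.contains(42); // reads from the current bucket array, falling back to the old one
        set.remove(42, tid);
    }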
public boolean insert(int key, int tid)
{
HNode h = head;
int result = apply(WFArrayOp.INSERT, key, tid);
if (Math.abs(result) > 2)
resize(h, true);
return result > 0;
}
public boolean remove(int key, int tid)
{
int result = apply(WFArrayOp.REMOVE, key, tid);
return result > 0;
}
public boolean contains(int key)
{
HNode t = head;
WFArrayFSet b = t.buckets.get(key % t.size);
        // if the bucket has not been initialized yet, fall back to the old table
if (b == null) {
HNode s = t.old;
b = (s == null)
? t.buckets.get(key % t.size)
: s.buckets.get(key % s.size);
}
return b.hasMember(key);
}
public boolean simpleInsert(int key, int tid)
{
return apply(WFArrayOp.INSERT, key, tid) > 0;
}
public boolean simpleRemove(int key, int tid)
{
return apply(WFArrayOp.REMOVE, key, tid) > 0;
}
public boolean grow()
{
HNode h = head;
return resize(h, true);
}
public boolean shrink()
{
HNode h = head;
return resize(h, false);
}
public int getBucketSize()
{
return head.size;
}
public void print()
{
HNode curr = head;
int age = 0;
while (curr != null) {
System.out.println("HashTableNode #" + Integer.toString(age++));
for (int i = 0; i < curr.size; i++) {
System.out.print(" Bucket " + Integer.toString(i) + ": ");
if (curr.buckets.get(i) != null)
curr.buckets.get(i).print();
else
System.out.println();
}
curr = curr.old;
System.out.println();
}
}
void helpIfNeeded(int tid)
{
HelpRecord rec = helpRecords[tid];
if (rec.nextCheck-- == 0) {
WFArrayOp op = A.get(rec.curTid);
if (op.priority != Long.MAX_VALUE && op.priority == rec.lastPhase) {
while (op.priority <= rec.lastPhase) {
HNode t = head;
int i = op.key % t.size;
WFArrayFSet b = t.buckets.get(i);
if (b == null)
helpResize(t, i);
else if (b.invoke(op))
break;
}
}
rec.reset();
}
}
private int apply(int type, int key, int mytid)
{
helpIfNeeded(mytid);
WFArrayOp myop = new WFArrayOp(key, type);
int trial = 0;
while (++trial < MAX_TRIAL) {
HNode t = head;
int i = myop.key % t.size;
WFArrayFSet b = t.buckets.get(i);
if (b == null)
helpResize(t, i);
else if (b.invoke(myop))
return WFArrayFSet.getResponse(myop);
}
return applySlow(myop, mytid);
}
private int applySlow(WFArrayOp myop, int mytid)
{
long prio = counterUpdater.getAndIncrement(this);
myop.priority = prio;
A.set(mytid, myop);
for (int tid = 0; tid < A.length(); tid++) {
WFArrayOp op = A.get(tid);
while (op.priority <= prio) {
HNode t = head;
int i = op.key % t.size;
WFArrayFSet b = t.buckets.get(i);
if (b == null)
helpResize(t, i);
else if (b.invoke(op))
break;
}
}
return WFArrayFSet.getResponse(myop);
}
private boolean resize(HNode t, boolean grow)
{
if ((t.size == MAX_BUCKET_NUM && grow) ||
(t.size == MIN_BUCKET_NUM && !grow))
return false;
if (t == head) {
// make sure we can deprecate t's predecessor
for (int i = 0; i < t.size; i++) {
if (t.buckets.get(i) == null)
helpResize(t, i);
}
// deprecate t's predecessor
t.old = null;
// switch to a new bucket array
if (t == head) {
HNode n = new HNode(t, grow ? t.size * 2 : t.size / 2);
return casHead(t, n);
}
}
return false;
}
private void helpResize(HNode t, int i)
{
WFArrayFSet b = t.buckets.get(i);
HNode s = t.old;
if (b == null && s != null) {
WFArrayFSet set = null;
if (s.size * 2 == t.size) /* growing */ {
WFArrayFSet p = s.buckets.get(i % s.size);
p.freeze();
set = p.split(t.size, i);
}
else /* shrinking */ {
WFArrayFSet p = s.buckets.get(i);
WFArrayFSet q = s.buckets.get(i + t.size);
p.freeze();
q.freeze();
set = p.merge(q);
}
t.buckets.compareAndSet(i, null, set);
}
}
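    // Example of the index arithmetic above (illustrative): when growing from s.size = 4 to t.size = 8,
    // new bucket i = 6 is obtained by freezing old bucket 6 % 4 = 2 and splitting it; when shrinking
    // from 8 to 4, new bucket i = 1 is the merge of frozen old buckets 1 and 1 + 4 = 5.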
private boolean casHead(HNode o, HNode n)
{
return headUpdater.compareAndSet(this, o, n);
}
}
|
|
/**
* Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.financial.convention;
import java.util.Map;
import org.joda.beans.Bean;
import org.joda.beans.BeanBuilder;
import org.joda.beans.BeanDefinition;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.direct.DirectBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import com.opengamma.core.convention.ConventionType;
import com.opengamma.id.ExternalId;
import com.opengamma.id.ExternalIdBundle;
import com.opengamma.util.ArgumentChecker;
/**
* Convention for IMM swaps (swaps with dates generated by a roll date schedule).
*/
@BeanDefinition
public class RollDateSwapConvention extends FinancialConvention {
/**
* Type of the convention.
*/
public static final ConventionType TYPE = ConventionType.of("RollDateSwap");
/** Serialization version */
private static final long serialVersionUID = 1L;
/**
* The roll date convention.
*/
@PropertyDefinition(validate = "notNull")
private ExternalId _rollDateConvention;
/**
* The pay leg convention.
*/
@PropertyDefinition(validate = "notNull")
private ExternalId _payLegConvention;
/**
* The receive leg convention.
*/
@PropertyDefinition(validate = "notNull")
private ExternalId _receiveLegConvention;
/**
* Creates an instance.
*/
RollDateSwapConvention() {
super();
}
/**
* Creates an instance.
*
* @param name the convention name, not null
* @param externalIdBundle the external identifiers for this convention, not null
* @param payLegConvention the pay leg convention, not null
* @param receiveLegConvention the receive leg convention, not null
* @param rollDateConvention the roll date convention, not null
*/
public RollDateSwapConvention(
final String name, final ExternalIdBundle externalIdBundle, final ExternalId payLegConvention,
final ExternalId receiveLegConvention, final ExternalId rollDateConvention) {
super(name, externalIdBundle);
setRollDateConvention(rollDateConvention);
setPayLegConvention(payLegConvention);
setReceiveLegConvention(receiveLegConvention);
}
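  // Illustrative construction sketch (not part of the maintained API); the identifier scheme and
  // values below are hypothetical placeholders, real identifiers come from the convention master.
  static RollDateSwapConvention exampleConvention() {
    return new RollDateSwapConvention(
        "USD Quarterly IMM Swap",
        ExternalIdBundle.of(ExternalId.of("CONVENTION", "USD Quarterly IMM Swap")),
        ExternalId.of("CONVENTION", "USD IMM Fixed Leg"),       // pay leg convention
        ExternalId.of("CONVENTION", "USD IMM 3M Ibor Leg"),     // receive leg convention
        ExternalId.of("CONVENTION", "Quarterly IMM Roll"));     // roll date convention
  }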
//-------------------------------------------------------------------------
/**
* Gets the type identifying this convention.
*
* @return the {@link #TYPE} constant, not null
*/
@Override
public ConventionType getConventionType() {
return TYPE;
}
/**
* Accepts a visitor to manage traversal of the hierarchy.
*
* @param <T> the result type of the visitor
* @param visitor the visitor, not null
* @return the result
*/
@Override
public <T> T accept(final FinancialConventionVisitor<T> visitor) {
ArgumentChecker.notNull(visitor, "visitor");
return visitor.visitIMMSwapConvention(this);
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code RollDateSwapConvention}.
* @return the meta-bean, not null
*/
public static RollDateSwapConvention.Meta meta() {
return RollDateSwapConvention.Meta.INSTANCE;
}
static {
JodaBeanUtils.registerMetaBean(RollDateSwapConvention.Meta.INSTANCE);
}
@Override
public RollDateSwapConvention.Meta metaBean() {
return RollDateSwapConvention.Meta.INSTANCE;
}
//-----------------------------------------------------------------------
/**
* Gets the roll date convention.
* @return the value of the property, not null
*/
public ExternalId getRollDateConvention() {
return _rollDateConvention;
}
/**
* Sets the roll date convention.
* @param rollDateConvention the new value of the property, not null
*/
public void setRollDateConvention(ExternalId rollDateConvention) {
JodaBeanUtils.notNull(rollDateConvention, "rollDateConvention");
this._rollDateConvention = rollDateConvention;
}
/**
   * Gets the {@code rollDateConvention} property.
* @return the property, not null
*/
public final Property<ExternalId> rollDateConvention() {
return metaBean().rollDateConvention().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the pay leg convention.
* @return the value of the property, not null
*/
public ExternalId getPayLegConvention() {
return _payLegConvention;
}
/**
* Sets the pay leg convention.
* @param payLegConvention the new value of the property, not null
*/
public void setPayLegConvention(ExternalId payLegConvention) {
JodaBeanUtils.notNull(payLegConvention, "payLegConvention");
this._payLegConvention = payLegConvention;
}
/**
   * Gets the {@code payLegConvention} property.
* @return the property, not null
*/
public final Property<ExternalId> payLegConvention() {
return metaBean().payLegConvention().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the receive leg convention.
* @return the value of the property, not null
*/
public ExternalId getReceiveLegConvention() {
return _receiveLegConvention;
}
/**
* Sets the receive leg convention.
* @param receiveLegConvention the new value of the property, not null
*/
public void setReceiveLegConvention(ExternalId receiveLegConvention) {
JodaBeanUtils.notNull(receiveLegConvention, "receiveLegConvention");
this._receiveLegConvention = receiveLegConvention;
}
/**
   * Gets the {@code receiveLegConvention} property.
* @return the property, not null
*/
public final Property<ExternalId> receiveLegConvention() {
return metaBean().receiveLegConvention().createProperty(this);
}
//-----------------------------------------------------------------------
@Override
public RollDateSwapConvention clone() {
return JodaBeanUtils.cloneAlways(this);
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj != null && obj.getClass() == this.getClass()) {
RollDateSwapConvention other = (RollDateSwapConvention) obj;
return JodaBeanUtils.equal(getRollDateConvention(), other.getRollDateConvention()) &&
JodaBeanUtils.equal(getPayLegConvention(), other.getPayLegConvention()) &&
JodaBeanUtils.equal(getReceiveLegConvention(), other.getReceiveLegConvention()) &&
super.equals(obj);
}
return false;
}
@Override
public int hashCode() {
int hash = 7;
hash = hash * 31 + JodaBeanUtils.hashCode(getRollDateConvention());
hash = hash * 31 + JodaBeanUtils.hashCode(getPayLegConvention());
hash = hash * 31 + JodaBeanUtils.hashCode(getReceiveLegConvention());
return hash ^ super.hashCode();
}
@Override
public String toString() {
StringBuilder buf = new StringBuilder(128);
buf.append("RollDateSwapConvention{");
int len = buf.length();
toString(buf);
if (buf.length() > len) {
buf.setLength(buf.length() - 2);
}
buf.append('}');
return buf.toString();
}
@Override
protected void toString(StringBuilder buf) {
super.toString(buf);
buf.append("rollDateConvention").append('=').append(JodaBeanUtils.toString(getRollDateConvention())).append(',').append(' ');
buf.append("payLegConvention").append('=').append(JodaBeanUtils.toString(getPayLegConvention())).append(',').append(' ');
buf.append("receiveLegConvention").append('=').append(JodaBeanUtils.toString(getReceiveLegConvention())).append(',').append(' ');
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code RollDateSwapConvention}.
*/
public static class Meta extends FinancialConvention.Meta {
/**
* The singleton instance of the meta-bean.
*/
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code rollDateConvention} property.
*/
private final MetaProperty<ExternalId> _rollDateConvention = DirectMetaProperty.ofReadWrite(
this, "rollDateConvention", RollDateSwapConvention.class, ExternalId.class);
/**
* The meta-property for the {@code payLegConvention} property.
*/
private final MetaProperty<ExternalId> _payLegConvention = DirectMetaProperty.ofReadWrite(
this, "payLegConvention", RollDateSwapConvention.class, ExternalId.class);
/**
* The meta-property for the {@code receiveLegConvention} property.
*/
private final MetaProperty<ExternalId> _receiveLegConvention = DirectMetaProperty.ofReadWrite(
this, "receiveLegConvention", RollDateSwapConvention.class, ExternalId.class);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap(
this, (DirectMetaPropertyMap) super.metaPropertyMap(),
"rollDateConvention",
"payLegConvention",
"receiveLegConvention");
/**
* Restricted constructor.
*/
protected Meta() {
}
@Override
protected MetaProperty<?> metaPropertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case 509875100: // rollDateConvention
return _rollDateConvention;
case 774631511: // payLegConvention
return _payLegConvention;
case -560732676: // receiveLegConvention
return _receiveLegConvention;
}
return super.metaPropertyGet(propertyName);
}
@Override
public BeanBuilder<? extends RollDateSwapConvention> builder() {
return new DirectBeanBuilder<RollDateSwapConvention>(new RollDateSwapConvention());
}
@Override
public Class<? extends RollDateSwapConvention> beanType() {
return RollDateSwapConvention.class;
}
@Override
public Map<String, MetaProperty<?>> metaPropertyMap() {
return _metaPropertyMap$;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code rollDateConvention} property.
* @return the meta-property, not null
*/
public final MetaProperty<ExternalId> rollDateConvention() {
return _rollDateConvention;
}
/**
* The meta-property for the {@code payLegConvention} property.
* @return the meta-property, not null
*/
public final MetaProperty<ExternalId> payLegConvention() {
return _payLegConvention;
}
/**
* The meta-property for the {@code receiveLegConvention} property.
* @return the meta-property, not null
*/
public final MetaProperty<ExternalId> receiveLegConvention() {
return _receiveLegConvention;
}
//-----------------------------------------------------------------------
@Override
protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
switch (propertyName.hashCode()) {
case 509875100: // rollDateConvention
return ((RollDateSwapConvention) bean).getRollDateConvention();
case 774631511: // payLegConvention
return ((RollDateSwapConvention) bean).getPayLegConvention();
case -560732676: // receiveLegConvention
return ((RollDateSwapConvention) bean).getReceiveLegConvention();
}
return super.propertyGet(bean, propertyName, quiet);
}
@Override
protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
switch (propertyName.hashCode()) {
case 509875100: // rollDateConvention
((RollDateSwapConvention) bean).setRollDateConvention((ExternalId) newValue);
return;
case 774631511: // payLegConvention
((RollDateSwapConvention) bean).setPayLegConvention((ExternalId) newValue);
return;
case -560732676: // receiveLegConvention
((RollDateSwapConvention) bean).setReceiveLegConvention((ExternalId) newValue);
return;
}
super.propertySet(bean, propertyName, newValue, quiet);
}
@Override
protected void validate(Bean bean) {
JodaBeanUtils.notNull(((RollDateSwapConvention) bean)._rollDateConvention, "rollDateConvention");
JodaBeanUtils.notNull(((RollDateSwapConvention) bean)._payLegConvention, "payLegConvention");
JodaBeanUtils.notNull(((RollDateSwapConvention) bean)._receiveLegConvention, "receiveLegConvention");
super.validate(bean);
}
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
|
|
/*
* (c) Copyright Christian P. Fries, Germany. Contact: email@christian-fries.de.
*
* Created on 06.11.2015
*/
package net.finmath.montecarlo.interestrate.products;
import java.time.LocalDate;
import java.time.Month;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import net.finmath.exception.CalculationException;
import net.finmath.marketdata.model.AnalyticModel;
import net.finmath.marketdata.model.AnalyticModelFromCurvesAndVols;
import net.finmath.marketdata.model.curves.Curve;
import net.finmath.marketdata.model.curves.CurveInterpolation;
import net.finmath.marketdata.model.curves.DiscountCurve;
import net.finmath.marketdata.model.curves.ForwardCurve;
import net.finmath.marketdata.model.curves.ForwardCurveInterpolation;
import net.finmath.montecarlo.interestrate.CalibrationProduct;
import net.finmath.montecarlo.interestrate.LIBORMarketModel;
import net.finmath.montecarlo.interestrate.LIBORModelMonteCarloSimulationModel;
import net.finmath.montecarlo.interestrate.LIBORMonteCarloSimulationFromLIBORModel;
import net.finmath.montecarlo.interestrate.models.LIBORMarketModelFromCovarianceModel;
import net.finmath.montecarlo.interestrate.models.LIBORMarketModelFromCovarianceModel.Measure;
import net.finmath.montecarlo.interestrate.models.covariance.LIBORCorrelationModelExponentialDecay;
import net.finmath.montecarlo.interestrate.models.covariance.LIBORCovarianceModelFromVolatilityAndCorrelation;
import net.finmath.montecarlo.interestrate.models.covariance.LIBORVolatilityModelFromGivenMatrix;
import net.finmath.montecarlo.process.EulerSchemeFromProcessModel;
import net.finmath.time.TimeDiscretizationFromArray;
import net.finmath.time.businessdaycalendar.BusinessdayCalendar;
import net.finmath.time.businessdaycalendar.BusinessdayCalendarExcludingTARGETHolidays;
/**
* @author Christian Fries
*/
@RunWith(Parameterized.class)
public class SimpleCappedFlooredFloatingRateBondTest {
@Parameters(name="{0}")
public static Collection<Object[]> generateData()
{
return Arrays.asList(new Object[][] {
{ Measure.SPOT }, { Measure.TERMINAL }
});
}
private final int numberOfPaths = 10000;
private final Measure measure;
public SimpleCappedFlooredFloatingRateBondTest(final Measure measure) {
// Store measure
this.measure = measure;
}
@Test
public void test() throws CalculationException {
/*
* Create Monte-Carlo model
*/
final LIBORModelMonteCarloSimulationModel model = createLIBORMarketModel(numberOfPaths, measure);
/*
* Create Product
*/
final double[] fixingDates = (new TimeDiscretizationFromArray(0.0, 9, 0.5)).getAsDoubleArray();
final double[] paymentDates = (new TimeDiscretizationFromArray(0.5, 9, 0.5)).getAsDoubleArray();
final double maturity = 0.5 + 9 * 0.5;
final double[] floors = null;
final double[] caps = null;
final double[] spreads = null;
final AbstractTermStructureMonteCarloProduct product = new SimpleCappedFlooredFloatingRateBond("", fixingDates, paymentDates, spreads, floors, caps, maturity);
final double value = product.getValue(model);
System.out.println("Value of floating rate bond (measure = " + measure + "): " + value);
if(measure == Measure.SPOT) {
Assert.assertEquals("Value of floating rate bond.", 1.0, value, 1E-10);
}
if(measure == Measure.TERMINAL) {
Assert.assertEquals("Value of floating rate bond.", 1.0, value, 2E-2);
}
}
public static LIBORModelMonteCarloSimulationModel createLIBORMarketModel(final int numberOfPaths, final Measure measure) throws CalculationException {
final LocalDate referenceDate = LocalDate.of(2014, Month.AUGUST, 12);
// Create the forward curve (initial value of the LIBOR market model)
final ForwardCurve forwardCurve = ForwardCurveInterpolation.createForwardCurveFromForwards(
"forwardCurve" /* name of the curve */,
referenceDate,
"6M",
new BusinessdayCalendarExcludingTARGETHolidays(),
BusinessdayCalendar.DateRollConvention.FOLLOWING,
CurveInterpolation.InterpolationMethod.LINEAR,
CurveInterpolation.ExtrapolationMethod.CONSTANT,
CurveInterpolation.InterpolationEntity.VALUE,
ForwardCurveInterpolation.InterpolationEntityForward.FORWARD,
null,
null,
new double[] {0.5 , 1.0 , 2.0 , 5.0 , 40.0} /* fixings of the forward */,
new double[] {0.05, 0.05, 0.05, 0.05, 0.05} /* forwards */
);
// No discount curve - single curve model
final DiscountCurve discountCurve = null;
// AnalyticModel model = new AnalyticModelFromCuvesAndVols(new CurveInterface[] { forwardCurve , discountCurve });
final AnalyticModel model = new AnalyticModelFromCurvesAndVols(new Curve[] { forwardCurve });
/*
* Create the libor tenor structure and the initial values
*/
final double liborPeriodLength = 0.5;
		final double liborRateTimeHorizon = 40.0;
		final TimeDiscretizationFromArray liborPeriodDiscretization = new TimeDiscretizationFromArray(0.0, (int) (liborRateTimeHorizon / liborPeriodLength), liborPeriodLength);
/*
* Create a simulation time discretization
*/
final double lastTime = 40.0;
final double dt = 0.5;
final TimeDiscretizationFromArray timeDiscretizationFromArray = new TimeDiscretizationFromArray(0.0, (int) (lastTime / dt), dt);
/*
* Create a volatility structure v[i][j] = sigma_j(t_i)
*/
final double[][] volatility = new double[timeDiscretizationFromArray.getNumberOfTimeSteps()][liborPeriodDiscretization.getNumberOfTimeSteps()];
for (int timeIndex = 0; timeIndex < volatility.length; timeIndex++) {
for (int liborIndex = 0; liborIndex < volatility[timeIndex].length; liborIndex++) {
// Create a very simple volatility model here
final double time = timeDiscretizationFromArray.getTime(timeIndex);
final double maturity = liborPeriodDiscretization.getTime(liborIndex);
final double timeToMaturity = maturity - time;
double instVolatility;
if(timeToMaturity <= 0) {
instVolatility = 0; // This forward rate is already fixed, no volatility
} else {
instVolatility = 0.3 + 0.2 * Math.exp(-0.25 * timeToMaturity);
}
// Store
volatility[timeIndex][liborIndex] = instVolatility;
}
}
final LIBORVolatilityModelFromGivenMatrix volatilityModel = new LIBORVolatilityModelFromGivenMatrix(timeDiscretizationFromArray, liborPeriodDiscretization, volatility);
/*
* Create a correlation model rho_{i,j} = exp(-a * abs(T_i-T_j))
*/
final int numberOfFactors = 5;
final double correlationDecayParam = 0.2;
final LIBORCorrelationModelExponentialDecay correlationModel = new LIBORCorrelationModelExponentialDecay(
timeDiscretizationFromArray, liborPeriodDiscretization, numberOfFactors,
correlationDecayParam);
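		// Illustrative arithmetic (added note): with decay parameter a = 0.2 the model above gives,
		// e.g., rho_{i,j} = exp(-0.2 * 1.0), which is about 0.82, for forward rates one year apart.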
/*
* Combine volatility model and correlation model to a covariance model
*/
final LIBORCovarianceModelFromVolatilityAndCorrelation covarianceModel =
new LIBORCovarianceModelFromVolatilityAndCorrelation(timeDiscretizationFromArray,
liborPeriodDiscretization, volatilityModel, correlationModel);
// BlendedLocalVolatlityModel (future extension)
// AbstractLIBORCovarianceModel covarianceModel2 = new BlendedLocalVolatlityModel(covarianceModel, 0.00, false);
// Set model properties
final Map<String, String> properties = new HashMap<>();
// Choose the simulation measure
properties.put("measure", measure.name());
// Choose log normal model
properties.put("stateSpace", LIBORMarketModelFromCovarianceModel.StateSpace.LOGNORMAL.name());
// Empty array of calibration items - hence, model will use given covariance
final CalibrationProduct[] calibrationItems = new CalibrationProduct[0];
/*
* Create corresponding LIBOR Market Model
*/
final LIBORMarketModel liborMarketModel = new LIBORMarketModelFromCovarianceModel(
liborPeriodDiscretization, model, forwardCurve, discountCurve, covarianceModel, calibrationItems, properties);
final EulerSchemeFromProcessModel process = new EulerSchemeFromProcessModel(liborMarketModel,
new net.finmath.montecarlo.BrownianMotionLazyInit(timeDiscretizationFromArray,
numberOfFactors, numberOfPaths, 3141 /* seed */), EulerSchemeFromProcessModel.Scheme.PREDICTOR_CORRECTOR);
return new LIBORMonteCarloSimulationFromLIBORModel(liborMarketModel, process);
}
}
|
|
/**
* Copyright 2012-2015 ContactLab, Italy
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.contactlab.api.ws.domain;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for CampaignFeedback complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="CampaignFeedback">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="campaignIdentifier" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="campaignSize" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="views" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="uniqueViews" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="hiddenViews" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="webMailViews" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="clicks" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="uniqueClicks" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="bounces" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="fblComplaints" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="tafClicks" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="uniqueTafClicks" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="blacklistMatches" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="listunsubscribe" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="delivered" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="cancelled" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="temporaryBlacklistMatches" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="errors" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="unsubscribed" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="updates" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="temporaryBlacklisted" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="blacklisted" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="trackingDetails" type="{domain.ws.api.contactlab.com}TrackedLink" maxOccurs="unbounded" minOccurs="0"/>
* <element name="bounceDetails" type="{domain.ws.api.contactlab.com}BounceDetail" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "CampaignFeedback", propOrder = {
"campaignIdentifier",
"campaignSize",
"views",
"uniqueViews",
"hiddenViews",
"webMailViews",
"clicks",
"uniqueClicks",
"bounces",
"fblComplaints",
"tafClicks",
"uniqueTafClicks",
"blacklistMatches",
"listunsubscribe",
"delivered",
"cancelled",
"temporaryBlacklistMatches",
"errors",
"unsubscribed",
"updates",
"temporaryBlacklisted",
"blacklisted",
"trackingDetails",
"bounceDetails"
})
public class CampaignFeedback {
protected Integer campaignIdentifier;
protected Integer campaignSize;
protected Integer views;
protected Integer uniqueViews;
protected Integer hiddenViews;
protected Integer webMailViews;
protected Integer clicks;
protected Integer uniqueClicks;
protected Integer bounces;
protected Integer fblComplaints;
protected Integer tafClicks;
protected Integer uniqueTafClicks;
protected Integer blacklistMatches;
protected Integer listunsubscribe;
protected Integer delivered;
protected Integer cancelled;
protected Integer temporaryBlacklistMatches;
protected Integer errors;
protected Integer unsubscribed;
protected Integer updates;
protected Integer temporaryBlacklisted;
protected Integer blacklisted;
@XmlElement(nillable = true)
protected List<TrackedLink> trackingDetails;
@XmlElement(nillable = true)
protected List<BounceDetail> bounceDetails;
/**
* Gets the value of the campaignIdentifier property.
*
* @return
* possible object is
* {@link Integer }
*
*/
public Integer getCampaignIdentifier() {
return campaignIdentifier;
}
/**
* Sets the value of the campaignIdentifier property.
*
* @param value
* allowed object is
* {@link Integer }
*
*/
public void setCampaignIdentifier(Integer value) {
this.campaignIdentifier = value;
}
/**
* Gets the value of the campaignSize property.
*
* @return
* possible object is
* {@link Integer }
*
*/
public Integer getCampaignSize() {
return campaignSize;
}
/**
* Sets the value of the campaignSize property.
*
* @param value
* allowed object is
* {@link Integer }
*
*/
public void setCampaignSize(Integer value) {
this.campaignSize = value;
}
/**
* Gets the value of the views property.
*
* @return
* possible object is
* {@link Integer }
*
*/
public Integer getViews() {
return views;
}
/**
* Sets the value of the views property.
*
* @param value
* allowed object is
* {@link Integer }
*
*/
public void setViews(Integer value) {
this.views = value;
}
/**
* Gets the value of the uniqueViews property.
*
* @return
* possible object is
* {@link Integer }
*
*/
public Integer getUniqueViews() {
return uniqueViews;
}
/**
* Sets the value of the uniqueViews property.
*
* @param value
* allowed object is
* {@link Integer }
*
*/
public void setUniqueViews(Integer value) {
this.uniqueViews = value;
}
/**
* Gets the value of the hiddenViews property.
*
* @return
* possible object is
* {@link Integer }
*
*/
public Integer getHiddenViews() {
return hiddenViews;
}
/**
* Sets the value of the hiddenViews property.
*
* @param value
* allowed object is
* {@link Integer }
*
*/
public void setHiddenViews(Integer value) {
this.hiddenViews = value;
}
/**
* Gets the value of the webMailViews property.
*
* @return
* possible object is
* {@link Integer }
*
*/
public Integer getWebMailViews() {
return webMailViews;
}
/**
* Sets the value of the webMailViews property.
*
* @param value
* allowed object is
* {@link Integer }
*
*/
public void setWebMailViews(Integer value) {
this.webMailViews = value;
}
/**
* Gets the value of the clicks property.
*
* @return
* possible object is
* {@link Integer }
*
*/
public Integer getClicks() {
return clicks;
}
/**
* Sets the value of the clicks property.
*
* @param value
* allowed object is
* {@link Integer }
*
*/
public void setClicks(Integer value) {
this.clicks = value;
}
/**
* Gets the value of the uniqueClicks property.
*
* @return
* possible object is
* {@link Integer }
*
*/
public Integer getUniqueClicks() {
return uniqueClicks;
}
/**
* Sets the value of the uniqueClicks property.
*
* @param value
* allowed object is
* {@link Integer }
*
*/
public void setUniqueClicks(Integer value) {
this.uniqueClicks = value;
}
/**
* Gets the value of the bounces property.
*
* @return
* possible object is
* {@link Integer }
*
*/
public Integer getBounces() {
return bounces;
}
/**
* Sets the value of the bounces property.
*
* @param value
* allowed object is
* {@link Integer }
*
*/
public void setBounces(Integer value) {
this.bounces = value;
}
/**
* Gets the value of the fblComplaints property.
*
* @return
* possible object is
* {@link Integer }
*
*/
public Integer getFblComplaints() {
return fblComplaints;
}
/**
* Sets the value of the fblComplaints property.
*
* @param value
* allowed object is
* {@link Integer }
*
*/
public void setFblComplaints(Integer value) {
this.fblComplaints = value;
}
/**
* Gets the value of the tafClicks property.
*
* @return
* possible object is
* {@link Integer }
*
*/
public Integer getTafClicks() {
return tafClicks;
}
/**
* Sets the value of the tafClicks property.
*
* @param value
* allowed object is
* {@link Integer }
*
*/
public void setTafClicks(Integer value) {
this.tafClicks = value;
}
/**
* Gets the value of the uniqueTafClicks property.
*
* @return
* possible object is
* {@link Integer }
*
*/
public Integer getUniqueTafClicks() {
return uniqueTafClicks;
}
/**
* Sets the value of the uniqueTafClicks property.
*
* @param value
* allowed object is
* {@link Integer }
*
*/
public void setUniqueTafClicks(Integer value) {
this.uniqueTafClicks = value;
}
/**
* Gets the value of the blacklistMatches property.
*
* @return
* possible object is
* {@link Integer }
*
*/
public Integer getBlacklistMatches() {
return blacklistMatches;
}
/**
* Sets the value of the blacklistMatches property.
*
* @param value
* allowed object is
* {@link Integer }
*
*/
public void setBlacklistMatches(Integer value) {
this.blacklistMatches = value;
}
/**
* Gets the value of the listunsubscribe property.
*
* @return
* possible object is
* {@link Integer }
*
*/
public Integer getListunsubscribe() {
return listunsubscribe;
}
/**
* Sets the value of the listunsubscribe property.
*
* @param value
* allowed object is
* {@link Integer }
*
*/
public void setListunsubscribe(Integer value) {
this.listunsubscribe = value;
}
/**
* Gets the value of the delivered property.
*
* @return
* possible object is
* {@link Integer }
*
*/
public Integer getDelivered() {
return delivered;
}
/**
* Sets the value of the delivered property.
*
* @param value
* allowed object is
* {@link Integer }
*
*/
public void setDelivered(Integer value) {
this.delivered = value;
}
/**
* Gets the value of the cancelled property.
*
* @return
* possible object is
* {@link Integer }
*
*/
public Integer getCancelled() {
return cancelled;
}
/**
* Sets the value of the cancelled property.
*
* @param value
* allowed object is
* {@link Integer }
*
*/
public void setCancelled(Integer value) {
this.cancelled = value;
}
/**
* Gets the value of the temporaryBlacklistMatches property.
*
* @return
* possible object is
* {@link Integer }
*
*/
public Integer getTemporaryBlacklistMatches() {
return temporaryBlacklistMatches;
}
/**
* Sets the value of the temporaryBlacklistMatches property.
*
* @param value
* allowed object is
* {@link Integer }
*
*/
public void setTemporaryBlacklistMatches(Integer value) {
this.temporaryBlacklistMatches = value;
}
/**
* Gets the value of the errors property.
*
* @return
* possible object is
* {@link Integer }
*
*/
public Integer getErrors() {
return errors;
}
/**
* Sets the value of the errors property.
*
* @param value
* allowed object is
* {@link Integer }
*
*/
public void setErrors(Integer value) {
this.errors = value;
}
/**
* Gets the value of the unsubscribed property.
*
* @return
* possible object is
* {@link Integer }
*
*/
public Integer getUnsubscribed() {
return unsubscribed;
}
/**
* Sets the value of the unsubscribed property.
*
* @param value
* allowed object is
* {@link Integer }
*
*/
public void setUnsubscribed(Integer value) {
this.unsubscribed = value;
}
/**
* Gets the value of the updates property.
*
* @return
* possible object is
* {@link Integer }
*
*/
public Integer getUpdates() {
return updates;
}
/**
* Sets the value of the updates property.
*
* @param value
* allowed object is
* {@link Integer }
*
*/
public void setUpdates(Integer value) {
this.updates = value;
}
/**
* Gets the value of the temporaryBlacklisted property.
*
* @return
* possible object is
* {@link Integer }
*
*/
public Integer getTemporaryBlacklisted() {
return temporaryBlacklisted;
}
/**
* Sets the value of the temporaryBlacklisted property.
*
* @param value
* allowed object is
* {@link Integer }
*
*/
public void setTemporaryBlacklisted(Integer value) {
this.temporaryBlacklisted = value;
}
/**
* Gets the value of the blacklisted property.
*
* @return
* possible object is
* {@link Integer }
*
*/
public Integer getBlacklisted() {
return blacklisted;
}
/**
* Sets the value of the blacklisted property.
*
* @param value
* allowed object is
* {@link Integer }
*
*/
public void setBlacklisted(Integer value) {
this.blacklisted = value;
}
/**
* Gets the value of the trackingDetails property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the trackingDetails property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getTrackingDetails().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link TrackedLink }
*
*
*/
public List<TrackedLink> getTrackingDetails() {
if (trackingDetails == null) {
trackingDetails = new ArrayList<TrackedLink>();
}
return this.trackingDetails;
}
/**
* Gets the value of the bounceDetails property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the bounceDetails property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getBounceDetails().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link BounceDetail }
*
*
*/
public List<BounceDetail> getBounceDetails() {
if (bounceDetails == null) {
bounceDetails = new ArrayList<BounceDetail>();
}
return this.bounceDetails;
}
}
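/*
 * Usage sketch (illustrative only; the variable name is assumed): because
 * getTrackingDetails() and getBounceDetails() return the live JAXB-backed
 * lists, callers populate the object by mutating the returned list directly,
 * as there is no setter to call:
 *
 *     stats.getTrackingDetails().add(new TrackedLink());
 *     stats.getBounceDetails().add(new BounceDetail());
 */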
|
|
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is part of dcm4che, an implementation of DICOM(TM) in
* Java(TM), hosted at http://sourceforge.net/projects/dcm4che.
*
* The Initial Developer of the Original Code is
* Agfa-Gevaert AG.
* Portions created by the Initial Developer are Copyright (C) 2008
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* See listed authors below.
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
package org.dcm4chee.web.dao.trash;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import javax.ejb.Stateless;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Query;
import org.dcm4che2.data.DicomObject;
import org.dcm4che2.data.Tag;
import org.dcm4che2.data.VR;
import org.dcm4chee.archive.common.PrivateTag;
import org.dcm4chee.archive.entity.BaseEntity;
import org.dcm4chee.archive.entity.PrivateFile;
import org.dcm4chee.archive.entity.PrivateInstance;
import org.dcm4chee.archive.entity.PrivatePatient;
import org.dcm4chee.archive.entity.PrivateSeries;
import org.dcm4chee.archive.entity.PrivateStudy;
import org.dcm4chee.archive.entity.Study;
import org.dcm4chee.web.dao.util.QueryUtil;
import org.jboss.annotation.ejb.LocalBinding;
/**
* @author Robert David <robert.david@agfa.com>
* @version $Revision$ $Date$
* @since June 07, 2010
*/
@Stateless
@LocalBinding (jndiBinding=TrashListLocal.JNDI_NAME)
public class TrashListBean implements TrashListLocal {
@PersistenceContext(unitName="dcm4chee-arc")
private EntityManager em;
private static Comparator<PrivateInstance> instanceComparator = new Comparator<PrivateInstance>() {
public int compare(PrivateInstance o1, PrivateInstance o2) {
String in1 = o1.getAttributes().getString(Tag.InstanceNumber);
String in2 = o2.getAttributes().getString(Tag.InstanceNumber);
return QueryUtil.compareIntegerStringAndPk(o1.getPk(), o2.getPk(), in1, in2);
}
};
private static Comparator<PrivateSeries> seriesComparator = new Comparator<PrivateSeries>() {
public int compare(PrivateSeries o1, PrivateSeries o2) {
String in1 = o1.getAttributes().getString(Tag.SeriesNumber);
String in2 = o2.getAttributes().getString(Tag.SeriesNumber);
return QueryUtil.compareIntegerStringAndPk(o1.getPk(), o2.getPk(), in1, in2);
}
};
public int count(TrashListFilter filter, List<String> roles) {
if ((roles != null) && (roles.size() == 0)) return 0;
StringBuilder ql = new StringBuilder(64);
ql.append("SELECT COUNT(*)");
appendFromClause(ql, filter);
appendWhereClause(ql, filter, roles);
Query query = em.createQuery(ql.toString());
setQueryParameters(query, filter, roles);
return ((Number) query.getSingleResult()).intValue();
}
@SuppressWarnings("unchecked")
public List<PrivatePatient> findPatients(TrashListFilter filter, int pagesize, int offset, List<String> roles) {
if ((roles != null) && (roles.size() == 0)) return new ArrayList<PrivatePatient>();
StringBuilder ql = new StringBuilder(64);
ql.append("SELECT p");
if (!filter.isPatientQuery())
ql.append(", s");
appendFromClause(ql, filter);
appendWhereClause(ql, filter, roles);
QueryUtil.appendOrderBy(ql, new String[]{"p.patientName, p.patientID, p.issuerOfPatientID"});
Query query = em.createQuery(ql.toString());
setQueryParameters(query, filter, roles);
if (filter.isPatientQuery())
return query.setMaxResults(pagesize).setFirstResult(offset).getResultList();
else {
List<Object[]> result = query.setMaxResults(pagesize).setFirstResult(offset).getResultList();
List<PrivatePatient> patientList = new ArrayList<PrivatePatient>();
PrivatePatient patient = null;
for (Object[] element: result) {
patient = (PrivatePatient) element[0];
if (!patientList.contains(patient)) {
patient.setStudies(new HashSet<PrivateStudy>());
patientList.add(patient);
}
patient.getStudies().add((PrivateStudy) element[1]);
}
return patientList;
}
}
private static void appendFromClause(StringBuilder ql, TrashListFilter filter) {
ql.append(" FROM PrivatePatient p");
if (!filter.isPatientQuery())
ql.append(" INNER JOIN p.studies s");
}
private static void appendWhereClause(StringBuilder ql, TrashListFilter filter, List<String> roles) {
ql.append(" WHERE p.privateType = 1");
if ( filter.isPatientQuery()) {
appendPatFilter(ql, filter);
} else {
if ( QueryUtil.isUniversalMatch(filter.getStudyInstanceUID()) ) {
appendPatFilter(ql, filter);
QueryUtil.appendAccessionNumberFilter(ql, QueryUtil.checkAutoWildcard(filter.getAccessionNumber(), filter.isAutoWildcard()));
} else {
ql.append(" AND s.studyInstanceUID = :studyInstanceUID");
}
QueryUtil.appendSourceAETFilter(ql, new String[]{filter.getSourceAET()});
if ((roles != null) && !filter.isPatientQuery())
QueryUtil.appendDicomSecurityFilter(ql);
}
}
private static void appendPatFilter(StringBuilder ql, TrashListFilter filter) {
QueryUtil.appendPatientNameFilter(ql, "p", QueryUtil.checkAutoWildcard(filter.getPatientName(), filter.isPNAutoWildcard()));
QueryUtil.appendPatientIDFilter(ql, "p", QueryUtil.checkAutoWildcard(filter.getPatientID(), filter.isAutoWildcard()));
QueryUtil.appendIssuerOfPatientIDFilter(ql, "p", QueryUtil.checkAutoWildcard(filter.getIssuerOfPatientID(), filter.isAutoWildcard()));
}
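// Note: setQueryParameters must stay in step with appendWhereClause above;
// every named parameter appended to the JPQL string has to be bound here
// under the same conditions, or query execution fails at runtime.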
private static void setQueryParameters(Query query, TrashListFilter filter, List<String> roles) {
if ( filter.isPatientQuery()) {
setPatQueryParameters(query, filter);
} else {
if ( QueryUtil.isUniversalMatch(filter.getStudyInstanceUID()) ) {
setPatQueryParameters(query, filter);
QueryUtil.setAccessionNumberQueryParameter(query, QueryUtil.checkAutoWildcard(filter.getAccessionNumber(), filter.isAutoWildcard()));
} else {
QueryUtil.setStudyInstanceUIDQueryParameter(query, filter.getStudyInstanceUID());
}
QueryUtil.setSourceAETQueryParameter(query, new String[]{filter.getSourceAET()});
if ((roles != null) && !filter.isPatientQuery())
query.setParameter("roles", roles);
}
}
private static void setPatQueryParameters(Query query, TrashListFilter filter) {
QueryUtil.setPatientNameQueryParameter(query, QueryUtil.checkAutoWildcard(filter.getPatientName(), filter.isPNAutoWildcard()));
QueryUtil.setPatientIDQueryParameter(query, QueryUtil.checkAutoWildcard(filter.getPatientID(), filter.isAutoWildcard()));
QueryUtil.setIssuerOfPatientIDQueryParameter(query, QueryUtil.checkAutoWildcard(filter.getIssuerOfPatientID(), filter.isAutoWildcard()));
}
public int countStudiesOfPatient(long pk, List<String> roles) {
if ((roles != null) && (roles.size() == 0)) return 0;
return ((Number) getStudiesOfPatientQuery(true, pk, roles).getSingleResult()).intValue();
}
@SuppressWarnings("unchecked")
public List<PrivateStudy> findStudiesOfPatient(long pk, List<String> roles) {
if ((roles != null) && (roles.size() == 0)) return new ArrayList<PrivateStudy>();
return getStudiesOfPatientQuery(false, pk, roles).getResultList();
}
private Query getStudiesOfPatientQuery(boolean isCount, long pk, List<String> roles) {
StringBuilder ql = new StringBuilder(64);
ql.append("SELECT " + (isCount ? "COUNT(s)" : "s") + " FROM PrivateStudy s WHERE s.patient.pk=?1");
if (roles != null)
QueryUtil.appendDicomSecurityFilter(ql);
Query query = em.createQuery(ql.toString());
query.setParameter(1, pk);
if (roles != null)
query.setParameter("roles", roles);
return query;
}
@SuppressWarnings("unchecked")
public List<PrivateSeries> findSeriesOfStudy(long pk) {
List<PrivateSeries> l = em.createQuery("FROM PrivateSeries s WHERE s.study.pk=?1 ORDER BY s.pk")
.setParameter(1, pk).getResultList();
Collections.sort(l, seriesComparator);
return l;
}
@SuppressWarnings("unchecked")
public List<PrivateInstance> findInstancesOfSeries(long pk) {
List<PrivateInstance> l = em.createQuery("FROM PrivateInstance i WHERE i.series.pk=?1 ORDER BY i.pk")
.setParameter(1, pk)
.getResultList();
Collections.sort(l, instanceComparator);
return l;
}
@SuppressWarnings("unchecked")
public List<String> selectDistinctSourceAETs() {
return em.createQuery("SELECT DISTINCT s.sourceAET FROM Series s WHERE s.sourceAET IS NOT NULL ORDER BY s.sourceAET")
.getResultList();
}
public PrivatePatient getPatient(long pk) {
return em.find(PrivatePatient.class, pk);
}
public PrivateStudy getStudy(long pk) {
return em.find(PrivateStudy.class, pk);
}
public PrivateSeries getSeries(long pk) {
return em.find(PrivateSeries.class, pk);
}
public PrivateInstance getInstance(long pk) {
return em.find(PrivateInstance.class, pk);
}
public void removeTrashEntities(List<Long> pks, Class<? extends BaseEntity> clazz, boolean removeFile) {
if (clazz.equals(PrivatePatient.class)) {
for (Long pk : pks)
removeTrashEntity(getPatient(pk), removeFile);
} else if (clazz.equals(PrivateStudy.class)) {
for (Long pk : pks)
removeTrashEntity(getStudy(pk), removeFile);
} else if (clazz.equals(PrivateSeries.class)) {
for (Long pk : pks)
removeTrashEntity(getSeries(pk), removeFile);
} else if (clazz.equals(PrivateInstance.class)) {
for (Long pk : pks)
removeTrashEntity(getInstance(pk), removeFile);
}
}
private void removeTrashEntity(BaseEntity entity, boolean removeFile) {
if (entity == null) return;
else {
if (entity instanceof PrivatePatient) {
PrivatePatient pp = (PrivatePatient) entity;
for (PrivateStudy pst : pp.getStudies())
removeTrashEntity(pst, removeFile);
em.remove(pp);
} else if (entity instanceof PrivateStudy) {
PrivateStudy pst = (PrivateStudy) entity;
for (PrivateSeries pse : pst.getSeries())
removeTrashEntity(pse, removeFile);
PrivatePatient p = pst.getPatient();
em.remove(pst);
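// the in-memory studies collection still holds the study removed above,
// so a size of at most 1 means this was the patient's last study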
if (p.getStudies().size() <= 1)
em.remove(p);
} else if (entity instanceof PrivateSeries) {
PrivateSeries pse = (PrivateSeries) entity;
for (PrivateInstance pi : pse.getInstances())
removeTrashEntity(pi, removeFile);
PrivateStudy pst = pse.getStudy();
em.remove(pse);
if (pst.getSeries().size() <= 1)
em.remove(pst);
} else if (entity instanceof PrivateInstance) {
PrivateInstance pi = (PrivateInstance) entity;
for (PrivateFile pf : pi.getFiles()) {
if (removeFile) {
em.remove(pf);
} else {
pf.setInstance(null);
em.merge(pf);
}
}
PrivateSeries pse = pi.getSeries();
em.remove(pi);
if (pse.getInstances().size() <= 1)
em.remove(pse);
} else return;
}
}
public void removeTrashAll() {
em.createQuery("UPDATE PrivateFile p SET p.instance = Null").executeUpdate();
em.createQuery("DELETE FROM PrivateInstance pi").executeUpdate();
em.createQuery("DELETE FROM PrivateSeries pse").executeUpdate();
em.createQuery("DELETE FROM PrivateStudy pst").executeUpdate();
em.createQuery("DELETE FROM PrivatePatient pp").executeUpdate();
}
@SuppressWarnings("unchecked")
public List<PrivateFile> getFilesForEntity(long pk, Class<? extends BaseEntity> clazz) {
String query = "SELECT DISTINCT f FROM PrivateFile f LEFT JOIN FETCH f.fileSystem fs ";
if (clazz.equals(PrivateInstance.class))
query += "WHERE f.instance.pk = :pk";
else {
query += "LEFT JOIN f.instance.series se ";
if (clazz.equals(PrivateSeries.class))
query += "WHERE se.pk = :pk";
else {
query += "LEFT JOIN se.study st ";
if (clazz.equals(PrivateStudy.class))
query += "WHERE st.pk = :pk";
else if (clazz.equals(PrivatePatient.class))
query += "LEFT JOIN st.patient p WHERE p.pk = :pk";
else return null;
}
}
return em.createQuery(query)
.setParameter("pk", pk)
.getResultList();
}
@SuppressWarnings("unchecked")
public List<Study> getStudiesInFolder(String[] suids) {
StringBuilder sb = new StringBuilder("SELECT st FROM Study st LEFT JOIN FETCH st.patient WHERE st.studyInstanceUID");
QueryUtil.appendIN(sb, suids.length);
Query q = em.createQuery(sb.toString());
QueryUtil.setParametersForIN(q, suids);
return q.getResultList();
}
@SuppressWarnings("unchecked")
public List<Study> getStudiesInFolder(long privPatPk) {
StringBuilder sb = new StringBuilder("SELECT st FROM Study st LEFT JOIN FETCH st.patient, PrivateStudy pst WHERE pst.patient.pk = :patPk AND st.studyInstanceUID = pst.studyInstanceUID");
Query q = em.createQuery(sb.toString());
q.setParameter("patPk", new Long(privPatPk));
return q.getResultList();
}
public DicomObject getDicomAttributes(long filePk) {
PrivateFile pf = em.find(PrivateFile.class, filePk);
DicomObject dio = pf.getInstance().getAttributes();
pf.getInstance().getSeries().getAttributes().copyTo(dio);
pf.getInstance().getSeries().getStudy().getAttributes().copyTo(dio);
pf.getInstance().getSeries().getStudy().getPatient().getAttributes().copyTo(dio);
dio.putString(dio.resolveTag(PrivateTag.CallingAET, PrivateTag.CreatorID),
VR.AE, pf.getInstance().getSeries().getSourceAET());
return dio;
}
public Long getNumberOfSeriesOfStudy(long studyPk) {
return (Long) em.createQuery("SELECT COUNT(s) from PrivateSeries s WHERE s.study.pk = :studyPk")
.setParameter("studyPk", studyPk)
.getSingleResult();
}
public Long getNumberOfInstancesOfStudy(long studyPk) {
return (Long) em.createQuery("SELECT DISTINCT COUNT(i) FROM PrivateInstance i, PrivateSeries se , PrivateStudy st WHERE i.series.pk = se.pk AND se.study.pk = st.pk AND st.pk = :studyPk")
.setParameter("studyPk", studyPk)
.getSingleResult();
}
public Long getNumberOfInstancesOfSeries(long seriesPk) {
return (Long) em.createQuery("SELECT COUNT(i) from PrivateInstance i WHERE i.series.pk = :seriesPk")
.setParameter("seriesPk", seriesPk)
.getSingleResult();
}
}
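/*
 * Illustrative sketch only (not part of the original source): the instance and
 * series comparators above delegate to QueryUtil.compareIntegerStringAndPk to
 * order entities by a numeric DICOM string (InstanceNumber / SeriesNumber),
 * falling back to the primary key when the strings are missing or not numeric.
 * A minimal, self-contained version of that idea could look like the class
 * below; the real QueryUtil implementation may differ in detail.
 */
class NumericStringPkComparatorSketch {
    static int compareIntegerStringAndPk(long pk1, long pk2, String s1, String s2) {
        Integer i1 = parseOrNull(s1);
        Integer i2 = parseOrNull(s2);
        if (i1 != null && i2 != null && !i1.equals(i2)) {
            return i1.compareTo(i2);
        }
        if (i1 == null && i2 != null) return 1;  // entries without a number sort last
        if (i1 != null && i2 == null) return -1;
        return pk1 < pk2 ? -1 : (pk1 == pk2 ? 0 : 1);  // stable fallback on the primary key
    }
    private static Integer parseOrNull(String s) {
        try {
            return s == null ? null : Integer.valueOf(s.trim());
        } catch (NumberFormatException e) {
            return null;
        }
    }
}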
|
|
/*
* Copyright (C) 2004-2017, GoodData(R) Corporation. All rights reserved.
* This source code is licensed under the BSD-style license found in the
* LICENSE.txt file in the root directory of this source tree.
*/
package com.gooddata.dataset;
import com.gooddata.AbstractGoodDataIT;
import com.gooddata.GoodDataException;
import com.gooddata.gdc.AboutLinks.Link;
import com.gooddata.gdc.TaskStatus;
import com.gooddata.project.Project;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collection;
import static com.gooddata.util.ResourceUtils.OBJECT_MAPPER;
import static com.gooddata.util.ResourceUtils.readFromResource;
import static com.gooddata.util.ResourceUtils.readObjectFromResource;
import static net.jadler.Jadler.onRequest;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
import static org.testng.Assert.fail;
public class DatasetServiceIT extends AbstractGoodDataIT {
private static final String STATUS_URI = "/gdc/md/PROJECT_ID/tasks/TASK_ID/status";
private static final String DML_MAQL = "DELETE FROM {attr.logs.phase_id} WHERE {created.date.yyyymmdd} < \"2015-01-18\"";
private Project project;
@BeforeClass
public void setUpClass() throws Exception {
project = readObjectFromResource("/project/project.json", Project.class);
}
@BeforeMethod
public void setUp() throws Exception {
onRequest()
.havingMethodEqualTo("GET")
.havingPathEqualTo("/gdc")
.respond()
.withBody(readFromResource("/gdc/gdc.json"));
onRequest()
.havingPath(startsWith("/uploads/"))
.havingMethodEqualTo("PUT")
.respond()
.withStatus(200);
onRequest()
.havingPath(startsWith("/uploads/"))
.havingMethodEqualTo("DELETE")
.respond()
.withStatus(200);
onRequest()
.havingPathEqualTo("/gdc/md/PROJECT_ID/etl/pull2")
.havingMethodEqualTo("POST")
.respond()
.withStatus(201)
.withBody(readFromResource("/dataset/pullTask.json"));
}
@Test
public void shouldLoadDataset() throws Exception {
onRequest()
.havingPathEqualTo("/gdc/md/PROJECT/tasks/task/ID/status")
.respond()
.withStatus(202)
.withBody(readFromResource("/dataset/pullTaskStatusRunning.json"))
.thenRespond()
.withStatus(200)
.withBody(readFromResource("/dataset/pullTaskStatusOk.json"));
final DatasetManifest manifest = readObjectFromResource("/dataset/datasetManifest.json", DatasetManifest.class);
gd.getDatasetService().loadDataset(project, manifest, new ByteArrayInputStream(new byte[]{})).get();
}
@Test
public void shouldLoadDatasets() throws Exception {
onRequest()
.havingPathEqualTo("/gdc/md/PROJECT/tasks/task/ID/status")
.respond()
.withStatus(202)
.withBody(readFromResource("/dataset/pullTaskStatusRunning.json"))
.thenRespond()
.withStatus(200)
.withBody(readFromResource("/dataset/pullTaskStatusOk.json"));
final DatasetManifest manifest = readObjectFromResource("/dataset/datasetManifest.json", DatasetManifest.class);
final InputStream source = new ByteArrayInputStream(new byte[]{});
manifest.setSource(source);
gd.getDatasetService().loadDatasets(project, manifest).get();
}
@Test(expectedExceptions = DatasetException.class, expectedExceptionsMessageRegExp = ".*dataset.person.*Unable to load.*")
public void shouldFailPolling() throws Exception {
onRequest()
.havingPathEqualTo("/gdc/md/PROJECT/tasks/task/ID/status")
.respond()
.withStatus(400);
final DatasetManifest manifest = readObjectFromResource("/dataset/datasetManifest.json", DatasetManifest.class);
gd.getDatasetService().loadDataset(project, manifest, new ByteArrayInputStream(new byte[]{})).get();
}
@Test
public void shouldFailLoading() throws Exception {
onRequest()
.havingPathEqualTo("/gdc/md/PROJECT/tasks/task/ID/status")
.respond()
.withStatus(200)
.withBody(readFromResource("/dataset/pullTaskStatusError.json"));
final DatasetManifest manifest = readObjectFromResource("/dataset/datasetManifest.json", DatasetManifest.class);
try {
gd.getDatasetService().loadDataset(project, manifest, new ByteArrayInputStream(new byte[]{})).get();
fail("Exception should be thrown");
} catch (DatasetException e) {
assertThat(e.getMessage(), is("Load datasets [dataset.person] failed: [status: ERROR. Missing field [attr.person.age]]"));
}
}
@Test
public void shouldGetDatasetManifest() throws Exception {
onRequest()
.havingMethodEqualTo("GET")
.havingPathEqualTo("/gdc/md/PROJECT_ID/ldm/singleloadinterface/foo/manifest")
.respond()
.withBody(readFromResource("/dataset/datasetManifest.json"))
;
final DatasetManifest manifest = gd.getDatasetService().getDatasetManifest(project, "foo");
assertThat(manifest, is(notNullValue()));
}
@SuppressWarnings("deprecation")
@Test
public void shouldListDatasets() throws Exception {
onRequest()
.havingMethodEqualTo("GET")
.havingPathEqualTo("/gdc/md/PROJECT_ID/ldm/singleloadinterface")
.respond()
.withBody(readFromResource("/dataset/datasetLinks.json"))
;
final Collection<Dataset> datasets = gd.getDatasetService().listDatasets(project);
assertThat(datasets, hasSize(1));
assertThat(datasets.iterator().next().getUri(),
is("/gdc/md/PROJECT_ID/ldm/singleloadinterface/dataset.person"));
}
@Test
public void shouldListDatasetLinks() throws Exception {
onRequest()
.havingMethodEqualTo("GET")
.havingPathEqualTo("/gdc/md/PROJECT_ID/ldm/singleloadinterface")
.respond()
.withBody(readFromResource("/dataset/datasetLinks.json"))
;
final Collection<Link> datasets = gd.getDatasetService().listDatasetLinks(project);
assertThat(datasets, hasSize(1));
assertThat(datasets.iterator().next().getUri(),
is("/gdc/md/PROJECT_ID/ldm/singleloadinterface/dataset.person"));
}
@Test
public void shouldOptimizeSliHash() throws Exception {
onRequest()
.havingMethodEqualTo("POST")
.havingPathEqualTo("/gdc/md/PROJECT_ID/etl/mode")
.respond()
.withStatus(202)
.withBody("{\"uri\" : \"" + STATUS_URI + "\"}");
onRequest()
.havingMethodEqualTo("GET")
.havingPathEqualTo(STATUS_URI)
.respond()
.withStatus(202)
.withBody(OBJECT_MAPPER.writeValueAsString(new TaskStatus("RUNNING", STATUS_URI)))
.thenRespond()
.withStatus(200)
.withBody(OBJECT_MAPPER.writeValueAsString(new TaskStatus("OK", STATUS_URI)));
gd.getDatasetService().optimizeSliHash(project).get();
}
@Test(expectedExceptions = GoodDataException.class)
public void shouldFailOptimizeSliHash() throws Exception {
onRequest()
.havingMethodEqualTo("POST")
.havingPathEqualTo("/gdc/md/PROJECT_ID/etl/mode")
.respond()
.withStatus(202)
.withBody("{\"uri\" : \"" + STATUS_URI + "\"}");
onRequest()
.havingMethodEqualTo("GET")
.havingPathEqualTo(STATUS_URI)
.respond()
.withStatus(202)
.withBody(OBJECT_MAPPER.writeValueAsString(new TaskStatus("RUNNING", STATUS_URI)))
.thenRespond()
.withStatus(200)
.withBody(OBJECT_MAPPER.writeValueAsString(new TaskStatus("ERROR", STATUS_URI)));
gd.getDatasetService().optimizeSliHash(project).get();
}
@Test
public void shouldUpdateProjectData() throws IOException {
onRequest()
.havingMethodEqualTo("POST")
.havingPathEqualTo("/gdc/md/PROJECT_ID/dml/manage")
.respond()
.withStatus(202)
.withBody("{\"uri\" : \"" + STATUS_URI + "\"}");
onRequest()
.havingMethodEqualTo("GET")
.havingPathEqualTo(STATUS_URI)
.respond()
.withStatus(200) // REST API returns HTTP 200 when task is in RUNNING state :(
.withBody(OBJECT_MAPPER.writeValueAsString(new TaskState("RUNNING", STATUS_URI)))
.thenRespond()
.withStatus(200)
.withBody(OBJECT_MAPPER.writeValueAsString(new TaskState("OK", STATUS_URI)))
;
gd.getDatasetService().updateProjectData(project, DML_MAQL).get();
}
@Test(expectedExceptions = GoodDataException.class)
public void shouldFailUpdateProjectDataServerError() throws IOException {
onRequest()
.havingMethodEqualTo("POST")
.havingPathEqualTo("/gdc/md/PROJECT_ID/dml/manage")
.respond()
.withStatus(202)
.withBody("{\"uri\" : \"" + STATUS_URI + "\"}");
onRequest()
.havingMethodEqualTo("GET")
.havingPathEqualTo(STATUS_URI)
.respond()
.withStatus(500)
;
gd.getDatasetService().updateProjectData(project, DML_MAQL).get();
}
@Test(expectedExceptions = GoodDataException.class)
public void shouldFailUpdateProjectData() throws IOException {
onRequest()
.havingMethodEqualTo("POST")
.havingPathEqualTo("/gdc/md/PROJECT_ID/dml/manage")
.respond()
.withStatus(202)
.withBody("{\"uri\" : \"" + STATUS_URI + "\"}");
onRequest()
.havingMethodEqualTo("GET")
.havingPathEqualTo(STATUS_URI)
.respond()
.withStatus(202)
.withBody(OBJECT_MAPPER.writeValueAsString(new TaskState("RUNNING", STATUS_URI)))
.thenRespond()
.withStatus(200)
.withBody(OBJECT_MAPPER.writeValueAsString(new TaskState("ERROR", STATUS_URI)))
;
gd.getDatasetService().updateProjectData(project, DML_MAQL).get();
}
@Test
public void shouldListUploadsForDataset() throws Exception {
onRequest()
.havingPathEqualTo("/gdc/md/PROJECT_ID/data/sets")
.respond()
.withStatus(200)
.withBody(readFromResource("/dataset/uploads/data-sets.json"));
onRequest()
.havingPathEqualTo("/gdc/md/PROJECT_ID/data/uploads/814")
.respond()
.withStatus(200)
.withBody(readFromResource("/dataset/uploads/uploads.json"));
final Collection<Upload> uploads = gd.getDatasetService().listUploadsForDataset(project, "dataset.campaign");
assertThat(uploads, notNullValue());
assertThat(uploads, hasSize(2));
}
@Test
public void shouldGetLastUploadForDataset() throws Exception {
onRequest()
.havingPathEqualTo("/gdc/md/PROJECT_ID/data/sets")
.respond()
.withStatus(200)
.withBody(readFromResource("/dataset/uploads/data-sets.json"));
onRequest()
.havingPathEqualTo("/gdc/md/PROJECT_ID/data/upload/1076")
.respond()
.withStatus(200)
.withBody(readFromResource("/dataset/uploads/upload.json"));
final Upload upload = gd.getDatasetService().getLastUploadForDataset(project, "dataset.campaign");
assertThat(upload, notNullValue());
assertThat(upload.getStatus(), is("OK"));
}
@Test
public void shouldGetUploadStatistics() throws Exception {
onRequest()
.havingPathEqualTo("/gdc/md/PROJECT_ID/data/uploads_info")
.respond()
.withStatus(200)
.withBody(readFromResource("/dataset/uploads/data-uploads-info.json"));
final UploadStatistics uploadStatistics = gd.getDatasetService().getUploadStatistics(project);
assertThat(uploadStatistics, notNullValue());
assertThat(uploadStatistics.getUploadsCount("OK"), is(845));
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gemstone.gemfire.internal.logging;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.text.BreakIterator;
import java.text.DateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Handler;
import java.util.logging.Level;
import com.gemstone.gemfire.i18n.LogWriterI18n;
import com.gemstone.gemfire.internal.Assert;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.internal.process.StartupStatusListener;
import com.gemstone.org.jgroups.util.StringId;
/**
* Abstract implementation of {@link InternalLogWriter}.
* Each logger has a level and it will only print messages whose
* level is greater than or equal to the logger's level. The supported
* logger level constants, in ascending order, are:
* <ol>
* <li> {@link #ALL_LEVEL}
* <li> {@link #FINEST_LEVEL}
* <li> {@link #FINER_LEVEL}
* <li> {@link #FINE_LEVEL}
* <li> {@link #CONFIG_LEVEL}
* <li> {@link #INFO_LEVEL}
* <li> {@link #WARNING_LEVEL}
* <li> {@link #ERROR_LEVEL}
* <li> {@link #SEVERE_LEVEL}
* <li> {@link #NONE_LEVEL}
* </ol>
* <p>
* Subclasses must implement:
* <ol>
* <li> {@link #getLogWriterLevel}
* <li> {@link #put(int, String, Throwable)}
* <li> {@link #put(int, StringId, Object[], Throwable)}
* </ol>
*/
public abstract class LogWriterImpl implements InternalLogWriter {
// Constants
/**
* A bit mask to remove any potential flags added to the msgLevel.
* Intended to be used in {@link #getRealLogLevel}.
*/
private final static int LOGGING_FLAGS_MASK = 0x00FFFFFF;
/**
* A flag to indicate the {@link SecurityLogWriter#SECURITY_PREFIX}
* should be appended to the log level.
*/
protected final static int SECURITY_LOGGING_FLAG = 0x40000000;
static {
Assert.assertTrue( ALL_LEVEL == Level.ALL.intValue() );
Assert.assertTrue( NONE_LEVEL == Level.OFF.intValue() );
Assert.assertTrue( FINEST_LEVEL == Level.FINEST.intValue() );
Assert.assertTrue( FINER_LEVEL == Level.FINER.intValue() );
Assert.assertTrue( FINE_LEVEL == Level.FINE.intValue() );
Assert.assertTrue( CONFIG_LEVEL == Level.CONFIG.intValue() );
Assert.assertTrue( INFO_LEVEL == Level.INFO.intValue() );
Assert.assertTrue( WARNING_LEVEL == Level.WARNING.intValue() );
Assert.assertTrue( SEVERE_LEVEL == Level.SEVERE.intValue() );
int logLevels = FINEST_LEVEL |
FINER_LEVEL |
FINE_LEVEL |
CONFIG_LEVEL |
INFO_LEVEL |
WARNING_LEVEL|
SEVERE_LEVEL;
Assert.assertTrue( logLevels == (logLevels & LOGGING_FLAGS_MASK) );
Assert.assertTrue( 0 == (logLevels & SECURITY_LOGGING_FLAG) );
}
/**
* A listener which can be registered to be informed of startup events
*/
private static volatile StartupStatusListener startupListener;
// Constructors
protected LogWriterImpl() {
this.timeFormatter = DateFormatter.createDateFormat();
}
/**
* Gets the writer's level.
*/
public abstract int getLogWriterLevel();
public boolean isSecure() {
return false;
}
public static String allowedLogLevels() {
StringBuffer b = new StringBuffer(64);
for (int i=0; i < levelNames.length; i++) {
if (i != 0) {
b.append('|');
}
b.append(levelNames[i]);
}
return b.toString();
}
/**
* Gets the string representation for the given <code>level</code> int code.
*/
public static String levelToString(int level) {
switch (level) {
case ALL_LEVEL: return "all";
case FINEST_LEVEL: return "finest";
case FINER_LEVEL: return "finer";
case FINE_LEVEL: return "fine";
case CONFIG_LEVEL: return "config";
case INFO_LEVEL: return "info";
case WARNING_LEVEL: return "warning";
case ERROR_LEVEL: return "error";
case SEVERE_LEVEL: return "severe";
case NONE_LEVEL: return "none";
default: return levelToStringSpecialCase(level);
}
}
/**
* Handles the special cases for {@link #levelToString(int)}, including
* levels carrying the {@link #SECURITY_LOGGING_FLAG} and invalid log levels.
*/
private static String levelToStringSpecialCase(int levelWithFlags) {
if((levelWithFlags & SECURITY_LOGGING_FLAG) != 0) {
//We know the flag is set so XOR will zero it out.
int level = levelWithFlags ^ SECURITY_LOGGING_FLAG;
return SecurityLogWriter.SECURITY_PREFIX + levelToString(level);
} else {
//Needed to prevent infinite recursion
//This signifies an unknown log level was used
return "level-" + String.valueOf(levelWithFlags);
}
}
protected static int getRealLogLevel(int levelWithFlags) {
if(levelWithFlags == NONE_LEVEL) {
return levelWithFlags;
}
return levelWithFlags & LOGGING_FLAGS_MASK;
}
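// Worked example (illustrative): given int flagged = INFO_LEVEL | SECURITY_LOGGING_FLAG,
// levelToString(flagged) falls through to levelToStringSpecialCase, which XORs the
// security flag away and prefixes the plain level name with
// SecurityLogWriter.SECURITY_PREFIX, while getRealLogLevel(flagged) masks with
// LOGGING_FLAGS_MASK and yields INFO_LEVEL again.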
public static String join(Object[] a) {
return join(a, " ");
}
public static String join(Object[] a, String joinString) {
return join(Arrays.asList(a), joinString);
}
public static String join(List l) {
return join(l, " ");
}
public static String join(List l, String joinString) {
StringBuffer result = new StringBuffer(80);
boolean firstTime = true;
Iterator it = l.iterator();
while (it.hasNext()) {
if (firstTime) {
firstTime = false;
} else {
result.append(joinString);
}
result.append(it.next());
}
return result.toString();
}
/**
* Gets the level code for the given <code>levelName</code>.
* @throws IllegalArgumentException if an unknown level name is given.
*/
public static int levelNameToCode(String levelName) {
if ("all".equalsIgnoreCase(levelName)) {
return ALL_LEVEL;
}
if ("finest".equalsIgnoreCase(levelName) || "trace".equalsIgnoreCase(levelName)) {
return FINEST_LEVEL;
}
if ("finer".equalsIgnoreCase(levelName)) {
return FINER_LEVEL;
}
if ("fine".equalsIgnoreCase(levelName) || "debug".equalsIgnoreCase(levelName)) {
return FINE_LEVEL;
}
if ("config".equalsIgnoreCase(levelName)) {
return CONFIG_LEVEL;
}
if ("info".equalsIgnoreCase(levelName)) {
return INFO_LEVEL;
}
if ("warning".equalsIgnoreCase(levelName) || "warn".equalsIgnoreCase(levelName)) {
return WARNING_LEVEL;
}
if ("error".equalsIgnoreCase(levelName)) {
return ERROR_LEVEL;
}
if ("severe".equalsIgnoreCase(levelName) || "fatal".equalsIgnoreCase(levelName)) {
return SEVERE_LEVEL;
}
if ("none".equalsIgnoreCase(levelName)) {
return NONE_LEVEL;
}
try {
if (levelName.startsWith("level-")) {
String levelValue = levelName.substring("level-".length());
return Integer.parseInt(levelValue);
}
} catch (NumberFormatException ignore) {
} catch (NullPointerException ignore) {
}
throw new IllegalArgumentException("Unknown log-level \"" + levelName
+ "\". Valid levels are: "
+ join(levelNames)
+ ".");
}
/**
* Gets a String representation of the current time.
* @return a String representation of the current time.
*/
protected String getTimeStamp() {
return formatDate(new Date());
}
/**
* Convert a Date to a timestamp String.
* @param d a Date to format as a timestamp String.
* @return a String representation of the current time.
*/
protected String formatDate(Date d) {
try {
synchronized (timeFormatter) {
// Need sync: see bug 21858
return timeFormatter.format(d);
}
} catch (Exception e1) {
// Fix bug 21857
try {
return d.toString();
} catch (Exception e2) {
try {
return Long.toString(d.getTime());
} catch (Exception e3) {
return "timestampFormatFailed";
}
}
}
}
// Implementation of LogWriterI18n interface
/**
* Returns true if "severe" log messages are enabled.
* Returns false if "severe" log messages are disabled.
*/
public boolean severeEnabled() {
return getLogWriterLevel() <= SEVERE_LEVEL;
}
/**
* Writes both a message and exception to this writer.
* The message level is "severe".
*/
public void severe(String msg, Throwable ex) {
if (this.severeEnabled()) {
this.put(SEVERE_LEVEL, msg, ex);
}
}
/**
* Writes a message to this writer.
* The message level is "severe".
*/
public void severe(String msg) {
this.severe(msg, null);
}
/**
* Writes an exception to this writer.
* The exception level is "severe".
*/
public void severe(Throwable ex) {
this.severe(LocalizedStrings.EMPTY, ex);
}
/**
* Writes both a message and exception to this writer.
* The message level is "severe".
* @since 6.0
*/
public void severe(StringId msgID, Object[] params, Throwable ex) {
if (this.severeEnabled()) {
this.put(SEVERE_LEVEL, msgID, params, ex);
}
}
/**
* Writes both a message and exception to this writer.
* The message level is "severe".
* @since 6.0
*/
public void severe(StringId msgID, Object param, Throwable ex) {
if (this.severeEnabled()) {
this.put(SEVERE_LEVEL, msgID, new Object[] {param}, ex);
}
}
/**
* Writes both a message and exception to this writer.
* The message level is "severe".
* @since 6.0
*/
public void severe(StringId msgID, Throwable ex) {
severe(msgID, null, ex);
}
/**
* Writes both a message and exception to this writer.
* The message level is "severe".
* @since 6.0
*/
public void severe(StringId msgID, Object[] params) {
severe(msgID, params, null);
}
/**
* Writes both a message and exception to this writer.
* The message level is "severe".
* @since 6.0
*/
public void severe(StringId msgID, Object param) {
severe(msgID, param, null);
}
/**
* Writes both a message and exception to this writer.
* The message level is "severe".
* @since 6.0
*/
public void severe(StringId msgID) {
severe(msgID, null, null);
}
/**
* @return true if "error" log messages are enabled.
*/
public boolean errorEnabled() {
return getLogWriterLevel() <= ERROR_LEVEL;
}
/**
* Writes both a message and exception to this writer.
* The message level is "error".
*/
public void error(String msg, Throwable ex) {
if (this.errorEnabled()) {
this.put(ERROR_LEVEL, msg, ex);
}
}
/**
* Writes a message to this writer.
* The message level is "error".
*/
public void error(String msg) {
this.error(msg, null);
}
/**
* Writes an exception to this writer.
* The exception level is "error".
*/
public void error(Throwable ex) {
this.error(LocalizedStrings.EMPTY, ex);
}
/**
* Writes both a message and exception to this writer.
* The message level is "error".
* @since 6.0
*/
public void error(StringId msgID, Object[] params, Throwable ex) {
if (this.errorEnabled()) {
this.put(ERROR_LEVEL, msgID, params, ex);
}
}
/**
* Writes both a message and exception to this writer.
* The message level is "error".
* @since 6.0
*/
public void error(StringId msgID, Object param, Throwable ex) {
if (this.errorEnabled()) {
this.put(ERROR_LEVEL, msgID, new Object[] {param}, ex);
}
}
/**
* Writes both a message and exception to this writer.
* The message level is "error".
* @since 6.0
*/
public void error(StringId msgID, Throwable ex) {
error(msgID, null, ex);
}
/**
* Writes both a message and exception to this writer.
* The message level is "error".
* @since 6.0
*/
public void error(StringId msgID, Object[] params) {
error(msgID, params, null);
}
/**
* Writes both a message and exception to this writer.
* The message level is "error".
* @since 6.0
*/
public void error(StringId msgID, Object param) {
error(msgID, param, null);
}
/**
* Writes both a message and exception to this writer.
* The message level is "error".
* @since 6.0
*/
public void error(StringId msgID) {
error(msgID, null, null);
}
/**
* @return true if "warning" log messages are enabled.
*/
public boolean warningEnabled() {
return getLogWriterLevel() <= WARNING_LEVEL;
}
/**
* Writes both a message and exception to this writer.
* The message level is "warning".
*/
public void warning(String msg, Throwable ex) {
if (this.warningEnabled()) {
this.put(WARNING_LEVEL, msg, ex);
}
}
/**
* Writes a message to this writer.
* The message level is "warning".
*/
public void warning(String msg) {
this.warning(msg, null);
}
/**
* Writes an exception to this writer.
* The exception level is "warning".
*/
public void warning(Throwable ex) {
this.warning(LocalizedStrings.EMPTY, ex);
}
/**
* Writes both a message and exception to this writer.
* The message level is "warning".
* @since 6.0
*/
public void warning(StringId msgID, Object[] params, Throwable ex) {
if (this.warningEnabled()) {
this.put(WARNING_LEVEL, msgID, params, ex);
}
}
/**
* Writes both a message and exception to this writer.
* The message level is "warning".
* @since 6.0
*/
public void warning(StringId msgID, Object param, Throwable ex) {
if (this.warningEnabled()) {
this.put(WARNING_LEVEL, msgID, new Object[] {param}, ex);
}
}
/**
* Writes both a message and exception to this writer.
* The message level is "warning".
* @since 6.0
*/
public void warning(StringId msgID, Throwable ex) {
warning(msgID, null, ex);
}
/**
* Writes both a message and exception to this writer.
* The message level is "warning".
* @since 6.0
*/
public void warning(StringId msgID, Object[] params) {
warning(msgID, params, null);
}
/**
* Writes both a message and exception to this writer.
* The message level is "warning".
* @since 6.0
*/
public void warning(StringId msgID, Object param) {
warning(msgID, param, null);
}
/**
* Writes both a message and exception to this writer.
* The message level is "warning".
* @since 6.0
*/
public void warning(StringId msgID) {
warning(msgID, null, null);
}
/**
* @return true if "info" log messages are enabled.
*/
public boolean infoEnabled() {
return getLogWriterLevel() <= INFO_LEVEL
/* (bug 29581) && !SmHelper._memorySpaceLow() */;
}
/**
* Writes both a message and exception to this writer.
* The message level is "information".
*/
public void info(String msg, Throwable ex) {
if (this.infoEnabled()) {
this.put(INFO_LEVEL, msg, ex);
}
}
/**
* Writes a message to this writer.
* The message level is "information".
*/
public void info(String msg) {
this.info(msg, null);
}
/**
* Writes an exception to this writer.
* The exception level is "information".
*/
public void info(Throwable ex) {
this.info(LocalizedStrings.EMPTY, ex);
}
/**
* Writes both a message and exception to this writer.
* The message level is "info".
* @since 6.0
*/
public void info(StringId msgID, Object[] params, Throwable ex) {
if (this.infoEnabled()) {
this.put(INFO_LEVEL, msgID, params, ex);
}
}
/**
* Writes both a message and exception to this writer.
* The message level is "info".
* @since 6.0
*/
public void info(StringId msgID, Object param, Throwable ex) {
if (this.infoEnabled()) {
this.put(INFO_LEVEL, msgID, new Object[] {param}, ex);
}
}
/**
* Writes both a message and exception to this writer.
* The message level is "info".
* @since 6.0
*/
public void info(StringId msgID, Throwable ex) {
info(msgID, null, ex);
}
/**
* Writes both a message and exception to this writer.
* The message level is "info".
* @since 6.0
*/
public void info(StringId msgID, Object[] params) {
info(msgID, params, null);
}
/**
* Writes both a message and exception to this writer.
* The message level is "info".
* @since 6.0
*/
public void info(StringId msgID, Object param) {
info(msgID, param, null);
}
/**
* Writes both a message and exception to this writer.
* The message level is "info".
* @since 6.0
*/
public void info(StringId msgID) {
info(msgID, null, null);
}
/**
* @return true if "config" log messages are enabled.
*/
public boolean configEnabled() {
return getLogWriterLevel() <= CONFIG_LEVEL;
}
/**
* Writes both a message and exception to this writer.
* The message level is "config".
*/
public void config(String msg, Throwable ex) {
if (this.configEnabled()) {
this.put(CONFIG_LEVEL, msg, ex);
}
}
/**
* Writes a message to this writer.
* The message level is "config".
*/
public void config(String msg) {
this.config(msg, null);
}
/**
* Writes an exception to this writer.
* The exception level is "config".
*/
public void config(Throwable ex) {
this.config(LocalizedStrings.EMPTY, ex);
}
/**
* Writes both a message and exception to this writer.
* The message level is "config".
* @since 6.0
*/
public void config(StringId msgID, Object[] params, Throwable ex) {
if (this.configEnabled()) {
this.put(CONFIG_LEVEL, msgID, params, ex);
}
}
/**
* Writes both a message and exception to this writer.
* The message level is "config".
* @since 6.0
*/
public void config(StringId msgID, Object param, Throwable ex) {
if (this.configEnabled()) {
this.put(CONFIG_LEVEL, msgID, new Object[] {param}, ex);
}
}
/**
* Writes both a message and exception to this writer.
* The message level is "config".
* @since 6.0
*/
public void config(StringId msgID, Throwable ex) {
config(msgID, null, ex);
}
/**
* Writes both a message and exception to this writer.
* The message level is "config".
* @since 6.0
*/
public void config(StringId msgID, Object[] params) {
config(msgID, params, null);
}
/**
* Writes both a message and exception to this writer.
* The message level is "config".
* @since 6.0
*/
public void config(StringId msgID, Object param) {
config(msgID, param, null);
}
/**
* Writes both a message and exception to this writer.
* The message level is "config".
* @since 6.0
*/
public void config(StringId msgID) {
config(msgID, null, null);
}
/**
* @return true if "fine" log messages are enabled.
*/
public boolean fineEnabled() {
return getLogWriterLevel() <= FINE_LEVEL
/* (bug 29581) && !SmHelper._memorySpaceLow() */;
}
/**
* Writes both a message and exception to this writer.
* The message level is "fine".
*/
public void fine(String msg, Throwable ex) {
if (this.fineEnabled()) {
this.put(FINE_LEVEL, msg, ex);
}
}
/**
* Writes a message to this writer.
* The message level is "fine".
*/
public void fine(String msg) {
this.fine(msg, null);
}
/**
* Writes an exception to this writer.
* The exception level is "fine".
*/
public void fine(Throwable ex) {
this.fine(null, ex);
}
/**
* Returns true if "finer" log messages are enabled.
* Returns false if "finer" log messages are disabled.
*/
public boolean finerEnabled() {
return getLogWriterLevel() <= FINER_LEVEL
/* (bug 29581) && !SmHelper._memorySpaceLow() */;
}
/**
* Writes both a message and exception to this writer.
* The message level is "finer".
*/
public void finer(String msg, Throwable ex) {
if (this.finerEnabled()) {
this.put(FINER_LEVEL, msg, ex);
}
}
/**
* Writes a message to this writer.
* The message level is "finer".
*/
public void finer(String msg) {
this.finer(msg, null);
}
/**
* Writes an exception to this writer.
* The exception level is "finer".
*/
public void finer(Throwable ex) {
this.finer(null, ex);
}
/**
* Log a method entry.
* <p>The logging is done using the <code>finer</code> level.
* The string message will start with <code>"ENTRY"</code> and
* include the class and method names.
* @param sourceClass Name of class that issued the logging request.
* @param sourceMethod Name of the method that issued the logging request.
*/
public void entering(String sourceClass, String sourceMethod) {
if (this.finerEnabled()) {
this.finer("ENTRY " + sourceClass + ":" + sourceMethod);
}
}
/**
* Log a method return.
* <p>The logging is done using the <code>finer</code> level.
* The string message will start with <code>"RETURN"</code> and
* include the class and method names.
* @param sourceClass Name of class that issued the logging request.
* @param sourceMethod Name of the method that issued the logging request.
*/
public void exiting(String sourceClass, String sourceMethod) {
if (this.finerEnabled()) {
this.finer("RETURN " + sourceClass + ":" + sourceMethod);
}
}
/**
* Log throwing an exception.
* <p> Use this method to log that a method is
* terminating by throwing an exception. The logging is done using
* the <code>finer</code> level.
* <p> This is a convenience method that could be done
* instead by calling {@link #finer(String, Throwable)}.
* The string message will start with <code>"THROW"</code> and
* include the class and method names.
* @param sourceClass Name of class that issued the logging request.
* @param sourceMethod Name of the method that issued the logging request.
* @param thrown The Throwable that is being thrown.
*/
public void throwing(String sourceClass, String sourceMethod,
Throwable thrown) {
if (this.finerEnabled()) {
this.finer("THROW " + sourceClass + ":" + sourceMethod, thrown);
}
}
/**
* Returns true if "finest" log messages are enabled.
* Returns false if "finest" log messages are disabled.
*/
public boolean finestEnabled() {
return getLogWriterLevel() <= FINEST_LEVEL
/* (bug 29581) && !SmHelper._memorySpaceLow() */;
}
/**
* Writes both a message and exception to this writer.
* The message level is "finest".
*/
public void finest(String msg, Throwable ex) {
if (this.finestEnabled()) {
this.put(FINEST_LEVEL, msg, ex);
}
}
/**
* Writes a message to this writer.
* The message level is "finest".
*/
public void finest(String msg) {
this.finest(msg, null);
}
/**
* Writes an exception to this writer.
* The exception level is "finest".
*/
public void finest(Throwable ex) {
this.finest(null, ex);
}
/**
* Writes both a message and exception to this writer.
* If a startup listener is registered,
* the message will be written to the listener as well
* to be reported to a user.
* @since 7.0
*/
public void startup(StringId msgID, Object[] params) {
String message = msgID.toLocalizedString(params);
StartupStatusListener listener = startupListener;
if(listener != null) {
listener.setStatus(message);
}
if (this.infoEnabled()) {
this.put(INFO_LEVEL, message, null);
}
}
// internal implementation methods
/**
* Logs a message and an exception of the given level.
*
* @param msgLevel
* the level code for the message to log
* @param msg
* the actual message to log
* @param exception
* the actual Exception to log
*/
public abstract void put(int msgLevel, String msg, Throwable exception);
/**
* Logs a message and an exception of the given level.
*
* @param msgLevel
* the level code for the message to log
* @param msgId
* A locale agnostic form of the message
* @param params
* the Object arguments to plug into the message
* @param exception
* the actual Exception to log
*/
public abstract void put(int msgLevel, StringId msgId, Object[] params, Throwable exception);
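/**
 * Re-emits <code>target</code> word by word on <code>writer</code>, using a
 * line {@link BreakIterator} and accepting only break positions that end in
 * whitespace. An embedded newline ends the current output line and indents the
 * continuation line by two spaces; a tab counts as eight characters towards
 * the running <code>lineLength</code>, which starts at
 * <code>initialLength</code>. A trailing newline is written unless the running
 * line length is zero.
 */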
static void formatText(PrintWriter writer, String target,
int initialLength) {
BreakIterator boundary = BreakIterator.getLineInstance();
boundary.setText(target);
int start = boundary.first();
int end = boundary.next();
int lineLength = initialLength;
while (end != BreakIterator.DONE) {
// Look at the end and only accept whitespace breaks
char endChar = target.charAt(end-1);
while (!Character.isWhitespace(endChar)) {
int lastEnd = end;
end = boundary.next();
if (end == BreakIterator.DONE) {
// give up. We are at the end of the string
end = lastEnd;
break;
}
endChar = target.charAt(end-1);
}
int wordEnd = end;
if (endChar == '\n') {
// trim off the \n since println will do it for us
wordEnd--;
if (wordEnd > 0 && target.charAt(wordEnd-1) == '\r') {
wordEnd--;
}
} else if (endChar == '\t') {
// figure tabs use 8 characters
lineLength += 7;
}
String word = target.substring(start, wordEnd);
lineLength += word.length();
writer.print(word);
if (endChar == '\n' || endChar == '\r') {
// force end of line
writer.println();
writer.print(" ");
lineLength = 2;
}
start = end;
end = boundary.next();
}
if (lineLength != 0) {
writer.println();
}
}
/**
* Check if a message of the given level would actually be logged by this
* logger.
* This check is based on the Logger effective level.
* @param level a message logging level
* @return true if the given message level is currently being logged.
*/
public boolean isLoggable(int level) {
return getLogWriterLevel() <= level;
}
/**
* Log a message, with associated Throwable information.
* If the logger is currently enabled for the given message level then the
* given message is logged.
* @param level One of the message level identifiers, e.g. SEVERE
* @param message The string message
* @param thrown - Throwable associated with log message.
*/
public void log(int level, String message, Throwable thrown) {
if (isLoggable(level)) {
this.put(level, message, thrown);
}
}
/**
* Log a message.
* If the logger is currently enabled for the given message level then the
* given message is logged.
* @param level One of the message level identifiers, e.g. SEVERE
* @param message The string message
*/
public void log(int level, String message) {
if (isLoggable(level)) {
this.put(level, message, null);
}
}
public Handler getHandler() {
return new GemFireHandler(this);
}
/** Utility to get a stack trace as a string from a Throwable */
public static String getStackTrace(Throwable aThrowable) {
StringWriter sw = new StringWriter();
aThrowable.printStackTrace(new PrintWriter(sw, true));
return sw.toString();
}
/**
* Utility to periodically log a stack trace for a thread. The thread should
* be alive when this method is invoked.
* @param toStdout whether to log to stdout or to this log writer
* @param targetThread the thread to snapshot
* @param interval millis to wait between snapshots
* @param done when to stop snapshotting (also ceases if targetThread dies)
*/
public void logTraces(final boolean toStdout,
final Thread targetThread, final int interval, final AtomicBoolean done) {
if (targetThread == null) {
return;
}
Thread watcherThread = new Thread("Stack Tracer for '" + targetThread.getName() + "'") {
@Override
public void run() {
while (!done.get()) {
try { Thread.sleep(500); } catch (InterruptedException e) { return; }
if (!done.get() && targetThread.isAlive()) {
StringBuffer sb = new StringBuffer(500);
if (toStdout) {
sb.append("[trace ").append(getTimeStamp()).append("] ");
}
StackTraceElement[] els = targetThread.getStackTrace();
sb.append("Stack trace for '").append(targetThread.toString()).append("'\n");
if (els.length > 0) {
for (int i=0; i<els.length; i++) {
sb.append("\tat ").append(els[i].toString()).append("\n");
}
}
else {
sb.append(" no stack\n");
}
if (toStdout) {
System.out.println(sb.toString());
} else {
info(LocalizedStrings.DEBUG, sb.toString());
}
}
}
}
};
watcherThread.start();
}
/** Utility to get a stack trace for a thread */
public static StringBuffer getStackTrace(Thread targetThread) {
StringBuffer sb = new StringBuffer(500);
StackTraceElement[] els = targetThread.getStackTrace();
sb.append("Stack trace for '").append(targetThread.toString()).append("'\n");
if (els.length > 0) {
for (int i=0; i<els.length; i++) {
sb.append("\tat ").append(els[i].toString()).append("\n");
}
}
else {
sb.append(" no stack\n");
}
return sb;
}
// instance variables
private final DateFormat timeFormatter;
/*
* @see com.gemstone.gemfire.LogWriter
* @since 6.0
*/
public com.gemstone.gemfire.LogWriter convertToLogWriter() {
return this;
}
/*
* @see com.gemstone.gemfire.LogWriterI18n
* @since 6.0
*/
public LogWriterI18n convertToLogWriterI18n() {
return this;
}
public static void setStartupListener(StartupStatusListener mainListener) {
LogWriterImpl.startupListener= mainListener;
}
public static StartupStatusListener getStartupListener() {
return LogWriterImpl.startupListener;
}
}
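/*
 * Minimal sketch (not part of the original source) of a concrete subclass, to
 * illustrate the three members the class javadoc says an implementation must
 * supply: getLogWriterLevel(), put(int, String, Throwable) and
 * put(int, StringId, Object[], Throwable). A real subclass would also have to
 * satisfy whatever else the InternalLogWriter contract requires; this sketch
 * simply prints to stdout.
 */
class ConsoleLogWriterSketch extends LogWriterImpl {
    private final int level;
    ConsoleLogWriterSketch(int level) {
        this.level = level;
    }
    @Override
    public int getLogWriterLevel() {
        return level;
    }
    @Override
    public void put(int msgLevel, String msg, Throwable exception) {
        // prefix with the symbolic level name and a timestamp from the base class
        System.out.println("[" + levelToString(msgLevel) + " " + getTimeStamp() + "] " + msg);
        if (exception != null) {
            System.out.println(getStackTrace(exception));
        }
    }
    @Override
    public void put(int msgLevel, StringId msgId, Object[] params, Throwable exception) {
        // localize the message id and delegate to the plain-string variant
        put(msgLevel, msgId == null ? "" : msgId.toLocalizedString(params), exception);
    }
}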
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.test;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Random;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.pig.EvalFunc;
import org.apache.pig.PigConfiguration;
import org.apache.pig.PigException;
import org.apache.pig.PigServer;
import org.apache.pig.backend.executionengine.ExecJob;
import org.apache.pig.builtin.BinStorage;
import org.apache.pig.data.BagFactory;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.DataType;
import org.apache.pig.data.DefaultBagFactory;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.apache.pig.impl.PigImplConstants;
import org.apache.pig.impl.io.FileLocalizer;
import org.apache.pig.impl.logicalLayer.FrontendException;
import org.apache.pig.impl.logicalLayer.schema.Schema;
import org.apache.pig.impl.util.LogUtils;
import org.apache.pig.impl.util.ObjectSerializer;
import org.apache.pig.test.utils.Identity;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
@RunWith(JUnit4.class)
public class TestEvalPipeline2 {
static MiniGenericCluster cluster = MiniGenericCluster.buildCluster();
private PigServer pigServer;
TupleFactory mTf = TupleFactory.getInstance();
BagFactory mBf = BagFactory.getInstance();
@Before
public void setUp() throws Exception{
pigServer = new PigServer(cluster.getExecType(), cluster.getProperties());
}
@AfterClass
public static void oneTimeTearDown() throws Exception {
cluster.shutDown();
}
@Test
public void testUdfInputOrder() throws IOException {
String[] input = {
"(123)",
"((123)",
"(123123123123)",
"(asdf)"
};
Util.createInputFile(cluster, "table_udfInp", input);
pigServer.registerQuery("a = load 'table_udfInp' as (i:int);");
pigServer.registerQuery("b = foreach a {dec = 'hello'; str1 = " + Identity.class.getName() +
"(dec,'abc','def');" +
"generate dec,str1; };");
Iterator<Tuple> it = pigServer.openIterator("b");
Tuple tup=null;
//tuple 1
tup = it.next();
Tuple out = (Tuple)tup.get(1);
Assert.assertEquals( out.get(0).toString(), "hello");
Assert.assertEquals(out.get(1).toString(), "abc");
Assert.assertEquals(out.get(2).toString(), "def");
Util.deleteFile(cluster, "table_udfInp");
}
@Test
public void testUDFwithStarInput() throws Exception {
int LOOP_COUNT = 10;
File tmpFile = Util.createTempFileDelOnExit("test", "txt");
PrintStream ps = new PrintStream(new FileOutputStream(tmpFile));
for(int i = 0; i < LOOP_COUNT; i++) {
for(int j=0;j<LOOP_COUNT;j+=2){
ps.println(i+"\t"+j);
ps.println(i+"\t"+j);
}
}
ps.close();
pigServer.registerQuery("A = LOAD '"
+ Util.generateURI(tmpFile.toString(), pigServer
.getPigContext()) + "';");
pigServer.registerQuery("B = group A by $0;");
String query = "C = foreach B {"
+ "generate " + Identity.class.getName() +"(*);"
+ "};";
pigServer.registerQuery(query);
Iterator<Tuple> iter = pigServer.openIterator("C");
if(!iter.hasNext()) Assert.fail("No output found");
List<Tuple> actualResList = new ArrayList<Tuple>();
while(iter.hasNext()){
actualResList.add(iter.next());
}
Util.sortQueryOutputsIfNeed(actualResList,Util.isSparkExecType(cluster.getExecType()));
int numIdentity = 0;
for (Tuple tuple : actualResList) {
Tuple t = (Tuple)tuple.get(0);
Assert.assertEquals(DataByteArray.class, t.get(0).getClass());
int group = Integer.parseInt(new String(((DataByteArray)t.get(0)).get()));
Assert.assertEquals(numIdentity, group);
Assert.assertTrue(t.get(1) instanceof DataBag);
DataBag bag = (DataBag)t.get(1);
Assert.assertEquals(10, bag.size());
Assert.assertEquals(2, t.size());
++numIdentity;
}
Assert.assertEquals(LOOP_COUNT, numIdentity);
}
@Test
public void testBinStorageByteArrayCastsSimple() throws IOException {
// Test for PIG-544 fix
// Tries to read data in BinStorage bytearrays as other pig types,
// should return null if the conversion fails.
// This test case does not use a practical example, it just tests
// if the conversion happens when minimum conditions for conversion
// such as expected number of bytes are met.
String[] input = {
"asdf\t12\t1.1\t231\t234\t3024123\t3.2492",
"sa\t1231\t123.4\t12345678\t1234.567\t5081123453\t9.181817",
"asdff\t1232123\t1.45345\t123456789\t123456789.9\t1234567\t1.234567"
};
Util.createInputFile(cluster, "table_bs_ac", input);
// test with BinStorage
pigServer.registerQuery("a = load 'table_bs_ac';");
String output = "/pig/out/TestEvalPipeline2_BinStorageByteArrayCasts";
pigServer.deleteFile(output);
pigServer.store("a", output, BinStorage.class.getName());
pigServer.registerQuery("b = load '" + output + "' using BinStorage('Utf8StorageConverter') "
+ "as (name: int, age: int, gpa: float, lage: long, dgpa: double, bi:biginteger, bd:bigdecimal);");
Iterator<Tuple> it = pigServer.openIterator("b");
Tuple tup=null;
// I have separately verified only a few of the successful conversions,
// assuming the rest are correct.
// It is primarily testing if null is being returned when conversions
// are expected to fail
//tuple 1
tup = it.next();
Assert.assertTrue((Integer)tup.get(0) == null);
Assert.assertTrue((Integer)tup.get(1) == 12);
Assert.assertTrue((Float)tup.get(2) == 1.1F);
Assert.assertTrue((Long)tup.get(3) == 231L);
Assert.assertTrue((Double)tup.get(4) == 234.0);
Assert.assertEquals((BigInteger)tup.get(5), new BigInteger("3024123"));
Assert.assertEquals((BigDecimal)tup.get(6), new BigDecimal("3.2492"));
//tuple 2
tup = it.next();
Assert.assertTrue(tup.get(0) == null);
Assert.assertTrue((Integer)tup.get(1) == 1231);
Assert.assertTrue((Float)tup.get(2) == 123.4F);
Assert.assertTrue((Long)tup.get(3) == 12345678L);
Assert.assertTrue((Double)tup.get(4) == 1234.567);
Assert.assertEquals((BigInteger)tup.get(5), new BigInteger("5081123453"));
Assert.assertEquals((BigDecimal)tup.get(6), new BigDecimal("9.181817"));
//tuple 3
tup = it.next();
Assert.assertTrue(tup.get(0) == null);
Assert.assertTrue((Integer)tup.get(1) == 1232123);
Assert.assertTrue((Float)tup.get(2) == 1.45345F);
Assert.assertTrue((Long)tup.get(3) == 123456789L);
Assert.assertTrue((Double)tup.get(4) == 1.234567899E8);
Assert.assertEquals((BigInteger)tup.get(5), new BigInteger("1234567"));
Assert.assertEquals((BigDecimal)tup.get(6), new BigDecimal("1.234567"));
Util.deleteFile(cluster, "table");
}
@Test
public void testBinStorageByteArrayCastsComplexBag() throws IOException {
// Test for PIG-544 fix
// Tries to read data in BinStorage bytearrays as other pig bags,
// should return null if the conversion fails.
String[] input = {
"{(asdf)}",
"{(2344)}",
"{(2344}",
"{(323423423423434)}",
"{(323423423423434L)}",
"{(asdff)}"
};
Util.createInputFile(cluster, "table_bs_ac_clx", input);
// test with BinStorage
pigServer.registerQuery("a = load 'table_bs_ac_clx' as (f1);");
pigServer.registerQuery("b = foreach a generate (bag{tuple(int)})f1;");
Iterator<Tuple> it = pigServer.openIterator("b");
Tuple tup=null;
//tuple 1
tup = it.next();
Assert.assertTrue(tup.get(0) != null);
//tuple 2
tup = it.next();
Assert.assertTrue(tup.get(0) != null);
//tuple 3 - malformed
tup = it.next();
Assert.assertTrue(tup.get(0) == null);
//tuple 4 - integer exceeds size limit
tup = it.next();
Assert.assertTrue(tup.get(0) instanceof DataBag);
DataBag db = (DataBag)tup.get(0);
Assert.assertTrue(db.iterator().hasNext());
Tuple innerTuple = db.iterator().next();
Assert.assertTrue(innerTuple.get(0)==null);
//tuple 5
tup = it.next();
Assert.assertTrue(tup.get(0) != null);
//tuple 6
tup = it.next();
Assert.assertTrue(tup.get(0) != null);
Util.deleteFile(cluster, "table_bs_ac_clx");
}
@Test
public void testBinStorageByteArrayCastsComplexTuple() throws IOException {
// Test for PIG-544 fix
// Tries to read data in BinStorage bytearrays as other pig tuples,
// should return null if the conversion fails.
String[] input = {
"(123)",
"((123)",
"(123123123123)",
"(asdf)"
};
Util.createInputFile(cluster, "table_bs_ac_clxt", input);
// test with BinStorage
pigServer.registerQuery("a = load 'table_bs_ac_clxt' as (t:tuple(t:tuple(i:int)));");
Iterator<Tuple> it = pigServer.openIterator("a");
Tuple tup=null;
//tuple 1
tup = it.next();
Assert.assertTrue(tup.get(0) == null);
//tuple 2 -malformed tuple
tup = it.next();
Assert.assertTrue(tup.get(0) == null);
//tuple 3 - integer exceeds size limit
tup = it.next();
Assert.assertTrue(tup.get(0) == null);
//tuple 4
tup = it.next();
Assert.assertTrue(tup.get(0) == null);
Util.deleteFile(cluster, "table_bs_ac_clxt");
}
@Test
public void testPigStorageWithCtrlChars() throws Exception {
String[] inputData = { "hello\u0001world", "good\u0001morning", "nice\u0001day" };
Util.createInputFile(cluster, "testPigStorageWithCtrlCharsInput.txt", inputData);
String script = "a = load 'testPigStorageWithCtrlCharsInput.txt' using PigStorage('\u0001');" +
"b = foreach a generate $0, CONCAT($0, '\u0005'), $1; " +
"store b into 'testPigStorageWithCtrlCharsOutput.txt' using PigStorage('\u0001');" +
"c = load 'testPigStorageWithCtrlCharsOutput.txt' using PigStorage('\u0001') as (f1:chararray, f2:chararray, f3:chararray);";
Util.registerMultiLineQuery(pigServer, script);
Iterator<Tuple> it = pigServer.openIterator("c");
HashMap<String, Tuple> expectedResults = new HashMap<String, Tuple>();
expectedResults.put("hello", (Tuple) Util.getPigConstant("('hello','hello\u0005','world')"));
expectedResults.put("good", (Tuple) Util.getPigConstant("('good','good\u0005','morning')"));
expectedResults.put("nice", (Tuple) Util.getPigConstant("('nice','nice\u0005','day')"));
HashMap<String, Boolean> seen = new HashMap<String, Boolean>();
int numRows = 0;
while(it.hasNext()) {
Tuple t = it.next();
String firstCol = (String) t.get(0);
Assert.assertFalse(seen.containsKey(firstCol));
seen.put(firstCol, true);
Assert.assertEquals(expectedResults.get(firstCol), t);
numRows++;
}
Assert.assertEquals(3, numRows);
Util.deleteFile(cluster, "testPigStorageWithCtrlCharsInput.txt");
}
@Test
// Test case added for PIG-850
public void testLimitedSortWithDump() throws Exception{
int LOOP_COUNT = 40;
File tmpFile = Util.createTempFileDelOnExit("test", "txt");
PrintStream ps = new PrintStream(new FileOutputStream(tmpFile));
Random r = new Random(1);
int rand;
for(int i = 0; i < LOOP_COUNT; i++) {
rand = r.nextInt(100);
ps.println(rand);
}
ps.close();
pigServer.registerQuery("A = LOAD '"
+ Util.generateURI(tmpFile.toString(), pigServer
.getPigContext()) + "' AS (num:int);");
pigServer.registerQuery("B = order A by num parallel 2;");
pigServer.registerQuery("C = limit B 10;");
Iterator<Tuple> result = pigServer.openIterator("C");
int numIdentity = 0;
while (result.hasNext())
{
result.next();
++numIdentity;
}
Assert.assertEquals(10, numIdentity);
}
@Test
public void testLimitAfterSort() throws Exception{
int LOOP_COUNT = 40;
File tmpFile = Util.createTempFileDelOnExit("test", "txt");
PrintStream ps = new PrintStream(new FileOutputStream(tmpFile));
Random r = new Random(1);
int rand;
for(int i = 0; i < LOOP_COUNT; i++) {
rand = r.nextInt(100);
ps.println(rand);
}
ps.close();
pigServer.registerQuery("A = LOAD '"
+ Util.generateURI(tmpFile.toString(), pigServer
.getPigContext()) + "' AS (num:int);");
pigServer.registerQuery("B = order A by num parallel 2;");
pigServer.registerQuery("C = limit B 10;");
Iterator<Tuple> iter = pigServer.openIterator("C");
if(!iter.hasNext()) Assert.fail("No output found");
int numIdentity = 0;
int oldNum = Integer.MIN_VALUE;
int newNum;
while(iter.hasNext()){
Tuple t = iter.next();
newNum = (Integer)t.get(0);
Assert.assertTrue(newNum>=oldNum);
oldNum = newNum;
++numIdentity;
}
Assert.assertEquals(10, numIdentity);
}
@Test
public void testLimitAfterSortDesc() throws Exception{
int LOOP_COUNT = 40;
File tmpFile = Util.createTempFileDelOnExit("test", "txt");
PrintStream ps = new PrintStream(new FileOutputStream(tmpFile));
Random r = new Random(1);
int rand;
for(int i = 0; i < LOOP_COUNT; i++) {
rand = r.nextInt(100);
ps.println(rand);
}
ps.close();
pigServer.registerQuery("A = LOAD '"
+ Util.generateURI(tmpFile.toString(), pigServer
.getPigContext()) + "' AS (num:int);");
pigServer.registerQuery("B = order A by num desc parallel 2;");
pigServer.registerQuery("C = limit B 10;");
Iterator<Tuple> iter = pigServer.openIterator("C");
if(!iter.hasNext()) Assert.fail("No output found");
int numIdentity = 0;
int oldNum = Integer.MAX_VALUE;
int newNum;
while(iter.hasNext()){
Tuple t = iter.next();
newNum = (Integer)t.get(0);
Assert.assertTrue(newNum<=oldNum);
oldNum = newNum;
++numIdentity;
}
Assert.assertEquals(10, numIdentity);
}
@Test
// See PIG-894
public void testEmptySort() throws Exception{
File tmpFile = Util.createTempFileDelOnExit("test", "txt");
pigServer.registerQuery("A = LOAD '"
+ Util.generateURI(tmpFile.toString(), pigServer
.getPigContext()) + "';");
pigServer.registerQuery("B = order A by $0;");
Iterator<Tuple> iter = pigServer.openIterator("B");
Assert.assertTrue(iter.hasNext()==false);
}
// See PIG-761
@Test
public void testLimitPOPackageAnnotator() throws Exception{
File tmpFile1 = Util.createTempFileDelOnExit("test1", "txt");
PrintStream ps1 = new PrintStream(new FileOutputStream(tmpFile1));
ps1.println("1\t2\t3");
ps1.println("2\t5\t2");
ps1.close();
File tmpFile2 = Util.createTempFileDelOnExit("test2", "txt");
PrintStream ps2 = new PrintStream(new FileOutputStream(tmpFile2));
ps2.println("1\t1");
ps2.println("2\t2");
ps2.close();
pigServer.registerQuery("A = LOAD '" + Util.generateURI(tmpFile1.toString(), pigServer.getPigContext()) + "' AS (a0, a1, a2);");
pigServer.registerQuery("B = LOAD '" + Util.generateURI(tmpFile2.toString(), pigServer.getPigContext()) + "' AS (b0, b1);");
pigServer.registerQuery("C = LIMIT B 100;");
pigServer.registerQuery("D = COGROUP C BY b0, A BY a0 PARALLEL 2;");
Iterator<Tuple> iter = pigServer.openIterator("D");
if (Util.isSparkExecType(cluster.getExecType())) {
String[] expectedResults =
new String[] {"(2,{(2,2)},{(2,5,2)})", "(1,{(1,1)},{(1,2,3)})" };
Util.checkQueryOutputs(iter, expectedResults,
org.apache.pig.newplan.logical.Util.translateSchema(pigServer.dumpSchema("D")), Util.isSparkExecType(cluster.getExecType()));
} else {
Assert.assertTrue(iter.hasNext());
Tuple t = iter.next();
Assert.assertTrue(t.toString().equals("(2,{(2,2)},{(2,5,2)})"));
Assert.assertTrue(iter.hasNext());
t = iter.next();
Assert.assertTrue(t.toString().equals("(1,{(1,1)},{(1,2,3)})"));
Assert.assertFalse(iter.hasNext());
}
}
// See PIG-1195
@Test
public void testNestedDescSort() throws Exception{
Util.createInputFile(cluster, "table_testNestedDescSort", new String[]{"3","4"});
pigServer.registerQuery("A = LOAD 'table_testNestedDescSort' as (a0:int);");
pigServer.registerQuery("B = group A ALL;");
pigServer.registerQuery("C = foreach B { D = order A by a0 desc;generate D;};");
Iterator<Tuple> iter = pigServer.openIterator("C");
Assert.assertTrue(iter.hasNext());
Tuple t = iter.next();
Assert.assertTrue(t.toString().equals("({(4),(3)})"));
Assert.assertFalse(iter.hasNext());
Util.deleteFile(cluster, "table_testNestedDescSort");
}
// See PIG-972
@Test
public void testDescribeNestedAlias() throws Exception{
String[] input = {
"1\t3",
"2\t4",
"3\t5"
};
Util.createInputFile(cluster, "table_testDescribeNestedAlias", input);
pigServer.registerQuery("A = LOAD 'table_testDescribeNestedAlias' as (a0, a1);");
pigServer.registerQuery("P = GROUP A by a1;");
// Test RelationalOperator
pigServer.registerQuery("B = FOREACH P { D = ORDER A by $0; generate group, D.$0; };");
// Test ExpressionOperator - negative test case
pigServer.registerQuery("C = FOREACH A { D = a0/a1; E=a1/a0; generate E as newcol; };");
Schema schema = pigServer.dumpSchemaNested("B", "D");
Assert.assertTrue(schema.toString().equalsIgnoreCase("{a0: bytearray,a1: bytearray}"));
try {
schema = pigServer.dumpSchemaNested("C", "E");
} catch (FrontendException e) {
Assert.assertTrue(e.getErrorCode() == 1113);
}
}
// See PIG-1484
@Test
public void testBinStorageCommaSeperatedPath() throws Exception{
String[] input = {
"1\t3",
"2\t4",
"3\t5"
};
Util.createInputFile(cluster, "table_simple1", input);
pigServer.setBatchOn();
pigServer.registerQuery("A = LOAD 'table_simple1' as (a0, a1);");
pigServer.registerQuery("store A into 'table_simple1.bin' using BinStorage();");
pigServer.registerQuery("store A into 'table_simple2.bin' using BinStorage();");
pigServer.executeBatch();
pigServer.registerQuery("A = LOAD 'table_simple1.bin,table_simple2.bin' using BinStorage();");
Iterator<Tuple> iter = pigServer.openIterator("A");
Tuple t = iter.next();
Assert.assertTrue(t.toString().equals("(1,3)"));
t = iter.next();
Assert.assertTrue(t.toString().equals("(2,4)"));
t = iter.next();
Assert.assertTrue(t.toString().equals("(3,5)"));
t = iter.next();
Assert.assertTrue(t.toString().equals("(1,3)"));
t = iter.next();
Assert.assertTrue(t.toString().equals("(2,4)"));
t = iter.next();
Assert.assertTrue(t.toString().equals("(3,5)"));
Assert.assertFalse(iter.hasNext());
}
// See PIG-1543
@Test
public void testEmptyBagIterator() throws Exception{
String[] input1 = {
"1",
"1",
"1"
};
String[] input2 = {
"2",
"2"
};
Util.createInputFile(cluster, "input1", input1);
Util.createInputFile(cluster, "input2", input2);
pigServer.registerQuery("A = load 'input1' as (a1:int);");
pigServer.registerQuery("B = load 'input2' as (b1:int);");
pigServer.registerQuery("C = COGROUP A by a1, B by b1;");
pigServer.registerQuery("C1 = foreach C { Alim = limit A 1; Blim = limit B 1; generate Alim, Blim; };");
pigServer.registerQuery("D1 = FOREACH C1 generate Alim,Blim, (IsEmpty(Alim)? 0:1), (IsEmpty(Blim)? 0:1), COUNT(Alim), COUNT(Blim);");
Iterator<Tuple> iter = pigServer.openIterator("D1");
List<Tuple> expectedResults = Util.getTuplesFromConstantTupleStrings(
new String[] {
"({(1)},{},1,0,1L,0L)",
"({},{(2)},0,1,0L,1L)" });
Util.checkQueryOutputsAfterSort(iter, expectedResults);
}
// See PIG-1669
@Test
public void testPushUpFilterScalar() throws Exception{
String[] input1 = {
"jason\t14\t4.7",
"jack\t18\t4.6"
};
String[] input2 = {
"jason\t14",
"jack\t18"
};
Util.createInputFile(cluster, "table_PushUpFilterScalar1", input1);
Util.createInputFile(cluster, "table_PushUpFilterScalar2", input2);
pigServer.registerQuery("A = load 'table_PushUpFilterScalar1' as (name, age, gpa);");
pigServer.registerQuery("B = load 'table_PushUpFilterScalar2' as (name, age);");
pigServer.registerQuery("C = filter A by age < 20;");
pigServer.registerQuery("D = filter B by age < 20;");
pigServer.registerQuery("simple_scalar = limit D 1;");
pigServer.registerQuery("E = join C by name, D by name;");
pigServer.registerQuery("F = filter E by C::age==(int)simple_scalar.age;");
Iterator<Tuple> iter = pigServer.openIterator("F");
Tuple t = iter.next();
Assert.assertTrue(t.toString().equals("(jason,14,4.7,jason,14)"));
Assert.assertFalse(iter.hasNext());
}
// See PIG-1683
@Test
public void testDuplicateReferenceInnerPlan() throws Exception{
String[] input1 = {
"1\t1\t1",
};
String[] input2 = {
"1\t1",
"2\t2"
};
Util.createInputFile(cluster, "table_testDuplicateReferenceInnerPlan1", input1);
Util.createInputFile(cluster, "table_testDuplicateReferenceInnerPlan2", input2);
pigServer.registerQuery("a = load 'table_testDuplicateReferenceInnerPlan1' as (a0, a1, a2);");
pigServer.registerQuery("b = load 'table_testDuplicateReferenceInnerPlan2' as (b0, b1);");
pigServer.registerQuery("c = join a by a0, b by b0;");
pigServer.registerQuery("d = foreach c {d0 = a::a1;d1 = a::a2;generate ((d0 is not null)? d0 : d1);};");
Iterator<Tuple> iter = pigServer.openIterator("d");
Tuple t = iter.next();
Assert.assertTrue(t.toString().equals("(1)"));
Assert.assertFalse(iter.hasNext());
}
// See PIG-1719
@Test
public void testBinCondSchema() throws IOException {
String[] inputData = new String[] {"hello world\t2"};
Util.createInputFile(cluster, "table_testSchemaSerialization.txt", inputData);
pigServer.registerQuery("a = load 'table_testSchemaSerialization.txt' as (a0:chararray, a1:int);");
pigServer.registerQuery("b = foreach a generate FLATTEN((a1<=1?{('null')}:TOKENIZE(a0)));");
pigServer.registerQuery("c = foreach b generate UPPER($0);");
Iterator<Tuple> it = pigServer.openIterator("c");
Tuple t = it.next();
Assert.assertTrue(t.get(0).equals("HELLO"));
t = it.next();
Assert.assertTrue(t.get(0).equals("WORLD"));
}
// See PIG-1721
@Test
public void testDuplicateInnerAlias() throws Exception{
String[] input1 = {
"1\t[key1#5]", "1\t[key2#5]", "2\t[key1#3]"
};
Util.createInputFile(cluster, "table_testDuplicateInnerAlias", input1);
pigServer.registerQuery("a = load 'table_testDuplicateInnerAlias' as (a0:int, a1:map[]);");
pigServer.registerQuery("b = filter a by a0==1;");
pigServer.registerQuery("c = foreach b { b0 = a1#'key1'; generate ((b0 is null or b0 == '')?1:0);};");
Iterator<Tuple> iter = pigServer.openIterator("c");
Tuple t = iter.next();
Assert.assertTrue((Integer)t.get(0)==0);
t = iter.next();
Assert.assertTrue((Integer)t.get(0)==1);
Assert.assertFalse(iter.hasNext());
}
// See PIG-3379
@Test
public void testNestedOperatorReuse() throws Exception{
String[] input1 = {
"60000\tdv1\txuaHeartBeat",
"70000\tdv2\txuaHeartBeat",
"80000\tdv1\txuaPowerOff",
"90000\tdv1\txuaPowerOn",
"110000\tdv2\txuaHeartBeat",
"120000\tdv2\txuaPowerOff",
"140000\tdv2\txuaPowerOn",
"150000\tdv1\txuaHeartBeat",
"160000\tdv2\txuaHeartBeat",
"250000\tdv1\txuaHeartBeat",
"310000\tdv2\txuaPowerOff",
"360000\tdv1\txuaPowerOn",
"420000\tdv3\txuaHeartBeat",
"450000\tdv3\txuaHeartBeat",
"540000\tdv4\txuaPowerOn",
"550000\tdv3\txuaHeartBeat",
"560000\tdv5\txuaHeartBeat" };
Util.createInputFile( cluster, "table_testNestedOperatorReuse", input1 );
String query = "Events = LOAD 'table_testNestedOperatorReuse' AS (eventTime:long, deviceId:chararray, eventName:chararray);" +
"Events = FOREACH Events GENERATE eventTime, deviceId, eventName;" +
"EventsPerMinute = GROUP Events BY (eventTime / 60000);" +
"EventsPerMinute = FOREACH EventsPerMinute {" +
" DistinctDevices = DISTINCT Events.deviceId;" +
" nbDevices = SIZE(DistinctDevices);" +
" DistinctDevices = FILTER Events BY eventName == 'xuaHeartBeat';" +
" nbDevicesWatching = SIZE(DistinctDevices);" +
" GENERATE $0*60000 as timeStamp, nbDevices as nbDevices, nbDevicesWatching as nbDevicesWatching;" +
"}" +
"EventsPerMinute = FILTER EventsPerMinute BY timeStamp >= 0 AND timeStamp < 300000;";
pigServer.registerQuery(query);
Iterator<Tuple> iter = pigServer.openIterator("EventsPerMinute");
List<Tuple> expectedResults = Util.getTuplesFromConstantTupleStrings(
new String[]{"(60000L,2L,3L)", "(120000L,2L,2L)", "(240000L,1L,1L)"});
Util.checkQueryOutputs(iter, expectedResults, Util.isSparkExecType(cluster.getExecType()));
}
// See PIG-1729
@Test
public void testDereferenceInnerPlan() throws Exception{
String[] input1 = {
"1\t2\t3"
};
String[] input2 = {
"1\t1"
};
Util.createInputFile(cluster, "table_testDereferenceInnerPlan1", input1);
Util.createInputFile(cluster, "table_testDereferenceInnerPlan2", input2);
pigServer.registerQuery("a = load 'table_testDereferenceInnerPlan1' as (a0:int, a1:int, a2:int);");
pigServer.registerQuery("b = load 'table_testDereferenceInnerPlan2' as (b0:int, b1:int);");
pigServer.registerQuery("c = cogroup a by a0, b by b0;");
pigServer.registerQuery("d = foreach c generate ((COUNT(a)==0L)?null : a.a0) as d0;");
pigServer.registerQuery("e = foreach d generate flatten(d0);");
pigServer.registerQuery("f = group e all;");
Iterator<Tuple> iter = pigServer.openIterator("f");
Tuple t = iter.next();
Assert.assertTrue(t.toString().equals("(all,{(1)})"));
Assert.assertFalse(iter.hasNext());
}
@Test
// See PIG-1725
public void testLOGenerateSchema() throws Exception{
String[] input1 = {
"1\t2\t{(1)}",
};
Util.createInputFile(cluster, "table_testLOGenerateSchema", input1);
pigServer.registerQuery("a = load 'table_testLOGenerateSchema' as (a0:int, a1, a2:bag{});");
pigServer.registerQuery("b = foreach a generate a0 as b0, a1 as b1, flatten(a2) as b2:int;");
pigServer.registerQuery("c = filter b by b0==1;");
pigServer.registerQuery("d = foreach c generate b0+1, b2;");
Iterator<Tuple> iter = pigServer.openIterator("d");
Tuple t = iter.next();
Assert.assertTrue(t.toString().equals("(2,1)"));
Assert.assertFalse(iter.hasNext());
}
// See PIG-1737
@Test
public void testMergeSchemaErrorMessage() throws IOException {
try {
pigServer.registerQuery("a = load '1.txt' as (a0, a1, a2);");
pigServer.registerQuery("b = group a by (a0, a1);");
pigServer.registerQuery("c = foreach b generate flatten(group) as c0;");
pigServer.openIterator("c");
} catch (Exception e) {
PigException pe = LogUtils.getPigException(e);
Util.checkStrContainsSubStr(pe.getMessage(), "Incompatible schema");
return;
}
Assert.fail();
}
// See PIG-1732
@Test
public void testForEachDupColumn() throws Exception{
String[] input1 = {
"1\t2",
};
String[] input2 = {
"1\t1\t3",
"2\t4\t2"
};
Util.createInputFile(cluster, "table_testForEachDupColumn1", input1);
Util.createInputFile(cluster, "table_testForEachDupColumn2", input2);
pigServer.registerQuery("a = load 'table_testForEachDupColumn1' as (a0, a1:int);");
pigServer.registerQuery("b = load 'table_testForEachDupColumn2' as (b0, b1:int, b2);");
pigServer.registerQuery("c = foreach a generate a0, a1, a1 as a2;");
pigServer.registerQuery("d = union b, c;");
pigServer.registerQuery("e = foreach d generate $1;");
Iterator<Tuple> iter = pigServer.openIterator("e");
Map<Object, Object> expected = new HashMap<Object, Object>(3);
expected.put(1, null);
expected.put(2, null);
expected.put(4, null);
Tuple t = iter.next();
Assert.assertTrue(t.size()==1);
Assert.assertTrue(expected.containsKey(t.get(0)));
t = iter.next();
Assert.assertTrue(t.size()==1);
Assert.assertTrue(expected.containsKey(t.get(0)));
t = iter.next();
Assert.assertTrue(t.size()==1);
Assert.assertTrue(expected.containsKey(t.get(0)));
Assert.assertFalse(iter.hasNext());
}
// See PIG-1745
@Test
public void testBinStorageByteCast() throws Exception{
String[] input1 = {
"1\t2\t3",
};
Util.createInputFile(cluster, "table_testBinStorageByteCast", input1);
pigServer.registerQuery("a = load 'table_testBinStorageByteCast' as (a0, a1, a2);");
pigServer.store("a", "table_testBinStorageByteCast.temp", BinStorage.class.getName());
pigServer.registerQuery("a = load 'table_testBinStorageByteCast.temp' using BinStorage() as (a0, a1, a2);");
pigServer.registerQuery("b = foreach a generate (long)a0;");
try {
pigServer.openIterator("b");
} catch (Exception e) {
PigException pe = LogUtils.getPigException(e);
//This changes in hadoop 23, we get error code 2997
//Assert.assertTrue(pe.getErrorCode()==1118);
return;
}
Assert.fail();
}
// See PIG-1761
@Test
public void testBagDereferenceInMiddle1() throws Exception{
String[] input1 = {
"foo@apache#44",
};
Util.createInputFile(cluster, "table_testBagDereferenceInMiddle1", input1);
pigServer.registerQuery("a = load 'table_testBagDereferenceInMiddle1' as (a0:chararray);");
pigServer.registerQuery("b = foreach a generate UPPER(REGEX_EXTRACT_ALL(a0, '.*@(.*)#.*').$0);");
Iterator<Tuple> iter = pigServer.openIterator("b");
Tuple t = iter.next();
Assert.assertTrue(t.size()==1);
Assert.assertTrue(t.get(0).equals("APACHE"));
}
// See PIG-1843
@Test
public void testBagDereferenceInMiddle2() throws Exception{
String[] input1 = {
"foo apache",
};
Util.createInputFile(cluster, "table_testBagDereferenceInMiddle2", input1);
pigServer.registerQuery("a = load 'table_testBagDereferenceInMiddle2' as (a0:chararray);");
pigServer.registerQuery("b = foreach a generate " + MapGenerate.class.getName() + " (STRSPLIT(a0).$0);");
Iterator<Tuple> iter = pigServer.openIterator("b");
Tuple t = iter.next();
Assert.assertTrue(t.size()==1);
Assert.assertTrue(t.toString().equals("([key#1])"));
}
// See PIG-1766
@Test
public void testForEachSameOriginColumn1() throws Exception {
String[] input1 = {
"1\t2",
"1\t3",
"2\t4",
"2\t5",
};
String[] input2 = {
"1\tone",
"2\ttwo",
};
Util.createInputFile(cluster, "table_testForEachSameOriginColumn1_1", input1);
Util.createInputFile(cluster, "table_testForEachSameOriginColumn1_2", input2);
pigServer.registerQuery("A = load 'table_testForEachSameOriginColumn1_1' AS (a0:int, a1:int);");
pigServer.registerQuery("B = load 'table_testForEachSameOriginColumn1_2' AS (b0:int, b1:chararray);");
pigServer.registerQuery("C = join A by a0, B by b0;");
pigServer.registerQuery("D = foreach B generate b0 as d0, b1 as d1;");
pigServer.registerQuery("E = join C by a1, D by d0;");
pigServer.registerQuery("F = foreach E generate b1, d1;");
Iterator<Tuple> iter = pigServer.openIterator("F");
Tuple t = iter.next();
Assert.assertTrue(t.size()==2);
Assert.assertTrue(t.get(0).equals("one"));
Assert.assertTrue(t.get(1).equals("two"));
}
// See PIG-1771
@Test
public void testLoadWithDifferentSchema() throws Exception{
String[] input1 = {
"hello\thello\t(hello)\t[key#value]",
};
Util.createInputFile(cluster, "table_testLoadWithDifferentSchema1", input1);
pigServer.registerQuery("a = load 'table_testLoadWithDifferentSchema1' as (a0:chararray, a1:chararray, a2, a3:map[]);");
pigServer.store("a", "table_testLoadWithDifferentSchema1.bin", "org.apache.pig.builtin.BinStorage");
pigServer.registerQuery("b = load 'table_testLoadWithDifferentSchema1.bin' USING BinStorage('Utf8StorageConverter') AS (b0:chararray, b1:chararray, b2:tuple(), b3:map[]);");
Iterator<Tuple> iter = pigServer.openIterator("b");
Tuple t = iter.next();
Assert.assertTrue(t.size()==4);
Assert.assertTrue(t.toString().equals("(hello,hello,(hello),[key#value])"));
}
static public class MapGenerate extends EvalFunc<Map<String, Integer>> {
@Override
public Map<String, Integer> exec(Tuple input) throws IOException {
Map<String, Integer> m = new HashMap<String, Integer>();
m.put("key", new Integer(input.size()));
return m;
}
@Override
public Schema outputSchema(Schema input) {
return new Schema(new Schema.FieldSchema(getSchemaName("parselong", input), DataType.MAP));
}
}
// See PIG-1277
@Test
public void testWrappingUnknownKey1() throws Exception{
String[] input1 = {
"1",
};
Util.createInputFile(cluster, "table_testWrappingUnknownKey1", input1);
pigServer.registerQuery("a = load 'table_testWrappingUnknownKey1' as (a0);");
pigServer.registerQuery("b = foreach a generate a0, "+ MapGenerate.class.getName() + "(*) as m:map[];");
pigServer.registerQuery("c = foreach b generate a0, m#'key' as key;");
pigServer.registerQuery("d = group c by key;");
Iterator<Tuple> iter = pigServer.openIterator("d");
Tuple t = iter.next();
Assert.assertTrue(t.size()==2);
Assert.assertTrue(t.toString().equals("(1,{(1,1)})"));
Assert.assertFalse(iter.hasNext());
}
// See PIG-999
@Test
public void testWrappingUnknownKey2() throws Exception{
String[] input1 = {
"1",
};
Util.createInputFile(cluster, "table_testWrappingUnknownKey2", input1);
pigServer.registerQuery("a = load 'table_testWrappingUnknownKey2' as (a0);");
pigServer.registerQuery("b = foreach a generate a0, "+ MapGenerate.class.getName() + "(*) as m:map[];");
pigServer.registerQuery("c = foreach b generate a0, m#'key' as key;");
pigServer.registerQuery("d = order c by key;");
Iterator<Tuple> iter = pigServer.openIterator("d");
Tuple t = iter.next();
Assert.assertTrue(t.size()==2);
Assert.assertTrue(t.toString().equals("(1,1)"));
Assert.assertFalse(iter.hasNext());
}
// See PIG-1065
@Test
public void testWrappingUnknownKey3() throws Exception{
String[] input1 = {
"1\t2",
"2\t3"
};
String[] input2 = {
"1",
};
Util.createInputFile(cluster, "table_testWrappingUnknownKey3_1", input1);
Util.createInputFile(cluster, "table_testWrappingUnknownKey3_2", input2);
pigServer.registerQuery("a = load 'table_testWrappingUnknownKey3_1' as (a0:chararray, a1:chararray);");
pigServer.registerQuery("b = load 'table_testWrappingUnknownKey3_2' as (b0:chararray);");
pigServer.registerQuery("c = union a, b;");
pigServer.registerQuery("d = order c by $0;");
Collection<String> results = new HashSet<String>();
results.add("(1,2)");
results.add("(1)");
results.add("(2,3)");
Iterator<Tuple> iter = pigServer.openIterator("d");
Tuple t = iter.next();
Assert.assertTrue(results.contains(t.toString()));
t = iter.next();
Assert.assertTrue(results.contains(t.toString()));
t = iter.next();
Assert.assertTrue(results.contains(t.toString()));
Assert.assertFalse(iter.hasNext());
}
// See PIG-1787
@Test
public void testOrderByLimitJoin() throws Exception{
String[] input1 = {
"1\t1",
"1\t2"
};
Util.createInputFile(cluster, "table_testOrderByLimitJoin", input1);
pigServer.registerQuery("a = load 'table_testOrderByLimitJoin' as (a0, a1);");
pigServer.registerQuery("b = group a by a0;");
pigServer.registerQuery("c = foreach b generate group as c0, COUNT(a) as c1;");
pigServer.registerQuery("d = order c by c1 parallel 2;");
pigServer.registerQuery("e = limit d 10;");
pigServer.registerQuery("f = join e by c0, a by a0;");
Iterator<Tuple> iter = pigServer.openIterator("f");
String[] expected = new String[] {"(1,2,1,1)", "(1,2,1,2)"};
Util.checkQueryOutputsAfterSortRecursive(iter, expected, org.apache.pig.newplan.logical.Util.translateSchema(pigServer.dumpSchema("f")));
}
// See PIG-1785
@Test
public void testForEachSameOriginColumn2() throws Exception{
String[] input1 = {
"{(1,2),(2,3)}",
};
Util.createInputFile(cluster, "table_testForEachSameOriginColumn2", input1);
pigServer.registerQuery("a = load 'table_testForEachSameOriginColumn2' as (a0:bag{t:tuple(i0:int, i1:int)});");
pigServer.registerQuery("b = foreach a generate flatten(a0) as (b0, b1), flatten(a0) as (b2, b3);");
pigServer.registerQuery("c = filter b by b0>b2;");
Iterator<Tuple> iter = pigServer.openIterator("c");
Tuple t = iter.next();
Assert.assertTrue(t.toString().contains("(2,3,1,2)"));
Assert.assertFalse(iter.hasNext());
}
// See PIG-1785
@Test
public void testForEachSameOriginColumn3() throws Exception{
String[] input1 = {
"1\t1\t2",
"1\t2\t3",
};
Util.createInputFile(cluster, "table_testForEachSameOriginColumn3", input1);
pigServer.registerQuery("a = load 'table_testForEachSameOriginColumn3' as (a0:int, a1:int, a2:int);");
pigServer.registerQuery("b = group a by a0;");
pigServer.registerQuery("c = foreach b generate flatten(a.(a1,a2)) as (b0, b1), flatten(a.(a1,a2)) as (b2, b3);");
pigServer.registerQuery("d = filter c by b0>b2;");
Iterator<Tuple> iter = pigServer.openIterator("d");
Tuple t = iter.next();
Assert.assertTrue(t.toString().contains("(2,3,1,2)"));
Assert.assertFalse(iter.hasNext());
}
// See PIG-1785
@Test
public void testAddingTwoBag() {
try {
pigServer.registerQuery("a = load '1.txt' as (name:chararray, age:int, gpa:double);");
pigServer.registerQuery("b = group a by name;");
pigServer.registerQuery("c = foreach b generate group, SUM(a.age*a.gpa);");
pigServer.openIterator("c");
} catch (Exception e) {
PigException pe = LogUtils.getPigException(e);
Assert.assertTrue(pe.getErrorCode()==1039);
Assert.assertTrue(pe.getMessage().contains("incompatible types"));
return;
}
Assert.fail();
}
public static class BagGenerateNoSchema extends EvalFunc<DataBag> {
@Override
public DataBag exec(Tuple input) throws IOException {
DataBag bg = DefaultBagFactory.getInstance().newDefaultBag();
bg.add(input);
return bg;
}
}
// See PIG-1813
@Test
public void testUDFNoSchemaPropagate1() throws Exception{
String[] input1 = {
"[key#1,key2#2]",
"[key#2,key2#3]",
};
Util.createInputFile(cluster, "table_testUDFNoSchemaPropagate1", input1);
pigServer.registerQuery("a = load 'table_testUDFNoSchemaPropagate1' as (a0:map[]);");
pigServer.registerQuery("b = foreach a generate " + BagGenerateNoSchema.class.getName() + "(*) as b0;");
pigServer.registerQuery("c = foreach b generate flatten(IdentityColumn(b0));");
pigServer.registerQuery("d = foreach c generate $0#'key';");
Iterator<Tuple> iter = pigServer.openIterator("d");
Tuple t = iter.next();
Assert.assertTrue(t.toString().contains("(1)"));
t = iter.next();
Assert.assertTrue(t.toString().contains("(2)"));
Assert.assertFalse(iter.hasNext());
}
// See PIG-1813
@Test
public void testUDFNoSchemaPropagate2() throws Exception{
String[] input1 = {
"[key#1,key2#2]",
"[key#2,key2#3]",
};
Util.createInputFile(cluster, "table_testUDFNoSchemaPropagate2", input1);
pigServer.registerQuery("a = load 'table_testUDFNoSchemaPropagate2' as (a0:map[]);");
pigServer.registerQuery("b = foreach a generate flatten(" + BagGenerateNoSchema.class.getName() + "(*)) as b0;");
pigServer.registerQuery("c = foreach b generate IdentityColumn(b0);");
pigServer.registerQuery("d = foreach c generate $0#'key';");
Iterator<Tuple> iter = pigServer.openIterator("d");
Tuple t = iter.next();
Assert.assertTrue(t.toString().contains("(1)"));
t = iter.next();
Assert.assertTrue(t.toString().contains("(2)"));
Assert.assertFalse(iter.hasNext());
}
// See PIG-1812
@Test
public void testLocalRearrangeInReducer() throws Exception{
String[] input1 = {
"1\t1",
"1\t1",
"1\t2",
};
String[] input2 = {
"1\t1",
};
Util.createInputFile(cluster, "table_testLocalRearrangeInReducer1", input1);
Util.createInputFile(cluster, "table_testLocalRearrangeInReducer2", input2);
pigServer.registerQuery("a = load 'table_testLocalRearrangeInReducer1' as (a0, a1);");
pigServer.registerQuery("b = distinct a;");
pigServer.registerQuery("c = load 'table_testLocalRearrangeInReducer2' as (c0, c1);");
pigServer.registerQuery("d = cogroup b by a0, c by c0;");
pigServer.registerQuery("e = foreach d { e1 = order c by c1; generate e1;};");
Iterator<Tuple> iter = pigServer.openIterator("e");
Tuple t = iter.next();
Assert.assertTrue(t.toString().contains("({(1,1)})"));
Assert.assertFalse(iter.hasNext());
}
// See PIG-1850
@Test
public void testProjectNullSchema() throws Exception{
String[] input = {
"0\t1",
};
Util.createInputFile(cluster, "table_testProjectNullSchema", input);
pigServer.registerQuery("a = load 'table_testProjectNullSchema';");
pigServer.registerQuery("b = foreach a generate ASIN($0), $1;");
pigServer.registerQuery("c = order b by $0;");
Iterator<Tuple> iter = pigServer.openIterator("c");
Tuple t = iter.next();
Assert.assertTrue(t.toString().contains("(0.0,1)"));
Assert.assertFalse(iter.hasNext());
}
// See PIG-1188
@Test
public void testSchemaDataNotMatch() throws Exception{
String[] input = {
"0\t1\t2",
"3\t4",
"5"
};
Util.createInputFile(cluster, "table_testSchemaDataNotMatch", input);
pigServer.registerQuery("a = load 'table_testSchemaDataNotMatch' as (a0, a1);");
Iterator<Tuple> iter = pigServer.openIterator("a");
Tuple t = iter.next();
Assert.assertTrue(t.size()==2);
Assert.assertTrue(t.get(0).toString().equals("0"));
Assert.assertTrue(t.get(1).toString().equals("1"));
t = iter.next();
Assert.assertTrue(t.size()==2);
Assert.assertTrue(t.get(0).toString().equals("3"));
Assert.assertTrue(t.get(1).toString().equals("4"));
t = iter.next();
Assert.assertTrue(t.size()==2);
Assert.assertTrue(t.get(0).toString().equals("5"));
Assert.assertTrue(t.get(1)==null);
Assert.assertFalse(iter.hasNext());
}
// See PIG-1912
@Test
public void testDuplicateLoadFuncSignature() throws Exception{
String[] input = {
"0\t1\ta",
};
Util.createInputFile(cluster, "table_testDuplicateLoadFuncSignature", input);
pigServer.setBatchOn();
pigServer.registerQuery("a = load 'table_testDuplicateLoadFuncSignature' as (a0, a1, a2);");
pigServer.registerQuery("b = foreach a generate a0, a1;");
pigServer.registerQuery("a = load 'table_testDuplicateLoadFuncSignature' as (a0, a1, a2);");
pigServer.registerQuery("c = foreach a generate a0, a2;");
pigServer.registerQuery("store b into 'testDuplicateLoadFuncSignatureOutput1';");
pigServer.registerQuery("store c into 'testDuplicateLoadFuncSignatureOutput2';");
pigServer.executeBatch();
pigServer.registerQuery("a = load 'testDuplicateLoadFuncSignatureOutput1';");
Iterator<Tuple> iter = pigServer.openIterator("a");
Tuple t = iter.next();
Assert.assertTrue(t.toString().equals("(0,1)"));
Assert.assertFalse(iter.hasNext());
pigServer.registerQuery("a = load 'testDuplicateLoadFuncSignatureOutput2';");
iter = pigServer.openIterator("a");
t = iter.next();
Assert.assertTrue(t.toString().equals("(0,a)"));
Assert.assertFalse(iter.hasNext());
}
// See PIG-1927
@Test
public void testDereferencePartialAlias() throws Exception{
pigServer.registerQuery("a = load '1.txt' as (a0:int, a1);");
pigServer.registerQuery("b = group a by a0;");
pigServer.registerQuery("c = foreach b generate flatten(a);");
pigServer.registerQuery("d = cogroup c by (a0);");
pigServer.registerQuery("e = foreach d generate c.a0 as e0;");
pigServer.registerQuery("f = foreach e generate e0;");
// Shall not throw exception
pigServer.explain("f", System.out);
}
// See PIG-1866
@Test
public void testProjBagInTuple() throws Exception{
String[] input = {
"(1,{(one),(two)})",
};
Util.createInputFile(cluster, "table_testProjBagInTuple", input);
pigServer.registerQuery("a = load 'table_testProjBagInTuple' as (t : tuple(i: int, b1: bag { b_tuple : tuple ( b_str: chararray) }));");
pigServer.registerQuery("b = foreach a generate t.b1;");
Iterator<Tuple> iter = pigServer.openIterator("b");
Tuple t = iter.next();
Assert.assertTrue(t.toString().equals("({(one),(two)})"));
Assert.assertFalse(iter.hasNext());
}
//See PIG-1974
@Test
public void testCastMap() throws Exception{
String[] input = {
"([key#1])",
"([key#2])",
};
Util.createInputFile(cluster, "table_testCastMap", input);
pigServer.registerQuery("a = load 'table_testCastMap' as (m:map[]);");
pigServer.registerQuery("b = foreach a generate (map[int])m;");
pigServer.registerQuery("c = foreach b generate m#'key' + 1;");
Iterator<Tuple> iter = pigServer.openIterator("c");
Tuple t = iter.next();
Assert.assertEquals(t.get(0), 2);
t = iter.next();
Assert.assertEquals(t.get(0), 3);
Assert.assertFalse(iter.hasNext());
}
// See PIG-1979
@Test
public void testDereferenceUidBug() throws Exception{
String[] input1 = {
"0\t0\t{(1,2)}\t1",
};
String[] input2 = {
"0\t0",
};
Util.createInputFile(cluster, "table_testDereferenceUidBug1", input1);
Util.createInputFile(cluster, "table_testDereferenceUidBug2", input2);
pigServer.registerQuery("a = load 'table_testDereferenceUidBug1' as (a0:int, a1:int, a2:{t:(i0:int, i1:int)}, a3:int);");
pigServer.registerQuery("b = foreach a generate a0, a1, a0+a1 as sum, a2.i0 as a2, a3;");
pigServer.registerQuery("c = filter b by sum==0;");
pigServer.registerQuery("d = load 'table_testDereferenceUidBug2' as (d0:int, d1:int);");
pigServer.registerQuery("e = join c by a0, d by d0;");
pigServer.registerQuery("f = foreach e generate c::a2;");
Iterator<Tuple> iter = pigServer.openIterator("f");
Tuple t = iter.next();
Assert.assertTrue(t.toString().equals("({(1)})"));
Assert.assertFalse(iter.hasNext());
}
static public class UDFWithNonStandardType extends EvalFunc<Tuple>{
@Override
public Tuple exec(Tuple input) throws IOException {
Tuple t = TupleFactory.getInstance().newTuple();
t.append(new ArrayList<Integer>());
return t;
}
}
// See PIG-1826
@Test
public void testNonStandardData() throws Exception{
String[] input1 = {
"0",
};
Util.createInputFile(cluster, "table_testNonStandardData", input1);
pigServer.registerQuery("a = load 'table_testNonStandardData' as (a0);");
pigServer.registerQuery("b = foreach a generate " + UDFWithNonStandardType.class.getName() + "(a0);");
try {
pigServer.openIterator("b");
Assert.fail();
} catch (Exception e) {
// Tez does not construct exceptions from stacktrace as it will have multiple ones.
// So e.getCause().getCause() will be null
Throwable cause = e.getCause().getCause() == null ? e.getCause() : e.getCause().getCause();
String message = cause.getMessage();
Assert.assertTrue(message.contains(ArrayList.class.getName()));
}
}
// See PIG-1826
@Test
public void testNonStandardDataWithoutFetch() throws Exception{
Properties props = pigServer.getPigContext().getProperties();
props.setProperty(PigConfiguration.PIG_OPT_FETCH, "false");
String[] input1 = {
"0",
};
try {
Util.createInputFile(cluster, "table_testNonStandardDataWithoutFetch", input1);
pigServer.registerQuery("a = load 'table_testNonStandardDataWithoutFetch' as (a0);");
pigServer.registerQuery("b = foreach a generate " + UDFWithNonStandardType.class.getName() + "(a0);");
try {
pigServer.openIterator("b");
Assert.fail();
} catch (Exception e) {
Assert.assertTrue(ExceptionUtils.getRootCause(e).getMessage().contains(
"Unexpected data type " + ArrayList.class.getName() + " found in stream."));
}
}
finally {
props.setProperty(PigConfiguration.PIG_OPT_FETCH, "true");
}
}
// See PIG-2078
@Test
public void testProjectNullBag() throws Exception{
String[] input1 = {
"{(1)}\t2",
"\t3"
};
HashSet<String> optimizerRules = new HashSet<String>();
optimizerRules.add("MergeForEach");
pigServer.getPigContext().getProperties().setProperty(
PigImplConstants.PIG_OPTIMIZER_RULES_KEY,
ObjectSerializer.serialize(optimizerRules));
Util.createInputFile(cluster, "table_testProjectNullBag", input1);
pigServer.registerQuery("a = load 'table_testProjectNullBag' as (a0:bag{}, a1:int);");
pigServer.registerQuery("b = foreach a generate a0;");
Iterator<Tuple> iter = pigServer.openIterator("b");
Tuple t = iter.next();
Assert.assertTrue(t.toString().equals("({(1)})"));
t = iter.next();
Assert.assertTrue(t.toString().equals("()"));
Assert.assertFalse(iter.hasNext());
pigServer.getPigContext().getProperties().remove(PigImplConstants.PIG_OPTIMIZER_RULES_KEY);
}
// See PIG-2159
@Test
public void testUnionOnSchemaUidGeneration() throws Exception{
String[] input1 = {
"100,101,102,103,104,105",
"110,111,112,113,114,115"
};
String[] input2 = {
"200,201,202,203,204,205",
"210,211,212,213,214,215"
};
String[] input0 = {
"200,201,202,203,204,205",
"210,211,212,213,214,215"
};
Util.createInputFile(cluster, "table_testUnionOnSchemaUidGeneration1", input1);
Util.createInputFile(cluster, "table_testUnionOnSchemaUidGeneration2", input2);
Util.createInputFile(cluster, "table_testUnionOnSchemaUidGeneration0", input0);
pigServer.registerQuery("A = load 'table_testUnionOnSchemaUidGeneration1' using PigStorage(',') as (f1:int,f2:int,f3:int,f4:long,f5:double);");
pigServer.registerQuery("B = load 'table_testUnionOnSchemaUidGeneration2' using PigStorage(',') as (f1:int,f2:int,f3:int,f4:long,f5:double);");
pigServer.registerQuery("C = load 'table_testUnionOnSchemaUidGeneration0' using PigStorage(',') as (f1:int,f2:int,f3:int);");
pigServer.registerQuery("U = UNION ONSCHEMA A,B;");
pigServer.registerQuery("J = join C by (f1,f2,f3) LEFT OUTER, U by (f1,f2,f3);");
pigServer.registerQuery("Porj = foreach J generate C::f1 as f1 ,C::f2 as f2,C::f3 as f3,U::f4 as f4,U::f5 as f5;");
pigServer.registerQuery("G = GROUP Porj by (f1,f2,f3,f5);");
pigServer.registerQuery("Final = foreach G generate SUM(Porj.f4) as total;");
Iterator<Tuple> iter = pigServer.openIterator("Final");
Tuple t = iter.next();
Assert.assertTrue(t.toString().equals("(203)"));
t = iter.next();
Assert.assertTrue(t.toString().equals("(213)"));
Assert.assertFalse(iter.hasNext());
}
// See PIG-2185
@Test
public void testProjectEmptyBag() throws Exception{
String[] input = {
"{(12)}",
"{(23)}",
""
};
Util.createInputFile(cluster, "table_testProjectEmptyBag", input);
pigServer.registerQuery("A = load 'table_testProjectEmptyBag' as (bg:bag{});");
pigServer.registerQuery("B = FOREACH A { x = FILTER bg BY $0 == '12'; GENERATE x; };");
Iterator<Tuple> iter = pigServer.openIterator("B");
Tuple t = iter.next();
Assert.assertTrue(t.toString().equals("({(12)})"));
t = iter.next();
Assert.assertTrue(t.toString().equals("({})"));
Assert.assertTrue(iter.hasNext());
t = iter.next();
Assert.assertTrue(t.toString().equals("({})"));
Assert.assertFalse(iter.hasNext());
}
// See PIG-2231
@Test
public void testLimitFlatten() throws Exception{
String[] input = {
"1\tA",
"1\tB",
"2\tC",
"3\tD",
"3\tE",
"3\tF"
};
Util.createInputFile(cluster, "table_testLimitFlatten", input);
pigServer.registerQuery("data = load 'table_testLimitFlatten' as (k,v);");
pigServer.registerQuery("grouped = GROUP data BY k;");
if (Util.isSparkExecType(cluster.getExecType())) {
pigServer.registerQuery("grouped = ORDER grouped BY group;");
}
pigServer.registerQuery("selected = LIMIT grouped 2;");
pigServer.registerQuery("flattened = FOREACH selected GENERATE FLATTEN (data);");
Iterator<Tuple> iter = pigServer.openIterator("flattened");
String[] expected = new String[] {"(1,A)", "(1,B)", "(2,C)"};
Util.checkQueryOutputsAfterSortRecursive(iter, expected,
org.apache.pig.newplan.logical.Util.translateSchema(pigServer.dumpSchema("flattened")));
}
// See PIG-2237
@Test
public void testLimitAutoReducer() throws Exception{
String[] input = {
"1\tA",
"4\tB",
"2\tC",
"3\tD",
"6\tE",
"5\tF"
};
Util.createInputFile(cluster, "table_testLimitAutoReducer", input);
pigServer.getPigContext().getProperties().setProperty("pig.exec.reducers.bytes.per.reducer", "16");
pigServer.registerQuery("A = load 'table_testLimitAutoReducer';");
pigServer.registerQuery("B = order A by $0;");
pigServer.registerQuery("C = limit B 2;");
Iterator<Tuple> iter = pigServer.openIterator("C");
Tuple t = iter.next();
Assert.assertTrue(t.toString().equals("(1,A)"));
t = iter.next();
Assert.assertTrue(t.toString().equals("(2,C)"));
Assert.assertFalse(iter.hasNext());
}
@SuppressWarnings("unchecked")
@Test
public void testCrossAfterGroupAll() throws Exception{
String[] input = {
"1\tA",
"2\tB",
"3\tC",
"4\tD",
};
Util.createInputFile(cluster, "table_testCrossAfterGroupAll", input);
try {
pigServer.getPigContext().getProperties().setProperty("pig.exec.reducers.bytes.per.reducer", "40");
pigServer.registerQuery("A = load 'table_testCrossAfterGroupAll' as (a0:int, a1:chararray);");
pigServer.registerQuery("B = group A all;");
pigServer.registerQuery("C = foreach B generate COUNT(A);");
pigServer.registerQuery("D = cross A, C;");
Path output = FileLocalizer.getTemporaryPath(pigServer.getPigContext());
ExecJob job = pigServer.store("D", output.toString());
FileSystem fs = output.getFileSystem(cluster.getConfiguration());
FileStatus[] partFiles = fs.listStatus(output, new PathFilter() {
@Override
public boolean accept(Path path) {
if (path.getName().startsWith("part")) {
return true;
}
return false;
}
});
if (Util.isSparkExecType(cluster.getExecType())) {
// TODO: Fix this when we implement auto-parallelism in Spark
Assert.assertTrue(partFiles.length == 1);
} else {
// auto-parallelism is 2 in MR, 20 in Tez, so check >=2
Assert.assertTrue(partFiles.length >= 2);
}
// Check the output
Iterator<Tuple> iter = job.getResults();
List<Tuple> results = new ArrayList<Tuple>();
while (iter.hasNext()) {
results.add(iter.next());
}
Collections.sort(results);
Assert.assertEquals(4, results.size());
Assert.assertEquals("(1,A,4)", results.get(0).toString());
Assert.assertEquals("(2,B,4)", results.get(1).toString());
Assert.assertEquals("(3,C,4)", results.get(2).toString());
Assert.assertEquals("(4,D,4)", results.get(3).toString());
} finally {
pigServer.getPigContext().getProperties().remove("pig.exec.reducers.bytes.per.reducer");
}
}
// see PIG-4392
@Test
public void testRankWithEmptyReduce() throws Exception {
Util.createInputFile(cluster, "table_testRankWithEmptyReduce", new String[]{"1\t2\t3", "4\t5\t6", "7\t8\t9"});
pigServer.setDefaultParallel(4);
pigServer.registerQuery("d = load 'table_testRankWithEmptyReduce' as (a:int, b:int, c:int);");
pigServer.registerQuery("e = rank d by a parallel 4;");
Iterator<Tuple> iter = pigServer.openIterator("e");
Collection<String> results = new HashSet<String>();
results.add("(1,1,2,3)");
results.add("(2,4,5,6)");
results.add("(3,7,8,9)");
Assert.assertTrue(results.contains(iter.next().toString()));
Assert.assertTrue(results.contains(iter.next().toString()));
Assert.assertTrue(results.contains(iter.next().toString()));
Assert.assertFalse(iter.hasNext());
}
}
/*
* Copyright 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rockagen.commons.util;
import java.lang.reflect.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Reflex Utils
*
* @author RA
* @since JDK1.6
*/
public class ReflexUtil {
// ~ Instance fields ==================================================
/**
*
*/
private static final Logger log = LoggerFactory.getLogger(ReflexUtil.class);
// ~ Constructors ==================================================
/**
*/
private ReflexUtil() {
}
// ~ Methods ==================================================
/**
*
* Directly reads an object's field value; private/protected
* modifiers are ignored. If a matching getter exists, its return
* value is used instead.<p>If recursively is true, the whole class
* hierarchy is searched.</p>
*
* @param object object
* @param fieldName field name
* @param recursively recursively
* @return object
*/
public static Object getFieldValue(final Object object, String fieldName,
boolean recursively) {
Object result = null;
if (object == null)
return null;
Field field = ClassUtil.getDeclaredField(object.getClass(), fieldName,
recursively);
if (field == null) {
log.debug("Could not find field [ {} ] on target [ {} ]",
fieldName, object.getClass().getSimpleName());
return null;
}
String methodName = "get" + CommUtil.capitalize(fieldName);
Method method = ClassUtil.getDeclaredMethod(object.getClass(),
recursively, methodName);
if (method != null) {
try {
makeAccessible(method);
result = method.invoke(object);
return result;
} catch (InvocationTargetException e) {
log.debug("Could not find method [ {} ] on target [ {} ]",
methodName, object.getClass().getSimpleName());
}catch (IllegalAccessException e) {
// Will not happen
}
}
makeAccessible(field);
try {
result = field.get(object);
}catch (IllegalAccessException e) {
// Will not happen
}
return result;
}
/**
* Directly writes an object's field value; private/protected
* modifiers are ignored. If a matching setter exists, it is invoked
* instead. <p>If recursively is true, the whole class hierarchy is
* searched.</p>
*
* @param object object
* @param fieldName field name
* @param value value
* @param recursively recursively
*/
public static void setFieldValue(final Object object, String fieldName,
final Object value, boolean recursively) {
if (object == null)
return;
Field field = ClassUtil.getDeclaredField(object.getClass(), fieldName,
recursively);
if (field == null) {
log.debug("Could not find field [ {} ] on target [ {} ]",
fieldName, object.getClass().getSimpleName());
return;
}
String methodName = "set" + CommUtil.capitalize(fieldName);
Method method = ClassUtil.getDeclaredMethod(object.getClass(),
recursively, methodName,
value == null ? Object.class : value.getClass());
if (method != null) {
try {
makeAccessible(method);
method.invoke(object, value);
return;
} catch (InvocationTargetException e) {
log.debug("Could not find method [ {} ] on target [ {} ]",
methodName, object.getClass().getSimpleName());
} catch (NullPointerException e) {
log.debug("{} field: [ {} ] is null", object.getClass()
.getSimpleName(), fieldName);
}catch (IllegalAccessException e) {
// Will not happen
}
}
makeAccessible(field);
try {
field.set(object, value);
}catch (NullPointerException e) {
log.debug("{} field: [ {} ] is null", object.getClass()
.getSimpleName(), fieldName);
} catch (IllegalAccessException e) {
// Will not happen
}
}
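// Illustrative usage sketch (not part of the original class; the Person type and its
// "name" field are assumptions made only for this example):
//
//   Person p = new Person();
//   ReflexUtil.setFieldValue(p, "name", "Alice", true);      // uses setName(..) if present, else writes the field
//   Object name = ReflexUtil.getFieldValue(p, "name", true); // "Alice" via getName(..) or the field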
/**
* Makes the field accessible when it or its declaring class is not public
*
* @param field {@link java.lang.reflect.Field}
*/
public static void makeAccessible(final Field field) {
if (!Modifier.isPublic(field.getModifiers())
|| !Modifier.isPublic(field.getDeclaringClass().getModifiers())) {
field.setAccessible(true);
}
}
/**
* Makes the method accessible when it or its declaring class is not public
*
* @param method {@link java.lang.reflect.Method}
*/
public static void makeAccessible(final Method method) {
if (!Modifier.isPublic(method.getModifiers())
|| !Modifier
.isPublic(method.getDeclaringClass().getModifiers())) {
method.setAccessible(true);
}
}
/**
* Makes the constructor accessible when it or its declaring class is not public
* @param constructor {@link java.lang.reflect.Constructor}
* @param <T> t
*/
public static <T> void makeAccessible(final Constructor<T> constructor) {
if (!Modifier.isPublic(constructor.getModifiers())
|| !Modifier.isPublic(constructor.getDeclaringClass()
.getModifiers())) {
constructor.setAccessible(true);
}
}
/**
* Obtains the generic parameter types declared on the superclass
* <p>
* for example:
* </p>
* <code>
* ClassB<T> extends ClassA<T>
* </code>
*
* @param clazz {@link java.lang.Class}
* @return Types
*/
public static Type[] getSuperClassGenricTypes(final Class<?> clazz) {
Type[] temp = { Object.class };
// eg: ClassA<T>
if (clazz == null)
return null;
Type type = clazz.getGenericSuperclass();
if (type instanceof ParameterizedType) {
return ((ParameterizedType) type).getActualTypeArguments();
} else {
log.warn(
"{} 's superclass not ParameterizedType",
clazz);
return temp;
}
}
/**
* Obtains a single generic parameter type declared on the superclass
* <p>
* for example:
* </p>
* <code>
* ClassB<T> extends ClassA<T>
* </code>
*
* @param clazz {@link java.lang.Class}
* @param index
* start 0
* @return Type
*/
public static Type getSuperClassGenricType(final Class<?> clazz, int index) {
Type[] types = getSuperClassGenricTypes(clazz);
if (index < 0) {
log.warn(
"{}'s index must not be negative, returning index 0",
clazz == null ? Object.class.getSimpleName() : clazz
.getSimpleName());
return types[0];
} else if (index >= types.length) {
log.warn(
"{}'s index {} is out of range, returning the last element",
clazz == null ? Object.class.getSimpleName() : clazz
.getSimpleName(), index);
return types[types.length - 1];
} else {
return types[index];
}
}
/**
* Obtains the generic parameter classes declared on the superclass
* <p>
* for example:
* </p>
* <code>
* ClassB<T> extends ClassA<T>
* </code>
*
* @param clazz {@link java.lang.Class}
* @return Classes
*/
public static Class<?>[] getSuperClassGenricClasses(final Class<?> clazz) {
Type[] types = getSuperClassGenricTypes(clazz);
Class<?>[] clazzs = new Class<?>[types.length];
for (int i = 0; i < types.length; i++) {
clazzs[i] = (Class<?>) types[i];
}
return clazzs;
}
/**
* Obtains a single generic parameter class declared on the superclass
* <p>
* for example:
* </p>
* <code>
* ClassB<T> extends ClassA<T>
* </code>
*
* @param clazz {@link java.lang.Class}
* @param index
* start 0
* @return Class
*/
public static Class<?> getSuperClassGenricClass(final Class<?> clazz,
int index) {
return (Class<?>) getSuperClassGenricType(clazz, index);
}
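// Illustrative sketch (BaseDao and StringDao are hypothetical classes, shown only to
// demonstrate how the superclass generic parameter is resolved):
//
//   class BaseDao<T> {}
//   class StringDao extends BaseDao<String> {}
//   Class<?> t = ReflexUtil.getSuperClassGenricClass(StringDao.class, 0); // String.class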
/**
* Obtains the generic parameter types declared on the implemented interfaces
* <p>
* for example:
* </p>
* <code>
* ClassB<T> implements ClassA<T>
* </code>
*
* @param clazz {@link java.lang.Class}
* @return Types
*/
public static Type[] getInterfacesGenricTypes(final Class<?> clazz) {
if (clazz == null)
return null;
Type[] types=clazz.getGenericInterfaces();
Type[] gtypes=new Type[0];
for(Type t:types){
if (t instanceof ParameterizedType) {
Type[] gts=((ParameterizedType) t).getActualTypeArguments();
int olen=gtypes.length;
int ilen=gts.length;
Type[] tmp=new Type[olen+ilen];
System.arraycopy(gtypes,0,tmp,0,olen);
System.arraycopy(gts, 0, tmp, olen, ilen);
gtypes=tmp;
}
}
return gtypes;
}
/**
* Obtains a single generic parameter type declared on the implemented interfaces
* <p>
* for example:
* </p>
* <code>
* ClassB<T> implements ClassA<T>
* </code>
*
* @param clazz {@link java.lang.Class}
* @param index
* start 0
* @return Type
*/
public static Type getInterfacesGenricType(final Class<?> clazz, int index) {
    Type[] types = getInterfacesGenricTypes(clazz);
    if (types == null || types.length == 0) {
        // no parameterized interfaces to resolve arguments from
        return Object.class;
    }
    if (index < 0) {
        log.warn("{}: index must not be negative, returning the first type argument",
                clazz.getSimpleName());
        return types[0];
    } else if (index >= types.length) {
        log.warn("{}: type argument index {} out of range, returning the last one",
                clazz.getSimpleName(), index);
        return types[types.length - 1];
    }
    return types[index];
}
/**
 * Obtains the generic type arguments declared on the directly implemented interfaces as classes.
 * <p>
 * for example:
 * </p>
 * <code>
 * ClassB<T> implements ClassA<T>
 * </code>
 *
 * @param clazz {@link java.lang.Class} whose interfaces are inspected
 * @return the type arguments as {@link Class} objects
 */
public static Class<?>[] getInterfacesGenricClasses(final Class<?> clazz) {
Type[] types = getInterfacesGenricTypes(clazz);
Class<?>[] clazzs = new Class<?>[types.length];
for (int i = 0; i < types.length; i++) {
clazzs[i] = (Class<?>) types[i];
}
return clazzs;
}
/**
 * Obtains a single interface type argument as a class.
 * <p>
 * for example:
 * </p>
 * <code>
 * ClassB<T> implements ClassA<T>
 * </code>
 *
 * @param clazz {@link java.lang.Class} whose interfaces are inspected
 * @param index
 *            zero-based position of the type argument
 * @return the type argument at the given index as a {@link Class}
 */
public static Class<?> getInterfacesGenricClass(final Class<?> clazz,
int index) {
return (Class<?>) getInterfacesGenricType(clazz, index);
}
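/*
 * Illustrative usage sketch (Converter and StringToIntConverter are hypothetical
 * types): resolving the arguments a class binds on a generic interface it implements.
 *
 *     interface Converter<S, T> { T convert(S source); }
 *     class StringToIntConverter implements Converter<String, Integer> {
 *         public Integer convert(String source) { return Integer.valueOf(source); }
 *     }
 *     // getInterfacesGenricClasses(StringToIntConverter.class)
 *     //         -> [String.class, Integer.class]
 */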
/**
 * Creates a new instance of the specified class and populates it from a map.
 * <p><b>Note: clazz must provide setters for the supplied properties.</b></p>
 *
 * @param clazz {@link java.lang.Class} to instantiate
 * @param paramsMap
 *            property names mapped to the values to set
 * @param accessible whether non-public members may be accessed
 * @param <T> the instance type
 * @return the populated instance, or {@code null} if clazz or paramsMap is null or empty
 */
public static <T> T getBasicInstance(final Class<T> clazz,
final Map<String, Object> paramsMap, boolean accessible) {
if (clazz != null && paramsMap != null && paramsMap.size() > 0) {
T instance = ClassUtil.getInstance(clazz, accessible);
for (Map.Entry<String, Object> entry : paramsMap.entrySet()) {
String key = entry.getKey();
if (CommUtil.isBlank(key)) {
continue;
}
key = CommUtil.uncapitalize(key);
setFieldValue(instance, key, entry.getValue(), false);
}
return instance;
} else {
return null;
}
}
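/*
 * Illustrative usage sketch (Person is a hypothetical bean with name/age setters):
 * populating a fresh instance from a map of property values. Keys are uncapitalized
 * before the lookup, so "Name" and "name" address the same property.
 *
 *     Map<String, Object> params = new HashMap<>();
 *     params.put("Name", "Alice");
 *     params.put("age", 30);
 *     Person person = getBasicInstance(Person.class, params, true);
 */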
}
|
|
/* Generated by camel build tools - do NOT edit this file! */
package org.apache.camel.component.sip;
import java.util.Map;
import org.apache.camel.CamelContext;
import org.apache.camel.spi.GeneratedPropertyConfigurer;
import org.apache.camel.spi.PropertyConfigurerGetter;
import org.apache.camel.util.CaseInsensitiveMap;
import org.apache.camel.support.component.PropertyConfigurerSupport;
/**
* Generated by camel build tools - do NOT edit this file!
*/
@SuppressWarnings("unchecked")
public class SipEndpointConfigurer extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
SipEndpoint target = (SipEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "addressfactory":
case "addressFactory": target.getConfiguration().setAddressFactory(property(camelContext, javax.sip.address.AddressFactory.class, value)); return true;
case "basicpropertybinding":
case "basicPropertyBinding": target.setBasicPropertyBinding(property(camelContext, boolean.class, value)); return true;
case "bridgeerrorhandler":
case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
case "cacheconnections":
case "cacheConnections": target.getConfiguration().setCacheConnections(property(camelContext, boolean.class, value)); return true;
case "callidheader":
case "callIdHeader": target.getConfiguration().setCallIdHeader(property(camelContext, javax.sip.header.CallIdHeader.class, value)); return true;
case "consumer": target.getConfiguration().setConsumer(property(camelContext, boolean.class, value)); return true;
case "contactheader":
case "contactHeader": target.getConfiguration().setContactHeader(property(camelContext, javax.sip.header.ContactHeader.class, value)); return true;
case "contentsubtype":
case "contentSubType": target.getConfiguration().setContentSubType(property(camelContext, java.lang.String.class, value)); return true;
case "contenttype":
case "contentType": target.getConfiguration().setContentType(property(camelContext, java.lang.String.class, value)); return true;
case "contenttypeheader":
case "contentTypeHeader": target.getConfiguration().setContentTypeHeader(property(camelContext, javax.sip.header.ContentTypeHeader.class, value)); return true;
case "eventheader":
case "eventHeader": target.getConfiguration().setEventHeader(property(camelContext, javax.sip.header.EventHeader.class, value)); return true;
case "eventheadername":
case "eventHeaderName": target.getConfiguration().setEventHeaderName(property(camelContext, java.lang.String.class, value)); return true;
case "eventid":
case "eventId": target.getConfiguration().setEventId(property(camelContext, java.lang.String.class, value)); return true;
case "exceptionhandler":
case "exceptionHandler": target.setExceptionHandler(property(camelContext, org.apache.camel.spi.ExceptionHandler.class, value)); return true;
case "exchangepattern":
case "exchangePattern": target.setExchangePattern(property(camelContext, org.apache.camel.ExchangePattern.class, value)); return true;
case "expiresheader":
case "expiresHeader": target.getConfiguration().setExpiresHeader(property(camelContext, javax.sip.header.ExpiresHeader.class, value)); return true;
case "extensionheader":
case "extensionHeader": target.getConfiguration().setExtensionHeader(property(camelContext, javax.sip.header.ExtensionHeader.class, value)); return true;
case "fromheader":
case "fromHeader": target.getConfiguration().setFromHeader(property(camelContext, javax.sip.header.FromHeader.class, value)); return true;
case "fromhost":
case "fromHost": target.getConfiguration().setFromHost(property(camelContext, java.lang.String.class, value)); return true;
case "fromport":
case "fromPort": target.getConfiguration().setFromPort(property(camelContext, int.class, value)); return true;
case "fromuser":
case "fromUser": target.getConfiguration().setFromUser(property(camelContext, java.lang.String.class, value)); return true;
case "headerfactory":
case "headerFactory": target.getConfiguration().setHeaderFactory(property(camelContext, javax.sip.header.HeaderFactory.class, value)); return true;
case "implementationdebuglogfile":
case "implementationDebugLogFile": target.getConfiguration().setImplementationDebugLogFile(property(camelContext, java.lang.String.class, value)); return true;
case "implementationserverlogfile":
case "implementationServerLogFile": target.getConfiguration().setImplementationServerLogFile(property(camelContext, java.lang.String.class, value)); return true;
case "implementationtracelevel":
case "implementationTraceLevel": target.getConfiguration().setImplementationTraceLevel(property(camelContext, java.lang.String.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "listeningpoint":
case "listeningPoint": target.getConfiguration().setListeningPoint(property(camelContext, javax.sip.ListeningPoint.class, value)); return true;
case "maxforwards":
case "maxForwards": target.getConfiguration().setMaxForwards(property(camelContext, int.class, value)); return true;
case "maxforwardsheader":
case "maxForwardsHeader": target.getConfiguration().setMaxForwardsHeader(property(camelContext, javax.sip.header.MaxForwardsHeader.class, value)); return true;
case "maxmessagesize":
case "maxMessageSize": target.getConfiguration().setMaxMessageSize(property(camelContext, int.class, value)); return true;
case "messagefactory":
case "messageFactory": target.getConfiguration().setMessageFactory(property(camelContext, javax.sip.message.MessageFactory.class, value)); return true;
case "msgexpiration":
case "msgExpiration": target.getConfiguration().setMsgExpiration(property(camelContext, int.class, value)); return true;
case "presenceagent":
case "presenceAgent": target.getConfiguration().setPresenceAgent(property(camelContext, boolean.class, value)); return true;
case "receivetimeoutmillis":
case "receiveTimeoutMillis": target.getConfiguration().setReceiveTimeoutMillis(property(camelContext, long.class, value)); return true;
case "sipfactory":
case "sipFactory": target.getConfiguration().setSipFactory(property(camelContext, javax.sip.SipFactory.class, value)); return true;
case "sipstack":
case "sipStack": target.getConfiguration().setSipStack(property(camelContext, javax.sip.SipStack.class, value)); return true;
case "sipuri":
case "sipUri": target.getConfiguration().setSipUri(property(camelContext, javax.sip.address.SipURI.class, value)); return true;
case "stackname":
case "stackName": target.getConfiguration().setStackName(property(camelContext, java.lang.String.class, value)); return true;
case "synchronous": target.setSynchronous(property(camelContext, boolean.class, value)); return true;
case "toheader":
case "toHeader": target.getConfiguration().setToHeader(property(camelContext, javax.sip.header.ToHeader.class, value)); return true;
case "tohost":
case "toHost": target.getConfiguration().setToHost(property(camelContext, java.lang.String.class, value)); return true;
case "toport":
case "toPort": target.getConfiguration().setToPort(property(camelContext, int.class, value)); return true;
case "touser":
case "toUser": target.getConfiguration().setToUser(property(camelContext, java.lang.String.class, value)); return true;
case "transport": target.getConfiguration().setTransport(property(camelContext, java.lang.String.class, value)); return true;
case "userouterforalluris":
case "useRouterForAllUris": target.getConfiguration().setUseRouterForAllUris(property(camelContext, boolean.class, value)); return true;
case "viaheaders":
case "viaHeaders": target.getConfiguration().setViaHeaders(property(camelContext, java.util.List.class, value)); return true;
default: return false;
}
}
@Override
public Map<String, Object> getAllOptions(Object target) {
Map<String, Object> answer = new CaseInsensitiveMap();
answer.put("addressFactory", javax.sip.address.AddressFactory.class);
answer.put("basicPropertyBinding", boolean.class);
answer.put("bridgeErrorHandler", boolean.class);
answer.put("cacheConnections", boolean.class);
answer.put("callIdHeader", javax.sip.header.CallIdHeader.class);
answer.put("consumer", boolean.class);
answer.put("contactHeader", javax.sip.header.ContactHeader.class);
answer.put("contentSubType", java.lang.String.class);
answer.put("contentType", java.lang.String.class);
answer.put("contentTypeHeader", javax.sip.header.ContentTypeHeader.class);
answer.put("eventHeader", javax.sip.header.EventHeader.class);
answer.put("eventHeaderName", java.lang.String.class);
answer.put("eventId", java.lang.String.class);
answer.put("exceptionHandler", org.apache.camel.spi.ExceptionHandler.class);
answer.put("exchangePattern", org.apache.camel.ExchangePattern.class);
answer.put("expiresHeader", javax.sip.header.ExpiresHeader.class);
answer.put("extensionHeader", javax.sip.header.ExtensionHeader.class);
answer.put("fromHeader", javax.sip.header.FromHeader.class);
answer.put("fromHost", java.lang.String.class);
answer.put("fromPort", int.class);
answer.put("fromUser", java.lang.String.class);
answer.put("headerFactory", javax.sip.header.HeaderFactory.class);
answer.put("implementationDebugLogFile", java.lang.String.class);
answer.put("implementationServerLogFile", java.lang.String.class);
answer.put("implementationTraceLevel", java.lang.String.class);
answer.put("lazyStartProducer", boolean.class);
answer.put("listeningPoint", javax.sip.ListeningPoint.class);
answer.put("maxForwards", int.class);
answer.put("maxForwardsHeader", javax.sip.header.MaxForwardsHeader.class);
answer.put("maxMessageSize", int.class);
answer.put("messageFactory", javax.sip.message.MessageFactory.class);
answer.put("msgExpiration", int.class);
answer.put("presenceAgent", boolean.class);
answer.put("receiveTimeoutMillis", long.class);
answer.put("sipFactory", javax.sip.SipFactory.class);
answer.put("sipStack", javax.sip.SipStack.class);
answer.put("sipUri", javax.sip.address.SipURI.class);
answer.put("stackName", java.lang.String.class);
answer.put("synchronous", boolean.class);
answer.put("toHeader", javax.sip.header.ToHeader.class);
answer.put("toHost", java.lang.String.class);
answer.put("toPort", int.class);
answer.put("toUser", java.lang.String.class);
answer.put("transport", java.lang.String.class);
answer.put("useRouterForAllUris", boolean.class);
answer.put("viaHeaders", java.util.List.class);
return answer;
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
SipEndpoint target = (SipEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "addressfactory":
case "addressFactory": return target.getConfiguration().getAddressFactory();
case "basicpropertybinding":
case "basicPropertyBinding": return target.isBasicPropertyBinding();
case "bridgeerrorhandler":
case "bridgeErrorHandler": return target.isBridgeErrorHandler();
case "cacheconnections":
case "cacheConnections": return target.getConfiguration().isCacheConnections();
case "callidheader":
case "callIdHeader": return target.getConfiguration().getCallIdHeader();
case "consumer": return target.getConfiguration().isConsumer();
case "contactheader":
case "contactHeader": return target.getConfiguration().getContactHeader();
case "contentsubtype":
case "contentSubType": return target.getConfiguration().getContentSubType();
case "contenttype":
case "contentType": return target.getConfiguration().getContentType();
case "contenttypeheader":
case "contentTypeHeader": return target.getConfiguration().getContentTypeHeader();
case "eventheader":
case "eventHeader": return target.getConfiguration().getEventHeader();
case "eventheadername":
case "eventHeaderName": return target.getConfiguration().getEventHeaderName();
case "eventid":
case "eventId": return target.getConfiguration().getEventId();
case "exceptionhandler":
case "exceptionHandler": return target.getExceptionHandler();
case "exchangepattern":
case "exchangePattern": return target.getExchangePattern();
case "expiresheader":
case "expiresHeader": return target.getConfiguration().getExpiresHeader();
case "extensionheader":
case "extensionHeader": return target.getConfiguration().getExtensionHeader();
case "fromheader":
case "fromHeader": return target.getConfiguration().getFromHeader();
case "fromhost":
case "fromHost": return target.getConfiguration().getFromHost();
case "fromport":
case "fromPort": return target.getConfiguration().getFromPort();
case "fromuser":
case "fromUser": return target.getConfiguration().getFromUser();
case "headerfactory":
case "headerFactory": return target.getConfiguration().getHeaderFactory();
case "implementationdebuglogfile":
case "implementationDebugLogFile": return target.getConfiguration().getImplementationDebugLogFile();
case "implementationserverlogfile":
case "implementationServerLogFile": return target.getConfiguration().getImplementationServerLogFile();
case "implementationtracelevel":
case "implementationTraceLevel": return target.getConfiguration().getImplementationTraceLevel();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
case "listeningpoint":
case "listeningPoint": return target.getConfiguration().getListeningPoint();
case "maxforwards":
case "maxForwards": return target.getConfiguration().getMaxForwards();
case "maxforwardsheader":
case "maxForwardsHeader": return target.getConfiguration().getMaxForwardsHeader();
case "maxmessagesize":
case "maxMessageSize": return target.getConfiguration().getMaxMessageSize();
case "messagefactory":
case "messageFactory": return target.getConfiguration().getMessageFactory();
case "msgexpiration":
case "msgExpiration": return target.getConfiguration().getMsgExpiration();
case "presenceagent":
case "presenceAgent": return target.getConfiguration().isPresenceAgent();
case "receivetimeoutmillis":
case "receiveTimeoutMillis": return target.getConfiguration().getReceiveTimeoutMillis();
case "sipfactory":
case "sipFactory": return target.getConfiguration().getSipFactory();
case "sipstack":
case "sipStack": return target.getConfiguration().getSipStack();
case "sipuri":
case "sipUri": return target.getConfiguration().getSipUri();
case "stackname":
case "stackName": return target.getConfiguration().getStackName();
case "synchronous": return target.isSynchronous();
case "toheader":
case "toHeader": return target.getConfiguration().getToHeader();
case "tohost":
case "toHost": return target.getConfiguration().getToHost();
case "toport":
case "toPort": return target.getConfiguration().getToPort();
case "touser":
case "toUser": return target.getConfiguration().getToUser();
case "transport": return target.getConfiguration().getTransport();
case "userouterforalluris":
case "useRouterForAllUris": return target.getConfiguration().isUseRouterForAllUris();
case "viaheaders":
case "viaHeaders": return target.getConfiguration().getViaHeaders();
default: return null;
}
}
}
|
|
/*
* Copyright 2012 the original author or authors.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.google.bitcoin.uri;
import com.google.bitcoin.core.Address;
import com.google.bitcoin.core.Utils;
import com.google.bitcoin.params.MainNetParams;
import com.google.bitcoin.params.TestNet3Params;
import org.junit.Test;
import com.google.bitcoin.core.CoinDefinition;
import java.io.UnsupportedEncodingException;
import java.math.BigInteger;
import static org.junit.Assert.*;
public class BitcoinURITest {
private BitcoinURI testObject = null;
private static final String MAINNET_GOOD_ADDRESS = CoinDefinition.UNITTEST_ADDRESS; //"1KzTSfqjF2iKCduwz59nv2uqh1W2JsTxZH";
@Test
public void testConvertToBitcoinURI() throws Exception {
Address goodAddress = new Address(MainNetParams.get(), MAINNET_GOOD_ADDRESS);
// simple example
assertEquals(CoinDefinition.coinURIScheme + ":" + MAINNET_GOOD_ADDRESS + "?amount=12.34&label=Hello&message=AMessage", BitcoinURI.convertToBitcoinURI(goodAddress, Utils.toNanoCoins("12.34"), "Hello", "AMessage"));
// example with spaces, ampersand and plus
assertEquals(CoinDefinition.coinURIScheme + ":" + MAINNET_GOOD_ADDRESS + "?amount=12.34&label=Hello%20World&message=Mess%20%26%20age%20%2B%20hope", BitcoinURI.convertToBitcoinURI(goodAddress, Utils.toNanoCoins("12.34"), "Hello World", "Mess & age + hope"));
// no amount, label present, message present
assertEquals(CoinDefinition.coinURIScheme + ":" + MAINNET_GOOD_ADDRESS + "?label=Hello&message=glory", BitcoinURI.convertToBitcoinURI(goodAddress, null, "Hello", "glory"));
// amount present, no label, message present
assertEquals(CoinDefinition.coinURIScheme + ":" + MAINNET_GOOD_ADDRESS + "?amount=0.1&message=glory", BitcoinURI.convertToBitcoinURI(goodAddress, Utils.toNanoCoins("0.1"), null, "glory"));
assertEquals(CoinDefinition.coinURIScheme + ":" + MAINNET_GOOD_ADDRESS + "?amount=0.1&message=glory", BitcoinURI.convertToBitcoinURI(goodAddress, Utils.toNanoCoins("0.1"), "", "glory"));
// amount present, label present, no message
assertEquals(CoinDefinition.coinURIScheme + ":" + MAINNET_GOOD_ADDRESS + "?amount=12.34&label=Hello", BitcoinURI.convertToBitcoinURI(goodAddress, Utils.toNanoCoins("12.34"), "Hello", null));
assertEquals(CoinDefinition.coinURIScheme + ":" + MAINNET_GOOD_ADDRESS + "?amount=12.34&label=Hello", BitcoinURI.convertToBitcoinURI(goodAddress, Utils.toNanoCoins("12.34"), "Hello", ""));
// amount present, no label, no message
assertEquals(CoinDefinition.coinURIScheme + ":" + MAINNET_GOOD_ADDRESS + "?amount=1000", BitcoinURI.convertToBitcoinURI(goodAddress, Utils.toNanoCoins("1000"), null, null));
assertEquals(CoinDefinition.coinURIScheme + ":" + MAINNET_GOOD_ADDRESS + "?amount=1000", BitcoinURI.convertToBitcoinURI(goodAddress, Utils.toNanoCoins("1000"), "", ""));
// no amount, label present, no message
assertEquals(CoinDefinition.coinURIScheme + ":" + MAINNET_GOOD_ADDRESS + "?label=Hello", BitcoinURI.convertToBitcoinURI(goodAddress, null, "Hello", null));
// no amount, no label, message present
assertEquals(CoinDefinition.coinURIScheme + ":" + MAINNET_GOOD_ADDRESS + "?message=Agatha", BitcoinURI.convertToBitcoinURI(goodAddress, null, null, "Agatha"));
assertEquals(CoinDefinition.coinURIScheme + ":" + MAINNET_GOOD_ADDRESS + "?message=Agatha", BitcoinURI.convertToBitcoinURI(goodAddress, null, "", "Agatha"));
// no amount, no label, no message
assertEquals(CoinDefinition.coinURIScheme + ":" + MAINNET_GOOD_ADDRESS, BitcoinURI.convertToBitcoinURI(goodAddress, null, null, null));
assertEquals(CoinDefinition.coinURIScheme + ":" + MAINNET_GOOD_ADDRESS, BitcoinURI.convertToBitcoinURI(goodAddress, null, "", ""));
}
@Test
public void testGood_Simple() throws BitcoinURIParseException {
testObject = new BitcoinURI(MainNetParams.get(), BitcoinURI.BITCOIN_SCHEME + ":" + MAINNET_GOOD_ADDRESS);
assertNotNull(testObject);
assertNull("Unexpected amount", testObject.getAmount());
assertNull("Unexpected label", testObject.getLabel());
assertEquals("Unexpected label", 20, testObject.getAddress().getHash160().length);
}
/**
* Test a broken URI (bad scheme)
*/
@Test
public void testBad_Scheme() {
try {
testObject = new BitcoinURI(MainNetParams.get(), "blimpcoin:" + MAINNET_GOOD_ADDRESS);
fail("Expecting BitcoinURIParseException");
} catch (BitcoinURIParseException e) {
}
}
/**
* Test a broken URI (bad syntax)
*/
@Test
public void testBad_BadSyntax() {
// Various illegal characters
try {
testObject = new BitcoinURI(MainNetParams.get(), BitcoinURI.BITCOIN_SCHEME + "|" + MAINNET_GOOD_ADDRESS);
fail("Expecting BitcoinURIParseException");
} catch (BitcoinURIParseException e) {
assertTrue(e.getMessage().contains("Bad URI syntax"));
}
try {
testObject = new BitcoinURI(MainNetParams.get(), BitcoinURI.BITCOIN_SCHEME + ":" + MAINNET_GOOD_ADDRESS + "\\");
fail("Expecting BitcoinURIParseException");
} catch (BitcoinURIParseException e) {
assertTrue(e.getMessage().contains("Bad URI syntax"));
}
// Separator without field
try {
testObject = new BitcoinURI(MainNetParams.get(), BitcoinURI.BITCOIN_SCHEME + ":");
fail("Expecting BitcoinURIParseException");
} catch (BitcoinURIParseException e) {
assertTrue(e.getMessage().contains("Bad URI syntax"));
}
}
/**
* Test a broken URI (missing address)
*/
@Test
public void testBad_Address() {
try {
testObject = new BitcoinURI(MainNetParams.get(), BitcoinURI.BITCOIN_SCHEME);
fail("Expecting BitcoinURIParseException");
} catch (BitcoinURIParseException e) {
}
}
/**
* Test a broken URI (bad address type)
*/
@Test
public void testBad_IncorrectAddressType() {
try {
testObject = new BitcoinURI(TestNet3Params.get(), BitcoinURI.BITCOIN_SCHEME + ":" + MAINNET_GOOD_ADDRESS);
fail("Expecting BitcoinURIParseException");
} catch (BitcoinURIParseException e) {
assertTrue(e.getMessage().contains("Bad address"));
}
}
/**
* Handles a simple amount
*
* @throws BitcoinURIParseException
* If something goes wrong
*/
@Test
public void testGood_Amount() throws BitcoinURIParseException {
// Test the decimal parsing
testObject = new BitcoinURI(MainNetParams.get(), BitcoinURI.BITCOIN_SCHEME + ":" + MAINNET_GOOD_ADDRESS
+ "?amount=6543210.12345678");
assertEquals("654321012345678", testObject.getAmount().toString());
// Test the decimal parsing
testObject = new BitcoinURI(MainNetParams.get(), BitcoinURI.BITCOIN_SCHEME + ":" + MAINNET_GOOD_ADDRESS
+ "?amount=.12345678");
assertEquals("12345678", testObject.getAmount().toString());
// Test the integer parsing
testObject = new BitcoinURI(MainNetParams.get(), BitcoinURI.BITCOIN_SCHEME + ":" + MAINNET_GOOD_ADDRESS
+ "?amount=6543210");
assertEquals("654321000000000", testObject.getAmount().toString());
}
/**
* Handles a simple label
*
* @throws BitcoinURIParseException
* If something goes wrong
*/
@Test
public void testGood_Label() throws BitcoinURIParseException {
testObject = new BitcoinURI(MainNetParams.get(), BitcoinURI.BITCOIN_SCHEME + ":" + MAINNET_GOOD_ADDRESS
+ "?label=Hello%20World");
assertEquals("Hello World", testObject.getLabel());
}
/**
* Handles a simple label with an embedded ampersand and plus
*
* @throws BitcoinURIParseException
* If something goes wrong
* @throws UnsupportedEncodingException
*/
@Test
public void testGood_LabelWithAmpersandAndPlus() throws Exception {
String testString = "Hello Earth & Mars + Venus";
String encodedLabel = BitcoinURI.encodeURLString(testString);
testObject = new BitcoinURI(MainNetParams.get(), BitcoinURI.BITCOIN_SCHEME + ":" + MAINNET_GOOD_ADDRESS + "?label="
+ encodedLabel);
assertEquals(testString, testObject.getLabel());
}
/**
* Handles a Russian label (Unicode test)
*
* @throws BitcoinURIParseException
* If something goes wrong
* @throws UnsupportedEncodingException
*/
@Test
public void testGood_LabelWithRussian() throws Exception {
// Moscow in Russian in Cyrillic
String moscowString = "\u041c\u043e\u0441\u043a\u0432\u0430";
String encodedLabel = BitcoinURI.encodeURLString(moscowString);
testObject = new BitcoinURI(MainNetParams.get(), BitcoinURI.BITCOIN_SCHEME + ":" + MAINNET_GOOD_ADDRESS + "?label="
+ encodedLabel);
assertEquals(moscowString, testObject.getLabel());
}
/**
* Handles a simple message
*
* @throws BitcoinURIParseException
* If something goes wrong
*/
@Test
public void testGood_Message() throws BitcoinURIParseException {
testObject = new BitcoinURI(MainNetParams.get(), BitcoinURI.BITCOIN_SCHEME + ":" + MAINNET_GOOD_ADDRESS
+ "?message=Hello%20World");
assertEquals("Hello World", testObject.getMessage());
}
/**
* Handles various well-formed combinations
*
* @throws BitcoinURIParseException
* If something goes wrong
*/
@Test
public void testGood_Combinations() throws BitcoinURIParseException {
testObject = new BitcoinURI(MainNetParams.get(), BitcoinURI.BITCOIN_SCHEME + ":" + MAINNET_GOOD_ADDRESS
+ "?amount=6543210&label=Hello%20World&message=Be%20well");
assertEquals(
"BitcoinURI['amount'='654321000000000','label'='Hello World','message'='Be well','address'='"+CoinDefinition.UNITTEST_ADDRESS+"']",
testObject.toString());
}
/**
* Handles a badly formatted amount field
*
* @throws BitcoinURIParseException
* If something goes wrong
*/
@Test
public void testBad_Amount() throws BitcoinURIParseException {
// Missing
try {
testObject = new BitcoinURI(MainNetParams.get(), BitcoinURI.BITCOIN_SCHEME + ":" + MAINNET_GOOD_ADDRESS
+ "?amount=");
fail("Expecting BitcoinURIParseException");
} catch (BitcoinURIParseException e) {
assertTrue(e.getMessage().contains("amount"));
}
// Non-decimal (BIP 21)
try {
testObject = new BitcoinURI(MainNetParams.get(), BitcoinURI.BITCOIN_SCHEME + ":" + MAINNET_GOOD_ADDRESS
+ "?amount=12X4");
fail("Expecting BitcoinURIParseException");
} catch (BitcoinURIParseException e) {
assertTrue(e.getMessage().contains("amount"));
}
}
/**
* Handles a badly formatted label field
*
* @throws BitcoinURIParseException
* If something goes wrong
*/
@Test
public void testBad_Label() throws BitcoinURIParseException {
try {
testObject = new BitcoinURI(MainNetParams.get(), BitcoinURI.BITCOIN_SCHEME + ":" + MAINNET_GOOD_ADDRESS
+ "?label=");
fail("Expecting BitcoinURIParseException");
} catch (BitcoinURIParseException e) {
assertTrue(e.getMessage().contains("label"));
}
}
/**
* Handles a badly formatted message field
*
* @throws BitcoinURIParseException
* If something goes wrong
*/
@Test
public void testBad_Message() throws BitcoinURIParseException {
try {
testObject = new BitcoinURI(MainNetParams.get(), BitcoinURI.BITCOIN_SCHEME + ":" + MAINNET_GOOD_ADDRESS
+ "?message=");
fail("Expecting BitcoinURIParseException");
} catch (BitcoinURIParseException e) {
assertTrue(e.getMessage().contains("message"));
}
}
/**
* Handles duplicated fields (sneaky address overwrite attack)
*
* @throws BitcoinURIParseException
* If something goes wrong
*/
@Test
public void testBad_Duplicated() throws BitcoinURIParseException {
try {
testObject = new BitcoinURI(MainNetParams.get(), BitcoinURI.BITCOIN_SCHEME + ":" + MAINNET_GOOD_ADDRESS
+ "?address=aardvark");
fail("Expecting BitcoinURIParseException");
} catch (BitcoinURIParseException e) {
assertTrue(e.getMessage().contains("address"));
}
}
/**
* Handles case when there are too many equals
*
* @throws BitcoinURIParseException
* If something goes wrong
*/
@Test
public void testBad_TooManyEquals() throws BitcoinURIParseException {
try {
testObject = new BitcoinURI(MainNetParams.get(), BitcoinURI.BITCOIN_SCHEME + ":" + MAINNET_GOOD_ADDRESS
+ "?label=aardvark=zebra");
fail("Expecting BitcoinURIParseException");
} catch (BitcoinURIParseException e) {
assertTrue(e.getMessage().contains("cannot parse name value pair"));
}
}
/**
* Handles case when there are too many question marks
*
* @throws BitcoinURIParseException
* If something goes wrong
*/
@Test
public void testBad_TooManyQuestionMarks() throws BitcoinURIParseException {
try {
testObject = new BitcoinURI(MainNetParams.get(), BitcoinURI.BITCOIN_SCHEME + ":" + MAINNET_GOOD_ADDRESS
+ "?label=aardvark?message=zebra");
fail("Expecting BitcoinURIParseException");
} catch (BitcoinURIParseException e) {
assertTrue(e.getMessage().contains("Too many question marks"));
}
}
/**
* Handles unknown fields (required and not required)
*
* @throws BitcoinURIParseException
* If something goes wrong
*/
@Test
public void testUnknown() throws BitcoinURIParseException {
// Unknown not required field
testObject = new BitcoinURI(MainNetParams.get(), BitcoinURI.BITCOIN_SCHEME + ":" + MAINNET_GOOD_ADDRESS
+ "?aardvark=true");
assertEquals("BitcoinURI['aardvark'='true','address'='"+CoinDefinition.UNITTEST_ADDRESS+"']", testObject.toString());
assertEquals("true", (String) testObject.getParameterByName("aardvark"));
// Unknown not required field (isolated)
try {
testObject = new BitcoinURI(MainNetParams.get(), BitcoinURI.BITCOIN_SCHEME + ":" + MAINNET_GOOD_ADDRESS
+ "?aardvark");
fail("Expecting BitcoinURIParseException");
} catch (BitcoinURIParseException e) {
assertTrue(e.getMessage().contains("cannot parse name value pair"));
}
// Unknown and required field
try {
testObject = new BitcoinURI(MainNetParams.get(), BitcoinURI.BITCOIN_SCHEME + ":" + MAINNET_GOOD_ADDRESS
+ "?req-aardvark=true");
fail("Expecting BitcoinURIParseException");
} catch (BitcoinURIParseException e) {
assertTrue(e.getMessage().contains("req-aardvark"));
}
}
@Test
public void brokenURIs() throws BitcoinURIParseException {
// Check we can parse the incorrectly formatted URIs produced by blockchain.info and its iPhone app.
String str = CoinDefinition.coinURIScheme + "://"+CoinDefinition.UNITTEST_ADDRESS+"?amount=0.01000000";
BitcoinURI uri = new BitcoinURI(str);
assertEquals(CoinDefinition.UNITTEST_ADDRESS, uri.getAddress().toString());
assertEquals(Utils.toNanoCoins(0, 1), uri.getAmount());
}
@Test(expected = BitcoinURIParseException.class)
public void testBad_AmountTooPrecise() throws BitcoinURIParseException {
new BitcoinURI(MainNetParams.get(), BitcoinURI.BITCOIN_SCHEME + ":" + MAINNET_GOOD_ADDRESS
+ "?amount=0.123456789");
}
@Test(expected = BitcoinURIParseException.class)
public void testBad_NegativeAmount() throws BitcoinURIParseException {
new BitcoinURI(MainNetParams.get(), BitcoinURI.BITCOIN_SCHEME + ":" + MAINNET_GOOD_ADDRESS
+ "?amount=-1");
}
@Test(expected = BitcoinURIParseException.class)
public void testBad_TooLargeAmount() throws BitcoinURIParseException {
new BitcoinURI(MainNetParams.get(), BitcoinURI.BITCOIN_SCHEME + ":" + MAINNET_GOOD_ADDRESS
+ "?amount="+(CoinDefinition.MAX_MONEY.multiply(BigInteger.valueOf(5))));
}
@Test
public void testPaymentProtocolReq() throws Exception {
// Non-backwards compatible form ...
BitcoinURI uri = new BitcoinURI(TestNet3Params.get(), CoinDefinition.coinURIScheme + ":?r=https%3A%2F%2Fbitcoincore.org%2F%7Egavin%2Ff.php%3Fh%3Db0f02e7cea67f168e25ec9b9f9d584f9");
assertEquals("https://bitcoincore.org/~gavin/f.php?h=b0f02e7cea67f168e25ec9b9f9d584f9", uri.getPaymentRequestUrl());
assertNull(uri.getAddress());
}
}
|
|
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.bookkeeper.client;
import static org.apache.bookkeeper.util.TestUtils.assertEventuallyTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import com.google.common.collect.Lists;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.apache.bookkeeper.client.api.LedgerMetadata;
import org.apache.bookkeeper.client.api.WriteFlag;
import org.apache.bookkeeper.common.concurrent.FutureUtils;
import org.apache.bookkeeper.net.BookieId;
import org.apache.bookkeeper.net.BookieSocketAddress;
import org.apache.bookkeeper.versioning.Versioned;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Ledger recovery tests using mocks rather than a real cluster.
*/
public class HandleFailuresTest {
private static final Logger log = LoggerFactory.getLogger(HandleFailuresTest.class);
private static final BookieId b1 = new BookieSocketAddress("b1", 3181).toBookieId();
private static final BookieId b2 = new BookieSocketAddress("b2", 3181).toBookieId();
private static final BookieId b3 = new BookieSocketAddress("b3", 3181).toBookieId();
private static final BookieId b4 = new BookieSocketAddress("b4", 3181).toBookieId();
private static final BookieId b5 = new BookieSocketAddress("b5", 3181).toBookieId();
@Test
public void testChangeTriggeredOneTimeForOneFailure() throws Exception {
MockClientContext clientCtx = MockClientContext.create();
Versioned<LedgerMetadata> md = ClientUtil.setupLedger(clientCtx, 10L,
LedgerMetadataBuilder.create().newEnsembleEntry(
0L, Lists.newArrayList(b1, b2, b3)));
clientCtx.getMockRegistrationClient().addBookies(b4).get();
clientCtx.getMockBookieClient().errorBookies(b1);
LedgerHandle lh = new LedgerHandle(clientCtx, 10L, md, BookKeeper.DigestType.CRC32C,
ClientUtil.PASSWD, WriteFlag.NONE);
lh.appendAsync("entry1".getBytes());
lh.appendAsync("entry2".getBytes());
lh.appendAsync("entry3".getBytes());
lh.appendAsync("entry4".getBytes());
lh.appendAsync("entry5".getBytes()).get();
verify(clientCtx.getLedgerManager(), times(1)).writeLedgerMetadata(anyLong(), any(), any());
Assert.assertEquals(lh.getLedgerMetadata().getAllEnsembles().size(), 1);
Assert.assertEquals(lh.getLedgerMetadata().getAllEnsembles().get(0L), Lists.newArrayList(b4, b2, b3));
}
@Test
public void testSecondFailureOccursWhileFirstBeingHandled() throws Exception {
MockClientContext clientCtx = MockClientContext.create();
Versioned<LedgerMetadata> md = ClientUtil.setupLedger(clientCtx, 10L,
LedgerMetadataBuilder.create()
.withEnsembleSize(3).withWriteQuorumSize(3).withAckQuorumSize(3)
.newEnsembleEntry(0L, Lists.newArrayList(b1, b2, b3)));
clientCtx.getMockRegistrationClient().addBookies(b4, b5).get();
CompletableFuture<Void> b2blocker = new CompletableFuture<>();
clientCtx.getMockBookieClient().setPreWriteHook(
(bookie, ledgerId, entryId) -> {
if (bookie.equals(b1)) {
return FutureUtils.exception(new BKException.BKWriteException());
} else if (bookie.equals(b2)) {
return b2blocker;
} else {
return FutureUtils.value(null);
}
});
CompletableFuture<Void> metadataNotifier = new CompletableFuture<>();
CompletableFuture<Void> metadataBlocker = new CompletableFuture<>();
clientCtx.getMockLedgerManager().setPreWriteHook(
(ledgerId, metadata) -> {
metadataNotifier.complete(null);
return metadataBlocker;
});
LedgerHandle lh = new LedgerHandle(clientCtx, 10L, md, BookKeeper.DigestType.CRC32C,
ClientUtil.PASSWD, WriteFlag.NONE);
lh.appendAsync("entry1".getBytes());
lh.appendAsync("entry2".getBytes());
lh.appendAsync("entry3".getBytes());
lh.appendAsync("entry4".getBytes());
CompletableFuture<?> future = lh.appendAsync("entry5".getBytes());
metadataNotifier.get(); // wait for first metadata write to occur
b2blocker.completeExceptionally(new BKException.BKWriteException()); // make b2 requests fail
metadataBlocker.complete(null);
future.get();
verify(clientCtx.getLedgerManager(), times(2)).writeLedgerMetadata(anyLong(), any(), any());
Assert.assertEquals(lh.getLedgerMetadata().getAllEnsembles().size(), 1);
Assert.assertTrue(lh.getLedgerMetadata().getAllEnsembles().get(0L).contains(b3));
Assert.assertTrue(lh.getLedgerMetadata().getAllEnsembles().get(0L).contains(b4));
Assert.assertTrue(lh.getLedgerMetadata().getAllEnsembles().get(0L).contains(b5));
}
@Test
public void testHandlingFailuresOneBookieFailsImmediately() throws Exception {
MockClientContext clientCtx = MockClientContext.create();
Versioned<LedgerMetadata> md = ClientUtil.setupLedger(clientCtx, 10L,
LedgerMetadataBuilder.create()
.withEnsembleSize(3).withWriteQuorumSize(3).withAckQuorumSize(3)
.newEnsembleEntry(0L, Lists.newArrayList(b1, b2, b3)));
clientCtx.getMockRegistrationClient().addBookies(b4).get();
clientCtx.getMockBookieClient().errorBookies(b1);
LedgerHandle lh = new LedgerHandle(clientCtx, 10L, md, BookKeeper.DigestType.CRC32C,
ClientUtil.PASSWD, WriteFlag.NONE);
lh.append("entry1".getBytes());
lh.close();
Assert.assertTrue(lh.getLedgerMetadata().isClosed());
Assert.assertEquals(lh.getLedgerMetadata().getAllEnsembles().size(), 1);
Assert.assertEquals(lh.getLedgerMetadata().getAllEnsembles().get(0L), Lists.newArrayList(b4, b2, b3));
}
@Test
public void testHandlingFailuresOneBookieFailsAfterOneEntry() throws Exception {
MockClientContext clientCtx = MockClientContext.create();
Versioned<LedgerMetadata> md = ClientUtil.setupLedger(clientCtx, 10L,
LedgerMetadataBuilder.create()
.withEnsembleSize(3).withWriteQuorumSize(3).withAckQuorumSize(3)
.newEnsembleEntry(0L, Lists.newArrayList(b1, b2, b3)));
clientCtx.getMockRegistrationClient().addBookies(b4).get();
LedgerHandle lh = new LedgerHandle(clientCtx, 10L, md, BookKeeper.DigestType.CRC32C,
ClientUtil.PASSWD, WriteFlag.NONE);
lh.append("entry1".getBytes());
clientCtx.getMockBookieClient().errorBookies(b1);
lh.append("entry2".getBytes());
lh.close();
Assert.assertTrue(lh.getLedgerMetadata().isClosed());
Assert.assertEquals(lh.getLedgerMetadata().getAllEnsembles().size(), 2);
Assert.assertEquals(lh.getLedgerMetadata().getAllEnsembles().get(0L), Lists.newArrayList(b1, b2, b3));
Assert.assertEquals(lh.getLedgerMetadata().getAllEnsembles().get(1L), Lists.newArrayList(b4, b2, b3));
Assert.assertEquals(lh.getLedgerMetadata().getLastEntryId(), 1L);
}
@Test
public void testHandlingFailuresMultipleBookieFailImmediatelyNotEnoughToReplace() throws Exception {
MockClientContext clientCtx = MockClientContext.create();
Versioned<LedgerMetadata> md = ClientUtil.setupLedger(clientCtx, 10L,
LedgerMetadataBuilder.create()
.withEnsembleSize(3).withWriteQuorumSize(3).withAckQuorumSize(3)
.newEnsembleEntry(0L, Lists.newArrayList(b1, b2, b3)));
clientCtx.getMockBookieClient().errorBookies(b1, b2);
LedgerHandle lh = new LedgerHandle(clientCtx, 10L, md, BookKeeper.DigestType.CRC32C,
ClientUtil.PASSWD, WriteFlag.NONE);
try {
lh.append("entry1".getBytes());
Assert.fail("Shouldn't have been able to add");
} catch (BKException.BKNotEnoughBookiesException bke) {
// correct behaviour
assertEventuallyTrue("Failure to add should trigger ledger closure",
() -> lh.getLedgerMetadata().isClosed());
Assert.assertEquals("Ledger should be empty",
lh.getLedgerMetadata().getLastEntryId(), LedgerHandle.INVALID_ENTRY_ID);
Assert.assertEquals("Should be only one ensemble", lh.getLedgerMetadata().getAllEnsembles().size(), 1);
Assert.assertEquals("Ensemble shouldn't have changed", lh.getLedgerMetadata().getAllEnsembles().get(0L),
Lists.newArrayList(b1, b2, b3));
}
}
@Test
public void testHandlingFailuresMultipleBookieFailAfterOneEntryNotEnoughToReplace() throws Exception {
MockClientContext clientCtx = MockClientContext.create();
Versioned<LedgerMetadata> md = ClientUtil.setupLedger(clientCtx, 10L,
LedgerMetadataBuilder.create()
.withEnsembleSize(3).withWriteQuorumSize(3).withAckQuorumSize(3)
.newEnsembleEntry(0L, Lists.newArrayList(b1, b2, b3)));
LedgerHandle lh = new LedgerHandle(clientCtx, 10L, md, BookKeeper.DigestType.CRC32C,
ClientUtil.PASSWD, WriteFlag.NONE);
lh.append("entry1".getBytes());
clientCtx.getMockBookieClient().errorBookies(b1, b2);
try {
lh.append("entry2".getBytes());
Assert.fail("Shouldn't have been able to add");
} catch (BKException.BKNotEnoughBookiesException bke) {
// correct behaviour
assertEventuallyTrue("Failure to add should trigger ledger closure",
() -> lh.getLedgerMetadata().isClosed());
Assert.assertEquals("Ledger should be empty", lh.getLedgerMetadata().getLastEntryId(), 0L);
Assert.assertEquals("Should be only one ensemble", lh.getLedgerMetadata().getAllEnsembles().size(), 1);
Assert.assertEquals("Ensemble shouldn't have changed", lh.getLedgerMetadata().getAllEnsembles().get(0L),
Lists.newArrayList(b1, b2, b3));
}
}
@Test
public void testClientClosesWhileFailureHandlerInProgress() throws Exception {
MockClientContext clientCtx = MockClientContext.create();
Versioned<LedgerMetadata> md = ClientUtil.setupLedger(clientCtx, 10L,
LedgerMetadataBuilder.create()
.withEnsembleSize(3).withWriteQuorumSize(3).withAckQuorumSize(3)
.newEnsembleEntry(0L, Lists.newArrayList(b1, b2, b3)));
clientCtx.getMockRegistrationClient().addBookies(b4).get();
clientCtx.getMockBookieClient().errorBookies(b2);
CompletableFuture<Void> changeInProgress = new CompletableFuture<>();
CompletableFuture<Void> blockEnsembleChange = new CompletableFuture<>();
clientCtx.getMockLedgerManager().setPreWriteHook((ledgerId, metadata) -> {
// block the write trying to replace b2 with b4
if (metadata.getAllEnsembles().get(0L).get(1).equals(b4)) {
changeInProgress.complete(null);
return blockEnsembleChange;
} else {
return FutureUtils.value(null);
}
});
LedgerHandle lh = new LedgerHandle(clientCtx, 10L, md, BookKeeper.DigestType.CRC32C,
ClientUtil.PASSWD, WriteFlag.NONE);
CompletableFuture<?> future = lh.appendAsync("entry1".getBytes());
changeInProgress.get();
lh.close();
blockEnsembleChange.complete(null); // allow ensemble change to continue
try {
future.get();
Assert.fail("Add shouldn't have succeeded");
} catch (ExecutionException ee) {
Assert.assertEquals(ee.getCause().getClass(), BKException.BKLedgerClosedException.class);
}
Assert.assertTrue(lh.getLedgerMetadata().isClosed());
Assert.assertEquals(lh.getLedgerMetadata().getAllEnsembles().size(), 1);
Assert.assertEquals(lh.getLedgerMetadata().getAllEnsembles().get(0L), Lists.newArrayList(b1, b2, b3));
Assert.assertEquals(lh.getLedgerMetadata().getLastEntryId(), LedgerHandle.INVALID_ENTRY_ID);
}
@Test
public void testMetadataSetToClosedDuringFailureHandler() throws Exception {
MockClientContext clientCtx = MockClientContext.create();
Versioned<LedgerMetadata> md = ClientUtil.setupLedger(clientCtx, 10L,
LedgerMetadataBuilder.create()
.withEnsembleSize(3).withWriteQuorumSize(3).withAckQuorumSize(3)
.newEnsembleEntry(0L, Lists.newArrayList(b1, b2, b3)));
clientCtx.getMockRegistrationClient().addBookies(b4).get();
clientCtx.getMockBookieClient().errorBookies(b2);
CompletableFuture<Void> changeInProgress = new CompletableFuture<>();
CompletableFuture<Void> blockEnsembleChange = new CompletableFuture<>();
clientCtx.getMockLedgerManager().setPreWriteHook((ledgerId, metadata) -> {
if (metadata.getAllEnsembles().get(0L).get(1).equals(b4)) {
// block the write trying to replace b2 with b4
changeInProgress.complete(null);
return blockEnsembleChange;
} else {
return FutureUtils.value(null);
}
});
LedgerHandle lh = new LedgerHandle(clientCtx, 10L, md, BookKeeper.DigestType.CRC32C,
ClientUtil.PASSWD, WriteFlag.NONE);
CompletableFuture<?> future = lh.appendAsync("entry1".getBytes());
changeInProgress.get();
ClientUtil.transformMetadata(clientCtx, 10L,
(metadata) -> LedgerMetadataBuilder.from(metadata)
.withClosedState().withLastEntryId(1234L).withLength(10L).build());
blockEnsembleChange.complete(null); // allow ensemble change to continue
try {
future.get();
Assert.fail("Add shouldn't have succeeded");
} catch (ExecutionException ee) {
Assert.assertEquals(ee.getCause().getClass(), BKException.BKLedgerClosedException.class);
}
Assert.assertTrue(lh.getLedgerMetadata().isClosed());
Assert.assertEquals(lh.getLedgerMetadata().getAllEnsembles().size(), 1);
Assert.assertEquals(lh.getLedgerMetadata().getAllEnsembles().get(0L), Lists.newArrayList(b1, b2, b3));
Assert.assertEquals(lh.getLedgerMetadata().getLastEntryId(), 1234L);
}
@Test
public void testMetadataSetToInRecoveryDuringFailureHandler() throws Exception {
MockClientContext clientCtx = MockClientContext.create();
Versioned<LedgerMetadata> md = ClientUtil.setupLedger(clientCtx, 10L,
LedgerMetadataBuilder.create()
.withEnsembleSize(3).withWriteQuorumSize(3).withAckQuorumSize(3)
.newEnsembleEntry(0L, Lists.newArrayList(b1, b2, b3)));
clientCtx.getMockRegistrationClient().addBookies(b4).get();
clientCtx.getMockBookieClient().errorBookies(b2);
CompletableFuture<Void> changeInProgress = new CompletableFuture<>();
CompletableFuture<Void> blockEnsembleChange = new CompletableFuture<>();
clientCtx.getMockLedgerManager().setPreWriteHook((ledgerId, metadata) -> {
if (metadata.getAllEnsembles().get(0L).get(1).equals(b4)) {
// block the write trying to replace b2 with b4
changeInProgress.complete(null);
return blockEnsembleChange;
} else {
return FutureUtils.value(null);
}
});
LedgerHandle lh = new LedgerHandle(clientCtx, 10L, md, BookKeeper.DigestType.CRC32C,
ClientUtil.PASSWD, WriteFlag.NONE);
CompletableFuture<?> future = lh.appendAsync("entry1".getBytes());
changeInProgress.get();
ClientUtil.transformMetadata(clientCtx, 10L,
(metadata) -> LedgerMetadataBuilder.from(metadata).withInRecoveryState().build());
blockEnsembleChange.complete(null); // allow ensemble change to continue
try {
future.get();
Assert.fail("Add shouldn't have succeeded");
} catch (ExecutionException ee) {
Assert.assertEquals(ee.getCause().getClass(), BKException.BKLedgerFencedException.class);
}
Assert.assertFalse(lh.getLedgerMetadata().isClosed());
Assert.assertEquals(lh.getLedgerMetadata().getAllEnsembles().size(), 1);
Assert.assertEquals(lh.getLedgerMetadata().getAllEnsembles().get(0L), Lists.newArrayList(b1, b2, b3));
}
@Test
public void testOldEnsembleChangedDuringFailureHandler() throws Exception {
MockClientContext clientCtx = MockClientContext.create();
Versioned<LedgerMetadata> md = ClientUtil.setupLedger(clientCtx, 10L,
LedgerMetadataBuilder.create()
.withEnsembleSize(3).withWriteQuorumSize(3).withAckQuorumSize(3)
.newEnsembleEntry(0L, Lists.newArrayList(b1, b2, b3)));
LedgerHandle lh = new LedgerHandle(clientCtx, 10L, md, BookKeeper.DigestType.CRC32C,
ClientUtil.PASSWD, WriteFlag.NONE);
lh.append("entry1".getBytes());
clientCtx.getMockRegistrationClient().addBookies(b4).get();
clientCtx.getMockBookieClient().errorBookies(b3);
lh.append("entry2".getBytes());
Assert.assertEquals(lh.getLedgerMetadata().getAllEnsembles().size(), 2);
Assert.assertEquals(lh.getLedgerMetadata().getAllEnsembles().get(0L), Lists.newArrayList(b1, b2, b3));
Assert.assertEquals(lh.getLedgerMetadata().getAllEnsembles().get(1L), Lists.newArrayList(b1, b2, b4));
CompletableFuture<Void> changeInProgress = new CompletableFuture<>();
CompletableFuture<Void> blockEnsembleChange = new CompletableFuture<>();
clientCtx.getMockLedgerManager().setPreWriteHook((ledgerId, metadata) -> {
// block the write trying to replace b1 with b5
if (metadata.getAllEnsembles().size() > 2
&& metadata.getAllEnsembles().get(2L).get(0).equals(b5)) {
changeInProgress.complete(null);
return blockEnsembleChange;
} else {
return FutureUtils.value(null);
}
});
clientCtx.getMockRegistrationClient().addBookies(b5).get();
clientCtx.getMockBookieClient().errorBookies(b1);
CompletableFuture<?> future = lh.appendAsync("entry3".getBytes());
changeInProgress.get();
ClientUtil.transformMetadata(clientCtx, 10L,
(metadata) -> LedgerMetadataBuilder.from(metadata).replaceEnsembleEntry(
0L, Lists.newArrayList(b4, b2, b5)).build());
blockEnsembleChange.complete(null); // allow ensemble change to continue
future.get();
Assert.assertFalse(lh.getLedgerMetadata().isClosed());
Assert.assertEquals(lh.getLedgerMetadata().getAllEnsembles().size(), 3);
Assert.assertEquals(lh.getLedgerMetadata().getAllEnsembles().get(0L), Lists.newArrayList(b4, b2, b5));
Assert.assertEquals(lh.getLedgerMetadata().getAllEnsembles().get(1L), Lists.newArrayList(b1, b2, b4));
Assert.assertEquals(lh.getLedgerMetadata().getAllEnsembles().get(2L), Lists.newArrayList(b5, b2, b4));
}
@Test
public void testNoAddsAreCompletedWhileFailureHandlingInProgress() throws Exception {
MockClientContext clientCtx = MockClientContext.create();
Versioned<LedgerMetadata> md = ClientUtil.setupLedger(clientCtx, 10L,
LedgerMetadataBuilder.create()
.withEnsembleSize(3).withWriteQuorumSize(3).withAckQuorumSize(2)
.newEnsembleEntry(0L, Lists.newArrayList(b1, b2, b3)));
clientCtx.getMockRegistrationClient().addBookies(b4).get();
clientCtx.getMockBookieClient().errorBookies(b3);
LedgerHandle lh = new LedgerHandle(clientCtx, 10L, md, BookKeeper.DigestType.CRC32C,
ClientUtil.PASSWD, WriteFlag.NONE);
lh.append("entry1".getBytes());
CompletableFuture<Void> changeInProgress = new CompletableFuture<>();
CompletableFuture<Void> blockEnsembleChange = new CompletableFuture<>();
clientCtx.getMockLedgerManager().setPreWriteHook((ledgerId, metadata) -> {
// block the write trying to replace b3 with b4
if (metadata.getAllEnsembles().get(1L).get(2).equals(b4)) {
changeInProgress.complete(null);
return blockEnsembleChange;
} else {
return FutureUtils.value(null);
}
});
CompletableFuture<?> future = lh.appendAsync("entry2".getBytes());
changeInProgress.get();
try {
future.get(1, TimeUnit.SECONDS);
Assert.fail("Shouldn't complete");
} catch (TimeoutException te) {
}
blockEnsembleChange.complete(null);
future.get();
Assert.assertEquals(lh.getLedgerMetadata().getAllEnsembles().size(), 2);
Assert.assertEquals(lh.getLedgerMetadata().getAllEnsembles().get(0L), Lists.newArrayList(b1, b2, b3));
Assert.assertEquals(lh.getLedgerMetadata().getAllEnsembles().get(1L), Lists.newArrayList(b1, b2, b4));
}
@Test
public void testHandleFailureBookieNotInWriteSet() throws Exception {
MockClientContext clientCtx = MockClientContext.create();
Versioned<LedgerMetadata> md = ClientUtil.setupLedger(clientCtx, 10L,
LedgerMetadataBuilder.create()
.withEnsembleSize(3).withWriteQuorumSize(2).withAckQuorumSize(1)
.newEnsembleEntry(0L, Lists.newArrayList(b1, b2, b3)));
clientCtx.getMockRegistrationClient().addBookies(b4).get();
CompletableFuture<Void> b1Delay = new CompletableFuture<>();
// Delay the first write to b1, then error it
clientCtx.getMockBookieClient().setPreWriteHook((bookie, ledgerId, entryId) -> {
if (bookie.equals(b1)) {
return b1Delay;
} else {
return FutureUtils.value(null);
}
});
CompletableFuture<Void> changeInProgress = new CompletableFuture<>();
CompletableFuture<Void> blockEnsembleChange = new CompletableFuture<>();
clientCtx.getMockLedgerManager().setPreWriteHook((ledgerId, metadata) -> {
changeInProgress.complete(null);
return blockEnsembleChange;
});
LedgerHandle lh = new LedgerHandle(clientCtx, 10L, md, BookKeeper.DigestType.CRC32C,
ClientUtil.PASSWD, WriteFlag.NONE);
log.info("b2 should be enough to complete first add");
lh.append("entry1".getBytes());
log.info("when b1 completes with failure, handleFailures should kick off");
b1Delay.completeExceptionally(new BKException.BKWriteException());
log.info("write second entry, should have enough bookies, but blocks completion on failure handling");
CompletableFuture<?> e2 = lh.appendAsync("entry2".getBytes());
changeInProgress.get();
assertEventuallyTrue("e2 should eventually complete", () -> lh.pendingAddOps.peek().completed);
Assert.assertFalse("e2 shouldn't be completed to client", e2.isDone());
blockEnsembleChange.complete(null); // allow ensemble change to continue
log.info("e2 should complete");
e2.get(10, TimeUnit.SECONDS);
}
}
|
|
package test.annotationheaders;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.util.Arrays;
import java.util.Map.Entry;
import java.util.jar.Attributes;
import java.util.jar.Manifest;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import aQute.bnd.annotation.headers.BundleCategory;
import aQute.bnd.annotation.headers.BundleContributors;
import aQute.bnd.annotation.headers.BundleCopyright;
import aQute.bnd.annotation.headers.BundleDevelopers;
import aQute.bnd.annotation.headers.BundleDocURL;
import aQute.bnd.annotation.headers.Category;
import aQute.bnd.annotation.licenses.Apache_2_0;
import aQute.bnd.header.Attrs;
import aQute.bnd.header.Parameters;
import aQute.bnd.osgi.Builder;
import aQute.bnd.osgi.Constants;
import aQute.bnd.osgi.Domain;
import aQute.bnd.osgi.Jar;
import aQute.lib.io.IO;
import junit.framework.TestCase;
@SuppressWarnings("deprecation")
public class AnnotationHeadersTest extends TestCase {
/**
* Say I want to define a capability namespace for web applications, e.g.
* Provide-Capability: webapp; webapp=Petstore. Web application components
* necessarily require an HTTP implementation so they should
* Require-Capability: osgi.implementation;
* filter:="(osgi.implementation=osgi.http)". I want to define an annotation
* that I can put onto a component that implies both the above provide and
* require. I tried the following:
*
* <pre>
* @ProvideCapability(ns = "webapp")
* @RequireCapability(ns = "osgi.implementation", filter = "(osgi.implementation=osgi.http)")
* @interface WebApplication {
* String name();
* }
*
* @WebApplication(name = "Petstore")
* @Component
* public class PetstoreAppComponent {
* // ..
* }
* </pre>
*
* However this only generated the Provide, it did not generate the Require.
* If I switch the order of annotations so that @RequireCapability is first,
* then it only generates the Require.
*/
public void testMultipleManifestHeaders() throws Exception {
try (Builder b = new Builder();) {
b.addClasspath(IO.getFile("bin_test"));
b.setPrivatePackage("test.annotationheaders.multiple");
b.build();
assertTrue(b.check());
b.getJar()
.getManifest()
.write(System.out);
Attributes mainAttributes = b.getJar()
.getManifest()
.getMainAttributes();
Parameters req = new Parameters(mainAttributes.getValue(Constants.REQUIRE_CAPABILITY));
Parameters cap = new Parameters(mainAttributes.getValue(Constants.PROVIDE_CAPABILITY));
assertTrue(cap.get("provide") != null);
assertTrue(req.get("require") != null);
}
}
/**
* Default values of annotation attributes not included for customized
* webresource annotations #976
* <p>
* When I add a customized webresource annotation to a type definition, the
* default attributes are not included in the Require-Capability header. For
* example with the Jsonrpc webresource annotation this leads to the
* following Require-Capability in the manifest:
* osgi.enroute.webresource;filter:=
* "(&(osgi.enroute.webresource=/osgi/enroute/jsonrpc)(&(version>=1.1.1)(!(version>=2.0.0))))".
* Only when I explicitly add resource={"jsonrpc.js"} this is reflected in
* the manifest: osgi.enroute.webresource;resource:List
* <String>="jsonrpc.js";filter:=
* "(&(osgi.enroute.webresource=/osgi/enroute/jsonrpc)(&(version>=1.1.1)(!(version>=2.0.0))))",
* although jsonrpc.js is set as default for the resource attribute.
*/
public void testDefaultAttrs() throws Exception {
try (Builder b = new Builder();) {
b.addClasspath(IO.getFile("bin_test"));
b.setPrivatePackage("test.annotationheaders.attrs.defaults");
b.build();
assertTrue(b.check());
b.getJar()
.getManifest()
.write(System.out);
Domain d = Domain.domain(b.getJar()
.getManifest());
Parameters rc = d.getRequireCapability();
assertNotNull(rc);
assertEquals(2, rc.size());
Attrs attrs = rc.get("default-attrs");
assertEquals("42", attrs.get("foo"));
Parameters pc = d.getProvideCapability();
assertNotNull(pc);
assertEquals(1, pc.size());
attrs = pc.get("default-attrs");
assertEquals("42", attrs.get("foo"));
}
}
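// A minimal sketch of the kind of customized annotation testDefaultAttrs relies on (the real
// fixture lives in test.annotationheaders.attrs.defaults and may differ in detail): an attribute
// with a default value, here foo() default "42", should be copied into the generated capability
// clauses even when the use site does not repeat it. Using the aQute.bnd.annotation.headers types:
//
//   @RequireCapability(ns = "default-attrs", filter = "(default-attrs=*)")
//   @ProvideCapability(ns = "default-attrs", name = "default-attrs")
//   @interface DefaultAttrs {
//       String foo() default "42";
//   }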
public void testWithAttrs() throws Exception {
Builder b = new Builder();
try {
b.addClasspath(new File("bin_test"));
b.setProperty("Private-Package", "test.annotationheaders.attrs");
b.build();
assertTrue(b.check());
Manifest m = b.getJar()
.getManifest();
m.write(System.out);
Parameters p = new Parameters(m.getMainAttributes()
.getValue("Provide-Capability"));
Attrs attrs = p.get("nsx");
assertNotNull(attrs);
assertEquals(Long.valueOf(3), attrs.getTyped("foo"));
p = new Parameters(m.getMainAttributes()
.getValue("Bundle-License"));
attrs = p.get("license");
assertNotNull(attrs);
assertEquals("abc", attrs.get("foo"));
p = new Parameters(m.getMainAttributes()
.getValue("Require-Capability"));
// namespaces must be "osgi.ee", "nsx", "nsy", "nsz" and "param" ONLY
assertNotNull(p.get("nsx"));
assertNotNull(p.get("nsy"));
assertNotNull(p.get("nsz"));
assertNotNull(p.get("param"));
assertNotNull(p.get("osgi.ee"));
assertEquals("spurious Require-Capability namespaces", 5, p.size());
attrs = p.get("nsx");
assertEquals(Arrays.asList("abc", "def"), attrs.getTyped("foo"));
attrs = p.get("nsy");
assertEquals("hello", attrs.get("value"));
attrs = p.get("nsz");
assertEquals("(nsz=*)", attrs.get("filter:"));
assertEquals("world", attrs.get("hello"));
attrs = p.get("param");
assertEquals("(&(a=hello)(b=goodbye))", attrs.get("filter:"));
} finally {
b.close();
}
}
@aQute.bnd.annotation.headers.RequireCapability(ns = "osgi.webresource", filter = "(&(osgi.webresource=/google/angular)${frange;${@version}})")
@interface Angular {
}
@aQute.bnd.annotation.headers.RequireCapability(ns = "not.there", filter = "(a=3)")
@interface Notused {
}
@BundleDevelopers("Peter.Kriens@aQute.biz;name='Peter Kriens';organization=aQute")
@interface pkriens {}
@BundleContributors(value = "Mieke.Kriens@aQute.biz", name = "Mieke Kriens", organization = "aQute")
@interface mkriens {}
@BundleContributors(value = "Thomas.Kriens@aQute.biz", name = "Thomas Kriens", organization = "aQute")
@interface tkriens {}
@BundleContributors(value = "Mischa.Kriens@aQute.biz", name = "Mischa Kriens", organization = "aQute")
@interface mischakriens {}
@aQute.bnd.annotation.headers.RequireCapability(ns = "abcdef", filter = "(&(abcdef=xyz)${frange;${@version}})")
@Apache_2_0
@pkriens
@mkriens
@tkriens
class A {
}
@BundleDocURL("http://www.aQute.biz")
@BundleCopyright("(c) ${tstamp;yyyy} aQute All Rights Reserved and other baloney")
@pkriens
@mkriens
@tkriens
@BundleCategory(Category.adoption)
class B {
}
@BundleCopyright("[[\n\rXyz: Hello world. , ; = \\]]")
class Z {
}
@BundleCopyright("v=${@version} p=${@package} c=${@class} s=${@class-short}")
@Angular
class C {
}
//
// Check that requirement/capability annotations declared inside a nested interface are picked up
interface X {
@aQute.bnd.annotation.headers.RequireCapability(ns = "x", filter = "(x=xx)")
@interface Require {}
@aQute.bnd.annotation.headers.ProvideCapability(ns = "x", name = "xx")
@interface Provide {}
}
@X.Provide
class XImpl {}
@aQute.bnd.annotation.headers.ProvideCapability(ns = "extrattrs", name = "extrattrs", value = "extra=YES")
interface ExtraAttrs {
}
@aQute.bnd.annotation.headers.RequireCapability(ns = "nofilter", filter = "")
@interface NoFilterRequirement {}
@NoFilterRequirement
class NoFilterImpl {}
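// The annotations and classes above (A, B, C, Z, X/XImpl, ExtraAttrs, NoFilterImpl and the various
// @Bundle*-annotated marker annotations) are the fixtures whose generated manifest headers
// testBasic() below verifies; Notused is never applied, which is why its "not.there" requirement
// must not show up in Require-Capability.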
public void testBasic() throws Exception {
Builder b = new Builder();
b.addClasspath(new File("bin_test"));
b.setProperty("Private-Package", "test.annotationheaders");
b.build();
assertTrue(b.check());
Manifest manifest = b.getJar()
.getManifest();
manifest.write(System.out);
Parameters provideWithExtraAttrs = new Parameters(manifest.getMainAttributes()
.getValue(Constants.PROVIDE_CAPABILITY));
Attrs attrs = provideWithExtraAttrs.get("extrattrs");
assertNotNull(attrs);
assertEquals("extrattrs=extrattrs;extra=YES", attrs.toString());
String rc = manifest.getMainAttributes()
.getValue(Constants.REQUIRE_CAPABILITY);
assertNotNull(rc);
System.out.println(rc);
assertTrue(rc.contains("osgi.webresource=/google"));
assertTrue(rc.contains(">=1.2.3")); // from package info
assertTrue(rc.contains(">=2.0.0")); // from package info
assertFalse(rc.contains("xx"));
assertTrue(rc.contains(",nofilter,"));
String pc = manifest.getMainAttributes()
.getValue(Constants.PROVIDE_CAPABILITY);
assertNotNull(pc);
System.out.println(pc);
assertTrue(pc.contains("x=xx"));
assertFalse(rc.contains("not.there"));
String bl = manifest.getMainAttributes()
.getValue(Constants.BUNDLE_LICENSE);
assertNotNull(bl);
System.out.println(bl);
assertTrue(bl.contains("Apache-2.0"));
assertTrue(bl.contains("MIT"));
assertFalse(bl.contains("GPL"));
String dv = manifest.getMainAttributes()
.getValue(Constants.BUNDLE_DEVELOPERS);
assertNotNull(dv);
System.out.println(dv);
assertTrue(dv.contains("Peter Kriens"));
dv = manifest.getMainAttributes()
.getValue(Constants.BUNDLE_CONTRIBUTORS);
assertNotNull(dv);
System.out.println(dv);
assertTrue(dv.contains("Mieke Kriens"));
assertTrue(dv.contains("Thomas Kriens"));
assertFalse(dv.contains("Mischa Kriens"));
dv = manifest.getMainAttributes()
.getValue(Constants.BUNDLE_COPYRIGHT);
assertNotNull(dv);
System.out.println(dv);
assertTrue(dv.contains("other baloney"));
Matcher m = Pattern.compile("([0-9]{4})")
.matcher(dv);
assertTrue(m.find());
assertTrue(Integer.parseInt(m.group(1)) >= 2014);
assertTrue(dv.contains("v=1.2.3"));
assertTrue(dv.contains("p=test.annotationheaders"));
assertTrue(dv.contains("c=test.annotationheaders.AnnotationHeadersTest$C"));
assertTrue(dv.contains("s=AnnotationHeadersTest$C"));
dv = manifest.getMainAttributes()
.getValue(Constants.BUNDLE_DOCURL);
assertNotNull(dv);
System.out.println(dv);
assertTrue(dv.contains("http://www.aQute.biz"));
Parameters cpr = new Parameters(manifest.getMainAttributes()
.getValue(Constants.BUNDLE_COPYRIGHT));
for (Entry<String, Attrs> e : cpr.entrySet()) {
System.out.println("cpr: " + e);
}
ByteArrayOutputStream bout = new ByteArrayOutputStream();
Jar.writeManifest(manifest, bout);
String s = new String(bout.toByteArray(), "UTF-8");
System.out.println(s);
ByteArrayInputStream bin = new ByteArrayInputStream(bout.toByteArray());
Manifest m2 = new Manifest(bin);
String v = m2.getMainAttributes()
.getValue(Constants.BUNDLE_COPYRIGHT);
assertNotNull(v);
assertTrue(v.contains("Hello world"));
assertNull(m2.getMainAttributes()
.getValue("Xyz"));
b.close();
}
}
|
|
// Copyright 2011-2016 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.security.zynamics.binnavi.Gui.Users;
import com.google.common.base.Preconditions;
import com.google.security.zynamics.binnavi.CUtilityFunctions;
import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntDeleteException;
import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntLoadDataException;
import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntSaveDataException;
import com.google.security.zynamics.binnavi.Database.Interfaces.SQLProvider;
import com.google.security.zynamics.binnavi.Database.Interfaces.SQLProviderListener;
import com.google.security.zynamics.binnavi.Gui.GraphWindows.CommentDialogs.Interfaces.IComment;
import com.google.security.zynamics.binnavi.Gui.Users.Interfaces.IUser;
import com.google.security.zynamics.binnavi.Gui.Users.Interfaces.IUserManagerListener;
import com.google.security.zynamics.zylib.general.ListenerProvider;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * The user manager is the central source for everything related to user management in BinNavi.
 * It provides the information about which users are known in a specific database and regulates
 * access to the resources. User management as currently implemented in BinNavi only provides
 * support for avoiding concurrent modification problems when multiple instances access the same
 * database. This feature does not provide any security and probably never will.
 */
public class CUserManager {
/**
* Keeps track of the user managers for the individual databases.
*/
private static Map<SQLProvider, CUserManager> managers = new HashMap<SQLProvider, CUserManager>();
/**
* Objects that want to be notified about changes regarding users.
*/
private final ListenerProvider<IUserManagerListener> listeners =
new ListenerProvider<IUserManagerListener>();
/**
* The set of users known to the manager.
*/
private final Set<IUser> users = new HashSet<IUser>();
/**
* The currently active user.
*/
private IUser activeUser = null;
/**
* Database for which the user manager was created.
*/
private final SQLProvider provider;
/**
* Listener which gets informed about changes in the {@link SQLProvider provider}. Used here to
* make sure we clean up the static references to the {@link SQLProvider provider} on close.
*/
private final SQLProviderListener providerListener = new InternalSQLProviderListener();
/**
* Creates a new user manager object.
*
* @param provider Database for which the user manager was created.
*/
private CUserManager(final SQLProvider provider, final List<IUser> users) {
this.provider =
Preconditions.checkNotNull(provider, "IE02716: provider argument can not be null.");
this.users.addAll(users);
this.provider.addListener(providerListener);
}
/**
* Returns the user manager for a database.
*
* @param provider The provider which is used to access the database.
*
* @return The user manager associated with the given database.
*/
public static synchronized CUserManager get(final SQLProvider provider) {
Preconditions.checkNotNull(provider, "IE02717: provider argument can not be null.");
if (!managers.containsKey(provider)) {
try {
managers.put(provider, new CUserManager(provider, provider.loadUsers()));
} catch (final CouldntLoadDataException exception) {
CUtilityFunctions.logException(exception);
}
}
return managers.get(provider);
}
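/*
 * Typical usage (illustrative sketch; how the SQLProvider is obtained depends on the caller):
 *
 *   CUserManager manager = CUserManager.get(provider);
 *   IUser user = manager.containsUserName("alice")
 *       ? manager.getUserByUserName("alice")
 *       : manager.addUser("alice");
 *   manager.setCurrentActiveUser(user);
 */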
private void close() {
managers.remove(provider);
provider.removeListener(providerListener);
}
/**
* Synchronizes the internal list of known users with the list of known users in the database.
*/
private synchronized void syncUsers() {
try {
users.clear();
users.addAll(provider.loadUsers());
} catch (final CouldntLoadDataException exception) {
CUtilityFunctions.logException(exception);
}
}
/**
* Adds a listener object that is notified about changes to users and changes in the user
* management.
*
* @param listener The listener object to add.
*/
public synchronized void addListener(final IUserManagerListener listener) {
listeners.addListener(listener);
}
/**
 * Adds a user to the user management and saves the information to the database.
 *
 * @param userName The name of the user to be added to user management.
 *
 * @return The newly added user.
 * @throws CouldntSaveDataException if the user could not be saved to the database.
 */
public synchronized IUser addUser(final String userName) throws CouldntSaveDataException {
Preconditions.checkNotNull(userName, "IE02718: user name argument can not be null.");
if (containsUserName(userName)) {
throw new IllegalStateException("IE02719: User is already known to user management.");
}
final IUser user = provider.addUser(userName);
users.add(user);
for (final IUserManagerListener listener : listeners) {
try {
listener.addedUser(user);
} catch (final Exception exception) {
CUtilityFunctions.logException(exception);
}
}
return user;
}
/**
* Checks the stored user names against a provided user name.
*
* @param userName The user name to be checked.
*
* @return true if the user names already contain the given user name.
*/
public synchronized boolean containsUserName(final String userName) {
Preconditions.checkNotNull(userName, "IE02720: userName argument can not be null");
syncUsers();
for (final IUser storedUser : users) {
if (storedUser.getUserName().equalsIgnoreCase(userName)) {
return true;
}
}
return false;
}
/**
 * Removes a user from user management and deletes it from the database.
 *
 * @param user The user to delete.
 * @throws CouldntDeleteException if the user could not be deleted from the database.
 */
public synchronized void deleteUser(final IUser user) throws CouldntDeleteException {
Preconditions.checkNotNull(user, "IE02721: user argument can not be null");
if (!users.contains(user)) {
throw new IllegalStateException("IE02722: User is not known to the user management.");
}
provider.deleteUser(user);
for (final IUserManagerListener listener : listeners) {
try {
listener.deletedUser(user);
} catch (final Exception exception) {
CUtilityFunctions.logException(exception);
}
}
}
/**
 * Changes a user's user name.
 *
 * @param user The user whose name should be changed.
 * @param userName The new user name for the user.
 *
 * @return The updated user.
 * @throws CouldntSaveDataException if the new user name could not be saved to the database.
 */
public synchronized IUser editUserName(final IUser user, final String userName)
throws CouldntSaveDataException {
Preconditions.checkNotNull(user, "IE02723: user argument can not be null");
Preconditions.checkNotNull(userName, "IE02724: userName argument can not be null");
if (!users.contains(user)) {
throw new IllegalStateException("IE02725: User is not known to the user management.");
}
if (containsUserName(userName)) {
throw new IllegalStateException("IE02726: User name is already in use by another user.");
}
final IUser newUser = provider.editUserName(user, userName);
for (final IUserManagerListener listener : listeners) {
try {
listener.editedUser(newUser);
} catch (final Exception exception) {
CUtilityFunctions.logException(exception);
}
}
return newUser;
}
/**
* Returns the currently active user.
*
* @return The user which is currently active.
*/
public synchronized IUser getCurrentActiveUser() {
if (activeUser == null) {
throw new IllegalStateException("IE02727: Current active user is not set.");
}
return activeUser;
}
/**
* Returns the currently known listeners.
*
* @return The currently known listeners.
*/
public synchronized Iterator<IUserManagerListener> getListeners() {
return listeners.iterator();
}
/**
 * Returns a user by id.
 *
 * @param userId The user id of the user to search for.
 *
 * @return The user if found, null otherwise.
 */
public synchronized IUser getUserById(final int userId) {
Preconditions.checkArgument(userId >= 0, "Error: User id must be a positive number");
for (final IUser storedUser : users) {
if (storedUser.getUserId() == userId) {
return storedUser;
}
}
// if it is not found locally it might be present in the database therefore sync after the local
// users have been checked.
syncUsers();
for (final IUser storedUser : users) {
if (storedUser.getUserId() == userId) {
return storedUser;
}
}
return null;
}
/**
 * Returns a user by user name.
 *
 * @param userName The user name of the user to search for.
 *
 * @return The user if found, null otherwise.
 */
public synchronized IUser getUserByUserName(final String userName) {
Preconditions.checkNotNull(userName, "IE02728: userName argument can not be null");
for (final IUser storedUser : users) {
if (storedUser.getUserName().equalsIgnoreCase(userName)) {
return storedUser;
}
}
// if it is not found locally it might be present in the database therefore sync after the local
// users have been checked.
syncUsers();
for (final IUser storedUser : users) {
if (storedUser.getUserName().equalsIgnoreCase(userName)) {
return storedUser;
}
}
return null;
}
public synchronized boolean isOwner(final IComment comment) {
return getCurrentActiveUser().getUserId() == comment.getUser().getUserId();
}
/**
 * Removes a listener object from the user management.
 *
 * @param listener The listener to be removed.
 */
public synchronized void removeListener(final IUserManagerListener listener) {
listeners.removeListener(listener);
}
/**
* Sets the current active user.
*
* @param user The user which will be set active.
*/
public synchronized void setCurrentActiveUser(final IUser user) {
Preconditions.checkNotNull(user, "IE02729: user argument can not be null.");
if (users.contains(user)) {
activeUser = user;
} else {
throw new IllegalStateException(
"Error: User to be set active is not known to user management.");
}
}
/**
* Internal listener class to be informed about provider changes.
*/
private class InternalSQLProviderListener implements SQLProviderListener {
@Override
public void providerClosing(SQLProvider provider) {
if (CUserManager.this.provider.equals(provider)) {
CUserManager.this.close();
}
}
}
}
|
|
/*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.jvm.java;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.FakeBuildRule;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.SourcePaths;
import com.facebook.buck.step.ExecutionContext;
import com.facebook.buck.step.TestExecutionContext;
import com.facebook.buck.testutil.FakeProjectFilesystem;
import com.facebook.buck.testutil.integration.DebuggableTemporaryFolder;
import com.facebook.buck.util.MockClassLoader;
import com.google.common.base.Charsets;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.io.Files;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Writer;
import java.nio.charset.Charset;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Locale;
import java.util.Set;
import javax.lang.model.SourceVersion;
import javax.tools.DiagnosticListener;
import javax.tools.JavaCompiler;
import javax.tools.JavaFileManager;
import javax.tools.JavaFileObject;
import javax.tools.StandardJavaFileManager;
public class Jsr199JavacIntegrationTest {
private static final SourcePathResolver PATH_RESOLVER =
new SourcePathResolver(new BuildRuleResolver());
public static final ImmutableSortedSet<Path> SOURCE_PATHS =
ImmutableSortedSet.of(Paths.get("Example.java"));
@Rule
public DebuggableTemporaryFolder tmp = new DebuggableTemporaryFolder();
private Path pathToSrcsList;
@Before
public void setUp() {
pathToSrcsList = Paths.get(tmp.getRoot().getPath(), "srcs_list");
}
@Test
public void testGetDescription() throws IOException {
Jsr199Javac javac = createJavac(/* withSyntaxError */ false);
String pathToOutputDir = new File(tmp.getRoot(), "out").getAbsolutePath();
assertEquals(
String.format("javac -source %s -target %s -g " +
"-d %s " +
"-classpath '' " +
"@" + pathToSrcsList.toString(),
JavaBuckConfig.TARGETED_JAVA_VERSION,
JavaBuckConfig.TARGETED_JAVA_VERSION,
pathToOutputDir),
javac.getDescription(
ImmutableList.of(
"-source", JavaBuckConfig.TARGETED_JAVA_VERSION,
"-target", JavaBuckConfig.TARGETED_JAVA_VERSION,
"-g",
"-d", pathToOutputDir,
"-classpath", "''"),
SOURCE_PATHS,
Optional.of(pathToSrcsList)));
}
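// For illustration only: if TARGETED_JAVA_VERSION were "7" and the temporary folder were /tmp/x,
// the description asserted above would read
//   javac -source 7 -target 7 -g -d /tmp/x/out -classpath '' @/tmp/x/srcs_list
// (the concrete version and paths come from JavaBuckConfig and the temporary folder rule).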
@Test
public void testGetShortName() throws IOException {
Jsr199Javac javac = createJavac(/* withSyntaxError */ false);
assertEquals("javac", javac.getShortName());
}
@Test
public void testClassesFile() throws IOException, InterruptedException {
Jsr199Javac javac = createJavac(/* withSyntaxError */ false);
ExecutionContext executionContext = TestExecutionContext.newInstance();
int exitCode = javac.buildWithClasspath(
executionContext,
createProjectFilesystem(),
PATH_RESOLVER,
BuildTargetFactory.newInstance("//some:example"),
ImmutableList.<String>of(),
SOURCE_PATHS,
Optional.of(pathToSrcsList),
Optional.<Path>absent());
assertEquals("javac should exit with code 0.", exitCode, 0);
File srcsListFile = pathToSrcsList.toFile();
assertTrue(srcsListFile.exists());
assertTrue(srcsListFile.isFile());
assertEquals("Example.java", Files.toString(srcsListFile, Charsets.UTF_8).trim());
}
/**
* There was a bug where `BuildTargetSourcePath` sources were written to the classes file using
* their string representation, rather than their resolved path.
*/
@Test
public void shouldWriteResolvedBuildTargetSourcePathsToClassesFile()
throws IOException, InterruptedException {
BuildRuleResolver resolver = new BuildRuleResolver();
SourcePathResolver pathResolver = new SourcePathResolver(resolver);
BuildRule rule = new FakeBuildRule("//:fake", pathResolver);
resolver.addToIndex(rule);
Jsr199Javac javac = createJavac(
/* withSyntaxError */ false);
ExecutionContext executionContext = TestExecutionContext.newInstance();
int exitCode = javac.buildWithClasspath(
executionContext,
createProjectFilesystem(),
PATH_RESOLVER,
BuildTargetFactory.newInstance("//some:example"),
ImmutableList.<String>of(),
SOURCE_PATHS,
Optional.of(pathToSrcsList),
Optional.<Path>absent());
assertEquals("javac should exit with code 0.", exitCode, 0);
File srcsListFile = pathToSrcsList.toFile();
assertTrue(srcsListFile.exists());
assertTrue(srcsListFile.isFile());
assertEquals("Example.java", Files.toString(srcsListFile, Charsets.UTF_8).trim());
}
public static final class MockJavac implements JavaCompiler {
public MockJavac() {
}
@Override
public Set<SourceVersion> getSourceVersions() {
return ImmutableSet.of(SourceVersion.RELEASE_7);
}
@Override
public int run(
InputStream in,
OutputStream out,
OutputStream err,
String... arguments) {
throw new UnsupportedOperationException("abcdef");
}
@Override
public int isSupportedOption(String option) {
return -1;
}
@Override
public StandardJavaFileManager
getStandardFileManager(
DiagnosticListener<? super JavaFileObject> diagnosticListener,
Locale locale,
Charset charset) {
throw new UnsupportedOperationException("abcdef");
}
@Override
public CompilationTask getTask(
Writer out,
JavaFileManager fileManager,
DiagnosticListener<? super JavaFileObject> diagnosticListener,
Iterable<String> options,
Iterable<String> classes,
Iterable<? extends JavaFileObject> compilationUnits) {
throw new UnsupportedOperationException("abcdef");
}
}
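// shouldUseSpecifiedJavacJar below verifies that the javac jar passed to createJavac() is the one
// actually used: it pre-populates the ExecutionContext's class loader cache so that the fake jar's
// URL resolves to a MockClassLoader mapping com.sun.tools.javac.api.JavacTool to MockJavac, and
// then treats the UnsupportedOperationException("abcdef") thrown by MockJavac as proof that the
// compiler implementation came from the specified jar rather than the JDK default.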
@Test
public void shouldUseSpecifiedJavacJar() throws Exception {
BuildRuleResolver resolver = new BuildRuleResolver();
SourcePathResolver pathResolver = new SourcePathResolver(resolver);
BuildRule rule = new FakeBuildRule("//:fake", pathResolver);
resolver.addToIndex(rule);
Path fakeJavacJar = Paths.get("ae036e57-77a7-4356-a79c-0f85b1a3290d", "fakeJavac.jar");
ExecutionContext executionContext = TestExecutionContext.newInstance();
MockClassLoader mockClassLoader = new MockClassLoader(
ClassLoader.getSystemClassLoader(),
ImmutableMap.<String, Class<?>>of(
"com.sun.tools.javac.api.JavacTool",
MockJavac.class));
executionContext.getClassLoaderCache().injectClassLoader(
ClassLoader.getSystemClassLoader(),
ImmutableList.of(fakeJavacJar.toUri().toURL()),
mockClassLoader);
Jsr199Javac javac = createJavac(
/* withSyntaxError */ false,
Optional.of(fakeJavacJar));
boolean caught = false;
try {
javac.buildWithClasspath(
executionContext,
createProjectFilesystem(),
PATH_RESOLVER,
BuildTargetFactory.newInstance("//some:example"),
ImmutableList.<String>of(),
SOURCE_PATHS,
Optional.of(pathToSrcsList),
Optional.<Path>absent());
fail("Did not expect compilation to succeed");
} catch (UnsupportedOperationException ex) {
if (ex.toString().contains("abcdef")) {
caught = true;
}
}
assertTrue("mock Java compiler should throw", caught);
}
private Jsr199Javac createJavac(
boolean withSyntaxError,
Optional<Path> javacJar) throws IOException {
File exampleJava = tmp.newFile("Example.java");
Files.write(Joiner.on('\n').join(
"package com.example;",
"",
"public class Example {" +
(withSyntaxError ? "" : "}")
),
exampleJava,
Charsets.UTF_8);
Path pathToOutputDirectory = Paths.get("out");
tmp.newFolder(pathToOutputDirectory.toString());
Optional<SourcePath> jar = javacJar.transform(
SourcePaths.toSourcePath(new FakeProjectFilesystem()));
if (jar.isPresent()) {
return new JarBackedJavac("com.sun.tools.javac.api.JavacTool", ImmutableSet.of(jar.get()));
}
return new JdkProvidedInMemoryJavac();
}
private Jsr199Javac createJavac(boolean withSyntaxError) throws IOException {
return createJavac(withSyntaxError, Optional.<Path>absent());
}
private ProjectFilesystem createProjectFilesystem() {
return new ProjectFilesystem(tmp.getRootPath());
}
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.packaging.test;
import org.elasticsearch.packaging.util.Distribution;
import org.elasticsearch.packaging.util.Docker;
import org.elasticsearch.packaging.util.FileUtils;
import org.elasticsearch.packaging.util.Installation;
import org.elasticsearch.packaging.util.Packages;
import org.elasticsearch.packaging.util.Platforms;
import org.elasticsearch.packaging.util.ServerUtils;
import org.elasticsearch.packaging.util.Shell;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.packaging.util.Archives.ARCHIVE_OWNER;
import static org.elasticsearch.packaging.util.Archives.installArchive;
import static org.elasticsearch.packaging.util.Archives.verifyArchiveInstallation;
import static org.elasticsearch.packaging.util.Docker.assertPermissionsAndOwnership;
import static org.elasticsearch.packaging.util.Docker.runContainer;
import static org.elasticsearch.packaging.util.Docker.runContainerExpectingFailure;
import static org.elasticsearch.packaging.util.Docker.waitForElasticsearch;
import static org.elasticsearch.packaging.util.Docker.waitForPathToExist;
import static org.elasticsearch.packaging.util.FileMatcher.Fileness.File;
import static org.elasticsearch.packaging.util.FileMatcher.file;
import static org.elasticsearch.packaging.util.FileMatcher.p600;
import static org.elasticsearch.packaging.util.FileMatcher.p660;
import static org.elasticsearch.packaging.util.FileUtils.rm;
import static org.elasticsearch.packaging.util.Packages.assertInstalled;
import static org.elasticsearch.packaging.util.Packages.assertRemoved;
import static org.elasticsearch.packaging.util.Packages.installPackage;
import static org.elasticsearch.packaging.util.Packages.verifyPackageInstallation;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.startsWith;
import static org.junit.Assume.assumeThat;
import static org.junit.Assume.assumeTrue;
public class KeystoreManagementTests extends PackagingTestCase {
public static final String ERROR_INCORRECT_PASSWORD = "Provided keystore password was incorrect";
public static final String ERROR_CORRUPTED_KEYSTORE = "Keystore has been corrupted or tampered with";
public static final String ERROR_KEYSTORE_NOT_PASSWORD_PROTECTED = "ERROR: Keystore is not password-protected";
public static final String ERROR_KEYSTORE_NOT_FOUND = "ERROR: Elasticsearch keystore not found";
/** Test initial archive state */
public void test10InstallArchiveDistribution() throws Exception {
assumeTrue(distribution().isArchive());
installation = installArchive(sh, distribution);
verifyArchiveInstallation(installation, distribution());
final Installation.Executables bin = installation.executables();
Shell.Result r = sh.runIgnoreExitCode(bin.keystoreTool.toString() + " has-passwd");
assertFalse("has-passwd should fail", r.isSuccess());
assertThat("has-passwd should indicate missing keystore", r.stderr, containsString(ERROR_KEYSTORE_NOT_FOUND));
}
/** Test initial package state */
public void test11InstallPackageDistribution() throws Exception {
assumeTrue(distribution().isPackage());
assertRemoved(distribution);
installation = installPackage(sh, distribution);
assertInstalled(distribution);
verifyPackageInstallation(installation, distribution, sh);
final Installation.Executables bin = installation.executables();
Shell.Result r = sh.runIgnoreExitCode(bin.keystoreTool.toString() + " has-passwd");
assertFalse("has-passwd should fail", r.isSuccess());
assertThat("has-passwd should indicate unprotected keystore", r.stderr, containsString(ERROR_KEYSTORE_NOT_PASSWORD_PROTECTED));
Shell.Result r2 = bin.keystoreTool.run("list");
assertThat(r2.stdout, containsString("keystore.seed"));
}
/** Test initial Docker state */
public void test12InstallDockerDistribution() throws Exception {
assumeTrue(distribution().isDocker());
installation = Docker.runContainer(distribution());
try {
waitForPathToExist(installation.config("elasticsearch.keystore"));
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
final Installation.Executables bin = installation.executables();
Shell.Result r = sh.runIgnoreExitCode(bin.keystoreTool.toString() + " has-passwd");
assertFalse("has-passwd should fail", r.isSuccess());
assertThat("has-passwd should indicate unprotected keystore", r.stdout, containsString(ERROR_KEYSTORE_NOT_PASSWORD_PROTECTED));
Shell.Result r2 = bin.keystoreTool.run("list");
assertThat(r2.stdout, containsString("keystore.seed"));
}
public void test20CreateKeystoreManually() throws Exception {
rmKeystoreIfExists();
createKeystore(null);
final Installation.Executables bin = installation.executables();
verifyKeystorePermissions();
Shell.Result r = bin.keystoreTool.run("list");
assertThat(r.stdout, containsString("keystore.seed"));
}
public void test30AutoCreateKeystore() throws Exception {
assumeTrue("Packages and docker are installed with a keystore file", distribution.isArchive());
rmKeystoreIfExists();
startElasticsearch();
stopElasticsearch();
Platforms.onWindows(() -> sh.chown(installation.config("elasticsearch.keystore")));
verifyKeystorePermissions();
final Installation.Executables bin = installation.executables();
Shell.Result r = bin.keystoreTool.run("list");
assertThat(r.stdout, containsString("keystore.seed"));
}
public void test40KeystorePasswordOnStandardInput() throws Exception {
assumeTrue("packages will use systemd, which doesn't handle stdin", distribution.isArchive());
assumeThat(installation, is(notNullValue()));
String password = "^|<>\\&exit"; // code insertion on Windows if special characters are not escaped
rmKeystoreIfExists();
createKeystore(password);
assertPasswordProtectedKeystore();
awaitElasticsearchStartup(runElasticsearchStartCommand(password, true, false));
ServerUtils.runElasticsearchTests();
stopElasticsearch();
}
public void test41WrongKeystorePasswordOnStandardInput() throws Exception {
assumeTrue("packages will use systemd, which doesn't handle stdin", distribution.isArchive());
assumeThat(installation, is(notNullValue()));
assertPasswordProtectedKeystore();
Shell.Result result = runElasticsearchStartCommand("wrong", false, false);
assertElasticsearchFailure(result, Arrays.asList(ERROR_INCORRECT_PASSWORD, ERROR_CORRUPTED_KEYSTORE), null);
}
public void test42KeystorePasswordOnTty() throws Exception {
/* Windows issue awaits fix: https://github.com/elastic/elasticsearch/issues/49340 */
assumeTrue("expect command isn't on Windows", distribution.platform != Distribution.Platform.WINDOWS);
assumeTrue("packages will use systemd, which doesn't handle stdin", distribution.isArchive());
assumeThat(installation, is(notNullValue()));
String password = "keystorepass";
rmKeystoreIfExists();
createKeystore(password);
assertPasswordProtectedKeystore();
awaitElasticsearchStartup(runElasticsearchStartCommand(password, true, true));
ServerUtils.runElasticsearchTests();
stopElasticsearch();
}
public void test43WrongKeystorePasswordOnTty() throws Exception {
/* Windows issue awaits fix: https://github.com/elastic/elasticsearch/issues/49340 */
assumeTrue("expect command isn't on Windows", distribution.platform != Distribution.Platform.WINDOWS);
assumeTrue("packages will use systemd, which doesn't handle stdin", distribution.isArchive());
assumeThat(installation, is(notNullValue()));
assertPasswordProtectedKeystore();
Shell.Result result = runElasticsearchStartCommand("wrong", false, true);
// error will be on stdout for "expect"
assertThat(result.stdout, anyOf(containsString(ERROR_INCORRECT_PASSWORD), containsString(ERROR_CORRUPTED_KEYSTORE)));
}
/**
* If we have an encrypted keystore, we shouldn't require a password to
* view help information.
*/
public void test44EncryptedKeystoreAllowsHelpMessage() throws Exception {
assumeTrue("users call elasticsearch directly in archive case", distribution.isArchive());
String password = "keystorepass";
rmKeystoreIfExists();
createKeystore(password);
assertPasswordProtectedKeystore();
Shell.Result r = installation.executables().elasticsearch.run("--help");
assertThat(r.stdout, startsWith("Starts Elasticsearch"));
}
public void test50KeystorePasswordFromFile() throws Exception {
assumeTrue("only for systemd", Platforms.isSystemd() && distribution().isPackage());
String password = "!@#$%^&*()|\\<>/?";
Path esKeystorePassphraseFile = installation.config.resolve("eks");
rmKeystoreIfExists();
createKeystore(password);
assertPasswordProtectedKeystore();
try {
sh.run("sudo systemctl set-environment ES_KEYSTORE_PASSPHRASE_FILE=" + esKeystorePassphraseFile);
Files.createFile(esKeystorePassphraseFile);
Files.write(esKeystorePassphraseFile, List.of(password));
startElasticsearch();
ServerUtils.runElasticsearchTests();
stopElasticsearch();
} finally {
sh.run("sudo systemctl unset-environment ES_KEYSTORE_PASSPHRASE_FILE");
}
}
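// test50 and test51 rely on exporting ES_KEYSTORE_PASSPHRASE_FILE into systemd's environment
// (systemctl set-environment), so that the Elasticsearch service started via systemd can pick up
// the keystore password from that file instead of from stdin; the finally blocks unset the
// variable again so that later tests are unaffected.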
public void test51WrongKeystorePasswordFromFile() throws Exception {
assumeTrue("only for systemd", Platforms.isSystemd() && distribution().isPackage());
Path esKeystorePassphraseFile = installation.config.resolve("eks");
assertPasswordProtectedKeystore();
try {
sh.run("sudo systemctl set-environment ES_KEYSTORE_PASSPHRASE_FILE=" + esKeystorePassphraseFile);
if (Files.exists(esKeystorePassphraseFile)) {
rm(esKeystorePassphraseFile);
}
Files.createFile(esKeystorePassphraseFile);
Files.write(esKeystorePassphraseFile, List.of("wrongpassword"));
Packages.JournaldWrapper journaldWrapper = new Packages.JournaldWrapper(sh);
Shell.Result result = runElasticsearchStartCommand(null, false, false);
assertElasticsearchFailure(result, Arrays.asList(ERROR_INCORRECT_PASSWORD, ERROR_CORRUPTED_KEYSTORE), journaldWrapper);
} finally {
sh.run("sudo systemctl unset-environment ES_KEYSTORE_PASSPHRASE_FILE");
}
}
/**
* Check that we can mount a password-protected keystore to a docker image
* and provide a password via an environment variable.
*/
public void test60DockerEnvironmentVariablePassword() throws Exception {
assumeTrue(distribution().isDocker());
String password = "password";
Path dockerKeystore = installation.config("elasticsearch.keystore");
Path localKeystoreFile = getKeystoreFileFromDockerContainer(password, dockerKeystore);
// restart ES with password and mounted keystore
Map<Path, Path> volumes = Map.of(localKeystoreFile, dockerKeystore);
Map<String, String> envVars = Map.of("KEYSTORE_PASSWORD", password);
runContainer(distribution(), volumes, envVars);
waitForElasticsearch(installation);
ServerUtils.runElasticsearchTests();
}
/**
* Check that we can mount a password-protected keystore to a docker image
* and provide a password via a file, pointed at from an environment variable.
*/
public void test61DockerEnvironmentVariablePasswordFromFile() throws Exception {
assumeTrue(distribution().isDocker());
Path tempDir = null;
try {
tempDir = createTempDir(DockerTests.class.getSimpleName());
String password = "password";
String passwordFilename = "password.txt";
Files.writeString(tempDir.resolve(passwordFilename), password + "\n");
Files.setPosixFilePermissions(tempDir.resolve(passwordFilename), p600);
Path dockerKeystore = installation.config("elasticsearch.keystore");
Path localKeystoreFile = getKeystoreFileFromDockerContainer(password, dockerKeystore);
// restart ES with password and mounted keystore
Map<Path, Path> volumes = Map.of(localKeystoreFile, dockerKeystore, tempDir, Path.of("/run/secrets"));
Map<String, String> envVars = Map.of("KEYSTORE_PASSWORD_FILE", "/run/secrets/" + passwordFilename);
runContainer(distribution(), volumes, envVars);
waitForElasticsearch(installation);
ServerUtils.runElasticsearchTests();
} finally {
if (tempDir != null) {
rm(tempDir);
}
}
}
/**
* Check that if we provide the wrong password for a mounted and password-protected
* keystore, Elasticsearch doesn't start.
*/
public void test62DockerEnvironmentVariableBadPassword() throws Exception {
assumeTrue(distribution().isDocker());
String password = "password";
Path dockerKeystore = installation.config("elasticsearch.keystore");
Path localKeystoreFile = getKeystoreFileFromDockerContainer(password, dockerKeystore);
// restart ES with password and mounted keystore
Map<Path, Path> volumes = Map.of(localKeystoreFile, dockerKeystore);
Map<String, String> envVars = Map.of("KEYSTORE_PASSWORD", "wrong");
Shell.Result r = runContainerExpectingFailure(distribution(), volumes, envVars);
assertThat(r.stderr, containsString(ERROR_INCORRECT_PASSWORD));
}
/**
* In the Docker context, it's a little bit tricky to get a password-protected
* keystore. All of the utilities we'd want to use are on the Docker image.
* This method mounts a temporary directory to a Docker container, password-protects
* the keystore, and then returns the path of the file that appears in the
* mounted directory (now accessible from the local filesystem).
*/
private Path getKeystoreFileFromDockerContainer(String password, Path dockerKeystore) throws IOException {
// Mount a temporary directory for copying the keystore
Path dockerTemp = Path.of("/usr/tmp/keystore-tmp");
Path tempDirectory = createTempDir(KeystoreManagementTests.class.getSimpleName());
Map<Path, Path> volumes = Map.of(tempDirectory, dockerTemp);
// It's very tricky to properly quote a pipeline that you're passing to
// a docker exec command, so we're just going to put a small script in the
// temp folder.
List<String> setPasswordScript = List.of(
"echo \"" + password,
password,
"\" | " + installation.executables().keystoreTool.toString() + " passwd"
);
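// The generated set-pass.sh therefore looks like the following (with the real password and the
// keystore tool path substituted), i.e. the new password and its confirmation are piped on stdin
// to the keystore tool:
//
//   echo "<password>
//   <password>
//   " | <...>/bin/elasticsearch-keystore passwd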
Files.write(tempDirectory.resolve("set-pass.sh"), setPasswordScript);
runContainer(distribution(), volumes, null);
try {
waitForPathToExist(dockerTemp);
waitForPathToExist(dockerKeystore);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
// We need a local shell to put the correct permissions on our mounted directory.
Shell localShell = new Shell();
localShell.run("docker exec --tty " + Docker.getContainerId() + " chown elasticsearch:root " + dockerTemp);
localShell.run("docker exec --tty " + Docker.getContainerId() + " chown elasticsearch:root " + dockerTemp.resolve("set-pass.sh"));
sh.run("bash " + dockerTemp.resolve("set-pass.sh"));
// copy keystore to temp file to make it available to docker host
sh.run("cp " + dockerKeystore + " " + dockerTemp);
return tempDirectory.resolve("elasticsearch.keystore");
}
/** Create a keystore. Provide a password to password-protect it, otherwise use null */
private void createKeystore(String password) throws Exception {
Path keystore = installation.config("elasticsearch.keystore");
final Installation.Executables bin = installation.executables();
bin.keystoreTool.run("create");
// this is a hack around the fact that we can't run a command in the same session as the same user but not as administrator.
// the keystore ends up being owned by the Administrators group, so we manually set it to be owned by the vagrant user here.
// from the server's perspective the permissions aren't really different, this is just to reflect what we'd expect in the tests.
// when we run these commands as a role user we won't have to do this
Platforms.onWindows(() -> sh.chown(keystore));
if (distribution().isDocker()) {
try {
waitForPathToExist(keystore);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
if (password != null) {
setKeystorePassword(password);
}
}
private void rmKeystoreIfExists() {
Path keystore = installation.config("elasticsearch.keystore");
if (distribution().isDocker()) {
try {
waitForPathToExist(keystore);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
// Remove the auto-created one, or else the CLI prompt asks us to confirm
sh.run("rm " + keystore);
} else {
if (Files.exists(keystore)) {
FileUtils.rm(keystore);
}
}
}
private void setKeystorePassword(String password) throws Exception {
final Installation.Executables bin = installation.executables();
// set the password by passing it to stdin twice
Platforms.onLinux(() -> bin.keystoreTool.run("passwd", password + "\n" + password + "\n"));
Platforms.onWindows(
() -> sh.run("Invoke-Command -ScriptBlock {echo '" + password + "'; echo '" + password + "'} | " + bin.keystoreTool + " passwd")
);
}
private void assertPasswordProtectedKeystore() {
Shell.Result r = installation.executables().keystoreTool.run("has-passwd");
assertThat("keystore should be password protected", r.exitCode, is(0));
}
private void verifyKeystorePermissions() {
Path keystore = installation.config("elasticsearch.keystore");
switch (distribution.packaging) {
case TAR:
case ZIP:
assertThat(keystore, file(File, ARCHIVE_OWNER, ARCHIVE_OWNER, p660));
break;
case DEB:
case RPM:
assertThat(keystore, file(File, "root", "elasticsearch", p660));
break;
case DOCKER:
assertPermissionsAndOwnership(keystore, p660);
break;
default:
throw new IllegalStateException("Unknown Elasticsearch packaging type.");
}
}
}
|
|
/*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.dataflow.sdk.io.datastore;
import static com.google.cloud.dataflow.sdk.io.datastore.DatastoreV1.DATASTORE_BATCH_UPDATE_LIMIT;
import static com.google.cloud.dataflow.sdk.io.datastore.DatastoreV1.Read.DEFAULT_BUNDLE_SIZE_BYTES;
import static com.google.cloud.dataflow.sdk.io.datastore.DatastoreV1.Read.QUERY_BATCH_LIMIT;
import static com.google.cloud.dataflow.sdk.io.datastore.DatastoreV1.Read.getEstimatedSizeBytes;
import static com.google.cloud.dataflow.sdk.io.datastore.DatastoreV1.Read.makeRequest;
import static com.google.cloud.dataflow.sdk.io.datastore.DatastoreV1.isValidKey;
import static com.google.cloud.dataflow.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
import static com.google.datastore.v1.PropertyFilter.Operator.EQUAL;
import static com.google.datastore.v1.PropertyOrder.Direction.DESCENDING;
import static com.google.datastore.v1.client.DatastoreHelper.makeAndFilter;
import static com.google.datastore.v1.client.DatastoreHelper.makeDelete;
import static com.google.datastore.v1.client.DatastoreHelper.makeFilter;
import static com.google.datastore.v1.client.DatastoreHelper.makeKey;
import static com.google.datastore.v1.client.DatastoreHelper.makeOrder;
import static com.google.datastore.v1.client.DatastoreHelper.makeUpsert;
import static com.google.datastore.v1.client.DatastoreHelper.makeValue;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.mockito.Mockito.when;
import com.google.cloud.dataflow.sdk.io.datastore.DatastoreV1.DatastoreWriterFn;
import com.google.cloud.dataflow.sdk.io.datastore.DatastoreV1.DeleteEntity;
import com.google.cloud.dataflow.sdk.io.datastore.DatastoreV1.DeleteEntityFn;
import com.google.cloud.dataflow.sdk.io.datastore.DatastoreV1.DeleteKey;
import com.google.cloud.dataflow.sdk.io.datastore.DatastoreV1.DeleteKeyFn;
import com.google.cloud.dataflow.sdk.io.datastore.DatastoreV1.Read;
import com.google.cloud.dataflow.sdk.io.datastore.DatastoreV1.Read.ReadFn;
import com.google.cloud.dataflow.sdk.io.datastore.DatastoreV1.Read.SplitQueryFn;
import com.google.cloud.dataflow.sdk.io.datastore.DatastoreV1.Read.V1Options;
import com.google.cloud.dataflow.sdk.io.datastore.DatastoreV1.UpsertFn;
import com.google.cloud.dataflow.sdk.io.datastore.DatastoreV1.V1DatastoreFactory;
import com.google.cloud.dataflow.sdk.io.datastore.DatastoreV1.Write;
import com.google.cloud.dataflow.sdk.options.PipelineOptions;
import com.google.cloud.dataflow.sdk.transforms.DoFnTester;
import com.google.cloud.dataflow.sdk.transforms.DoFnTester.CloningBehavior;
import com.google.cloud.dataflow.sdk.transforms.display.DisplayData;
import com.google.cloud.dataflow.sdk.values.KV;
import com.google.datastore.v1.CommitRequest;
import com.google.datastore.v1.Entity;
import com.google.datastore.v1.EntityResult;
import com.google.datastore.v1.Key;
import com.google.datastore.v1.Mutation;
import com.google.datastore.v1.PartitionId;
import com.google.datastore.v1.Query;
import com.google.datastore.v1.QueryResultBatch;
import com.google.datastore.v1.RunQueryRequest;
import com.google.datastore.v1.RunQueryResponse;
import com.google.datastore.v1.client.Datastore;
import com.google.datastore.v1.client.QuerySplitter;
import com.google.protobuf.Int32Value;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
/**
* Tests for {@link DatastoreV1}.
*/
@RunWith(JUnit4.class)
public class DatastoreV1Test {
private static final String PROJECT_ID = "testProject";
private static final String NAMESPACE = "testNamespace";
private static final String KIND = "testKind";
private static final Query QUERY;
private static final V1Options v1Options;
static {
Query.Builder q = Query.newBuilder();
q.addKindBuilder().setName(KIND);
QUERY = q.build();
v1Options = V1Options.from(PROJECT_ID, QUERY, NAMESPACE);
}
private Read initialRead;
@Mock
Datastore mockDatastore;
@Mock
QuerySplitter mockQuerySplitter;
@Mock
V1DatastoreFactory mockDatastoreFactory;
@Rule
public final ExpectedException thrown = ExpectedException.none();
@Before
public void setUp() {
MockitoAnnotations.initMocks(this);
initialRead = DatastoreIO.v1().read()
.withProjectId(PROJECT_ID).withQuery(QUERY).withNamespace(NAMESPACE);
when(mockDatastoreFactory.getDatastore(any(PipelineOptions.class), any(String.class)))
.thenReturn(mockDatastore);
when(mockDatastoreFactory.getQuerySplitter())
.thenReturn(mockQuerySplitter);
}
@Test
public void testBuildRead() throws Exception {
Read read = DatastoreIO.v1().read()
.withProjectId(PROJECT_ID).withQuery(QUERY).withNamespace(NAMESPACE);
assertEquals(QUERY, read.getQuery());
assertEquals(PROJECT_ID, read.getProjectId());
assertEquals(NAMESPACE, read.getNamespace());
}
/**
* {@link #testBuildRead} but constructed in a different order.
*/
@Test
public void testBuildReadAlt() throws Exception {
Read read = DatastoreIO.v1().read()
.withProjectId(PROJECT_ID).withNamespace(NAMESPACE).withQuery(QUERY);
assertEquals(QUERY, read.getQuery());
assertEquals(PROJECT_ID, read.getProjectId());
assertEquals(NAMESPACE, read.getNamespace());
}
@Test
public void testReadValidationFailsProject() throws Exception {
Read read = DatastoreIO.v1().read().withQuery(QUERY);
thrown.expect(NullPointerException.class);
thrown.expectMessage("project");
read.validate(null);
}
@Test
public void testReadValidationFailsQuery() throws Exception {
Read read = DatastoreIO.v1().read().withProjectId(PROJECT_ID);
thrown.expect(NullPointerException.class);
thrown.expectMessage("query");
read.validate(null);
}
@Test
public void testReadValidationFailsQueryLimitZero() throws Exception {
Query invalidLimit = Query.newBuilder().setLimit(Int32Value.newBuilder().setValue(0)).build();
thrown.expect(IllegalArgumentException.class);
thrown.expectMessage("Invalid query limit 0: must be positive");
DatastoreIO.v1().read().withQuery(invalidLimit);
}
@Test
public void testReadValidationFailsQueryLimitNegative() throws Exception {
Query invalidLimit = Query.newBuilder().setLimit(Int32Value.newBuilder().setValue(-5)).build();
thrown.expect(IllegalArgumentException.class);
thrown.expectMessage("Invalid query limit -5: must be positive");
DatastoreIO.v1().read().withQuery(invalidLimit);
}
@Test
public void testReadValidationSucceedsNamespace() throws Exception {
Read read = DatastoreIO.v1().read().withProjectId(PROJECT_ID).withQuery(QUERY);
/* Should succeed, as a null namespace is fine. */
read.validate(null);
}
@Test
public void testReadDisplayData() {
Read read = DatastoreIO.v1().read()
.withProjectId(PROJECT_ID)
.withQuery(QUERY)
.withNamespace(NAMESPACE);
DisplayData displayData = DisplayData.from(read);
assertThat(displayData, hasDisplayItem("projectId", PROJECT_ID));
assertThat(displayData, hasDisplayItem("query", QUERY.toString()));
assertThat(displayData, hasDisplayItem("namespace", NAMESPACE));
}
@Test
public void testWriteDoesNotAllowNullProject() throws Exception {
thrown.expect(NullPointerException.class);
thrown.expectMessage("projectId");
DatastoreIO.v1().write().withProjectId(null);
}
@Test
public void testWriteValidationFailsWithNoProject() throws Exception {
Write write = DatastoreIO.v1().write();
thrown.expect(NullPointerException.class);
thrown.expectMessage("projectId");
write.validate(null);
}
@Test
public void testWriteValidationSucceedsWithProject() throws Exception {
Write write = DatastoreIO.v1().write().withProjectId(PROJECT_ID);
write.validate(null);
}
@Test
public void testWriteDisplayData() {
Write write = DatastoreIO.v1().write().withProjectId(PROJECT_ID);
DisplayData displayData = DisplayData.from(write);
assertThat(displayData, hasDisplayItem("projectId", PROJECT_ID));
assertThat(displayData, hasDisplayItem("mutationFn", UpsertFn.class));
}
@Test
public void testDeleteEntityDoesNotAllowNullProject() throws Exception {
thrown.expect(NullPointerException.class);
thrown.expectMessage("projectId");
DatastoreIO.v1().deleteEntity().withProjectId(null);
}
@Test
public void testDeleteEntityValidationFailsWithNoProject() throws Exception {
DeleteEntity deleteEntity = DatastoreIO.v1().deleteEntity();
thrown.expect(NullPointerException.class);
thrown.expectMessage("projectId");
deleteEntity.validate(null);
}
@Test
public void testDeleteEntityValidationSucceedsWithProject() throws Exception {
DeleteEntity deleteEntity = DatastoreIO.v1().deleteEntity().withProjectId(PROJECT_ID);
deleteEntity.validate(null);
}
@Test
public void testDeleteEntityDisplayData() {
DeleteEntity deleteEntity = DatastoreIO.v1().deleteEntity().withProjectId(PROJECT_ID);
DisplayData displayData = DisplayData.from(deleteEntity);
assertThat(displayData, hasDisplayItem("projectId", PROJECT_ID));
assertThat(displayData, hasDisplayItem("mutationFn", DeleteEntityFn.class));
}
@Test
public void testDeleteKeyDoesNotAllowNullProject() throws Exception {
thrown.expect(NullPointerException.class);
thrown.expectMessage("projectId");
DatastoreIO.v1().deleteKey().withProjectId(null);
}
@Test
public void testDeleteKeyValidationFailsWithNoProject() throws Exception {
DeleteKey deleteKey = DatastoreIO.v1().deleteKey();
thrown.expect(NullPointerException.class);
thrown.expectMessage("projectId");
deleteKey.validate(null);
}
@Test
public void testDeleteKeyValidationSucceedsWithProject() throws Exception {
DeleteKey deleteKey = DatastoreIO.v1().deleteKey().withProjectId(PROJECT_ID);
deleteKey.validate(null);
}
@Test
public void testDeleteKeyDisplayData() {
DeleteKey deleteKey = DatastoreIO.v1().deleteKey().withProjectId(PROJECT_ID);
DisplayData displayData = DisplayData.from(deleteKey);
assertThat(displayData, hasDisplayItem("projectId", PROJECT_ID));
assertThat(displayData, hasDisplayItem("mutationFn", DeleteKeyFn.class));
}
/**
* Test building a Write using builder methods.
*/
@Test
public void testBuildWrite() throws Exception {
DatastoreV1.Write write = DatastoreIO.v1().write().withProjectId(PROJECT_ID);
assertEquals(PROJECT_ID, write.getProjectId());
}
/**
* Test the detection of complete and incomplete keys.
*/
@Test
public void testHasNameOrId() {
Key key;
// Complete with name, no ancestor
key = makeKey("bird", "finch").build();
assertTrue(isValidKey(key));
// Complete with id, no ancestor
key = makeKey("bird", 123).build();
assertTrue(isValidKey(key));
// Incomplete, no ancestor
key = makeKey("bird").build();
assertFalse(isValidKey(key));
// Complete with name and ancestor
key = makeKey("bird", "owl").build();
key = makeKey(key, "bird", "horned").build();
assertTrue(isValidKey(key));
// Complete with id and ancestor
key = makeKey("bird", "owl").build();
key = makeKey(key, "bird", 123).build();
assertTrue(isValidKey(key));
// Incomplete with ancestor
key = makeKey("bird", "owl").build();
key = makeKey(key, "bird").build();
assertFalse(isValidKey(key));
key = makeKey().build();
assertFalse(isValidKey(key));
}
/**
* Test that entities with incomplete keys cannot be updated.
*/
@Test
public void testAddEntitiesWithIncompleteKeys() throws Exception {
Key key = makeKey("bird").build();
Entity entity = Entity.newBuilder().setKey(key).build();
UpsertFn upsertFn = new UpsertFn();
thrown.expect(IllegalArgumentException.class);
thrown.expectMessage("Entities to be written to the Cloud Datastore must have complete keys");
upsertFn.apply(entity);
}
/**
 * Test that entities with valid keys are transformed to upsert mutations.
 */
@Test
public void testAddEntities() throws Exception {
Key key = makeKey("bird", "finch").build();
Entity entity = Entity.newBuilder().setKey(key).build();
UpsertFn upsertFn = new UpsertFn();
Mutation expectedMutation = makeUpsert(entity).build();
assertEquals(expectedMutation, upsertFn.apply(entity));
}
/**
* Test that entities with incomplete keys cannot be deleted.
*/
@Test
public void testDeleteEntitiesWithIncompleteKeys() throws Exception {
Key key = makeKey("bird").build();
Entity entity = Entity.newBuilder().setKey(key).build();
DeleteEntityFn deleteEntityFn = new DeleteEntityFn();
thrown.expect(IllegalArgumentException.class);
thrown.expectMessage("Entities to be deleted from the Cloud Datastore must have complete keys");
deleteEntityFn.apply(entity);
}
/**
* Test that entities with valid keys are transformed to delete mutations.
*/
@Test
public void testDeleteEntities() throws Exception {
Key key = makeKey("bird", "finch").build();
Entity entity = Entity.newBuilder().setKey(key).build();
DeleteEntityFn deleteEntityFn = new DeleteEntityFn();
Mutation expectedMutation = makeDelete(entity.getKey()).build();
assertEquals(deleteEntityFn.apply(entity), expectedMutation);
}
/**
* Test that incomplete keys cannot be deleted.
*/
@Test
public void testDeleteIncompleteKeys() throws Exception {
Key key = makeKey("bird").build();
DeleteKeyFn deleteKeyFn = new DeleteKeyFn();
thrown.expect(IllegalArgumentException.class);
thrown.expectMessage("Keys to be deleted from the Cloud Datastore must be complete");
deleteKeyFn.apply(key);
}
/**
* Test that valid keys are transformed to delete mutations.
*/
@Test
public void testDeleteKeys() throws Exception {
Key key = makeKey("bird", "finch").build();
DeleteKeyFn deleteKeyFn = new DeleteKeyFn();
Mutation expectedMutation = makeDelete(key).build();
assertEquals(deleteKeyFn.apply(key), expectedMutation);
}
@Test
public void testDatastoreWriteFnDisplayData() {
DatastoreWriterFn datastoreWriter = new DatastoreWriterFn(PROJECT_ID);
DisplayData displayData = DisplayData.from(datastoreWriter);
assertThat(displayData, hasDisplayItem("projectId", PROJECT_ID));
}
/** Tests {@link DatastoreWriterFn} with entities less than one batch. */
@Test
public void testDatastoreWriterFnWithOneBatch() throws Exception {
datastoreWriterFnTest(100);
}
/** Tests {@link DatastoreWriterFn} with entities spanning more than one batch, but not an exact multiple. */
@Test
public void testDatastoreWriterFnWithMultipleBatches() throws Exception {
datastoreWriterFnTest(DATASTORE_BATCH_UPDATE_LIMIT * 3 + 100);
}
/**
* Tests {@link DatastoreWriterFn} with entities spanning several batches, using an exact multiple
* of the write batch size.
*/
@Test
public void testDatastoreWriterFnWithBatchesExactMultiple() throws Exception {
datastoreWriterFnTest(DATASTORE_BATCH_UPDATE_LIMIT * 2);
}
// A helper method to test DatastoreWriterFn for various batch sizes.
private void datastoreWriterFnTest(int numMutations) throws Exception {
// Create the requested number of mutations.
List<Mutation> mutations = new ArrayList<>(numMutations);
for (int i = 0; i < numMutations; ++i) {
mutations.add(
makeUpsert(Entity.newBuilder().setKey(makeKey("key" + i, i + 1)).build()).build());
}
DatastoreWriterFn datastoreWriter = new DatastoreWriterFn(PROJECT_ID, mockDatastoreFactory);
DoFnTester<Mutation, Void> doFnTester = DoFnTester.of(datastoreWriter);
doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE);
doFnTester.processBatch(mutations);
int start = 0;
while (start < numMutations) {
int end = Math.min(numMutations, start + DATASTORE_BATCH_UPDATE_LIMIT);
CommitRequest.Builder commitRequest = CommitRequest.newBuilder();
commitRequest.setMode(CommitRequest.Mode.NON_TRANSACTIONAL);
commitRequest.addAllMutations(mutations.subList(start, end));
// Verify all the batch requests were made with the expected mutations.
verify(mockDatastore, times(1)).commit(commitRequest.build());
start = end;
}
}
/**
* Tests {@link DatastoreV1.Read#getEstimatedSizeBytes} to fetch and return estimated size for a
* query.
*/
@Test
public void testEstimatedSizeBytes() throws Exception {
long entityBytes = 100L;
// In seconds
long timestamp = 1234L;
RunQueryRequest latestTimestampRequest = makeRequest(makeLatestTimestampQuery(NAMESPACE),
NAMESPACE);
RunQueryResponse latestTimestampResponse = makeLatestTimestampResponse(timestamp);
// Per Kind statistics request and response
RunQueryRequest statRequest = makeRequest(makeStatKindQuery(NAMESPACE, timestamp), NAMESPACE);
RunQueryResponse statResponse = makeStatKindResponse(entityBytes);
when(mockDatastore.runQuery(latestTimestampRequest))
.thenReturn(latestTimestampResponse);
when(mockDatastore.runQuery(statRequest))
.thenReturn(statResponse);
assertEquals(entityBytes, getEstimatedSizeBytes(mockDatastore, QUERY, NAMESPACE));
verify(mockDatastore, times(1)).runQuery(latestTimestampRequest);
verify(mockDatastore, times(1)).runQuery(statRequest);
}
/**
* Tests {@link SplitQueryFn} when number of query splits is specified.
*/
@Test
public void testSplitQueryFnWithNumSplits() throws Exception {
int numSplits = 100;
when(mockQuerySplitter.getSplits(
eq(QUERY), any(PartitionId.class), eq(numSplits), any(Datastore.class)))
.thenReturn(splitQuery(QUERY, numSplits));
SplitQueryFn splitQueryFn = new SplitQueryFn(v1Options, numSplits, mockDatastoreFactory);
DoFnTester<Query, KV<Integer, Query>> doFnTester = DoFnTester.of(splitQueryFn);
/**
* Although Datastore client is marked transient in {@link SplitQueryFn}, when injected through
* mock factory using a when clause for unit testing purposes, it is not serializable
* because it doesn't have a no-arg constructor. Thus disabling the cloning to prevent the
* doFn from being serialized.
*/
doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE);
List<KV<Integer, Query>> queries = doFnTester.processBatch(QUERY);
assertEquals(queries.size(), numSplits);
verifyUniqueKeys(queries);
verify(mockQuerySplitter, times(1)).getSplits(
eq(QUERY), any(PartitionId.class), eq(numSplits), any(Datastore.class));
verifyZeroInteractions(mockDatastore);
}
/**
* Tests {@link SplitQueryFn} when the number of query splits is not specified.
*/
@Test
public void testSplitQueryFnWithoutNumSplits() throws Exception {
// Force SplitQueryFn to compute the number of query splits
int numSplits = 0;
int expectedNumSplits = 20;
long entityBytes = expectedNumSplits * DEFAULT_BUNDLE_SIZE_BYTES;
// In seconds
long timestamp = 1234L;
RunQueryRequest latestTimestampRequest = makeRequest(makeLatestTimestampQuery(NAMESPACE),
NAMESPACE);
RunQueryResponse latestTimestampResponse = makeLatestTimestampResponse(timestamp);
// Per Kind statistics request and response
RunQueryRequest statRequest = makeRequest(makeStatKindQuery(NAMESPACE, timestamp), NAMESPACE);
RunQueryResponse statResponse = makeStatKindResponse(entityBytes);
when(mockDatastore.runQuery(latestTimestampRequest))
.thenReturn(latestTimestampResponse);
when(mockDatastore.runQuery(statRequest))
.thenReturn(statResponse);
when(mockQuerySplitter.getSplits(
eq(QUERY), any(PartitionId.class), eq(expectedNumSplits), any(Datastore.class)))
.thenReturn(splitQuery(QUERY, expectedNumSplits));
SplitQueryFn splitQueryFn = new SplitQueryFn(v1Options, numSplits, mockDatastoreFactory);
DoFnTester<Query, KV<Integer, Query>> doFnTester = DoFnTester.of(splitQueryFn);
doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE);
List<KV<Integer, Query>> queries = doFnTester.processBatch(QUERY);
assertEquals(queries.size(), expectedNumSplits);
verifyUniqueKeys(queries);
verify(mockQuerySplitter, times(1)).getSplits(
eq(QUERY), any(PartitionId.class), eq(expectedNumSplits), any(Datastore.class));
verify(mockDatastore, times(1)).runQuery(latestTimestampRequest);
verify(mockDatastore, times(1)).runQuery(statRequest);
}
/**
* Tests {@link DatastoreV1.Read.SplitQueryFn} when the query has a user specified limit.
*/
@Test
public void testSplitQueryFnWithQueryLimit() throws Exception {
Query queryWithLimit = QUERY.toBuilder().clone()
.setLimit(Int32Value.newBuilder().setValue(1))
.build();
SplitQueryFn splitQueryFn = new SplitQueryFn(v1Options, 10, mockDatastoreFactory);
DoFnTester<Query, KV<Integer, Query>> doFnTester = DoFnTester.of(splitQueryFn);
doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE);
List<KV<Integer, Query>> queries = doFnTester.processBatch(queryWithLimit);
assertEquals(queries.size(), 1);
verifyUniqueKeys(queries);
verifyNoMoreInteractions(mockDatastore);
verifyNoMoreInteractions(mockQuerySplitter);
}
/** Tests {@link ReadFn} with a query limit less than one batch. */
@Test
public void testReadFnWithOneBatch() throws Exception {
readFnTest(5);
}
/** Tests {@link ReadFn} with a query limit of more than one batch, but not an exact multiple. */
@Test
public void testReadFnWithMultipleBatches() throws Exception {
readFnTest(QUERY_BATCH_LIMIT + 5);
}
/** Tests {@link ReadFn} for several batches, using an exact multiple of batch size results. */
@Test
public void testReadFnWithBatchesExactMultiple() throws Exception {
readFnTest(5 * QUERY_BATCH_LIMIT);
}
/** Helper Methods */
/** A helper function that verifies if all the queries have unique keys. */
private void verifyUniqueKeys(List<KV<Integer, Query>> queries) {
Set<Integer> keys = new HashSet<>();
for (KV<Integer, Query> kv: queries) {
keys.add(kv.getKey());
}
assertEquals(keys.size(), queries.size());
}
/**
* A helper function that creates mock {@link Entity} results in response to a query. Always
* indicates that more results are available, unless the batch is limited to fewer than
* {@link DatastoreV1.Read#QUERY_BATCH_LIMIT} results.
*/
private static RunQueryResponse mockResponseForQuery(Query q) {
// Every query DatastoreV1 sends should have a limit.
assertTrue(q.hasLimit());
// The limit should be in the range [1, QUERY_BATCH_LIMIT]
int limit = q.getLimit().getValue();
assertThat(limit, greaterThanOrEqualTo(1));
assertThat(limit, lessThanOrEqualTo(QUERY_BATCH_LIMIT));
// Create the requested number of entities.
List<EntityResult> entities = new ArrayList<>(limit);
for (int i = 0; i < limit; ++i) {
entities.add(
EntityResult.newBuilder()
.setEntity(Entity.newBuilder().setKey(makeKey("key" + i, i + 1)))
.build());
}
// Fill out the other parameters on the returned result batch.
RunQueryResponse.Builder ret = RunQueryResponse.newBuilder();
ret.getBatchBuilder()
.addAllEntityResults(entities)
.setEntityResultType(EntityResult.ResultType.FULL)
.setMoreResults(
limit == QUERY_BATCH_LIMIT
? QueryResultBatch.MoreResultsType.NOT_FINISHED
: QueryResultBatch.MoreResultsType.NO_MORE_RESULTS);
return ret.build();
}
/** Helper function to run a test reading from a {@link ReadFn}. */
private void readFnTest(int numEntities) throws Exception {
// An empty query to read entities.
Query query = Query.newBuilder().setLimit(
Int32Value.newBuilder().setValue(numEntities)).build();
// Use mockResponseForQuery to generate results.
when(mockDatastore.runQuery(any(RunQueryRequest.class)))
.thenAnswer(new Answer<RunQueryResponse>() {
@Override
public RunQueryResponse answer(InvocationOnMock invocationOnMock) throws Throwable {
Query q = ((RunQueryRequest) invocationOnMock.getArguments()[0]).getQuery();
return mockResponseForQuery(q);
}
});
ReadFn readFn = new ReadFn(v1Options, mockDatastoreFactory);
DoFnTester<Query, Entity> doFnTester = DoFnTester.of(readFn);
/**
* Although Datastore client is marked transient in {@link ReadFn}, when injected through
* mock factory using a when clause for unit testing purposes, it is not serializable
* because it doesn't have a no-arg constructor. Thus disabling the cloning to prevent the
* test object from being serialized.
*/
doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE);
List<Entity> entities = doFnTester.processBatch(query);
int expectedNumCallsToRunQuery = (int) Math.ceil((double) numEntities / QUERY_BATCH_LIMIT);
verify(mockDatastore, times(expectedNumCallsToRunQuery)).runQuery(any(RunQueryRequest.class));
// Validate the number of results.
assertEquals(numEntities, entities.size());
}
/** Builds a per-kind statistics response with the given entity size. */
private static RunQueryResponse makeStatKindResponse(long entitySizeInBytes) {
RunQueryResponse.Builder statKindResponse = RunQueryResponse.newBuilder();
Entity.Builder entity = Entity.newBuilder();
entity.setKey(makeKey("dummyKind", "dummyId"));
entity.getMutableProperties().put("entity_bytes", makeValue(entitySizeInBytes).build());
EntityResult.Builder entityResult = EntityResult.newBuilder();
entityResult.setEntity(entity);
QueryResultBatch.Builder batch = QueryResultBatch.newBuilder();
batch.addEntityResults(entityResult);
statKindResponse.setBatch(batch);
return statKindResponse.build();
}
/** Builds a response of the given timestamp. */
private static RunQueryResponse makeLatestTimestampResponse(long timestamp) {
RunQueryResponse.Builder timestampResponse = RunQueryResponse.newBuilder();
Entity.Builder entity = Entity.newBuilder();
entity.setKey(makeKey("dummyKind", "dummyId"));
entity.getMutableProperties().put("timestamp", makeValue(new Date(timestamp * 1000)).build());
EntityResult.Builder entityResult = EntityResult.newBuilder();
entityResult.setEntity(entity);
QueryResultBatch.Builder batch = QueryResultBatch.newBuilder();
batch.addEntityResults(entityResult);
timestampResponse.setBatch(batch);
return timestampResponse.build();
}
/** Builds a per-kind statistics query for the given timestamp and namespace. */
private static Query makeStatKindQuery(String namespace, long timestamp) {
Query.Builder statQuery = Query.newBuilder();
if (namespace == null) {
statQuery.addKindBuilder().setName("__Stat_Kind__");
} else {
statQuery.addKindBuilder().setName("__Stat_Ns_Kind__");
}
statQuery.setFilter(makeAndFilter(
makeFilter("kind_name", EQUAL, makeValue(KIND)).build(),
makeFilter("timestamp", EQUAL, makeValue(timestamp * 1000000L)).build()));
return statQuery.build();
}
/** Builds a latest timestamp statistics query. */
private static Query makeLatestTimestampQuery(String namespace) {
Query.Builder timestampQuery = Query.newBuilder();
if (namespace == null) {
timestampQuery.addKindBuilder().setName("__Stat_Total__");
} else {
timestampQuery.addKindBuilder().setName("__Stat_Ns_Total__");
}
timestampQuery.addOrder(makeOrder("timestamp", DESCENDING));
timestampQuery.setLimit(Int32Value.newBuilder().setValue(1));
return timestampQuery.build();
}
/** Generate dummy query splits. */
private List<Query> splitQuery(Query query, int numSplits) {
List<Query> queries = new LinkedList<>();
for (int i = 0; i < numSplits; i++) {
queries.add(query.toBuilder().clone().build());
}
return queries;
}
}
|
|
package dash;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Color;
import android.os.Bundle;
import android.os.IBinder;
import android.preference.PreferenceManager;
import android.support.v4.app.NotificationCompat;
import android.util.Log;
import com.getpebble.android.kit.PebbleKit;
import com.getpebble.android.kit.util.PebbleDictionary;
import com.wordpress.ninedof.dashapi.R;
import org.json.JSONArray;
import org.json.JSONObject;
import java.util.UUID;
import activity.Landing;
import data.Meta;
import data.PermissionManager;
public class Service extends android.app.Service {
private static final String TAG = Service.class.getName();
private static final boolean DEBUG = true;
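// Builds a response PebbleDictionary for the incoming request dictionary and sends it back to the
// watchapp identified by uuid. The library version is checked first; incompatible clients receive
// an error reply and nothing else is processed.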
private void parse(PebbleDictionary dict, final UUID uuid) {
Context context = getApplicationContext();
final PebbleDictionary out = new PebbleDictionary();
// Check version first
String versionRemote = dict.getString(Keys.AppKeyLibraryVersion);
if(Meta.isRemoteCompatible(versionRemote)) {
out.addInt32(Keys.RequestTypeError, 0);
out.addInt32(Keys.AppKeyErrorCode, Keys.ErrorCodeSuccess);
} else {
out.addInt32(Keys.RequestTypeError, 0);
out.addInt32(Keys.AppKeyErrorCode, Keys.ErrorCodeWrongVersion);
PebbleKit.sendDataToPebble(getApplicationContext(), uuid, out);
return;
}
// Get data request
if(dict.getInteger(Keys.RequestTypeGetData) != null) {
out.addInt32(Keys.RequestTypeGetData, 0);
int type = dict.getInteger(Keys.AppKeyDataType).intValue();
out.addInt32(Keys.AppKeyDataType, type);
APIHandler.handleGetData(context, type, out);
}
// Set feature request
if(dict.getInteger(Keys.RequestTypeSetFeature) != null) {
// Permitted?
if(!PermissionManager.isPermitted(context, uuid)) {
out.addInt32(Keys.RequestTypeError, 0);
out.addInt32(Keys.AppKeyErrorCode, Keys.ErrorCodeNoPermissions);
notifyNoPermission(uuid);
} else {
out.addInt32(Keys.RequestTypeSetFeature, 0);
int type = dict.getInteger(Keys.AppKeyFeatureType).intValue();
out.addInt32(Keys.AppKeyFeatureType, type);
int state = dict.getInteger(Keys.AppKeyFeatureState).intValue();
out.addInt32(Keys.AppKeyFeatureState, state);
APIHandler.handleSetFeature(context, type, state, out);
}
}
// Get feature request
if(dict.getInteger(Keys.RequestTypeGetFeature) != null) {
out.addInt32(Keys.RequestTypeGetFeature, 0);
int type = dict.getInteger(Keys.AppKeyFeatureType).intValue();
out.addInt32(Keys.AppKeyFeatureType, type);
APIHandler.handleGetFeature(context, type, out);
}
// Is available request
if(dict.getInteger(Keys.RequestTypeIsAvailable) != null) {
// Handled by AppKeyLibraryVersion in header
}
// Wait at least xms for GSM signal strength listener to resolve
// Does not appear to happen immediately
new Thread(new Runnable() {
@Override
public void run() {
PebbleKit.sendDataToPebble(getApplicationContext(), uuid, out);
Log.d(TAG, "Sent response to " + uuid.toString());
if(DEBUG) {
Log.d(TAG, "JSON out: " + out.toJsonString());
}
}
}).start();
}
private void checkPermissionEntry(PebbleDictionary dict, UUID uuid) {
Context context = getApplicationContext();
// Add for Landing ArrayList
String name = dict.getString(Keys.AppKeyAppName);
if(PermissionManager.getName(context, uuid) == null) {
// First time we've seen this app?
PermissionManager.setPermitted(context, uuid, false); // This also modifies the list
}
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
PermissionManager.setName(context, uuid, name);
}
private void notifyNoPermission(UUID uuid) {
Context context = getApplicationContext();
String name = PermissionManager.getName(context, uuid);
NotificationCompat.Builder builder = new NotificationCompat.Builder(context);
builder.setSmallIcon(R.drawable.ic_launcher_notif);
builder.setContentTitle("Dash API");
builder.setContentText(name + " is requesting permission to write to an Android API. Tap to grant permission.");
builder.setColor(Color.DKGRAY);
builder.setAutoCancel(true);
PendingIntent contentIntent = PendingIntent.getActivity(this, 0,
new Intent(this, Landing.class), PendingIntent.FLAG_UPDATE_CURRENT);
builder.setContentIntent(contentIntent);
int id = 1;
NotificationManager mNotifyMgr = (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);
mNotifyMgr.notify(id, builder.build());
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
if (intent == null) {
Log.e(TAG, "Intent was null");
return super.onStartCommand(intent, flags, startId);
}
try {
Log.d(TAG, "onStartCommand()");
//Get dictionary and parse
Bundle extras = intent.getExtras();
String jsonData = extras.getString("json");
if(DEBUG) {
Log.d(TAG, "JSON in: " + jsonData);
analyse(jsonData);
}
String uuidString = extras.getString("uuid");
UUID uuid = UUID.fromString(uuidString);
PebbleDictionary dict = PebbleDictionary.fromJson(jsonData);
checkPermissionEntry(dict, uuid);
parse(dict, uuid);
} catch (Exception e) {
Log.e(TAG, "onStartCommand() threw exception: " + e.getLocalizedMessage());
e.printStackTrace();
}
return super.onStartCommand(intent, flags, startId);
}
@Override
public IBinder onBind(Intent arg0) {
return null;
}
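// Debug helper: logs each key/value pair of the incoming JSON dictionary in a readable form,
// translating known key and value codes to their string names where possible.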
private void analyse(String jsonData) {
try {
JSONArray dictArr = new JSONArray(jsonData);
for(int i = 0; i < dictArr.length(); i++) {
JSONObject obj = dictArr.getJSONObject(i);
int key = obj.getInt("key");
String keyStr;
keyStr = Keys.ReqKeyDataFTypeToString(key);
if(keyStr == null) {
// Try second method
keyStr = Keys.ErrToString(key);
}
String value = obj.getString("value");
String valueStr;
int v = 0;
try {
v = Integer.parseInt(value);
} catch(Exception e) {
// Was a string
v = -1;
}
if(v >= 0) {
// Was integer
valueStr = Keys.ReqKeyDataFTypeToString(v);
} else {
valueStr = value;
}
Log.d(TAG, "analyse: k=" + keyStr + ", v=" + valueStr);
}
} catch(Exception e) {
Log.e(TAG, "NOT JSON");
}
}
}
|
|
package org.moparscape.elysium.world;
import org.moparscape.elysium.def.NPCLoc;
import org.moparscape.elysium.entity.*;
import org.moparscape.elysium.io.WorldLoader;
import org.moparscape.elysium.util.EntityList;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
/**
* Created by IntelliJ IDEA.
*
* @author lothy
*/
public final class World {
/**
* The maximum height of the map (944 squares per level)
*/
public static final int MAX_HEIGHT = 3776;
public static final int MAX_NPCS = 5000;
public static final int MAX_PLAYERS = 1000;
public static final int MAX_WIDTH = 944;
private static final EntityFactory ENTITY_FACTORY = new DefaultEntityFactory();
private static final World INSTANCE;
private final EntityList<Npc> npcList = new EntityList<>(MAX_NPCS);
private final TileValue outsideWorld = new TileValue();
private final EntityList<Player> playerList = new EntityList<>(MAX_PLAYERS);
private final List<Shop> shops = new ArrayList<>();
static {
INSTANCE = new World();
try {
WorldLoader loader = new WorldLoader();
loader.loadWorld(INSTANCE);
} catch (Exception e) {
e.printStackTrace();
throw new IllegalStateException("WorldLoader failed.");
}
}
private final TileValue[][] tileType = new TileValue[MAX_WIDTH][MAX_HEIGHT];
private int currentPlayerCount = 0;
private World() {
this.outsideWorld.mapValue = Byte.MAX_VALUE;
this.outsideWorld.objectValue = Byte.MAX_VALUE;
}
public static EntityFactory getEntityFactory() {
return ENTITY_FACTORY;
}
public static World getInstance() {
return INSTANCE;
}
public EntityList<Npc> getNpcs() {
return npcList;
}
public Player getPlayer(int index) {
return playerList.get(index);
}
public Player getPlayerByUsernameHash(long usernameHash) {
for (Player p : playerList) {
if (usernameHash == p.getUsernameHash()) return p;
}
return null;
}
public EntityList<Player> getPlayers() {
return playerList;
}
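/**
* Picks a pseudo-random ordinate less than maxDistance tiles away from basePoint,
* in either direction.
*/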
private int getRandomOrdinate(Random r, int basePoint, int maxDistance) {
boolean negative = r.nextBoolean();
int offset = r.nextInt() % maxDistance;
if (negative) offset *= -1;
return basePoint + offset;
}
/**
* Gets the tile value at point (x, y).
*/
public TileValue getTileValue(int x, int y) {
if (!withinWorld(x, y)) {
return outsideWorld;
}
TileValue t = tileType[x][y];
if (t == null) {
t = new TileValue();
tileType[x][y] = t;
}
return t;
}
/**
* Updates the map to include a new door
*/
public void registerDoor(GameObject o) {
if (o.getDoorDef().getDoorType() != 1) {
return;
}
int dir = o.getDirection();
Point location = o.getLocation();
int x = location.getX();
int y = location.getY();
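// Flag the door in the tile's objectValue bitmask; for dir 0 and 1 the complementary wall bit is
// also set on the adjacent tile. Exact bit semantics are assumed to follow the client's
// collision-map conventions.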
if (dir == 0) {
getTileValue(x, y).objectValue |= 1;
getTileValue(x, y - 1).objectValue |= 4;
} else if (dir == 1) {
getTileValue(x, y).objectValue |= 2;
getTileValue(x - 1, y).objectValue |= 8;
} else if (dir == 2) {
getTileValue(x, y).objectValue |= 0x10;
} else if (dir == 3) {
getTileValue(x, y).objectValue |= 0x20;
}
}
public void registerGameObject(GameObject o) {
Point location = o.getLocation();
if (location != null) {
Region r = Region.getRegion(location);
r.addObject(o);
}
switch (o.getType()) {
case 0:
registerObject(o);
break;
case 1:
registerDoor(o);
break;
}
}
public boolean registerItem(Item item) {
Point location = item.getLocation();
if (location != null) {
Region r = Region.getRegion(location);
r.addItem(item);
}
return true;
}
public boolean registerNpc(Npc n) {
NPCLoc npc = n.getLoc();
if (npc.startX < npc.minX || npc.startX > npc.maxX || npc.startY < npc.minY || npc.startY > npc.maxY || (getTileValue(npc.startX, npc.startY).mapValue & 64) != 0) {
System.out.println("Fucked Npc: <id>" + npc.id + "</id><startX>" + npc.startX + "</startX><startY>" + npc.startY + "</startY>");
}
return npcList.add(n);
}
/**
* Updates the map to include a new object
*/
public void registerObject(GameObject o) {
if (o.getGameObjectDef().getType() != 1 && o.getGameObjectDef().getType() != 2) {
return;
}
int dir = o.getDirection();
Point location = o.getLocation();
int baseX = location.getX();
int baseY = location.getY();
int width, height;
if (dir == 0 || dir == 4) {
width = o.getGameObjectDef().getWidth();
height = o.getGameObjectDef().getHeight();
} else {
height = o.getGameObjectDef().getWidth();
width = o.getGameObjectDef().getHeight();
}
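// For every tile covered by the object: type 1 objects block the whole tile (0x40); otherwise a
// wall bit is set on the tile and the complementary bit on its neighbour, depending on direction.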
for (int x = baseX; x < baseX + width; x++) {
for (int y = baseY; y < baseY + height; y++) {
if (o.getGameObjectDef().getType() == 1) {
getTileValue(x, y).objectValue |= 0x40;
} else if (dir == 0) {
getTileValue(x, y).objectValue |= 2;
getTileValue(x - 1, y).objectValue |= 8;
} else if (dir == 2) {
getTileValue(x, y).objectValue |= 4;
getTileValue(x, y + 1).objectValue |= 1;
} else if (dir == 4) {
getTileValue(x, y).objectValue |= 8;
getTileValue(x + 1, y).objectValue |= 2;
} else if (dir == 6) {
getTileValue(x, y).objectValue |= 1;
getTileValue(x, y - 1).objectValue |= 4;
}
}
}
}
public boolean registerPlayer(Player p) {
return playerList.add(p);
}
public void registerShop(final Shop shop) {
shop.setEquilibrium();
shops.add(shop);
}
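// Drops 500 test items at pseudo-random locations around a fixed working point
// (presumably for world-population testing).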
public void seedWithEntities() {
Point workingLocation = new Point(329, 552);
SecureRandom random = new SecureRandom();
random.setSeed(13333333333337L);
for (int i = 0; i < 500; i++) {
int x = getRandomOrdinate(random, workingLocation.getX(), 10);
int y = getRandomOrdinate(random, workingLocation.getY(), 10);
Point loc = new Point(x, y);
Item item = new Item(i, 1, loc, null);
Region.getRegion(loc).addItem(item);
}
//
// for (int i = 1; i < 20; i++) {
// int x = getRandomOrdinate(random, workingLocation.getX(), 10);
// int y = getRandomOrdinate(random, workingLocation.getY(), 10);
//
// Point loc = new Point(x, y);
// Npc npc = new Npc(95);
// npc.setIndex(i);
// npc.setLocation(loc, true);
// }
}
public boolean unregisterPlayer(Player p) {
if (p == null) return false;
p.setLoggedIn(false);
playerList.remove(p);
Point location = p.getLocation();
if (location != null) {
Region r = Region.getRegion(location);
r.removePlayer(p);
}
return true;
}
/**
* Checks whether the given coordinates are within the world boundaries.
*/
public boolean withinWorld(int x, int y) {
return x >= 0 && x < MAX_WIDTH && y >= 0 && y < MAX_HEIGHT;
}
}
|
|
/*
* Copyright 2013 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.channel.epoll;
import io.netty.channel.DefaultFileRegion;
import io.netty.channel.unix.Errors.NativeIoException;
import io.netty.util.internal.NativeLibraryLoader;
import io.netty.util.internal.PlatformDependent;
import io.netty.util.internal.SystemPropertyUtil;
import io.netty.channel.unix.FileDescriptor;
import io.netty.channel.unix.NativeInetAddress;
import java.io.IOException;
import java.net.InetAddress;
import java.util.Locale;
import static io.netty.channel.epoll.NativeStaticallyReferencedJniMethods.epollerr;
import static io.netty.channel.epoll.NativeStaticallyReferencedJniMethods.epollet;
import static io.netty.channel.epoll.NativeStaticallyReferencedJniMethods.epollin;
import static io.netty.channel.epoll.NativeStaticallyReferencedJniMethods.epollout;
import static io.netty.channel.epoll.NativeStaticallyReferencedJniMethods.epollrdhup;
import static io.netty.channel.epoll.NativeStaticallyReferencedJniMethods.iovMax;
import static io.netty.channel.epoll.NativeStaticallyReferencedJniMethods.isSupportingSendmmsg;
import static io.netty.channel.epoll.NativeStaticallyReferencedJniMethods.isSupportingTcpFastopen;
import static io.netty.channel.epoll.NativeStaticallyReferencedJniMethods.kernelVersion;
import static io.netty.channel.epoll.NativeStaticallyReferencedJniMethods.ssizeMax;
import static io.netty.channel.epoll.NativeStaticallyReferencedJniMethods.tcpMd5SigMaxKeyLen;
import static io.netty.channel.epoll.NativeStaticallyReferencedJniMethods.uioMaxIov;
import static io.netty.channel.unix.Errors.ERRNO_EAGAIN_NEGATIVE;
import static io.netty.channel.unix.Errors.ERRNO_EPIPE_NEGATIVE;
import static io.netty.channel.unix.Errors.ERRNO_EWOULDBLOCK_NEGATIVE;
import static io.netty.channel.unix.Errors.ioResult;
import static io.netty.channel.unix.Errors.newConnectionResetException;
import static io.netty.channel.unix.Errors.newIOException;
/**
* Native helper methods
* <p><strong>Internal usage only!</strong>
* <p>Static members which call JNI methods must be defined in {@link NativeStaticallyReferencedJniMethods}.
*/
public final class Native {
static {
String name = SystemPropertyUtil.get("os.name").toLowerCase(Locale.UK).trim();
if (!name.startsWith("linux")) {
throw new IllegalStateException("Only supported on Linux");
}
NativeLibraryLoader.load(SystemPropertyUtil.get("io.netty.packagePrefix", "").replace('.', '-') +
"netty-transport-native-epoll",
PlatformDependent.getClassLoader(Native.class));
}
// EventLoop operations and constants
public static final int EPOLLIN = epollin();
public static final int EPOLLOUT = epollout();
public static final int EPOLLRDHUP = epollrdhup();
public static final int EPOLLET = epollet();
public static final int EPOLLERR = epollerr();
public static final int IOV_MAX = iovMax();
public static final int UIO_MAX_IOV = uioMaxIov();
public static final boolean IS_SUPPORTING_SENDMMSG = isSupportingSendmmsg();
public static final boolean IS_SUPPORTING_TCP_FASTOPEN = isSupportingTcpFastopen();
public static final long SSIZE_MAX = ssizeMax();
public static final int TCP_MD5SIG_MAXKEYLEN = tcpMd5SigMaxKeyLen();
public static final String KERNEL_VERSION = kernelVersion();
private static final NativeIoException CONNECTION_RESET_EXCEPTION_SENDFILE;
private static final NativeIoException CONNECTION_RESET_EXCEPTION_SENDMMSG;
private static final NativeIoException CONNECTION_RESET_EXCEPTION_SPLICE;
static {
CONNECTION_RESET_EXCEPTION_SENDFILE = newConnectionResetException("syscall:sendfile(...)",
ERRNO_EPIPE_NEGATIVE);
CONNECTION_RESET_EXCEPTION_SENDMMSG = newConnectionResetException("syscall:sendmmsg(...)",
ERRNO_EPIPE_NEGATIVE);
CONNECTION_RESET_EXCEPTION_SPLICE = newConnectionResetException("syscall:splice(...)",
ERRNO_EPIPE_NEGATIVE);
}
public static FileDescriptor newEventFd() {
return new FileDescriptor(eventFd());
}
private static native int eventFd();
public static native void eventFdWrite(int fd, long value);
public static native void eventFdRead(int fd);
public static FileDescriptor newEpollCreate() {
return new FileDescriptor(epollCreate());
}
private static native int epollCreate();
public static int epollWait(int efd, EpollEventArray events, int timeout) throws IOException {
int ready = epollWait0(efd, events.memoryAddress(), events.length(), timeout);
if (ready < 0) {
throw newIOException("epoll_wait", ready);
}
return ready;
}
private static native int epollWait0(int efd, long address, int len, int timeout);
public static void epollCtlAdd(int efd, final int fd, final int flags) throws IOException {
int res = epollCtlAdd0(efd, fd, flags);
if (res < 0) {
throw newIOException("epoll_ctl", res);
}
}
private static native int epollCtlAdd0(int efd, final int fd, final int flags);
public static void epollCtlMod(int efd, final int fd, final int flags) throws IOException {
int res = epollCtlMod0(efd, fd, flags);
if (res < 0) {
throw newIOException("epoll_ctl", res);
}
}
private static native int epollCtlMod0(int efd, final int fd, final int flags);
public static void epollCtlDel(int efd, final int fd) throws IOException {
int res = epollCtlDel0(efd, fd);
if (res < 0) {
throw newIOException("epoll_ctl", res);
}
}
private static native int epollCtlDel0(int efd, final int fd);
// File-descriptor operations
public static int splice(int fd, long offIn, int fdOut, long offOut, long len) throws IOException {
int res = splice0(fd, offIn, fdOut, offOut, len);
if (res >= 0) {
return res;
}
return ioResult("splice", res, CONNECTION_RESET_EXCEPTION_SPLICE);
}
private static native int splice0(int fd, long offIn, int fdOut, long offOut, long len);
public static long sendfile(
int dest, DefaultFileRegion src, long baseOffset, long offset, long length) throws IOException {
// Open the file-region as it may be created via the lazy constructor. This is needed as we
// directly access the FileChannel field via JNI.
src.open();
long res = sendfile0(dest, src, baseOffset, offset, length);
if (res >= 0) {
return res;
}
return ioResult("sendfile", (int) res, CONNECTION_RESET_EXCEPTION_SENDFILE);
}
private static native long sendfile0(
int dest, DefaultFileRegion src, long baseOffset, long offset, long length) throws IOException;
public static int sendmmsg(
int fd, NativeDatagramPacketArray.NativeDatagramPacket[] msgs, int offset, int len) throws IOException {
int res = sendmmsg0(fd, msgs, offset, len);
if (res >= 0) {
return res;
}
return ioResult("sendmmsg", res, CONNECTION_RESET_EXCEPTION_SENDMMSG);
}
private static native int sendmmsg0(
int fd, NativeDatagramPacketArray.NativeDatagramPacket[] msgs, int offset, int len);
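/**
* Receives a file descriptor over the unix socket {@code fd}.
* Returns the received descriptor when positive, {@code -1} when the peer has closed the
* connection, or {@code 0} when nothing was ready to read (EAGAIN/EWOULDBLOCK).
*/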
public static int recvFd(int fd) throws IOException {
int res = recvFd0(fd);
if (res > 0) {
return res;
}
if (res == 0) {
return -1;
}
if (res == ERRNO_EAGAIN_NEGATIVE || res == ERRNO_EWOULDBLOCK_NEGATIVE) {
// Nothing was ready to read (EAGAIN/EWOULDBLOCK), so return 0 here.
return 0;
}
throw newIOException("recvFd", res);
}
private static native int recvFd0(int fd);
public static int sendFd(int socketFd, int fd) throws IOException {
int res = sendFd0(socketFd, fd);
if (res >= 0) {
return res;
}
if (res == ERRNO_EAGAIN_NEGATIVE || res == ERRNO_EWOULDBLOCK_NEGATIVE) {
// Everything consumed so just return -1 here.
return -1;
}
throw newIOException("sendFd", res);
}
private static native int sendFd0(int socketFd, int fd);
// Socket option operations
public static native int isReuseAddress(int fd);
public static native int isReusePort(int fd);
public static native int getTcpNotSentLowAt(int fd);
public static native int getTrafficClass(int fd);
public static native int isBroadcast(int fd);
public static native int getTcpKeepIdle(int fd);
public static native int getTcpKeepIntvl(int fd);
public static native int getTcpKeepCnt(int fd);
public static native int getTcpUserTimeout(int fd);
public static native int isIpFreeBind(int fd);
public static native void setReuseAddress(int fd, int reuseAddress);
public static native void setReusePort(int fd, int reuseAddress);
public static native void setTcpFastopen(int fd, int tcpFastopenBacklog);
public static native void setTcpNotSentLowAt(int fd, int tcpNotSentLowAt);
public static native void setTrafficClass(int fd, int tcpNoDelay);
public static native void setBroadcast(int fd, int broadcast);
public static native void setTcpKeepIdle(int fd, int seconds);
public static native void setTcpKeepIntvl(int fd, int seconds);
public static native void setTcpKeepCnt(int fd, int probes);
public static native void setTcpUserTimeout(int fd, int milliseconds);
public static native void setIpFreeBind(int fd, int freeBind);
public static void tcpInfo(int fd, EpollTcpInfo info) {
tcpInfo0(fd, info.info);
}
private static native void tcpInfo0(int fd, int[] array);
public static void setTcpMd5Sig(int fd, InetAddress address, byte[] key) {
final NativeInetAddress a = NativeInetAddress.newInstance(address);
setTcpMd5Sig0(fd, a.address(), a.scopeId(), key);
}
private static native void setTcpMd5Sig0(int fd, byte[] address, int scopeId, byte[] key);
// epoll_event related
public static native int sizeofEpollEvent();
public static native int offsetofEpollData();
private Native() {
// utility
}
}
|
|
/*
* Copyright 2002-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cache.config;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.w3c.dom.Element;
import org.springframework.beans.factory.config.TypedStringValue;
import org.springframework.beans.factory.parsing.ReaderContext;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.ManagedList;
import org.springframework.beans.factory.support.ManagedMap;
import org.springframework.beans.factory.support.RootBeanDefinition;
import org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser;
import org.springframework.beans.factory.xml.ParserContext;
import org.springframework.cache.interceptor.CacheEvictOperation;
import org.springframework.cache.interceptor.CacheInterceptor;
import org.springframework.cache.interceptor.CacheOperation;
import org.springframework.cache.interceptor.CachePutOperation;
import org.springframework.cache.interceptor.CacheableOperation;
import org.springframework.cache.interceptor.NameMatchCacheOperationSource;
import org.springframework.util.StringUtils;
import org.springframework.util.xml.DomUtils;
/**
* {@link org.springframework.beans.factory.xml.BeanDefinitionParser
* BeanDefinitionParser} for the {@code <cache:advice/>} tag.
*
* @author Costin Leau
* @author Phillip Webb
* @author Stephane Nicoll
*/
class CacheAdviceParser extends AbstractSingleBeanDefinitionParser {
private static final String CACHEABLE_ELEMENT = "cacheable";
private static final String CACHE_EVICT_ELEMENT = "cache-evict";
private static final String CACHE_PUT_ELEMENT = "cache-put";
private static final String METHOD_ATTRIBUTE = "method";
private static final String DEFS_ELEMENT = "caching";
@Override
protected Class<?> getBeanClass(Element element) {
return CacheInterceptor.class;
}
@Override
protected void doParse(Element element, ParserContext parserContext, BeanDefinitionBuilder builder) {
builder.addPropertyReference("cacheManager", CacheNamespaceHandler.extractCacheManager(element));
CacheNamespaceHandler.parseKeyGenerator(element, builder.getBeanDefinition());
List<Element> cacheDefs = DomUtils.getChildElementsByTagName(element, DEFS_ELEMENT);
if (cacheDefs.size() >= 1) {
// Using attributes source.
List<RootBeanDefinition> attributeSourceDefinitions = parseDefinitionsSources(cacheDefs, parserContext);
builder.addPropertyValue("cacheOperationSources", attributeSourceDefinitions);
}
else {
// Assume annotations source.
builder.addPropertyValue("cacheOperationSources",
new RootBeanDefinition("org.springframework.cache.annotation.AnnotationCacheOperationSource"));
}
}
private List<RootBeanDefinition> parseDefinitionsSources(List<Element> definitions, ParserContext parserContext) {
ManagedList<RootBeanDefinition> defs = new ManagedList<RootBeanDefinition>(definitions.size());
// extract default param for the definition
for (Element element : definitions) {
defs.add(parseDefinitionSource(element, parserContext));
}
return defs;
}
private RootBeanDefinition parseDefinitionSource(Element definition, ParserContext parserContext) {
Props prop = new Props(definition);
// add cacheable first
ManagedMap<TypedStringValue, Collection<CacheOperation>> cacheOpMap = new ManagedMap<TypedStringValue, Collection<CacheOperation>>();
cacheOpMap.setSource(parserContext.extractSource(definition));
List<Element> cacheableCacheMethods = DomUtils.getChildElementsByTagName(definition, CACHEABLE_ELEMENT);
for (Element opElement : cacheableCacheMethods) {
String name = prop.merge(opElement, parserContext.getReaderContext());
TypedStringValue nameHolder = new TypedStringValue(name);
nameHolder.setSource(parserContext.extractSource(opElement));
CacheableOperation.Builder builder = prop.merge(opElement,
parserContext.getReaderContext(), new CacheableOperation.Builder());
builder.setUnless(getAttributeValue(opElement, "unless", ""));
builder.setSync(Boolean.valueOf(getAttributeValue(opElement, "sync", "false")));
Collection<CacheOperation> col = cacheOpMap.get(nameHolder);
if (col == null) {
col = new ArrayList<CacheOperation>(2);
cacheOpMap.put(nameHolder, col);
}
col.add(builder.build());
}
List<Element> evictCacheMethods = DomUtils.getChildElementsByTagName(definition, CACHE_EVICT_ELEMENT);
for (Element opElement : evictCacheMethods) {
String name = prop.merge(opElement, parserContext.getReaderContext());
TypedStringValue nameHolder = new TypedStringValue(name);
nameHolder.setSource(parserContext.extractSource(opElement));
CacheEvictOperation.Builder builder = prop.merge(opElement,
parserContext.getReaderContext(), new CacheEvictOperation.Builder());
String wide = opElement.getAttribute("all-entries");
if (StringUtils.hasText(wide)) {
builder.setCacheWide(Boolean.valueOf(wide.trim()));
}
String after = opElement.getAttribute("before-invocation");
if (StringUtils.hasText(after)) {
builder.setBeforeInvocation(Boolean.valueOf(after.trim()));
}
Collection<CacheOperation> col = cacheOpMap.get(nameHolder);
if (col == null) {
col = new ArrayList<CacheOperation>(2);
cacheOpMap.put(nameHolder, col);
}
col.add(builder.build());
}
List<Element> putCacheMethods = DomUtils.getChildElementsByTagName(definition, CACHE_PUT_ELEMENT);
for (Element opElement : putCacheMethods) {
String name = prop.merge(opElement, parserContext.getReaderContext());
TypedStringValue nameHolder = new TypedStringValue(name);
nameHolder.setSource(parserContext.extractSource(opElement));
CachePutOperation.Builder builder = prop.merge(opElement,
parserContext.getReaderContext(), new CachePutOperation.Builder());
builder.setUnless(getAttributeValue(opElement, "unless", ""));
Collection<CacheOperation> col = cacheOpMap.get(nameHolder);
if (col == null) {
col = new ArrayList<CacheOperation>(2);
cacheOpMap.put(nameHolder, col);
}
col.add(builder.build());
}
RootBeanDefinition attributeSourceDefinition = new RootBeanDefinition(NameMatchCacheOperationSource.class);
attributeSourceDefinition.setSource(parserContext.extractSource(definition));
attributeSourceDefinition.getPropertyValues().add("nameMap", cacheOpMap);
return attributeSourceDefinition;
}
private static String getAttributeValue(Element element, String attributeName, String defaultValue) {
String attribute = element.getAttribute(attributeName);
if (StringUtils.hasText(attribute)) {
return attribute.trim();
}
return defaultValue;
}
/**
* Simple, reusable class used for overriding defaults.
*/
private static class Props {
private String key;
private String keyGenerator;
private String cacheManager;
private String condition;
private String method;
private String[] caches = null;
Props(Element root) {
String defaultCache = root.getAttribute("cache");
this.key = root.getAttribute("key");
this.keyGenerator = root.getAttribute("key-generator");
this.cacheManager = root.getAttribute("cache-manager");
this.condition = root.getAttribute("condition");
this.method = root.getAttribute(METHOD_ATTRIBUTE);
if (StringUtils.hasText(defaultCache)) {
this.caches = StringUtils.commaDelimitedListToStringArray(defaultCache.trim());
}
}
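// Merges element-level attributes into the builder, falling back to the defaults captured from
// the enclosing <caching> element when an attribute is not set locally.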
<T extends CacheOperation.Builder> T merge(Element element, ReaderContext readerCtx, T builder) {
String cache = element.getAttribute("cache");
// sanity check
String[] localCaches = this.caches;
if (StringUtils.hasText(cache)) {
localCaches = StringUtils.commaDelimitedListToStringArray(cache.trim());
}
else {
if (this.caches == null) {
readerCtx.error("No cache specified for " + element.getNodeName(), element);
}
}
builder.setCacheNames(localCaches);
builder.setKey(getAttributeValue(element, "key", this.key));
builder.setKeyGenerator(getAttributeValue(element, "key-generator", this.keyGenerator));
builder.setCacheManager(getAttributeValue(element, "cache-manager", this.cacheManager));
builder.setCondition(getAttributeValue(element, "condition", this.condition));
if (StringUtils.hasText(builder.getKey()) && StringUtils.hasText(builder.getKeyGenerator())) {
throw new IllegalStateException("Invalid cache advice configuration on '"
+ element.toString() + "'. Both 'key' and 'keyGenerator' attributes have been set. " +
"These attributes are mutually exclusive: either set the SpEL expression used to" +
"compute the key at runtime or set the name of the KeyGenerator bean to use.");
}
return builder;
}
String merge(Element element, ReaderContext readerCtx) {
String method = element.getAttribute(METHOD_ATTRIBUTE);
if (StringUtils.hasText(method)) {
return method.trim();
}
if (StringUtils.hasText(this.method)) {
return this.method;
}
readerCtx.error("No method specified for " + element.getNodeName(), element);
return null;
}
}
}
|
|
package minecraft;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;
import util.Vector3f;
import util.Vector3i;
public class LevelData
{
private LevelMetadata m_metadata;
private Map<RegionCoord, RegionData> m_regions;
public LevelData()
{
m_metadata = new LevelMetadata();
m_regions = new HashMap<>();
}
public RegionData getRegion(int regionX, int regionZ)
{
return m_regions.get(new RegionCoord(regionX, regionZ));
}
public void addRegion(RegionData region)
{
m_regions.put(region.getRegionCoord(), region);
}
public Terrain getFullTerrain()
{
return new Terrain(getAllSections());
}
public Terrain getNearTerrain(Vector3f viewpoint, float radius)
{
List<SectionUnit> nearSections = new ArrayList<>();
for(SectionUnit section : getAllSections())
{
Vector3f sectionCenter = section.getCenterCoord().toVector3f();
if(sectionCenter.sub(viewpoint).squareLength() < radius * radius)
{
nearSections.add(section);
}
}
MCLogger.log("found " + nearSections.size() + " sections in " + radius + " blocks around " + viewpoint);
return new Terrain(nearSections);
}
// NOTE: unconnected regions are not reachable in the current implementation
public Terrain getReachableTerrain(Vector3f viewpoint)
{
MCLogger.log("determining reachable sections...");
// HACK
int MAX_RADIUS = 6;
Vector3f pv = toSectionCoord(viewpoint).toVector3f();
System.err.println("pv: " + pv);
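// Flood fill over section coordinates, expanded in order of squared distance from the viewpoint
// (hence the PriorityQueue). Empty sections propagate the flood in every direction; loaded
// sections only propagate across faces their FaceReachability reports as connected.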
class Flood implements Comparable<Flood>
{
Vector3i coord;
EFacing front;
Flood(Vector3i coord, EFacing front)
{
this.coord = coord;
this.front = front;
}
@Override
public int compareTo(Flood other)
{
Vector3f p1 = coord.toVector3f();
Vector3f p2 = other.coord.toVector3f();
float value1 = pv.sub(p1).squareLength();
float value2 = pv.sub(p2).squareLength();
return value1 < value2 ? -1 : (value1 > value2 ? 1 : 0);
}
}
Vector3i[] coordOffsets = new Vector3i[]{
new Vector3i( 0, 0, -1),
new Vector3i( 0, 0, 1),
new Vector3i(-1, 0, 0),
new Vector3i( 1, 0, 0),
new Vector3i( 0, -1, 0),
new Vector3i( 0, 1, 0)
};
SectionStateMap floodedArea = new SectionStateMap();
Map<Vector3i, FaceReachability> sectionReachability = new HashMap<>();
List<SectionUnit> reachableSections = new ArrayList<>();
Map<Vector3i, SectionUnit> sectionMap = new HashMap<>();
for(SectionUnit section : getAllSections())
{
sectionMap.put(Coordinate.toSection(section.getCoord()), section);
}
PriorityQueue<Flood> floodQueue = new PriorityQueue<>();
{
Vector3i rootCoord = toSectionCoord(viewpoint);
for(EFacing front : EFacing.values())
{
floodQueue.add(new Flood(rootCoord, front));
}
floodedArea.setSection(rootCoord, true);
if(sectionMap.get(rootCoord) != null)
{
reachableSections.add(sectionMap.get(rootCoord));
}
}
while(!floodQueue.isEmpty())
{
Flood flood = floodQueue.poll();
Vector3i coord = flood.coord.add(coordOffsets[flood.front.getValue()]);
if(!isInBound(coord) ||
coord.y < 0 || coord.y >= ChunkData.NUM_SECTIONS ||
floodedArea.getSection(coord))
{
continue;
}
if(coord.toVector3f().sub(pv).length() > MAX_RADIUS)
{
break;
}
SectionUnit section = sectionMap.get(coord);
if(section == null)
{
for(EFacing nextFront : EFacing.values())
{
floodQueue.add(new Flood(coord, nextFront));
}
floodedArea.setSection(coord, true);
}
else
{
FaceReachability reachability = sectionReachability.get(coord);
if(reachability == null)
{
reachability = section.getData().determinReachability();
sectionReachability.put(coord, reachability);
reachableSections.add(section);
}
EFacing from = flood.front.getOpposite();
for(EFacing to : EFacing.values())
{
if(from != to && reachability.isReachable(from, to))
{
floodQueue.add(new Flood(coord, to));
reachability.setReachability(from, to, false);
}
}
// it is possible for flood to flow within the section it came from
if(!floodedArea.getSection(flood.coord))
{
FaceReachability fromReachability = sectionReachability.get(flood.coord);
if(fromReachability.isReachable(flood.front))
{
floodQueue.add(new Flood(coord, from));
}
}
if(reachability.isFullyUnreachable())
{
floodedArea.setSection(coord, true);
}
}
}// end while
MCLogger.log("found " + reachableSections.size() + " reachable sections");
return new Terrain(reachableSections);
}
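/**
* Converts a world-space viewpoint to the integer coordinate of the section containing it,
* clamping y to the valid section range.
*/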
private static Vector3i toSectionCoord(Vector3f viewpoint)
{
float clampedY = Math.max(0.0f, Math.min(viewpoint.y, ChunkData.SIZE_Y));
return new Vector3i(
Math.floorDiv((int)Math.floor(viewpoint.x), SectionData.SIZE_X),
Math.min((int)clampedY / SectionData.SIZE_Y, ChunkData.NUM_SECTIONS - 1),
Math.floorDiv((int)Math.floor(viewpoint.z), SectionData.SIZE_Z));
}
private boolean isInBound(Vector3i section)
{
RegionCoord region = Coordinate.sectionToRegion(section);
return m_regions.containsKey(region);
}
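/**
* Collects every non-empty section from every loaded region, tagging each with its absolute
* block coordinate.
*/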
private List<SectionUnit> getAllSections()
{
List<SectionUnit> sections = new ArrayList<>();
for(RegionData region : m_regions.values())
{
for(int chunkZ = 0; chunkZ < RegionData.NUM_CHUNKS_Z; ++chunkZ)
{
for(int chunkX = 0; chunkX < RegionData.NUM_CHUNKS_X; ++chunkX)
{
ChunkData chunk = region.getChunk(chunkX, chunkZ);
if(chunk == null)
{
continue;
}
for(int s = 0; s < ChunkData.NUM_SECTIONS; ++s)
{
SectionData section = chunk.getSection(s);
if(section == null)
{
continue;
}
final int x = region.getX() + chunkX * SectionData.SIZE_X;
final int y = region.getY() + s * SectionData.SIZE_Y;
final int z = region.getZ() + chunkZ * SectionData.SIZE_Z;
sections.add(new SectionUnit(new Vector3i(x, y, z), section));
}
}
}
}
MCLogger.log("found " + sections.size() + " non-empty sections");
return sections;
}
public void setMetadata(LevelMetadata metadata)
{
m_metadata = metadata;
}
public LevelMetadata getMetadata()
{
return m_metadata;
}
}
|
|
/*
* Copyright 2015 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.base.Preconditions;
import com.google.javascript.jscomp.Es6ToEs3Converter.ClassDeclarationMetadata;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.JSDocInfo.Visibility;
import com.google.javascript.rhino.JSDocInfoBuilder;
import com.google.javascript.rhino.JSTypeExpression;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Node.TypeDeclarationNode;
import com.google.javascript.rhino.Token;
import com.google.javascript.rhino.TypeDeclarationsIR;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
* Converts {@link Node#getDeclaredTypeExpression()} to {@link JSDocInfo#getType()} type
* annotations. Types are marked as inline types.
*/
public final class Es6TypedToEs6Converter implements NodeTraversal.Callback, HotSwapCompilerPass {
static final DiagnosticType CANNOT_CONVERT_MEMBER_VARIABLES = DiagnosticType.error(
"JSC_CANNOT_CONVERT_FIELDS",
"Can only convert class member variables (fields) in declarations or the right hand side of "
+ "a simple assignment.");
static final DiagnosticType CANNOT_CONVERT_BOUNDED_GENERICS = DiagnosticType.error(
"JSC_CANNOT_CONVERT_BOUNDED_GENERICS",
"Bounded generics are not yet implemented.");
static final DiagnosticType TYPE_ALIAS_ALREADY_DECLARED = DiagnosticType.error(
"JSC_TYPE_ALIAS_ALREADY_DECLARED",
"Type alias already declared as a variable: {0}");
static final DiagnosticType TYPE_QUERY_NOT_SUPPORTED = DiagnosticType.error(
"JSC_TYPE_QUERY_NOT_SUPPORTED",
"Type query is currently not supported.");
static final DiagnosticType UNSUPPORTED_RECORD_TYPE = DiagnosticType.error(
"JSC_UNSUPPORTED_RECORD_TYPE",
"Currently only member variables are supported in record types, please consider "
+ "using interfaces instead.");
static final DiagnosticType COMPUTED_PROP_ACCESS_MODIFIER = DiagnosticType.error(
"JSC_UNSUPPORTED_ACCESS_MODIFIER",
"Accessibility is not checked on computed properties");
private final AbstractCompiler compiler;
private final Map<Node, Namespace> nodeNamespaceMap;
private final Set<String> convertedNamespaces;
private Namespace currNamespace;
Es6TypedToEs6Converter(AbstractCompiler compiler) {
this.compiler = compiler;
this.nodeNamespaceMap = new HashMap<>();
this.convertedNamespaces = new HashSet<>();
}
@Override
public void process(Node externs, Node scriptRoot) {
ScanNamespaces scanner = new ScanNamespaces();
NodeTraversal.traverseEs6(compiler, externs, scanner);
NodeTraversal.traverseEs6(compiler, scriptRoot, scanner);
NodeTraversal.traverseEs6(compiler, externs, this);
NodeTraversal.traverseEs6(compiler, scriptRoot, this);
}
@Override
public void hotSwapScript(Node scriptRoot, Node originalRoot) {
ScanNamespaces scanner = new ScanNamespaces();
NodeTraversal.traverseEs6(compiler, scriptRoot, scanner);
NodeTraversal.traverseEs6(compiler, scriptRoot, this);
}
@Override
public boolean shouldTraverse(NodeTraversal t, Node n, Node parent) {
if (n.getType() == Token.NAMESPACE) {
currNamespace = nodeNamespaceMap.get(n);
}
return true;
}
@Override
public void visit(NodeTraversal t, Node n, Node parent) {
switch (n.getType()) {
case Token.CLASS:
visitClass(n, parent);
break;
case Token.INTERFACE:
visitInterface(n, parent);
break;
case Token.ENUM:
visitEnum(n, parent);
break;
case Token.NAME:
case Token.REST:
maybeVisitColonType(n, n);
break;
case Token.FUNCTION:
visitFunction(n, parent);
break;
case Token.TYPE_ALIAS:
visitTypeAlias(t, n, parent);
break;
case Token.DECLARE:
visitAmbientDeclaration(n);
break;
case Token.NAMESPACE:
visitNamespaceDeclaration(n, parent);
break;
case Token.VAR:
case Token.LET:
case Token.CONST:
visitVarInsideNamespace(n, parent);
break;
default:
}
}
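// Replaces a namespace declaration with @const object-literal declarations for each namespace
// segment that has not been emitted yet, then hoists the namespace body into the parent scope.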
private void visitNamespaceDeclaration(Node n, Node parent) {
popNamespace(n, parent);
for (Node name = NodeUtil.getRootOfQualifiedName(n.getFirstChild()); name != n;
name = name.getParent()) {
String fullName = maybePrependCurrNamespace(name.getQualifiedName());
if (!convertedNamespaces.contains(fullName)) {
JSDocInfoBuilder doc = JSDocInfoBuilder.maybeCopyFrom(n.getJSDocInfo());
doc.recordConstancy();
Node namespaceDec = NodeUtil.newQNameDeclaration(
compiler, fullName, IR.objectlit(), doc.build()).useSourceInfoFromForTree(n);
parent.addChildBefore(namespaceDec, n);
convertedNamespaces.add(fullName);
}
}
Node insertPoint = n;
for (Node child : n.getLastChild().children()) {
Node detached = child.detachFromParent();
parent.addChildAfter(detached, insertPoint);
insertPoint = detached;
}
parent.removeChild(n);
compiler.reportCodeChange();
}
private void maybeAddGenerics(Node n, Node jsDocNode) {
Node name = n.getFirstChild();
Node generics = (Node) name.getProp(Node.GENERIC_TYPE_LIST);
if (generics != null) {
JSDocInfoBuilder doc = JSDocInfoBuilder.maybeCopyFrom(jsDocNode.getJSDocInfo());
// Discard the type bound (the "extends" part) for now
for (Node typeName : generics.children()) {
doc.recordTemplateTypeName(typeName.getString());
if (typeName.hasChildren()) {
compiler.report(JSError.make(name, CANNOT_CONVERT_BOUNDED_GENERICS));
return;
}
}
name.removeProp(Node.GENERIC_TYPE_LIST);
jsDocNode.setJSDocInfo(doc.build());
}
}
private void visitClass(Node n, Node parent) {
maybeAddGenerics(n, n);
JSDocInfoBuilder doc = JSDocInfoBuilder.maybeCopyFrom(n.getJSDocInfo());
Node interfaces = (Node) n.getProp(Node.IMPLEMENTS);
if (interfaces != null) {
for (Node child : interfaces.children()) {
Node type = convertWithLocation(child);
doc.recordImplementedInterface(new JSTypeExpression(type, n.getSourceFileName()));
}
n.removeProp(Node.IMPLEMENTS);
}
Node superType = n.getChildAtIndex(1);
Node newSuperType = maybeGetQualifiedNameNode(superType);
if (newSuperType != superType) {
n.replaceChild(superType, newSuperType);
}
Node classMembers = n.getLastChild();
ClassDeclarationMetadata metadata = ClassDeclarationMetadata.create(n, parent);
for (Node member : classMembers.children()) {
if (member.isIndexSignature()) {
doc.recordImplementedInterface(createIObject(member));
continue;
}
// Functions are handled by the regular Es6ToEs3Converter
if (!member.isMemberVariableDef() && !member.getBooleanProp(Node.COMPUTED_PROP_VARIABLE)) {
maybeAddVisibility(member);
continue;
}
if (metadata == null) {
compiler.report(JSError.make(n, CANNOT_CONVERT_MEMBER_VARIABLES));
return;
}
metadata.insertNodeAndAdvance(createPropertyDefinition(member, metadata.fullClassName));
compiler.reportCodeChange();
}
n.setJSDocInfo(doc.build());
maybeCreateQualifiedDeclaration(n, parent);
}
private void visitInterface(Node n, Node parent) {
maybeAddGenerics(n, n);
Node name = n.getFirstChild();
Node superTypes = name.getNext();
JSDocInfoBuilder doc = JSDocInfoBuilder.maybeCopyFrom(n.getJSDocInfo());
doc.recordInterface();
if (!superTypes.isEmpty()) {
for (Node child : superTypes.children()) {
Node type = convertWithLocation(child);
doc.recordExtendedInterface(new JSTypeExpression(type, n.getSourceFileName()));
}
}
Node insertionPoint = n;
Node members = n.getLastChild();
for (Node member : members.children()) {
// Synthesize a block for method signatures.
if (member.isMemberFunctionDef()) {
Node function = member.getFirstChild();
function.getLastChild().setType(Token.BLOCK);
continue;
}
if (member.isIndexSignature()) {
doc.recordExtendedInterface(createIObject(member));
continue;
}
Node newNode = createPropertyDefinition(member, name.getString());
insertionPoint.getParent().addChildAfter(newNode, insertionPoint);
insertionPoint = newNode;
}
n.setJSDocInfo(doc.build());
// Convert interface to class
n.setType(Token.CLASS);
Node empty = new Node(Token.EMPTY).useSourceInfoIfMissingFrom(n);
n.replaceChild(superTypes, empty);
members.setType(Token.CLASS_MEMBERS);
maybeCreateQualifiedDeclaration(n, parent);
compiler.reportCodeChange();
}
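// Illustrative example (not from the original source): an index signature such as
//   interface Foo { [key: string]: number; }
// is recorded as an extra implemented/extended interface of the form
//   IObject<string, number>
// (non-nullable, i.e. prefixed with "!") in the generated JSDoc.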
private JSTypeExpression createIObject(Node indexSignature) {
Node indexType = convertWithLocation(indexSignature.getFirstChild()
.getDeclaredTypeExpression());
Node declaredType = convertWithLocation(indexSignature.getDeclaredTypeExpression());
Node block = new Node(Token.BLOCK, indexType, declaredType);
Node iObject = IR.string("IObject");
iObject.addChildrenToFront(block);
JSTypeExpression bang = new JSTypeExpression(new Node(Token.BANG, iObject)
.useSourceInfoIfMissingFromForTree(indexSignature), indexSignature.getSourceFileName());
indexSignature.detachFromParent();
compiler.reportCodeChange();
return bang;
}
private Node createPropertyDefinition(Node member, String name) {
member.detachFromParent();
Node nameAccess = NodeUtil.newQName(compiler, name);
Node prototypeAccess = NodeUtil.newPropertyAccess(compiler, nameAccess, "prototype");
Node qualifiedMemberAccess =
Es6ToEs3Converter.getQualifiedMemberAccess(compiler, member, nameAccess,
prototypeAccess);
// Copy type information.
maybeVisitColonType(member, member);
maybeAddVisibility(member);
qualifiedMemberAccess.setJSDocInfo(member.getJSDocInfo());
Node newNode = NodeUtil.newExpr(qualifiedMemberAccess);
return newNode.useSourceInfoIfMissingFromForTree(member);
}
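// Illustrative example (not from the original source): a typed enum such as
//   enum Color { RED, GREEN = 5, BLUE }
// becomes a qualified declaration of an object literal annotated with
// @enum {number}, with implicit values filled in:
//   /** @enum {number} */ var Color = { RED: 0, GREEN: 5, BLUE: 6 };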
private void visitEnum(Node n, Node parent) {
Node name = n.getFirstChild();
Node members = n.getLastChild();
double nextValue = 0;
Node[] stringKeys = new Node[members.getChildCount()];
for (int i = 0; i < members.getChildCount(); i++) {
Node child = members.getChildAtIndex(i);
if (child.hasChildren()) {
nextValue = child.getFirstChild().getDouble() + 1;
} else {
child.addChildToFront(IR.number(nextValue++));
}
stringKeys[i] = child;
}
for (Node child : stringKeys) {
child.detachFromParent();
}
String oldName = name.getString();
String qName = maybePrependCurrNamespace(oldName);
JSDocInfoBuilder builder = JSDocInfoBuilder.maybeCopyFrom(n.getJSDocInfo());
builder.recordEnumParameterType(
new JSTypeExpression(IR.string("number"), n.getSourceFileName()));
Node newDec = NodeUtil.newQNameDeclaration(
compiler,
qName,
IR.objectlit(stringKeys),
builder.build()).useSourceInfoFromForTree(n);
n.setJSDocInfo(null);
parent.replaceChild(n, newDec);
compiler.reportCodeChange();
}
private void visitFunction(Node n, Node parent) {
// For member functions (eg. class Foo<T> { f() {} }), the JSDocInfo
// needs to go on the synthetic MEMBER_FUNCTION_DEF node.
Node jsDocNode = parent.getType() == Token.MEMBER_FUNCTION_DEF
? parent
: n;
maybeAddGenerics(n, jsDocNode);
maybeVisitColonType(n, jsDocNode); // Return types are colon types on the function node
if (n.getLastChild().isEmpty()) {
n.replaceChild(n.getLastChild(), IR.block().useSourceInfoFrom(n));
}
maybeCreateQualifiedDeclaration(n, parent);
}
private void maybeAddVisibility(Node n) {
Visibility access = (Visibility) n.getProp(Node.ACCESS_MODIFIER);
if (access != null) {
if (n.isComputedProp()) {
compiler.report(JSError.make(n, COMPUTED_PROP_ACCESS_MODIFIER));
}
JSDocInfoBuilder memberDoc = JSDocInfoBuilder.maybeCopyFrom(n.getJSDocInfo());
memberDoc.recordVisibility(access);
n.setJSDocInfo(memberDoc.build());
n.removeProp(Node.ACCESS_MODIFIER);
}
}
private void maybeVisitColonType(Node n, Node jsDocNode) {
Node type = n.getDeclaredTypeExpression();
boolean hasColonType = type != null;
if (n.isRest() && hasColonType) {
type = new Node(Token.ELLIPSIS, convertWithLocation(type.removeFirstChild()));
} else {
type = maybeProcessOptionalParameter(n, type);
}
if (type == null) {
return;
}
JSDocInfo info = jsDocNode.getJSDocInfo();
JSDocInfoBuilder builder = JSDocInfoBuilder.maybeCopyFrom(info);
JSTypeExpression typeExpression = new JSTypeExpression(type, n.getSourceFileName());
switch (n.getType()) {
case Token.FUNCTION:
builder.recordReturnType(typeExpression);
break;
case Token.MEMBER_VARIABLE_DEF:
builder.recordType(typeExpression);
break;
default:
builder.recordType(typeExpression);
builder.recordInlineType();
}
info = builder.build();
jsDocNode.setJSDocInfo(info);
if (hasColonType) {
n.setDeclaredTypeExpression(null);
compiler.reportCodeChange();
}
}
private void visitTypeAlias(NodeTraversal t, Node n, Node parent) {
String alias = n.getString();
if (t.getScope().isDeclared(alias, true)) {
compiler.report(
JSError.make(n, TYPE_ALIAS_ALREADY_DECLARED, alias));
}
JSDocInfoBuilder builder = JSDocInfoBuilder.maybeCopyFrom(n.getJSDocInfo());
builder.recordTypedef(new JSTypeExpression(
convertWithLocation(n.getFirstChild()), n.getSourceFileName()));
Node newName =
maybeGetQualifiedNameNode(IR.name(n.getString())).useSourceInfoIfMissingFromForTree(n);
Node newDec1 = NodeUtil.newQNameDeclaration(
compiler,
newName.getQualifiedName(),
null,
builder.build()).useSourceInfoFromForTree(n);
parent.replaceChild(n, newDec1);
compiler.reportCodeChange();
}
private void visitAmbientDeclaration(Node n) {
Node child = n.getFirstChild();
JSDocInfoBuilder builder = JSDocInfoBuilder.maybeCopyFrom(child.getJSDocInfo());
if (child.getType() == Token.CONST) {
builder.recordConstancy();
child.setType(Token.VAR);
}
child.setJSDocInfo(builder.build());
n.detachFromParent();
// The node can have multiple children if transformed from an ambient namespace declaration.
for (Node c : n.children()) {
compiler.getSynthesizedExternsInput().getAstRoot(compiler)
.addChildToBack(c.detachFromParent());
}
compiler.reportCodeChange();
}
private void visitVarInsideNamespace(Node n, Node parent) {
if (currNamespace != null) {
Node insertPoint = n;
for (Node child : n.children()) {
Node name = child;
String oldName = name.getString();
String qName = maybePrependCurrNamespace(oldName);
JSDocInfoBuilder builder = JSDocInfoBuilder.maybeCopyFrom(child.getJSDocInfo());
if (n.isConst()) {
builder.recordConstancy();
}
Node newDec = NodeUtil.newQNameDeclaration(
compiler,
qName,
child.removeFirstChild(),
builder.build()).useSourceInfoFromForTree(n);
parent.addChildAfter(newDec, insertPoint);
insertPoint = newDec;
}
n.detachFromParent();
compiler.reportCodeChange();
}
}
private Node maybeCreateAnyType(Node n, Node type) {
return type == null ? TypeDeclarationsIR.anyType().useSourceInfoIfMissingFrom(n) : type;
}
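// Illustrative example (not from the original source): an optional parameter
// declared as "x?: number" becomes the JSDoc parameter type "number=", and an
// optional parameter with no type annotation ("x?") becomes "?=".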
private Node maybeProcessOptionalParameter(Node n, Node type) {
if (n.getBooleanProp(Node.OPT_ES6_TYPED)) {
n.putBooleanProp(Node.OPT_ES6_TYPED, false);
type = maybeCreateAnyType(n, type);
return new Node(Token.EQUALS, convertWithLocation(type));
} else {
return type == null ? null : convertWithLocation(type);
}
}
private Node convertWithLocation(Node type) {
return convertDeclaredTypeToJSDoc(type).useSourceInfoIfMissingFrom(type);
}
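// Illustrative examples (not from the original source) of the mapping below,
// written as "TypeScript-style type -> JSDoc type expression":
//   string               ->  string
//   Foo                  ->  !Foo
//   number[]             ->  !Array<number>
//   (x: number) => Foo   ->  function(number): !Foo
//   { a: string }        ->  {a: string}
//   A | B                ->  (!A|!B)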
private Node convertDeclaredTypeToJSDoc(Node type) {
Preconditions.checkArgument(type instanceof TypeDeclarationNode);
switch (type.getType()) {
// "Primitive" types.
case Token.STRING_TYPE:
return IR.string("string");
case Token.BOOLEAN_TYPE:
return IR.string("boolean");
case Token.NUMBER_TYPE:
return IR.string("number");
case Token.VOID_TYPE:
return IR.string("void");
case Token.ANY_TYPE:
return new Node(Token.QMARK);
// Named types.
case Token.NAMED_TYPE:
return convertNamedType(type);
case Token.ARRAY_TYPE: {
Node arrayType = IR.string("Array");
Node memberType = convertWithLocation(type.getFirstChild());
arrayType.addChildToFront(
new Node(Token.BLOCK, memberType).useSourceInfoIfMissingFrom(type));
return new Node(Token.BANG, arrayType);
}
case Token.PARAMETERIZED_TYPE: {
Node namedType = type.getFirstChild();
Node result = convertWithLocation(namedType);
Node typeParameterTarget =
result.getType() == Token.BANG ? result.getFirstChild() : result;
Node parameters = IR.block().useSourceInfoIfMissingFrom(type);
typeParameterTarget.addChildToFront(parameters);
for (Node param = namedType.getNext(); param != null; param = param.getNext()) {
parameters.addChildToBack(convertWithLocation(param));
}
return result;
}
// Composite types.
case Token.FUNCTION_TYPE:
Node returnType = type.getFirstChild();
Node paramList = new Node(Token.PARAM_LIST);
for (Node param = returnType.getNext(); param != null; param = param.getNext()) {
Node paramType = param.getDeclaredTypeExpression();
if (param.isRest()) {
if (paramType == null) {
paramType = new Node(Token.ELLIPSIS, new Node(Token.QMARK));
} else {
paramType = new Node(Token.ELLIPSIS,
convertWithLocation(paramType.getFirstChild()));
}
} else {
paramType = maybeProcessOptionalParameter(param,
maybeCreateAnyType(param, paramType));
}
paramList.addChildToBack(paramType);
}
Node function = new Node(Token.FUNCTION);
function.addChildToBack(paramList);
function.addChildToBack(convertWithLocation(returnType));
return function;
case Token.UNION_TYPE:
Node pipe = new Node(Token.PIPE);
for (Node child : type.children()) {
pipe.addChildToBack(convertWithLocation(child));
}
return pipe;
case Token.RECORD_TYPE:
Node lb = new Node(Token.LB);
for (Node memberVar : type.children()) {
if (!memberVar.isMemberVariableDef()) {
compiler.report(JSError.make(type, UNSUPPORTED_RECORD_TYPE));
continue;
}
Node colon = new Node(Token.COLON);
memberVar.setType(Token.STRING_KEY);
Node memberType = convertWithLocation(
maybeCreateAnyType(memberVar, memberVar.getDeclaredTypeExpression()));
memberVar.setDeclaredTypeExpression(null);
colon.addChildToBack(memberVar.detachFromParent());
colon.addChildToBack(memberType);
lb.addChildrenToBack(colon);
}
return new Node(Token.LC, lb);
case Token.TYPEOF:
// Currently, TypeQuery is not supported in Closure's type system.
compiler.report(JSError.make(type, TYPE_QUERY_NOT_SUPPORTED));
return new Node(Token.QMARK);
default:
// TODO(moz): Implement.
break;
}
throw new IllegalArgumentException(
"Unexpected node type for type conversion: " + type.getType());
}
private Node convertNamedType(Node type) {
Node oldNameNode = type.getFirstChild();
Node newNameNode = maybeGetQualifiedNameNode(oldNameNode);
if (newNameNode != oldNameNode) {
type.replaceChild(oldNameNode, newNameNode);
}
Node propTree = type.getFirstChild();
String dotted = propTree.getQualifiedName();
// In the native type syntax, nominal types are non-nullable by default.
// NOTE(dimvar): This adds ! in front of type variables as well.
// Minor issue, not worth fixing for now.
// To fix, we must first transpile declarations of generic types, collect
// the type variables in scope, and use them during transpilation.
return new Node(Token.BANG, IR.string(dotted));
}
private void maybeCreateQualifiedDeclaration(Node n, Node parent) {
if (currNamespace != null) {
Node name = n.getFirstChild();
String oldName = name.getString();
String qName = maybePrependCurrNamespace(oldName);
Node newName = n.isFunction() ? IR.name("") : IR.empty();
newName.useSourceInfoFrom(n);
n.replaceChild(name, newName);
Node placeHolder = IR.empty();
parent.replaceChild(n, placeHolder);
Node newDec = NodeUtil.newQNameDeclaration(
compiler,
qName,
n,
n.getJSDocInfo()).useSourceInfoFromForTree(n);
n.setJSDocInfo(null);
parent.replaceChild(placeHolder, newDec);
compiler.reportCodeChange();
}
}
private Node maybeGetQualifiedNameNode(Node oldNameNode) {
if (oldNameNode.isName()) {
String oldName = oldNameNode.getString();
for (Namespace definitionNamespace = currNamespace; definitionNamespace != null;
definitionNamespace = definitionNamespace.parent) {
if (definitionNamespace.typeNames.contains(oldName)) {
return NodeUtil.newQName(compiler, definitionNamespace.name + "." + oldName)
.useSourceInfoFromForTree(oldNameNode);
}
}
}
return oldNameNode;
}
private String maybePrependCurrNamespace(String oldName) {
return currNamespace == null ? oldName : currNamespace.name + "." + oldName;
}
private void popNamespace(Node n, Node parent) {
if (n.getType() == Token.NAMESPACE) {
currNamespace = nodeNamespaceMap.get(
parent.getType() == Token.DECLARE ? parent.getParent().getParent() : parent.getParent());
}
}
private class ScanNamespaces implements NodeTraversal.Callback {
private Map<String, Namespace> namespaces = new HashMap<>();
@Override
public boolean shouldTraverse(NodeTraversal t, Node n, Node parent) {
switch (n.getType()) {
case Token.SCRIPT:
case Token.NAMESPACE_ELEMENTS:
return true;
case Token.BLOCK:
return n.getFirstChild() != null && n.getFirstChild().isScript();
case Token.DECLARE:
return n.getFirstChild().getType() == Token.NAMESPACE;
case Token.NAMESPACE:
String[] segments = n.getFirstChild().getQualifiedName().split("\\.");
for (String s : segments) {
String currName = maybePrependCurrNamespace(s);
if (!namespaces.containsKey(currName)) {
currNamespace = new Namespace(currName, currNamespace);
namespaces.put(currName, currNamespace);
}
currNamespace = namespaces.get(currName);
}
nodeNamespaceMap.put(n, currNamespace);
return true;
case Token.CLASS:
case Token.INTERFACE:
case Token.ENUM:
if (currNamespace != null) {
currNamespace.typeNames.add(n.getFirstChild().getString());
}
return true;
case Token.TYPE_ALIAS:
if (currNamespace != null) {
currNamespace.typeNames.add(n.getString());
}
return true;
}
return false;
}
@Override
public void visit(NodeTraversal t, Node n, Node parent) {
popNamespace(n, parent);
}
}
private static class Namespace {
private final String name;
private Set<String> typeNames;
private Namespace parent;
private Namespace(String name, Namespace parent) {
this.name = name;
this.parent = parent;
this.typeNames = new HashSet<>();
}
}
}
|
|
/**
* Copyright 2016 Pinterest, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pinterest.deployservice.db;
import com.pinterest.deployservice.bean.BuildBean;
import com.pinterest.deployservice.bean.SetClause;
import com.pinterest.deployservice.dao.BuildDAO;
import com.google.common.base.Optional;
import org.apache.commons.dbcp.BasicDataSource;
import org.apache.commons.dbutils.QueryRunner;
import org.apache.commons.dbutils.ResultSetHandler;
import org.apache.commons.dbutils.handlers.BeanHandler;
import org.apache.commons.dbutils.handlers.BeanListHandler;
import org.apache.commons.lang.StringUtils;
import org.joda.time.Interval;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
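/**
* Build DAO backed by a MySQL database, implemented with Apache commons-dbutils.
*
* <p>A minimal usage sketch (the connection settings are hypothetical; in the real
* service the data source is configured and injected elsewhere):</p>
*
* <pre>{@code
* BasicDataSource dataSource = new BasicDataSource();
* dataSource.setDriverClassName("com.mysql.jdbc.Driver"); // hypothetical driver and URL
* dataSource.setUrl("jdbc:mysql://localhost:3306/deploy");
* dataSource.setUsername("deploy");
* dataSource.setPassword("secret");
*
* BuildDAO buildDAO = new DBBuildDAOImpl(dataSource);
* BuildBean latest = buildDAO.getLatest("my-service", "master"); // latest build on branch "master"
* }</pre>
*/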
public class DBBuildDAOImpl implements BuildDAO {
private static final int DEFAULT_SIZE = 100;
private static final String INSERT_BUILD_TEMPLATE = "INSERT INTO builds SET %s";
private static final String DELETE_BUILD = "DELETE FROM builds WHERE build_id=?";
private static final String GET_BUILD_BY_ID = "SELECT * FROM builds WHERE build_id=?";
private static final String GET_BUILDS_BY_COMMIT_7 =
"SELECT * FROM builds WHERE scm_commit_7=? ORDER BY publish_date DESC LIMIT ?,?";
private static final String GET_LATEST_BUILD_BY_NAME =
"SELECT * FROM builds WHERE build_name=? ORDER BY publish_date DESC LIMIT 1";
private static final String
GET_LATEST_BUILD_BY_NAME_2 =
"SELECT * FROM builds WHERE build_name=? AND scm_branch=? ORDER BY publish_date DESC "
+ "LIMIT 1";
private static final String GET_BUILDS_BY_NAME =
"SELECT * FROM builds WHERE build_name=? " + "ORDER BY publish_date DESC LIMIT ?,?";
private static final String GET_BUILDS_BY_NAME_2 =
"SELECT * FROM builds WHERE build_name=? AND scm_branch=? "
+ "ORDER BY publish_date DESC LIMIT ?,?";
private static final String
GET_BUILD_NAMES =
"SELECT DISTINCT build_name FROM builds WHERE build_name LIKE ? ORDER BY build_name ASC "
+ "LIMIT ?,?";
private static final String GET_BRANCHES =
"SELECT DISTINCT scm_branch FROM builds WHERE build_name=?";
private static final String GET_BUILD_NAMES2 =
"SELECT DISTINCT build_name FROM builds ORDER BY build_name ASC LIMIT ?,?";
private static final String GET_BUILDS_BY_NAME_X =
"SELECT * FROM builds WHERE build_name=? AND "
+ "publish_date<=? AND publish_date>? ORDER BY publish_date DESC LIMIT 5000";
private static final String GET_BUILDS_BY_NAME_X_2 =
"SELECT * FROM builds WHERE build_name=? AND scm_branch=? AND "
+ "publish_date<=? AND publish_date>? ORDER BY publish_date DESC LIMIT 5000";
private static final String
GET_ACCEPTED_BUILDS_TEMPLATE =
"SELECT * FROM builds WHERE build_name='%s' AND publish_date>%d ORDER BY publish_date DESC "
+ "LIMIT %d";
private static final String
GET_ACCEPTED_BUILDS_TEMPLATE2 =
"SELECT * FROM builds WHERE build_name='%s' AND scm_branch='%s' AND publish_date>%d ORDER "
+ "BY publish_date DESC LIMIT %d";
private static final String
GET_ACCEPTED_BUILDS_BETWEEN_TEMPLATE =
"SELECT * FROM builds WHERE build_name='%s' AND publish_date>%d AND publish_date<%d ORDER "
+ "BY publish_date DESC LIMIT %d";
private static final String
GET_ACCEPTED_BUILDS_BETWEEN_TEMPLATE2 =
"SELECT * FROM builds WHERE build_name='%s' AND scm_branch='%s' AND publish_date>%d AND "
+ "publish_date<%d ORDER BY publish_date DESC LIMIT %d";
private static final String GET_ALL_BUILD_NAMES = "SELECT DISTINCT build_name FROM builds";
private static final String GET_TOTAL_BY_NAME =
"SELECT COUNT(*) FROM builds WHERE build_name=?";
private static final String GET_LIST_OF_BUILDS_BY_IDs =
"SELECT * FROM builds WHERE build_id IN (%s)";
private static final String DELETE_UNUSED_BUILDS =
"DELETE FROM builds WHERE build_name=? AND publish_date<? "
+ "AND NOT EXISTS (SELECT 1 FROM deploys WHERE deploys.build_id = builds.build_id) "
+ "ORDER BY publish_date ASC LIMIT ?";
private static final String GET_CURRENT_BUILD_BY_GROUP_NAME = "SELECT * FROM builds WHERE build_id IN " +
"(SELECT build_id FROM deploys WHERE deploy_id IN " +
"(SELECT deploy_id FROM environs WHERE env_id IN" +
" (SELECT env_id FROM groups_and_envs WHERE group_name = '%s')" +
"))";
private BasicDataSource dataSource;
public DBBuildDAOImpl(BasicDataSource dataSource) {
this.dataSource = dataSource;
}
@Override
public void insert(BuildBean buildBean) throws Exception {
SetClause setClause = buildBean.genSetClause();
String clause = String.format(INSERT_BUILD_TEMPLATE, setClause.getClause());
new QueryRunner(dataSource).update(clause, setClause.getValueArray());
}
@Override
public void delete(String buildId) throws Exception {
new QueryRunner(dataSource).update(DELETE_BUILD, buildId);
}
@Override
public BuildBean getById(String buildId) throws Exception {
ResultSetHandler<BuildBean> h = new BeanHandler<>(BuildBean.class);
return new QueryRunner(dataSource).query(GET_BUILD_BY_ID, h, buildId);
}
@Override
public List<BuildBean> getByCommit7(String scmCommit7, int pageIndex, int pageSize)
throws Exception {
ResultSetHandler<List<BuildBean>> h = new BeanListHandler<>(BuildBean.class);
return new QueryRunner(dataSource)
.query(GET_BUILDS_BY_COMMIT_7, h, scmCommit7, (pageIndex - 1) * pageSize, pageSize);
}
@Override
public BuildBean getLatest(String buildName, String branch) throws Exception {
QueryRunner run = new QueryRunner(this.dataSource);
ResultSetHandler<BuildBean> h = new BeanHandler<>(BuildBean.class);
if (StringUtils.isNotEmpty(branch)) {
return run.query(GET_LATEST_BUILD_BY_NAME_2, h, buildName, branch);
} else {
return run.query(GET_LATEST_BUILD_BY_NAME, h, buildName);
}
}
@Override
public List<String> getBuildNames(String nameFilter, int pageIndex, int pageSize)
throws Exception {
QueryRunner run = new QueryRunner(this.dataSource);
if (StringUtils.isNotEmpty(nameFilter)) {
return run
.query(GET_BUILD_NAMES,
SingleResultSetHandlerFactory.<String>newListObjectHandler(),
String.format("%%%s%%", nameFilter), (pageIndex - 1) * pageSize, pageSize);
} else {
return run
.query(GET_BUILD_NAMES2,
SingleResultSetHandlerFactory.<String>newListObjectHandler(),
(pageIndex - 1) * pageSize, pageSize);
}
}
@Override
public List<BuildBean> getByNameDate(String buildName, String branch, long before, long after)
throws Exception {
QueryRunner run = new QueryRunner(this.dataSource);
ResultSetHandler<List<BuildBean>> h = new BeanListHandler<>(BuildBean.class);
if (StringUtils.isNotEmpty(branch)) {
return run.query(GET_BUILDS_BY_NAME_X_2, h, buildName, branch, before, after);
} else {
return run.query(GET_BUILDS_BY_NAME_X, h, buildName, before, after);
}
}
@Override
public List<BuildBean> getByName(String buildName, String branch, int pageIndex, int pageSize)
throws Exception {
QueryRunner run = new QueryRunner(this.dataSource);
long start = (pageIndex - 1) * pageSize;
ResultSetHandler<List<BuildBean>> h = new BeanListHandler<>(BuildBean.class);
if (StringUtils.isNotEmpty(branch)) {
return run.query(GET_BUILDS_BY_NAME_2, h, buildName, branch, start, pageSize);
} else {
return run.query(GET_BUILDS_BY_NAME, h, buildName, start, pageSize);
}
}
@Override
public List<String> getBranches(String buildName) throws Exception {
QueryRunner run = new QueryRunner(this.dataSource);
return run.query(GET_BRANCHES, SingleResultSetHandlerFactory.<String>newListObjectHandler(),
buildName);
}
@Override
public List<BuildBean> getAcceptedBuilds(String buildName, String branch, Interval interval,
int size) throws Exception {
ResultSetHandler<List<BuildBean>> h = new BeanListHandler<>(BuildBean.class);
if (StringUtils.isNotEmpty(branch)) {
return new QueryRunner(dataSource).query(
String.format(GET_ACCEPTED_BUILDS_BETWEEN_TEMPLATE2, buildName, branch,
interval.getStartMillis(), interval.getEndMillis(), size), h);
} else {
return new QueryRunner(dataSource).query(
String.format(GET_ACCEPTED_BUILDS_BETWEEN_TEMPLATE, buildName,
interval.getStartMillis(),
interval.getEndMillis(), size), h);
}
}
@Override
public List<String> getAllBuildNames() throws Exception {
QueryRunner run = new QueryRunner(this.dataSource);
return run
.query(GET_ALL_BUILD_NAMES,
SingleResultSetHandlerFactory.<String>newListObjectHandler());
}
@Override
public long countBuildsByName(String buildName) throws Exception {
Long n = new QueryRunner(dataSource)
.query(GET_TOTAL_BY_NAME, SingleResultSetHandlerFactory.<Long>newObjectHandler(),
buildName);
return n == null ? 0 : n;
}
@Override
public void deleteUnusedBuilds(String buildName, long timeThreshold, long numOfBuilds)
throws Exception {
new QueryRunner(dataSource)
.update(DELETE_UNUSED_BUILDS, buildName, timeThreshold, numOfBuilds);
}
@Override
public List<BuildBean> getBuildsFromIds(Collection<String> ids) throws Exception {
if (ids.size() == 0) {
return new ArrayList<>(); //MySQL doesn't allow IN (). So just return empty here.
}
ResultSetHandler<List<BuildBean>> h = new BeanListHandler<>(BuildBean.class);
QueryRunner run = new QueryRunner(dataSource);
return run
.query(String.format(GET_LIST_OF_BUILDS_BY_IDs, QueryUtils.genStringGroupClause(ids)),
h);
}
@Override
public List<BuildBean> get(String scmCommit, String buildName, String scmBranch,
Optional<Integer> pageIndex, Optional<Integer> pageSize, Long before,
Long after)
throws Exception {
if (!StringUtils.isEmpty(scmCommit)) {
return this.getByCommit7(StringUtils.substring(scmCommit, 0, 7), pageIndex.or(1),
pageSize.or(DEFAULT_SIZE));
}
if (!StringUtils.isEmpty(buildName)) {
if (before != null && after != null) {
return this.getByNameDate(buildName, scmBranch, before, after);
} else {
return this
.getByName(buildName, scmBranch, pageIndex.or(1), pageSize.or(DEFAULT_SIZE));
}
}
return new ArrayList<>();
}
@Override
public List<BuildBean> getCurrentBuildsByGroupName(String groupName) throws Exception {
ResultSetHandler<List<BuildBean>> h = new BeanListHandler<>(BuildBean.class);
return new QueryRunner(dataSource).query(String.format(GET_CURRENT_BUILD_BY_GROUP_NAME, groupName), h);
}
}
|
|
/*
* Copyright (C) 2018 Satomichi Nishihara
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package burai.app.proxy;
import java.io.IOException;
import java.net.URL;
import java.util.Optional;
import java.util.ResourceBundle;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.fxml.Initializable;
import javafx.scene.Node;
import javafx.scene.control.ButtonType;
import javafx.scene.control.CheckBox;
import javafx.scene.control.Dialog;
import javafx.scene.control.DialogPane;
import javafx.scene.control.Label;
import javafx.scene.control.PasswordField;
import javafx.scene.control.TextField;
import burai.app.QEFXMain;
import burai.com.env.Environments;
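/**
* Dialog for entering proxy server settings (host, port, user and password) and
* storing them as environment properties.
*
* <p>A minimal usage sketch (to be called from the JavaFX application thread):</p>
*
* <pre>{@code
* QEFXProxyDialog dialog = new QEFXProxyDialog();
* dialog.showAndSetProperties(); // shows the dialog and stores the entered properties
* }</pre>
*/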
public class QEFXProxyDialog extends Dialog<ButtonType> implements Initializable {
@FXML
private TextField hostField;
@FXML
private TextField portField;
@FXML
private TextField userField;
@FXML
private PasswordField passField;
@FXML
private CheckBox passCheck;
public QEFXProxyDialog() {
super();
DialogPane dialogPane = this.getDialogPane();
QEFXMain.initializeStyleSheets(dialogPane.getStylesheets());
QEFXMain.initializeDialogOwner(this);
this.setResizable(false);
this.setTitle("Proxy server");
dialogPane.setHeaderText("Set proxy server.");
dialogPane.getButtonTypes().clear();
dialogPane.getButtonTypes().addAll(ButtonType.OK, ButtonType.CANCEL);
Node node = null;
try {
node = this.createContent();
} catch (Exception e) {
node = new Label("ERROR: cannot show QEFXProxyDialog.");
e.printStackTrace();
}
dialogPane.setContent(node);
this.setResultConverter(buttonType -> buttonType);
}
private Node createContent() throws IOException {
FXMLLoader fxmlLoader = new FXMLLoader(this.getClass().getResource("QEFXProxyDialog.fxml"));
fxmlLoader.setController(this);
return fxmlLoader.load();
}
@Override
public void initialize(URL location, ResourceBundle resources) {
this.setupHostField();
this.setupPortField();
this.setupUserField();
this.setupPassField();
this.setupPassCheck();
}
private void setupHostField() {
if (this.hostField == null) {
return;
}
String hostStr = Environments.getProperty(ProxyServer.PROP_KEY_HOST);
if (hostStr == null) {
this.hostField.setText("");
} else {
this.hostField.setText(hostStr);
}
}
private void setupPortField() {
if (this.portField == null) {
return;
}
String portStr = Environments.getProperty(ProxyServer.PROP_KEY_PORT);
if (portStr == null) {
this.portField.setText("");
} else {
this.portField.setText(portStr);
}
}
private void setupUserField() {
if (this.userField == null) {
return;
}
String userStr = Environments.getProperty(ProxyServer.PROP_KEY_USER);
if (userStr == null) {
this.userField.setText("");
} else {
this.userField.setText(userStr);
}
}
private void setupPassField() {
if (this.passField == null) {
return;
}
String passStr = Environments.getProperty(ProxyServer.PROP_KEY_PASSWORD);
if (passStr == null) {
this.passField.setText("");
} else {
this.passField.setText(passStr);
}
}
private void setupPassCheck() {
if (this.passCheck == null) {
return;
}
this.passCheck.setSelected(Environments.getBoolProperty(ProxyServer.PROP_KEY_SAVEPASSWORD));
}
public void showAndSetProperties() {
Optional<ButtonType> optButtonType = this.showAndWait();
if (optButtonType == null || !optButtonType.isPresent()) {
return;
}
if (optButtonType.get() != ButtonType.OK) {
return;
}
String hostStr = this.getHost();
String portStr = this.getPort();
String userStr = this.getUser();
String passStr = this.getPassword();
boolean passSaved = this.isPasswordSaved();
if (hostStr != null && (!hostStr.isEmpty())) {
Environments.setProperty(ProxyServer.PROP_KEY_HOST, hostStr);
} else {
Environments.removeProperty(ProxyServer.PROP_KEY_HOST);
}
if (portStr != null && (!portStr.isEmpty())) {
Environments.setProperty(ProxyServer.PROP_KEY_PORT, portStr);
} else {
Environments.removeProperty(ProxyServer.PROP_KEY_PORT);
}
if (userStr != null && (!userStr.isEmpty())) {
Environments.setProperty(ProxyServer.PROP_KEY_USER, userStr);
} else {
Environments.removeProperty(ProxyServer.PROP_KEY_USER);
}
if (passSaved && passStr != null && (!passStr.isEmpty())) {
Environments.setProperty(ProxyServer.PROP_KEY_PASSWORD, passStr);
} else {
Environments.removeProperty(ProxyServer.PROP_KEY_PASSWORD);
}
Environments.setProperty(ProxyServer.PROP_KEY_SAVEPASSWORD, passSaved);
ProxyServer.initProxyServer(passStr);
}
private String getHost() {
if (this.hostField == null) {
return null;
}
String value = this.hostField.getText();
return value == null ? null : value.trim();
}
private String getPort() {
if (this.portField == null) {
return null;
}
String value = this.portField.getText();
return value == null ? null : value.trim();
}
private String getUser() {
if (this.userField == null) {
return null;
}
String value = this.userField.getText();
return value == null ? null : value.trim();
}
private String getPassword() {
if (this.passField == null) {
return null;
}
String value = this.passField.getText();
return value == null ? null : value.trim();
}
private boolean isPasswordSaved() {
if (this.passCheck == null) {
return false;
}
return this.passCheck.isSelected();
}
}
|
|
/*
* RED5 Open Source Media Server - https://github.com/Red5/ Copyright 2006-2016 by respective authors (see below). All rights reserved. Licensed under the Apache License, Version
* 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless
* required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
package org.red5.server.so;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.List;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ConcurrentSkipListSet;
import org.red5.server.api.event.IEventListener;
import org.red5.server.net.rtmp.event.BaseEvent;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Shared object message, carrying a chain of shared object events together with the
* shared object's name, version and persistence flag.
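*
* <p>A minimal usage sketch (the event type constant is illustrative; any
* {@code ISharedObjectEvent.Type} value defined by the implementation can be used):</p>
*
* <pre>{@code
* SharedObjectMessage msg = new SharedObjectMessage("chat", 1, false);
* msg.addEvent(ISharedObjectEvent.Type.CLIENT_UPDATE_DATA, "user", "alice");
* }</pre>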
*/
public class SharedObjectMessage extends BaseEvent implements ISharedObjectMessage {
private static Logger log = LoggerFactory.getLogger(SharedObjectMessage.class);
private static final long serialVersionUID = -8128704039659990049L;
/**
* SO event name
*/
private String name;
/**
* SO events chain
*/
private ConcurrentSkipListSet<ISharedObjectEvent> events = new ConcurrentSkipListSet<>();
/**
* SO version, used for synchronization purposes
*/
private volatile int version;
/**
* Whether SO persistent
*/
private boolean persistent;
public SharedObjectMessage() {
}
/**
* Creates Shared Object event with given name, version and persistence flag
*
* @param name
* Event name
* @param version
* SO version
* @param persistent
* SO persistence flag
*/
public SharedObjectMessage(String name, int version, boolean persistent) {
this(null, name, version, persistent);
}
/**
* Creates Shared Object event with given listener, name, SO version and persistence flag
*
* @param source
* Event listener
* @param name
* Event name
* @param version
* SO version
* @param persistent
* SO persistence flag
*/
public SharedObjectMessage(IEventListener source, String name, int version, boolean persistent) {
super(Type.SHARED_OBJECT, source);
this.name = name;
this.version = version;
this.persistent = persistent;
}
/**
* Resets the version and events to an initial state.
*/
public void reset() {
version = 0;
events.clear();
}
/** {@inheritDoc} */
@Override
public byte getDataType() {
return TYPE_SHARED_OBJECT;
}
/** {@inheritDoc} */
public int getVersion() {
return version;
}
/**
* Setter for version
*
* @param version
* New version
*/
protected void setVersion(int version) {
this.version = version;
}
/** {@inheritDoc} */
public String getName() {
return name;
}
/**
* Setter for name
*
* @param name
* Event name
*/
protected void setName(String name) {
this.name = name;
}
/** {@inheritDoc} */
public boolean isPersistent() {
return persistent;
}
/**
* Setter for persistence flag
*
* @param persistent
* Persistence flag
*/
protected void setPersistent(boolean persistent) {
this.persistent = persistent;
}
/** {@inheritDoc} */
public boolean addEvent(ISharedObjectEvent.Type type, String key, Object value) {
return events.add(new SharedObjectEvent(type, key, value));
}
/** {@inheritDoc} */
public boolean addEvent(ISharedObjectEvent event) {
return events.add(event);
}
/** {@inheritDoc} */
public void addEvents(List<ISharedObjectEvent> events) {
this.events.addAll(events);
}
/** {@inheritDoc} */
public void addEvents(Queue<ISharedObjectEvent> events) {
this.events.addAll(events);
}
/** {@inheritDoc} */
public void addEvents(Set<ISharedObjectEvent> events) {
this.events.addAll(events);
}
/** {@inheritDoc} */
public Set<ISharedObjectEvent> getEvents() {
return events;
}
/** {@inheritDoc} */
public void clear() {
events.clear();
}
/** {@inheritDoc} */
public boolean isEmpty() {
return events.isEmpty();
}
/** {@inheritDoc} */
@Override
public Type getType() {
return Type.SHARED_OBJECT;
}
/** {@inheritDoc} */
@Override
public Object getObject() {
return getEvents();
}
/** {@inheritDoc} */
@Override
protected void releaseInternal() {
}
/** {@inheritDoc} */
@Override
public String toString() {
StringBuilder sb = new StringBuilder(getClass().getSimpleName());
sb.append(": ").append(name).append(" v=").append(version).append(" persistent=").append(persistent).append(" { ");
for (ISharedObjectEvent event : events) {
sb.append(event);
sb.append(' ');
}
sb.append('}');
return sb.toString();
}
@SuppressWarnings({ "unchecked" })
@Override
public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
super.readExternal(in);
name = in.readUTF();
version = in.readInt();
persistent = in.readBoolean();
Object o = in.readObject();
if (o != null) {
log.trace("events type: {}", o.getClass().getName());
if (o instanceof ConcurrentSkipListSet) {
events = (ConcurrentSkipListSet<ISharedObjectEvent>) o;
}
}
if (log.isTraceEnabled()) {
log.trace("readExternal: {}", toString());
}
}
@Override
public void writeExternal(ObjectOutput out) throws IOException {
super.writeExternal(out);
if (log.isTraceEnabled()) {
log.trace("writeExternal: {}", toString());
}
out.writeUTF(name);
out.writeInt(version);
out.writeBoolean(persistent);
out.writeObject(events);
}
}
|
|
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.elasticloadbalancing.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
* Contains the parameters for a RemoveTags request: the name of the load balancer and
* the keys of the tags to remove.
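*
* <p>A minimal usage sketch ({@code TagKeyOnly} is assumed to expose a standard
* {@code setKey(String)} setter, as other model classes in this package do):</p>
*
* <pre>{@code
* TagKeyOnly tagKey = new TagKeyOnly();
* tagKey.setKey("project"); // key of the tag to remove
*
* RemoveTagsRequest request = new RemoveTagsRequest()
*         .withLoadBalancerNames("my-load-balancer")
*         .withTags(tagKey);
* }</pre>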
*/
public class RemoveTagsRequest extends AmazonWebServiceRequest implements
Serializable, Cloneable {
/**
* <p>
* The name of the load balancer. You can specify a maximum of one load
* balancer name.
* </p>
*/
private com.amazonaws.internal.SdkInternalList<String> loadBalancerNames;
/**
* <p>
* The list of tag keys to remove.
* </p>
*/
private com.amazonaws.internal.SdkInternalList<TagKeyOnly> tags;
/**
* <p>
* The name of the load balancer. You can specify a maximum of one load
* balancer name.
* </p>
*
* @return The name of the load balancer. You can specify a maximum of one
* load balancer name.
*/
public java.util.List<String> getLoadBalancerNames() {
if (loadBalancerNames == null) {
loadBalancerNames = new com.amazonaws.internal.SdkInternalList<String>();
}
return loadBalancerNames;
}
/**
* <p>
* The name of the load balancer. You can specify a maximum of one load
* balancer name.
* </p>
*
* @param loadBalancerNames
* The name of the load balancer. You can specify a maximum of one
* load balancer name.
*/
public void setLoadBalancerNames(
java.util.Collection<String> loadBalancerNames) {
if (loadBalancerNames == null) {
this.loadBalancerNames = null;
return;
}
this.loadBalancerNames = new com.amazonaws.internal.SdkInternalList<String>(
loadBalancerNames);
}
/**
* <p>
* The name of the load balancer. You can specify a maximum of one load
* balancer name.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if
* any). Use {@link #setLoadBalancerNames(java.util.Collection)} or
* {@link #withLoadBalancerNames(java.util.Collection)} if you want to
* override the existing values.
* </p>
*
* @param loadBalancerNames
* The name of the load balancer. You can specify a maximum of one
* load balancer name.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public RemoveTagsRequest withLoadBalancerNames(String... loadBalancerNames) {
if (this.loadBalancerNames == null) {
setLoadBalancerNames(new com.amazonaws.internal.SdkInternalList<String>(
loadBalancerNames.length));
}
for (String ele : loadBalancerNames) {
this.loadBalancerNames.add(ele);
}
return this;
}
/**
* <p>
* The name of the load balancer. You can specify a maximum of one load
* balancer name.
* </p>
*
* @param loadBalancerNames
* The name of the load balancer. You can specify a maximum of one
* load balancer name.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public RemoveTagsRequest withLoadBalancerNames(
java.util.Collection<String> loadBalancerNames) {
setLoadBalancerNames(loadBalancerNames);
return this;
}
/**
* <p>
* The list of tag keys to remove.
* </p>
*
* @return The list of tag keys to remove.
*/
public java.util.List<TagKeyOnly> getTags() {
if (tags == null) {
tags = new com.amazonaws.internal.SdkInternalList<TagKeyOnly>();
}
return tags;
}
/**
* <p>
* The list of tag keys to remove.
* </p>
*
* @param tags
* The list of tag keys to remove.
*/
public void setTags(java.util.Collection<TagKeyOnly> tags) {
if (tags == null) {
this.tags = null;
return;
}
this.tags = new com.amazonaws.internal.SdkInternalList<TagKeyOnly>(tags);
}
/**
* <p>
* The list of tag keys to remove.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if
* any). Use {@link #setTags(java.util.Collection)} or
* {@link #withTags(java.util.Collection)} if you want to override the
* existing values.
* </p>
*
* @param tags
* The list of tag keys to remove.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public RemoveTagsRequest withTags(TagKeyOnly... tags) {
if (this.tags == null) {
setTags(new com.amazonaws.internal.SdkInternalList<TagKeyOnly>(
tags.length));
}
for (TagKeyOnly ele : tags) {
this.tags.add(ele);
}
return this;
}
/**
* <p>
* The list of tag keys to remove.
* </p>
*
* @param tags
* The list of tag keys to remove.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public RemoveTagsRequest withTags(java.util.Collection<TagKeyOnly> tags) {
setTags(tags);
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getLoadBalancerNames() != null)
sb.append("LoadBalancerNames: " + getLoadBalancerNames() + ",");
if (getTags() != null)
sb.append("Tags: " + getTags());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof RemoveTagsRequest == false)
return false;
RemoveTagsRequest other = (RemoveTagsRequest) obj;
if (other.getLoadBalancerNames() == null
^ this.getLoadBalancerNames() == null)
return false;
if (other.getLoadBalancerNames() != null
&& other.getLoadBalancerNames().equals(
this.getLoadBalancerNames()) == false)
return false;
if (other.getTags() == null ^ this.getTags() == null)
return false;
if (other.getTags() != null
&& other.getTags().equals(this.getTags()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime
* hashCode
+ ((getLoadBalancerNames() == null) ? 0
: getLoadBalancerNames().hashCode());
hashCode = prime * hashCode
+ ((getTags() == null) ? 0 : getTags().hashCode());
return hashCode;
}
@Override
public RemoveTagsRequest clone() {
return (RemoveTagsRequest) super.clone();
}
}
|
|
package org.ndexbio.cxio.core;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import org.ndexbio.cxio.core.interfaces.AspectElement;
import org.ndexbio.cxio.core.interfaces.AspectFragmentWriter;
import org.ndexbio.cxio.metadata.MetaDataCollection;
import org.ndexbio.cxio.misc.AspectElementCounts;
import org.ndexbio.cxio.misc.NumberVerification;
import org.ndexbio.cxio.misc.OpaqueElement;
import org.ndexbio.cxio.misc.Status;
import org.ndexbio.cxio.util.CxConstants;
import org.ndexbio.cxio.util.CxioUtil;
import org.ndexbio.cxio.util.JsonWriter;
/**
* This class is for writing aspect fragments (lists of aspect elements).
*
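* <p>A minimal usage sketch ({@code out} is any OutputStream; the aspect name and the
* JSON string are illustrative):</p>
*
* <pre>{@code
* CxWriter cxWriter = CxWriter.createInstance(out, true);
* cxWriter.start();
* cxWriter.startAspectFragment("nodes");
* cxWriter.writeOpaqueAspectElement("{\"@id\": 1}");
* cxWriter.endAspectFragment();
* cxWriter.end(true, "");
* }</pre>
*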
* @author cmzmasek
*
*/
@Deprecated
public final class CxWriter {
private final JsonWriter _jw;
// private final MessageDigest _md;
private boolean _started;
private boolean _ended;
private boolean _fragment_started;
private String _current_fragment_name;
private final Map<String, AspectFragmentWriter> _writers;
private final AspectElementCounts _element_counts;
private boolean _calculate_element_counts;
private MetaDataCollection _pre_meta_data;
private MetaDataCollection _post_meta_data;
private boolean _in_fragment;
/**
* Returns a CxWriter for writing to OutputStream out.
* <br>
* Subsequent calls to method {@link #addAspectFragmentWriter(AspectFragmentWriter writer)} are
* required to add {@link org.ndexbio.cxio.core.interfaces.AspectFragmentWriter} to the newly created CxWriter.
*
* @param out the OutputStream to write to
* @param use_default_pretty_printer to turn pretty printing on/off
* @return a CxWriter writer
* @throws IOException
*/
public final static CxWriter createInstance(final OutputStream out, final boolean use_default_pretty_printer) throws IOException {
return new CxWriter(out, use_default_pretty_printer);
}
/**
* This method is for adding a {@link org.ndexbio.cxio.core.interfaces.AspectFragmentWriter} to this CxWriter.
*
* @param writer the AspectFragmentWriter to add
*/
public void addAspectFragmentWriter(final AspectFragmentWriter writer) {
if (writer == null) {
throw new IllegalArgumentException("aspect fragment writer is null");
}
if (CxioUtil.isEmpty(writer.getAspectName())) {
throw new IllegalArgumentException("aspect name is null or empty");
}
_writers.put(writer.getAspectName(), writer);
}
/**
* This method is to be called prior to writing individual aspect elements of a given type/name.
* <br>
*
* @param aspect_name the name of the aspect elements to be written
* @throws IOException
*/
public void startAspectFragment(final String aspect_name) throws IOException {
checkIfEnded();
checkIfNotStarted();
if (_fragment_started) {
throw new IllegalStateException("fragment already started");
}
if (CxioUtil.isEmpty(aspect_name)) {
throw new IllegalStateException("aspect fragment name must not be empty or null");
}
_fragment_started = true;
_current_fragment_name = aspect_name;
_jw.startArray(aspect_name);
}
private final void checkIfEnded() {
if (_ended) {
throw new IllegalStateException("already ended");
}
}
/**
* This method is to be called after writing individual aspect elements of a given type/name.
*
* @throws IOException
*/
public void endAspectFragment() throws IOException {
checkIfEnded();
checkIfNotStarted();
if (!_fragment_started) {
throw new IllegalStateException("fragment not started");
}
_fragment_started = false;
_in_fragment = false;
_current_fragment_name = null;
_jw.endArray();
}
/**
* This method is to be called at the end of writing to a stream.
*
* @throws IOException
*/
public void end(final boolean success, final String message) throws IOException {
checkIfEnded();
checkIfNotStarted();
if ( !success && ( _fragment_started || _in_fragment )) {
_jw.endArray();
}
_ended = true;
_started = false;
_current_fragment_name = null;
writeMetaData(_post_meta_data);
final Status status = new Status(success, message);
status.toJson(_jw);
_jw.end();
}
private final void checkIfNotStarted() {
if (!_started) {
throw new IllegalStateException("not started");
}
}
/**
* This method is to be called at the beginning of writing to a stream.
*
* @throws IOException
*/
public void start() throws IOException {
checkIfEnded();
if (_started) {
throw new IllegalStateException("already started");
}
_started = true;
_ended = false;
_jw.start();
// TODO: Remove eventually. Some services use this to identify CX
final NumberVerification nv = new NumberVerification(CxConstants.LONG_NUMBER_TEST);
nv.toJson(_jw);
writeMetaData(_pre_meta_data);
}
/**
* This is for writing a list of AspectElements (an aspect fragment) of a given type.
* <br>
* An appropriate {@link org.ndexbio.cxio.core.interfaces.AspectFragmentWriter} will be automatically
* selected (if added previously).
*
* @param elements the list of AspectElements to be written
* @throws IOException
*/
public void writeAspectElements(final List<AspectElement> elements) throws IOException {
checkIfEnded();
checkIfNotStarted();
if (_fragment_started) {
throw new IllegalStateException("in individual elements writing state");
}
if ((elements == null) || elements.isEmpty()) {
return;
}
if (_writers.containsKey(elements.get(0).getAspectName())) {
final AspectFragmentWriter writer = _writers.get(elements.get(0).getAspectName());
_in_fragment = true;
writer.write(elements, _jw);
_in_fragment = false;
if (_calculate_element_counts) {
_element_counts.processAspectElements(elements);
}
}
}
/**
* This is for writing a list of AspectElements (an aspect fragment) of a given type.
*
* @param elements the list of AspectElements to be written
* @param writer an appropriate {@link org.ndexbio.cxio.core.interfaces.AspectFragmentWriter}
* @throws IOException
*/
public void writeAspectElements(final List<AspectElement> elements, final AspectFragmentWriter writer) throws IOException {
checkIfEnded();
checkIfNotStarted();
if (_fragment_started) {
throw new IllegalStateException("in individual elements writing state");
}
if ((elements == null) || elements.isEmpty()) {
return;
}
_in_fragment = true;
writer.write(elements, _jw);
_in_fragment = false;
if (_calculate_element_counts) {
_element_counts.processAspectElements(elements);
}
}
/**
* This is for writing a single AspectElement.
* An appropriate {@link org.ndexbio.cxio.core.interfaces.AspectFragmentWriter} will be automatically
* selected (if added previously).
* <br>
* Prior to calling this method for an AspectElement of a given type/name,
* {@link #startAspectFragment(String aspect_name)} needs to be called.
* <br>
* After all AspectElements of a given type/name are written, {@link #endAspectFragment()} needs to be called.
*
* @param element the AspectElement to be written
* @throws IOException
*/
public void writeAspectElement(final AspectElement element) throws IOException {
checkIfEnded();
checkIfNotStarted();
if (!_fragment_started) {
throw new IllegalStateException("fragment not started");
}
if (element == null) {
return;
}
if (_writers.containsKey(element.getAspectName())) {
final AspectFragmentWriter writer = _writers.get(element.getAspectName());
writer.writeElement(element, _jw);
if (_calculate_element_counts) {
_element_counts.processAspectElement(element);
}
}
}
/**
* This is for writing a single AspectElement.
* <br>
* Prior to calling this method for an AspectElement of a given type/name,
* {@link #startAspectFragment(String aspect_name)} needs to be called.
* <br>
* After all AspectElements of a given type/name are written, {@link #endAspectFragment()} needs to be called.
*
* @param element the AspectElement to be written
* @param writer an appropriate {@link org.ndexbio.cxio.core.interfaces.AspectFragmentWriter}
* @throws IOException
*/
public void writeAspectElement(final AspectElement element, final AspectFragmentWriter writer) throws IOException {
checkIfEnded();
checkIfNotStarted();
if (!_fragment_started) {
throw new IllegalStateException("fragment not started");
}
if (element == null) {
return;
}
writer.writeElement(element, _jw);
if (_calculate_element_counts) {
_element_counts.processAspectElement(element);
}
}
/**
* This returns an object which gives access to the element counts
* for the aspect elements written out.
*
* @return the AspectElementCounts
*/
public final AspectElementCounts getAspectElementCounts() {
return _element_counts;
}
/**
* To turn the calculation of aspect element counts on/off.
*
* @param calculate_element_counts
*/
public final void setCalculateAspectElementCounts(final boolean calculate_element_counts) {
_calculate_element_counts = calculate_element_counts;
}
/* public final byte[] getMd5Checksum() {
if (_md == null) {
throw new IllegalStateException("cx writer is not set up to calculare checksum");
}
return _md.digest();
} */
public final void addPreMetaData(final MetaDataCollection pre_meta_data) {
checkIfEnded();
if (_started) {
throw new IllegalStateException("illegal attempt to add pre meta-data: already started");
}
_pre_meta_data = pre_meta_data;
}
public final void addPostMetaData(final MetaDataCollection post_meta_data) {
checkIfEnded();
_post_meta_data = post_meta_data;
}
private final void writeMetaData(final MetaDataCollection md) throws IOException {
if ((md != null) && !md.isEmpty()) {
md.toJson(_jw);
}
}
public final void writeOpaqueAspectFragment(final String name, final OpaqueElement opque_element) throws IOException {
writeOpaqueAspectFragment(name, opque_element.toJsonString());
}
public final void writeOpaqueAspectFragment(final String name, final String json_string) throws IOException {
checkIfEnded();
checkIfNotStarted();
if (_fragment_started) {
throw new IllegalStateException("in individual elements writing state");
}
if (CxioUtil.isEmpty(json_string)) {
return;
}
_jw.writeJsonNodeAsList(name, json_string);
if (_calculate_element_counts) {
_element_counts.processAspectElement(name);
}
}
public final void writeOpaqueAspectElement(final OpaqueElement opque_element) throws IOException {
writeOpaqueAspectElement(opque_element.toJsonString());
}
public final void writeOpaqueAspectElement(final String json_string) throws IOException {
checkIfEnded();
checkIfNotStarted();
if (!_fragment_started) {
throw new IllegalStateException("fragment not started");
}
if (CxioUtil.isEmpty(json_string)) {
return;
}
_jw.writeAnonymousAspectElement(json_string);
if (_calculate_element_counts) {
_element_counts.processAspectElement(_current_fragment_name);
}
}
public final void writeOpaqueAspectFragment2(final String name, final Collection<OpaqueElement> opque_elements) throws IOException {
checkIfEnded();
checkIfNotStarted();
if (_fragment_started) {
throw new IllegalStateException("in individual elements writing state");
}
if ((opque_elements == null) || opque_elements.isEmpty()) {
return;
}
startAspectFragment(name);
for (final OpaqueElement opaque_element : opque_elements) {
_jw.writeJsonObject(opaque_element.getData());
// writeOpaqueAspectElement(opaque_element);
}
if (_calculate_element_counts) {
_element_counts.processAspectElement(name, opque_elements.size());
}
endAspectFragment();
}
public final void writeOpaqueAspectFragment(final String name, final Collection<String> json_strings) throws IOException {
checkIfEnded();
checkIfNotStarted();
if (_fragment_started) {
throw new IllegalStateException("in individual elements writing state");
}
if ((json_strings == null) || json_strings.isEmpty()) {
return;
}
_jw.writeAnonymousAspectElements(name, json_strings);
if (_calculate_element_counts) {
_element_counts.processAspectElement(name, json_strings.size());
}
}
/**
* Convenience method to write a map (aspect name to lists of aspect elements) to
* an output stream.
*
*
* @param aspect_elements
* @param cx_writer
* @param output_stream
* @throws IOException
*/
public final static void writeFromMap(final SortedMap<String, List<AspectElement>> aspect_elements, final CxWriter cx_writer, final OutputStream output_stream) throws IOException {
boolean success = true;
String msg = "";
cx_writer.start();
try {
for (final String k : aspect_elements.keySet()) {
final List<AspectElement> x = aspect_elements.get(k);
cx_writer.writeAspectElements(x);
}
}
catch (final Exception e) {
success = false;
msg = e.getMessage();
}
cx_writer.end(success, msg);
}
private CxWriter(final OutputStream os, final boolean use_default_pretty_printer) throws IOException {
if (os == null) {
throw new IllegalArgumentException("attempt to use null outputstream");
}
_writers = new HashMap<>();
// _md = null;
_jw = JsonWriter.createInstance(os, use_default_pretty_printer);
_started = false;
_ended = false;
_fragment_started = false;
_calculate_element_counts = true;
_element_counts = AspectElementCounts.createInstance();
_pre_meta_data = null;
_post_meta_data = null;
}
}
|
|
/**
*******************************************************************************
* Copyright (C) 2006-2012, International Business Machines Corporation and *
* others. All Rights Reserved. *
*******************************************************************************
*
*******************************************************************************
*/
package com.ibm.icu.charset;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.nio.charset.Charset;
import java.nio.charset.IllegalCharsetNameException;
import java.nio.charset.UnsupportedCharsetException;
import java.util.HashMap;
import com.ibm.icu.text.UnicodeSet;
/**
* <p>A subclass of java.nio.Charset that provides implementations of ICU's charset converters.
* This API is used to convert codepage or character-encoded data to and
* from UTF-16. You can open a converter with {@link Charset#forName } and {@link #forNameICU }. With that
* converter, you can get its properties, set options, and convert your data.</p>
*
* <p>Since many software programs recognize different converter names for
* different types of converters, there are other functions in this API to
* iterate over the converter aliases.
*
* @stable ICU 3.6
*/
public abstract class CharsetICU extends Charset{
String icuCanonicalName;
String javaCanonicalName;
int options;
float maxCharsPerByte;
String name; /* +4: 60 internal name of the converter- invariant chars */
int codepage; /* +64: 4 codepage # (now IBM-$codepage) */
byte platform; /* +68: 1 platform of the converter (only IBM now) */
byte conversionType; /* +69: 1 conversion type */
int minBytesPerChar; /* +70: 1 Minimum # bytes per char in this codepage */
int maxBytesPerChar; /* +71: 1 Maximum # bytes output per UChar in this codepage */
byte subChar[/*UCNV_MAX_SUBCHAR_LEN*/]; /* +72: 4 [note: 4 and 8 byte boundary] */
byte subCharLen; /* +76: 1 */
byte hasToUnicodeFallback; /* +77: 1 UBool needs to be changed to UBool to be consistent across platform */
byte hasFromUnicodeFallback; /* +78: 1 */
short unicodeMask; /* +79: 1 bit 0: has supplementary bit 1: has single surrogates */
byte subChar1; /* +80: 1 single-byte substitution character for IBM MBCS (0 if none) */
//byte reserved[/*19*/]; /* +81: 19 to round out the structure */
// typedef enum UConverterUnicodeSet {
/**
* Parameter that select the set of roundtrippable Unicode code points.
* @stable ICU 4.0
*/
public static final int ROUNDTRIP_SET=0;
/**
* Select the set of Unicode code points with roundtrip or fallback mappings.
* Not supported at this point.
* @internal
* @deprecated This API is ICU internal only.
*/
public static final int ROUNDTRIP_AND_FALLBACK_SET =1;
//} UConverterUnicodeSet;
/**
*
* @param icuCanonicalName
* @param canonicalName
* @param aliases
* @stable ICU 3.6
*/
protected CharsetICU(String icuCanonicalName, String canonicalName, String[] aliases) {
super(canonicalName,aliases);
if(canonicalName.length() == 0){
throw new IllegalCharsetNameException(canonicalName);
}
this.javaCanonicalName = canonicalName;
this.icuCanonicalName = icuCanonicalName;
}
/**
* Ascertains if the given charset is a subset of this charset.
* Implements the abstract method of the superclass.
* @param cs charset to test
* @return true if the given charset is a subset of this charset
* @stable ICU 3.6
*/
public boolean contains(Charset cs){
if (null == cs) {
return false;
} else if (this.equals(cs)) {
return true;
}
return false;
}
private static final HashMap<String, String> algorithmicCharsets = new HashMap<String, String>();
static{
algorithmicCharsets.put("LMBCS-1", "com.ibm.icu.charset.CharsetLMBCS");
algorithmicCharsets.put("LMBCS-2", "com.ibm.icu.charset.CharsetLMBCS");
algorithmicCharsets.put("LMBCS-3", "com.ibm.icu.charset.CharsetLMBCS");
algorithmicCharsets.put("LMBCS-4", "com.ibm.icu.charset.CharsetLMBCS");
algorithmicCharsets.put("LMBCS-5", "com.ibm.icu.charset.CharsetLMBCS");
algorithmicCharsets.put("LMBCS-6", "com.ibm.icu.charset.CharsetLMBCS");
algorithmicCharsets.put("LMBCS-8", "com.ibm.icu.charset.CharsetLMBCS");
algorithmicCharsets.put("LMBCS-11", "com.ibm.icu.charset.CharsetLMBCS");
algorithmicCharsets.put("LMBCS-16", "com.ibm.icu.charset.CharsetLMBCS");
algorithmicCharsets.put("LMBCS-17", "com.ibm.icu.charset.CharsetLMBCS");
algorithmicCharsets.put("LMBCS-18", "com.ibm.icu.charset.CharsetLMBCS");
algorithmicCharsets.put("LMBCS-19", "com.ibm.icu.charset.CharsetLMBCS");
algorithmicCharsets.put("BOCU-1", "com.ibm.icu.charset.CharsetBOCU1" );
algorithmicCharsets.put("SCSU", "com.ibm.icu.charset.CharsetSCSU" );
algorithmicCharsets.put("US-ASCII", "com.ibm.icu.charset.CharsetASCII" );
algorithmicCharsets.put("ISO-8859-1", "com.ibm.icu.charset.Charset88591" );
algorithmicCharsets.put("UTF-16", "com.ibm.icu.charset.CharsetUTF16" );
algorithmicCharsets.put("UTF-16BE", "com.ibm.icu.charset.CharsetUTF16BE" );
algorithmicCharsets.put("UTF-16BE,version=1", "com.ibm.icu.charset.CharsetUTF16BE" );
algorithmicCharsets.put("UTF-16LE", "com.ibm.icu.charset.CharsetUTF16LE" );
algorithmicCharsets.put("UTF-16LE,version=1", "com.ibm.icu.charset.CharsetUTF16LE" );
algorithmicCharsets.put("UTF16_OppositeEndian", "com.ibm.icu.charset.CharsetUTF16LE" );
algorithmicCharsets.put("UTF16_PlatformEndian", "com.ibm.icu.charset.CharsetUTF16" );
algorithmicCharsets.put("UTF-32", "com.ibm.icu.charset.CharsetUTF32" );
algorithmicCharsets.put("UTF-32BE", "com.ibm.icu.charset.CharsetUTF32BE" );
algorithmicCharsets.put("UTF-32LE", "com.ibm.icu.charset.CharsetUTF32LE" );
algorithmicCharsets.put("UTF32_OppositeEndian", "com.ibm.icu.charset.CharsetUTF32LE" );
algorithmicCharsets.put("UTF32_PlatformEndian", "com.ibm.icu.charset.CharsetUTF32" );
algorithmicCharsets.put("UTF-8", "com.ibm.icu.charset.CharsetUTF8" );
algorithmicCharsets.put("CESU-8", "com.ibm.icu.charset.CharsetCESU8" );
algorithmicCharsets.put("UTF-7", "com.ibm.icu.charset.CharsetUTF7" );
algorithmicCharsets.put("ISCII,version=0", "com.ibm.icu.charset.CharsetISCII" );
algorithmicCharsets.put("ISCII,version=1", "com.ibm.icu.charset.CharsetISCII" );
algorithmicCharsets.put("ISCII,version=2", "com.ibm.icu.charset.CharsetISCII" );
algorithmicCharsets.put("ISCII,version=3", "com.ibm.icu.charset.CharsetISCII" );
algorithmicCharsets.put("ISCII,version=4", "com.ibm.icu.charset.CharsetISCII" );
algorithmicCharsets.put("ISCII,version=5", "com.ibm.icu.charset.CharsetISCII" );
algorithmicCharsets.put("ISCII,version=6", "com.ibm.icu.charset.CharsetISCII" );
algorithmicCharsets.put("ISCII,version=7", "com.ibm.icu.charset.CharsetISCII" );
algorithmicCharsets.put("ISCII,version=8", "com.ibm.icu.charset.CharsetISCII" );
algorithmicCharsets.put("IMAP-mailbox-name", "com.ibm.icu.charset.CharsetUTF7" );
algorithmicCharsets.put("HZ", "com.ibm.icu.charset.CharsetHZ" );
algorithmicCharsets.put("ISO_2022,locale=ja,version=0", "com.ibm.icu.charset.CharsetISO2022" );
algorithmicCharsets.put("ISO_2022,locale=ja,version=1", "com.ibm.icu.charset.CharsetISO2022" );
algorithmicCharsets.put("ISO_2022,locale=ja,version=2", "com.ibm.icu.charset.CharsetISO2022" );
algorithmicCharsets.put("ISO_2022,locale=ja,version=3", "com.ibm.icu.charset.CharsetISO2022" );
algorithmicCharsets.put("ISO_2022,locale=ja,version=4", "com.ibm.icu.charset.CharsetISO2022" );
algorithmicCharsets.put("ISO_2022,locale=zh,version=0", "com.ibm.icu.charset.CharsetISO2022" );
algorithmicCharsets.put("ISO_2022,locale=zh,version=1", "com.ibm.icu.charset.CharsetISO2022" );
algorithmicCharsets.put("ISO_2022,locale=zh,version=2", "com.ibm.icu.charset.CharsetISO2022" );
algorithmicCharsets.put("ISO_2022,locale=ko,version=0", "com.ibm.icu.charset.CharsetISO2022" );
algorithmicCharsets.put("ISO_2022,locale=ko,version=1", "com.ibm.icu.charset.CharsetISO2022" );
algorithmicCharsets.put("x11-compound-text", "com.ibm.icu.charset.CharsetCompoundText" );
}
/*public*/ static final Charset getCharset(String icuCanonicalName, String javaCanonicalName, String[] aliases){
String className = algorithmicCharsets.get(icuCanonicalName);
if(className==null){
//all the cnv files are loaded as MBCS
className = "com.ibm.icu.charset.CharsetMBCS";
}
try{
CharsetICU conv = null;
Class<? extends CharsetICU> cs = Class.forName(className).asSubclass(CharsetICU.class);
Class<?>[] paramTypes = new Class<?>[]{ String.class, String.class, String[].class};
final Constructor<? extends CharsetICU> c = cs.getConstructor(paramTypes);
Object[] params = new Object[]{ icuCanonicalName, javaCanonicalName, aliases};
// Run constructor
try {
conv = c.newInstance(params);
if (conv != null) {
return conv;
}
}catch (InvocationTargetException e) {
throw new UnsupportedCharsetException( icuCanonicalName+": "+"Could not load " + className+ ". Exception:" + e.getTargetException());
}
}catch(ClassNotFoundException ex){
}catch(NoSuchMethodException ex){
}catch (IllegalAccessException ex){
}catch (InstantiationException ex){
}
throw new UnsupportedCharsetException( icuCanonicalName+": "+"Could not load " + className);
}
static final boolean isSurrogate(int c){
return (((c)&0xfffff800)==0xd800);
}
/*
* Returns the default charset name
*/
// static final String getDefaultCharsetName(){
// String defaultEncoding = new InputStreamReader(new ByteArrayInputStream(new byte[0])).getEncoding();
// return defaultEncoding;
// }
/**
* Returns a charset object for the named charset.
* This method guarantees that an ICU charset is returned when
* available. If the ICU charset provider does not support
* the specified charset, other charset providers are tried,
* including the standard Java charset provider.
*
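* <p>Illustrative sketch (whether the ICU implementation is actually used depends on the
* ICU charset data available at runtime):</p>
*
* <pre>{@code
* Charset cs = CharsetICU.forNameICU("UTF-8");
* ByteBuffer bytes = cs.encode("caf\u00e9");
* String roundTripped = cs.decode(bytes).toString(); // "caf\u00e9"
* }</pre>
*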
* @param charsetName The name of the requested charset,
* may be either a canonical name or an alias
* @return A charset object for the named charset
* @throws IllegalCharsetNameException If the given charset name
* is illegal
* @throws UnsupportedCharsetException If no support for the
* named charset is available in this instance of the Java
* virtual machine
* @stable ICU 3.6
*/
public static Charset forNameICU(String charsetName) throws IllegalCharsetNameException, UnsupportedCharsetException {
CharsetProviderICU icuProvider = new CharsetProviderICU();
CharsetICU cs = (CharsetICU) icuProvider.charsetForName(charsetName);
if (cs != null) {
return cs;
}
return Charset.forName(charsetName);
}
// /**
// * @see java.lang.Comparable#compareTo(java.lang.Object)
// * @stable 3.8
// */
// public int compareTo(Object otherObj) {
// if (!(otherObj instanceof CharsetICU)) {
// return -1;
// }
// return icuCanonicalName.compareTo(((CharsetICU)otherObj).icuCanonicalName);
// }
/**
* This follows the ucnv.c method ucnv_detectUnicodeSignature() to detect a
* Unicode signature at the start of the stream, for example U+FEFF (the Unicode
* BOM/signature character), which can be ignored.
*
* Detects Unicode signature byte sequences at the start of the byte stream
* and returns number of bytes of the BOM of the indicated Unicode charset.
* 0 is returned when no Unicode signature is recognized.
*
*/
// TODO This should be proposed as CharsetDecoderICU API.
// static String detectUnicodeSignature(ByteBuffer source) {
// int signatureLength = 0; // number of bytes of the signature
// final int SIG_MAX_LEN = 5;
// String sigUniCharset = null; // states what unicode charset is the BOM
// int i = 0;
//
// /*
// * initial 0xa5 bytes: make sure that if we read <SIG_MAX_LEN bytes we
// * don't misdetect something
// */
// byte start[] = { (byte) 0xa5, (byte) 0xa5, (byte) 0xa5, (byte) 0xa5,
// (byte) 0xa5 };
//
// while (i < source.remaining() && i < SIG_MAX_LEN) {
// start[i] = source.get(i);
// i++;
// }
//
// if (start[0] == (byte) 0xFE && start[1] == (byte) 0xFF) {
// signatureLength = 2;
// sigUniCharset = "UTF-16BE";
// source.position(signatureLength);
// return sigUniCharset;
// } else if (start[0] == (byte) 0xFF && start[1] == (byte) 0xFE) {
// if (start[2] == (byte) 0x00 && start[3] == (byte) 0x00) {
// signatureLength = 4;
// sigUniCharset = "UTF-32LE";
// source.position(signatureLength);
// return sigUniCharset;
// } else {
// signatureLength = 2;
// sigUniCharset = "UTF-16LE";
// source.position(signatureLength);
// return sigUniCharset;
// }
// } else if (start[0] == (byte) 0xEF && start[1] == (byte) 0xBB
// && start[2] == (byte) 0xBF) {
// signatureLength = 3;
// sigUniCharset = "UTF-8";
// source.position(signatureLength);
// return sigUniCharset;
// } else if (start[0] == (byte) 0x00 && start[1] == (byte) 0x00
// && start[2] == (byte) 0xFE && start[3] == (byte) 0xFF) {
// signatureLength = 4;
// sigUniCharset = "UTF-32BE";
// source.position(signatureLength);
// return sigUniCharset;
// } else if (start[0] == (byte) 0x0E && start[1] == (byte) 0xFE
// && start[2] == (byte) 0xFF) {
// signatureLength = 3;
// sigUniCharset = "SCSU";
// source.position(signatureLength);
// return sigUniCharset;
// } else if (start[0] == (byte) 0xFB && start[1] == (byte) 0xEE
// && start[2] == (byte) 0x28) {
// signatureLength = 3;
// sigUniCharset = "BOCU-1";
// source.position(signatureLength);
// return sigUniCharset;
// } else if (start[0] == (byte) 0x2B && start[1] == (byte) 0x2F
// && start[2] == (byte) 0x76) {
//
// if (start[3] == (byte) 0x38 && start[4] == (byte) 0x2D) {
// signatureLength = 5;
// sigUniCharset = "UTF-7";
// source.position(signatureLength);
// return sigUniCharset;
// } else if (start[3] == (byte) 0x38 || start[3] == (byte) 0x39
// || start[3] == (byte) 0x2B || start[3] == (byte) 0x2F) {
// signatureLength = 4;
// sigUniCharset = "UTF-7";
// source.position(signatureLength);
// return sigUniCharset;
// }
// } else if (start[0] == (byte) 0xDD && start[2] == (byte) 0x73
// && start[2] == (byte) 0x66 && start[3] == (byte) 0x73) {
// signatureLength = 4;
// sigUniCharset = "UTF-EBCDIC";
// source.position(signatureLength);
// return sigUniCharset;
// }
//
// /* no known Unicode signature byte sequence recognized */
// return null;
// }
abstract void getUnicodeSetImpl(UnicodeSet setFillIn, int which);
/**
* <p>Returns the set of Unicode code points that can be converted by an ICU Converter.
* <p>
* The current implementation returns only one kind of set (UCNV_ROUNDTRIP_SET): the set of all Unicode code points that can be
* roundtrip-converted (converted without any data loss) with the converter. This set will not include code points that have fallback
* mappings or are only the result of reverse fallback mappings. See UTR #22 "Character Mapping Markup Language" at <a href="http://www.unicode.org/reports/tr22/">http://www.unicode.org/reports/tr22/</a>
* <p>In the future, there may be more UConverterUnicodeSet choices to select sets with different properties.
* <p>
* <p>This is useful for example for
* <ul><li>checking that a string or document can be roundtrip-converted with a converter,
* without/before actually performing the conversion</li>
* <li>testing if a converter can be used for typical text for a certain locale,
* by comparing its roundtrip set with the set of ExemplarCharacters from
* ICU's locale data or other sources</li></ul>
*
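* <p>Illustrative sketch (assumes the "US-ASCII" converter is available from the ICU
* charset provider in this runtime):</p>
* <pre>{@code
* UnicodeSet set = new UnicodeSet();
* CharsetICU cs = (CharsetICU) CharsetICU.forNameICU("US-ASCII");
* cs.getUnicodeSet(set, CharsetICU.ROUNDTRIP_SET);
* boolean ok = set.containsAll("plain ASCII text"); // true, ASCII round-trips losslessly
* }</pre>
*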
* @param setFillIn A valid UnicodeSet. It will be cleared by this function before
* the converter's specific set is filled in.
* @param which A selector; currently ROUNDTRIP_SET is the only supported value.
* @throws IllegalArgumentException if the parameters do not match.
* @stable ICU 4.0
*/
public void getUnicodeSet(UnicodeSet setFillIn, int which){
if( setFillIn == null || which != ROUNDTRIP_SET ){
throw new IllegalArgumentException();
}
setFillIn.clear();
getUnicodeSetImpl(setFillIn, which);
}
/**
* Returns whether or not the charset of the converter has a fixed number of bytes
* per charset character.
* Examples of this are converters of the type UCNV_SBCS or UCNV_DBCS.
* Another example is UTF-32, which always uses 4 bytes per character. A UTF-32 code point
* may correspond to more than one UTF-8 or UTF-16 code unit but always has a size of 4 bytes.
* Note: This method is not intended to be used to determine whether the charset has a
* fixed ratio of bytes to Unicode code units for any particular Unicode encoding form.
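*
* <p>Illustrative sketch, based on the checks performed by this method (ASCII and UTF-32
* converters report {@code true}; a variable-width charset such as UTF-8 reports {@code false}):</p>
* <pre>{@code
* boolean utf32Fixed = ((CharsetICU) CharsetICU.forNameICU("UTF-32")).isFixedWidth(); // true
* boolean utf8Fixed = ((CharsetICU) CharsetICU.forNameICU("UTF-8")).isFixedWidth();   // false
* }</pre>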
* @return true if the converter is fixed-width
* @stable ICU 4.8
*/
public boolean isFixedWidth() {
if (this instanceof CharsetASCII || this instanceof CharsetUTF32) {
return true;
}
if (this instanceof CharsetMBCS) {
if (((CharsetMBCS)this).sharedData.staticData.maxBytesPerChar == ((CharsetMBCS)this).sharedData.staticData.minBytesPerChar) {
return true;
}
}
return false;
}
static void getNonSurrogateUnicodeSet(UnicodeSet setFillIn){
setFillIn.add(0, 0xd7ff);
setFillIn.add(0xe000, 0x10ffff);
}
static void getCompleteUnicodeSet(UnicodeSet setFillIn){
setFillIn.add(0, 0x10ffff);
}
}
|
|
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.projectView.impl;
import com.intellij.icons.AllIcons;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.projectView.impl.ProjectViewFileNestingService.NestingRule;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.ValidationInfo;
import com.intellij.ui.DumbAwareActionButton;
import com.intellij.ui.ToolbarDecorator;
import com.intellij.ui.components.JBCheckBox;
import com.intellij.ui.table.TableView;
import com.intellij.util.Consumer;
import com.intellij.util.ui.ColumnInfo;
import com.intellij.util.ui.ElementProducer;
import com.intellij.util.ui.ListTableModel;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import java.util.List;
public class FileNestingInProjectViewDialog extends DialogWrapper {
private final JBCheckBox myUseNestingRulesCheckBox;
private final JPanel myRulesPanel;
private final TableView<NestingRule> myTable;
protected FileNestingInProjectViewDialog(@NotNull final Project project) {
super(project);
setTitle(IdeBundle.message("file.nesting.dialog.title"));
myUseNestingRulesCheckBox = new JBCheckBox(IdeBundle.message("use.file.nesting.rules.checkbox"));
myUseNestingRulesCheckBox.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
UIUtil.setEnabled(myRulesPanel, myUseNestingRulesCheckBox.isSelected(), true);
}
});
myTable = createTable();
myRulesPanel = createRulesPanel(myTable);
init();
}
@Override
protected String getHelpId() {
return "project.view.file.nesting.dialog";
}
@Override
protected JComponent createCenterPanel() {
final JPanel mainPanel = new JPanel(new BorderLayout());
mainPanel.add(myUseNestingRulesCheckBox, BorderLayout.NORTH);
mainPanel.add(myRulesPanel, BorderLayout.CENTER);
return mainPanel;
}
private static JPanel createRulesPanel(@NotNull final TableView<NestingRule> table) {
final ToolbarDecorator toolbarDecorator =
ToolbarDecorator.createDecorator(table,
new ElementProducer<NestingRule>() {
@Override
public boolean canCreateElement() {
return true;
}
@Override
public NestingRule createElement() {
return new NestingRule();
}
})
.disableUpDownActions()
.addExtraAction(
new DumbAwareActionButton(IdeBundle.message("file.nesting.use.default.ruleset"), AllIcons.Actions.Reset_to_default) {
@Override
public void actionPerformed(AnActionEvent e) {
final List<NestingRule> rules = new ArrayList<>();
for (NestingRule rule : ProjectViewFileNestingService.DEFAULT_NESTING_RULES) {
rules.add(new NestingRule(rule.getParentFileSuffix(), rule.getChildFileSuffix()));
}
table.getListTableModel().setItems(rules);
}
});
return toolbarDecorator.createPanel();
}
private static TableView<NestingRule> createTable() {
ListTableModel<NestingRule> model = new ListTableModel<>(
new ColumnInfo<NestingRule, String>("Parent file suffix") {
@Override
public boolean isCellEditable(NestingRule rule) {
return true;
}
@Override
public String valueOf(NestingRule rule) {
return rule.getParentFileSuffix();
}
@Override
public void setValue(NestingRule rule, String value) {
rule.setParentFileSuffix(value.trim());
}
},
new ColumnInfo<NestingRule, String>("Child file suffix") {
@Override
public boolean isCellEditable(NestingRule rule) {
return true;
}
@Override
public String valueOf(NestingRule rule) {
return rule.getChildFileSuffix();
}
@Override
public void setValue(NestingRule rule, String value) {
rule.setChildFileSuffix(value);
}
}
);
final TableView<NestingRule> table = new TableView<>(model);
table.setRowHeight(new JTextField().getPreferredSize().height + table.getRowMargin());
return table;
}
@Nullable
@Override
protected ValidationInfo doValidate() {
if (!myUseNestingRulesCheckBox.isSelected()) return null;
List<NestingRule> items = myTable.getListTableModel().getItems();
for (int i = 0; i < items.size(); i++) {
final NestingRule rule = items.get(i);
final int row = i + 1;
if (rule.getParentFileSuffix().isEmpty()) {
return new ValidationInfo("Parent file suffix must not be empty (see row " + row + ")", null);
}
if (rule.getChildFileSuffix().isEmpty()) {
return new ValidationInfo("Child file suffix must not be empty (see row " + row + ")", null);
}
if (rule.getChildFileSuffix().equals(rule.getParentFileSuffix())) {
return new ValidationInfo(
"Parent and child file suffixes must not be equal ('" + rule.getParentFileSuffix() + "', see row " + row + ")", null);
}
}
return null;
}
@Override
protected void doOKAction() {
myTable.stopEditing();
super.doOKAction();
}
public void reset(boolean useFileNestingRules) {
myUseNestingRulesCheckBox.setSelected(useFileNestingRules);
UIUtil.setEnabled(myRulesPanel, myUseNestingRulesCheckBox.isSelected(), true);
final List<NestingRule> rules = new ArrayList<>();
for (NestingRule rule : ProjectViewFileNestingService.getInstance().getRules()) {
rules.add(new NestingRule(rule.getParentFileSuffix(), rule.getChildFileSuffix()));
}
myTable.getListTableModel().setItems(rules);
}
public void apply(@NotNull final Consumer<Boolean> useNestingRulesOptionConsumer) {
useNestingRulesOptionConsumer.consume(myUseNestingRulesCheckBox.isSelected());
if (myUseNestingRulesCheckBox.isSelected()) {
final List<NestingRule> result = new ArrayList<>();
for (NestingRule rule : myTable.getListTableModel().getItems()) {
result.add(new NestingRule(rule.getParentFileSuffix(), rule.getChildFileSuffix()));
}
ProjectViewFileNestingService.getInstance().setRules(result);
}
}
}
|
|
/*
* Copyright 2004-2011 H2 Group. Multiple-Licensed under the H2 License,
* Version 1.0, and under the Eclipse Public License, Version 1.0
* (http://h2database.com/html/license.html).
* Initial Developer: H2 Group
*/
package org.apache.hadoop.hdfs.server.namenode.mapdb;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Arrays;
/**
* An implementation of the EncryptionXTEA block cipher algorithm.
* <p>
* This implementation uses 32 rounds.
* The best attack reported as of 2009 is 36 rounds (Wikipedia).
* <p/>
* It requires a 32-byte encryption key, so an SHA-256 hash of the password is used.
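* <p/>
* A minimal usage sketch (the cipher works in place and {@code encrypt}/{@code decrypt}
* require the length to be a multiple of {@link #ALIGN}, so the input is padded):
* <pre>{@code
* EncryptionXTEA xtea = new EncryptionXTEA("secret password".getBytes());
* byte[] data = Arrays.copyOf("hello world".getBytes(), EncryptionXTEA.ALIGN); // pad to 16 bytes
* xtea.encrypt(data, 0, data.length);
* xtea.decrypt(data, 0, data.length); // restores the padded plaintext in place
* }</pre>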
*/
public final class EncryptionXTEA implements Serializer<byte[]>{
/**
* Blocks sizes are always multiples of this number.
*/
public static final int ALIGN = 16;
private static final int DELTA = 0x9E3779B9;
private final int k0, k1, k2, k3, k4, k5, k6, k7, k8, k9, k10, k11, k12, k13, k14, k15;
private final int k16, k17, k18, k19, k20, k21, k22, k23, k24, k25, k26, k27, k28, k29, k30, k31;
public EncryptionXTEA(byte[] password) {
byte[] b = getHash(password, false);
int[] key = new int[4];
for (int i = 0; i < 16;) {
key[i / 4] = (b[i++] << 24) + ((b[i++] & 255) << 16) + ((b[i++] & 255) << 8) + (b[i++] & 255);
}
int[] r = new int[32];
for (int i = 0, sum = 0; i < 32;) {
r[i++] = sum + key[sum & 3];
sum += DELTA;
r[i++] = sum + key[ (sum >>> 11) & 3];
}
k0 = r[0]; k1 = r[1]; k2 = r[2]; k3 = r[3]; k4 = r[4]; k5 = r[5]; k6 = r[6]; k7 = r[7];
k8 = r[8]; k9 = r[9]; k10 = r[10]; k11 = r[11]; k12 = r[12]; k13 = r[13]; k14 = r[14]; k15 = r[15];
k16 = r[16]; k17 = r[17]; k18 = r[18]; k19 = r[19]; k20 = r[20]; k21 = r[21]; k22 = r[22]; k23 = r[23];
k24 = r[24]; k25 = r[25]; k26 = r[26]; k27 = r[27]; k28 = r[28]; k29 = r[29]; k30 = r[30]; k31 = r[31];
}
public void encrypt(byte[] bytes, int off, int len) {
if (len % ALIGN != 0) {
throw new InternalError("unaligned len " + len);
}
for (int i = off; i < off + len; i += 8) {
encryptBlock(bytes, bytes, i);
}
}
public void decrypt(byte[] bytes, int off, int len) {
if (len % ALIGN != 0) {
throw new InternalError("unaligned len " + len);
}
for (int i = off; i < off + len; i += 8) {
decryptBlock(bytes, bytes, i);
}
}
private void encryptBlock(byte[] in, byte[] out, int off) {
int y = (in[off] << 24) | ((in[off+1] & 255) << 16) | ((in[off+2] & 255) << 8) | (in[off+3] & 255);
int z = (in[off+4] << 24) | ((in[off+5] & 255) << 16) | ((in[off+6] & 255) << 8) | (in[off+7] & 255);
y += (((z << 4) ^ (z >>> 5)) + z) ^ k0; z += (((y >>> 5) ^ (y << 4)) + y) ^ k1;
y += (((z << 4) ^ (z >>> 5)) + z) ^ k2; z += (((y >>> 5) ^ (y << 4)) + y) ^ k3;
y += (((z << 4) ^ (z >>> 5)) + z) ^ k4; z += (((y >>> 5) ^ (y << 4)) + y) ^ k5;
y += (((z << 4) ^ (z >>> 5)) + z) ^ k6; z += (((y >>> 5) ^ (y << 4)) + y) ^ k7;
y += (((z << 4) ^ (z >>> 5)) + z) ^ k8; z += (((y >>> 5) ^ (y << 4)) + y) ^ k9;
y += (((z << 4) ^ (z >>> 5)) + z) ^ k10; z += (((y >>> 5) ^ (y << 4)) + y) ^ k11;
y += (((z << 4) ^ (z >>> 5)) + z) ^ k12; z += (((y >>> 5) ^ (y << 4)) + y) ^ k13;
y += (((z << 4) ^ (z >>> 5)) + z) ^ k14; z += (((y >>> 5) ^ (y << 4)) + y) ^ k15;
y += (((z << 4) ^ (z >>> 5)) + z) ^ k16; z += (((y >>> 5) ^ (y << 4)) + y) ^ k17;
y += (((z << 4) ^ (z >>> 5)) + z) ^ k18; z += (((y >>> 5) ^ (y << 4)) + y) ^ k19;
y += (((z << 4) ^ (z >>> 5)) + z) ^ k20; z += (((y >>> 5) ^ (y << 4)) + y) ^ k21;
y += (((z << 4) ^ (z >>> 5)) + z) ^ k22; z += (((y >>> 5) ^ (y << 4)) + y) ^ k23;
y += (((z << 4) ^ (z >>> 5)) + z) ^ k24; z += (((y >>> 5) ^ (y << 4)) + y) ^ k25;
y += (((z << 4) ^ (z >>> 5)) + z) ^ k26; z += (((y >>> 5) ^ (y << 4)) + y) ^ k27;
y += (((z << 4) ^ (z >>> 5)) + z) ^ k28; z += (((y >>> 5) ^ (y << 4)) + y) ^ k29;
y += (((z << 4) ^ (z >>> 5)) + z) ^ k30; z += (((y >>> 5) ^ (y << 4)) + y) ^ k31;
out[off] = (byte) (y >> 24); out[off+1] = (byte) (y >> 16); out[off+2] = (byte) (y >> 8); out[off+3] = (byte) y;
out[off+4] = (byte) (z >> 24); out[off+5] = (byte) (z >> 16); out[off+6] = (byte) (z >> 8); out[off+7] = (byte) z;
}
private void decryptBlock(byte[] in, byte[] out, int off) {
int y = (in[off] << 24) | ((in[off+1] & 255) << 16) | ((in[off+2] & 255) << 8) | (in[off+3] & 255);
int z = (in[off+4] << 24) | ((in[off+5] & 255) << 16) | ((in[off+6] & 255) << 8) | (in[off+7] & 255);
z -= (((y >>> 5) ^ (y << 4)) + y) ^ k31; y -= (((z << 4) ^ (z >>> 5)) + z) ^ k30;
z -= (((y >>> 5) ^ (y << 4)) + y) ^ k29; y -= (((z << 4) ^ (z >>> 5)) + z) ^ k28;
z -= (((y >>> 5) ^ (y << 4)) + y) ^ k27; y -= (((z << 4) ^ (z >>> 5)) + z) ^ k26;
z -= (((y >>> 5) ^ (y << 4)) + y) ^ k25; y -= (((z << 4) ^ (z >>> 5)) + z) ^ k24;
z -= (((y >>> 5) ^ (y << 4)) + y) ^ k23; y -= (((z << 4) ^ (z >>> 5)) + z) ^ k22;
z -= (((y >>> 5) ^ (y << 4)) + y) ^ k21; y -= (((z << 4) ^ (z >>> 5)) + z) ^ k20;
z -= (((y >>> 5) ^ (y << 4)) + y) ^ k19; y -= (((z << 4) ^ (z >>> 5)) + z) ^ k18;
z -= (((y >>> 5) ^ (y << 4)) + y) ^ k17; y -= (((z << 4) ^ (z >>> 5)) + z) ^ k16;
z -= (((y >>> 5) ^ (y << 4)) + y) ^ k15; y -= (((z << 4) ^ (z >>> 5)) + z) ^ k14;
z -= (((y >>> 5) ^ (y << 4)) + y) ^ k13; y -= (((z << 4) ^ (z >>> 5)) + z) ^ k12;
z -= (((y >>> 5) ^ (y << 4)) + y) ^ k11; y -= (((z << 4) ^ (z >>> 5)) + z) ^ k10;
z -= (((y >>> 5) ^ (y << 4)) + y) ^ k9; y -= (((z << 4) ^ (z >>> 5)) + z) ^ k8;
z -= (((y >>> 5) ^ (y << 4)) + y) ^ k7; y -= (((z << 4) ^ (z >>> 5)) + z) ^ k6;
z -= (((y >>> 5) ^ (y << 4)) + y) ^ k5; y -= (((z << 4) ^ (z >>> 5)) + z) ^ k4;
z -= (((y >>> 5) ^ (y << 4)) + y) ^ k3; y -= (((z << 4) ^ (z >>> 5)) + z) ^ k2;
z -= (((y >>> 5) ^ (y << 4)) + y) ^ k1; y -= (((z << 4) ^ (z >>> 5)) + z) ^ k0;
out[off] = (byte) (y >> 24); out[off+1] = (byte) (y >> 16); out[off+2] = (byte) (y >> 8); out[off+3] = (byte) y;
out[off+4] = (byte) (z >> 24); out[off+5] = (byte) (z >> 16); out[off+6] = (byte) (z >> 8); out[off+7] = (byte) z;
}
/**
* The first 32 bits of the fractional parts of the cube roots of the first
* sixty-four prime numbers. Used for SHA256 password hash
*/
private static final int[] K = { 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5,
0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5, 0xd807aa98,
0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe,
0x9bdc06a7, 0xc19bf174, 0xe49b69c1, 0xefbe4786, 0x0fc19dc6,
0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3,
0xd5a79147, 0x06ca6351, 0x14292967, 0x27b70a85, 0x2e1b2138,
0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e,
0x92722c85, 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3,
0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, 0x19a4c116,
0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a,
0x5b9cca4f, 0x682e6ff3, 0x748f82ee, 0x78a5636f, 0x84c87814,
0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2 };
/**
* Calculate the hash code for the given data.
*
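* For illustration:
* <pre>{@code
* byte[] pw = "secret password".getBytes();
* byte[] key = EncryptionXTEA.getHash(pw, true); // 32-byte SHA-256 digest; pw is zeroed out
* }</pre>
*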
* @param data the data to hash
* @param nullData if the data should be filled with zeros after calculating the hash code
* @return the hash code
*/
public static byte[] getHash(byte[] data, boolean nullData) {
int byteLen = data.length;
int intLen = ((byteLen + 9 + 63) / 64) * 16;
byte[] bytes = new byte[intLen * 4];
System.arraycopy(data, 0, bytes, 0, byteLen);
if (nullData) {
Arrays.fill(data, (byte) 0);
}
bytes[byteLen] = (byte) 0x80;
int[] buff = new int[intLen];
for (int i = 0, j = 0; j < intLen; i += 4, j++) {
buff[j] = readInt(bytes, i);
}
buff[intLen - 2] = byteLen >>> 29;
buff[intLen - 1] = byteLen << 3;
int[] w = new int[64];
int[] hh = { 0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a,
0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19 };
for (int block = 0; block < intLen; block += 16) {
for (int i = 0; i < 16; i++) {
w[i] = buff[block + i];
}
for (int i = 16; i < 64; i++) {
int x = w[i - 2];
int theta1 = rot(x, 17) ^ rot(x, 19) ^ (x >>> 10);
x = w[i - 15];
int theta0 = rot(x, 7) ^ rot(x, 18) ^ (x >>> 3);
w[i] = theta1 + w[i - 7] + theta0 + w[i - 16];
}
int a = hh[0], b = hh[1], c = hh[2], d = hh[3];
int e = hh[4], f = hh[5], g = hh[6], h = hh[7];
for (int i = 0; i < 64; i++) {
int t1 = h + (rot(e, 6) ^ rot(e, 11) ^ rot(e, 25))
+ ((e & f) ^ ((~e) & g)) + K[i] + w[i];
int t2 = (rot(a, 2) ^ rot(a, 13) ^ rot(a, 22))
+ ((a & b) ^ (a & c) ^ (b & c));
h = g;
g = f;
f = e;
e = d + t1;
d = c;
c = b;
b = a;
a = t1 + t2;
}
hh[0] += a;
hh[1] += b;
hh[2] += c;
hh[3] += d;
hh[4] += e;
hh[5] += f;
hh[6] += g;
hh[7] += h;
}
byte[] result = new byte[32];
for (int i = 0; i < 8; i++) {
writeInt(result, i * 4, hh[i]);
}
Arrays.fill(w, 0);
Arrays.fill(buff, 0);
Arrays.fill(hh, 0);
Arrays.fill(bytes, (byte) 0);
return result;
}
private static int rot(int i, int count) {
return (i << (32 - count)) | (i >>> count);
}
private static int readInt(byte[] b, int i) {
return ((b[i] & 0xff) << 24) + ((b[i + 1] & 0xff) << 16)
+ ((b[i + 2] & 0xff) << 8) + (b[i + 3] & 0xff);
}
private static void writeInt(byte[] b, int i, int value) {
b[i] = (byte) (value >> 24);
b[i + 1] = (byte) (value >> 16);
b[i + 2] = (byte) (value >> 8);
b[i + 3] = (byte) value;
}
@Override
public void serialize(DataOutput out, byte[] value) throws IOException {
if(value==null) return;
int len = value.length;
if(len%ALIGN!=0)
len += ALIGN - len%ALIGN;
//write length difference
out.writeByte(len-value.length);
//write actual data
byte[] encrypted = Arrays.copyOf(value,len);
encrypt(encrypted,0, encrypted.length);
out.write(encrypted);
}
@Override
public byte[] deserialize(DataInput in, int available) throws IOException {
if(available==0) return null;
int cut = in.readUnsignedByte(); //length dif from 16bytes
byte[] b = new byte[available-1];
in.readFully(b);
decrypt(b, 0, b.length);
if(cut!=0)
b = Arrays.copyOf(b, b.length-cut);
return b;
}
}
|
|
/*
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.server.bootstrap.service.transport;
import java.security.GeneralSecurityException;
import java.security.KeyPair;
import java.security.PublicKey;
import java.text.MessageFormat;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.kaaproject.kaa.common.endpoint.security.MessageEncoderDecoder;
import org.kaaproject.kaa.server.bootstrap.service.OperationsServerListService;
import org.kaaproject.kaa.server.bootstrap.service.security.KeyStoreService;
import org.kaaproject.kaa.server.sync.ClientSync;
import org.kaaproject.kaa.server.sync.ServerSync;
import org.kaaproject.kaa.server.sync.SyncStatus;
import org.kaaproject.kaa.server.sync.bootstrap.BootstrapClientSync;
import org.kaaproject.kaa.server.sync.bootstrap.BootstrapServerSync;
import org.kaaproject.kaa.server.sync.bootstrap.ProtocolConnectionData;
import org.kaaproject.kaa.server.sync.platform.PlatformEncDec;
import org.kaaproject.kaa.server.sync.platform.PlatformEncDecException;
import org.kaaproject.kaa.server.sync.platform.PlatformLookup;
import org.kaaproject.kaa.server.transport.AbstractTransportService;
import org.kaaproject.kaa.server.transport.TransportService;
import org.kaaproject.kaa.server.transport.channel.ChannelContext;
import org.kaaproject.kaa.server.transport.message.ErrorBuilder;
import org.kaaproject.kaa.server.transport.message.MessageBuilder;
import org.kaaproject.kaa.server.transport.message.MessageHandler;
import org.kaaproject.kaa.server.transport.message.SessionInitMessage;
import org.kaaproject.kaa.server.transport.session.SessionAware;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
/**
* Responsible for initialization and management of transport instances
*
* @author Andrew Shvayka
*
*/
@Service
public class BootstrapTransportService extends AbstractTransportService implements TransportService {
/** Constant LOG. */
private static final Logger LOG = LoggerFactory.getLogger(BootstrapTransportService.class);
private static final int DEFAULT_THREAD_POOL_SIZE = 1;
private static final String TRANSPORT_CONFIG_PREFIX = "bootstrap";
@Value("#{properties[worker_thread_pool]}")
private int threadPoolSize = DEFAULT_THREAD_POOL_SIZE;
@Value("#{properties[support_unencrypted_connection]}")
private boolean supportUnencryptedConnection;
@Autowired
private OperationsServerListService operationsServerListService;
@Autowired
private KeyStoreService bootstrapKeyStoreService;
@Autowired
private Properties properties;
private BootstrapMessageHandler handler;
public BootstrapTransportService() {
super();
}
@Override
protected String getTransportConfigPrefix() {
return TRANSPORT_CONFIG_PREFIX;
}
@Override
protected Properties getServiceProperties() {
return properties;
}
@Override
public void lookupAndInit() {
LOG.info("Lookup platform protocols");
Set<String> platformProtocols = PlatformLookup.lookupPlatformProtocols(PlatformLookup.DEFAULT_PROTOCOL_LOOKUP_PACKAGE_NAME);
LOG.info("Initializing message handler with {} worker threads", threadPoolSize);
handler = new BootstrapMessageHandler(operationsServerListService, Executors.newFixedThreadPool(threadPoolSize), platformProtocols,
new KeyPair(bootstrapKeyStoreService.getPublicKey(), bootstrapKeyStoreService.getPrivateKey()), supportUnencryptedConnection);
super.lookupAndInit();
}
@Override
protected MessageHandler getMessageHandler() {
return handler;
}
@Override
protected PublicKey getPublicKey() {
return bootstrapKeyStoreService.getPublicKey();
}
@Override
public void stop() {
super.stop();
handler.stop();
}
public static class BootstrapMessageHandler implements MessageHandler {
private final ExecutorService executor;
private final Set<String> platformProtocols;
private final KeyPair keyPair;
private final boolean supportUnencryptedConnection;
private final OperationsServerListService opsListService;
private static final ThreadLocal<Map<Integer, PlatformEncDec>> platformEncDecMap = new ThreadLocal<>(); //NOSONAR
private static final ThreadLocal<MessageEncoderDecoder> crypt = new ThreadLocal<>(); //NOSONAR
public BootstrapMessageHandler(OperationsServerListService opsListService, ExecutorService executor, Set<String> platformProtocols,
KeyPair keyPair, boolean supportUnencryptedConnection) {
super();
this.opsListService = opsListService;
this.executor = executor;
this.platformProtocols = platformProtocols;
this.keyPair = keyPair;
this.supportUnencryptedConnection = supportUnencryptedConnection;
}
@Override
public void process(SessionAware message) {
// Session messages are not processed
}
@Override
public void process(final SessionInitMessage message) {
executor.execute(new Runnable() {
@Override
public void run() {
MessageEncoderDecoder crypt = getOrInitCrypt();
Map<Integer, PlatformEncDec> platformEncDecMap = getOrInitMap(platformProtocols);
try {
ClientSync request = decodeRequest(message, crypt, platformEncDecMap);
LOG.trace("Processing request {}", request);
BootstrapClientSync bsRequest = request.getBootstrapSync();
Set<ProtocolConnectionData> transports = opsListService.filter(bsRequest.getKeys());
BootstrapServerSync bsResponse = new BootstrapServerSync(bsRequest.getRequestId(), transports);
ServerSync response = new ServerSync();
response.setRequestId(request.getRequestId());
response.setStatus(SyncStatus.SUCCESS);
response.setBootstrapSync(bsResponse);
LOG.trace("Response {}", response);
encodeAndForward(message, crypt, platformEncDecMap, response);
LOG.trace("Response forwarded to specific transport {}", response);
} catch (Exception e) {
processErrors(message.getChannelContext(), message.getErrorBuilder(), e);
}
}
private void encodeAndForward(final SessionInitMessage message, MessageEncoderDecoder crypt,
Map<Integer, PlatformEncDec> platformEncDecMap, ServerSync response) throws PlatformEncDecException,
GeneralSecurityException {
MessageBuilder converter = message.getMessageBuilder();
byte[] responseData = encodePlatformLevelData(platformEncDecMap, message.getPlatformId(), response);
Object[] objects;
if (message.isEncrypted()) {
byte[] responseSignature = crypt.sign(responseData);
responseData = crypt.encodeData(responseData);
LOG.trace("Response signature {}", responseSignature);
LOG.trace("Response data {}", responseData);
objects = converter.build(responseData, responseSignature, message.isEncrypted());
} else {
LOG.trace("Response data {}", responseData);
objects = converter.build(responseData, message.isEncrypted());
}
ChannelContext context = message.getChannelContext();
if (objects != null && objects.length > 0) {
for (Object object : objects) {
context.write(object);
}
context.flush();
}
}
private void processErrors(ChannelContext ctx, ErrorBuilder converter, Exception e) {
LOG.trace("Message processing failed", e);
Object[] responses = converter.build(e);
if (responses != null && responses.length > 0) {
for (Object response : responses) {
ctx.writeAndFlush(response);
}
} else {
ctx.fireExceptionCaught(e);
}
}
private ClientSync decodeRequest(SessionInitMessage message, MessageEncoderDecoder crypt,
Map<Integer, PlatformEncDec> platformEncDecMap) throws GeneralSecurityException, PlatformEncDecException {
ClientSync syncRequest = null;
if (message.isEncrypted()) {
syncRequest = decodeEncryptedRequest(message, crypt, platformEncDecMap);
} else if (supportUnencryptedConnection) {
syncRequest = decodeUnencryptedRequest(message, platformEncDecMap);
} else {
LOG.warn("Received unencrypted init message, but unencrypted connection forbidden by configuration.");
throw new GeneralSecurityException("Unencrypted connection forbidden by configuration.");
}
if (syncRequest.getBootstrapSync() == null) {
throw new IllegalArgumentException("Bootstrap sync message is missing");
}
return syncRequest;
}
private ClientSync decodeEncryptedRequest(SessionInitMessage message, MessageEncoderDecoder crypt,
Map<Integer, PlatformEncDec> platformEncDecMap) throws GeneralSecurityException, PlatformEncDecException {
byte[] requestRaw = crypt.decodeData(message.getEncodedMessageData(), message.getEncodedSessionKey());
LOG.trace("Request data decrypted");
ClientSync request = decodePlatformLevelData(platformEncDecMap, message.getPlatformId(), requestRaw);
LOG.trace("Request data deserialized");
return request;
}
private ClientSync decodeUnencryptedRequest(SessionInitMessage message, Map<Integer, PlatformEncDec> platformEncDecMap)
throws GeneralSecurityException, PlatformEncDecException {
byte[] requestRaw = message.getEncodedMessageData();
LOG.trace("Try to convert raw data to SynRequest object");
ClientSync request = decodePlatformLevelData(platformEncDecMap, message.getPlatformId(), requestRaw);
LOG.trace("Request data deserialized");
return request;
}
private byte[] encodePlatformLevelData(Map<Integer, PlatformEncDec> platformEncDecMap, int platformID, ServerSync sync)
throws PlatformEncDecException {
PlatformEncDec encDec = platformEncDecMap.get(platformID);
if (encDec != null) {
return platformEncDecMap.get(platformID).encode(sync);
} else {
throw new PlatformEncDecException(MessageFormat.format("Encoder for platform protocol [{0}] is not defined",
platformID));
}
}
private ClientSync decodePlatformLevelData(Map<Integer, PlatformEncDec> platformEncDecMap, Integer platformID,
byte[] requestRaw) throws PlatformEncDecException {
PlatformEncDec encDec = platformEncDecMap.get(platformID);
if (encDec != null) {
return platformEncDecMap.get(platformID).decode(requestRaw);
} else {
throw new PlatformEncDecException(MessageFormat.format("Decoder for platform protocol [{0}] is not defined",
platformID));
}
}
private MessageEncoderDecoder getOrInitCrypt() {
if (crypt.get() == null) {
crypt.set(new MessageEncoderDecoder(keyPair.getPrivate(), keyPair.getPublic()));
}
return crypt.get();
}
private Map<Integer, PlatformEncDec> getOrInitMap(Set<String> platformProtocols) {
if (platformEncDecMap.get() == null) {
platformEncDecMap.set(PlatformLookup.initPlatformProtocolMap(platformProtocols));
}
return platformEncDecMap.get();
}
});
}
public void stop() {
executor.shutdown();
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import java.lang.reflect.Field;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.Random;
import java.util.stream.Collectors;
import org.apache.commons.collections4.map.LinkedMap;
import org.apache.wicket.core.util.lang.WicketObjects;
import org.apache.wicket.markup.IMarkupResourceStreamProvider;
import org.apache.wicket.markup.html.WebComponent;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.WebPage;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.form.TextField;
import org.apache.wicket.markup.html.panel.EmptyPanel;
import org.apache.wicket.util.resource.IResourceStream;
import org.apache.wicket.util.resource.StringResourceStream;
import org.apache.wicket.util.tester.WicketTestCase;
import org.junit.jupiter.api.Test;
class MarkupContainerTest extends WicketTestCase
{
private static final int NUMBER_OF_CHILDREN_FOR_A_MAP = MarkupContainer.MAPIFY_THRESHOLD + 1;
/**
* Make sure components are iterated in the order they were added. Required e.g. for Repeaters
*/
@Test
void iteratorOrder()
{
MarkupContainer container = new WebMarkupContainer("component");
for (int i = 0; i < 10; i++)
{
container.add(new WebComponent(Integer.toString(i)));
}
int i = 0;
for (Component component : container)
{
assertEquals(Integer.toString(i++), component.getId());
}
}
@Test
void markupId() throws Exception
{
executeTest(MarkupIdTestPage.class, "MarkupIdTestPageExpectedResult.html");
}
@Test
void get()
{
WebMarkupContainer a = new WebMarkupContainer("a");
WebMarkupContainer b = new WebMarkupContainer("b");
WebMarkupContainer c = new WebMarkupContainer("c");
WebMarkupContainer d = new WebMarkupContainer("d");
WebMarkupContainer e = new WebMarkupContainer("e");
WebMarkupContainer f = new WebMarkupContainer("f");
// ....A
// ...B....C
// .......D..E
// ...........F
a.add(b);
a.add(c);
c.add(d);
c.add(e);
e.add(f);
// basic gets
assertSame(a.get(null), a);
assertSame(a.get(""), a);
assertSame(a.get("b"), b);
assertSame(a.get("c"), c);
assertSame(a.get("c:d"), d);
assertSame(a.get("c:e:f"), f);
// parent path gets
assertSame(b.get(".."), a);
assertSame(e.get("..:.."), a);
assertSame(d.get("..:..:c:e:f"), f);
assertSame(e.get("..:d:..:e:f"), f);
assertSame(e.get("..:d:..:.."), a);
// invalid gets
assertNull(a.get(".."));
assertNull(a.get("..:a"));
assertNull(b.get("..|.."));
assertNull(a.get("q"));
}
/**
* https://issues.apache.org/jira/browse/WICKET-4006
*/
@Test
void addMyself()
{
WebMarkupContainer me = new WebMarkupContainer("a");
assertThrows(IllegalArgumentException.class, () -> me.add(me));
}
/**
* https://issues.apache.org/jira/browse/WICKET-5911
*/
@Test
void rerenderAfterRenderFailure()
{
FirstRenderFailsPage page = new FirstRenderFailsPage();
try
{
tester.startPage(page);
}
catch (WicketRuntimeException expected)
{
}
tester.startPage(page);
// rendering flags were properly reset, so the second rendering works properly
assertEquals(2, page.beforeRenderCalls);
}
@Test
void hierarchyChangeDuringRender()
{
HierarchyChangePage page = new HierarchyChangePage();
try
{
tester.startPage(page);
fail();
}
catch (WicketRuntimeException expected)
{
assertEquals(
"Cannot modify component hierarchy after render phase has started (page version cant change then anymore)",
expected.getMessage());
}
}
/**
* https://issues.apache.org/jira/browse/WICKET-4012
*/
@Test
void afterRenderJustOnce()
{
AfterRenderJustOncePage page = new AfterRenderJustOncePage();
tester.startPage(page);
assertEquals(1, page.afterRenderCalls);
}
/**
* https://issues.apache.org/jira/browse/WICKET-4016
*/
@Test
void callToStringFromConstructor()
{
new ToStringComponent();
}
@Test
void noChildShouldNotIterate()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
Iterator<Component> iterator = wmc.iterator();
assertFalse(iterator.hasNext());
}
@Test
void noChildAddingChildAfterIteratorAcquiredShouldIterateAndReturnNewChild()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
Iterator<Component> iterator = wmc.iterator();
Label label1 = new Label("label1", "Label1");
wmc.add(label1);
assertEquals(1, wmc.size());
assertTrue(iterator.hasNext());
assertEquals(label1, iterator.next());
assertFalse(iterator.hasNext());
}
@Test
void noChildAddingNChildrenAfterIteratorAcquiredShouldIterateAndReturnNewChildren()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
Iterator<Component> iterator = wmc.iterator();
addNChildren(wmc, NUMBER_OF_CHILDREN_FOR_A_MAP);
assertEquals(NUMBER_OF_CHILDREN_FOR_A_MAP, wmc.size());
Label label1 = new Label("label1", "Label1");
wmc.add(label1);
assertTrue(iterator.hasNext());
takeNChildren(iterator, NUMBER_OF_CHILDREN_FOR_A_MAP);
assertEquals(label1, iterator.next());
assertFalse(iterator.hasNext());
}
@Test
void noChildAddingNChildrenAfterIteratorAcquiredShouldIterateAndReturnNewChildren2()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
addNChildren(wmc, NUMBER_OF_CHILDREN_FOR_A_MAP);
assertEquals(NUMBER_OF_CHILDREN_FOR_A_MAP, wmc.size());
Iterator<Component> iterator = wmc.iterator();
takeNChildren(iterator, NUMBER_OF_CHILDREN_FOR_A_MAP);
Label label1 = new Label("label1", "Label1");
wmc.add(label1);
assertTrue(iterator.hasNext());
assertEquals(label1, iterator.next());
assertFalse(iterator.hasNext());
}
/*
* Iterator tests
*
* The tests below are specific for testing addition and removal of children while maintaining
* the correct order of iterators without throwing ConcurrentModificationException.
*/
@Test
void noChildAddingAndRemoveChildAfterIteratorAcquiredShouldNotIterate()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
Label label1 = new Label("label1", "Label1");
Iterator<Component> iterator = wmc.iterator();
wmc.add(label1);
wmc.remove(label1);
assertEquals(0, wmc.size());
assertFalse(iterator.hasNext());
}
@Test
void addingNewChildAfterIterationHasStartedShouldIterateNewChild()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
// add one child
addNChildren(wmc, 1);
Iterator<Component> iterator = wmc.iterator();
// iterate
takeNChildren(iterator, 1);
// there are no more children to iterate
assertFalse(iterator.hasNext());
// add the new child
Label newChild = new Label("label1", "Label1");
wmc.add(newChild);
assertEquals(2, wmc.size());
// ensure that the newChild is up next (as it was added)
assertEquals(newChild, iterator.next());
assertFalse(iterator.hasNext());
}
@Test
void replacingTheFirstChildAfterIteratingDoesntIterateTheNewChild()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
Label label1 = new Label("label1", "Label1");
Component label2 = new Label("label2", "Label2");
addNChildren(wmc, NUMBER_OF_CHILDREN_FOR_A_MAP);
wmc.add(label1);
wmc.add(label2);
Iterator<Component> iterator = wmc.iterator();
takeNChildren(iterator, NUMBER_OF_CHILDREN_FOR_A_MAP);
iterator.next();
// replace the first child **after** we already passed the child with the iterator
Label newChild = new Label("label1", "newChild");
wmc.replace(newChild);
// the next child is still label 2
assertSame(label2, iterator.next());
// and the new child is not iterated (was replaced before the current position of the
// iterator).
assertFalse(iterator.hasNext());
}
@Test
void removingComponentsDuringIterationDoesntFail()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
Component label1 = new Label("label1", "Label1");
Component label2 = new Label("label2", "Label2");
Component label3 = new Label("label3", "Label3");
Component label4 = new Label("label4", "Label4");
Component label5 = new Label("label5", "Label5");
wmc.add(label1);
wmc.add(label2);
wmc.add(label3);
wmc.add(label4);
wmc.add(label5);
// start iterating the 5 children
Iterator<Component> iterator = wmc.iterator();
assertSame(label1, iterator.next());
assertSame(label2, iterator.next());
assertSame(label3, iterator.next());
// remove the current, previous and next children
wmc.remove(label3);
wmc.remove(label2);
wmc.remove(label4);
// ensure that the next iterated child is the 5th label
assertSame(label5, iterator.next());
// and that there are no more children to iterate
assertFalse(iterator.hasNext());
}
@Test
void childrenBecomesListWhenMoreThanOneChild() throws Exception
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
addNChildren(wmc, 5);
Field childrenField = MarkupContainer.class.getDeclaredField("children");
childrenField.setAccessible(true);
Object field = childrenField.get(wmc);
assertThat(field).isInstanceOf(List.class);
}
@Test
void childrenListBecomesMapWhenThresholdPassed()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
addNChildren(wmc, NUMBER_OF_CHILDREN_FOR_A_MAP - 1);
assertChildrenType(wmc, List.class);
addNChildren(wmc, 1);
assertChildrenType(wmc, LinkedMap.class);
}
@Test
void childrenBecomesLinkedMapWhenThresholdPassed()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
addNChildren(wmc, NUMBER_OF_CHILDREN_FOR_A_MAP + 1);
assertChildrenType(wmc, LinkedMap.class);
}
@Test
void linkedMapChildrenBecomesChild()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
addNChildren(wmc, NUMBER_OF_CHILDREN_FOR_A_MAP);
wmc.add(new EmptyPanel("panel"));
assertChildrenType(wmc, LinkedMap.class);
Iterator<Component> iterator = wmc.iterator();
removeNChildren(iterator, NUMBER_OF_CHILDREN_FOR_A_MAP);
assertChildrenType(wmc, EmptyPanel.class);
}
@Test
void listChildrenBecomesChild()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
addNChildren(wmc, NUMBER_OF_CHILDREN_FOR_A_MAP - 2);
wmc.add(new EmptyPanel("panel"));
assertChildrenType(wmc, List.class);
Iterator<Component> iterator = wmc.iterator();
removeNChildren(iterator, NUMBER_OF_CHILDREN_FOR_A_MAP - 2);
assertChildrenType(wmc, EmptyPanel.class);
}
@Test
void geenIdee3()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
addNChildren(wmc, NUMBER_OF_CHILDREN_FOR_A_MAP + 1);
Iterator<Component> iterator = wmc.iterator();
removeNChildren(iterator, NUMBER_OF_CHILDREN_FOR_A_MAP);
assertTrue(iterator.hasNext());
assertEquals(1, wmc.size());
iterator.next();
assertFalse(iterator.hasNext());
}
@Test
void noChildAddIterateAndRemoveChildShouldIterateChild()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
Label label1 = new Label("label1", "Label1");
Iterator<Component> iterator = wmc.iterator();
wmc.add(label1);
assertEquals(label1, iterator.next());
wmc.remove(label1);
assertFalse(iterator.hasNext());
}
@Test
void noChildAddIterateAndRemoveAndAddSameChildShouldIterateChildTwice()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
Label label1 = new Label("label1", "Label1");
Iterator<Component> iterator = wmc.iterator();
wmc.add(label1);
assertEquals(label1, iterator.next());
assertFalse(iterator.hasNext());
wmc.remove(label1);
assertFalse(iterator.hasNext());
wmc.add(label1);
assertEquals(label1, iterator.next());
}
@Test
void noChildAddIterateAndRemoveAndAddDifferentChildShouldIterateNewChild()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
Label label1 = new Label("label1", "Label1");
Label label2 = new Label("label1", "Label2");
Iterator<Component> iterator = wmc.iterator();
wmc.add(label1);
assertEquals(label1, iterator.next());
assertFalse(iterator.hasNext());
wmc.remove(label1);
assertFalse(iterator.hasNext());
wmc.add(label2);
assertEquals(label2, iterator.next());
}
@Test
void noChildAddingAndReplaceChildAfterIteratorAcquiredShouldIterateAndReturnNewReplacementChild()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
Label label1 = new Label("label1", "Label1");
Label label2 = new Label("label1", "Label2");
Iterator<Component> iterator = wmc.iterator();
wmc.add(label1);
wmc.replace(label2);
assertTrue(iterator.hasNext());
assertEquals(label2, iterator.next());
assertFalse(iterator.hasNext());
}
@Test
void singleChildIterateOneChild()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
Label label1;
wmc.add(label1 = new Label("label1", "Label1"));
Iterator<Component> iterator = wmc.iterator();
assertTrue(iterator.hasNext());
assertEquals(label1, iterator.next());
assertFalse(iterator.hasNext());
}
@Test
void singleChildShouldAllowReplacingChildAfterIterationHasStarted()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
Component label1 = new Label("label1", "Label1");
Component label2 = new Label("label1", "Label2");
wmc.add(label1);
Iterator<Component> iterator = wmc.iterator();
wmc.replace(label2);
assertTrue(iterator.hasNext());
assertSame(label2, iterator.next());
assertFalse(iterator.hasNext());
}
@Test
void singleChildShouldAllowReplacingVisitedChildButNotRevisitReplacementChild()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
Label label1 = new Label("label1", "Label1");
Label label2 = new Label("label1", "Label2");
wmc.add(label1);
Iterator<Component> iterator = wmc.iterator();
assertTrue(iterator.hasNext());
assertEquals(label1, iterator.next());
wmc.replace(label2);
assertFalse(iterator.hasNext());
}
@Test
void multipleChildIteratorRetainsOrderOfAddition()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
Label label1;
Label label2;
Label label3;
wmc.add(label1 = new Label("label1", "Label1"));
wmc.add(label2 = new Label("label2", "Label2"));
wmc.add(label3 = new Label("label3", "Label3"));
Iterator<Component> iterator = wmc.iterator();
assertEquals(label1, iterator.next());
assertEquals(label2, iterator.next());
assertEquals(label3, iterator.next());
assertFalse(iterator.hasNext());
}
@Test
void iteratorShouldAllowAddingComponentAfterIterationStarted()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
Label label1;
Label label2;
Label label3;
wmc.add(label1 = new Label("label1", "Label1"));
wmc.add(label2 = new Label("label2", "Label2"));
Iterator<Component> iterator = wmc.iterator();
assertEquals(label1, iterator.next());
assertEquals(label2, iterator.next());
wmc.add(label3 = new Label("label3", "Label3"));
assertEquals(label3, iterator.next());
assertFalse(iterator.hasNext());
}
@Test
void iteratorShouldAllowRemovingComponentAfterIterationStarted0()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
Label label1;
Label label2;
Label label3;
wmc.add(label1 = new Label("label1", "Label1"));
wmc.add(label2 = new Label("label2", "Label2"));
wmc.add(label3 = new Label("label3", "Label3"));
Iterator<Component> iterator = wmc.iterator();
wmc.remove(label1);
assertEquals(label2, iterator.next());
assertEquals(label3, iterator.next());
assertFalse(iterator.hasNext());
}
@Test
void iteratorShouldAllowRemovingComponentAfterIterationStarted1()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
Label label1 = new Label("label1", "Label1");
Label label2 = new Label("label2", "Label2");
Label label3 = new Label("label3", "Label3");
wmc.add(label1);
wmc.add(label2);
wmc.add(label3);
Iterator<Component> iterator = wmc.iterator();
assertEquals(label1, iterator.next());
wmc.remove(label1);
assertEquals(label2, iterator.next());
assertEquals(label3, iterator.next());
assertFalse(iterator.hasNext());
}
@Test
void iteratorShouldAllowRemovingComponentAfterIterationStarted2()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
Label label1;
Label label2;
Label label3;
wmc.add(label1 = new Label("label1", "Label1"));
wmc.add(label2 = new Label("label2", "Label2"));
wmc.add(label3 = new Label("label3", "Label3"));
Iterator<Component> iterator = wmc.iterator();
assertEquals(label1, iterator.next());
assertEquals(label2, iterator.next());
wmc.remove(label1);
assertEquals(label3, iterator.next());
assertFalse(iterator.hasNext());
}
@Test
void iteratorShouldAllowRemovingComponentAfterIterationStarted3()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
Label label1;
Label label2;
Label label3;
wmc.add(label1 = new Label("label1", "Label1"));
wmc.add(label2 = new Label("label2", "Label2"));
wmc.add(label3 = new Label("label3", "Label3"));
Iterator<Component> iterator = wmc.iterator();
assertEquals(label1, iterator.next());
assertEquals(label2, iterator.next());
assertEquals(label3, iterator.next());
wmc.remove(label1);
assertFalse(iterator.hasNext());
}
@Test
void iteratorShouldAllowReplacingComponentAfterIterationStarted0()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
Label label2;
Label label3;
wmc.add(new Label("label1", "Label1"));
wmc.add(label2 = new Label("label2", "Label2"));
Iterator<Component> iterator = wmc.iterator();
wmc.replace(label3 = new Label("label1", "Label3"));
assertEquals(label3, iterator.next());
assertEquals(label2, iterator.next());
assertFalse(iterator.hasNext());
}
@Test
void iteratorShouldAllowReplacingComponentAfterIterationStarted1()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
Label label2;
Label label3;
wmc.add(new Label("label1", "Label1"));
wmc.add(label2 = new Label("label2", "Label2"));
Iterator<Component> iterator = wmc.iterator();
wmc.replace(label3 = new Label("label1", "Label3"));
assertEquals(label3, iterator.next());
assertEquals(label2, iterator.next());
assertFalse(iterator.hasNext());
}
@Test
void iteratorShouldAllowReplacingComponentAfterIterationStarted()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
Label label1;
Label label2;
wmc.add(label1 = new Label("label1", "Label1"));
wmc.add(label2 = new Label("label2", "Label2"));
Iterator<Component> iterator = wmc.iterator();
assertEquals(label1, iterator.next());
assertEquals(label2, iterator.next());
wmc.replace(new Label("label1", "Label3"));
assertFalse(iterator.hasNext());
}
@Test
void iteratorShouldAllowReplacingComponentAfterIterationStarted24()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
Label label1;
Label label3;
wmc.add(label1 = new Label("label1", "Label1"));
wmc.add(new Label("label2", "Label2"));
addNChildren(wmc, NUMBER_OF_CHILDREN_FOR_A_MAP);
Iterator<Component> iterator = wmc.iterator();
assertEquals(label1, iterator.next());
wmc.replace(label3 = new Label("label2", "Label3"));
assertEquals(label3, iterator.next());
takeNChildren(iterator, NUMBER_OF_CHILDREN_FOR_A_MAP);
assertFalse(iterator.hasNext());
}
@Test
void noChildLeftBehindRemoveEach()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
addNChildren(wmc, NUMBER_OF_CHILDREN_FOR_A_MAP);
Iterator<Component> iterator = wmc.iterator();
while (iterator.hasNext())
{
iterator.next();
iterator.remove();
}
assertEquals(0, wmc.size());
}
@Test
void noChildLeftBehindRemoveAll()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
addNChildren(wmc, NUMBER_OF_CHILDREN_FOR_A_MAP);
Iterator<Component> iterator = wmc.iterator();
wmc.removeAll();
assertEquals(0, wmc.size());
assertFalse(iterator.hasNext());
}
@Test
void noChildLeftBehindRemoveAll2()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
addNChildren(wmc, NUMBER_OF_CHILDREN_FOR_A_MAP);
Iterator<Component> iterator = wmc.iterator();
iterator.next();
wmc.removeAll();
assertEquals(0, wmc.size());
assertFalse(iterator.hasNext());
}
@Test
void ensureSerializationDeserializationWorks()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
Iterator<Component> iterator = wmc.iterator();
addNChildren(wmc, NUMBER_OF_CHILDREN_FOR_A_MAP);
assertEquals(NUMBER_OF_CHILDREN_FOR_A_MAP, wmc.size());
assertNotNull(WicketObjects.cloneObject(wmc));
removeNChildren(iterator, 1);
assertEquals(NUMBER_OF_CHILDREN_FOR_A_MAP - 1, wmc.size());
assertNotNull(WicketObjects.cloneObject(wmc));
removeNChildren(iterator, NUMBER_OF_CHILDREN_FOR_A_MAP - 2);
assertNotNull(WicketObjects.cloneObject(wmc));
assertEquals(1, wmc.size());
removeNChildren(iterator, 1);
assertEquals(0, wmc.size());
assertNotNull(WicketObjects.cloneObject(wmc));
}
@Test
void detachDuringIterationWorks()
{
int halfOfChildren = NUMBER_OF_CHILDREN_FOR_A_MAP / 2;
int numberOfRemainingChildren = halfOfChildren + NUMBER_OF_CHILDREN_FOR_A_MAP % 2;
WebMarkupContainer wmc = new WebMarkupContainer("id");
Iterator<Component> iterator = wmc.iterator();
addNChildren(wmc, NUMBER_OF_CHILDREN_FOR_A_MAP);
takeNChildren(iterator, halfOfChildren);
wmc.detach();
takeNChildren(iterator, numberOfRemainingChildren);
assertFalse(iterator.hasNext());
}
@Test
void detachDuringIterationWithRemovalsSucceeds()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
Iterator<Component> iterator = wmc.iterator();
addNChildren(wmc, 2);
removeNChildren(iterator, 1);
wmc.detach();
takeNChildren(iterator, 1);
assertFalse(iterator.hasNext());
assertEquals(1, wmc.size());
}
/**
 * Tests whether two iterators being used simultaneously keep correct track of where they are.
*/
@Test
void twoIteratorsWorkInTandem()
{
int n = NUMBER_OF_CHILDREN_FOR_A_MAP * 2;
WebMarkupContainer wmc = new WebMarkupContainer("id");
addNChildren(wmc, n);
Iterator<Component> iterator1 = wmc.iterator();
Iterator<Component> iterator2 = wmc.iterator();
Random r = new Random();
for (int i = 0; i < n; i++)
{
if (r.nextBoolean())
{
iterator1.next();
iterator1.remove();
}
else
{
iterator2.next();
iterator2.remove();
}
}
// after 2*N removals there should not be any child left
assertFalse(iterator1.hasNext());
assertFalse(iterator2.hasNext());
}
/**
 * Tests that removing a child while an iterator is active, followed by a detach, still leaves
 * the iterator in a correct state.
*/
@Test
void detachWithOneIteratorOneChild()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
addNChildren(wmc, 1);
Iterator<Component> iterator1 = wmc.iterator();
iterator1.next();
iterator1.remove();
wmc.detach();
assertFalse(iterator1.hasNext());
}
/**
 * Tests that removing and adding a component while an iterator is active, followed by a detach,
 * still leaves the iterator in a correct state.
*/
@Test
void detachWithOneIteratorOneChildRemovedAndAdded()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
addNChildren(wmc, 1);
Iterator<Component> iterator1 = wmc.iterator();
iterator1.next();
iterator1.remove();
addNChildren(wmc, 1);
assertTrue(iterator1.hasNext());
wmc.detach();
assertTrue(iterator1.hasNext());
assertNotNull(iterator1.next());
}
/**
 * Tests that when one child is removed from a list, the iterator still works after a detach.
*/
@Test
void detachWithOneIteratorTwoChildren()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
addNChildren(wmc, 2);
Iterator<Component> iterator1 = wmc.iterator();
iterator1.next();
iterator1.remove();
assertTrue(iterator1.hasNext());
wmc.detach();
assertTrue(iterator1.hasNext());
assertNotNull(iterator1.next());
}
/**
 * Tests that when the children are stored in a list, removal and iteration still work after a detach.
*/
@Test
void detachWithOneIteratorWithListForChildren()
{
WebMarkupContainer wmc = new WebMarkupContainer("id");
addNChildren(wmc, NUMBER_OF_CHILDREN_FOR_A_MAP - 2);
assertChildrenType(wmc, List.class);
Iterator<Component> iterator = wmc.iterator();
takeNChildren(iterator, 1);
removeNChildren(iterator, 1);
wmc.detach();
takeNChildren(iterator, NUMBER_OF_CHILDREN_FOR_A_MAP - 4);
assertFalse(iterator.hasNext());
}
/**
 * Tests that when the children are stored in a map, removal and iteration still work after a detach.
*/
@Test
void detachWithOneIteratorsWithMapForChildren()
{
int n = NUMBER_OF_CHILDREN_FOR_A_MAP * 2;
WebMarkupContainer wmc = new WebMarkupContainer("id");
addNChildren(wmc, n);
Iterator<Component> iterator1 = wmc.iterator();
for (int i = 0; i < NUMBER_OF_CHILDREN_FOR_A_MAP; i++)
{
iterator1.next();
iterator1.remove();
}
wmc.detach();
for (int i = 0; i < NUMBER_OF_CHILDREN_FOR_A_MAP; i++)
{
iterator1.next();
iterator1.remove();
}
assertFalse(iterator1.hasNext());
}
@Test
void detachWithTwoIteratorsAndRemovals()
{
int n = NUMBER_OF_CHILDREN_FOR_A_MAP * 2;
WebMarkupContainer wmc = new WebMarkupContainer("id");
addNChildren(wmc, n);
Iterator<Component> iterator1 = wmc.iterator();
Iterator<Component> iterator2 = wmc.iterator();
Random r = new Random();
for (int i = 0; i < NUMBER_OF_CHILDREN_FOR_A_MAP; i++)
{
if (r.nextBoolean())
{
iterator1.next();
iterator1.remove();
}
else
{
iterator2.next();
iterator2.remove();
}
}
wmc.detach();
iterator1.next();
iterator1.remove();
iterator2.next();
}
@Test
void detachWithTwoIteratorsAndRemovals2()
{
int n = NUMBER_OF_CHILDREN_FOR_A_MAP * 2;
WebMarkupContainer wmc = new WebMarkupContainer("id");
addNChildren(wmc, n);
Iterator<Component> iterator1 = wmc.iterator();
Iterator<Component> iterator2 = wmc.iterator();
Random r = new Random();
for (int i = 0; i < NUMBER_OF_CHILDREN_FOR_A_MAP; i++)
{
Iterator<Component> iterator = r.nextBoolean() ? iterator1 : iterator2;
if (iterator.hasNext())
{
iterator.next();
iterator.remove();
}
}
wmc.detach();
iterator1.next();
iterator2.next();
iterator1.remove();
while (iterator1.hasNext() || iterator2.hasNext())
{
Iterator<Component> iterator = r.nextBoolean() ? iterator1 : iterator2;
if (iterator.hasNext())
{
iterator.next();
iterator.remove();
}
}
assertFalse(iterator1.hasNext());
assertFalse(iterator2.hasNext());
}
/**
* Asserts that the children property of the {@code wmc} is of a particular {@code type}.
*
* @param wmc
* the web markup container whose children property is to be checked
* @param type
* the expected type
*/
private void assertChildrenType(WebMarkupContainer wmc, Class<?> type)
{
try
{
Field childrenField = MarkupContainer.class.getDeclaredField("children");
childrenField.setAccessible(true);
Object field = childrenField.get(wmc);
assertThat(field).isInstanceOf(type);
}
catch (Exception e)
{
throw new AssertionError("Unable to read children", e);
}
}
/**
* Adds {@code numberOfChildrenToAdd} anonymous children to the {@code parent}.
*
* @param parent
* the parent to add the children to
* @param numberOfChildrenToAdd
* the number of children
*/
private void addNChildren(WebMarkupContainer parent, int numberOfChildrenToAdd)
{
assertThat(numberOfChildrenToAdd).isGreaterThanOrEqualTo(0);
int start = parent.size();
for (int i = 0; i < numberOfChildrenToAdd; i++)
{
int index = start + i;
parent.add(new Label("padding" + index, "padding" + index));
}
}
/**
* Removes {@code numberOfChildrenToRemove} anonymous children from the parent using the
* {@code iterator}.
*
* @param iterator
* the iterator to remove the children with
* @param numberOfChildrenToRemove
* the number of children
*/
private void removeNChildren(Iterator<Component> iterator, int numberOfChildrenToRemove)
{
for (int i = 0; i < numberOfChildrenToRemove; i++)
{
iterator.next();
iterator.remove();
}
}
/**
 * Advances the {@code iterator} past {@code numberOfChildrenToTake} anonymous children.
*
* @param iterator
* the iterator to progress
* @param numberOfChildrenToTake
* the number of children
*/
private void takeNChildren(Iterator<Component> iterator, int numberOfChildrenToTake)
{
for (int i = 0; i < numberOfChildrenToTake; i++)
iterator.next();
}
@Test
void stream()
{
LoginPage loginPage = new LoginPage();
Optional<Component> first = loginPage.stream()
.filter(c -> c.getId().equals("form"))
.findFirst();
assertFalse(first.isPresent());
loginPage.add(new Form<>("form"));
Optional<Component> second = loginPage.stream()
.filter(c -> c.getId().equals("form"))
.findFirst();
assertTrue(second.isPresent());
loginPage.add(new WebMarkupContainer("wmc"));
Optional<Form> form = loginPage.stream()
.filter(Form.class::isInstance)
.map(Form.class::cast)
.findFirst();
assertTrue(form.isPresent());
Optional<WebMarkupContainer> wmc = loginPage.stream()
.filter(WebMarkupContainer.class::isInstance)
.map(WebMarkupContainer.class::cast)
.findFirst();
assertTrue(wmc.isPresent());
}
@Test
void streamChildren()
{
LoginPage loginPage = new LoginPage();
Optional<Component> first = loginPage.stream()
.filter(c -> c.getId().equals("form"))
.findFirst();
assertFalse(first.isPresent());
Form<Object> form = new Form<>("form");
loginPage.add(form);
form.add(new TextField<>("field"));
assertTrue(loginPage.streamChildren()
.anyMatch(c -> c.getId().equals("form")));
assertTrue(loginPage.streamChildren()
.anyMatch(c -> c.getId().equals("field")));
assertTrue(loginPage.streamChildren()
.filter(TextField.class::isInstance)
.anyMatch(c -> c.getId().equals("field")));
}
// https://issues.apache.org/jira/browse/WICKET-6754
@Test
void streamChildrenNestedContainer() {
WebMarkupContainer wmc = new WebMarkupContainer("parent");
WebMarkupContainer wmc1 = new WebMarkupContainer("wmc1");
wmc.add(wmc1);
WebMarkupContainer wmc1_2= new WebMarkupContainer("wmc1_2");
wmc1.add(wmc1_2);
Label lbl2 = new Label("lbl2");
wmc.add(lbl2);
List l = wmc.streamChildren().map(Component::getId).collect(Collectors.toList());
assertEquals("[wmc1, wmc1_2, lbl2]", l.toString());
}
private static class HierarchyChangePage extends WebPage
implements
IMarkupResourceStreamProvider
{
@Override
protected void onRender()
{
// change hierarchy during render
add(new Label("child"));
}
@Override
public IResourceStream getMarkupResourceStream(MarkupContainer container,
Class<?> containerClass)
{
return new StringResourceStream("<html><body></body></html>");
}
}
private static class ToStringComponent extends WebMarkupContainer
{
private ToStringComponent()
{
super("id");
toString(true);
}
}
private static class AfterRenderJustOncePage extends WebPage
implements
IMarkupResourceStreamProvider
{
private int afterRenderCalls = 0;
private AfterRenderJustOncePage()
{
WebMarkupContainer a1 = new WebMarkupContainer("a1");
add(a1);
WebMarkupContainer a2 = new WebMarkupContainer("a2");
a1.add(a2);
WebMarkupContainer a3 = new WebMarkupContainer("a3")
{
@Override
protected void onAfterRender()
{
super.onAfterRender();
afterRenderCalls++;
}
};
a2.add(a3);
}
@Override
public IResourceStream getMarkupResourceStream(MarkupContainer container,
Class<?> containerClass)
{
return new StringResourceStream(
"<html><body><div wicket:id='a1'><div wicket:id='a2'><div wicket:id='a3'></div></div></div></body></html>");
}
}
private static class FirstRenderFailsPage extends WebPage
implements
IMarkupResourceStreamProvider
{
private boolean firstRender = true;
private int beforeRenderCalls = 0;
private FirstRenderFailsPage()
{
WebMarkupContainer a1 = new WebMarkupContainer("a1")
{
@Override
protected void onBeforeRender()
{
super.onBeforeRender();
beforeRenderCalls++;
if (firstRender)
{
firstRender = false;
throw new WicketRuntimeException();
}
}
};
add(a1);
}
@Override
public IResourceStream getMarkupResourceStream(MarkupContainer container,
Class<?> containerClass)
{
return new StringResourceStream("<html><body><div wicket:id='a1'></div></body></html>");
}
}
}
|
|
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver13;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import com.google.common.collect.ImmutableList;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
class OFPacketQueueVer13 implements OFPacketQueue {
private static final Logger logger = LoggerFactory.getLogger(OFPacketQueueVer13.class);
// version: 1.3
final static byte WIRE_VERSION = 4;
final static int MINIMUM_LENGTH = 16;
// maximum OF message length: 16 bit, unsigned
final static int MAXIMUM_LENGTH = 0xFFFF;
private final static long DEFAULT_QUEUE_ID = 0x0L;
private final static OFPort DEFAULT_PORT = OFPort.ANY;
private final static List<OFQueueProp> DEFAULT_PROPERTIES = ImmutableList.<OFQueueProp>of();
// OF message fields
private final long queueId;
private final OFPort port;
private final List<OFQueueProp> properties;
//
// Immutable default instance
final static OFPacketQueueVer13 DEFAULT = new OFPacketQueueVer13(
DEFAULT_QUEUE_ID, DEFAULT_PORT, DEFAULT_PROPERTIES
);
// package private constructor - used by readers, builders, and factory
OFPacketQueueVer13(long queueId, OFPort port, List<OFQueueProp> properties) {
if(port == null) {
throw new NullPointerException("OFPacketQueueVer13: property port cannot be null");
}
if(properties == null) {
throw new NullPointerException("OFPacketQueueVer13: property properties cannot be null");
}
this.queueId = U32.normalize(queueId);
this.port = port;
this.properties = properties;
}
// Accessors for OF message fields
@Override
public long getQueueId() {
return queueId;
}
@Override
public List<OFQueueProp> getProperties() {
return properties;
}
@Override
public OFPort getPort() {
return port;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_13;
}
public OFPacketQueue.Builder createBuilder() {
return new BuilderWithParent(this);
}
static class BuilderWithParent implements OFPacketQueue.Builder {
final OFPacketQueueVer13 parentMessage;
// OF message fields
private boolean queueIdSet;
private long queueId;
private boolean portSet;
private OFPort port;
private boolean propertiesSet;
private List<OFQueueProp> properties;
BuilderWithParent(OFPacketQueueVer13 parentMessage) {
this.parentMessage = parentMessage;
}
@Override
public long getQueueId() {
return queueId;
}
@Override
public OFPacketQueue.Builder setQueueId(long queueId) {
this.queueId = queueId;
this.queueIdSet = true;
return this;
}
@Override
public List<OFQueueProp> getProperties() {
return properties;
}
@Override
public OFPacketQueue.Builder setProperties(List<OFQueueProp> properties) {
this.properties = properties;
this.propertiesSet = true;
return this;
}
@Override
public OFPort getPort() {
return port;
}
@Override
public OFPacketQueue.Builder setPort(OFPort port) {
this.port = port;
this.portSet = true;
return this;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_13;
}
@Override
public OFPacketQueue build() {
long queueId = this.queueIdSet ? this.queueId : parentMessage.queueId;
OFPort port = this.portSet ? this.port : parentMessage.port;
if(port == null)
throw new NullPointerException("Property port must not be null");
List<OFQueueProp> properties = this.propertiesSet ? this.properties : parentMessage.properties;
if(properties == null)
throw new NullPointerException("Property properties must not be null");
//
return new OFPacketQueueVer13(
queueId,
port,
properties
);
}
}
static class Builder implements OFPacketQueue.Builder {
// OF message fields
private boolean queueIdSet;
private long queueId;
private boolean portSet;
private OFPort port;
private boolean propertiesSet;
private List<OFQueueProp> properties;
@Override
public long getQueueId() {
return queueId;
}
@Override
public OFPacketQueue.Builder setQueueId(long queueId) {
this.queueId = queueId;
this.queueIdSet = true;
return this;
}
@Override
public List<OFQueueProp> getProperties() {
return properties;
}
@Override
public OFPacketQueue.Builder setProperties(List<OFQueueProp> properties) {
this.properties = properties;
this.propertiesSet = true;
return this;
}
@Override
public OFPort getPort() {
return port;
}
@Override
public OFPacketQueue.Builder setPort(OFPort port) {
this.port = port;
this.portSet = true;
return this;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_13;
}
//
@Override
public OFPacketQueue build() {
long queueId = this.queueIdSet ? this.queueId : DEFAULT_QUEUE_ID;
OFPort port = this.portSet ? this.port : DEFAULT_PORT;
if(port == null)
throw new NullPointerException("Property port must not be null");
List<OFQueueProp> properties = this.propertiesSet ? this.properties : DEFAULT_PROPERTIES;
if(properties == null)
throw new NullPointerException("Property properties must not be null");
return new OFPacketQueueVer13(
queueId,
port,
properties
);
}
}
final static Reader READER = new Reader();
static class Reader implements OFMessageReader<OFPacketQueue> {
@Override
public OFPacketQueue readFrom(ByteBuf bb) throws OFParseError {
int start = bb.readerIndex();
long queueId = U32.f(bb.readInt());
OFPort port = OFPort.read4Bytes(bb);
int length = U16.f(bb.readShort());
if(length < MINIMUM_LENGTH)
throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length);
if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
// Buffer does not have all data yet
bb.readerIndex(start);
return null;
}
if(logger.isTraceEnabled())
logger.trace("readFrom - length={}", length);
// pad: 6 bytes
bb.skipBytes(6);
List<OFQueueProp> properties = ChannelUtils.readList(bb, length - (bb.readerIndex() - start), OFQueuePropVer13.READER);
OFPacketQueueVer13 packetQueueVer13 = new OFPacketQueueVer13(
queueId,
port,
properties
);
if(logger.isTraceEnabled())
logger.trace("readFrom - read={}", packetQueueVer13);
return packetQueueVer13;
}
}
public void putTo(PrimitiveSink sink) {
FUNNEL.funnel(this, sink);
}
final static OFPacketQueueVer13Funnel FUNNEL = new OFPacketQueueVer13Funnel();
static class OFPacketQueueVer13Funnel implements Funnel<OFPacketQueueVer13> {
private static final long serialVersionUID = 1L;
@Override
public void funnel(OFPacketQueueVer13 message, PrimitiveSink sink) {
sink.putLong(message.queueId);
message.port.putTo(sink);
// FIXME: skip funnel of length
// skip pad (6 bytes)
FunnelUtils.putList(message.properties, sink);
}
}
public void writeTo(ByteBuf bb) {
WRITER.write(bb, this);
}
final static Writer WRITER = new Writer();
static class Writer implements OFMessageWriter<OFPacketQueueVer13> {
@Override
public void write(ByteBuf bb, OFPacketQueueVer13 message) {
int startIndex = bb.writerIndex();
bb.writeInt(U32.t(message.queueId));
message.port.write4Bytes(bb);
// length is length of variable message, will be updated at the end
int lengthIndex = bb.writerIndex();
bb.writeShort(U16.t(0));
// pad: 6 bytes
bb.writeZero(6);
ChannelUtils.writeList(bb, message.properties);
// update length field
int length = bb.writerIndex() - startIndex;
if (length > MAXIMUM_LENGTH) {
throw new IllegalArgumentException("OFPacketQueueVer13: message length (" + length + ") exceeds maximum (0xFFFF)");
}
bb.setShort(lengthIndex, length);
}
}
@Override
public String toString() {
StringBuilder b = new StringBuilder("OFPacketQueueVer13(");
b.append("queueId=").append(queueId);
b.append(", ");
b.append("port=").append(port);
b.append(", ");
b.append("properties=").append(properties);
b.append(")");
return b.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
OFPacketQueueVer13 other = (OFPacketQueueVer13) obj;
if( queueId != other.queueId)
return false;
if (port == null) {
if (other.port != null)
return false;
} else if (!port.equals(other.port))
return false;
if (properties == null) {
if (other.properties != null)
return false;
} else if (!properties.equals(other.properties))
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (int) (queueId ^ (queueId >>> 32));
result = prime * result + ((port == null) ? 0 : port.hashCode());
result = prime * result + ((properties == null) ? 0 : properties.hashCode());
return result;
}
}
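// Minimal usage sketch (not part of the generated LoxiGen output): builds a queue via the
// package-private Builder, writes it to a ByteBuf and parses it back with the READER defined
// above. It assumes io.netty.buffer.Unpooled is available alongside the already-imported
// ByteBuf; everything else is taken from this file.
class OFPacketQueueVer13UsageSketch {
    public static void main(String[] args) throws OFParseError {
        // queueId is set explicitly; port and properties keep their documented defaults
        OFPacketQueueVer13 queue = (OFPacketQueueVer13) new OFPacketQueueVer13.Builder()
            .setQueueId(7)
            .setPort(OFPort.ANY)
            .build();
        // the writer fills in the length field and the 6 padding bytes
        ByteBuf buf = io.netty.buffer.Unpooled.buffer();
        queue.writeTo(buf);
        // the reader reconstructs an equal object from the wire representation
        OFPacketQueue parsed = OFPacketQueueVer13.READER.readFrom(buf);
        System.out.println(queue.equals(parsed)); // expected: true
    }
}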
|
|
package org.ajwcc.pduUtils.gsm3040.ie;
//PduUtils Library - A Java library for generating GSM 3040 Protocol Data Units (PDUs)
//
//Copyright (C) 2008, Ateneo Java Wireless Competency Center/Blueblade Technologies, Philippines.
//PduUtils is distributed under the terms of the Apache License version 2.0
//
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
public class ConcatInformationElement extends InformationElement
{
private static final int CONCAT_IE_LENGTH_16BIT = 6;
private static final int CONCAT_IE_LENGTH_8BIT = 5;
public static final int CONCAT_8BIT_REF = 0x00;
public static final int CONCAT_16BIT_REF = 0x08;
private static int defaultConcatType = CONCAT_8BIT_REF;
private static int defaultConcatLength = CONCAT_IE_LENGTH_8BIT;
public static int getDefaultConcatLength()
{
return defaultConcatLength;
}
public static int getDefaultConcatType()
{
return defaultConcatType;
}
public static void setDefaultConcatType(int identifier)
{
switch (identifier)
{
case CONCAT_8BIT_REF:
defaultConcatType = CONCAT_8BIT_REF;
defaultConcatLength = CONCAT_IE_LENGTH_8BIT;
break;
case CONCAT_16BIT_REF:
defaultConcatType = CONCAT_16BIT_REF;
defaultConcatLength = CONCAT_IE_LENGTH_16BIT;
break;
default:
throw new RuntimeException("Invalid Concat type");
}
}
ConcatInformationElement(byte identifier, byte[] data)
{
super(identifier, data);
if (getIdentifier() == CONCAT_8BIT_REF)
{
// iei
// iel
// ref
// max
// seq
if (data.length != 3) { throw new RuntimeException("Invalid data length in: " + getClass().getSimpleName()); }
}
else if (getIdentifier() == CONCAT_16BIT_REF)
{
// iei
// iel
// ref(2 bytes)
// max
// seq
if (data.length != 4) { throw new RuntimeException("Invalid data length in: " + getClass().getSimpleName()); }
}
else
{
throw new RuntimeException("Invalid identifier in data in: " + getClass().getSimpleName());
}
validate();
}
ConcatInformationElement(int identifier, int mpRefNo, int mpMaxNo, int mpSeqNo)
{
super();
byte[] data = null;
switch (identifier)
{
case CONCAT_8BIT_REF:
data = new byte[3];
data[0] = (byte) (mpRefNo & 0xFF);
data[1] = (byte) (mpMaxNo & 0xFF);
data[2] = (byte) (mpSeqNo & 0xFF);
break;
case CONCAT_16BIT_REF:
data = new byte[4];
data[0] = (byte) ((mpRefNo & 0xFF00) >>> 8);
data[1] = (byte) (mpRefNo & 0xFF);
data[2] = (byte) (mpMaxNo & 0xFF);
data[3] = (byte) (mpSeqNo & 0xFF);
break;
default:
throw new RuntimeException("Invalid identifier for " + getClass().getSimpleName());
}
initialize((byte) (identifier & 0xFF), data);
validate();
}
public int getMpRefNo()
{
// this is 8-bit in 0x00 and 16-bit in 0x08
byte[] data = getData();
if (getIdentifier() == CONCAT_8BIT_REF)
{
return (data[0] & (0xFF));
}
// mask both bytes before combining so that sign extension of data[1] cannot corrupt the high byte
else if (getIdentifier() == CONCAT_16BIT_REF) { return (((data[0] & 0xFF) << 8) | (data[1] & 0xFF)); }
throw new RuntimeException("Invalid identifier");
}
public void setMpRefNo(int mpRefNo)
{
// this is 8-bit in 0x00 and 16-bit in 0x08
byte[] data = getData();
if (getIdentifier() == CONCAT_8BIT_REF)
{
data[0] = (byte) (mpRefNo & (0xFF));
}
else if (getIdentifier() == CONCAT_16BIT_REF)
{
data[0] = (byte) ((mpRefNo >>> 8) & (0xFF));
data[1] = (byte) ((mpRefNo) & (0xFF));
}
else
{
throw new RuntimeException("Invalid identifier");
}
}
public int getMpMaxNo()
{
byte[] data = getData();
if (getIdentifier() == CONCAT_8BIT_REF)
{
return (data[1] & (0xFF));
}
else if (getIdentifier() == CONCAT_16BIT_REF) { return (data[2] & (0xFF)); }
throw new RuntimeException("Invalid identifier");
}
public void setMpMaxNo(int mpMaxNo)
{
byte[] data = getData();
if (getIdentifier() == CONCAT_8BIT_REF)
{
data[1] = (byte) (mpMaxNo & 0xFF);
}
else if (getIdentifier() == CONCAT_16BIT_REF)
{
data[2] = (byte) (mpMaxNo & 0xFF);
}
else
{
throw new RuntimeException("Invalid identifier");
}
}
public int getMpSeqNo()
{
byte[] data = getData();
if (getIdentifier() == CONCAT_8BIT_REF)
{
return (data[2] & (0xFF));
}
else if (getIdentifier() == CONCAT_16BIT_REF) { return (data[3] & (0xFF)); }
throw new RuntimeException("Invalid identifier");
}
public void setMpSeqNo(int mpSeqNo)
{
byte[] data = getData();
if (getIdentifier() == CONCAT_8BIT_REF)
{
data[2] = (byte) (mpSeqNo & (0xFF));
}
else if (getIdentifier() == CONCAT_16BIT_REF)
{
data[3] = (byte) (mpSeqNo & (0xFF));
}
else
{
throw new RuntimeException("Invalid identifier");
}
}
@Override
public String toString()
{
StringBuffer sb = new StringBuffer();
sb.append(super.toString());
sb.append("[MpRefNo: ");
sb.append(getMpRefNo());
sb.append(", MpMaxNo: ");
sb.append(getMpMaxNo());
sb.append(", MpSeqNo: ");
sb.append(getMpSeqNo());
sb.append("]");
return sb.toString();
}
private void validate()
{
if (getMpMaxNo() == 0) { throw new RuntimeException("mpMaxNo must be > 0"); }
if (getMpSeqNo() == 0) { throw new RuntimeException("mpSeqNo must be > 0"); }
}
}
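// Minimal usage sketch (not part of the original PduUtils source): shows how the 8-bit and
// 16-bit concatenation information elements defined above carry the reference/max/sequence
// numbers. The package-private constructors are reachable because this sketch lives in the
// same package.
class ConcatInformationElementUsageSketch {
    public static void main(String[] args) {
        // part 2 of 3 of a long message, 16-bit reference number 0x1234
        ConcatInformationElement ie16 = new ConcatInformationElement(
                ConcatInformationElement.CONCAT_16BIT_REF, 0x1234, 3, 2);
        System.out.println(ie16.getMpRefNo()); // expected: 4660 (0x1234)
        System.out.println(ie16.getMpMaxNo()); // expected: 3
        System.out.println(ie16.getMpSeqNo()); // expected: 2
        // the 8-bit variant stores the reference number in a single byte
        ConcatInformationElement ie8 = new ConcatInformationElement(
                ConcatInformationElement.CONCAT_8BIT_REF, 0xAB, 3, 1);
        System.out.println(ie8.getMpRefNo()); // expected: 171 (0xAB)
    }
}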
|
|
package com.sinnerschrader.aem.react;
import java.io.Reader;
import java.util.Arrays;
import java.util.NoSuchElementException;
import javax.script.Bindings;
import javax.script.ScriptContext;
import javax.script.ScriptEngineFactory;
import javax.script.ScriptException;
import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.commons.pool2.ObjectPool;
import org.apache.jackrabbit.util.Text;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.SlingHttpServletResponse;
import org.apache.sling.api.resource.Resource;
import org.apache.sling.api.scripting.SlingBindings;
import org.apache.sling.commons.classloader.DynamicClassLoaderManager;
import org.apache.sling.commons.json.JSONException;
import org.apache.sling.commons.json.JSONObject;
import org.apache.sling.commons.json.sling.JsonObjectCreator;
import org.apache.sling.scripting.api.AbstractSlingScriptEngine;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.day.cq.wcm.api.WCMMode;
import com.fasterxml.jackson.core.JsonGenerator.Feature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.sinnerschrader.aem.react.api.Cqx;
import com.sinnerschrader.aem.react.api.ModelFactory;
import com.sinnerschrader.aem.react.api.OsgiServiceFinder;
import com.sinnerschrader.aem.react.api.Sling;
import com.sinnerschrader.aem.react.exception.TechnicalException;
public class ReactScriptEngine extends AbstractSlingScriptEngine {
public interface Command {
public Object execute(JavascriptEngine e);
}
private static final String SERVER_RENDERING_DISABLED = "disabled";
private static final String SERVER_RENDERING_PARAM = "serverRendering";
private static final Logger LOG = LoggerFactory.getLogger(ReactScriptEngine.class);
private ObjectPool<JavascriptEngine> enginePool;
private boolean reloadScripts;
private ObjectMapper mapper;
private OsgiServiceFinder finder;
private DynamicClassLoaderManager dynamicClassLoaderManager;
/**
* This class is the result of rendering a react component(-tree). It consists
* of html and cache.
*
* @author stemey
*
*/
public static class RenderResult {
public String html;
public String cache;
}
protected ReactScriptEngine(ScriptEngineFactory scriptEngineFactory, ObjectPool<JavascriptEngine> enginePool, boolean reloadScripts, OsgiServiceFinder finder,
DynamicClassLoaderManager dynamicClassLoaderManager) {
super(scriptEngineFactory);
this.mapper = new ObjectMapper();
mapper.configure(Feature.IGNORE_UNKNOWN, true);
this.enginePool = enginePool;
this.reloadScripts = reloadScripts;
this.finder = finder;
this.dynamicClassLoaderManager = dynamicClassLoaderManager;
}
@Override
public Object eval(Reader reader, ScriptContext scriptContext) throws ScriptException {
ClassLoader old = Thread.currentThread().getContextClassLoader();
try {
Thread.currentThread().setContextClassLoader(((ReactScriptEngineFactory) getFactory()).getClassLoader());
Bindings bindings = scriptContext.getBindings(ScriptContext.ENGINE_SCOPE);
SlingHttpServletRequest request = (SlingHttpServletRequest) bindings.get(SlingBindings.REQUEST);
SlingHttpServletResponse response = (SlingHttpServletResponse) bindings.get(SlingBindings.RESPONSE);
boolean renderAsJson = Arrays.asList(request.getRequestPathInfo().getSelectors()).indexOf("json") >= 0;
Resource resource = request.getResource();
boolean dialog = request.getAttribute(Sling.ATTRIBUTE_AEM_REACT_DIALOG) != null;
if (dialog) {
// just rendering to get the wrapper element and author mode js
scriptContext.getWriter().write("");
return null;
}
String renderedHtml;
boolean serverRendering = !SERVER_RENDERING_DISABLED.equals(request.getParameter(SERVER_RENDERING_PARAM));
String cacheString = null;
if (serverRendering) {
RenderResult result = renderReactMarkup(resource.getPath(), resource.getResourceType(), getWcmMode(request), scriptContext, renderAsJson);
renderedHtml = result.html;
cacheString = result.cache;
} else if (renderAsJson) {
// development mode: return cache with just the current resource.
JSONObject cache = new JSONObject();
JSONObject resources = new JSONObject();
JSONObject resourceEntry = new JSONObject();
resourceEntry.put("depth", -1);
// depth is inaccurate
resourceEntry.put("data", JsonObjectCreator.create(resource, -1));
resources.put(resource.getPath(), resourceEntry);
cache.put("resources", resources);
cacheString = cache.toString();
renderedHtml = "";
} else {
// initial rendering in development mode
renderedHtml = "";
}
String output;
if (renderAsJson) {
output = cacheString;
response.setContentType("application/json");
} else {
output = wrapHtml(resource.getPath(), resource, renderedHtml, serverRendering, getWcmMode(request), cacheString);
}
scriptContext.getWriter().write(output);
return null;
} catch (Exception e) {
throw new ScriptException(e);
} finally {
Thread.currentThread().setContextClassLoader(old);
}
}
/**
* Wraps the rendered react markup with the textarea that contains the
* component's props.
*
* @param path the resource path, used as base for the wrapper element ids
* @param resource the current resource, whose type and path become react props
* @param renderedHtml the server-rendered html, or an empty string
* @param serverRendering whether server-side rendering was performed
* @param wcmmode the current wcm mode
* @param cache the serialized resource cache, or null
* @return the wrapped html including the props textarea
*/
private String wrapHtml(String path, Resource resource, String renderedHtml, boolean serverRendering, String wcmmode, String cache) {
JSONObject reactProps = new JSONObject();
try {
if (cache != null) {
reactProps.put("cache", new JSONObject(cache));
}
reactProps.put("resourceType", resource.getResourceType());
reactProps.put("path", resource.getPath());
reactProps.put("wcmmode", wcmmode);
} catch (JSONException e) {
throw new TechnicalException("cannot create react props", e);
}
String jsonProps = StringEscapeUtils.escapeHtml4(reactProps.toString());
String allHtml = "<div data-react-server=\"" + String.valueOf(serverRendering) + "\" data-react=\"app\" data-react-id=\"" + path + "_component\">"
+ renderedHtml + "</div>" + "<textarea id=\"" + path + "_component\" style=\"display:none;\">" + jsonProps + "</textarea>";
return allHtml;
}
private Cqx createCqx(ScriptContext ctx) {
SlingHttpServletRequest request = (SlingHttpServletRequest) ctx.getBindings(ScriptContext.ENGINE_SCOPE).get(SlingBindings.REQUEST);
ClassLoader classLoader = dynamicClassLoaderManager.getDynamicClassLoader();
return new Cqx(new Sling(ctx), finder, new ModelFactory(classLoader, request));
}
/**
* Renders the react markup for the given resource via the pooled javascript engine.
*
* @param path the resource path
* @param resourceType the resource type of the react component
* @param wcmmode the current wcm mode
* @param scriptContext the script context of the current request
* @param renderRootDialog passed through to the javascript engine; set by the caller when rendering as json
* @return the render result containing html and cache
*/
private RenderResult renderReactMarkup(String path, String resourceType, String wcmmode, ScriptContext scriptContext, boolean renderRootDialog) {
JavascriptEngine javascriptEngine;
try {
javascriptEngine = enginePool.borrowObject();
try {
if (reloadScripts) {
javascriptEngine.reloadScripts();
}
return javascriptEngine.render(path, resourceType, wcmmode, createCqx(scriptContext), renderRootDialog);
} finally {
enginePool.returnObject(javascriptEngine);
}
} catch (NoSuchElementException e) {
throw new TechnicalException("cannot get engine from pool", e);
} catch (IllegalStateException e) {
throw new TechnicalException("cannot return engine from pool", e);
} catch (Exception e) {
throw new TechnicalException("error rendering react markup", e);
}
}
private String getWcmMode(SlingHttpServletRequest request) {
return WCMMode.fromRequest(request).name().toLowerCase();
}
public void stop() {
enginePool.close();
}
public static void main(String[] args) {
System.out.println(Text.escapeIllegalJcrChars("[]"));
}
}
|
|
/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.java.sip.communicator.impl.protocol.jabber.extensions.jingle;
import java.util.*;
import net.java.sip.communicator.impl.protocol.jabber.extensions.*;
import org.jivesoftware.smack.packet.*;
/**
* Represents the content <tt>description</tt> elements described in XEP-0167.
*
* @author Emil Ivov
*/
public class RtpDescriptionPacketExtension
extends AbstractPacketExtension
{
/**
* The name space for RTP description elements.
*/
public static final String NAMESPACE = "urn:xmpp:jingle:apps:rtp:1";
/**
* The name of the "description" element.
*/
public static final String ELEMENT_NAME = "description";
/**
* The name of the <tt>media</tt> description argument.
*/
public static final String MEDIA_ATTR_NAME = "media";
/**
* The name of the <tt>ssrc</tt> description argument.
*/
public static final String SSRC_ATTR_NAME = "ssrc";
/**
* The list of payload types that this description element contains.
*/
private final List<PayloadTypePacketExtension> payloadTypes
= new ArrayList<PayloadTypePacketExtension>();
/**
* An optional encryption element that contains encryption parameters for
* this session.
*/
private EncryptionPacketExtension encryption;
/**
* An optional bandwidth element that specifies the allowable or preferred
* bandwidth for use by this application type.
*/
private BandwidthPacketExtension bandwidth;
/**
* A <tt>List</tt> of the optional <tt>extmap</tt> elements that allow
* negotiating RTP extension headers as per RFC 5285.
*/
private List<RTPHdrExtPacketExtension> extmapList
= new ArrayList<RTPHdrExtPacketExtension>();
/**
* The combined list of all child elements that this extension contains.
*/
private List<ExtensionElement> children;
/**
* Creates a new <tt>RtpDescriptionPacketExtension</tt>.
*/
public RtpDescriptionPacketExtension()
{
super(NAMESPACE, ELEMENT_NAME);
}
/**
* Create a new <tt>RtpDescriptionPacketExtension</tt> with a different
* namespace.
*
* @param namespace namespace to use
*/
public RtpDescriptionPacketExtension(String namespace)
{
super(namespace, ELEMENT_NAME);
}
/**
* Specifies the media type for the stream that this description element
* represents, such as "audio" or "video".
*
* @param media the media type for the stream that this element represents
* such as "audio" or "video".
*/
public void setMedia(String media)
{
super.setAttribute(MEDIA_ATTR_NAME, media);
}
/**
* Returns the media type for the stream that this description element
* represents, such as "audio" or "video".
*
* @return the media type for the stream that this description element
* represents, such as "audio" or "video".
*/
public String getMedia()
{
return getAttributeAsString(MEDIA_ATTR_NAME);
}
/**
* Sets the synchronization source ID (SSRC as per RFC 3550) that the stream
* represented by this description element will be using.
*
* @param ssrc the SSRC ID that the RTP stream represented here will be
* using.
*/
public void setSsrc(String ssrc)
{
super.setAttribute(SSRC_ATTR_NAME, ssrc);
}
/**
* Returns the synchronization source ID (SSRC as per RFC 3550) that the
* stream represented by this description element will be using.
*
* @return the synchronization source ID (SSRC as per RFC 3550) that the
* stream represented by this description element will be using.
*/
public String getSsrc()
{
return getAttributeAsString(SSRC_ATTR_NAME);
}
/**
* Adds a new payload type to this description element.
*
* @param payloadType the new payload to add.
*/
public void addPayloadType(PayloadTypePacketExtension payloadType)
{
this.payloadTypes.add(payloadType);
}
/**
* Returns a <b>reference</b> to the list of payload types that we have
* registered with this description so far.
*
* @return a <b>reference</b> to the list of payload types that we have
* registered with this description so far.
*/
public List<PayloadTypePacketExtension> getPayloadTypes()
{
return payloadTypes;
}
/**
* Returns all child elements that we currently have in this packet.
*
* @return the {@link List} of child elements currently registered with
* this packet.
*/
@Override
public List<? extends ExtensionElement> getChildExtensions()
{
if(children == null)
children = new ArrayList<ExtensionElement>();
else
children.clear();
//payload types
children.addAll(payloadTypes);
//encryption element
if (encryption != null)
children.add(encryption);
//bandwidth element
if (bandwidth != null)
children.add(bandwidth);
//extmap elements
if (extmapList != null)
children.addAll(extmapList);
children.addAll(super.getChildExtensions());
return children;
}
/**
* Casts <tt>childExtension</tt> to one of the extensions allowed here and
* sets the corresponding field.
*
* @param childExtension the extension we'd like to add here.
*/
@Override
public void addChildExtension(ExtensionElement childExtension)
{
if(childExtension instanceof PayloadTypePacketExtension)
this.addPayloadType((PayloadTypePacketExtension)childExtension);
else if (childExtension instanceof EncryptionPacketExtension)
this.setEncryption((EncryptionPacketExtension)childExtension);
else if (childExtension instanceof BandwidthPacketExtension)
this.setBandwidth((BandwidthPacketExtension)childExtension);
else if (childExtension instanceof RTPHdrExtPacketExtension)
this.addExtmap((RTPHdrExtPacketExtension)childExtension);
else
super.addChildExtension(childExtension);
}
/**
* Sets the optional encryption element that contains encryption parameters
* for this session.
*
* @param encryption the encryption {@link PacketExtension} we'd like to add
* to this packet.
*/
public void setEncryption(EncryptionPacketExtension encryption)
{
this.encryption = encryption;
}
/**
* Returns the optional encryption element that contains encryption
* parameters for this session.
*
* @return the encryption {@link PacketExtension} added to this packet or
* <tt>null</tt> if none has been set yet.
*/
public EncryptionPacketExtension getEncryption()
{
return encryption;
}
/**
* Sets an optional bandwidth element that specifies the allowable or
* preferred bandwidth for use by this application type.
*
* @param bandwidth the max/preferred bandwidth indication that we'd like
* to add to this packet.
*/
public void setBandwidth(BandwidthPacketExtension bandwidth)
{
this.bandwidth = bandwidth;
}
/**
* Returns an optional bandwidth element that specifies the allowable or
* preferred bandwidth for use by this application type.
*
* @return the max/preferred bandwidth set for this session or <tt>null</tt>
* if none has been set yet.
*/
public BandwidthPacketExtension getBandwidth()
{
return bandwidth;
}
/**
* Adds an optional <tt>extmap</tt> element that allows negotiating RTP
* extension headers as per RFC 5285.
*
* @param extmap an optional <tt>extmap</tt> element that allows negotiating
* RTP extension headers as per RFC 5285.
*/
public void addExtmap(RTPHdrExtPacketExtension extmap)
{
this.extmapList.add(extmap);
}
/**
* Returns a <tt>List</tt> of the optional <tt>extmap</tt> elements that
* allow negotiating RTP extension headers as per RFC 5285.
*
* @return a <tt>List</tt> of the optional <tt>extmap</tt> elements that
* allow negotiating RTP extension headers as per RFC 5285.
*/
public List<RTPHdrExtPacketExtension> getExtmapList()
{
return extmapList;
}
}
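// Minimal usage sketch (not part of the original Jitsi source): exercises only the accessors
// defined in this file to show that the description is backed by plain attributes and an
// aggregated child-element list.
class RtpDescriptionPacketExtensionUsageSketch {
    public static void main(String[] args) {
        RtpDescriptionPacketExtension description = new RtpDescriptionPacketExtension();
        description.setMedia("audio");
        description.setSsrc("424242");
        System.out.println(description.getMedia()); // expected: audio
        System.out.println(description.getSsrc()); // expected: 424242
        // with no payload types, encryption or bandwidth set, only children inherited
        // from AbstractPacketExtension (if any) are reported
        System.out.println(description.getChildExtensions().size());
    }
}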
|
|
package com.mana.innovative.converter.response;
import com.mana.innovative.constants.DAOConstants;
import com.mana.innovative.constants.ServiceConstants;
import com.mana.innovative.dto.client.Item;
import com.mana.innovative.exception.IllegalArgumentValueException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.StringUtils;
import java.util.ArrayList;
import java.util.List;
/**
* The type Item domain dTO converter.
*
* @author Rono, AB, Vadim Servetnik
* @email arkoghosh@hotmail.com, ma@gmail.com, vsssadik@gmail.com
* @Copyright
*/
public class ItemDomainDTOConverter {
/**
* The constant logger.
*/
private static final Logger logger = LoggerFactory.getLogger( ItemDomainDTOConverter.class );
/**
* The constant ZERO.
*/
private static final int ZERO = DAOConstants.ZERO;
/**
* Gets converted item dTO from item domain.
*
* @param itemDTO the item dTO
* @param itemDomain the item domain
*
* @return the converted item dTO from item domain
*/
public static Item getConvertedDTOFromDomain( Item itemDTO, com.mana.innovative.domain.client.Item itemDomain ) {
if ( itemDomain == null ) {
String message = "Parameter itemDomain is required for conversion";
logger.error( message );
throw new NullPointerException( message );
}
if ( itemDTO == null ) {
itemDTO = new Item( );
logger.warn( " Creating itemDTO, received null object" );
}
if ( itemDomain.getItemId( ) >= ZERO ) {
itemDTO.setItemId( itemDomain.getItemId( ) );
}
if ( !StringUtils.isEmpty( itemDomain.getItemName( ) ) ) {
itemDTO.setItemName( itemDomain.getItemName( ) );
} else {
logger.warn( "Item Name was empty" );
itemDTO.setItemName( ServiceConstants.EMPTY );
}
itemDTO.setImageCount( itemDomain.getImageCount( ) );
if ( !StringUtils.isEmpty( itemDomain.getItemOrigin( ) ) ) {
itemDTO.setItemOrigin( itemDomain.getItemOrigin( ) );
} else {
itemDTO.setItemOrigin( ServiceConstants.DEFAULT_ITEM_ORIGIN );
}
if ( !StringUtils.isEmpty( itemDomain.getItemDescription( ) ) ) {
itemDTO.setItemDescription( itemDomain.getItemDescription( ) );
} else {
itemDTO.setItemDescription( ServiceConstants.DEFAULT_SHOP_DESCRIPTION );
}
if ( itemDomain.getItemPrice( ) != null ) {
itemDTO.setItemPrice( itemDomain.getItemPrice( ) );
}
if ( !StringUtils.isEmpty( itemDomain.getItemPriceCurrency( ) ) ) {
itemDTO.setItemPriceCurrency( itemDomain.getItemPriceCurrency( ) );
}
if ( !StringUtils.isEmpty( itemDomain.getItemType( ) ) ) {
itemDTO.setItemType( itemDomain.getItemType( ) );
}
if ( !StringUtils.isEmpty( itemDomain.getItemSubType( ) ) )
itemDTO.setItemSubType( itemDomain.getItemSubType( ) );
else
itemDTO.setItemSubType( "" );
if ( itemDomain.getBoughtDate( ) != null ) {
itemDTO.setBoughtDate( itemDomain.getBoughtDate( ) );
}
if ( !StringUtils.isEmpty( itemDomain.getBoughtFrom( ) ) ) {
itemDTO.setBoughtFrom( itemDomain.getBoughtFrom( ) );
}
if ( itemDomain.getQuantity( ) != null ) {
itemDTO.setQuantity( itemDomain.getQuantity( ) );
}
if ( !StringUtils.isEmpty( itemDomain.getQuantityType( ) ) ) {
itemDTO.setQuantityType( itemDomain.getQuantityType( ) );
}
if ( itemDomain.getWeight( ) != null ) {
itemDTO.setWeight( itemDomain.getWeight( ) );
}
if ( !StringUtils.isEmpty( itemDomain.getWeightedUnit( ) ) ) {
itemDTO.setWeightedUnit( itemDomain.getWeightedUnit( ) );
}
// item.setShopItem();
if ( itemDomain.getItemDiscountList( ) != null ) {
itemDTO.setItemDiscountList( ItemDiscountDomainDTOConverter.getConvertedListDTOFromDomain( itemDomain
.getItemDiscountList( ) ) );
}
if ( itemDomain.getItemImageList( ) != null ) {
itemDTO.setItemImageList( ItemImageDomainDTOConverter.getConvertedListDTOFromDomain( itemDomain
.getItemImageList( ) ) );
}
if ( itemDomain.getGemstoneList( ) != null ) {
itemDTO.setGemstoneList( GemstoneDomainDTOConverter.getConvertedListDTOFromDomain( itemDomain.getGemstoneList( ) ) );
}
return itemDTO;
}
/**
* Gets converted item dTO list.
*
* @param items the items
*
* @return the converted item dTO list
*/
public static List< Item > getConvertedListDTOFromDomain( List< com.mana.innovative.domain.client.Item > items ) {
List< Item > itemDTOList = new ArrayList<>( );
for ( com.mana.innovative.domain.client.Item item : items ) {
Item itemDTO = new Item( );
itemDTO = getConvertedDTOFromDomain( itemDTO, item );
itemDTOList.add( itemDTO );
}
return itemDTOList;
}
/**
* Gets converted item domain from item dTO.
*
* @param itemDomain the item domain
* @param itemDTO the item dTO
*
* @return the converted item domain from item dTO
*/
public static com.mana.innovative.domain.client.Item getConvertedDomainFromDTO( com.mana.innovative.domain.client.Item itemDomain, Item itemDTO ) {
if ( itemDTO == null ) {
String message = "Parameter itemDTO is required for conversion";
logger.error( message );
throw new NullPointerException( message );
}
if ( itemDomain == null ) {
itemDomain = new com.mana.innovative.domain.client.Item( );
logger.warn( "Creating itemDomain, received null object" );
}
boolean flag = false;
StringBuilder stringBuilder = new StringBuilder( " Value must not be null for " );
if ( !StringUtils.isEmpty( itemDTO.getItemName( ) ) ) {
itemDomain.setItemName( itemDTO.getItemName( ) );
} else {
flag = true;
stringBuilder.append( " ItemName," );
}
if ( !StringUtils.isEmpty( itemDTO.getItemDescription( ) ) ) {
itemDomain.setItemDescription( itemDTO.getItemDescription( ) );
} else {
itemDomain.setItemDescription( ServiceConstants.DEFAULT_ITEM_DESCRIPTION );
logger.warn( "Item Description not provided, setting default Description" );
}
if ( itemDTO.getItemPrice( ) != null && itemDTO.getItemPrice( ) > ZERO ) {
itemDomain.setItemPrice( itemDTO.getItemPrice( ) );
} else {
flag = true;
stringBuilder.append( " ItemPrice," );
}
if ( !StringUtils.isEmpty( itemDTO.getItemPriceCurrency( ) ) ) {
itemDomain.setItemPriceCurrency( itemDTO.getItemPriceCurrency( ) );
} else {
flag = true;
stringBuilder.append( " ItemPriceCurrency," );
}
if ( !StringUtils.isEmpty( itemDTO.getItemType( ) ) ) {
itemDomain.setItemType( itemDTO.getItemType( ) );
} else {
flag = true;
stringBuilder.append( " ItemType," );
}
// the item sub type is copied as-is, even when empty
itemDomain.setItemSubType( itemDTO.getItemSubType( ) );
if ( itemDTO.getBoughtDate( ) != null ) {
itemDomain.setBoughtDate( itemDTO.getBoughtDate( ) );
} else {
flag = true;
stringBuilder.append( " BoughtDate," );
}
if ( !StringUtils.isEmpty( itemDTO.getBoughtFrom( ) ) ) {
itemDomain.setBoughtFrom( itemDTO.getBoughtFrom( ) );
} else {
flag = true;
stringBuilder.append( " BoughtFrom," );
}
if ( itemDTO.getQuantity( ) != null && itemDTO.getQuantity( ) > ZERO ) {
itemDomain.setQuantity( itemDTO.getQuantity( ) );
} else {
flag = true;
stringBuilder.append( " Quantity," );
}
if ( !StringUtils.isEmpty( itemDTO.getQuantityType( ) ) ) {
itemDomain.setQuantityType( itemDTO.getQuantityType( ) );
} else {
flag = true;
stringBuilder.append( " QuantityType," );
}
if ( itemDTO.getWeight( ) != null && itemDTO.getWeight( ) > ZERO ) {
itemDomain.setWeight( itemDTO.getWeight( ) );
} else {
flag = true;
stringBuilder.append( " Weight," );
}
if ( !StringUtils.isEmpty( itemDTO.getWeightedUnit( ) ) ) {
itemDomain.setWeightedUnit( itemDTO.getWeightedUnit( ) );
} else {
flag = true;
stringBuilder.append( " WeightedUnit," );
}
if ( itemDTO.getImageCount( ) > ZERO ) {
itemDomain.setImageCount( itemDTO.getImageCount( ) );
} else {
// itemDomain.setImageCount( ZERO );
// logger.warn( "Item Image Count was empty" );
flag = true;
stringBuilder.append( " ItemImageCount," );
}
if ( !StringUtils.isEmpty( itemDTO.getItemOrigin( ) ) ) {
itemDomain.setItemOrigin( itemDTO.getItemOrigin( ) );
} else {
logger.warn( "No Item Origin was provided using default" );
itemDomain.setItemOrigin( ServiceConstants.DEFAULT_ITEM_ORIGIN );
}
try {
if ( itemDTO.getItemDiscountList( ) != null ) {
itemDomain.setItemDiscountList( ItemDiscountDomainDTOConverter.getConvertedListDomainFromDTO( itemDTO
.getItemDiscountList( ) ) );
}
} catch ( IllegalArgumentValueException exception ) {
flag = true;
stringBuilder.append( exception.getMessage( ) );
}
try {
if ( itemDTO.getItemImageList( ) != null ) {
itemDomain.setItemImageList( ItemImageDomainDTOConverter.getConvertedListDomainFromDTO( itemDTO
.getItemImageList( ) ) );
}
} catch ( IllegalArgumentValueException exception ) {
flag = true;
stringBuilder.append( exception.getMessage( ) );
}
try {
if ( itemDTO.getGemstoneList( ) != null ) {
itemDomain.setGemstoneList( GemstoneDomainDTOConverter.getConvertedListDomainFromDTO( itemDTO.getGemstoneList
( ) ) );
}
} catch ( IllegalArgumentValueException exception ) {
flag = true;
stringBuilder.append( exception.getMessage( ) );
}
// item.setShopItem();
if ( flag ) {
logger.error( stringBuilder.toString( ) );
throw new IllegalArgumentValueException( stringBuilder.toString( ) );
}
logger.info( stringBuilder.toString( ) );
return itemDomain;
}
/**
* Gets converted item domain list from item dTO list.
*
* @param itemDTOList the item dTO list
*
* @return the converted item domain list from item dTO list
*/
public static List< com.mana.innovative.domain.client.Item > getConvertedListDomainFromDTO( List< Item > itemDTOList ) {
List< com.mana.innovative.domain.client.Item > itemDomainList = new ArrayList<>( );
for ( Item itemDTO : itemDTOList ) {
com.mana.innovative.domain.client.Item itemDomain = new com.mana.innovative.domain.client.Item( );
itemDomain = getConvertedDomainFromDTO( itemDomain, itemDTO );
itemDomainList.add( itemDomain );
}
return itemDomainList;
}
}
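// Minimal usage sketch (not part of the original converter): illustrates the validation
// contract of getConvertedDomainFromDTO. It only relies on members already used in this file
// (the DTO's no-arg constructor and setItemName), and it assumes that an untouched DTO reports
// null/zero for the remaining fields, as the null checks in the converter above suggest.
class ItemDomainDTOConverterUsageSketch {
    public static void main(String[] args) {
        Item incompleteDTO = new Item();
        incompleteDTO.setItemName("Ring");
        try {
            ItemDomainDTOConverter.getConvertedDomainFromDTO(
                    new com.mana.innovative.domain.client.Item(), incompleteDTO);
        } catch (IllegalArgumentValueException exception) {
            // expected: the message lists every missing required field
            // (ItemPrice, ItemPriceCurrency, ItemType, BoughtDate, ...)
            System.out.println(exception.getMessage());
        }
    }
}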
|
|
/**
*/
package activity;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.eclipse.emf.common.util.Enumerator;
/**
* <!-- begin-user-doc -->
* A representation of the literals of the enumeration '<em><b>Activites</b></em>',
* and utility methods for working with them.
* <!-- end-user-doc -->
* @see activity.ActivityPackage#getActivites()
* @model
* @generated
*/
public enum Activites implements Enumerator {
/**
* The '<em><b>LABOUR</b></em>' literal object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #LABOUR_VALUE
* @generated
* @ordered
*/
LABOUR(0, "LABOUR", "labour"),
/**
* The '<em><b>SEMIS</b></em>' literal object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #SEMIS_VALUE
* @generated
* @ordered
*/
SEMIS(1, "SEMIS", "semis"),
/**
* The '<em><b>IRRIGATION</b></em>' literal object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #IRRIGATION_VALUE
* @generated
* @ordered
*/
IRRIGATION(2, "IRRIGATION", "irrigation"),
/**
* The '<em><b>FERTILISATION</b></em>' literal object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #FERTILISATION_VALUE
* @generated
* @ordered
*/
FERTILISATION(3, "FERTILISATION", "fertilisation"),
/**
* The '<em><b>RECOLTE</b></em>' literal object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #RECOLTE_VALUE
* @generated
* @ordered
*/
RECOLTE(4, "RECOLTE", "recolte"),
/**
* The '<em><b>ALIMENTATION</b></em>' literal object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #ALIMENTATION_VALUE
* @generated
* @ordered
*/
ALIMENTATION(5, "ALIMENTATION", "ALIMENTATION"),
/**
* The '<em><b>TRAITE</b></em>' literal object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #TRAITE_VALUE
* @generated
* @ordered
*/
TRAITE(6, "TRAITE", "traite"),
/**
* The '<em><b>SURVEILLANCE AGNELAGE</b></em>' literal object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #SURVEILLANCE_AGNELAGE_VALUE
* @generated
* @ordered
*/
SURVEILLANCE_AGNELAGE(7, "SURVEILLANCE_AGNELAGE", "surveillance_agnelage"),
/**
* The '<em><b>SURVEILLANCE VELAGE</b></em>' literal object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #SURVEILLANCE_VELAGE_VALUE
* @generated
* @ordered
*/
SURVEILLANCE_VELAGE(8, "SURVEILLANCE_VELAGE", "surveillange_velage");
/**
* The '<em><b>LABOUR</b></em>' literal value.
* <!-- begin-user-doc -->
* <p>
* If the meaning of '<em><b>LABOUR</b></em>' literal object isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @see #LABOUR
* @model literal="labour"
* @generated
* @ordered
*/
public static final int LABOUR_VALUE = 0;
/**
* The '<em><b>SEMIS</b></em>' literal value.
* <!-- begin-user-doc -->
* <p>
* If the meaning of '<em><b>SEMIS</b></em>' literal object isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @see #SEMIS
* @model literal="semis"
* @generated
* @ordered
*/
public static final int SEMIS_VALUE = 1;
/**
* The '<em><b>IRRIGATION</b></em>' literal value.
* <!-- begin-user-doc -->
* <p>
* If the meaning of '<em><b>IRRIGATION</b></em>' literal object isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @see #IRRIGATION
* @model literal="irrigation"
* @generated
* @ordered
*/
public static final int IRRIGATION_VALUE = 2;
/**
* The '<em><b>FERTILISATION</b></em>' literal value.
* <!-- begin-user-doc -->
* <p>
* If the meaning of '<em><b>FERTILISATION</b></em>' literal object isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @see #FERTILISATION
* @model literal="fertilisation"
* @generated
* @ordered
*/
public static final int FERTILISATION_VALUE = 3;
/**
* The '<em><b>RECOLTE</b></em>' literal value.
* <!-- begin-user-doc -->
* <p>
* If the meaning of '<em><b>RECOLTE</b></em>' literal object isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @see #RECOLTE
* @model literal="recolte"
* @generated
* @ordered
*/
public static final int RECOLTE_VALUE = 4;
/**
* The '<em><b>ALIMENTATION</b></em>' literal value.
* <!-- begin-user-doc -->
* <p>
* If the meaning of '<em><b>ALIMENTATION</b></em>' literal object isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @see #ALIMENTATION
* @model
* @generated
* @ordered
*/
public static final int ALIMENTATION_VALUE = 5;
/**
* The '<em><b>TRAITE</b></em>' literal value.
* <!-- begin-user-doc -->
* <p>
* If the meaning of '<em><b>TRAITE</b></em>' literal object isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @see #TRAITE
* @model literal="traite"
* @generated
* @ordered
*/
public static final int TRAITE_VALUE = 6;
/**
* The '<em><b>SURVEILLANCE AGNELAGE</b></em>' literal value.
* <!-- begin-user-doc -->
* <p>
* If the meaning of '<em><b>SURVEILLANCE AGNELAGE</b></em>' literal object isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @see #SURVEILLANCE_AGNELAGE
* @model literal="surveillance_agnelage"
* @generated
* @ordered
*/
public static final int SURVEILLANCE_AGNELAGE_VALUE = 7;
/**
* The '<em><b>SURVEILLANCE VELAGE</b></em>' literal value.
* <!-- begin-user-doc -->
* <p>
* If the meaning of '<em><b>SURVEILLANCE VELAGE</b></em>' literal object isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @see #SURVEILLANCE_VELAGE
* @model literal="surveillange_velage"
* @generated
* @ordered
*/
public static final int SURVEILLANCE_VELAGE_VALUE = 8;
/**
* An array of all the '<em><b>Activites</b></em>' enumerators.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private static final Activites[] VALUES_ARRAY =
new Activites[] {
LABOUR,
SEMIS,
IRRIGATION,
FERTILISATION,
RECOLTE,
ALIMENTATION,
TRAITE,
SURVEILLANCE_AGNELAGE,
SURVEILLANCE_VELAGE,
};
/**
* A public read-only list of all the '<em><b>Activites</b></em>' enumerators.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public static final List<Activites> VALUES = Collections.unmodifiableList(Arrays.asList(VALUES_ARRAY));
/**
* Returns the '<em><b>Activites</b></em>' literal with the specified literal value.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public static Activites get(String literal) {
for (int i = 0; i < VALUES_ARRAY.length; ++i) {
Activites result = VALUES_ARRAY[i];
if (result.toString().equals(literal)) {
return result;
}
}
return null;
}
/**
* Returns the '<em><b>Activites</b></em>' literal with the specified name.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public static Activites getByName(String name) {
for (int i = 0; i < VALUES_ARRAY.length; ++i) {
Activites result = VALUES_ARRAY[i];
if (result.getName().equals(name)) {
return result;
}
}
return null;
}
/**
* Returns the '<em><b>Activites</b></em>' literal with the specified integer value.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public static Activites get(int value) {
switch (value) {
case LABOUR_VALUE: return LABOUR;
case SEMIS_VALUE: return SEMIS;
case IRRIGATION_VALUE: return IRRIGATION;
case FERTILISATION_VALUE: return FERTILISATION;
case RECOLTE_VALUE: return RECOLTE;
case ALIMENTATION_VALUE: return ALIMENTATION;
case TRAITE_VALUE: return TRAITE;
case SURVEILLANCE_AGNELAGE_VALUE: return SURVEILLANCE_AGNELAGE;
case SURVEILLANCE_VELAGE_VALUE: return SURVEILLANCE_VELAGE;
}
return null;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private final int value;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private final String name;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private final String literal;
/**
* Only this class can construct instances.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private Activites(int value, String name, String literal) {
this.value = value;
this.name = name;
this.literal = literal;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public int getValue() {
return value;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public String getName() {
return name;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public String getLiteral() {
return literal;
}
/**
* Returns the literal value of the enumerator, which is its string representation.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public String toString() {
return literal;
}
} //Activites
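// A short usage sketch for the generated Activites enum above, showing the three
// lookup helpers and the VALUES list. It assumes the activity package (and the EMF
// runtime it depends on) is available on the classpath; the example package and class
// names below are hypothetical.
package activity.examples;

import activity.Activites;

public class ActivitesLookupExample {
    public static void main(String[] args) {
        // Lookup by literal (the third constructor argument, also returned by toString()).
        System.out.println(Activites.get("labour"));                 // LABOUR, prints "labour"
        // Lookup by name (the second constructor argument).
        System.out.println(Activites.getByName("SEMIS"));            // SEMIS, prints "semis"
        // Lookup by integer value (the *_VALUE constants).
        System.out.println(Activites.get(Activites.TRAITE_VALUE));   // TRAITE, prints "traite"
        // Every lookup returns null when nothing matches.
        System.out.println(Activites.get("unknown"));                // prints "null"
        // VALUES is an unmodifiable list of all enumerators, in declaration order.
        System.out.println(Activites.VALUES.size());                 // 9
    }
}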
|
|
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.go;
import com.facebook.buck.config.BuckConfig;
import com.facebook.buck.cxx.toolchain.CxxPlatform;
import com.facebook.buck.io.ExecutableFinder;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.FlavorDomain;
import com.facebook.buck.model.InternalFlavor;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.CommandTool;
import com.facebook.buck.rules.HashedFileTool;
import com.facebook.buck.rules.Tool;
import com.facebook.buck.rules.tool.config.ToolConfig;
import com.facebook.buck.util.HumanReadableException;
import com.facebook.buck.util.MoreCollectors;
import com.facebook.buck.util.ProcessExecutor;
import com.facebook.buck.util.ProcessExecutorParams;
import com.google.common.base.CharMatcher;
import com.google.common.base.Splitter;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.EnumSet;
import java.util.Optional;
public class GoBuckConfig {
private static final String SECTION = "go";
private static final Path DEFAULT_GO_TOOL = Paths.get("go");
private final BuckConfig delegate;
private Supplier<Path> goRootSupplier;
private Supplier<Path> goToolDirSupplier;
private Supplier<GoPlatformFlavorDomain> platformFlavorDomain;
private Supplier<GoPlatform> defaultPlatform;
public GoBuckConfig(
final BuckConfig delegate,
final ProcessExecutor processExecutor,
final FlavorDomain<CxxPlatform> cxxPlatforms) {
this.delegate = delegate;
goRootSupplier =
Suppliers.memoize(
() -> {
Optional<Path> configValue = delegate.getPath(SECTION, "root");
if (configValue.isPresent()) {
return configValue.get();
}
return Paths.get(getGoEnvFromTool(processExecutor, "GOROOT"));
});
goToolDirSupplier =
Suppliers.memoize(() -> Paths.get(getGoEnvFromTool(processExecutor, "GOTOOLDIR")));
platformFlavorDomain =
Suppliers.memoize(
() -> {
// TODO(mikekap): Allow adding goos/goarch values from config.
return new GoPlatformFlavorDomain(
delegate.getPlatform(), delegate.getArchitecture(), cxxPlatforms);
});
defaultPlatform =
Suppliers.memoize(
() -> {
Optional<String> configValue = delegate.getValue(SECTION, "default_platform");
Optional<GoPlatform> platform;
if (configValue.isPresent()) {
platform =
platformFlavorDomain.get().getValue(InternalFlavor.of(configValue.get()));
if (!platform.isPresent()) {
throw new HumanReadableException(
"Bad go platform value for %s.default_platform = %s", SECTION, configValue);
}
} else {
platform =
platformFlavorDomain
.get()
.getValue(delegate.getPlatform(), delegate.getArchitecture());
if (!platform.isPresent()) {
throw new HumanReadableException(
"Couldn't determine default go platform for %s %s",
delegate.getPlatform(), delegate.getArchitecture());
}
}
return platform.get();
});
}
GoPlatformFlavorDomain getPlatformFlavorDomain() {
return platformFlavorDomain.get();
}
GoPlatform getDefaultPlatform() {
return defaultPlatform.get();
}
Tool getCompiler() {
return getGoTool("compiler", "compile", "compiler_flags");
}
Tool getAssembler() {
return getGoTool("assembler", "asm", "asm_flags");
}
Tool getPacker() {
return getGoTool("packer", "pack", "");
}
Tool getLinker() {
return getGoTool("linker", "link", "linker_flags");
}
Path getDefaultPackageName(BuildTarget target) {
Path prefix = Paths.get(delegate.getValue(SECTION, "prefix").orElse(""));
return prefix.resolve(target.getBasePath());
}
ImmutableList<Path> getVendorPaths() {
Optional<ImmutableList<String>> vendorPaths =
delegate.getOptionalListWithoutComments(SECTION, "vendor_path", ':');
if (vendorPaths.isPresent()) {
return vendorPaths.get().stream().map(Paths::get).collect(MoreCollectors.toImmutableList());
}
return ImmutableList.of();
}
Optional<Tool> getGoTestMainGenerator(BuildRuleResolver resolver) {
return delegate.getView(ToolConfig.class).getTool(SECTION, "test_main_gen", resolver);
}
ImmutableList<Path> getAssemblerIncludeDirs() {
// TODO(mikekap): Allow customizing this via config.
return ImmutableList.of(goRootSupplier.get().resolve("pkg").resolve("include"));
}
private Tool getGoTool(
final String configName, final String toolName, final String extraFlagsConfigKey) {
Optional<Path> toolPath = delegate.getPath(SECTION, configName);
if (!toolPath.isPresent()) {
toolPath = Optional.of(goToolDirSupplier.get().resolve(toolName));
}
CommandTool.Builder builder = new CommandTool.Builder(new HashedFileTool(toolPath.get()));
if (!extraFlagsConfigKey.isEmpty()) {
for (String arg : getFlags(extraFlagsConfigKey)) {
builder.addArg(arg);
}
}
builder.addEnv("GOROOT", goRootSupplier.get().toString());
return builder.build();
}
private ImmutableList<String> getFlags(String key) {
return ImmutableList.copyOf(
Splitter.on(" ").omitEmptyStrings().split(delegate.getValue(SECTION, key).orElse("")));
}
private Path getGoToolPath() {
Optional<Path> goTool = delegate.getPath(SECTION, "tool");
if (goTool.isPresent()) {
return goTool.get();
}
// Try resolving it via the go root config var. We can't use goRootSupplier here since that
// would create a recursion.
Optional<Path> goRoot = delegate.getPath(SECTION, "root");
if (goRoot.isPresent()) {
return goRoot.get().resolve("bin").resolve("go");
}
return new ExecutableFinder().getExecutable(DEFAULT_GO_TOOL, delegate.getEnvironment());
}
private String getGoEnvFromTool(ProcessExecutor processExecutor, String env) {
Path goTool = getGoToolPath();
Optional<ImmutableMap<String, String>> goRootEnv =
delegate.getPath(SECTION, "root").map(input -> ImmutableMap.of("GOROOT", input.toString()));
try {
ProcessExecutor.Result goToolResult =
processExecutor.launchAndExecute(
ProcessExecutorParams.builder()
.addCommand(goTool.toString(), "env", env)
.setEnvironment(goRootEnv)
.build(),
EnumSet.of(ProcessExecutor.Option.EXPECTING_STD_OUT),
/* stdin */ Optional.empty(),
/* timeOutMs */ Optional.empty(),
/* timeoutHandler */ Optional.empty());
if (goToolResult.getExitCode() == 0) {
return CharMatcher.whitespace().trimFrom(goToolResult.getStdout().get());
} else {
throw new HumanReadableException(goToolResult.getStderr().get());
}
} catch (InterruptedException e) {
throw new RuntimeException(e);
} catch (IOException e) {
throw new HumanReadableException(
e, "Could not run \"%s env %s\": %s", goTool, env, e.getMessage());
}
}
}
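// A minimal, self-contained sketch of the tool-resolution order implemented by
// GoBuckConfig.getGoToolPath() above: an explicit [go] "tool" setting wins, otherwise
// [go] "root" is resolved to <root>/bin/go, otherwise a plain "go" is looked up on the
// PATH (Buck does this with ExecutableFinder). The Function-based config stand-in and
// the class name below are hypothetical; Buck's real BuckConfig is not used here.
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Optional;
import java.util.function.Function;

public class GoToolResolutionSketch {

    static Path resolveGoTool(Function<String, Optional<Path>> config) {
        Optional<Path> tool = config.apply("tool");
        if (tool.isPresent()) {
            return tool.get();
        }
        Optional<Path> root = config.apply("root");
        if (root.isPresent()) {
            return root.get().resolve("bin").resolve("go");
        }
        // Fall back to whatever "go" resolves to on the PATH.
        return Paths.get("go");
    }

    public static void main(String[] args) {
        // No [go] section configured: the plain "go" binary is used.
        System.out.println(resolveGoTool(key -> Optional.empty()));   // go
        // [go] root = /usr/local/go: the tool becomes <root>/bin/go.
        System.out.println(resolveGoTool(key ->
                "root".equals(key) ? Optional.of(Paths.get("/usr/local/go")) : Optional.<Path>empty()));
        // prints /usr/local/go/bin/go
    }
}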
|
|
/**
* Copyright 2012, Jason Parraga, Marist College
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
**/
package net.floodlightcontroller.flowcache;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import org.projectfloodlight.openflow.protocol.match.Match;
import org.projectfloodlight.openflow.protocol.match.MatchField;
import org.projectfloodlight.openflow.protocol.OFFlowDelete;
import org.projectfloodlight.openflow.protocol.OFFlowStatsEntry;
import org.projectfloodlight.openflow.protocol.OFType;
import org.projectfloodlight.openflow.protocol.OFFlowStatsReply;
import org.projectfloodlight.openflow.protocol.OFFlowStatsRequest;
import org.projectfloodlight.openflow.types.OFPort;
import org.projectfloodlight.openflow.types.TableId;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import net.floodlightcontroller.core.FloodlightContext;
import net.floodlightcontroller.core.IFloodlightProviderService;
import net.floodlightcontroller.core.IOFSwitch;
import net.floodlightcontroller.core.internal.IOFSwitchService;
import net.floodlightcontroller.core.module.FloodlightModuleContext;
import net.floodlightcontroller.core.module.FloodlightModuleException;
import net.floodlightcontroller.core.module.IFloodlightModule;
import net.floodlightcontroller.core.module.IFloodlightService;
import net.floodlightcontroller.flowcache.IFlowReconcileListener;
import net.floodlightcontroller.flowcache.IFlowReconcileService;
import net.floodlightcontroller.flowcache.OFMatchReconcile;
import net.floodlightcontroller.flowcache.PriorityPendingQueue.EventPriority;
import net.floodlightcontroller.linkdiscovery.ILinkDiscovery;
import net.floodlightcontroller.linkdiscovery.ILinkDiscovery.LDUpdate;
import net.floodlightcontroller.linkdiscovery.internal.LinkInfo;
import net.floodlightcontroller.linkdiscovery.ILinkDiscoveryService;
import net.floodlightcontroller.routing.Link;
import net.floodlightcontroller.topology.ITopologyListener;
import net.floodlightcontroller.topology.ITopologyService;
import net.floodlightcontroller.util.OFMatchWithSwDpid;
/**
* Flow reconciliation module that is triggered by PORT_DOWN events. This module
* will recursively trace back all flows from the immediately affected switch
* and remove them (specifically flows with an idle timeout that would not be
* exhausted). Once the flows are deleted, Floodlight will re-evaluate the path
* the traffic should take with its updated topology map.
*
* @author Jason Parraga
*/
@Deprecated
public class PortDownReconciliation implements IFloodlightModule,
ITopologyListener, IFlowReconcileListener {
protected static Logger log = LoggerFactory.getLogger(PortDownReconciliation.class);
protected ITopologyService topology;
protected IOFSwitchService switchService;
protected IFlowReconcileService frm;
protected ILinkDiscoveryService lds;
protected Map<Link, LinkInfo> links;
protected FloodlightContext cntx;
protected static boolean waiting = false;
protected int statsQueryXId;
protected static List<OFFlowStatsReply> statsReply;
// ITopologyListener
@Override
public void topologyChanged(List<LDUpdate> appliedUpdates) {
for (LDUpdate ldu : appliedUpdates) {
if (ldu.getOperation()
.equals(ILinkDiscovery.UpdateOperation.PORT_DOWN)) {
// Get the switch ID for the OFMatchWithSwDpid object
IOFSwitch affectedSwitch = switchService.getSwitch(ldu.getSrc());
// Create an OFMatchReconcile object
OFMatchReconcile ofmr = new OFMatchReconcile();
// Generate an OFMatch object for the OFMatchWithSwDpid object
Match match = affectedSwitch.getOFFactory().buildMatch().build(); // nothing specific set, so all wildcarded
// Generate the OFMatchWithSwDpid
OFMatchWithSwDpid ofmatchsw = new OFMatchWithSwDpid(match, affectedSwitch.getId());
// Set the action to update the path to remove flows routing
// towards the downed port
ofmr.rcAction = OFMatchReconcile.ReconcileAction.UPDATE_PATH;
// Set the match, with the switch dpid
ofmr.ofmWithSwDpid = ofmatchsw;
// Assign the downed port to the OFMatchReconcile's outPort data
// member (I added this to
// the OFMatchReconcile class)
ofmr.outPort = ldu.getSrcPort();
// Tell the reconcile manager to reconcile matching flows
frm.reconcileFlow(ofmr, EventPriority.HIGH);
}
}
}
@Override
public Collection<Class<? extends IFloodlightService>>
getModuleServices() {
return null;
}
@Override
public Map<Class<? extends IFloodlightService>, IFloodlightService>
getServiceImpls() {
return null;
}
@Override
public Collection<Class<? extends IFloodlightService>>
getModuleDependencies() {
Collection<Class<? extends IFloodlightService>> l = new ArrayList<Class<? extends IFloodlightService>>();
l.add(IFloodlightProviderService.class);
l.add(ITopologyService.class);
l.add(IFlowReconcileService.class);
l.add(ILinkDiscoveryService.class);
return l;
}
@Override
public void init(FloodlightModuleContext context) throws FloodlightModuleException {
switchService = context.getServiceImpl(IOFSwitchService.class);
topology = context.getServiceImpl(ITopologyService.class);
frm = context.getServiceImpl(IFlowReconcileService.class);
lds = context.getServiceImpl(ILinkDiscoveryService.class);
cntx = new FloodlightContext();
}
@Override
public void startUp(FloodlightModuleContext context) {
topology.addListener(this);
frm.addFlowReconcileListener(this);
}
@Override
public String getName() {
return "portdownreconciliation";
}
@Override
public boolean isCallbackOrderingPrereq(OFType type, String name) {
return false;
}
@Override
public boolean isCallbackOrderingPostreq(OFType type, String name) {
return true;
}
/**
* Base case for the reconciliation of flows. This is triggered at the
* switch that is immediately affected by the PORT_DOWN event.
*
* @return the Command indicating whether to STOP or CONTINUE
*/
@Override
public net.floodlightcontroller.core.IListener.Command reconcileFlows(ArrayList<OFMatchReconcile> ofmRcList) {
if (lds != null) {
links = new HashMap<Link, LinkInfo>();
// Get all the switch links from the topology
if (lds.getLinks() != null) links.putAll(lds.getLinks());
for (OFMatchReconcile ofmr : ofmRcList) {
// We only care about OFMatchReconcile objects that wish to
// update the path to a switch
if (ofmr.rcAction.equals(OFMatchReconcile.ReconcileAction.UPDATE_PATH)) {
// Get the switch object from the OFMatchReconcile
IOFSwitch sw = switchService.getSwitch(ofmr.ofmWithSwDpid.getDpid());
// Map data structure that holds the invalid matches and the
// ingress ports of those matches
Map<OFPort, List<Match>> invalidBaseIngressAndMatches = new HashMap<OFPort, List<Match>>();
// Get the invalid flows
List<OFFlowStatsReply> flows = getFlows(sw, ofmr.outPort);
// Analyze all the flows with outPorts equaling the downed
// port and extract OFMatch's to trace back to neighbors
for (OFFlowStatsReply flow : flows) {
// Create a reference to the match for ease
for (OFFlowStatsEntry entry : flow.getEntries()) {
Match match = entry.getMatch();
// Here we utilize an index of input ports which point
// to multiple invalid matches
if (invalidBaseIngressAndMatches.containsKey(match.get(MatchField.IN_PORT)))
// If the input port is already in the index, add
// the match to its list
invalidBaseIngressAndMatches.get(match.get(MatchField.IN_PORT))
.add(match);
else {
// Otherwise create a new list and add it to the
// index
List<Match> matches = new ArrayList<Match>();
matches.add(match);
invalidBaseIngressAndMatches.put(match.get(MatchField.IN_PORT), matches);
}
}
}
// Remove invalid flows from the base switch, if they exist
if (!flows.isEmpty()) {
log.debug("Removing flows on switch : " + sw.getId()
+ " with outport: " + ofmr.outPort);
clearFlowMods(sw, ofmr.outPort);
}
// Create a list of neighboring switches we need to remove
// invalid flows from
Map<IOFSwitch, Map<OFPort, List<Match>>> neighborSwitches = new HashMap<IOFSwitch, Map<OFPort, List<Match>>>();
// Loop through all the links
for (Link link : links.keySet()) {
// Filter out links we care about
if (link.getDst() == sw.getId()) {
// Loop through the links to neighboring switches
// which have invalid flows
for (Entry<OFPort, List<Match>> invalidBaseIngressAndMatch : invalidBaseIngressAndMatches.entrySet()) {
// Find links on the network which link to the
// ingress ports that have invalidly routed
// flows
if (link.getDstPort() == invalidBaseIngressAndMatch.getKey()) {
Map<OFPort, List<Match>> invalidNeighborOutportAndMatch = new HashMap<OFPort, List<Match>>();
// Insert the neighbor's outPort to the base
// switch and the invalid match
invalidNeighborOutportAndMatch.put(link.getSrcPort(),
invalidBaseIngressAndMatch.getValue());
// Link a neighbor switch's invalid match
// and outport to their Switch object
neighborSwitches.put(switchService.getSwitch(link.getSrc()), invalidNeighborOutportAndMatch);
}
}
}
}
log.debug("We have " + neighborSwitches.size()
+ " neighboring switches to deal with!");
// Loop through all the switches we found to have potential
// issues
for (IOFSwitch neighborSwitch : neighborSwitches.keySet()) {
log.debug("NeighborSwitch ID : " + neighborSwitch.getId());
if (neighborSwitches.get(neighborSwitch) != null)
deleteInvalidFlows(neighborSwitch, neighborSwitches.get(neighborSwitch));
}
}
return Command.CONTINUE;
}
} else {
log.error("Link Discovery Service Is Null");
}
return Command.CONTINUE;
}
/**
* @param sw
* the switch object that we wish to get flows from
* @param outPort
* the output action port we wish to find flows with
* @return a list of OFFlowStatsReply objects, i.e. the flows on the switch
*/
public List<OFFlowStatsReply> getFlows(IOFSwitch sw, OFPort outPort) {
statsReply = new ArrayList<OFFlowStatsReply>();
List<OFFlowStatsReply> values = null;
Future<List<OFFlowStatsReply>> future;
// Statistics request object for getting flows
OFFlowStatsRequest req = sw.getOFFactory().buildFlowStatsRequest()
.setMatch(sw.getOFFactory().buildMatch().build())
.setOutPort(outPort)
.setTableId(TableId.ALL)
.build();
try {
// System.out.println(sw.getStatistics(req));
future = sw.writeStatsRequest(req);
values = future.get(10, TimeUnit.SECONDS);
if (values != null) {
for (OFFlowStatsReply stat : values) {
statsReply.add(stat);
}
}
} catch (Exception e) {
log.error("Failure retrieving statistics from switch " + sw, e);
}
return statsReply;
}
/**
* @param sw
* The switch we wish to remove flows from
* @param outPort
* The output action port of the flows we wish to delete
*/
public void clearFlowMods(IOFSwitch sw, OFPort outPort) {
// Delete all pre-existing flows with the same output action port or
// outPort
Match match = sw.getOFFactory().buildMatch().build();
OFFlowDelete fm = sw.getOFFactory().buildFlowDelete()
.setMatch(match)
.setOutPort(outPort)
.build();
try {
sw.write(fm);
} catch (Exception e) {
log.error("Failed to clear flows on switch {} - {}", this, e);
}
}
/**
* @param sw
* The switch we wish to remove flows from
* @param match
* The match of the flows we wish to delete
* @param outPort
* The output action port of the flows we wish to delete
*/
public void clearFlowMods(IOFSwitch sw, Match match, OFPort outPort) {
// Delete pre-existing flows with the same match, and output action port
// or outPort
OFFlowDelete fm = sw.getOFFactory().buildFlowDelete()
.setMatch(match)
.setOutPort(outPort)
.build();
try {
sw.write(fm);
} catch (Exception e) {
log.error("Failed to clear flows on switch {} - {}", this, e);
}
}
/**
* Deletes flows with similar matches and output action ports on the
* specified switch, then recursively does the same on the neighboring
* switches that feed those flows.
*
* @param sw
* the switch to query flows on
* @param invalidOutportAndMatch
* a map from output action ports (which follow the route back to
* the base switch) to the problematic matches we wish to find and
* remove
*/
public void deleteInvalidFlows(IOFSwitch sw, Map<OFPort, List<Match>> invalidOutportAndMatch) {
log.debug("Deleting invalid flows on switch : " + sw.getId());
// A map that holds the input ports and invalid matches on a switch
Map<OFPort, List<Match>> invalidNeighborIngressAndMatches = new HashMap<OFPort, List<Match>>();
for (OFPort outPort : invalidOutportAndMatch.keySet()) {
// Get the flows on the switch
List<OFFlowStatsReply> flows = getFlows(sw, outPort);
// Analyze all the flows with outPorts pointing to problematic route
for (OFFlowStatsReply flow : flows) {
for (OFFlowStatsEntry entry : flow.getEntries()) {
// Loop through all the problematic matches
for (Match match : invalidOutportAndMatch.get(outPort)) {
// Compare the problematic matches with the match of the
// flow on the switch
if (entry.getMatch().get(MatchField.ETH_DST).equals(match.get(MatchField.ETH_DST))
&& entry.getMatch().get(MatchField.ETH_SRC).equals(match.get(MatchField.ETH_SRC))
&& entry.getMatch().get(MatchField.ETH_TYPE).equals(match.get(MatchField.ETH_TYPE))
&& entry.getMatch().get(MatchField.VLAN_VID).equals(match.get(MatchField.VLAN_VID))
&& entry.getMatch().get(MatchField.IPV4_DST).equals(match.get(MatchField.IPV4_DST))
&& entry.getMatch().get(MatchField.IP_PROTO).equals(match.get(MatchField.IP_PROTO))
&& entry.getMatch().get(MatchField.IPV4_SRC).equals(match.get(MatchField.IPV4_SRC))
&& entry.getMatch().get(MatchField.IP_DSCP).equals(match.get(MatchField.IP_DSCP)) // dscp and ecn replace tos
&& entry.getMatch().get(MatchField.IP_ECN).equals(match.get(MatchField.IP_ECN))) {
// Here we utilize an index of input ports which point
// to multiple invalid matches
if (invalidNeighborIngressAndMatches.containsKey(match.get(MatchField.IN_PORT)))
// If the input port is already in the index, add
// the match to its list
invalidNeighborIngressAndMatches.get(match.get(MatchField.IN_PORT))
.add(match);
else {
// Otherwise create a new list and add it to the
// index
List<Match> matches = new ArrayList<Match>();
matches.add(match);
invalidNeighborIngressAndMatches.put(match.get(MatchField.IN_PORT), matches);
}
// Remove flows from the switch with the invalid match
// and outPort
clearFlowMods(sw, entry.getMatch(), outPort);
}
}
}
}
// Create a list of neighboring switches we need to check for
// invalid flows
Map<IOFSwitch, Map<OFPort, List<Match>>> neighborSwitches = new HashMap<IOFSwitch, Map<OFPort, List<Match>>>();
// Loop through all the links
for (Link link : links.keySet()) {
// Filter out links we care about
if (link.getDst().equals(sw.getId())) {
// Loop through the ingressPorts that are involved in
// invalid flows on neighboring switches
for (Entry<OFPort, List<Match>> ingressPort : invalidNeighborIngressAndMatches.entrySet()) {
// Filter out invalid links by matching the link
// destination port to our invalid flows ingress port
if (link.getDstPort().equals(ingressPort.getKey())) {
// Generate a match and outPort map since I don't
// want to create an object
Map<OFPort, List<Match>> invalidNeighborOutportAndMatch = new HashMap<OFPort, List<Match>>();
invalidNeighborOutportAndMatch.put(link.getSrcPort(),
ingressPort.getValue());
// Link a neighbor switch's invalid match and
// outport to their Switch object
neighborSwitches.put(switchService.getSwitch(link.getSrc()), invalidNeighborOutportAndMatch);
}
}
}
}
log.debug("We have " + neighborSwitches.size() + " neighbors to deal with!");
// Loop through all the neighbor switches we found to have
// invalid matches
for (IOFSwitch neighborSwitch : neighborSwitches.keySet()) {
log.debug("NeighborSwitch ID : " + neighborSwitch.getId());
// Recursively seek out and delete invalid flows on the
// neighbor switch
deleteInvalidFlows(neighborSwitch, neighborSwitches.get(neighborSwitch));
}
}
}
}
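// A minimal, self-contained sketch of the "index of ingress port -> invalid matches"
// grouping used in reconcileFlows() and deleteInvalidFlows() above, written with
// Map.computeIfAbsent instead of the explicit containsKey/put branches. Plain strings
// and integers stand in for Floodlight's Match and OFPort types; the data is invented
// purely for illustration.
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class IngressGroupingSketch {
    public static void main(String[] args) {
        // (ingressPort, match) pairs as they would be extracted from flow stats entries.
        int[] ingressPorts = {1, 2, 1, 3};
        String[] matches = {"matchA", "matchB", "matchC", "matchD"};

        Map<Integer, List<String>> invalidIngressAndMatches = new HashMap<>();
        for (int i = 0; i < ingressPorts.length; i++) {
            // Create the list for a port the first time it is seen, then append to it.
            invalidIngressAndMatches
                    .computeIfAbsent(ingressPorts[i], port -> new ArrayList<>())
                    .add(matches[i]);
        }
        // {1=[matchA, matchC], 2=[matchB], 3=[matchD]}
        System.out.println(invalidIngressAndMatches);
    }
}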
|
|
package org.daisy.pipeline.nlp.breakdetect.calabash.impl;
import java.io.File;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.regex.Pattern;
import javax.xml.transform.sax.SAXSource;
import net.sf.saxon.s9api.Axis;
import net.sf.saxon.s9api.DocumentBuilder;
import net.sf.saxon.s9api.Processor;
import net.sf.saxon.s9api.QName;
import net.sf.saxon.s9api.SaxonApiException;
import net.sf.saxon.s9api.XdmNode;
import net.sf.saxon.s9api.XdmNodeKind;
import net.sf.saxon.s9api.XdmSequenceIterator;
import org.daisy.pipeline.nlp.DummyLangDetector;
import org.daisy.pipeline.nlp.breakdetect.calabash.impl.DummyLexer.DummyLexerToken;
import org.daisy.pipeline.nlp.breakdetect.calabash.impl.DummyLexer.Strategy;
import org.daisy.pipeline.nlp.lexing.LexService.LexerInitException;
import org.daisy.pipeline.nlp.lexing.LexService.LexerToken;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.xml.sax.InputSource;
import com.xmlcalabash.util.TreeWriter;
/**
* These tests only check that the text is not modified by the step when it is
* run on actual documents. They also check that no exception or runtime error
* is raised.
*/
public class ActualFilesTest implements TreeWriterFactory {
static private Processor Proc;
static private DocumentBuilder Builder;
static private DummyLexerToken LexerToken;
static private HashMap<Locale, LexerToken> Lexers;
@BeforeClass
static public void setUp() throws URISyntaxException, LexerInitException {
Proc = new Processor(true);
Builder = Proc.newDocumentBuilder();
LexerToken = (DummyLexerToken) new DummyLexer().newToken();
Lexers = new HashMap<Locale, LexerToken>();
Lexers.put(null, LexerToken);
}
@Override
public TreeWriter newInstance() {
return new TreeWriter(Proc);
}
private static XdmNode getRoot(XdmNode node) {
XdmSequenceIterator iter = node.axisIterator(Axis.CHILD);
while (iter.hasNext()) {
XdmNode n = (XdmNode) iter.next();
if (n.getNodeKind() == XdmNodeKind.ELEMENT)
return n;
}
return null;
}
private static void getText(XdmNode node, StringBuilder sb) {
if (node.getNodeKind() == XdmNodeKind.TEXT
|| node.getNodeKind() == XdmNodeKind.COMMENT
|| node.getNodeKind() == XdmNodeKind.PROCESSING_INSTRUCTION) {
sb.append(node.toString());
}
XdmSequenceIterator iter = node.axisIterator(Axis.CHILD);
while (iter.hasNext()) {
getText((XdmNode) iter.next(), sb);
}
}
private static String getText(XdmNode node) {
StringBuilder sb = new StringBuilder();
getText(node, sb);
return sb.toString();
}
private static void getUnmutableElements(XdmNode node, StringBuilder sb,
FormatSpecifications specs) {
if (node.getNodeKind() == XdmNodeKind.TEXT
|| node.getNodeKind() == XdmNodeKind.COMMENT
|| node.getNodeKind() == XdmNodeKind.PROCESSING_INSTRUCTION) {
return;
}
boolean toprint = false;
XdmSequenceIterator iter;
if (node.getNodeName() != null
&& !specs.inlineElements.contains(node.getNodeName().getLocalName())
&& !specs.wordTag.equals(node.getNodeName())
&& !specs.sentenceTag.equals(node.getNodeName())) {
toprint = true;
sb.append(node.getNodeName().getLocalName());
iter = node.axisIterator(Axis.ATTRIBUTE);
while (iter.hasNext()) {
XdmNode attr = (XdmNode) iter.next();
sb.append(attr.getNodeName().getLocalName());
sb.append(node.getAttributeValue(attr.getNodeName()));
}
}
iter = node.axisIterator(Axis.CHILD);
while (iter.hasNext()) {
getUnmutableElements((XdmNode) iter.next(), sb, specs);
}
if (toprint) {
sb.append("{" + node.getNodeName().getLocalName() + "}");
}
}
private static String getUnmutableElements(XdmNode node, FormatSpecifications specs) {
StringBuilder sb = new StringBuilder();
getUnmutableElements(node, sb, specs);
return sb.toString();
}
private static void getOrphanWords(XdmNode node, boolean isInsideSentence,
FormatSpecifications specs, Map<String, Integer> orphans) {
if (node.getNodeKind() != XdmNodeKind.ELEMENT) {
return;
}
if (specs.sentenceTag.getLocalName().equals(node.getNodeName().getLocalName())) {
isInsideSentence = true;
} else if (specs.wordTag.getLocalName().equals(node.getNodeName().getLocalName())
&& !isInsideSentence) {
StringBuilder sb = new StringBuilder();
XdmNode parent = node.getParent();
while (parent.getNodeKind() == XdmNodeKind.ELEMENT) {
sb.append(parent.getNodeName().getLocalName() + "_"
+ parent.getAttributeValue(new QName("id")) + "_"
+ parent.getAttributeValue(new QName("class")) + "/");
parent = parent.getParent();
}
String key = sb.toString();
Integer count = orphans.get(key);
if (count == null)
count = 0;
orphans.put(key, count + 1);
}
XdmSequenceIterator iter = node.axisIterator(Axis.CHILD);
while (iter.hasNext()) {
getOrphanWords((XdmNode) iter.next(), isInsideSentence, specs, orphans);
}
}
private static boolean hasMoreOrphans(XdmNode before, XdmNode after,
FormatSpecifications specs) {
Map<String, Integer> obefore = new HashMap<String, Integer>();
Map<String, Integer> oafter = new HashMap<String, Integer>();
getOrphanWords(before, false, specs, obefore);
getOrphanWords(after, false, specs, oafter);
for (Map.Entry<String, Integer> e : oafter.entrySet()) {
Integer b = obefore.get(e.getKey());
if (b == null || b < e.getValue()) {
System.out.println("orphan before: " + b + " orphan now: " + e.getValue()
+ "; path " + e.getKey());
return true;
}
}
return false;
}
private static boolean hasTooManyLevels(XdmNode node, boolean isInsideTheElement,
String theElementName) {
if (node.getNodeKind() != XdmNodeKind.ELEMENT) {
return false;
}
if (theElementName.equals(node.getNodeName().getLocalName())) {
if (isInsideTheElement) {
return true;
}
isInsideTheElement = true;
}
XdmSequenceIterator iter = node.axisIterator(Axis.CHILD);
while (iter.hasNext()) {
if (hasTooManyLevels((XdmNode) iter.next(), isInsideTheElement, theElementName))
return true;
}
return false;
}
private static boolean sentenceContainNonInline(XdmNode node, boolean isInsideSentence,
FormatSpecifications specs) {
if (node.getNodeKind() != XdmNodeKind.ELEMENT) {
return false;
}
if (specs.sentenceTag.getLocalName().equals(node.getNodeName().getLocalName())) {
isInsideSentence = true;
} else if (!specs.inlineElements.contains(node.getNodeName().getLocalName())
&& isInsideSentence) {
return true;
}
XdmSequenceIterator iter = node.axisIterator(Axis.CHILD);
while (iter.hasNext()) {
if (sentenceContainNonInline((XdmNode) iter.next(), isInsideSentence, specs))
return true;
}
return false;
}
private static void getIDs(XdmNode node, Map<String, Integer> ids) {
if (node.getNodeKind() != XdmNodeKind.ELEMENT
&& node.getNodeKind() != XdmNodeKind.DOCUMENT) {
return;
}
String id = node.getAttributeValue(new QName("id"));
if (id != null) {
Integer existing = ids.get(id);
if (existing == null) {
existing = 0;
}
ids.put(id, existing + 1);
}
XdmSequenceIterator iter = node.axisIterator(Axis.CHILD);
while (iter.hasNext()) {
getIDs((XdmNode) iter.next(), ids);
}
}
public static int numberOfDuplicatedIDs(XdmNode ref, XdmNode actual) {
Map<String, Integer> idrefs = new HashMap<String, Integer>();
Map<String, Integer> idactuals = new HashMap<String, Integer>();
getIDs(ref, idrefs);
getIDs(actual, idactuals);
int res = 0;
for (Map.Entry<String, Integer> e : idrefs.entrySet()) {
if (e.getValue() == 1 && !(Integer.valueOf(1).equals(idactuals.get(e.getKey())))) {
++res;
}
}
return res;
}
private static boolean notLeftTrimmed(XdmNode node) {
return Pattern.compile("^[\\p{Z}\\s]").matcher(node.getStringValue()).find();
}
private static boolean notRightTrimmed(XdmNode node) {
return Pattern.compile("[\\p{Z}\\s]$").matcher(node.getStringValue()).find();
}
private boolean isWellTrimmed(XdmNode node, String lexElement, String subLexElement) {
if (node.getNodeKind() != XdmNodeKind.DOCUMENT
&& node.getNodeKind() != XdmNodeKind.ELEMENT)
return true;
XdmSequenceIterator iter = node.axisIterator(Axis.CHILD);
List<XdmNode> children = new ArrayList<XdmNode>();
while (iter.hasNext()) {
XdmNode child = (XdmNode) iter.next();
if (!isWellTrimmed(child, lexElement, subLexElement))
return false;
children.add(child);
}
if (node.getNodeName() != null && lexElement.equals(node.getNodeName().getLocalName())) {
XdmNode first = children.get(0);
XdmNode last = children.get(children.size() - 1);
if (children.size() == 1 && first.getNodeKind() != XdmNodeKind.TEXT
&& !first.getNodeName().getLocalName().equals(subLexElement)) {
System.out.print("bad location for: " + node);
return false;
}
if (first.getNodeKind() == XdmNodeKind.TEXT && notLeftTrimmed(first)) {
System.out.print("bad left trimming for: " + node);
return false;
}
if (last.getNodeKind() == XdmNodeKind.TEXT && notRightTrimmed(last)) {
System.out.print("bad right trimming for: " + node);
return false;
}
}
return true;
}
private void check(String file, String[] inlineElements, String[] spaceEquivalents)
throws SaxonApiException, LexerInitException {
for (boolean forbidAnyDuplication : new boolean[]{
false, true
}) {
for (Strategy strategy : new Strategy[]{
Strategy.ONE_SENTENCE, Strategy.SPACE_SEPARARED_SENTENCES,
Strategy.SPACE_SEPARATED_WORDS, Strategy.REGULAR
}) {
LexerToken.strategy = strategy;
SAXSource source = new SAXSource(new InputSource(getClass()
.getResourceAsStream(file)));
XdmNode document = Builder.build(source);
FormatSpecifications specs = new FormatSpecifications("http://tmp", "sss",
"www", "http://ns", "lang", Arrays.asList(inlineElements), Arrays
.asList(spaceEquivalents), Arrays.asList(spaceEquivalents),
null, null);
XdmNode tree = new XmlBreakRebuilder()
.rebuild(this, Lexers, document, specs, new DummyLangDetector(),
forbidAnyDuplication, new ArrayList<String>());
//check the tree well-formedness
XdmNode root = getRoot(tree);
Assert.assertFalse(hasMoreOrphans(getRoot(document), root, specs));
Assert.assertFalse(sentenceContainNonInline(root, false, specs));
Assert.assertFalse(hasTooManyLevels(root, false, specs.wordTag.getLocalName()));
Assert.assertFalse(hasTooManyLevels(root, false, specs.sentenceTag
.getLocalName()));
Assert.assertEquals(0, numberOfDuplicatedIDs(document, tree));
Assert.assertTrue(isWellTrimmed(tree, specs.sentenceTag.getLocalName(),
specs.wordTag.getLocalName()));
Assert.assertTrue(isWellTrimmed(tree, specs.wordTag.getLocalName(), null));
//check that the content has not changed
String ref = getUnmutableElements(document, specs);
String processed = getUnmutableElements(tree, specs);
Assert.assertEquals(ref, processed);
Assert.assertEquals(getText(document), getText(tree));
}
}
}
private static String[] DTBookInline = new String[]{
"strong", "a", "acronym", "abbr", "dfn", "linenum", "pagenum", "pagebreak",
"samp", "span", "sub", "w", "noteref"
};
private static String[] ZedaiInline = new String[]{
"emph", "span", "ref", "char", "term", "sub", "ref", "sup", "pagebreak", "name"
};
private static String[] EpubInline = new String[]{
"span", "i", "b", "a", "br", "del", "font", "ruby", "s", "small", "strike",
"strong", "sup", "u", "q", "address", "abbr", "em", "style"
};
private static String[] DTBookSpace = new String[]{
"acronym", "span", "linenum", "pagenum", "samp", "noteref"
};
private static String[] ZedaiSpace = new String[]{
"span"
};
private static String[] EpubSpace = new String[]{
"span", "br", "ruby", "s", "address", "abbr", "style"
};
@Test
public void dtbook1() throws SaxonApiException, LexerInitException {
check("/dtbook_test.xml", DTBookInline, DTBookSpace);
}
@Test
public void zedai1() throws SaxonApiException, LexerInitException {
check("/zedai_test.xml", ZedaiInline, ZedaiSpace);
}
@Test
public void epub1() throws SaxonApiException, LexerInitException {
check("/epub3_test.html", EpubInline, EpubSpace);
}
private Collection<String> getSamples(String suffix) {
String path = getClass().getResource("/").getPath();
File[] files = new File(path).listFiles();
ArrayList<String> samples = new ArrayList<String>();
for (File file : files) {
if (file.getName().endsWith(suffix))
samples.add("/" + file.getName());
}
return samples;
}
@Test
public void extraDtbook() throws SaxonApiException, LexerInitException {
for (String sample : getSamples(".dtbook.xml")) {
check(sample, DTBookInline, DTBookSpace);
}
}
@Test
public void extraZedai() throws SaxonApiException, LexerInitException {
for (String sample : getSamples(".zedai.xml")) {
check(sample, ZedaiInline, ZedaiSpace);
}
}
@Test
public void extraEpub3() throws SaxonApiException, LexerInitException {
for (String sample : getSamples(".epub3.html")) {
check(sample, EpubInline, EpubSpace);
}
}
}
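// A minimal sketch of the left/right trim checks used by notLeftTrimmed() and
// notRightTrimmed() in ActualFilesTest above: the \p{Z} class catches Unicode space
// separators (e.g. the no-break space) that \s alone would miss. The class name and
// sample strings are hypothetical, for illustration only.
import java.util.regex.Pattern;

public class TrimCheckSketch {
    static final Pattern LEADING = Pattern.compile("^[\\p{Z}\\s]");
    static final Pattern TRAILING = Pattern.compile("[\\p{Z}\\s]$");

    public static void main(String[] args) {
        System.out.println(LEADING.matcher(" hello").find());      // true  (regular leading space)
        System.out.println(LEADING.matcher("\u00A0hello").find()); // true  (leading no-break space)
        System.out.println(TRAILING.matcher("hello").find());      // false (already right-trimmed)
    }
}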
|
|
/**
*******************************************************************************
* Copyright (C) 1996-2010, International Business Machines Corporation and
* others. All Rights Reserved.
*******************************************************************************
*/
package com.ibm.icu.dev.test.lang;
import java.io.BufferedReader;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import com.ibm.icu.dev.test.TestFmwk;
import com.ibm.icu.dev.test.TestUtil;
import com.ibm.icu.impl.Utility;
import com.ibm.icu.lang.UCharacter;
import com.ibm.icu.lang.UProperty;
import com.ibm.icu.text.BreakIterator;
import com.ibm.icu.text.RuleBasedBreakIterator;
import com.ibm.icu.text.UTF16;
import com.ibm.icu.util.ULocale;
/**
* <p>Testing character casing</p>
* <p>Mostly following the test cases in strcase.cpp for ICU</p>
* @author Syn Wee Quek
* @since March 14 2002
*/
public final class UCharacterCaseTest extends TestFmwk
{
// constructor -----------------------------------------------------------
/**
* Constructor
*/
public UCharacterCaseTest()
{
}
// public methods --------------------------------------------------------
public static void main(String[] arg)
{
try
{
UCharacterCaseTest test = new UCharacterCaseTest();
test.run(arg);
}
catch (Exception e)
{
e.printStackTrace();
}
}
/**
* Testing the uppercase and lowercase functions of UCharacter
*/
public void TestCharacter()
{
for (int i = 0; i < CHARACTER_LOWER_.length; i ++) {
if (UCharacter.isLetter(CHARACTER_LOWER_[i]) &&
!UCharacter.isLowerCase(CHARACTER_LOWER_[i])) {
errln("FAIL isLowerCase test for \\u" +
hex(CHARACTER_LOWER_[i]));
break;
}
if (UCharacter.isLetter(CHARACTER_UPPER_[i]) &&
!(UCharacter.isUpperCase(CHARACTER_UPPER_[i]) ||
UCharacter.isTitleCase(CHARACTER_UPPER_[i]))) {
errln("FAIL isUpperCase test for \\u" +
hex(CHARACTER_UPPER_[i]));
break;
}
if (CHARACTER_LOWER_[i] !=
UCharacter.toLowerCase(CHARACTER_UPPER_[i]) ||
(CHARACTER_UPPER_[i] !=
UCharacter.toUpperCase(CHARACTER_LOWER_[i]) &&
CHARACTER_UPPER_[i] !=
UCharacter.toTitleCase(CHARACTER_LOWER_[i]))) {
errln("FAIL case conversion test for \\u" +
hex(CHARACTER_UPPER_[i]) +
" to \\u" + hex(CHARACTER_LOWER_[i]));
break;
}
if (CHARACTER_LOWER_[i] !=
UCharacter.toLowerCase(CHARACTER_LOWER_[i])) {
errln("FAIL lower case conversion test for \\u" +
hex(CHARACTER_LOWER_[i]));
break;
}
if (CHARACTER_UPPER_[i] !=
UCharacter.toUpperCase(CHARACTER_UPPER_[i]) &&
CHARACTER_UPPER_[i] !=
UCharacter.toTitleCase(CHARACTER_UPPER_[i])) {
errln("FAIL upper case conversion test for \\u" +
hex(CHARACTER_UPPER_[i]));
break;
}
logln("Ok \\u" + hex(CHARACTER_UPPER_[i]) + " and \\u" +
hex(CHARACTER_LOWER_[i]));
}
}
public void TestFolding()
{
// test simple case folding
for (int i = 0; i < FOLDING_SIMPLE_.length; i += 3) {
if (UCharacter.foldCase(FOLDING_SIMPLE_[i], true) !=
FOLDING_SIMPLE_[i + 1]) {
errln("FAIL: foldCase(\\u" + hex(FOLDING_SIMPLE_[i]) +
", true) should be \\u" + hex(FOLDING_SIMPLE_[i + 1]));
}
if (UCharacter.foldCase(FOLDING_SIMPLE_[i],
UCharacter.FOLD_CASE_DEFAULT) !=
FOLDING_SIMPLE_[i + 1]) {
errln("FAIL: foldCase(\\u" + hex(FOLDING_SIMPLE_[i]) +
", UCharacter.FOLD_CASE_DEFAULT) should be \\u"
+ hex(FOLDING_SIMPLE_[i + 1]));
}
if (UCharacter.foldCase(FOLDING_SIMPLE_[i], false) !=
FOLDING_SIMPLE_[i + 2]) {
errln("FAIL: foldCase(\\u" + hex(FOLDING_SIMPLE_[i]) +
", false) should be \\u" + hex(FOLDING_SIMPLE_[i + 2]));
}
if (UCharacter.foldCase(FOLDING_SIMPLE_[i],
UCharacter.FOLD_CASE_EXCLUDE_SPECIAL_I) !=
FOLDING_SIMPLE_[i + 2]) {
errln("FAIL: foldCase(\\u" + hex(FOLDING_SIMPLE_[i]) +
", UCharacter.FOLD_CASE_EXCLUDE_SPECIAL_I) should be \\u"
+ hex(FOLDING_SIMPLE_[i + 2]));
}
}
// Test full string case folding with default option and separate
// buffers
if (!FOLDING_DEFAULT_[0].equals(UCharacter.foldCase(FOLDING_MIXED_[0], true))) {
errln("FAIL: foldCase(" + prettify(FOLDING_MIXED_[0]) +
", true)=" + prettify(UCharacter.foldCase(FOLDING_MIXED_[0], true)) +
" should be " + prettify(FOLDING_DEFAULT_[0]));
}
if (!FOLDING_DEFAULT_[0].equals(UCharacter.foldCase(FOLDING_MIXED_[0], UCharacter.FOLD_CASE_DEFAULT))) {
errln("FAIL: foldCase(" + prettify(FOLDING_MIXED_[0]) +
", UCharacter.FOLD_CASE_DEFAULT)=" + prettify(UCharacter.foldCase(FOLDING_MIXED_[0], UCharacter.FOLD_CASE_DEFAULT))
+ " should be " + prettify(FOLDING_DEFAULT_[0]));
}
if (!FOLDING_EXCLUDE_SPECIAL_I_[0].equals(
UCharacter.foldCase(FOLDING_MIXED_[0], false))) {
errln("FAIL: foldCase(" + prettify(FOLDING_MIXED_[0]) +
", false)=" + prettify(UCharacter.foldCase(FOLDING_MIXED_[0], false))
+ " should be " + prettify(FOLDING_EXCLUDE_SPECIAL_I_[0]));
}
if (!FOLDING_EXCLUDE_SPECIAL_I_[0].equals(
UCharacter.foldCase(FOLDING_MIXED_[0], UCharacter.FOLD_CASE_EXCLUDE_SPECIAL_I))) {
errln("FAIL: foldCase(" + prettify(FOLDING_MIXED_[0]) +
", UCharacter.FOLD_CASE_EXCLUDE_SPECIAL_I)=" + prettify(UCharacter.foldCase(FOLDING_MIXED_[0], UCharacter.FOLD_CASE_EXCLUDE_SPECIAL_I))
+ " should be " + prettify(FOLDING_EXCLUDE_SPECIAL_I_[0]));
}
if (!FOLDING_DEFAULT_[1].equals(UCharacter.foldCase(FOLDING_MIXED_[1], true))) {
errln("FAIL: foldCase(" + prettify(FOLDING_MIXED_[1]) +
", true)=" + prettify(UCharacter.foldCase(FOLDING_MIXED_[1], true))
+ " should be " + prettify(FOLDING_DEFAULT_[1]));
}
if (!FOLDING_DEFAULT_[1].equals(UCharacter.foldCase(FOLDING_MIXED_[1], UCharacter.FOLD_CASE_DEFAULT))) {
errln("FAIL: foldCase(" + prettify(FOLDING_MIXED_[1]) +
", UCharacter.FOLD_CASE_DEFAULT)=" + prettify(UCharacter.foldCase(FOLDING_MIXED_[1], UCharacter.FOLD_CASE_DEFAULT))
+ " should be " + prettify(FOLDING_DEFAULT_[1]));
}
// alternate handling for dotted I/dotless i (U+0130, U+0131)
if (!FOLDING_EXCLUDE_SPECIAL_I_[1].equals(
UCharacter.foldCase(FOLDING_MIXED_[1], false))) {
errln("FAIL: foldCase(" + prettify(FOLDING_MIXED_[1]) +
", false)=" + prettify(UCharacter.foldCase(FOLDING_MIXED_[1], false))
+ " should be " + prettify(FOLDING_EXCLUDE_SPECIAL_I_[1]));
}
if (!FOLDING_EXCLUDE_SPECIAL_I_[1].equals(
UCharacter.foldCase(FOLDING_MIXED_[1], UCharacter.FOLD_CASE_EXCLUDE_SPECIAL_I))) {
errln("FAIL: foldCase(" + prettify(FOLDING_MIXED_[1]) +
", UCharacter.FOLD_CASE_EXCLUDE_SPECIAL_I)=" + prettify(UCharacter.foldCase(FOLDING_MIXED_[1], UCharacter.FOLD_CASE_EXCLUDE_SPECIAL_I))
+ " should be "
+ prettify(FOLDING_EXCLUDE_SPECIAL_I_[1]));
}
}
/**
* Testing the string case mapping methods
*/
public void TestUpper()
{
// uppercase with root locale and in the same buffer
if (!UPPER_ROOT_.equals(UCharacter.toUpperCase(UPPER_BEFORE_))) {
errln("Fail " + UPPER_BEFORE_ + " after uppercase should be " +
UPPER_ROOT_ + " instead got " +
UCharacter.toUpperCase(UPPER_BEFORE_));
}
// uppercase with turkish locale and separate buffers
if (!UPPER_TURKISH_.equals(UCharacter.toUpperCase(TURKISH_LOCALE_,
UPPER_BEFORE_))) {
errln("Fail " + UPPER_BEFORE_ +
" after turkish-sensitive uppercase should be " +
UPPER_TURKISH_ + " instead of " +
UCharacter.toUpperCase(TURKISH_LOCALE_, UPPER_BEFORE_));
}
// uppercase a short string with root locale
if (!UPPER_MINI_UPPER_.equals(UCharacter.toUpperCase(UPPER_MINI_))) {
errln("error in toUpper(root locale)=\"" + UPPER_MINI_ +
"\" expected \"" + UPPER_MINI_UPPER_ + "\"");
}
if (!SHARED_UPPERCASE_TOPKAP_.equals(
UCharacter.toUpperCase(SHARED_LOWERCASE_TOPKAP_))) {
errln("toUpper failed: expected \"" +
SHARED_UPPERCASE_TOPKAP_ + "\", got \"" +
UCharacter.toUpperCase(SHARED_LOWERCASE_TOPKAP_) + "\".");
}
if (!SHARED_UPPERCASE_TURKISH_.equals(
UCharacter.toUpperCase(TURKISH_LOCALE_,
SHARED_LOWERCASE_TOPKAP_))) {
errln("toUpper failed: expected \"" +
SHARED_UPPERCASE_TURKISH_ + "\", got \"" +
UCharacter.toUpperCase(TURKISH_LOCALE_,
SHARED_LOWERCASE_TOPKAP_) + "\".");
}
if (!SHARED_UPPERCASE_GERMAN_.equals(
UCharacter.toUpperCase(GERMAN_LOCALE_,
SHARED_LOWERCASE_GERMAN_))) {
errln("toUpper failed: expected \"" + SHARED_UPPERCASE_GERMAN_
+ "\", got \"" + UCharacter.toUpperCase(GERMAN_LOCALE_,
SHARED_LOWERCASE_GERMAN_) + "\".");
}
if (!SHARED_UPPERCASE_GREEK_.equals(
UCharacter.toUpperCase(SHARED_LOWERCASE_GREEK_))) {
errln("toLower failed: expected \"" + SHARED_UPPERCASE_GREEK_ +
"\", got \"" + UCharacter.toUpperCase(
SHARED_LOWERCASE_GREEK_) + "\".");
}
}
public void TestLower()
{
if (!LOWER_ROOT_.equals(UCharacter.toLowerCase(LOWER_BEFORE_))) {
errln("Fail " + LOWER_BEFORE_ + " after lowercase should be " +
LOWER_ROOT_ + " instead of " +
UCharacter.toLowerCase(LOWER_BEFORE_));
}
// lowercase with turkish locale
if (!LOWER_TURKISH_.equals(UCharacter.toLowerCase(TURKISH_LOCALE_,
LOWER_BEFORE_))) {
errln("Fail " + LOWER_BEFORE_ +
" after turkish-sensitive lowercase should be " +
LOWER_TURKISH_ + " instead of " +
UCharacter.toLowerCase(TURKISH_LOCALE_, LOWER_BEFORE_));
}
if (!SHARED_LOWERCASE_ISTANBUL_.equals(
UCharacter.toLowerCase(SHARED_UPPERCASE_ISTANBUL_))) {
errln("1. toLower failed: expected \"" +
SHARED_LOWERCASE_ISTANBUL_ + "\", got \"" +
UCharacter.toLowerCase(SHARED_UPPERCASE_ISTANBUL_) + "\".");
}
if (!SHARED_LOWERCASE_TURKISH_.equals(
UCharacter.toLowerCase(TURKISH_LOCALE_,
SHARED_UPPERCASE_ISTANBUL_))) {
errln("2. toLower failed: expected \"" +
SHARED_LOWERCASE_TURKISH_ + "\", got \"" +
UCharacter.toLowerCase(TURKISH_LOCALE_,
SHARED_UPPERCASE_ISTANBUL_) + "\".");
}
if (!SHARED_LOWERCASE_GREEK_.equals(
UCharacter.toLowerCase(GREEK_LOCALE_,
SHARED_UPPERCASE_GREEK_))) {
errln("toLower failed: expected \"" + SHARED_LOWERCASE_GREEK_ +
"\", got \"" + UCharacter.toLowerCase(GREEK_LOCALE_,
SHARED_UPPERCASE_GREEK_) + "\".");
}
}
public void TestTitleRegression() throws java.io.IOException {
boolean isIgnorable = UCharacter.hasBinaryProperty('\'', UProperty.CASE_IGNORABLE);
assertTrue("Case Ignorable check of ASCII apostrophe", isIgnorable);
assertEquals("Titlecase check",
"The Quick Brown Fox Can't Jump Over The Lazy Dogs.",
UCharacter.toTitleCase(ULocale.ENGLISH, "THE QUICK BROWN FOX CAN'T JUMP OVER THE LAZY DOGS.", null));
}
public void TestTitle()
{
try{
for (int i = 0; i < TITLE_DATA_.length;) {
String test = TITLE_DATA_[i++];
String expected = TITLE_DATA_[i++];
ULocale locale = new ULocale(TITLE_DATA_[i++]);
int breakType = Integer.parseInt(TITLE_DATA_[i++]);
String optionsString = TITLE_DATA_[i++];
BreakIterator iter =
breakType >= 0 ?
BreakIterator.getBreakInstance(locale, breakType) :
breakType == -2 ?
// Open a trivial break iterator that only delivers { 0, length }
// or even just { 0 } as boundaries.
new RuleBasedBreakIterator(".*;") :
null;
int options = 0;
if (optionsString.indexOf('L') >= 0) {
options |= UCharacter.TITLECASE_NO_LOWERCASE;
}
if (optionsString.indexOf('A') >= 0) {
options |= UCharacter.TITLECASE_NO_BREAK_ADJUSTMENT;
}
String result = UCharacter.toTitleCase(locale, test, iter, options);
if (!expected.equals(result)) {
errln("titlecasing for " + prettify(test) + " (options " + options + ") should be " +
prettify(expected) + " but got " +
prettify(result));
}
if (options == 0) {
result = UCharacter.toTitleCase(locale, test, iter);
if (!expected.equals(result)) {
errln("titlecasing for " + prettify(test) + " should be " +
prettify(expected) + " but got " +
prettify(result));
}
}
}
}catch(Exception ex){
warnln("Could not find data for BreakIterators");
}
}
public void TestDutchTitle() {
ULocale LOC_DUTCH = new ULocale("nl");
int options = 0;
options |= UCharacter.TITLECASE_NO_LOWERCASE;
BreakIterator iter = BreakIterator.getWordInstance(LOC_DUTCH);
assertEquals("Dutch titlecase check in English",
"Ijssel Igloo Ijmuiden",
UCharacter.toTitleCase(ULocale.ENGLISH, "ijssel igloo IJMUIDEN", null));
assertEquals("Dutch titlecase check in Dutch",
"IJssel Igloo IJmuiden",
UCharacter.toTitleCase(LOC_DUTCH, "ijssel igloo IJMUIDEN", null));
iter.setText("ijssel igloo IjMUIdEN iPoD ijenough");
assertEquals("Dutch titlecase check in Dutch with nolowercase option",
"IJssel Igloo IJMUIdEN IPoD IJenough",
UCharacter.toTitleCase(LOC_DUTCH, "ijssel igloo IjMUIdEN iPoD ijenough", iter, options));
}
public void TestSpecial()
{
for (int i = 0; i < SPECIAL_LOCALES_.length; i ++) {
int j = i * 3;
Locale locale = SPECIAL_LOCALES_[i];
String str = SPECIAL_DATA_[j];
if (locale != null) {
if (!SPECIAL_DATA_[j + 1].equals(
UCharacter.toLowerCase(locale, str))) {
errln("error lowercasing special characters " +
hex(str) + " expected " + hex(SPECIAL_DATA_[j + 1])
+ " for locale " + locale.toString() + " but got " +
hex(UCharacter.toLowerCase(locale, str)));
}
if (!SPECIAL_DATA_[j + 2].equals(
UCharacter.toUpperCase(locale, str))) {
errln("error uppercasing special characters " +
hex(str) + " expected " + SPECIAL_DATA_[j + 2]
+ " for locale " + locale.toString() + " but got " +
hex(UCharacter.toUpperCase(locale, str)));
}
}
else {
if (!SPECIAL_DATA_[j + 1].equals(
UCharacter.toLowerCase(str))) {
errln("error lowercasing special characters " +
hex(str) + " expected " + SPECIAL_DATA_[j + 1] +
" but got " +
hex(UCharacter.toLowerCase(str)));
}
if (!SPECIAL_DATA_[j + 2].equals(
UCharacter.toUpperCase(str))) {
errln("error uppercasing special characters " +
hex(str) + " expected " + SPECIAL_DATA_[j + 2] +
" but got " +
hex(UCharacter.toUpperCase(str)));
}
}
}
// turkish & azerbaijani dotless i & dotted I
// remove dot above if there was a capital I before and there are no
// more accents above
if (!SPECIAL_DOTTED_LOWER_TURKISH_.equals(UCharacter.toLowerCase(
TURKISH_LOCALE_, SPECIAL_DOTTED_))) {
errln("error in dots.toLower(tr)=\"" + SPECIAL_DOTTED_ +
"\" expected \"" + SPECIAL_DOTTED_LOWER_TURKISH_ +
"\" but got " + UCharacter.toLowerCase(TURKISH_LOCALE_,
SPECIAL_DOTTED_));
}
if (!SPECIAL_DOTTED_LOWER_GERMAN_.equals(UCharacter.toLowerCase(
GERMAN_LOCALE_, SPECIAL_DOTTED_))) {
errln("error in dots.toLower(de)=\"" + SPECIAL_DOTTED_ +
"\" expected \"" + SPECIAL_DOTTED_LOWER_GERMAN_ +
"\" but got " + UCharacter.toLowerCase(GERMAN_LOCALE_,
SPECIAL_DOTTED_));
}
// lithuanian dot above in uppercasing
if (!SPECIAL_DOT_ABOVE_UPPER_LITHUANIAN_.equals(
UCharacter.toUpperCase(LITHUANIAN_LOCALE_, SPECIAL_DOT_ABOVE_))) {
errln("error in dots.toUpper(lt)=\"" + SPECIAL_DOT_ABOVE_ +
"\" expected \"" + SPECIAL_DOT_ABOVE_UPPER_LITHUANIAN_ +
"\" but got " + UCharacter.toUpperCase(LITHUANIAN_LOCALE_,
SPECIAL_DOT_ABOVE_));
}
if (!SPECIAL_DOT_ABOVE_UPPER_GERMAN_.equals(UCharacter.toUpperCase(
GERMAN_LOCALE_, SPECIAL_DOT_ABOVE_))) {
errln("error in dots.toUpper(de)=\"" + SPECIAL_DOT_ABOVE_ +
"\" expected \"" + SPECIAL_DOT_ABOVE_UPPER_GERMAN_ +
"\" but got " + UCharacter.toUpperCase(GERMAN_LOCALE_,
SPECIAL_DOT_ABOVE_));
}
// lithuanian adds dot above to i in lowercasing if there are more
// above accents
if (!SPECIAL_DOT_ABOVE_LOWER_LITHUANIAN_.equals(
UCharacter.toLowerCase(LITHUANIAN_LOCALE_,
SPECIAL_DOT_ABOVE_UPPER_))) {
errln("error in dots.toLower(lt)=\"" + SPECIAL_DOT_ABOVE_UPPER_ +
"\" expected \"" + SPECIAL_DOT_ABOVE_LOWER_LITHUANIAN_ +
"\" but got " + UCharacter.toLowerCase(LITHUANIAN_LOCALE_,
SPECIAL_DOT_ABOVE_UPPER_));
}
if (!SPECIAL_DOT_ABOVE_LOWER_GERMAN_.equals(
UCharacter.toLowerCase(GERMAN_LOCALE_,
SPECIAL_DOT_ABOVE_UPPER_))) {
errln("error in dots.toLower(de)=\"" + SPECIAL_DOT_ABOVE_UPPER_ +
"\" expected \"" + SPECIAL_DOT_ABOVE_LOWER_GERMAN_ +
"\" but got " + UCharacter.toLowerCase(GERMAN_LOCALE_,
SPECIAL_DOT_ABOVE_UPPER_));
}
}
/**
* Tests for case mapping in the file SpecialCasing.txt
* This method reads in SpecialCasing.txt file for testing purposes.
* A default path is provided relative to the src path, however the user
* could set a system property to change the directory path.<br>
* e.g. java -DUnicodeData="data_dir_path" com.ibm.dev.test.lang.UCharacterTest
*/
public void TestSpecialCasingTxt()
{
try
{
// reading in the SpecialCasing file
BufferedReader input = TestUtil.getDataReader(
"unicode/SpecialCasing.txt");
while (true)
{
String s = input.readLine();
if (s == null) {
break;
}
if (s.length() == 0 || s.charAt(0) == '#') {
continue;
}
String chstr[] = getUnicodeStrings(s);
StringBuffer strbuffer = new StringBuffer(chstr[0]);
StringBuffer lowerbuffer = new StringBuffer(chstr[1]);
StringBuffer upperbuffer = new StringBuffer(chstr[3]);
Locale locale = null;
for (int i = 4; i < chstr.length; i ++) {
String condition = chstr[i];
if (Character.isLowerCase(chstr[i].charAt(0))) {
// specified locale
locale = new Locale(chstr[i], "");
}
else if (condition.compareToIgnoreCase("Not_Before_Dot")
== 0) {
// turns I into dotless i
}
else if (condition.compareToIgnoreCase(
"More_Above") == 0) {
strbuffer.append((char)0x300);
lowerbuffer.append((char)0x300);
upperbuffer.append((char)0x300);
}
else if (condition.compareToIgnoreCase(
"After_Soft_Dotted") == 0) {
strbuffer.insert(0, 'i');
lowerbuffer.insert(0, 'i');
String lang = "";
if (locale != null) {
lang = locale.getLanguage();
}
if (lang.equals("tr") || lang.equals("az")) {
// this is to be removed when 4.0 data comes out
// and upperbuffer.insert uncommented
// see jitterbug 2344
chstr[i] = "After_I";
strbuffer.deleteCharAt(0);
lowerbuffer.deleteCharAt(0);
i --;
continue;
// upperbuffer.insert(0, '\u0130');
}
else {
upperbuffer.insert(0, 'I');
}
}
else if (condition.compareToIgnoreCase(
"Final_Sigma") == 0) {
strbuffer.insert(0, 'c');
lowerbuffer.insert(0, 'c');
upperbuffer.insert(0, 'C');
}
else if (condition.compareToIgnoreCase("After_I") == 0) {
strbuffer.insert(0, 'I');
lowerbuffer.insert(0, 'i');
String lang = "";
if (locale != null) {
lang = locale.getLanguage();
}
if (lang.equals("tr") || lang.equals("az")) {
upperbuffer.insert(0, 'I');
}
}
}
chstr[0] = strbuffer.toString();
chstr[1] = lowerbuffer.toString();
chstr[3] = upperbuffer.toString();
if (locale == null) {
if (!UCharacter.toLowerCase(chstr[0]).equals(chstr[1])) {
errln(s);
errln("Fail: toLowerCase for character " +
Utility.escape(chstr[0]) + ", expected "
+ Utility.escape(chstr[1]) + " but resulted in " +
Utility.escape(UCharacter.toLowerCase(chstr[0])));
}
if (!UCharacter.toUpperCase(chstr[0]).equals(chstr[3])) {
errln(s);
errln("Fail: toUpperCase for character " +
Utility.escape(chstr[0]) + ", expected "
+ Utility.escape(chstr[3]) + " but resulted in " +
Utility.escape(UCharacter.toUpperCase(chstr[0])));
}
}
else {
if (!UCharacter.toLowerCase(locale, chstr[0]).equals(
chstr[1])) {
errln(s);
errln("Fail: toLowerCase for character " +
Utility.escape(chstr[0]) + ", expected "
+ Utility.escape(chstr[1]) + " but resulted in " +
Utility.escape(UCharacter.toLowerCase(locale,
chstr[0])));
}
if (!UCharacter.toUpperCase(locale, chstr[0]).equals(
chstr[3])) {
errln(s);
errln("Fail: toUpperCase for character " +
Utility.escape(chstr[0]) + ", expected "
+ Utility.escape(chstr[3]) + " but resulted in " +
Utility.escape(UCharacter.toUpperCase(locale,
chstr[0])));
}
}
}
input.close();
}
catch (Exception e)
{
e.printStackTrace();
}
}
public void TestUpperLower()
{
int upper[] = {0x0041, 0x0042, 0x00b2, 0x01c4, 0x01c6, 0x01c9, 0x01c8,
0x01c9, 0x000c};
int lower[] = {0x0061, 0x0062, 0x00b2, 0x01c6, 0x01c6, 0x01c9, 0x01c9,
0x01c9, 0x000c};
String upperTest = "abcdefg123hij.?:klmno";
String lowerTest = "ABCDEFG123HIJ.?:KLMNO";
// Checks LetterLike Symbols which were previously a source of
// confusion [Bertrand A. D. 02/04/98]
for (int i = 0x2100; i < 0x2138; i ++) {
/* Unicode 5.0 adds lowercase U+214E (TURNED SMALL F) to U+2132 (TURNED CAPITAL F) */
if (i != 0x2126 && i != 0x212a && i != 0x212b && i!=0x2132) {
if (i != UCharacter.toLowerCase(i)) { // itself
errln("Failed case conversion with itself: \\u"
+ Utility.hex(i, 4));
}
if (i != UCharacter.toUpperCase(i)) {
errln("Failed case conversion with itself: \\u"
+ Utility.hex(i, 4));
}
}
}
for (int i = 0; i < upper.length; i ++) {
if (UCharacter.toLowerCase(upper[i]) != lower[i]) {
errln("FAILED UCharacter.tolower() for \\u"
+ Utility.hex(upper[i], 4)
+ " Expected \\u" + Utility.hex(lower[i], 4)
+ " Got \\u"
+ Utility.hex(UCharacter.toLowerCase(upper[i]), 4));
}
}
logln("testing upper lower");
for (int i = 0; i < upperTest.length(); i ++) {
logln("testing to upper to lower");
if (UCharacter.isLetter(upperTest.charAt(i)) &&
!UCharacter.isLowerCase(upperTest.charAt(i))) {
errln("Failed isLowerCase test at \\u"
+ Utility.hex(upperTest.charAt(i), 4));
}
else if (UCharacter.isLetter(lowerTest.charAt(i))
&& !UCharacter.isUpperCase(lowerTest.charAt(i))) {
errln("Failed isUpperCase test at \\u"
+ Utility.hex(lowerTest.charAt(i), 4));
}
else if (upperTest.charAt(i)
!= UCharacter.toLowerCase(lowerTest.charAt(i))) {
errln("Failed case conversion from \\u"
+ Utility.hex(lowerTest.charAt(i), 4) + " To \\u"
+ Utility.hex(upperTest.charAt(i), 4));
}
else if (lowerTest.charAt(i)
!= UCharacter.toUpperCase(upperTest.charAt(i))) {
errln("Failed case conversion : \\u"
+ Utility.hex(upperTest.charAt(i), 4) + " To \\u"
+ Utility.hex(lowerTest.charAt(i), 4));
}
else if (upperTest.charAt(i)
!= UCharacter.toLowerCase(upperTest.charAt(i))) {
errln("Failed case conversion with itself: \\u"
+ Utility.hex(upperTest.charAt(i)));
}
else if (lowerTest.charAt(i)
!= UCharacter.toUpperCase(lowerTest.charAt(i))) {
errln("Failed case conversion with itself: \\u"
+ Utility.hex(lowerTest.charAt(i)));
}
}
logln("done testing upper Lower");
}
// private data members - test data --------------------------------------
private static final Locale TURKISH_LOCALE_ = new Locale("tr", "TR");
private static final Locale GERMAN_LOCALE_ = new Locale("de", "DE");
private static final Locale GREEK_LOCALE_ = new Locale("el", "GR");
private static final Locale ENGLISH_LOCALE_ = new Locale("en", "US");
private static final Locale LITHUANIAN_LOCALE_ = new Locale("lt", "LT");
private static final int CHARACTER_UPPER_[] =
{0x41, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047,
0x00b1, 0x00b2, 0xb3, 0x0048, 0x0049, 0x004a, 0x002e,
0x003f, 0x003a, 0x004b, 0x004c, 0x4d, 0x004e, 0x004f,
0x01c4, 0x01c8, 0x000c, 0x0000};
private static final int CHARACTER_LOWER_[] =
{0x61, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067,
0x00b1, 0x00b2, 0xb3, 0x0068, 0x0069, 0x006a, 0x002e,
0x003f, 0x003a, 0x006b, 0x006c, 0x6d, 0x006e, 0x006f,
0x01c6, 0x01c9, 0x000c, 0x0000};
/*
* CaseFolding.txt says about i and its cousins:
* 0049; C; 0069; # LATIN CAPITAL LETTER I
* 0049; T; 0131; # LATIN CAPITAL LETTER I
*
* 0130; F; 0069 0307; # LATIN CAPITAL LETTER I WITH DOT ABOVE
* 0130; T; 0069; # LATIN CAPITAL LETTER I WITH DOT ABOVE
* That's all.
* See CaseFolding.txt and the Unicode Standard for how to apply the case foldings.
*/
private static final int FOLDING_SIMPLE_[] = {
// input, default, exclude special i
0x61, 0x61, 0x61,
0x49, 0x69, 0x131,
0x130, 0x130, 0x69,
0x131, 0x131, 0x131,
0xdf, 0xdf, 0xdf,
0xfb03, 0xfb03, 0xfb03,
0x1040e,0x10436,0x10436,
0x5ffff,0x5ffff,0x5ffff
};
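    // Illustrative reading of the FOLDING_SIMPLE_ table above, assuming ICU4J's
    // UCharacter.foldCase(int ch, boolean defaultmapping), where true selects the
    // "default" column and false the "exclude special i" column:
    //   UCharacter.foldCase(0x49, true)   == 0x69   // I -> i
    //   UCharacter.foldCase(0x49, false)  == 0x131  // I -> dotless i
    //   UCharacter.foldCase(0x130, false) == 0x69   // I with dot above -> i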
private static final String FOLDING_MIXED_[] =
{"\u0061\u0042\u0130\u0049\u0131\u03d0\u00df\ufb03\ud93f\udfff",
"A\u00df\u00b5\ufb03\uD801\uDC0C\u0130\u0131"};
private static final String FOLDING_DEFAULT_[] =
{"\u0061\u0062\u0069\u0307\u0069\u0131\u03b2\u0073\u0073\u0066\u0066\u0069\ud93f\udfff",
"ass\u03bcffi\uD801\uDC34i\u0307\u0131"};
private static final String FOLDING_EXCLUDE_SPECIAL_I_[] =
{"\u0061\u0062\u0069\u0131\u0131\u03b2\u0073\u0073\u0066\u0066\u0069\ud93f\udfff",
"ass\u03bcffi\uD801\uDC34i\u0131"};
/**
* "IESUS CHRISTOS"
*/
private static final String SHARED_UPPERCASE_GREEK_ =
"\u0399\u0395\u03a3\u03a5\u03a3\u0020\u03a7\u03a1\u0399\u03a3\u03a4\u039f\u03a3";
/**
* "iesus christos"
*/
private static final String SHARED_LOWERCASE_GREEK_ =
"\u03b9\u03b5\u03c3\u03c5\u03c2\u0020\u03c7\u03c1\u03b9\u03c3\u03c4\u03bf\u03c2";
private static final String SHARED_LOWERCASE_TURKISH_ =
"\u0069\u0073\u0074\u0061\u006e\u0062\u0075\u006c\u002c\u0020\u006e\u006f\u0074\u0020\u0063\u006f\u006e\u0073\u0074\u0061\u006e\u0074\u0131\u006e\u006f\u0070\u006c\u0065\u0021";
private static final String SHARED_UPPERCASE_TURKISH_ =
"\u0054\u004f\u0050\u004b\u0041\u0050\u0049\u0020\u0050\u0041\u004c\u0041\u0043\u0045\u002c\u0020\u0130\u0053\u0054\u0041\u004e\u0042\u0055\u004c";
private static final String SHARED_UPPERCASE_ISTANBUL_ =
"\u0130STANBUL, NOT CONSTANTINOPLE!";
private static final String SHARED_LOWERCASE_ISTANBUL_ =
"i\u0307stanbul, not constantinople!";
private static final String SHARED_LOWERCASE_TOPKAP_ =
"topkap\u0131 palace, istanbul";
private static final String SHARED_UPPERCASE_TOPKAP_ =
"TOPKAPI PALACE, ISTANBUL";
private static final String SHARED_LOWERCASE_GERMAN_ =
"S\u00FC\u00DFmayrstra\u00DFe";
private static final String SHARED_UPPERCASE_GERMAN_ =
"S\u00DCSSMAYRSTRASSE";
private static final String UPPER_BEFORE_ =
"\u0061\u0042\u0069\u03c2\u00df\u03c3\u002f\ufb03\ufb03\ufb03\ud93f\udfff";
private static final String UPPER_ROOT_ =
"\u0041\u0042\u0049\u03a3\u0053\u0053\u03a3\u002f\u0046\u0046\u0049\u0046\u0046\u0049\u0046\u0046\u0049\ud93f\udfff";
private static final String UPPER_TURKISH_ =
"\u0041\u0042\u0130\u03a3\u0053\u0053\u03a3\u002f\u0046\u0046\u0049\u0046\u0046\u0049\u0046\u0046\u0049\ud93f\udfff";
private static final String UPPER_MINI_ = "\u00df\u0061";
private static final String UPPER_MINI_UPPER_ = "\u0053\u0053\u0041";
private static final String LOWER_BEFORE_ =
"\u0061\u0042\u0049\u03a3\u00df\u03a3\u002f\ud93f\udfff";
private static final String LOWER_ROOT_ =
"\u0061\u0062\u0069\u03c3\u00df\u03c2\u002f\ud93f\udfff";
private static final String LOWER_TURKISH_ =
"\u0061\u0062\u0131\u03c3\u00df\u03c2\u002f\ud93f\udfff";
/**
* each item is an array with input string, result string, locale ID, break iterator, options
* the break iterator is specified as an int, same as in BreakIterator.KIND_*:
* 0=KIND_CHARACTER 1=KIND_WORD 2=KIND_LINE 3=KIND_SENTENCE 4=KIND_TITLE -1=default (NULL=words) -2=no breaks (.*)
* options: T=U_FOLD_CASE_EXCLUDE_SPECIAL_I L=U_TITLECASE_NO_LOWERCASE A=U_TITLECASE_NO_BREAK_ADJUSTMENT
* see ICU4C source/test/testdata/casing.txt
*/
private static final String TITLE_DATA_[] = {
"\u0061\u0042\u0020\u0069\u03c2\u0020\u00df\u03c3\u002f\ufb03\ud93f\udfff",
"\u0041\u0042\u0020\u0049\u03a3\u0020\u0053\u0073\u03a3\u002f\u0046\u0066\u0069\ud93f\udfff",
"",
"0",
"",
"\u0061\u0042\u0020\u0069\u03c2\u0020\u00df\u03c3\u002f\ufb03\ud93f\udfff",
"\u0041\u0062\u0020\u0049\u03c2\u0020\u0053\u0073\u03c3\u002f\u0046\u0066\u0069\ud93f\udfff",
"",
"1",
"",
"\u02bbaMeLikA huI P\u016b \u02bb\u02bb\u02bbiA", "\u02bbAmelika Hui P\u016b \u02bb\u02bb\u02bbIa", // titlecase first _cased_ letter, j4933
"",
"-1",
"",
" tHe QUIcK bRoWn", " The Quick Brown",
"",
"4",
"",
"\u01c4\u01c5\u01c6\u01c7\u01c8\u01c9\u01ca\u01cb\u01cc",
"\u01c5\u01c5\u01c5\u01c8\u01c8\u01c8\u01cb\u01cb\u01cb", // UBRK_CHARACTER
"",
"0",
"",
"\u01c9ubav ljubav", "\u01c8ubav Ljubav", // Lj vs. L+j
"",
"-1",
"",
"'oH dOn'T tItLeCaSe AfTeR lEtTeR+'", "'Oh Don't Titlecase After Letter+'",
"",
"-1",
"",
"a \u02bbCaT. A \u02bbdOg! \u02bbeTc.",
"A \u02bbCat. A \u02bbDog! \u02bbEtc.",
"",
"-1",
"", // default
"a \u02bbCaT. A \u02bbdOg! \u02bbeTc.",
"A \u02bbcat. A \u02bbdog! \u02bbetc.",
"",
"-1",
"A", // U_TITLECASE_NO_BREAK_ADJUSTMENT
"a \u02bbCaT. A \u02bbdOg! \u02bbeTc.",
"A \u02bbCaT. A \u02bbdOg! \u02bbETc.",
"",
"3",
"L", // UBRK_SENTENCE and U_TITLECASE_NO_LOWERCASE
"\u02bbcAt! \u02bbeTc.",
"\u02bbCat! \u02bbetc.",
"",
"-2",
"", // -2=Trivial break iterator
"\u02bbcAt! \u02bbeTc.",
"\u02bbcat! \u02bbetc.",
"",
"-2",
"A", // U_TITLECASE_NO_BREAK_ADJUSTMENT
"\u02bbcAt! \u02bbeTc.",
"\u02bbCAt! \u02bbeTc.",
"",
"-2",
"L", // U_TITLECASE_NO_LOWERCASE
"\u02bbcAt! \u02bbeTc.",
"\u02bbcAt! \u02bbeTc.",
"",
"-2",
"AL", // Both options
// Test case for ticket #7251: UCharacter.toTitleCase() throws OutOfMemoryError
// when TITLECASE_NO_LOWERCASE encounters a single-letter word
"a b c",
"A B C",
"",
"1",
"L" // U_TITLECASE_NO_LOWERCASE
};
/**
     * <p>basic string, lower string, upper string (three entries per locale)</p>
*/
private static final String SPECIAL_DATA_[] = {
UTF16.valueOf(0x1043C) + UTF16.valueOf(0x10414),
UTF16.valueOf(0x1043C) + UTF16.valueOf(0x1043C),
UTF16.valueOf(0x10414) + UTF16.valueOf(0x10414),
"ab'cD \uFB00i\u0131I\u0130 \u01C7\u01C8\u01C9 " +
UTF16.valueOf(0x1043C) + UTF16.valueOf(0x10414),
"ab'cd \uFB00i\u0131ii\u0307 \u01C9\u01C9\u01C9 " +
UTF16.valueOf(0x1043C) + UTF16.valueOf(0x1043C),
"AB'CD FFIII\u0130 \u01C7\u01C7\u01C7 " +
UTF16.valueOf(0x10414) + UTF16.valueOf(0x10414),
// sigmas followed/preceded by cased letters
"i\u0307\u03a3\u0308j \u0307\u03a3\u0308j i\u00ad\u03a3\u0308 \u0307\u03a3\u0308 ",
"i\u0307\u03c3\u0308j \u0307\u03c3\u0308j i\u00ad\u03c2\u0308 \u0307\u03c3\u0308 ",
"I\u0307\u03a3\u0308J \u0307\u03a3\u0308J I\u00ad\u03a3\u0308 \u0307\u03a3\u0308 "
};
private static final Locale SPECIAL_LOCALES_[] = {
null,
ENGLISH_LOCALE_,
null,
};
private static final String SPECIAL_DOTTED_ =
"I \u0130 I\u0307 I\u0327\u0307 I\u0301\u0307 I\u0327\u0307\u0301";
private static final String SPECIAL_DOTTED_LOWER_TURKISH_ =
"\u0131 i i i\u0327 \u0131\u0301\u0307 i\u0327\u0301";
private static final String SPECIAL_DOTTED_LOWER_GERMAN_ =
"i i\u0307 i\u0307 i\u0327\u0307 i\u0301\u0307 i\u0327\u0307\u0301";
private static final String SPECIAL_DOT_ABOVE_ =
"a\u0307 \u0307 i\u0307 j\u0327\u0307 j\u0301\u0307";
private static final String SPECIAL_DOT_ABOVE_UPPER_LITHUANIAN_ =
"A\u0307 \u0307 I J\u0327 J\u0301\u0307";
private static final String SPECIAL_DOT_ABOVE_UPPER_GERMAN_ =
"A\u0307 \u0307 I\u0307 J\u0327\u0307 J\u0301\u0307";
private static final String SPECIAL_DOT_ABOVE_UPPER_ =
"I I\u0301 J J\u0301 \u012e \u012e\u0301 \u00cc\u00cd\u0128";
private static final String SPECIAL_DOT_ABOVE_LOWER_LITHUANIAN_ =
"i i\u0307\u0301 j j\u0307\u0301 \u012f \u012f\u0307\u0301 i\u0307\u0300i\u0307\u0301i\u0307\u0303";
private static final String SPECIAL_DOT_ABOVE_LOWER_GERMAN_ =
"i i\u0301 j j\u0301 \u012f \u012f\u0301 \u00ec\u00ed\u0129";
// private methods -------------------------------------------------------
/**
     * Converts the hex numbers separated by ';' into Unicode strings.
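     * <p>For example, a SpecialCasing.txt-style line such as
     * "0130; 0069 0307; 0130; 0130; # LATIN CAPITAL LETTER I WITH DOT ABOVE"
     * yields the four strings U+0130, U+0069 U+0307, U+0130, U+0130 and no
     * condition strings (the trailing comment is not returned).</p>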
* @param str string to break up into Unicode strings
     * @return array containing the four case-mapping strings followed by any condition strings
*/
private String[] getUnicodeStrings(String str)
{
List<String> v = new ArrayList<String>(10);
int start = 0;
for (int casecount = 4; casecount > 0; casecount --) {
int end = str.indexOf("; ", start);
String casestr = str.substring(start, end);
StringBuffer buffer = new StringBuffer();
int spaceoffset = 0;
while (spaceoffset < casestr.length()) {
int nextspace = casestr.indexOf(' ', spaceoffset);
if (nextspace == -1) {
nextspace = casestr.length();
}
buffer.append((char)Integer.parseInt(
casestr.substring(spaceoffset, nextspace),
16));
spaceoffset = nextspace + 1;
}
start = end + 2;
v.add(buffer.toString());
}
int comments = str.indexOf(" #", start);
if (comments != -1 && comments != start) {
if (str.charAt(comments - 1) == ';') {
comments --;
}
String conditions = str.substring(start, comments);
int offset = 0;
while (offset < conditions.length()) {
int spaceoffset = conditions.indexOf(' ', offset);
if (spaceoffset == -1) {
spaceoffset = conditions.length();
}
v.add(conditions.substring(offset, spaceoffset));
offset = spaceoffset + 1;
}
}
int size = v.size();
String result[] = new String[size];
for (int i = 0; i < size; i ++) {
result[i] = v.get(i);
}
return result;
}
}
|
|
/**
* Apache License
* Version 2.0, January 2004
* http://www.apache.org/licenses/
*
* TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
*
* 1. Definitions.
*
* "License" shall mean the terms and conditions for use, reproduction,
* and distribution as defined by Sections 1 through 9 of this document.
*
* "Licensor" shall mean the copyright owner or entity authorized by
* the copyright owner that is granting the License.
*
* "Legal Entity" shall mean the union of the acting entity and all
* other entities that control, are controlled by, or are under common
* control with that entity. For the purposes of this definition,
* "control" means (i) the power, direct or indirect, to cause the
* direction or management of such entity, whether by contract or
* otherwise, or (ii) ownership of fifty percent (50%) or more of the
* outstanding shares, or (iii) beneficial ownership of such entity.
*
* "You" (or "Your") shall mean an individual or Legal Entity
* exercising permissions granted by this License.
*
* "Source" form shall mean the preferred form for making modifications,
* including but not limited to software source code, documentation
* source, and configuration files.
*
* "Object" form shall mean any form resulting from mechanical
* transformation or translation of a Source form, including but
* not limited to compiled object code, generated documentation,
* and conversions to other media types.
*
* "Work" shall mean the work of authorship, whether in Source or
* Object form, made available under the License, as indicated by a
* copyright notice that is included in or attached to the work
* (an example is provided in the Appendix below).
*
* "Derivative Works" shall mean any work, whether in Source or Object
* form, that is based on (or derived from) the Work and for which the
* editorial revisions, annotations, elaborations, or other modifications
* represent, as a whole, an original work of authorship. For the purposes
* of this License, Derivative Works shall not include works that remain
* separable from, or merely link (or bind by name) to the interfaces of,
* the Work and Derivative Works thereof.
*
* "Contribution" shall mean any work of authorship, including
* the original version of the Work and any modifications or additions
* to that Work or Derivative Works thereof, that is intentionally
* submitted to Licensor for inclusion in the Work by the copyright owner
* or by an individual or Legal Entity authorized to submit on behalf of
* the copyright owner. For the purposes of this definition, "submitted"
* means any form of electronic, verbal, or written communication sent
* to the Licensor or its representatives, including but not limited to
* communication on electronic mailing lists, source code control systems,
* and issue tracking systems that are managed by, or on behalf of, the
* Licensor for the purpose of discussing and improving the Work, but
* excluding communication that is conspicuously marked or otherwise
* designated in writing by the copyright owner as "Not a Contribution."
*
* "Contributor" shall mean Licensor and any individual or Legal Entity
* on behalf of whom a Contribution has been received by Licensor and
* subsequently incorporated within the Work.
*
* 2. Grant of Copyright License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* copyright license to reproduce, prepare Derivative Works of,
* publicly display, publicly perform, sublicense, and distribute the
* Work and such Derivative Works in Source or Object form.
*
* 3. Grant of Patent License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* (except as stated in this section) patent license to make, have made,
* use, offer to sell, sell, import, and otherwise transfer the Work,
* where such license applies only to those patent claims licensable
* by such Contributor that are necessarily infringed by their
* Contribution(s) alone or by combination of their Contribution(s)
* with the Work to which such Contribution(s) was submitted. If You
* institute patent litigation against any entity (including a
* cross-claim or counterclaim in a lawsuit) alleging that the Work
* or a Contribution incorporated within the Work constitutes direct
* or contributory patent infringement, then any patent licenses
* granted to You under this License for that Work shall terminate
* as of the date such litigation is filed.
*
* 4. Redistribution. You may reproduce and distribute copies of the
* Work or Derivative Works thereof in any medium, with or without
* modifications, and in Source or Object form, provided that You
* meet the following conditions:
*
* (a) You must give any other recipients of the Work or
* Derivative Works a copy of this License; and
*
* (b) You must cause any modified files to carry prominent notices
* stating that You changed the files; and
*
* (c) You must retain, in the Source form of any Derivative Works
* that You distribute, all copyright, patent, trademark, and
* attribution notices from the Source form of the Work,
* excluding those notices that do not pertain to any part of
* the Derivative Works; and
*
* (d) If the Work includes a "NOTICE" text file as part of its
* distribution, then any Derivative Works that You distribute must
* include a readable copy of the attribution notices contained
* within such NOTICE file, excluding those notices that do not
* pertain to any part of the Derivative Works, in at least one
* of the following places: within a NOTICE text file distributed
* as part of the Derivative Works; within the Source form or
* documentation, if provided along with the Derivative Works; or,
* within a display generated by the Derivative Works, if and
* wherever such third-party notices normally appear. The contents
* of the NOTICE file are for informational purposes only and
* do not modify the License. You may add Your own attribution
* notices within Derivative Works that You distribute, alongside
* or as an addendum to the NOTICE text from the Work, provided
* that such additional attribution notices cannot be construed
* as modifying the License.
*
* You may add Your own copyright statement to Your modifications and
* may provide additional or different license terms and conditions
* for use, reproduction, or distribution of Your modifications, or
* for any such Derivative Works as a whole, provided Your use,
* reproduction, and distribution of the Work otherwise complies with
* the conditions stated in this License.
*
* 5. Submission of Contributions. Unless You explicitly state otherwise,
* any Contribution intentionally submitted for inclusion in the Work
* by You to the Licensor shall be under the terms and conditions of
* this License, without any additional terms or conditions.
* Notwithstanding the above, nothing herein shall supersede or modify
* the terms of any separate license agreement you may have executed
* with Licensor regarding such Contributions.
*
* 6. Trademarks. This License does not grant permission to use the trade
* names, trademarks, service marks, or product names of the Licensor,
* except as required for reasonable and customary use in describing the
* origin of the Work and reproducing the content of the NOTICE file.
*
* 7. Disclaimer of Warranty. Unless required by applicable law or
* agreed to in writing, Licensor provides the Work (and each
* Contributor provides its Contributions) on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied, including, without limitation, any warranties or conditions
* of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
* PARTICULAR PURPOSE. You are solely responsible for determining the
* appropriateness of using or redistributing the Work and assume any
* risks associated with Your exercise of permissions under this License.
*
* 8. Limitation of Liability. In no event and under no legal theory,
* whether in tort (including negligence), contract, or otherwise,
* unless required by applicable law (such as deliberate and grossly
* negligent acts) or agreed to in writing, shall any Contributor be
* liable to You for damages, including any direct, indirect, special,
* incidental, or consequential damages of any character arising as a
* result of this License or out of the use or inability to use the
* Work (including but not limited to damages for loss of goodwill,
* work stoppage, computer failure or malfunction, or any and all
* other commercial damages or losses), even if such Contributor
* has been advised of the possibility of such damages.
*
* 9. Accepting Warranty or Additional Liability. While redistributing
* the Work or Derivative Works thereof, You may choose to offer,
* and charge a fee for, acceptance of support, warranty, indemnity,
* or other liability obligations and/or rights consistent with this
* License. However, in accepting such obligations, You may act only
* on Your own behalf and on Your sole responsibility, not on behalf
* of any other Contributor, and only if You agree to indemnify,
* defend, and hold each Contributor harmless for any liability
* incurred by, or claims asserted against, such Contributor by reason
* of your accepting any such warranty or additional liability.
*
* END OF TERMS AND CONDITIONS
*
* APPENDIX: How to apply the Apache License to your work.
*
* To apply the Apache License to your work, attach the following
* boilerplate notice, with the fields enclosed by brackets "{}"
* replaced with your own identifying information. (Don't include
* the brackets!) The text should be enclosed in the appropriate
* comment syntax for the file format. We also recommend that a
* file or class name and description of purpose be included on the
* same "printed page" as the copyright notice for easier
* identification within third-party archives.
*
* Copyright {yyyy} {name of copyright owner}
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.deleidos.rtws.webapp.repository.util;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Enumeration;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import java.util.zip.ZipOutputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class provides zipping and unzipping functionality.
*/
public class ZipUtil {
/** The logger. */
private static final Logger log = LoggerFactory.getLogger(ZipUtil.class);
/** The write buffer size. */
private static final int BUFFER_SIZE = 2048;
/** A private constructor to prevent instantiation of the class. */
private ZipUtil() { }
/**
     * Takes a list of files and zips them up.
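     * <p>Illustrative usage (hypothetical paths; an entry name is trimmed to start at
     * {@code zipEntryStartLink} when that substring is found in the file path):</p>
     * <pre>
     * ZipUtil.pack("/tmp/out.zip", "data/",
     *         Arrays.asList("/opt/app/data/a.txt", "/opt/app/data/b.txt"));
     * // produces the entries "data/a.txt" and "data/b.txt"
     * </pre>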
*/
public static boolean pack(String zipFilePath, String zipEntryStartLink, List<String> files) {
FileOutputStream fos = null;
ZipOutputStream zos = null;
BufferedInputStream bis = null;
try {
fos = new FileOutputStream(zipFilePath);
zos = new ZipOutputStream(new BufferedOutputStream(fos));
byte [] data = new byte[BUFFER_SIZE];
for (String file : files) {
FileInputStream fis = new FileInputStream(file);
bis = new BufferedInputStream(fis, BUFFER_SIZE);
String entryName = file;
if (zipEntryStartLink != null) {
int index = file.lastIndexOf(zipEntryStartLink);
if (index != -1 ) {
entryName = file.substring(index);
}
}
ZipEntry entry = new ZipEntry(entryName);
zos.putNextEntry(entry);
int count = 0;
while((count = bis.read(data, 0, BUFFER_SIZE)) != -1) {
zos.write(data, 0, count);
}
bis.close();
}
return true;
} catch (Exception ex) {
log.error("pack - Exception: " + ex.getMessage(), ex);
        } finally {
            // close the last opened input stream in case an exception skipped the in-loop close
            if (bis != null) {
                try { bis.close(); } catch (IOException e) { }
            }
            if (zos != null) {
                try { zos.close(); } catch (IOException e) { }
            }
        }
return false;
}
/**
     * Takes a zip file and unpacks it into the given directory.
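     * <p>Entries are written beneath destPath, inside a subdirectory named after the zip file
     * (without its extension). Illustrative usage (hypothetical paths):</p>
     * <pre>
     * ZipUtil.unpack("/tmp/bundle.zip", "/tmp/extracted");
     * // an entry "data/a.txt" is written to /tmp/extracted/bundle/data/a.txt
     * </pre>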
* @throws IOException
*/
    public static boolean unpack(String zipFilePath, String destPath) throws IOException {
        ZipFile zipFile = new ZipFile(zipFilePath);
        try {
            Enumeration<? extends ZipEntry> entries = zipFile.entries();
            String zipFileName = (new File(zipFile.getName())).getName();
            String zipFileNameWithoutExt = zipFileName.substring(0, zipFileName.lastIndexOf('.'));
            while (entries.hasMoreElements()) {
                ZipEntry entry = (ZipEntry) entries.nextElement();
                String destFilePath = destPath + '/' + zipFileNameWithoutExt + '/' + entry.getName();
                FileUtil.makeFileParentDir(destFilePath);
                BufferedInputStream bis = null;
                BufferedOutputStream bos = null;
                try {
                    bis = new BufferedInputStream(zipFile.getInputStream(entry));
                    bos = new BufferedOutputStream(new FileOutputStream(destFilePath), BUFFER_SIZE);
                    int count;
                    byte [] data = new byte[BUFFER_SIZE];
                    while ((count = bis.read(data, 0, BUFFER_SIZE)) != -1) {
                        bos.write(data, 0, count);
                    }
                    bos.flush();
                } finally {
                    if (bos != null) {
                        bos.close();
                    }
                    if (bis != null) {
                        bis.close();
                    }
                }
            }
            return true;
        } finally {
            // release the zip file handle so it is not leaked
            zipFile.close();
        }
    }
}
|
|
package com.gelakinetic.mtgfam.fragments;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.os.Bundle;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.view.inputmethod.EditorInfo;
import android.widget.ArrayAdapter;
import android.widget.AutoCompleteTextView;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.MultiAutoCompleteTextView;
import android.widget.Spinner;
import android.widget.TextView;
import com.alertdialogpro.AlertDialogPro;
import com.gelakinetic.mtgfam.FamiliarActivity;
import com.gelakinetic.mtgfam.R;
import com.gelakinetic.mtgfam.helpers.AutocompleteCursorAdapter;
import com.gelakinetic.mtgfam.helpers.SearchCriteria;
import com.gelakinetic.mtgfam.helpers.SpaceTokenizer;
import com.gelakinetic.mtgfam.helpers.ToastWrapper;
import com.gelakinetic.mtgfam.helpers.database.CardDbAdapter;
import com.gelakinetic.mtgfam.helpers.database.DatabaseManager;
import com.gelakinetic.mtgfam.helpers.database.FamiliarDbException;
import org.jetbrains.annotations.NotNull;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.Arrays;
import java.util.List;
/**
* This fragment lets users configure search parameters, and then search for a card
* The actual database query is handled in the ResultListFragment
*/
public class SearchViewFragment extends FamiliarFragment {
/* String keys */
public static final String CRITERIA = "criteria";
/* Dialog IDs */
private static final int SET_LIST = 1;
private static final int FORMAT_LIST = 2;
private static final int RARITY_LIST = 3;
/* Default search file */
private static final String DEFAULT_CRITERIA_FILE = "defaultSearchCriteria.ser";
/* Spinner Data Structures */
private String[] mSetNames;
private boolean[] mSetChecked;
private String[] mSetSymbols;
private String[] mFormatNames;
private char[] mRarityCodes;
private String[] mRarityNames;
private boolean[] mRarityChecked;
private int mSelectedFormat;
/* UI Elements */
private AutoCompleteTextView mNameField;
private EditText mTextField;
private MultiAutoCompleteTextView mSupertypeField;
private EditText mSubtypeField;
private EditText mCollectorsNumberField;
private CheckBox mCheckboxW;
private CheckBox mCheckboxU;
private CheckBox mCheckboxB;
private CheckBox mCheckboxR;
private CheckBox mCheckboxG;
private CheckBox mCheckboxL;
private Spinner mColorSpinner;
private Button mSetButton;
private Button mFormatButton;
private Button mRarityButton;
private Spinner mPowLogic;
private Spinner mPowChoice;
private Spinner mTouLogic;
private Spinner mTouChoice;
private Spinner mCmcLogic;
private Spinner mCmcChoice;
private AlertDialog mSetDialog;
private AlertDialog mFormatDialog;
private AlertDialog mRarityDialog;
private EditText mFlavorField;
private EditText mArtistField;
private Spinner mTextSpinner;
private Spinner mTypeSpinner;
private Spinner mSetSpinner;
/**
* This will query the database to populate the set and format spinner dialogs.
* The rarity dialog is pulled from resources
*
* @param savedInstanceState If the fragment is being re-created from a previous saved state, this is the state.
*/
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
SQLiteDatabase database = DatabaseManager.getInstance(getActivity(), false).openDatabase(false);
try {
/* Query the database for all sets and fill the arrays to populate the list of choices with */
Cursor setCursor = CardDbAdapter.fetchAllSets(database);
setCursor.moveToFirst();
mSetNames = new String[setCursor.getCount()];
mSetSymbols = new String[setCursor.getCount()];
mSetChecked = new boolean[setCursor.getCount()];
for (int i = 0; i < setCursor.getCount(); i++) {
mSetSymbols[i] = setCursor.getString(setCursor.getColumnIndex(CardDbAdapter.KEY_CODE));
mSetNames[i] = setCursor.getString(setCursor.getColumnIndex(CardDbAdapter.KEY_NAME));
mSetChecked[i] = false;
setCursor.moveToNext();
}
setCursor.close();
/* Query the database for all formats and fill the arrays to populate the list of choices with */
Cursor formatCursor = CardDbAdapter.fetchAllFormats(database);
formatCursor.moveToFirst();
mFormatNames = new String[formatCursor.getCount()];
for (int i = 0; i < formatCursor.getCount(); i++) {
mFormatNames[i] = formatCursor.getString(formatCursor.getColumnIndex(CardDbAdapter.KEY_NAME));
formatCursor.moveToNext();
}
formatCursor.close();
mSelectedFormat = -1;
} catch (FamiliarDbException e) {
handleFamiliarDbException(true);
}
DatabaseManager.getInstance(getActivity(), false).closeDatabase(false);
/* Get the different rarities out of resources to populate the list of choices with */
Resources res = getResources();
TypedArray mRarityNamesTemp = res.obtainTypedArray(R.array.rarities);
int i = mRarityNamesTemp.length();
mRarityNames = new String[i];
mRarityCodes = new char[i];
mRarityChecked = new boolean[i];
while (i-- > 0) {
int resID = mRarityNamesTemp.peekValue(i).resourceId;
String resEntryName = res.getResourceEntryName(resID);
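            /* e.g. a resource entry named "rarity_C" (illustrative name) yields the
               code 'C', taken from just after the last underscore below */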
int p = resEntryName.lastIndexOf("_");
if (-1 != p && p + 1 < resEntryName.length())
mRarityCodes[i] = resEntryName.charAt(p + 1);
else mRarityCodes[i] = ' ';
mRarityNames[i] = res.getString(resID);
}
mRarityNamesTemp.recycle();
}
/**
* Find all the UI elements. set actions for buttons. Attach array adapters for autocomplete
*
* @param inflater The LayoutInflater object that can be used to inflate any views in the fragment,
* @param container If non-null, this is the parent view that the fragment's UI should be attached to. The
* fragment should not add the view itself, but this can be used to generate the
* LayoutParams of the view.
* @param savedInstanceState If non-null, this fragment is being re-constructed from a previous saved state as given
* here.
* @return the inflated view
*/
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
/* Inflate the view */
View myFragmentView = inflater.inflate(R.layout.search_frag, container, false);
assert myFragmentView != null;
        /* Get references to UI elements. When a search is performed, these values will be queried */
mNameField = (AutoCompleteTextView) myFragmentView.findViewById(R.id.name_search);
mTextField = (EditText) myFragmentView.findViewById(R.id.textsearch);
mSupertypeField = (MultiAutoCompleteTextView) myFragmentView.findViewById(R.id.supertypesearch);
mSubtypeField = (EditText) myFragmentView.findViewById(R.id.subtypesearch);
mFlavorField = (EditText) myFragmentView.findViewById(R.id.flavorsearch);
mArtistField = (EditText) myFragmentView.findViewById(R.id.artistsearch);
mCollectorsNumberField = (EditText) myFragmentView.findViewById(R.id.collectorsnumbersearch);
Button searchButton = (Button) myFragmentView.findViewById(R.id.searchbutton);
mCheckboxW = (CheckBox) myFragmentView.findViewById(R.id.checkBoxW);
mCheckboxU = (CheckBox) myFragmentView.findViewById(R.id.checkBoxU);
mCheckboxB = (CheckBox) myFragmentView.findViewById(R.id.checkBoxB);
mCheckboxR = (CheckBox) myFragmentView.findViewById(R.id.checkBoxR);
mCheckboxG = (CheckBox) myFragmentView.findViewById(R.id.checkBoxG);
mCheckboxL = (CheckBox) myFragmentView.findViewById(R.id.checkBoxL);
mColorSpinner = (Spinner) myFragmentView.findViewById(R.id.colorlogic);
mTextSpinner = (Spinner) myFragmentView.findViewById(R.id.textlogic);
mTypeSpinner = (Spinner) myFragmentView.findViewById(R.id.typelogic);
mSetSpinner = (Spinner) myFragmentView.findViewById(R.id.setlogic);
mSetButton = (Button) myFragmentView.findViewById(R.id.setsearch);
mFormatButton = (Button) myFragmentView.findViewById(R.id.formatsearch);
mRarityButton = (Button) myFragmentView.findViewById(R.id.raritysearch);
mPowLogic = (Spinner) myFragmentView.findViewById(R.id.powLogic);
mPowChoice = (Spinner) myFragmentView.findViewById(R.id.powChoice);
mTouLogic = (Spinner) myFragmentView.findViewById(R.id.touLogic);
mTouChoice = (Spinner) myFragmentView.findViewById(R.id.touChoice);
mCmcLogic = (Spinner) myFragmentView.findViewById(R.id.cmcLogic);
mCmcChoice = (Spinner) myFragmentView.findViewById(R.id.cmcChoice);
/* set the buttons to open the dialogs */
mSetButton.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
showDialog(SET_LIST);
}
});
mFormatButton.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
showDialog(FORMAT_LIST);
}
});
mRarityButton.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
showDialog(RARITY_LIST);
}
});
/* This is a better default, might want to reorder the array */
mColorSpinner.setSelection(2);
/* The button colors change whether an option is selected or not */
checkDialogButtonColors();
/* This listener will do searches directly from the TextViews. Attach it to everything! */
TextView.OnEditorActionListener doSearchListener = new TextView.OnEditorActionListener() {
public boolean onEditorAction(TextView arg0, int arg1, KeyEvent arg2) {
if (arg1 == EditorInfo.IME_ACTION_SEARCH) {
doSearch();
return true;
}
return false;
}
};
mNameField.setOnEditorActionListener(doSearchListener);
mTextField.setOnEditorActionListener(doSearchListener);
mSupertypeField.setOnEditorActionListener(doSearchListener);
mSubtypeField.setOnEditorActionListener(doSearchListener);
mFlavorField.setOnEditorActionListener(doSearchListener);
mArtistField.setOnEditorActionListener(doSearchListener);
mCollectorsNumberField.setOnEditorActionListener(doSearchListener);
/* set the autocomplete for card names */
mNameField.setAdapter(new AutocompleteCursorAdapter(this, new String[]{CardDbAdapter.KEY_NAME}, new int[]{R.id.text1}, mNameField));
/* set the autocomplete for supertypes */
String[] supertypes = getResources().getStringArray(R.array.supertypes);
ArrayAdapter<String> supertypeAdapter = new ArrayAdapter<>(this.getActivity(),
R.layout.list_item_1, supertypes);
mSupertypeField.setThreshold(1);
mSupertypeField.setAdapter(supertypeAdapter);
mSupertypeField.setTokenizer(new SpaceTokenizer());
/* set the search button! */
searchButton.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
doSearch();
}
});
return myFragmentView;
}
/**
* Generic onResume. Catches when consolidation is changed in preferences
*/
@Override
public void onResume() {
super.onResume();
/* Do we want to consolidate different printings of the same card in results, or not? */
boolean consolidate = getFamiliarActivity().mPreferenceAdapter.getConsolidateSearch();
mSetSpinner.setSelection(consolidate ? CardDbAdapter.MOST_RECENT_PRINTING : CardDbAdapter.ALL_PRINTINGS);
}
/**
* This function creates a results fragment, sends it the search criteria, and starts it
*/
private void doSearch() {
SearchCriteria searchCriteria = parseForm();
Bundle args = new Bundle();
args.putSerializable(CRITERIA, searchCriteria);
ResultListFragment rlFrag = new ResultListFragment();
startNewFragment(rlFrag, args);
}
/**
* This function combs through all the UI elements and returns a SearchCriteria with the current search options
*
* @return a SearchCriteria with what the user wants to search for
*/
private SearchCriteria parseForm() {
SearchCriteria searchCriteria = new SearchCriteria();
/* Because Android Studio whines */
assert mNameField.getText() != null;
assert mTextField.getText() != null;
assert mSupertypeField.getText() != null;
assert mSubtypeField.getText() != null;
assert mFlavorField.getText() != null;
assert mArtistField.getText() != null;
assert mCollectorsNumberField.getText() != null;
/* Read EditTexts */
searchCriteria.name = mNameField.getText().toString();
searchCriteria.text = mTextField.getText().toString();
String supertype = mSupertypeField.getText().toString();
String subtype = mSubtypeField.getText().toString();
searchCriteria.type = supertype + " - " + subtype;
searchCriteria.flavor = mFlavorField.getText().toString();
searchCriteria.artist = mArtistField.getText().toString();
searchCriteria.collectorsNumber = mCollectorsNumberField.getText().toString();
if (searchCriteria.name.length() == 0) {
searchCriteria.name = null;
}
if (searchCriteria.text.length() == 0) {
searchCriteria.text = null;
}
if (searchCriteria.type.length() == 0) {
searchCriteria.type = null;
}
if (searchCriteria.flavor.length() == 0) {
searchCriteria.flavor = null;
}
if (searchCriteria.artist.length() == 0) {
searchCriteria.artist = null;
}
if(searchCriteria.collectorsNumber.length() == 0) {
searchCriteria.collectorsNumber = null;
}
        /* Build a color string. A capital letter means the user is searching for that color */
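        /* For example, checking only the W and U boxes yields "WUbrgl" (illustrative) */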
searchCriteria.color = null;
if (mCheckboxW.isChecked()) {
searchCriteria.color = "W";
} else {
searchCriteria.color = "w";
}
if (mCheckboxU.isChecked()) {
searchCriteria.color += "U";
} else {
searchCriteria.color += "u";
}
if (mCheckboxB.isChecked()) {
searchCriteria.color += "B";
} else {
searchCriteria.color += "b";
}
if (mCheckboxR.isChecked()) {
searchCriteria.color += "R";
} else {
searchCriteria.color += "r";
}
if (mCheckboxG.isChecked()) {
searchCriteria.color += "G";
} else {
searchCriteria.color += "g";
}
if (mCheckboxL.isChecked()) {
searchCriteria.color += "L";
} else {
searchCriteria.color += "l";
}
searchCriteria.colorLogic = mColorSpinner.getSelectedItemPosition();
searchCriteria.set = null;
for (int i = 0; i < mSetChecked.length; i++) {
if (mSetChecked[i]) {
if (searchCriteria.set == null) {
searchCriteria.set = mSetSymbols[i];
} else {
searchCriteria.set += "-" + mSetSymbols[i];
}
}
}
searchCriteria.format = null;
if (mSelectedFormat != -1) {
searchCriteria.format = mFormatNames[mSelectedFormat];
}
searchCriteria.rarity = null;
for (int i = 0; i < mRarityChecked.length; i++) {
if (mRarityChecked[i]) {
if (searchCriteria.rarity == null) {
searchCriteria.rarity = mRarityCodes[i] + "";
} else {
searchCriteria.rarity += mRarityCodes[i];
}
}
}
String[] logicChoices = getResources().getStringArray(R.array.logic_spinner);
String power = getResources().getStringArray(R.array.pt_spinner)[mPowChoice.getSelectedItemPosition()];
String toughness = getResources().getStringArray(R.array.pt_spinner)[mTouChoice.getSelectedItemPosition()];
float pow = CardDbAdapter.NO_ONE_CARES;
try {
pow = Float.parseFloat(power);
} catch (NumberFormatException e) {
switch (power) {
case "*":
pow = CardDbAdapter.STAR;
break;
case "1+*":
pow = CardDbAdapter.ONE_PLUS_STAR;
break;
case "2+*":
pow = CardDbAdapter.TWO_PLUS_STAR;
break;
case "7-*":
pow = CardDbAdapter.SEVEN_MINUS_STAR;
break;
case "*^2":
pow = CardDbAdapter.STAR_SQUARED;
break;
}
}
searchCriteria.powChoice = pow;
searchCriteria.powLogic = logicChoices[mPowLogic.getSelectedItemPosition()];
float tou = CardDbAdapter.NO_ONE_CARES;
try {
tou = Float.parseFloat(toughness);
} catch (NumberFormatException e) {
switch (toughness) {
case "*":
tou = CardDbAdapter.STAR;
break;
case "1+*":
tou = CardDbAdapter.ONE_PLUS_STAR;
break;
case "2+*":
tou = CardDbAdapter.TWO_PLUS_STAR;
break;
case "7-*":
tou = CardDbAdapter.SEVEN_MINUS_STAR;
break;
case "*^2":
tou = CardDbAdapter.STAR_SQUARED;
break;
}
}
searchCriteria.touChoice = tou;
searchCriteria.touLogic = logicChoices[mTouLogic.getSelectedItemPosition()];
String[] cmcChoices = getResources().getStringArray(R.array.cmc_spinner);
int cmc;
try {
cmc = Integer.parseInt(cmcChoices[mCmcChoice.getSelectedItemPosition()]);
} catch (NumberFormatException e) {
cmc = -1;
}
searchCriteria.cmc = cmc;
searchCriteria.cmcLogic = logicChoices[mCmcLogic.getSelectedItemPosition()];
searchCriteria.typeLogic = mTypeSpinner.getSelectedItemPosition();
searchCriteria.textLogic = mTextSpinner.getSelectedItemPosition();
searchCriteria.setLogic = mSetSpinner.getSelectedItemPosition();
return searchCriteria;
}
/**
* This function clears all the search options, it's called from the ActionBar
*/
private void clear() {
mNameField.setText("");
mSupertypeField.setText("");
mSubtypeField.setText("");
mTextField.setText("");
mArtistField.setText("");
mFlavorField.setText("");
mCollectorsNumberField.setText("");
mCheckboxW.setChecked(false);
mCheckboxU.setChecked(false);
mCheckboxB.setChecked(false);
mCheckboxR.setChecked(false);
mCheckboxG.setChecked(false);
mCheckboxL.setChecked(false);
mColorSpinner.setSelection(2);
mTextSpinner.setSelection(0);
mTypeSpinner.setSelection(0);
mSetSpinner.setSelection(0);
mPowLogic.setSelection(0);
mPowChoice.setSelection(0);
mTouLogic.setSelection(0);
mTouChoice.setSelection(0);
mCmcLogic.setSelection(0);
mCmcLogic.setSelection(1); /* CMC should default to < */
mCmcChoice.setSelection(0);
for (int i = 0; i < mSetChecked.length; i++) {
mSetChecked[i] = false;
}
mSelectedFormat = -1;
for (int i = 0; i < mRarityChecked.length; i++) {
mRarityChecked[i] = false;
}
this.removeDialog(getFragmentManager());
checkDialogButtonColors();
}
/**
* This function saves the current search options into a file, so the user can have a default search
*/
private void persistOptions() {
try {
SearchCriteria searchCriteria = parseForm();
FileOutputStream fileStream = this.getActivity()
.openFileOutput(DEFAULT_CRITERIA_FILE, Context.MODE_PRIVATE);
ObjectOutputStream os = new ObjectOutputStream(fileStream);
os.writeObject(searchCriteria);
os.close();
} catch (IOException e) {
ToastWrapper.makeText(this.getActivity(), R.string.search_toast_cannot_save, ToastWrapper.LENGTH_LONG).show();
}
}
/**
* This function reads the saved default search options and populates the UI elements
*/
private void fetchPersistedOptions() {
try {
FileInputStream fileInputStream = this.getActivity().openFileInput(DEFAULT_CRITERIA_FILE);
ObjectInputStream oInputStream = new ObjectInputStream(fileInputStream);
SearchCriteria criteria = (SearchCriteria) oInputStream.readObject();
oInputStream.close();
mNameField.setText(criteria.name);
String[] type = criteria.type.split(" - ");
if (type.length > 0 && type[0] != null) {
mSupertypeField.setText(type[0]);
}
if (type.length > 1 && type[1] != null) {
mSubtypeField.setText(type[1]);
}
mTextField.setText(criteria.text);
mArtistField.setText(criteria.artist);
mFlavorField.setText(criteria.flavor);
mCollectorsNumberField.setText(criteria.collectorsNumber);
mCheckboxW.setChecked(criteria.color.contains("W"));
mCheckboxU.setChecked(criteria.color.contains("U"));
mCheckboxB.setChecked(criteria.color.contains("B"));
mCheckboxR.setChecked(criteria.color.contains("R"));
mCheckboxG.setChecked(criteria.color.contains("G"));
mCheckboxL.setChecked(criteria.color.contains("L"));
mColorSpinner.setSelection(criteria.colorLogic);
mTextSpinner.setSelection(criteria.textLogic);
mTypeSpinner.setSelection(criteria.typeLogic);
mSetSpinner.setSelection(criteria.setLogic);
List<String> logicChoices = Arrays.asList(getResources().getStringArray(R.array.logic_spinner));
mPowLogic.setSelection(logicChoices.indexOf(criteria.powLogic));
List<String> ptList = Arrays.asList(getResources().getStringArray(R.array.pt_spinner));
float p = criteria.powChoice;
if (p != CardDbAdapter.NO_ONE_CARES) {
if (p == CardDbAdapter.STAR)
mPowChoice.setSelection(ptList.indexOf("*"));
else if (p == CardDbAdapter.ONE_PLUS_STAR)
mPowChoice.setSelection(ptList.indexOf("1+*"));
else if (p == CardDbAdapter.TWO_PLUS_STAR)
mPowChoice.setSelection(ptList.indexOf("2+*"));
else if (p == CardDbAdapter.SEVEN_MINUS_STAR)
mPowChoice.setSelection(ptList.indexOf("7-*"));
else if (p == CardDbAdapter.STAR_SQUARED)
mPowChoice.setSelection(ptList.indexOf("*^2"));
else {
if (p == (int) p) {
mPowChoice.setSelection(ptList.indexOf(((int) p) + ""));
} else {
mPowChoice.setSelection(ptList.indexOf(p + ""));
}
}
}
mTouLogic.setSelection(logicChoices.indexOf(criteria.touLogic));
float t = criteria.touChoice;
if (t != CardDbAdapter.NO_ONE_CARES) {
if (t == CardDbAdapter.STAR)
mTouChoice.setSelection(ptList.indexOf("*"));
else if (t == CardDbAdapter.ONE_PLUS_STAR)
mTouChoice.setSelection(ptList.indexOf("1+*"));
else if (t == CardDbAdapter.TWO_PLUS_STAR)
mTouChoice.setSelection(ptList.indexOf("2+*"));
else if (t == CardDbAdapter.SEVEN_MINUS_STAR)
mTouChoice.setSelection(ptList.indexOf("7-*"));
else if (t == CardDbAdapter.STAR_SQUARED)
mTouChoice.setSelection(ptList.indexOf("*^2"));
else {
if (t == (int) t) {
mTouChoice.setSelection(ptList.indexOf(((int) t) + ""));
} else {
mTouChoice.setSelection(ptList.indexOf(t + ""));
}
}
}
mCmcLogic.setSelection(logicChoices.indexOf(criteria.cmcLogic));
mCmcChoice.setSelection(Arrays.asList(getResources().getStringArray(R.array.cmc_spinner))
.indexOf(String.valueOf(criteria.cmc)));
if (criteria.set != null) {
List<String> sets = Arrays.asList(criteria.set.split("-"));
for (int i = 0; i < mSetChecked.length; i++)
mSetChecked[i] = sets.contains(mSetSymbols[i]);
} else
for (int i = 0; i < mSetChecked.length; i++)
mSetChecked[i] = false;
mSelectedFormat = Arrays.asList(mFormatNames).indexOf(criteria.format);
for (int i = 0; i < mRarityChecked.length; i++) {
mRarityChecked[i] = (criteria.rarity != null && criteria.rarity
.contains(mRarityNames[i].charAt(0) + ""));
}
this.removeDialog(getFragmentManager());
checkDialogButtonColors();
} catch (IOException | ClassNotFoundException e) {
ToastWrapper.makeText(this.getActivity(), R.string.search_toast_cannot_load, ToastWrapper.LENGTH_LONG).show();
}
}
/**
* This function is checked when building the menu.
* Since it returns true, the menu button will call onInterceptSearchKey() instead of being a quick search
*
* @return True
*/
@Override
boolean canInterceptSearchKey() {
return true;
}
/**
* This is called when the hardware search key is pressed.
*
* @return the fragment did something, so true
*/
@Override
public boolean onInterceptSearchKey() {
doSearch();
return true;
}
/**
* Process a button press on the ActionBar
*
* @param item The item pressed
* @return true if the action was taken, false otherwise
*/
@Override
public boolean onOptionsItemSelected(MenuItem item) {
/* Handle item selection */
switch (item.getItemId()) {
case R.id.search_menu_clear:
clear();
return true;
case R.id.search_menu_save_defaults:
persistOptions();
return true;
case R.id.search_menu_load_defaults:
fetchPersistedOptions();
return true;
default:
return super.onOptionsItemSelected(item);
}
}
/**
* This is a convenience function to set the colors for the buttons which pop dialogs
*/
private void checkDialogButtonColors() {
if (mSetNames == null || mSetChecked == null || mSetSymbols == null || mFormatNames == null ||
mRarityNames == null || mRarityChecked == null || !isAdded()) {
return;
}
mSetButton.setTextColor(getResources().getColor(getResourceIdFromAttr(R.attr.color_text)));
for (boolean aSetChecked : mSetChecked) {
if (aSetChecked) {
mSetButton.setTextColor(getResources().getColor(getResourceIdFromAttr(R.attr.colorPrimary_attr)));
}
}
mFormatButton.setTextColor(getResources().getColor(getResourceIdFromAttr(R.attr.color_text)));
if (mSelectedFormat != -1) {
mFormatButton.setTextColor(getResources().getColor(getResourceIdFromAttr(R.attr.colorPrimary_attr)));
}
mRarityButton.setTextColor(getResources().getColor(getResourceIdFromAttr(R.attr.color_text)));
for (boolean aRarityChecked : mRarityChecked) {
if (aRarityChecked) {
mRarityButton.setTextColor(getResources().getColor(getResourceIdFromAttr(R.attr.colorPrimary_attr)));
}
}
}
/**
     * This will remove any currently showing dialog and display the one given by id.
     * Usually dialogs are created here, but in this case the data backing them was queried in onCreate() because of
     * the db calls, and the dialogs are recreated each time so that they can be cleared.
*
* @param id the id of the dialog to be shown
*/
private void showDialog(final int id) throws IllegalStateException {
removeDialog(getFragmentManager());
/* Create and show the dialog. */
FamiliarDialogFragment newFragment = new FamiliarDialogFragment() {
@Override
public void onDismiss(DialogInterface dialog) {
super.onDismiss(dialog);
checkDialogButtonColors();
}
@NotNull
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
super.onCreateDialog(savedInstanceState);
/* This will be set to false if we are returning a null dialog. It prevents a crash */
setShowsDialog(true);
DialogInterface.OnMultiChoiceClickListener multiChoiceClickListener =
new DialogInterface.OnMultiChoiceClickListener() {
@Override
public void onClick(DialogInterface dialogInterface, int i, boolean b) {
}
};
DialogInterface.OnClickListener clickListener = new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialogInterface, int i) {
}
};
try {
/* Build the dialogs to display format, rarity, and set choices. The arrays were already filled in
onCreate() */
switch (id) {
case SET_LIST: {
mSetDialog = new AlertDialogPro.Builder(this.getActivity()).setTitle(R.string.search_sets)
.setMultiChoiceItems(mSetNames, mSetChecked, multiChoiceClickListener)
.setPositiveButton(R.string.dialog_ok, clickListener).create();
return mSetDialog;
}
case FORMAT_LIST: {
mFormatDialog = new AlertDialogPro.Builder(this.getActivity()).
setTitle(R.string.search_formats).setSingleChoiceItems(mFormatNames,
mSelectedFormat, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
mSelectedFormat = which;
}
}
).setPositiveButton(R.string.dialog_ok, clickListener).create();
return mFormatDialog;
}
case RARITY_LIST: {
mRarityDialog = new AlertDialogPro.Builder(this.getActivity())
.setTitle(R.string.search_rarities).setMultiChoiceItems(mRarityNames,
mRarityChecked, multiChoiceClickListener)
.setPositiveButton(R.string.dialog_ok, clickListener).create();
return mRarityDialog;
}
default: {
return DontShowDialog();
}
}
} catch (NullPointerException e) {
/* if the db failed to open, these arrays will be null. */
handleFamiliarDbException(false);
return DontShowDialog();
}
}
};
newFragment.show(getFragmentManager(), FamiliarActivity.DIALOG_TAG);
}
/**
* Inflate the menu
*
* @param menu The options menu in which you place your items.
* @param inflater The inflater to use to inflate the menu
*/
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
super.onCreateOptionsMenu(menu, inflater);
inflater.inflate(R.menu.search_menu, menu);
}
}
|
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.mediaconvert.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* Required when you set (Codec) under (AudioDescriptions)>(CodecSettings) to the value WAV.
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediaconvert-2017-08-29/WavSettings" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class WavSettings implements Serializable, Cloneable, StructuredPojo {
/** Specify Bit depth (BitDepth), in bits per sample, to choose the encoding quality for this audio track. */
private Integer bitDepth;
/**
* Specify the number of channels in this output audio track. Valid values are 1 and even numbers up to 64. For
* example, 1, 2, 4, 6, and so on, up to 64.
*/
private Integer channels;
/**
* The service defaults to using RIFF for WAV outputs. If your output audio is likely to exceed 4 GB in file size,
* or if you otherwise need the extended support of the RF64 format, set your output WAV file format to RF64.
*/
private String format;
/** Sample rate in Hz. */
private Integer sampleRate;
/**
* Specify Bit depth (BitDepth), in bits per sample, to choose the encoding quality for this audio track.
*
* @param bitDepth
* Specify Bit depth (BitDepth), in bits per sample, to choose the encoding quality for this audio track.
*/
public void setBitDepth(Integer bitDepth) {
this.bitDepth = bitDepth;
}
/**
* Specify Bit depth (BitDepth), in bits per sample, to choose the encoding quality for this audio track.
*
* @return Specify Bit depth (BitDepth), in bits per sample, to choose the encoding quality for this audio track.
*/
public Integer getBitDepth() {
return this.bitDepth;
}
/**
* Specify Bit depth (BitDepth), in bits per sample, to choose the encoding quality for this audio track.
*
* @param bitDepth
* Specify Bit depth (BitDepth), in bits per sample, to choose the encoding quality for this audio track.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public WavSettings withBitDepth(Integer bitDepth) {
setBitDepth(bitDepth);
return this;
}
/**
* Specify the number of channels in this output audio track. Valid values are 1 and even numbers up to 64. For
* example, 1, 2, 4, 6, and so on, up to 64.
*
* @param channels
* Specify the number of channels in this output audio track. Valid values are 1 and even numbers up to 64.
* For example, 1, 2, 4, 6, and so on, up to 64.
*/
public void setChannels(Integer channels) {
this.channels = channels;
}
/**
* Specify the number of channels in this output audio track. Valid values are 1 and even numbers up to 64. For
* example, 1, 2, 4, 6, and so on, up to 64.
*
* @return Specify the number of channels in this output audio track. Valid values are 1 and even numbers up to 64.
* For example, 1, 2, 4, 6, and so on, up to 64.
*/
public Integer getChannels() {
return this.channels;
}
/**
* Specify the number of channels in this output audio track. Valid values are 1 and even numbers up to 64. For
* example, 1, 2, 4, 6, and so on, up to 64.
*
* @param channels
* Specify the number of channels in this output audio track. Valid values are 1 and even numbers up to 64.
* For example, 1, 2, 4, 6, and so on, up to 64.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public WavSettings withChannels(Integer channels) {
setChannels(channels);
return this;
}
/**
* The service defaults to using RIFF for WAV outputs. If your output audio is likely to exceed 4 GB in file size,
* or if you otherwise need the extended support of the RF64 format, set your output WAV file format to RF64.
*
* @param format
* The service defaults to using RIFF for WAV outputs. If your output audio is likely to exceed 4 GB in file
* size, or if you otherwise need the extended support of the RF64 format, set your output WAV file format to
* RF64.
* @see WavFormat
*/
public void setFormat(String format) {
this.format = format;
}
/**
* The service defaults to using RIFF for WAV outputs. If your output audio is likely to exceed 4 GB in file size,
* or if you otherwise need the extended support of the RF64 format, set your output WAV file format to RF64.
*
* @return The service defaults to using RIFF for WAV outputs. If your output audio is likely to exceed 4 GB in file
* size, or if you otherwise need the extended support of the RF64 format, set your output WAV file format
* to RF64.
* @see WavFormat
*/
public String getFormat() {
return this.format;
}
/**
* The service defaults to using RIFF for WAV outputs. If your output audio is likely to exceed 4 GB in file size,
* or if you otherwise need the extended support of the RF64 format, set your output WAV file format to RF64.
*
* @param format
* The service defaults to using RIFF for WAV outputs. If your output audio is likely to exceed 4 GB in file
* size, or if you otherwise need the extended support of the RF64 format, set your output WAV file format to
* RF64.
* @return Returns a reference to this object so that method calls can be chained together.
* @see WavFormat
*/
public WavSettings withFormat(String format) {
setFormat(format);
return this;
}
/**
* The service defaults to using RIFF for WAV outputs. If your output audio is likely to exceed 4 GB in file size,
* or if you otherwise need the extended support of the RF64 format, set your output WAV file format to RF64.
*
* @param format
* The service defaults to using RIFF for WAV outputs. If your output audio is likely to exceed 4 GB in file
* size, or if you otherwise need the extended support of the RF64 format, set your output WAV file format to
* RF64.
* @return Returns a reference to this object so that method calls can be chained together.
* @see WavFormat
*/
public WavSettings withFormat(WavFormat format) {
this.format = format.toString();
return this;
}
/**
* Sample rate in Hz.
*
* @param sampleRate
* Sample rate in Hz.
*/
public void setSampleRate(Integer sampleRate) {
this.sampleRate = sampleRate;
}
/**
* Sample rate in Hz.
*
* @return Sample rate in Hz.
*/
public Integer getSampleRate() {
return this.sampleRate;
}
/**
* Sample rate in Hz.
*
* @param sampleRate
* Sample rate in Hz.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public WavSettings withSampleRate(Integer sampleRate) {
setSampleRate(sampleRate);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getBitDepth() != null)
sb.append("BitDepth: ").append(getBitDepth()).append(",");
if (getChannels() != null)
sb.append("Channels: ").append(getChannels()).append(",");
if (getFormat() != null)
sb.append("Format: ").append(getFormat()).append(",");
if (getSampleRate() != null)
sb.append("SampleRate: ").append(getSampleRate());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof WavSettings == false)
return false;
WavSettings other = (WavSettings) obj;
if (other.getBitDepth() == null ^ this.getBitDepth() == null)
return false;
if (other.getBitDepth() != null && other.getBitDepth().equals(this.getBitDepth()) == false)
return false;
if (other.getChannels() == null ^ this.getChannels() == null)
return false;
if (other.getChannels() != null && other.getChannels().equals(this.getChannels()) == false)
return false;
if (other.getFormat() == null ^ this.getFormat() == null)
return false;
if (other.getFormat() != null && other.getFormat().equals(this.getFormat()) == false)
return false;
if (other.getSampleRate() == null ^ this.getSampleRate() == null)
return false;
if (other.getSampleRate() != null && other.getSampleRate().equals(this.getSampleRate()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getBitDepth() == null) ? 0 : getBitDepth().hashCode());
hashCode = prime * hashCode + ((getChannels() == null) ? 0 : getChannels().hashCode());
hashCode = prime * hashCode + ((getFormat() == null) ? 0 : getFormat().hashCode());
hashCode = prime * hashCode + ((getSampleRate() == null) ? 0 : getSampleRate().hashCode());
return hashCode;
}
@Override
public WavSettings clone() {
try {
return (WavSettings) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
com.amazonaws.services.mediaconvert.model.transform.WavSettingsMarshaller.getInstance().marshall(this, protocolMarshaller);
}
}
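/*
 * A minimal usage sketch (not part of the generated SDK source), assuming the
 * companion WavFormat enum exposes the RF64 constant described in the format
 * documentation above: the fluent with* methods can be chained to configure a
 * 24-bit, stereo, 48 kHz WAV track that uses RF64 for very large outputs.
 *
 *   WavSettings wav = new WavSettings()
 *           .withBitDepth(24)
 *           .withChannels(2)
 *           .withSampleRate(48000)
 *           .withFormat(WavFormat.RF64);
 */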
|
|
/*
Copyright 2013-2014, JUMA Technology
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.bcsphere.bluetooth.tools;
import java.lang.reflect.Method;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.UUID;
import org.apache.cordova.CallbackContext;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.annotation.SuppressLint;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.content.Context;
import android.content.pm.PackageManager;
import android.util.Base64;
import android.bluetooth.BluetoothGattCharacteristic;
@SuppressLint({ "UseSparseArrays", "SimpleDateFormat", "DefaultLocale" })
public class Tools {
public static final String ADVERTISEMENT_DATA = "advertisementData";
public static final String BLUETOOTH_CLOSE = "bluetoothclose";
public static final String BLUETOOTH_OPEN = "bluetoothopen";
public static final String BLUETOOTH_STATE = "state";
public static final String CHARACTERISTIC_INDEX = "characteristicIndex";
public static final String CHARACTERISTIC_NAME = "characteristicName";
public static final String CHARACTERISTIC_PERMISSION = "characteristicPermission";
public static final String CHARACTERISTIC_PROPERTY = "characteristicProperty";
public static final String CHARACTERISTIC_UUID = "characteristicUUID";
public static final String CHARACTERISTIC_UUIDS = "characteristicUUIDs";
public static final String CHARACTERISTIC_VALUE_TYPE = "characteristicValueType";
public static final String CHARACTERISTIC_VALUE = "characteristicValue";
public static final String CHARACTERISTICS = "characteristics";
public static final String DATE = "date";
public static final String DATE_FORMATE = "yyyy-MM-dd HH:mm:ss:SSS";
public static final String DESCRIPTOR_INDEX = "descriptorIndex";
public static final String DESCRIPTOR_NAME = "descriptorName";
public static final String DESCRIPTOR_PERMISSION = "descriptorPermission";
public static final String DESCRIPTOR_UUID = "descriptorUUID";
public static final String DESCRIPTOR_VALUE = "descriptorValue";
public static final String DESCRIPTOR_VALUE_TYPE = "descriptorValueType";
public static final String DESCRIPTORS = "descriptors";
public static final String DEVICE_ADDRESS = "deviceAddress";
public static final String DEVICE_NAME = "deviceName";
public static final String DISCONNECT = "disconnect";
public static final String ENABLE = "enable";
public static final String EVENT_NAME = "eventName";
public static final String IS_CONNECTED = "isConnected";
public static final String IS_FALSE = "false";
public static final String IS_TRUE = "true";
public static final String ON_READ_REQUEST = "onReadRequest";
public static final String ON_WRITE_REQUEST = "onWriteRequest";
public static final String PERMISSION_READ = "read";
public static final String PERMISSION_READ_ENCRYPTED = "readEncrypted";
public static final String PERMISSION_READ_ENCRYPTED_MITM = "readEncryptedMitm";
public static final String PERMISSION_WRITE = "write";
public static final String PERMISSION_WRITE_ENCRYPTED_MITM = "writeEncryptedMitm";
public static final String PERMISSION_WRITE_ENCRYPTED = "writeEncrypted";
public static final String PERMISSION_WRITE_SIGEND = "writeSigend";
public static final String PERMISSION_WRITE_SIGEND_MITM = "writeSigendMitm";
public static final String PROPERTY_SIGNED_WRITE = "authenticatedSignedWrites";
public static final String PROPERTY_BROADCAST = "broadcast";
public static final String PROPERTY_EXTENDED_PROPS = "extendedProperties";
public static final String PROPERTY_INDICATE = "indicate";
public static final String PROPERTY_NOTIFY = "notify";
public static final String PROPERTY_READ = "read";
public static final String PROPERTY_WRITE = "write";
public static final String PROPERTY_WRITE_NO_RESPONSE = "writeWithoutResponse";
public static final String RSSI = "RSSI";
public static final String SERVICE_INDEX = "serviceIndex";
public static final String SERVICE_NAME = "serviceName";
public static final String SERVICE_PACKET = "servicePacket";
public static final String SERVICE_TYPE = "serviceType";
public static final String SERVICE_UUID = "serviceUUID";
public static final String SERVICE_UUIDS = "serviceUUIDs";
public static final String SERVICES = "services";
public static final String UINQUE_ID = "uniqueID";
public static final String VALUE = "value";
public static final String WRITE_TYPE = "writeType";
public static final String WRITE_VALUE = "writeValue";
public static final String SUCCESS = "success";
public static final String ERROR = "error";
public static final String MES = "mes";
public static final UUID NOTIFICATION_UUID = UUID
.fromString("00002902-0000-1000-8000-00805f9b34fb");
public static final UUID GENERIC_ACCESS_UUID = UUID
.fromString("00001800-0000-1000-8000-00805f9b34fb");
public static final UUID GENERIC_ATTRIBUTE_UUID = UUID
.fromString("00001801-0000-1000-8000-00805f9b34fb");
public static final String LOCAL_NAME = "localName";
public static final String TXPOWER_LEVEL = "txPowerLevel";
public static final String SERVICE_DATA = "serviceData";
public static final String MANUFACTURER_DATA = "manufacturerData";
public static final String OVERFLOW_SERVICE_UUIDS = "overflowServiceUUIDs";
public static final String ISCONNECTABLE = "isConnectable";
public static final String SOLICITED_SERVICE_UUIDS = "solicitedServiceUUIDs";
private static final String REMOVE_BOND = "removeBond";
private static final String CREATE_BOND = "createBond";
private static final String UNKNOWN = "unknown";
private static HashMap<Integer, String> propertys = new HashMap<Integer, String>();
static {
propertys.put(1, PROPERTY_BROADCAST);
propertys.put(2, PROPERTY_READ);
propertys.put(4, PROPERTY_WRITE_NO_RESPONSE);
propertys.put(8, PROPERTY_WRITE);
propertys.put(16, PROPERTY_NOTIFY);
propertys.put(32, PROPERTY_INDICATE);
propertys.put(64, PROPERTY_SIGNED_WRITE);
propertys.put(128, PROPERTY_EXTENDED_PROPS);
}
private static HashMap<String, String> UUIDInstructions = new HashMap<String, String>();
static {
UUIDInstructions.put("00001800-0000-1000-8000-00805f9b34fb",
"Generic Access");
UUIDInstructions.put("00001801-0000-1000-8000-00805f9b34fb",
"Generic Attribute");
UUIDInstructions.put("00001802-0000-1000-8000-00805f9b34fb",
"Immediate Alert");
UUIDInstructions.put("00001803-0000-1000-8000-00805f9b34fb",
"Link Loss");
UUIDInstructions
.put("00001804-0000-1000-8000-00805f9b34fb", "Tx Power");
UUIDInstructions.put("00001805-0000-1000-8000-00805f9b34fb",
"Current Time Service");
UUIDInstructions.put("00001806-0000-1000-8000-00805f9b34fb",
"Reference Time Update Service");
UUIDInstructions.put("00001807-0000-1000-8000-00805f9b34fb",
"Next DST Change Service");
UUIDInstructions.put("00001808-0000-1000-8000-00805f9b34fb", "Glucose");
UUIDInstructions.put("00001809-0000-1000-8000-00805f9b34fb",
"Health Thermometer");
UUIDInstructions.put("0000180a-0000-1000-8000-00805f9b34fb",
"Device Information");
UUIDInstructions.put("0000180b-0000-1000-8000-00805f9b34fb",
"Network Availability Service");
UUIDInstructions
.put("0000180c-0000-1000-8000-00805f9b34fb", "Watchdog");
UUIDInstructions.put("0000180d-0000-1000-8000-00805f9b34fb",
"Heart Rate");
UUIDInstructions.put("0000180e-0000-1000-8000-00805f9b34fb",
"Phone Alert Status Service");
UUIDInstructions.put("0000180f-0000-1000-8000-00805f9b34fb",
"Battery Service");
UUIDInstructions.put("00001810-0000-1000-8000-00805f9b34fb",
"Blood Pressure");
UUIDInstructions.put("00001811-0000-1000-8000-00805f9b34fb",
"Alert Notification Service");
UUIDInstructions.put("00001812-0000-1000-8000-00805f9b34fb",
"Human Interface Device");
UUIDInstructions.put("00001813-0000-1000-8000-00805f9b34fb",
"Scan Parameters");
UUIDInstructions.put("00001814-0000-1000-8000-00805f9b34fb",
"RUNNING SPEED AND CADENCE");
UUIDInstructions.put("00001815-0000-1000-8000-00805f9b34fb",
"Automation IO");
UUIDInstructions.put("00001816-0000-1000-8000-00805f9b34fb",
"Cycling Speed and Cadence");
UUIDInstructions.put("00001817-0000-1000-8000-00805f9b34fb",
"Pulse Oximeter");
UUIDInstructions.put("00001818-0000-1000-8000-00805f9b34fb",
"Cycling Power Service");
UUIDInstructions.put("00001819-0000-1000-8000-00805f9b34fb",
"Location and Navigation Service");
UUIDInstructions.put("0000181a-0000-1000-8000-00805f9b34fb",
"Continous Glucose Measurement Service");
UUIDInstructions.put("00002a00-0000-1000-8000-00805f9b34fb",
"Device Name");
UUIDInstructions.put("00002a01-0000-1000-8000-00805f9b34fb",
"Appearance");
UUIDInstructions.put("00002a02-0000-1000-8000-00805f9b34fb",
"Peripheral Privacy Flag");
UUIDInstructions.put("00002a03-0000-1000-8000-00805f9b34fb",
"Reconnection Address");
UUIDInstructions.put("00002a04-0000-1000-8000-00805f9b34fb",
"Peripheral Preferred Connection Parameters");
UUIDInstructions.put("00002a05-0000-1000-8000-00805f9b34fb",
"Service Changed");
UUIDInstructions.put("00002a06-0000-1000-8000-00805f9b34fb",
"Alert Level");
UUIDInstructions.put("00002a07-0000-1000-8000-00805f9b34fb",
"Tx Power Level");
UUIDInstructions.put("00002a08-0000-1000-8000-00805f9b34fb",
"Date Time");
UUIDInstructions.put("00002a09-0000-1000-8000-00805f9b34fb",
"Day of Week");
UUIDInstructions.put("00002a0a-0000-1000-8000-00805f9b34fb",
"Day Date Time");
UUIDInstructions.put("00002a0b-0000-1000-8000-00805f9b34fb",
"Exact Time 100");
UUIDInstructions.put("00002a0c-0000-1000-8000-00805f9b34fb",
"Exact Time 256");
UUIDInstructions.put("00002a0d-0000-1000-8000-00805f9b34fb",
"DST Offset");
UUIDInstructions.put("00002a0e-0000-1000-8000-00805f9b34fb",
"Time Zone");
UUIDInstructions.put("00002a1f-0000-1000-8000-00805f9b34fb",
"Local Time Information");
UUIDInstructions.put("00002a10-0000-1000-8000-00805f9b34fb",
"Secondary Time Zone");
UUIDInstructions.put("00002a11-0000-1000-8000-00805f9b34fb",
"Time with DST");
UUIDInstructions.put("00002a12-0000-1000-8000-00805f9b34fb",
"Time Accuracy");
UUIDInstructions.put("00002a13-0000-1000-8000-00805f9b34fb",
"Time Source");
UUIDInstructions.put("00002a14-0000-1000-8000-00805f9b34fb",
"Reference Time Information");
UUIDInstructions.put("00002a15-0000-1000-8000-00805f9b34fb",
"Time Broadcast");
UUIDInstructions.put("00002a16-0000-1000-8000-00805f9b34fb",
"Time Update Control Point");
UUIDInstructions.put("00002a17-0000-1000-8000-00805f9b34fb",
"Time Update State");
UUIDInstructions.put("00002a18-0000-1000-8000-00805f9b34fb",
"Glucose Measurement");
UUIDInstructions.put("00002a19-0000-1000-8000-00805f9b34fb",
"Battery Level");
UUIDInstructions.put("00002a1a-0000-1000-8000-00805f9b34fb",
"Battery Power State");
UUIDInstructions.put("00002a1b-0000-1000-8000-00805f9b34fb",
"Battery Level State");
UUIDInstructions.put("00002a1c-0000-1000-8000-00805f9b34fb",
"Temperature Measurement");
UUIDInstructions.put("00002a1d-0000-1000-8000-00805f9b34fb",
"Temperature Type");
UUIDInstructions.put("00002a1e-0000-1000-8000-00805f9b34fb",
"Intermediate Temperature");
UUIDInstructions.put("00002a1f-0000-1000-8000-00805f9b34fb",
"Temperature in Celsius");
UUIDInstructions.put("00002a20-0000-1000-8000-00805f9b34fb",
"Temperature in Fahrenheit");
UUIDInstructions.put("00002a21-0000-1000-8000-00805f9b34fb",
"Measurement Interval");
UUIDInstructions.put("00002a22-0000-1000-8000-00805f9b34fb",
"Boot Keyboard Input Report");
UUIDInstructions.put("00002a23-0000-1000-8000-00805f9b34fb",
"System ID");
UUIDInstructions.put("00002a24-0000-1000-8000-00805f9b34fb",
"Model Number String");
UUIDInstructions.put("00002a25-0000-1000-8000-00805f9b34fb",
"Serial Number String");
UUIDInstructions.put("00002a26-0000-1000-8000-00805f9b34fb",
"Firmware Revision String");
UUIDInstructions.put("00002a27-0000-1000-8000-00805f9b34fb",
"Hardware Revision String");
UUIDInstructions.put("00002a28-0000-1000-8000-00805f9b34fb",
"Software Revision String");
UUIDInstructions.put("00002a29-0000-1000-8000-00805f9b34fb",
"Manufacturer Name String");
UUIDInstructions.put("00002a2a-0000-1000-8000-00805f9b34fb",
"IEEE 11073-20601 Regulatory Certification Data List");
UUIDInstructions.put("00002a2b-0000-1000-8000-00805f9b34fb",
"Current Time");
UUIDInstructions.put("00002a2c-0000-1000-8000-00805f9b34fb",
"Elevation");
UUIDInstructions
.put("00002a2d-0000-1000-8000-00805f9b34fb", "Latitude");
UUIDInstructions.put("00002a2e-0000-1000-8000-00805f9b34fb",
"Longitude");
UUIDInstructions.put("00002a2f-0000-1000-8000-00805f9b34fb",
"Position 2D");
UUIDInstructions.put("00002a30-0000-1000-8000-00805f9b34fb",
"Position 3D");
UUIDInstructions.put("00002a31-0000-1000-8000-00805f9b34fb",
"Scan Refresh");
UUIDInstructions.put("00002a32-0000-1000-8000-00805f9b34fb",
"Boot Keyboard Output Report");
UUIDInstructions.put("00002a33-0000-1000-8000-00805f9b34fb",
"Boot Mouse Input Report");
UUIDInstructions.put("00002a34-0000-1000-8000-00805f9b34fb",
"Glucose Measurement Context");
UUIDInstructions.put("00002a35-0000-1000-8000-00805f9b34fb",
"Blood Pressure Measurement");
UUIDInstructions.put("00002a36-0000-1000-8000-00805f9b34fb",
"Intermediate Cuff Pressure");
UUIDInstructions.put("00002a37-0000-1000-8000-00805f9b34fb",
"Heart Rate Measurement");
UUIDInstructions.put("00002a38-0000-1000-8000-00805f9b34fb",
"Body Sensor Location");
UUIDInstructions.put("00002a39-0000-1000-8000-00805f9b34fb",
"Heart Rate Control Point");
UUIDInstructions.put("00002a3a-0000-1000-8000-00805f9b34fb",
"Removable");
UUIDInstructions.put("00002a3b-0000-1000-8000-00805f9b34fb",
"Service Required");
UUIDInstructions.put("00002a3c-0000-1000-8000-00805f9b34fb",
"Scientific Temperature in Celsius");
UUIDInstructions.put("00002a3d-0000-1000-8000-00805f9b34fb", "String");
UUIDInstructions.put("00002a3e-0000-1000-8000-00805f9b34fb",
"Network Availability");
UUIDInstructions.put("00002a3g-0000-1000-8000-00805f9b34fb",
"Alert Status");
UUIDInstructions.put("00002a40-0000-1000-8000-00805f9b34fb",
"Ringer Control Point");
UUIDInstructions.put("00002a41-0000-1000-8000-00805f9b34fb",
"Ringer Setting");
UUIDInstructions.put("00002a42-0000-1000-8000-00805f9b34fb",
"Alert Category ID Bit Mask");
UUIDInstructions.put("00002a43-0000-1000-8000-00805f9b34fb",
"Alert Category ID");
UUIDInstructions.put("00002a44-0000-1000-8000-00805f9b34fb",
"Alert Notification Control Point");
UUIDInstructions.put("00002a45-0000-1000-8000-00805f9b34fb",
"Unread Alert Status");
UUIDInstructions.put("00002a46-0000-1000-8000-00805f9b34fb",
"New Alert");
UUIDInstructions.put("00002a47-0000-1000-8000-00805f9b34fb",
"Supported New Alert Category");
UUIDInstructions.put("00002a48-0000-1000-8000-00805f9b34fb",
"Supported Unread Alert Category");
UUIDInstructions.put("00002a49-0000-1000-8000-00805f9b34fb",
"Blood Pressure Feature");
UUIDInstructions.put("00002a4a-0000-1000-8000-00805f9b34fb",
"HID Information");
UUIDInstructions.put("00002a4b-0000-1000-8000-00805f9b34fb",
"Report Map");
UUIDInstructions.put("00002a4c-0000-1000-8000-00805f9b34fb",
"HID Control Point");
UUIDInstructions.put("00002a4d-0000-1000-8000-00805f9b34fb", "Report");
UUIDInstructions.put("00002a4e-0000-1000-8000-00805f9b34fb",
"Protocol Mode");
UUIDInstructions.put("00002a4g-0000-1000-8000-00805f9b34fb",
"Scan Interval Window");
UUIDInstructions.put("00002a50-0000-1000-8000-00805f9b34fb", "PnP ID");
UUIDInstructions.put("00002a51-0000-1000-8000-00805f9b34fb",
"Glucose Features");
UUIDInstructions.put("00002a52-0000-1000-8000-00805f9b34fb",
"Record Access Control Point");
UUIDInstructions.put("00002a53-0000-1000-8000-00805f9b34fb",
"RSC Measurement");
UUIDInstructions.put("00002a54-0000-1000-8000-00805f9b34fb",
"RSC Feature");
UUIDInstructions.put("00002a55-0000-1000-8000-00805f9b34fb",
"SC Control Point");
UUIDInstructions.put("00002a56-0000-1000-8000-00805f9b34fb",
"Digital Input");
UUIDInstructions.put("00002a57-0000-1000-8000-00805f9b34fb",
"Digital Output");
UUIDInstructions.put("00002a58-0000-1000-8000-00805f9b34fb",
"Analog Input");
UUIDInstructions.put("00002a59-0000-1000-8000-00805f9b34fb",
"Analog Output");
UUIDInstructions.put("00002a5a-0000-1000-8000-00805f9b34fb",
"Aggregate Input");
UUIDInstructions.put("00002a5b-0000-1000-8000-00805f9b34fb",
"CSC Measurement");
UUIDInstructions.put("00002a5c-0000-1000-8000-00805f9b34fb",
"CSC Feature");
UUIDInstructions.put("00002a5d-0000-1000-8000-00805f9b34fb",
"Sensor Location");
UUIDInstructions.put("00002a5e-0000-1000-8000-00805f9b34fb",
"Pulse Oximetry Spot-check Measurement");
UUIDInstructions.put("00002a5f-0000-1000-8000-00805f9b34fb",
"Pulse Oximetry Continuous Measurement");
UUIDInstructions.put("00002a60-0000-1000-8000-00805f9b34fb",
"Pulse Oximetry Pulsatile Event");
UUIDInstructions.put("00002a61-0000-1000-8000-00805f9b34fb",
"Pulse Oximetry Features");
UUIDInstructions.put("00002a62-0000-1000-8000-00805f9b34fb",
"Pulse Oximetry Control Point");
UUIDInstructions.put("00002a63-0000-1000-8000-00805f9b34fb",
"Cycling Power Measurement Characteristic");
UUIDInstructions.put("00002a64-0000-1000-8000-00805f9b34fb",
"Cycling Power Vector Characteristic");
UUIDInstructions.put("00002a65-0000-1000-8000-00805f9b34fb",
"Cycling Power Feature Characteristic");
UUIDInstructions.put("00002a66-0000-1000-8000-00805f9b34fb",
"Cycling Power Control Point Characteristic");
UUIDInstructions.put("00002a67-0000-1000-8000-00805f9b34fb",
"Location and Speed Characteristic");
UUIDInstructions.put("00002a68-0000-1000-8000-00805f9b34fb",
"Navigation Characteristic");
UUIDInstructions.put("00002a69-0000-1000-8000-00805f9b34fb",
"Position Quality Characteristic");
UUIDInstructions.put("00002a6a-0000-1000-8000-00805f9b34fb",
"LN Feature Characteristic");
UUIDInstructions.put("00002a6b-0000-1000-8000-00805f9b34fb",
"LN Control Point Characteristic");
UUIDInstructions.put("00002a6c-0000-1000-8000-00805f9b34fb",
"CGM Measurement Characteristic");
UUIDInstructions.put("00002a6d-0000-1000-8000-00805f9b34fb",
"CGM Features Characteristic");
UUIDInstructions.put("00002a6e-0000-1000-8000-00805f9b34fb",
"CGM Status Characteristic");
UUIDInstructions.put("00002a6f-0000-1000-8000-00805f9b34fb",
"CGM Session Start Time Characteristic");
UUIDInstructions.put("00002a70-0000-1000-8000-00805f9b34fb",
"Application Security Point Characteristic");
UUIDInstructions.put("00002a71-0000-1000-8000-00805f9b34fb",
"CGM Specific Ops Control Point Characteristic");
}
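/**
 * Returns the readable name of the given property bit (see the propertys map above)
 * if that bit is set in the property bitmask, otherwise null.
 */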
public static String lookup(int propertySum, int property) {
if ((propertySum & property) == property) {
String propertyName = propertys.get(property);
return propertyName == null ? null : propertyName;
} else {
return null;
}
}
public static String lookup(UUID uuid) {
String instruction = UUIDInstructions.get(uuid.toString());
return instruction == null ? UNKNOWN : instruction;
}
public static String getOSVersionNumber() {
return android.os.Build.VERSION.RELEASE;
}
public static String getPhoneModel() {
return android.os.Build.MODEL;
}
public static String getPhoneManufacturers() {
return android.os.Build.MANUFACTURER;
}
public static String getPhoneBrand() {
return android.os.Build.BRAND;
}
public static String getPhoneBasebandVersion() {
try {
Class<?> cl = Class.forName("android.os.SystemProperties");
Object invoker = cl.newInstance();
Method m = cl.getMethod("get", new Class[] { String.class,
String.class });
Object result = m.invoke(invoker, new Object[] {
"gsm.version.baseband", "no message" });
return result.toString();
} catch (Exception e) {
e.printStackTrace();
}
return null;
}
public static boolean creatBond(Class<?> btcClass, BluetoothDevice device)
throws Exception {
Method creatBondMethod = btcClass.getMethod(CREATE_BOND);
Boolean returnValue = (Boolean) creatBondMethod.invoke(device);
return returnValue.booleanValue();
}
static public boolean removeBond(Class<?> btClass, BluetoothDevice btDevice)
throws Exception {
Method removeBondMethod = btClass.getMethod(REMOVE_BOND);
Boolean returnValue = (Boolean) removeBondMethod.invoke(btDevice);
return returnValue.booleanValue();
}
static public boolean isSupportBluetooth() {
// The device supports Bluetooth if a default adapter exists.
return BluetoothAdapter.getDefaultAdapter() != null;
}
static public boolean isBLE(Context context) {
// Bluetooth Low Energy requires the FEATURE_BLUETOOTH_LE system feature.
return context.getPackageManager().hasSystemFeature(
PackageManager.FEATURE_BLUETOOTH_LE);
}
static public boolean isOpenBluetooth() {
BluetoothAdapter bluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
// Guard against devices without a Bluetooth adapter.
return bluetoothAdapter != null && bluetoothAdapter.isEnabled();
}
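/**
 * Builds the characteristic property bitmask from a JSON array of property names:
 * broadcast = 1, read = 2, writeWithoutResponse = 4, write = 8, notify = 16,
 * indicate = 32, authenticatedSignedWrites = 64, extendedProperties = 128.
 */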
static public int encodeProperty(JSONArray ary) {
int property = 0;
if (PROPERTY_BROADCAST.equals(getDataFromArray(ary, PROPERTY_BROADCAST))) {
property = property + 1;
}
if (PROPERTY_READ.equals(getDataFromArray(ary, PROPERTY_READ))) {
property = property + 2;
}
if (PROPERTY_WRITE_NO_RESPONSE.equals(getDataFromArray(ary, PROPERTY_WRITE_NO_RESPONSE))) {
property = property + 4;
}
if (PROPERTY_WRITE.equals(getDataFromArray(ary, PROPERTY_WRITE))) {
property = property + 8;
}
if (PROPERTY_NOTIFY.equals(getDataFromArray(ary, PROPERTY_NOTIFY))) {
property = property + 16;
}
if (PROPERTY_INDICATE.equals(getDataFromArray(ary, PROPERTY_INDICATE))) {
property = property + 32;
}
if (PROPERTY_SIGNED_WRITE.equals(getDataFromArray(ary, PROPERTY_SIGNED_WRITE))) {
property = property + 64;
}
if (PROPERTY_EXTENDED_PROPS.equals(getDataFromArray(ary, PROPERTY_EXTENDED_PROPS))) {
property = property + 128;
}
return property;
}
public static JSONArray decodeProperty(int property) {
JSONArray properties = new JSONArray();
String strProperty = null;
if ((strProperty = lookup(property,
BluetoothGattCharacteristic.PROPERTY_BROADCAST)) != null) {
properties.put(strProperty);
}
if ((strProperty = lookup(property,
BluetoothGattCharacteristic.PROPERTY_READ)) != null) {
properties.put(strProperty);
}
if ((strProperty = lookup(property,
BluetoothGattCharacteristic.PROPERTY_WRITE_NO_RESPONSE)) != null) {
properties.put(strProperty);
}
if ((strProperty = lookup(property,
BluetoothGattCharacteristic.PROPERTY_WRITE)) != null) {
properties.put(strProperty);
}
if ((strProperty = lookup(property,
BluetoothGattCharacteristic.PROPERTY_NOTIFY)) != null) {
properties.put(strProperty);
}
if ((strProperty = lookup(property,
BluetoothGattCharacteristic.PROPERTY_INDICATE)) != null) {
properties.put(strProperty);
}
if ((strProperty = lookup(property,
BluetoothGattCharacteristic.PROPERTY_SIGNED_WRITE)) != null) {
properties.put(strProperty);
}
if ((strProperty = lookup(property,
BluetoothGattCharacteristic.PROPERTY_EXTENDED_PROPS)) != null) {
properties.put(strProperty);
}
return properties;
}
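/*
 * encodeProperty and decodeProperty are inverses over the names listed above; for
 * example a JSON array of ["read", "notify"] encodes to 2 + 16 = 18, and
 * decodeProperty(18) yields ["read", "notify"] again.
 */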
static public int encodePermission(JSONArray ary) {
int permission = 0;
if (PERMISSION_READ.equals(getDataFromArray(ary, PERMISSION_READ))) {
permission = permission + 1;
}
if (PERMISSION_READ_ENCRYPTED.equals(getDataFromArray(ary, PERMISSION_READ_ENCRYPTED))) {
permission = permission + 2;
}
if (PERMISSION_READ_ENCRYPTED_MITM.equals(getDataFromArray(ary, PERMISSION_READ_ENCRYPTED_MITM))) {
permission = permission + 4;
}
if (PERMISSION_WRITE.equals(getDataFromArray(ary, PERMISSION_WRITE))) {
permission = permission + 16;
}
if (PERMISSION_WRITE_ENCRYPTED.equals(getDataFromArray(ary, PERMISSION_WRITE_ENCRYPTED))) {
permission = permission + 32;
}
if (PERMISSION_WRITE_ENCRYPTED_MITM.equals(getDataFromArray(ary, PERMISSION_WRITE_ENCRYPTED_MITM))) {
permission = permission + 64;
}
if (PERMISSION_WRITE_SIGEND.equals(getDataFromArray(ary, PERMISSION_WRITE_SIGEND))) {
permission = permission + 128;
}
if (PERMISSION_WRITE_SIGEND_MITM.equals(getDataFromArray(ary, PERMISSION_WRITE_SIGEND_MITM))) {
permission = permission + 256;
}
return permission;
}
public static String encodeBase64(byte[] value) {
return Base64.encodeToString(value, Base64.NO_WRAP | Base64.NO_PADDING);
}
public static byte[] decodeBase64(String value){
byte[] result = Base64.decode(value, Base64.DEFAULT);
return result;
}
public static void addProperty(JSONObject obj, String key, Object value) {
try {
obj.put(key, value);
} catch (JSONException e) {
}
}
public static JSONObject getObjectFromArray(JSONArray ary) {
JSONObject jsonObject = null;
if (ary != null && ary.length() > 0) {
try {
return new JSONObject(ary.get(0).toString());
} catch (JSONException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
return jsonObject;
}
public static JSONObject getObjectFromArray(JSONArray jsonArray,
int objectIndex) {
JSONObject jsonObject = null;
if (jsonArray != null && jsonArray.length() > 0) {
try {
jsonObject = new JSONObject(jsonArray.get(objectIndex).toString());
} catch (JSONException e) {
}
}
return jsonObject;
}
public static JSONArray getArray(JSONArray jsonArray, String key) {
JSONArray newJsonArray = null;
try {
newJsonArray = new JSONArray(getObjectFromArray(jsonArray).get(key).toString());
} catch (JSONException e) {
e.printStackTrace();
}
return newJsonArray;
}
public static JSONArray getArray(JSONArray jsonArray, int objectIndex,
String key) {
JSONArray newJsonArray = null;
try {
newJsonArray = new JSONArray(getObjectFromArray(jsonArray, objectIndex).get(key).toString());
} catch (JSONException e) {
e.printStackTrace();
}
return newJsonArray;
}
public static String getData(JSONArray ary, String key) {
String result = null;
try {
result = getObjectFromArray(ary).getString(key);
} catch (JSONException e) {
}
return result;
}
public static String getDataFromArray(JSONArray jsonArray, String key) {
if (jsonArray == null || jsonArray.length() == 0) {
return null;
}
int length = jsonArray.length();
String result = null;
try {
for (int i = 0; i < length; i++) {
if (key.equals(jsonArray.getString(i))) {
result = jsonArray.getString(i);
break;
}
}
} catch (JSONException e) {
}
return result;
}
public static String getData(JSONArray jsonArray, int objectIndex,
String key) {
String result = null;
try {
result = getObjectFromArray(jsonArray, objectIndex).getString(key);
} catch (JSONException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return result;
}
public static UUID[] getUUIDs(JSONArray ary) {
try {
if (getObjectFromArray(ary).getJSONArray(SERVICE_UUIDS) == null) {
return null;
} else {
JSONArray uuidArray = getObjectFromArray(ary).getJSONArray(SERVICE_UUIDS);
UUID[] uuids = new UUID[uuidArray.length()];
for (int i = 0; i < uuids.length; i++) {
// JSON arrays carry the UUIDs as strings, so parse rather than cast.
uuids[i] = UUID.fromString(uuidArray.getString(i));
}
return uuids;
}
} catch (JSONException e) {
}
return null;
}
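/**
 * Decodes a raw BLE advertisement packet into a JSON object. The packet is a
 * sequence of AD structures laid out as [length][type][payload]; the loop below
 * walks the structures and maps the common types (service UUID lists, local name,
 * TX power level, service data, manufacturer data) to the corresponding JSON keys.
 */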
public static JSONObject decodeAdvData(byte[] advData) {
JSONObject jsonAdvData = new JSONObject();
JSONArray serviceUUIDs = new JSONArray();
JSONArray solicitedServiceUUIDs = new JSONArray();
JSONArray overflowServiceUUIDs = new JSONArray();
boolean isOver = true;
while (isOver) {
int dataLen = advData[0];
if (dataLen == 0) {
isOver = false;
break;
}
byte[] allData = new byte[dataLen];
for (int i = 0; i < allData.length; i++) {
allData[i] = advData[i + 1];
}
byte[] type = { allData[0] };
byte[] data = new byte[allData.length - 1];
for (int i = 0; i < data.length; i++) {
data[i] = allData[i + 1];
}
if ((0xff & type[0]) == 0x02) {
byte[] mByte = new byte[data.length];
for (int i = 0; i < mByte.length; i++) {
mByte[i] = data[data.length - i - 1];
}
serviceUUIDs.put(bytesToHexString(mByte));
} else if ((0xff & type[0]) == 0x03) {
int number = data.length / 2;
for (int i = 0; i < number; i++) {
byte[] mByte = { data[i * 2], data[i * 2 + 1] };
serviceUUIDs.put(bytesToHexString(mByte));
}
} else if ((0xff & type[0]) == 0x04) {
byte[] mByte = new byte[data.length];
for (int i = 0; i < mByte.length; i++) {
mByte[i] = data[data.length - i - 1];
}
serviceUUIDs.put(bytesToHexString(mByte));
} else if ((0xff & type[0]) == 0x05) {
int number = data.length / 4;
for (int i = 0; i < number; i++) {
byte[] mByte = { data[i * 4], data[i * 4 + 1],
data[i * 4 + 2], data[i * 4 + 3] };
serviceUUIDs.put(bytesToHexString(mByte));
}
} else if ((0xff & type[0]) == 0x06) {
byte[] mByte = new byte[data.length];
for (int i = 0; i < mByte.length; i++) {
mByte[i] = data[data.length - i - 1];
}
serviceUUIDs.put(bytesToHexString(mByte));
} else if ((0xff & type[0]) == 0x07) {
int number = data.length / 16;
for (int i = 0; i < number; i++) {
byte[] mByte = { data[i * 16], data[i * 16 + 1],
data[i * 16 + 2], data[i * 16 + 3],
data[i * 16 + 4], data[i * 16 + 5],
data[i * 16 + 6], data[i * 16 + 7],
data[i * 16 + 8], data[i * 16 + 9],
data[i * 16 + 10], data[i * 16 + 11],
data[i * 16 + 12], data[i * 16 + 13],
data[i * 16 + 14], data[i * 16 + 15] };
serviceUUIDs.put(bytesToHexString(mByte));
}
} else if ((0xff & type[0]) == 0x08) {
addProperty(jsonAdvData, LOCAL_NAME, hexStrToStr(bytesToHexString(data)));
} else if ((0xff & type[0]) == 0x09) {
addProperty(jsonAdvData, LOCAL_NAME, hexStrToStr(bytesToHexString(data)));
} else if ((0xff & type[0]) == 0x0a) {
addProperty(jsonAdvData, TXPOWER_LEVEL,bytesToHexString(data));
} else if ((0xff & type[0]) == 0x12) {
addProperty(jsonAdvData,IS_CONNECTED, bytesToHexString(data));
} else if ((0xff & type[0]) == 0x14) {
int number = data.length / 2;
for (int i = 0; i < number; i++) {
byte[] mByte = { data[i * 2], data[i * 2 + 1] };
solicitedServiceUUIDs.put(bytesToHexString(mByte));
}
} else if ((0xff & type[0]) == 0x15) {
int number = data.length / 16;
for (int i = 0; i < number; i++) {
byte[] mByte = { data[i * 16], data[i * 16 + 1],
data[i * 16 + 2], data[i * 16 + 3],
data[i * 16 + 4], data[i * 16 + 5],
data[i * 16 + 6], data[i * 16 + 7],
data[i * 16 + 8], data[i * 16 + 9],
data[i * 16 + 10], data[i * 16 + 11],
data[i * 16 + 12], data[i * 16 + 13],
data[i * 16 + 14], data[i * 16 + 15] };
solicitedServiceUUIDs.put(bytesToHexString(mByte));
}
} else if ((0xff & type[0]) == 0x16) {
addProperty(jsonAdvData, SERVICE_DATA, bytesToHexString(data));
} else if ((0xff & type[0]) == 0xff) {
addProperty(jsonAdvData, MANUFACTURER_DATA,encodeBase64(data));
}
byte[] newData = new byte[advData.length - dataLen - 1];
for (int i = 0; i < newData.length; i++) {
newData[i] = advData[i + 1 + dataLen];
}
advData = newData;
}
addProperty(jsonAdvData, SERVICE_UUIDS, serviceUUIDs);
addProperty(jsonAdvData, SOLICITED_SERVICE_UUIDS, solicitedServiceUUIDs);
addProperty(jsonAdvData, OVERFLOW_SERVICE_UUIDS, overflowServiceUUIDs);
return jsonAdvData;
}
public static void sendErrorMsg(CallbackContext callbackContext) {
JSONObject jsonObject = new JSONObject();
Tools.addProperty(jsonObject, Tools.MES, Tools.ERROR);
callbackContext.error(jsonObject);
}
public static void sendSuccessMsg(CallbackContext callbackContext) {
JSONObject jsonObject = new JSONObject();
Tools.addProperty(jsonObject, Tools.MES, Tools.SUCCESS);
callbackContext.success(jsonObject);
}
public static String getDateString() {
return new SimpleDateFormat(DATE_FORMATE).format(new Date());
}
public static final String bytesToHexString(byte[] bArray)
{
StringBuffer sb = new StringBuffer(bArray.length);
String sTemp;
for (int i = 0; i < bArray.length; i++) {
sTemp = Integer.toHexString(0xFF & bArray[i]);
if (sTemp.length() < 2)
sb.append(0);
sb.append(sTemp.toUpperCase());
}
return sb.toString();
}
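/**
 * Converts a hex string to bytes. An odd-length string is padded with a leading
 * zero, and the resulting array is filled in reverse order (the first hex pair
 * ends up in the last byte).
 */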
public static byte[] hexStringToByte(String hexString)
{
hexString = hexString.toLowerCase();
if(hexString.length() % 2 != 0){
hexString = "0" + hexString;
}
int len = (hexString.length() / 2);
byte[] result = new byte[len];
char[] achar = hexString.toCharArray();
for (int i = 0; i <len; i++) {
int pos = i * 2;
result[len - i -1] = (byte) ("0123456789abcdef".indexOf(achar[pos]) << 4 | "0123456789abcdef".indexOf(achar[pos + 1]));
}
return result;
}
public static byte[] ascIIStringToByte(String ascIIString)
{
int asc = Integer.parseInt(ascIIString);
byte[] b = new byte[ascIIString.length()];
for (int i = 0; i < b.length; i++) {
b[i] = (byte) (asc >> i*8);
}
return b;
}
public static String hexStrToStr(String hexStr)
{
String str = "0123456789ABCDEF";
char[] hexs = hexStr.toCharArray();
byte[] bytes = new byte[hexStr.length() / 2];
int n;
for (int i = 0; i < bytes.length; i++)
{
n = str.indexOf(hexs[2 * i]) * 16;
n += str.indexOf(hexs[2 * i + 1]);
bytes[i] = (byte) (n & 0xff);
}
return new String(bytes);
}
public static boolean isSupportUniversalAPI()
{
// The universal Bluetooth LE API requires Android 4.3 or newer.
try {
String[] current = getOSVersionNumber().split("\\.");
int major = Integer.parseInt(current[0]);
int minor = current.length > 1 ? Integer.parseInt(current[1]) : 0;
return major > 4 || (major == 4 && minor >= 3);
} catch (NumberFormatException e) {
return false;
}
}
public static boolean isSupportSpecificAPI(String specificBrand)
{
if (getPhoneBrand().toLowerCase().indexOf(specificBrand.toLowerCase()) != -1 ||
getPhoneManufacturers().toLowerCase().indexOf(specificBrand.toLowerCase()) != -1)
{
return true;
}else {
return false;
}
}
public static String getSupportBasebandVersionBrand()
{
String bv = getPhoneBasebandVersion();
String xiaomi = "xiaomi";
if (bv != null && bv.toLowerCase().indexOf("m8064") != -1)
{
return xiaomi;
}
return null;
}
}
|
|
/*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
* http://creativecommons.org/publicdomain/zero/1.0/
*/
package java.util.concurrent;
import java.util.*;
/**
* Provides default implementations of {@link ExecutorService}
* execution methods. This class implements the <tt>submit</tt>,
* <tt>invokeAny</tt> and <tt>invokeAll</tt> methods using a
* {@link RunnableFuture} returned by <tt>newTaskFor</tt>, which defaults
* to the {@link FutureTask} class provided in this package. For example,
* the implementation of <tt>submit(Runnable)</tt> creates an
* associated <tt>RunnableFuture</tt> that is executed and
* returned. Subclasses may override the <tt>newTaskFor</tt> methods
* to return <tt>RunnableFuture</tt> implementations other than
* <tt>FutureTask</tt>.
*
* <p> <b>Extension example</b>. Here is a sketch of a class
* that customizes {@link ThreadPoolExecutor} to use
* a <tt>CustomTask</tt> class instead of the default <tt>FutureTask</tt>:
* <pre> {@code
* public class CustomThreadPoolExecutor extends ThreadPoolExecutor {
*
* static class CustomTask<V> implements RunnableFuture<V> {...}
*
* protected <V> RunnableFuture<V> newTaskFor(Callable<V> c) {
* return new CustomTask<V>(c);
* }
* protected <V> RunnableFuture<V> newTaskFor(Runnable r, V v) {
* return new CustomTask<V>(r, v);
* }
* // ... add constructors, etc.
* }}</pre>
*
* @since 1.5
* @author Doug Lea
*/
public abstract class AbstractExecutorService implements ExecutorService {
/**
* Returns a <tt>RunnableFuture</tt> for the given runnable and default
* value.
*
* @param runnable the runnable task being wrapped
* @param value the default value for the returned future
* @return a <tt>RunnableFuture</tt> which when run will run the
* underlying runnable and which, as a <tt>Future</tt>, will yield
* the given value as its result and provide for cancellation of
* the underlying task.
* @since 1.6
*/
protected <T> RunnableFuture<T> newTaskFor(Runnable runnable, T value) {
return new FutureTask<T>(runnable, value);
}
/**
* Returns a <tt>RunnableFuture</tt> for the given callable task.
*
* @param callable the callable task being wrapped
* @return a <tt>RunnableFuture</tt> which when run will call the
* underlying callable and which, as a <tt>Future</tt>, will yield
* the callable's result as its result and provide for
* cancellation of the underlying task.
* @since 1.6
*/
protected <T> RunnableFuture<T> newTaskFor(Callable<T> callable) {
return new FutureTask<T>(callable);
}
/**
* @throws RejectedExecutionException {@inheritDoc}
* @throws NullPointerException {@inheritDoc}
*/
public Future<?> submit(Runnable task) {
if (task == null) throw new NullPointerException();
RunnableFuture<Void> ftask = newTaskFor(task, null);
execute(ftask);
return ftask;
}
/**
* @throws RejectedExecutionException {@inheritDoc}
* @throws NullPointerException {@inheritDoc}
*/
public <T> Future<T> submit(Runnable task, T result) {
if (task == null) throw new NullPointerException();
RunnableFuture<T> ftask = newTaskFor(task, result);
execute(ftask);
return ftask;
}
/**
* @throws RejectedExecutionException {@inheritDoc}
* @throws NullPointerException {@inheritDoc}
*/
public <T> Future<T> submit(Callable<T> task) {
if (task == null) throw new NullPointerException();
RunnableFuture<T> ftask = newTaskFor(task);
execute(ftask);
return ftask;
}
/**
* the main mechanics of invokeAny.
*/
private <T> T doInvokeAny(Collection<? extends Callable<T>> tasks,
boolean timed, long nanos)
throws InterruptedException, ExecutionException, TimeoutException {
if (tasks == null)
throw new NullPointerException();
int ntasks = tasks.size();
if (ntasks == 0)
throw new IllegalArgumentException();
List<Future<T>> futures= new ArrayList<Future<T>>(ntasks);
ExecutorCompletionService<T> ecs =
new ExecutorCompletionService<T>(this);
// For efficiency, especially in executors with limited
// parallelism, check to see if previously submitted tasks are
// done before submitting more of them. This interleaving
// plus the exception mechanics account for messiness of main
// loop.
try {
// Record exceptions so that if we fail to obtain any
// result, we can throw the last exception we got.
ExecutionException ee = null;
long lastTime = timed ? System.nanoTime() : 0;
Iterator<? extends Callable<T>> it = tasks.iterator();
// Start one task for sure; the rest incrementally
futures.add(ecs.submit(it.next()));
--ntasks;
int active = 1;
for (;;) {
Future<T> f = ecs.poll();
if (f == null) {
if (ntasks > 0) {
--ntasks;
futures.add(ecs.submit(it.next()));
++active;
}
else if (active == 0)
break;
else if (timed) {
f = ecs.poll(nanos, TimeUnit.NANOSECONDS);
if (f == null)
throw new TimeoutException();
long now = System.nanoTime();
nanos -= now - lastTime;
lastTime = now;
}
else
f = ecs.take();
}
if (f != null) {
--active;
try {
return f.get();
} catch (ExecutionException eex) {
ee = eex;
} catch (RuntimeException rex) {
ee = new ExecutionException(rex);
}
}
}
if (ee == null)
ee = new ExecutionException();
throw ee;
} finally {
for (Future<T> f : futures)
f.cancel(true);
}
}
public <T> T invokeAny(Collection<? extends Callable<T>> tasks)
throws InterruptedException, ExecutionException {
try {
return doInvokeAny(tasks, false, 0);
} catch (TimeoutException cannotHappen) {
assert false;
return null;
}
}
public <T> T invokeAny(Collection<? extends Callable<T>> tasks,
long timeout, TimeUnit unit)
throws InterruptedException, ExecutionException, TimeoutException {
return doInvokeAny(tasks, true, unit.toNanos(timeout));
}
public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks)
throws InterruptedException {
if (tasks == null)
throw new NullPointerException();
List<Future<T>> futures = new ArrayList<Future<T>>(tasks.size());
boolean done = false;
try {
for (Callable<T> t : tasks) {
RunnableFuture<T> f = newTaskFor(t);
futures.add(f);
execute(f);
}
for (Future<T> f : futures) {
if (!f.isDone()) {
try {
f.get();
} catch (CancellationException ignore) {
} catch (ExecutionException ignore) {
}
}
}
done = true;
return futures;
} finally {
if (!done)
for (Future<T> f : futures)
f.cancel(true);
}
}
public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks,
long timeout, TimeUnit unit)
throws InterruptedException {
if (tasks == null || unit == null)
throw new NullPointerException();
long nanos = unit.toNanos(timeout);
List<Future<T>> futures = new ArrayList<Future<T>>(tasks.size());
boolean done = false;
try {
for (Callable<T> t : tasks)
futures.add(newTaskFor(t));
long lastTime = System.nanoTime();
// Interleave time checks and calls to execute in case
// executor doesn't have any/much parallelism.
Iterator<Future<T>> it = futures.iterator();
while (it.hasNext()) {
execute((Runnable)(it.next()));
long now = System.nanoTime();
nanos -= now - lastTime;
lastTime = now;
if (nanos <= 0)
return futures;
}
for (Future<T> f : futures) {
if (!f.isDone()) {
if (nanos <= 0)
return futures;
try {
f.get(nanos, TimeUnit.NANOSECONDS);
} catch (CancellationException ignore) {
} catch (ExecutionException ignore) {
} catch (TimeoutException toe) {
return futures;
}
long now = System.nanoTime();
nanos -= now - lastTime;
lastTime = now;
}
}
done = true;
return futures;
} finally {
if (!done)
for (Future<T> f : futures)
f.cancel(true);
}
}
}
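/*
 * A minimal usage sketch (not part of the JDK source): a ThreadPoolExecutor obtained
 * from Executors extends this class and therefore inherits the submit/invokeAny/
 * invokeAll implementations above, each task being wrapped by newTaskFor.
 *
 *   ExecutorService pool = Executors.newFixedThreadPool(4);
 *   Future<String> f = pool.submit(new Callable<String>() {
 *       public String call() { return "done"; }
 *   });
 *   String result = f.get(); // "done"
 *   pool.shutdown();
 */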
|
|
/**
* Copyright (c) 2015 MapR, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ojai;
import java.util.Iterator;
import java.util.NoSuchElementException;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Recognizer;
import org.ojai.FieldPathLexer;
import org.ojai.FieldPathParser;
import org.ojai.FieldSegment.IndexSegment;
import org.ojai.FieldSegment.NameSegment;
import org.ojai.annotation.API;
import org.ojai.util.Fields;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
/**
* Immutable class for representing a field path.
*/
@API.Public
public final class FieldPath implements Comparable<FieldPath>, Iterable<FieldSegment> {
private static Logger logger = LoggerFactory.getLogger(FieldPath.class);
public static final FieldPath EMPTY = new FieldPath(new NameSegment("", null, false));
/**
* Use this method to translate a <code>String</code> into <code>FieldPath</code>.
*
* @param fieldPath The String to parse.
*
* @return An immutable instance of {@link FieldPath} parsed from the input string.
*
* @throws NullPointerException If the input string is null.
* @throws IllegalArgumentException If the input string has syntax error.
*/
public static FieldPath parseFrom(String fieldPath) {
if (fieldPath == null) {
throw new NullPointerException("Can not parse null string as FieldPath.");
} else if (fieldPath.isEmpty()) {
return EMPTY;
}
FieldPath fp = null;
if ((fp = fieldPathCache.getIfPresent(fieldPath)) == null) {
try {
CommonTokenStream tokens = new CommonTokenStream(
new FieldPathLexer(new ANTLRInputStream(fieldPath)));
FieldPathErrorListener listener = new FieldPathErrorListener();
FieldPathParser parser = new FieldPathParser(tokens);
parser.removeErrorListeners();
parser.addErrorListener(listener);
fp = parser.parse().e;
if (listener.isError()) {
throw new IllegalArgumentException(
"'" + fieldPath + "' is not a valid FieldPath: "
+ listener.getErrorMsg(), listener.getException());
}
fieldPathCache.put(fieldPath, fp);
} catch (RecognitionException e) {
logger.error("Error parsing {} as a FieldPath: {}.", fieldPath, e.getMessage());
throw new IllegalArgumentException(
"Unable to parse '" + fieldPath + "' as a FieldPath.", e);
}
}
return fp;
}
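/*
 * A minimal usage sketch (not part of the original source), assuming the usual
 * dotted-name/bracketed-index path syntax:
 *
 *   FieldPath fp = FieldPath.parseFrom("a.b[2].c"); // name, index and name segments
 *   fp.asPathString();                              // "a.b[2].c"
 *   FieldPath child = fp.cloneWithNewChild("d");    // "a.b[2].c.d"
 */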
@Override
public Iterator<FieldSegment> iterator() {
return new FieldSegmentIterator();
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof FieldPath)) {
return false;
}
FieldPath other = (FieldPath) obj;
if (rootSegment == null) {
return (other.rootSegment == null);
}
return rootSegment.equals(other.rootSegment);
}
@Override
public int hashCode() {
return ((rootSegment == null) ? 0 : rootSegment.hashCode());
}
@Override
public String toString() {
return asPathString();
}
/**
* Compares two FieldPath segment by segment from left to right.
* Named segments are compared lexicographically while indexed
* segments are compared based on their index value. For same
* position in a FieldPath, a name segment is considered greater
* than the indexed segment.
*/
@Override
public int compareTo(FieldPath other) {
return rootSegment.compareTo(other.getRootSegment());
}
/**
* Returns the {@code String} representation of this field path, quoting
* the name segments that were parsed from quoted identifiers.
* @return The {@code String} representation of this {@code FieldPath}.
*/
public String asPathString() {
return rootSegment.asPathString(false);
}
public String asPathString(boolean escape) {
return rootSegment.asPathString(escape);
}
public FieldSegment getRootSegment() {
return rootSegment;
}
/**
* @return A FieldPath with the specified name segment as the
* parent of this FieldPath.
*/
public FieldPath cloneWithNewParent(String parentSegment) {
String rootSegmentName = Fields.unquoteFieldName(parentSegment);
NameSegment newRoot = new NameSegment(
parentSegment, rootSegment.clone(), (rootSegmentName != parentSegment));
return new FieldPath(newRoot);
}
/**
* @return A FieldPath with the specified name segment added as the
* child of the leaf of this FieldPath.
*/
public FieldPath cloneWithNewChild(String childSegment) {
NameSegment newRoot = rootSegment.cloneWithNewChild(new NameSegment(childSegment));
return new FieldPath(newRoot);
}
/**
* @return A FieldPath with the specified index segment added as the
* child of the leaf of this FieldPath.
*/
public FieldPath cloneWithNewChild(int index) {
NameSegment newRoot = rootSegment.cloneWithNewChild(new IndexSegment(index));
return new FieldPath(newRoot);
}
/**
* @return A sub-segment of this FieldPath starting after the specified
* ancestor. For example if the current FieldPath is "a.b.c.d"
* and the specified ancestor is "a.b", will return "c.d".<br/>
* If the ancestor is the same as this, will return EMPTY. If the
* ancestor is not an actual ancestor, will return null.
*/
public FieldPath cloneAfterAncestor(FieldPath ancestor) {
if (this == ancestor) {
return EMPTY;
}
FieldSegment c1 = rootSegment;
FieldSegment c2 = ancestor.rootSegment;
while (c1 != null && c2 != null) {
if (!c1.segmentEquals(c2)) { // segments diverge, so 'ancestor' is not an ancestor of this path
return null;
}
c1 = c1.getChild();
c2 = c2.getChild();
}
if (c1 == null && c2 == null) {
return EMPTY; // ancestor same as this
} else if (c1 == null // ancestor is actually a progeny
|| c1.isIndexed()) { // or progeny starts at an index segment
return null;
}
return new FieldPath((NameSegment) c1.clone());
}
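// Illustration (assumed example values) of cloneAfterAncestor() results:
//   "a.b.c.d" after ancestor "a.b"     -> "c.d"
//   "a.b.c.d" after ancestor "a.b.c.d" -> EMPTY
//   "a.b.c.d" after ancestor "x.y"     -> null (not an ancestor)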
/**
* @return true if the other {@code FieldPath} is the same as, or a child of, this one.
*/
public boolean isAtOrBelow(FieldPath other) {
return rootSegment.isAtOrBelow(other.rootSegment);
}
/**
* @return true if the other {@code FieldPath} is the same as, or a parent of, this one.
*/
public boolean isAtOrAbove(FieldPath other) {
return rootSegment.isAtOrAbove(other.rootSegment);
}
/**
* Internal cache which stores recently parsed {@link FieldPath}
* objects in an LRU map.
*/
private static Cache<String, FieldPath> fieldPathCache =
CacheBuilder.newBuilder().maximumSize(1000).build();
final private NameSegment rootSegment;
/**
* Package-private constructor used by the parser.
* @param root the root name segment of the path
*/
FieldPath(NameSegment root) {
this.rootSegment = root;
}
final class FieldSegmentIterator implements Iterator<FieldSegment> {
FieldSegment current = rootSegment;
@Override
public void remove() {
throw new UnsupportedOperationException();
}
@Override
public FieldSegment next() {
if (current == null) {
throw new NoSuchElementException();
}
FieldSegment ret = current;
current = current.getChild();
return ret;
}
@Override
public boolean hasNext() {
return current != null;
}
}
final static class FieldPathErrorListener extends BaseErrorListener {
private String errorMsg;
private RecognitionException exception;
@Override
public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line,
int charPositionInLine, String msg, RecognitionException e) {
this.errorMsg = String.format("At line:%d:%d: %s", line, charPositionInLine, msg);
this.exception = e;
}
public boolean isError() {
return errorMsg != null;
}
public String getErrorMsg() {
return errorMsg;
}
public RecognitionException getException() {
return exception;
}
}
}
|
|
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.basics.currency;
import java.io.Serializable;
import java.util.Locale;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.joda.beans.Bean;
import org.joda.beans.BeanBuilder;
import org.joda.beans.BeanDefinition;
import org.joda.beans.ImmutableBean;
import org.joda.beans.ImmutableValidator;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.direct.DirectFieldsBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import com.google.common.math.DoubleMath;
import com.opengamma.strata.collect.ArgChecker;
import com.opengamma.strata.collect.Messages;
/**
* A single foreign exchange rate between two currencies, such as 'EUR/USD 1.25'.
* <p>
* This represents a rate of foreign exchange. The rate 'EUR/USD 1.25' consists of three
* elements - the base currency 'EUR', the counter currency 'USD' and the rate '1.25'.
* When performing a conversion a rate of '1.25' means that '1 EUR = 1.25 USD'.
* <p>
* See {@link CurrencyPair} for the representation that does not contain a rate.
* <p>
* This class is immutable and thread-safe.
*/
@BeanDefinition(builderScope = "private")
public final class FxRate
implements FxRateProvider, ImmutableBean, Serializable {
/**
* Regular expression to parse the textual format.
*/
private static final Pattern REGEX_FORMAT = Pattern.compile("([A-Z]{3})[/]([A-Z]{3})[ ]([0-9+.-]+)");
/**
* The currency pair.
* The pair is formed of two parts, the base and the counter.
* In the pair 'AAA/BBB' the base is 'AAA' and the counter is 'BBB'.
*/
@PropertyDefinition(validate = "notNull")
private final CurrencyPair pair;
/**
* The rate applicable to the currency pair.
* One unit of the base currency is exchanged for this amount of the counter currency.
*/
@PropertyDefinition(validate = "ArgChecker.notNegativeOrZero", get = "private")
private final double rate;
//-------------------------------------------------------------------------
/**
* Obtains an instance from two currencies.
* <p>
* The first currency is the base and the second is the counter.
* The two currencies may be the same, but if they are then the rate must be one.
*
* @param base the base currency
* @param counter the counter currency
* @param rate the conversion rate, greater than zero
* @return the FX rate
* @throws IllegalArgumentException if the rate is invalid
*/
public static FxRate of(Currency base, Currency counter, double rate) {
return new FxRate(CurrencyPair.of(base, counter), rate);
}
/**
* Obtains an instance from a currency pair.
* <p>
* The two currencies may be the same, but if they are then the rate must be one.
*
* @param pair the currency pair
* @param rate the conversion rate, greater than zero
* @return the FX rate
* @throws IllegalArgumentException if the rate is invalid
*/
public static FxRate of(CurrencyPair pair, double rate) {
return new FxRate(pair, rate);
}
//-------------------------------------------------------------------------
/**
* Parses a rate from a string with format AAA/BBB RATE.
* <p>
* The parsed format is '${baseCurrency}/${counterCurrency} ${rate}'.
* Currency parsing is case insensitive.
*
* @param rateStr the rate as a string AAA/BBB RATE
* @return the FX rate
* @throws IllegalArgumentException if the FX rate cannot be parsed
*/
public static FxRate parse(String rateStr) {
ArgChecker.notNull(rateStr, "rateStr");
Matcher matcher = REGEX_FORMAT.matcher(rateStr.toUpperCase(Locale.ENGLISH));
if (!matcher.matches()) {
throw new IllegalArgumentException("Invalid rate: " + rateStr);
}
try {
Currency base = Currency.parse(matcher.group(1));
Currency counter = Currency.parse(matcher.group(2));
double rate = Double.parseDouble(matcher.group(3));
return new FxRate(CurrencyPair.of(base, counter), rate);
} catch (RuntimeException ex) {
throw new IllegalArgumentException("Unable to parse rate: " + rateStr, ex);
}
}
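// Usage sketch (illustrative values only): the accepted format is '${baseCurrency}/${counterCurrency} ${rate}'.
private static void parseUsageSketch() {
FxRate eurUsd = FxRate.parse("EUR/USD 1.25"); // 1 EUR = 1.25 USD
FxRate sameRate = FxRate.parse("eur/usd 1.25"); // currency parsing is case insensitive
}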
//-------------------------------------------------------------------------
@ImmutableValidator
private void validate() {
if (pair.getBase().equals(pair.getCounter()) && rate != 1d) {
throw new IllegalArgumentException("Conversion rate between identical currencies must be one");
}
}
//-------------------------------------------------------------------------
/**
* Gets the inverse rate.
* <p>
* The inverse rate has the same currencies but in reverse order.
* The rate is the reciprocal of the original.
*
* @return the inverse pair
*/
public FxRate inverse() {
return new FxRate(pair.inverse(), 1d / rate);
}
/**
* Gets the FX rate for the specified currency pair.
* <p>
* The rate returned is the rate from the base currency to the counter currency
* as defined by this formula: {@code (1 * baseCurrency = fxRate * counterCurrency)}.
* <p>
* This will return the rate or inverse rate, or 1 if the two input currencies are the same.
*
* @param baseCurrency the base currency, to convert from
* @param counterCurrency the counter currency, to convert to
* @return the FX rate for the currency pair
* @throws IllegalArgumentException if no FX rate could be found
*/
@Override
public double fxRate(Currency baseCurrency, Currency counterCurrency) {
if (baseCurrency.equals(counterCurrency)) {
return 1d;
}
if (baseCurrency.equals(pair.getBase()) && counterCurrency.equals(pair.getCounter())) {
return rate;
}
if (counterCurrency.equals(pair.getBase()) && baseCurrency.equals(pair.getCounter())) {
return 1d / rate;
}
throw new IllegalArgumentException(Messages.format(
"No FX rate found for {}/{}", baseCurrency, counterCurrency));
}
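// Illustrative behaviour (assumed example values): for FxRate.of(Currency.EUR, Currency.USD, 1.25),
// fxRate(EUR, USD) returns 1.25, fxRate(USD, EUR) returns 0.8 (the reciprocal),
// fxRate(EUR, EUR) returns 1, and an unrelated pair such as (GBP, JPY) throws IllegalArgumentException.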
/**
* Derives an FX rate from two related FX rates.
* <p>
* Given two FX rates it is possible to derive another rate if they have a currency in common.
* For example, given rates for EUR/GBP and EUR/CHF it is possible to derive rates for GBP/CHF.
* The result will always have a currency pair in the conventional order.
* <p>
* The cross is only returned if the two pairs contain three currencies in total.
* If the inputs are invalid, an exception is thrown.
* <ul>
* <li>AAA/BBB and BBB/CCC - valid, producing AAA/CCC
* <li>AAA/BBB and CCC/BBB - valid, producing AAA/CCC
* <li>AAA/BBB and BBB/AAA - invalid, exception thrown
* <li>AAA/BBB and BBB/BBB - invalid, exception thrown
* <li>AAA/BBB and CCC/DDD - invalid, exception thrown
* </ul>
*
* @param other the other rates
* @return a set of FX rates derived from these rates and the other rates
* @throws IllegalArgumentException if the cross rate cannot be calculated
*/
public FxRate crossRate(FxRate other) {
return pair.cross(other.pair).map(cross -> computeCross(this, other, cross))
.orElseThrow(() -> new IllegalArgumentException(Messages.format(
"Unable to cross when no unique common currency: {} and {}", pair, other.pair)));
}
// computes the cross rate
private static FxRate computeCross(FxRate fx1, FxRate fx2, CurrencyPair crossPairAC) {
// aim is to convert AAA/BBB and BBB/CCC to AAA/CCC
Currency currA = crossPairAC.getBase();
Currency currC = crossPairAC.getCounter();
// given the conventional cross rate pair, order the two rates to match
boolean crossBaseCurrencyInFx1 = fx1.pair.contains(currA);
FxRate fxABorBA = crossBaseCurrencyInFx1 ? fx1 : fx2;
FxRate fxBCorCB = crossBaseCurrencyInFx1 ? fx2 : fx1;
// extract the rates, taking the inverse if the pair is in the inverse order
double rateAB = fxABorBA.getPair().getBase().equals(currA) ? fxABorBA.rate : 1d / fxABorBA.rate;
double rateBC = fxBCorCB.getPair().getCounter().equals(currC) ? fxBCorCB.rate : 1d / fxBCorCB.rate;
return FxRate.of(crossPairAC, rateAB * rateBC);
}
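// Worked example (illustrative values; assumes GBP/CHF is the market-convention order for this pair):
// EUR/GBP 0.90 and EUR/CHF 1.10 share EUR, so the cross rate is (1 / 0.90) * 1.10 = 1.2222...
private static void crossRateUsageSketch() {
FxRate eurGbp = FxRate.of(Currency.EUR, Currency.GBP, 0.90);
FxRate eurChf = FxRate.of(Currency.EUR, Currency.CHF, 1.10);
FxRate gbpChf = eurGbp.crossRate(eurChf); // GBP/CHF, approximately 1.2222
}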
/**
* Returns an FX rate object representing the market convention rate between the two currencies.
* <p>
* If the currency pair is the market convention pair, this method returns {@code this}, otherwise
* it returns an {@code FxRate} with the inverse currency pair and reciprocal rate.
*
* @return an FX rate object representing the market convention rate between the two currencies
*/
public FxRate toConventional() {
return pair.isConventional() ? this : FxRate.of(pair.toConventional(), 1 / rate);
}
//-------------------------------------------------------------------------
/**
* Returns the formatted string version of the currency pair.
* <p>
* The format is '${baseCurrency}/${counterCurrency} ${rate}'.
*
* @return the formatted string
*/
@Override
public String toString() {
return pair + " " + (DoubleMath.isMathematicalInteger(rate) ? Long.toString((long) rate) : Double.toString(rate));
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code FxRate}.
* @return the meta-bean, not null
*/
public static FxRate.Meta meta() {
return FxRate.Meta.INSTANCE;
}
static {
JodaBeanUtils.registerMetaBean(FxRate.Meta.INSTANCE);
}
/**
* The serialization version id.
*/
private static final long serialVersionUID = 1L;
private FxRate(
CurrencyPair pair,
double rate) {
JodaBeanUtils.notNull(pair, "pair");
ArgChecker.notNegativeOrZero(rate, "rate");
this.pair = pair;
this.rate = rate;
validate();
}
@Override
public FxRate.Meta metaBean() {
return FxRate.Meta.INSTANCE;
}
@Override
public <R> Property<R> property(String propertyName) {
return metaBean().<R>metaProperty(propertyName).createProperty(this);
}
@Override
public Set<String> propertyNames() {
return metaBean().metaPropertyMap().keySet();
}
//-----------------------------------------------------------------------
/**
* Gets the currency pair.
* The pair is formed of two parts, the base and the counter.
* In the pair 'AAA/BBB' the base is 'AAA' and the counter is 'BBB'.
* @return the value of the property, not null
*/
public CurrencyPair getPair() {
return pair;
}
//-----------------------------------------------------------------------
/**
* Gets the rate applicable to the currency pair.
* One unit of the base currency is exchanged for this amount of the counter currency.
* @return the value of the property
*/
private double getRate() {
return rate;
}
//-----------------------------------------------------------------------
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj != null && obj.getClass() == this.getClass()) {
FxRate other = (FxRate) obj;
return JodaBeanUtils.equal(pair, other.pair) &&
JodaBeanUtils.equal(rate, other.rate);
}
return false;
}
@Override
public int hashCode() {
int hash = getClass().hashCode();
hash = hash * 31 + JodaBeanUtils.hashCode(pair);
hash = hash * 31 + JodaBeanUtils.hashCode(rate);
return hash;
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code FxRate}.
*/
public static final class Meta extends DirectMetaBean {
/**
* The singleton instance of the meta-bean.
*/
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code pair} property.
*/
private final MetaProperty<CurrencyPair> pair = DirectMetaProperty.ofImmutable(
this, "pair", FxRate.class, CurrencyPair.class);
/**
* The meta-property for the {@code rate} property.
*/
private final MetaProperty<Double> rate = DirectMetaProperty.ofImmutable(
this, "rate", FxRate.class, Double.TYPE);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<?>> metaPropertyMap$ = new DirectMetaPropertyMap(
this, null,
"pair",
"rate");
/**
* Restricted constructor.
*/
private Meta() {
}
@Override
protected MetaProperty<?> metaPropertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case 3433178: // pair
return pair;
case 3493088: // rate
return rate;
}
return super.metaPropertyGet(propertyName);
}
@Override
public BeanBuilder<? extends FxRate> builder() {
return new FxRate.Builder();
}
@Override
public Class<? extends FxRate> beanType() {
return FxRate.class;
}
@Override
public Map<String, MetaProperty<?>> metaPropertyMap() {
return metaPropertyMap$;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code pair} property.
* @return the meta-property, not null
*/
public MetaProperty<CurrencyPair> pair() {
return pair;
}
/**
* The meta-property for the {@code rate} property.
* @return the meta-property, not null
*/
public MetaProperty<Double> rate() {
return rate;
}
//-----------------------------------------------------------------------
@Override
protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
switch (propertyName.hashCode()) {
case 3433178: // pair
return ((FxRate) bean).getPair();
case 3493088: // rate
return ((FxRate) bean).getRate();
}
return super.propertyGet(bean, propertyName, quiet);
}
@Override
protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
metaProperty(propertyName);
if (quiet) {
return;
}
throw new UnsupportedOperationException("Property cannot be written: " + propertyName);
}
}
//-----------------------------------------------------------------------
/**
* The bean-builder for {@code FxRate}.
*/
private static final class Builder extends DirectFieldsBeanBuilder<FxRate> {
private CurrencyPair pair;
private double rate;
/**
* Restricted constructor.
*/
private Builder() {
}
//-----------------------------------------------------------------------
@Override
public Object get(String propertyName) {
switch (propertyName.hashCode()) {
case 3433178: // pair
return pair;
case 3493088: // rate
return rate;
default:
throw new NoSuchElementException("Unknown property: " + propertyName);
}
}
@Override
public Builder set(String propertyName, Object newValue) {
switch (propertyName.hashCode()) {
case 3433178: // pair
this.pair = (CurrencyPair) newValue;
break;
case 3493088: // rate
this.rate = (Double) newValue;
break;
default:
throw new NoSuchElementException("Unknown property: " + propertyName);
}
return this;
}
@Override
public Builder set(MetaProperty<?> property, Object value) {
super.set(property, value);
return this;
}
@Override
public Builder setString(String propertyName, String value) {
setString(meta().metaProperty(propertyName), value);
return this;
}
@Override
public Builder setString(MetaProperty<?> property, String value) {
super.setString(property, value);
return this;
}
@Override
public Builder setAll(Map<String, ? extends Object> propertyValueMap) {
super.setAll(propertyValueMap);
return this;
}
@Override
public FxRate build() {
return new FxRate(
pair,
rate);
}
//-----------------------------------------------------------------------
@Override
public String toString() {
StringBuilder buf = new StringBuilder(96);
buf.append("FxRate.Builder{");
buf.append("pair").append('=').append(JodaBeanUtils.toString(pair)).append(',').append(' ');
buf.append("rate").append('=').append(JodaBeanUtils.toString(rate));
buf.append('}');
return buf.toString();
}
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.connectors.kafka;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.operators.StreamingRuntimeContext;
import org.apache.flink.streaming.connectors.kafka.internals.Kafka08PartitionDiscoverer;
import org.apache.flink.streaming.util.serialization.DeserializationSchema;
import org.apache.flink.streaming.util.serialization.SimpleStringSchema;
import org.apache.flink.util.NetUtils;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Matchers;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import java.net.InetAddress;
import java.net.URL;
import java.net.UnknownHostException;
import java.util.Collections;
import java.util.Properties;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.powermock.api.mockito.PowerMockito.when;
/**
* Tests for the {@link FlinkKafkaConsumer08}.
*/
@RunWith(PowerMockRunner.class)
@PrepareForTest(Kafka08PartitionDiscoverer.class)
@PowerMockIgnore("javax.management.*")
public class KafkaConsumer08Test {
@Test
public void testValidateZooKeeperConfig() {
try {
// empty
Properties emptyProperties = new Properties();
try {
FlinkKafkaConsumer08.validateZooKeeperConfig(emptyProperties);
fail("should fail with an exception");
}
catch (IllegalArgumentException e) {
// expected
}
// no connect string (only group string)
Properties noConnect = new Properties();
noConnect.put(ConsumerConfig.GROUP_ID_CONFIG, "flink-test-group");
try {
FlinkKafkaConsumer08.validateZooKeeperConfig(noConnect);
fail("should fail with an exception");
}
catch (IllegalArgumentException e) {
// expected
}
// no group string (only connect string)
Properties noGroup = new Properties();
noGroup.put("zookeeper.connect", "localhost:47574");
try {
FlinkKafkaConsumer08.validateZooKeeperConfig(noGroup);
fail("should fail with an exception");
}
catch (IllegalArgumentException e) {
// expected
}
}
catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
@Test
public void testCreateSourceWithoutCluster() {
try {
Properties props = new Properties();
props.setProperty("zookeeper.connect", "localhost:56794");
props.setProperty("bootstrap.servers", "localhost:11111, localhost:22222");
props.setProperty("group.id", "non-existent-group");
props.setProperty(FlinkKafkaConsumer08.GET_PARTITIONS_RETRIES_KEY, "1");
FlinkKafkaConsumer08<String> consumer = new FlinkKafkaConsumer08<>(
Collections.singletonList("no op topic"), new SimpleStringSchema(), props);
StreamingRuntimeContext mockRuntimeContext = mock(StreamingRuntimeContext.class);
Mockito.when(mockRuntimeContext.isCheckpointingEnabled()).thenReturn(true);
consumer.setRuntimeContext(mockRuntimeContext);
consumer.open(new Configuration());
fail();
}
catch (Exception e) {
assertTrue(e.getMessage().contains("Unable to retrieve any partitions"));
}
}
@Test
public void testAllBoostrapServerHostsAreInvalid() {
try {
String unknownHost = "foobar:11111";
URL unknownHostURL = NetUtils.getCorrectHostnamePort(unknownHost);
PowerMockito.mockStatic(InetAddress.class);
when(InetAddress.getByName(Matchers.eq(unknownHostURL.getHost()))).thenThrow(new UnknownHostException("Test exception"));
String zookeeperConnect = "localhost:56794";
String groupId = "non-existent-group";
Properties props = createKafkaProps(zookeeperConnect, unknownHost, groupId);
FlinkKafkaConsumer08<String> consumer = new FlinkKafkaConsumer08<>(
Collections.singletonList("no op topic"), new SimpleStringSchema(), props);
StreamingRuntimeContext mockRuntimeContext = mock(StreamingRuntimeContext.class);
Mockito.when(mockRuntimeContext.isCheckpointingEnabled()).thenReturn(true);
consumer.setRuntimeContext(mockRuntimeContext);
consumer.open(new Configuration());
fail();
} catch (Exception expected) {
assertTrue("Exception should be thrown containing 'all bootstrap servers invalid' message!",
expected.getMessage().contains("All the servers provided in: '" + ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG
+ "' config are invalid"));
}
}
@Test
public void testAtLeastOneBootstrapServerHostIsValid() throws Exception {
try {
String zookeeperConnect = "localhost:56794";
String unknownHost = "foobar:11111";
// we declare one valid bootstrap server, namely the one with 'localhost'
String bootstrapServers = unknownHost + ", localhost:22222";
URL unknownHostURL = NetUtils.getCorrectHostnamePort(unknownHost);
PowerMockito.mockStatic(InetAddress.class);
when(InetAddress.getByName(Matchers.eq(unknownHostURL.getHost()))).thenThrow(new UnknownHostException("Test exception"));
String groupId = "non-existent-group";
Properties props = createKafkaProps(zookeeperConnect, bootstrapServers, groupId);
DummyFlinkKafkaConsumer consumer = new DummyFlinkKafkaConsumer(
"no op topic",
new SimpleStringSchema(),
props);
consumer.open(new Configuration());
// no exception should be thrown, because we have one valid bootstrap server; test passes if we reach here
} catch (Exception e) {
assertFalse("No exception should be thrown containing 'all bootstrap servers invalid' message!",
e.getMessage().contains("All the servers provided in: '" + ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG
+ "' config are invalid"));
}
}
private Properties createKafkaProps(String zookeeperConnect, String bootstrapServers, String groupId) {
Properties props = new Properties();
props.setProperty("zookeeper.connect", zookeeperConnect);
props.setProperty("bootstrap.servers", bootstrapServers);
props.setProperty("group.id", groupId);
props.setProperty("socket.timeout.ms", "100");
props.setProperty(FlinkKafkaConsumer08.GET_PARTITIONS_RETRIES_KEY, "1");
return props;
}
private static class DummyFlinkKafkaConsumer extends FlinkKafkaConsumer08<String> {
private static final long serialVersionUID = -3939402845009972810L;
public DummyFlinkKafkaConsumer(String topic, DeserializationSchema<String> schema, Properties props) {
super(Collections.singletonList(topic), schema, props);
}
@Override
public RuntimeContext getRuntimeContext() {
RuntimeContext mockRuntimeContext = mock(RuntimeContext.class);
when(mockRuntimeContext.getIndexOfThisSubtask()).thenReturn(0);
when(mockRuntimeContext.getNumberOfParallelSubtasks()).thenReturn(1);
return mockRuntimeContext;
}
}
}
|
|
package jhberges.camel.consul.leader;
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyDouble;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import java.util.Optional;
import org.apache.camel.CamelContext;
import org.apache.camel.ProducerTemplate;
import org.apache.camel.ServiceStatus;
import org.junit.After;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public class ConsulLeaderElectorTest {
private static final String SERVICE_NAME = "SERVICE_NAME";
private static final String ROUTE_ID = "ROUTE_ID";
private static final int TTL = 2;
private static final int LOCK_DELAY = 3;
private static final int TRIES = 4;
private static final int RETRYPERIOD = 5;
private static final double BACKOFF = 6;
@Mock
private ConsulFacadeBean consulFacade;
@Mock
private CamelContext camelContext;
@Mock
private ProducerTemplate producerTemplate;
@After
public void after() {
verifyNoMoreInteractions(consulFacade, camelContext, producerTemplate);
}
@Test
public void onContextStop() throws Exception {
final TerminationMock termination = new TerminationMock();
ConsulLeaderElector.TERMINATION_CALLBACK = termination;
when(consulFacade.initSessionKey(anyString()))
.thenReturn(Optional.of("SESSION"));
final ConsulLeaderElector elector = new ConsulLeaderElector(consulFacade, SERVICE_NAME, ROUTE_ID, camelContext, producerTemplate,
true);
elector.onContextStop(camelContext);
assertEquals(0, termination.getCalled());
verify(consulFacade, times(1)).initSessionKey(anyString());
verify(consulFacade, times(1)).close();
}
@Test
public void runWhenAlreadyLeader() throws Exception {
final TerminationMock termination = new TerminationMock();
ConsulLeaderElector.TERMINATION_CALLBACK = termination;
when(consulFacade.initSessionKey(anyString()))
.thenReturn(Optional.of("SESSION"));
when(consulFacade.pollConsul(eq(SERVICE_NAME)))
.thenReturn(Optional.of(true));
when(camelContext.getRouteStatus(eq(ROUTE_ID)))
.thenReturn(ServiceStatus.Started);
final ConsulLeaderElector elector = new ConsulLeaderElector(consulFacade, SERVICE_NAME, ROUTE_ID, camelContext, producerTemplate,
true);
elector.run();
assertEquals(0, termination.getCalled());
verify(consulFacade, times(1)).initSessionKey(anyString());
verify(consulFacade, times(1)).pollConsul(eq(SERVICE_NAME));
verify(camelContext, times(1)).getRouteStatus(eq(ROUTE_ID));
verify(producerTemplate, times(0)).sendBody(
eq(ConsulLeaderElector.CONTROLBUS_ROUTE), anyString());
}
@Test
public void runWhenBecomingLeader() throws Exception {
final TerminationMock termination = new TerminationMock();
ConsulLeaderElector.TERMINATION_CALLBACK = termination;
when(consulFacade.initSessionKey(anyString()))
.thenReturn(Optional.of("SESSION"));
when(consulFacade.pollConsul(eq(SERVICE_NAME)))
.thenReturn(Optional.of(true));
when(camelContext.getRouteStatus(eq(ROUTE_ID)))
.thenReturn(ServiceStatus.Stopped);
final ConsulLeaderElector elector = new ConsulLeaderElector(consulFacade, SERVICE_NAME, ROUTE_ID, camelContext, producerTemplate,
true);
elector.run();
assertEquals(0, termination.getCalled());
verify(consulFacade, times(1)).initSessionKey(anyString());
verify(consulFacade, times(1)).pollConsul(eq(SERVICE_NAME));
verify(camelContext, times(1)).getRouteStatus(eq(ROUTE_ID));
verify(producerTemplate, times(1)).sendBody(
eq(ConsulLeaderElector.CONTROLBUS_ROUTE), anyString());
}
@Test
public void runWhenLoosingLeadership() throws Exception {
final TerminationMock termination = new TerminationMock();
ConsulLeaderElector.TERMINATION_CALLBACK = termination;
when(consulFacade.initSessionKey(anyString()))
.thenReturn(Optional.of("SESSION"));
when(consulFacade.pollConsul(eq(SERVICE_NAME)))
.thenReturn(Optional.of(false));
when(camelContext.getRouteStatus(eq(ROUTE_ID)))
.thenReturn(ServiceStatus.Started);
final ConsulLeaderElector elector = new ConsulLeaderElector(consulFacade, SERVICE_NAME, ROUTE_ID, camelContext, producerTemplate,
true);
elector.run();
assertEquals(0, termination.getCalled());
verify(consulFacade, times(1)).initSessionKey(anyString());
verify(consulFacade, times(1)).pollConsul(eq(SERVICE_NAME));
verify(camelContext, times(1)).getRouteStatus(eq(ROUTE_ID));
verify(producerTemplate, times(1)).sendBody(
eq(ConsulLeaderElector.CONTROLBUS_ROUTE), anyString());
}
@Test
public void runWhenLoosingLeadershipButAlreadyStopped() throws Exception {
final TerminationMock termination = new TerminationMock();
ConsulLeaderElector.TERMINATION_CALLBACK = termination;
when(consulFacade.initSessionKey(anyString()))
.thenReturn(Optional.of("SESSION"));
when(consulFacade.pollConsul(eq(SERVICE_NAME)))
.thenReturn(Optional.of(false));
when(camelContext.getRouteStatus(eq(ROUTE_ID)))
.thenReturn(ServiceStatus.Stopped);
final ConsulLeaderElector elector = new ConsulLeaderElector(consulFacade, SERVICE_NAME, ROUTE_ID, camelContext, producerTemplate,
true);
elector.run();
assertEquals(0, termination.getCalled());
verify(consulFacade, times(1)).initSessionKey(anyString());
verify(consulFacade, times(1)).pollConsul(eq(SERVICE_NAME));
verify(camelContext, times(1)).getRouteStatus(eq(ROUTE_ID));
verify(producerTemplate, times(0)).sendBody(
eq(ConsulLeaderElector.CONTROLBUS_ROUTE), anyString());
}
@Test
public void runWhenPreparingToBeLeader() throws Exception {
final TerminationMock termination = new TerminationMock();
ConsulLeaderElector.TERMINATION_CALLBACK = termination;
when(consulFacade.initSessionKey(anyString()))
.thenReturn(Optional.of("SESSION"));
when(consulFacade.pollConsul(eq(SERVICE_NAME)))
.thenReturn(Optional.of(true));
when(camelContext.getRouteStatus(eq(ROUTE_ID)))
.thenReturn(ServiceStatus.Starting);
final ConsulLeaderElector elector = new ConsulLeaderElector(consulFacade, SERVICE_NAME, ROUTE_ID, camelContext, producerTemplate,
true);
elector.run();
assertEquals(0, termination.getCalled());
verify(consulFacade, times(1)).initSessionKey(anyString());
verify(consulFacade, times(1)).pollConsul(eq(SERVICE_NAME));
verify(camelContext, times(1)).getRouteStatus(eq(ROUTE_ID));
verify(producerTemplate, times(0)).sendBody(
eq(ConsulLeaderElector.CONTROLBUS_ROUTE), anyString());
}
@Test
public void terminateIfIslandWhenAllowed() throws Exception {
final TerminationMock termination = new TerminationMock();
ConsulLeaderElector.TERMINATION_CALLBACK = termination;
when(consulFacade.initSessionKey(anyString()))
.thenReturn(Optional.empty());
new ConsulLeaderElector(consulFacade, SERVICE_NAME, ROUTE_ID, camelContext, producerTemplate, true);
assertEquals(0, termination.getCalled());
verify(consulFacade, times(1)).initSessionKey(anyString());
}
@Test
public void terminateIfIslandWhenNotAllowed() throws Exception {
final TerminationMock termination = new TerminationMock();
ConsulLeaderElector.TERMINATION_CALLBACK = termination;
when(consulFacade.initSessionKey(anyString()))
.thenReturn(Optional.empty());
new ConsulLeaderElector(consulFacade, SERVICE_NAME, ROUTE_ID, camelContext, producerTemplate, false);
assertEquals(1, termination.getCalled());
verify(consulFacade, times(1)).initSessionKey(anyString());
}
}
|
|
/*
* This file is part of the DITA Open Toolkit project.
* See the accompanying license.txt file for applicable licenses.
*/
/*
* (c) Copyright IBM Corp. 2010 All Rights Reserved.
*/
package org.dita.dost.reader;
import static java.util.Arrays.*;
import static org.dita.dost.util.Constants.*;
import static org.dita.dost.util.URLUtils.*;
import static org.dita.dost.util.XMLUtils.*;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.util.*;
import javax.xml.parsers.DocumentBuilder;
import org.dita.dost.util.KeyDef;
import org.dita.dost.util.KeyScope;
import org.dita.dost.util.XMLUtils;
import org.w3c.dom.*;
import org.dita.dost.log.DITAOTLogger;
import org.xml.sax.InputSource;
/**
* KeyrefReader reads a DITA map file to collect key definitions. Instances are reusable but not thread-safe.
*/
public final class KeyrefReader implements AbstractReader {
private static final List<String> ATTS = Collections.unmodifiableList(asList(
ATTRIBUTE_NAME_HREF,
ATTRIBUTE_NAME_AUDIENCE,
ATTRIBUTE_NAME_PLATFORM,
ATTRIBUTE_NAME_PRODUCT,
ATTRIBUTE_NAME_OTHERPROPS,
"rev",
ATTRIBUTE_NAME_PROPS,
"linking",
ATTRIBUTE_NAME_TOC,
ATTRIBUTE_NAME_PRINT,
"search",
ATTRIBUTE_NAME_FORMAT,
ATTRIBUTE_NAME_SCOPE,
ATTRIBUTE_NAME_TYPE,
ATTRIBUTE_NAME_XML_LANG,
"dir",
"translate",
ATTRIBUTE_NAME_PROCESSING_ROLE,
ATTRIBUTE_NAME_CASCADE));
private DITAOTLogger logger;
private final DocumentBuilder builder;
private KeyScope rootScope;
/**
* Constructor.
*/
public KeyrefReader() {
builder = XMLUtils.getDocumentBuilder();
}
@Override
public void read(final File filename) {
throw new UnsupportedOperationException();
}
@Override
public void setLogger(final DITAOTLogger logger) {
this.logger = logger;
}
/**
* Get key definitions for root scope. Each key definition Element has a distinct Document.
*
* @return root key scope
*/
public KeyScope getKeyDefinition() {
return rootScope;
}
/**
* Read key definitions from a DITA map.
*
* @param filename absolute URI of the DITA map with key definitions
* @param doc parsed DITA map document to read the key definitions from
*/
public void read(final URI filename, final Document doc) {
rootScope = null;
// TODO: use KeyScope implementation that retains order
KeyScope keyScope = readScopes(doc);
keyScope = cascadeChildKeys(keyScope);
// TODO: determine effective key definitions here
keyScope = inheritParentKeys(keyScope);
rootScope = resolveIntermediate(keyScope);
}
/** Read keys scopes in map. */
private KeyScope readScopes(final Document doc) {
final List<KeyScope> scopes = readScopes(doc.getDocumentElement());
if (scopes.size() == 1) {
return scopes.get(0);
} else {
return new KeyScope(null, Collections.<String, KeyDef>emptyMap(), scopes);
}
}
private List<KeyScope> readScopes(final Element root) {
final List<KeyScope> childScopes = new ArrayList<>();
final Map<String, KeyDef> keyDefs = new HashMap<>();
readScope(root, keyDefs);
readChildScopes(root, childScopes);
final String keyscope = root.getAttribute(ATTRIBUTE_NAME_KEYSCOPE).trim();
if (keyscope.isEmpty()) {
return asList(new KeyScope(null, keyDefs, childScopes));
} else {
final List<KeyScope> res = new ArrayList<>();
for (final String scope: keyscope.split("\\s+")) {
res.add(new KeyScope(scope, keyDefs, childScopes));
}
return res;
}
}
private void readChildScopes(final Element elem, final List<KeyScope> childScopes) {
for (final Element child: getChildElements(elem)) {
if (child.getAttributeNode(ATTRIBUTE_NAME_KEYSCOPE) != null) {
final List<KeyScope> childScope = readScopes(child);
childScopes.addAll(childScope);
}
else {
readChildScopes(child, childScopes);
}
}
}
/** Read key definitions from a key scope. */
private void readScope(final Element scope, final Map<String, KeyDef> keyDefs) {
final List<Element> maps = new ArrayList<>();
maps.add(scope);
for (final Element child: getChildElements(scope)) {
collectMaps(child, maps);
}
for (final Element map: maps) {
readMap(map, keyDefs);
}
}
private void collectMaps(final Element elem, final List<Element> maps) {
if (elem.getAttributeNode(ATTRIBUTE_NAME_KEYSCOPE) != null) {
return;
}
final String classValue = elem.getAttribute(ATTRIBUTE_NAME_CLASS);
if (MAP_MAP.matches(classValue) || SUBMAP.matches(classValue)) {
maps.add(elem);
}
for (final Element child: getChildElements(elem)) {
collectMaps(child, maps);
}
}
/** Recursively read key definitions from a single map fragment. */
private void readMap(final Element map, final Map<String, KeyDef> keyDefs) {
readKeyDefinition(map, keyDefs);
for (final Element elem: getChildElements(map)) {
if (!(SUBMAP.matches(elem) || elem.getAttributeNode(ATTRIBUTE_NAME_KEYSCOPE) != null)) {
readMap(elem, keyDefs);
}
}
}
private void readKeyDefinition(final Element elem, final Map<String, KeyDef> keyDefs) {
final String keyName = elem.getAttribute(ATTRIBUTE_NAME_KEYS);
if (!keyName.isEmpty()) {
for (final String key: keyName.trim().split("\\s+")) {
if (!keyDefs.containsKey(key)) {
final Document d = builder.newDocument();
final Element copy = (Element) d.importNode(elem, true);
d.appendChild(copy);
final String h = copy.getAttribute(ATTRIBUTE_NAME_HREF);
final URI href = h.isEmpty() ? null : toURI(h);
final String s = copy.getAttribute(ATTRIBUTE_NAME_SCOPE);
final String scope = s.isEmpty() ? null : s;
final KeyDef keyDef = new KeyDef(key, href, scope, null, copy);
keyDefs.put(key, keyDef);
}
}
}
}
/** Cascade child keys with prefixes to parent key scopes. */
private KeyScope cascadeChildKeys(final KeyScope rootScope) {
final Map<String, KeyDef> res = new HashMap<>(rootScope.keyDefinition);
cascadeChildKeys(rootScope, res, "");
return new KeyScope(rootScope.name, res, new ArrayList<>(rootScope.childScopes.values()));
}
private void cascadeChildKeys(final KeyScope scope, final Map<String, KeyDef> keys, final String prefix) {
final StringBuilder buf = new StringBuilder(prefix);
if (scope.name != null) {
buf.append(scope.name).append(".");
}
final String p = buf.toString();
for (final Map.Entry<String, KeyDef> e: scope.keyDefinition.entrySet()) {
final KeyDef oldKeyDef = e.getValue();
final KeyDef newKeyDef = new KeyDef(p + oldKeyDef.keys, oldKeyDef.href, oldKeyDef.scope, oldKeyDef.source, oldKeyDef.element);
if (!keys.containsKey(newKeyDef.keys)) {
keys.put(newKeyDef.keys, newKeyDef);
}
}
for (final KeyScope child: scope.childScopes.values()) {
cascadeChildKeys(child, keys, p);
}
}
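// Illustration (hypothetical key and scope names): a key "glossary" defined inside child scope
// "product" is also published to the enclosing scope under the prefixed name "product.glossary";
// nested scopes accumulate prefixes, e.g. "release.product.glossary". Existing definitions in the
// parent are never overwritten because of the containsKey() guard above.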
/** Inherit parent keys to child key scopes. */
private KeyScope inheritParentKeys(final KeyScope rootScope) {
return inheritParentKeys(rootScope, Collections.<String, KeyDef>emptyMap());
}
private KeyScope inheritParentKeys(final KeyScope current, final Map<String, KeyDef> parent) {
if (parent.keySet().isEmpty() && current.childScopes.isEmpty()) {
return current;
} else {
final Map<String, KeyDef> resKeys = new HashMap<>();
resKeys.putAll(current.keyDefinition);
resKeys.putAll(parent);
final List<KeyScope> resChildren = new ArrayList<>();
for (final KeyScope child: current.childScopes.values()) {
final KeyScope resChild = inheritParentKeys(child, resKeys);
resChildren.add(resChild);
}
return new KeyScope(current.name, resKeys, resChildren);
}
}
/** Resolve intermediate key references. */
private KeyScope resolveIntermediate(final KeyScope scope) {
final Map<String, KeyDef> keys = new HashMap<>(scope.keyDefinition);
for (final Map.Entry<String, KeyDef> e: scope.keyDefinition.entrySet()) {
final KeyDef res = resolveIntermediate(scope, e.getValue());
keys.put(e.getKey(), res);
}
final List<KeyScope> children = new ArrayList<>();
for (final KeyScope child: scope.childScopes.values()) {
final KeyScope resolvedChild = resolveIntermediate(child);
children.add(resolvedChild);
}
return new KeyScope(scope.name, keys, children);
}
private KeyDef resolveIntermediate(final KeyScope scope, final KeyDef keyDef) {
final Element elem = keyDef.element;
final String keyref = elem.getAttribute(ATTRIBUTE_NAME_KEYREF);
if (!keyref.isEmpty() && scope.keyDefinition.containsKey(keyref)) {
KeyDef keyRefDef = scope.keyDefinition.get(keyref);
Element defElem = keyRefDef.element;
final String defElemKeyref = defElem.getAttribute(ATTRIBUTE_NAME_KEYREF);
if (!defElemKeyref.isEmpty()) {
keyRefDef = resolveIntermediate(scope, keyRefDef);
}
final Element res = mergeMetadata(keyRefDef.element, elem);
res.removeAttribute(ATTRIBUTE_NAME_KEYREF);
return new KeyDef(keyDef.keys, keyRefDef.href, keyRefDef.scope, keyRefDef.source, res);
} else {
return keyDef;
}
}
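// Illustration (hypothetical keys): given <keydef keys="a" keyref="b"/> and
// <keydef keys="b" href="topic.dita"/> in the same scope, "a" resolves to the href and scope
// of "b", with the metadata of "b" merged into a copy of "a" and the intermediate @keyref removed.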
private Element mergeMetadata(final Element defElem, final Element elem) {
final Element res = (Element) elem.cloneNode(true);
final Document d = res.getOwnerDocument();
final Element defMeta = getTopicmeta(defElem);
if (defMeta != null) {
Element resMeta = getTopicmeta(res);
if (resMeta == null) {
resMeta = d.createElement(MAP_TOPICMETA.localName);
resMeta.setAttribute(ATTRIBUTE_NAME_CLASS, MAP_TOPICMETA.toString());
res.appendChild(resMeta);
}
final NodeList cs = defMeta.getChildNodes();
for (int i = 0; i < cs.getLength(); i++) {
final Node c = cs.item(i);
final Node copy = d.importNode(c, true);
resMeta.appendChild(copy);
}
}
for (final String attr: ATTS) {
if (res.getAttributeNode(attr) == null) {
final Attr defAttr = defElem.getAttributeNode(attr);
if (defAttr != null) {
final Attr copy = (Attr) d.importNode(defAttr, true);
res.setAttributeNode(copy);
}
}
}
return res;
}
private Element getTopicmeta(final Element topicref) {
final NodeList ns = topicref.getChildNodes();
for (int i = 0; i < ns.getLength(); i++) {
final Node n = ns.item(i);
if (MAP_TOPICMETA.matches(n)) {
return (Element) n;
}
}
return null;
}
}
|
|
/*
* Copyright 1999-2010 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.core.sql;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
import com.orientechnologies.common.parser.OStringParser;
import com.orientechnologies.common.util.OPair;
import com.orientechnologies.orient.core.command.OCommandRequestText;
import com.orientechnologies.orient.core.command.OCommandResultListener;
import com.orientechnologies.orient.core.exception.OCommandExecutionException;
import com.orientechnologies.orient.core.metadata.schema.OProperty;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.metadata.security.ODatabaseSecurityResources;
import com.orientechnologies.orient.core.metadata.security.ORole;
import com.orientechnologies.orient.core.query.OQuery;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.sql.filter.OSQLFilterItem;
import com.orientechnologies.orient.core.sql.functions.OSQLFunctionRuntime;
import com.orientechnologies.orient.core.sql.query.OSQLAsynchQuery;
/**
* SQL UPDATE command.
*
* @author Luca Garulli
*
*/
public class OCommandExecutorSQLUpdate extends OCommandExecutorSQLAbstract implements OCommandResultListener {
public static final String KEYWORD_UPDATE = "UPDATE";
private static final String KEYWORD_SET = "SET";
private static final String KEYWORD_ADD = "ADD";
private static final String KEYWORD_PUT = "PUT";
private static final String KEYWORD_REMOVE = "REMOVE";
private Map<String, Object> setEntries = new LinkedHashMap<String, Object>();
private Map<String, Object> addEntries = new LinkedHashMap<String, Object>();
private Map<String, OPair<String, Object>> putEntries = new LinkedHashMap<String, OPair<String, Object>>();
private Map<String, Object> removeEntries = new LinkedHashMap<String, Object>();
private OQuery<?> query;
private int recordCount = 0;
private String subjectName;
private static final Object EMPTY_VALUE = new Object();
private int parameterCounter = 0;
private Map<Object, Object> parameters;
@SuppressWarnings("unchecked")
public OCommandExecutorSQLUpdate parse(final OCommandRequestText iRequest) {
iRequest.getDatabase().checkSecurity(ODatabaseSecurityResources.COMMAND, ORole.PERMISSION_UPDATE);
init(iRequest.getDatabase(), iRequest.getText());
setEntries.clear();
query = null;
recordCount = 0;
final StringBuilder word = new StringBuilder();
int pos = OSQLHelper.nextWord(text, textUpperCase, 0, word, true);
if (pos == -1 || !word.toString().equals(OCommandExecutorSQLUpdate.KEYWORD_UPDATE))
throw new OCommandSQLParsingException("Keyword " + OCommandExecutorSQLUpdate.KEYWORD_UPDATE + " not found", text, 0);
int newPos = OSQLHelper.nextWord(text, textUpperCase, pos, word, true);
if (newPos == -1)
throw new OCommandSQLParsingException("Invalid target", text, pos);
pos = newPos;
subjectName = word.toString();
newPos = OSQLHelper.nextWord(text, textUpperCase, pos, word, true);
if (newPos == -1
|| (!word.toString().equals(KEYWORD_SET) && !word.toString().equals(KEYWORD_ADD) && !word.toString().equals(KEYWORD_PUT) && !word
.toString().equals(KEYWORD_REMOVE)))
throw new OCommandSQLParsingException("Expected keyword " + KEYWORD_SET + "," + KEYWORD_ADD + "," + KEYWORD_PUT + " or "
+ KEYWORD_REMOVE, text, pos);
pos = newPos;
while (pos != -1 && !word.toString().equals(OCommandExecutorSQLAbstract.KEYWORD_WHERE)) {
if (word.toString().equals(KEYWORD_SET))
pos = parseSetFields(word, pos);
else if (word.toString().equals(KEYWORD_ADD))
pos = parseAddFields(word, pos);
else if (word.toString().equals(KEYWORD_PUT))
pos = parsePutFields(word, pos);
else if (word.toString().equals(KEYWORD_REMOVE))
pos = parseRemoveFields(word, pos);
else
break;
}
String whereCondition = word.toString();
if (whereCondition.equals(OCommandExecutorSQLAbstract.KEYWORD_WHERE))
query = new OSQLAsynchQuery<ODocument>("select from " + subjectName + " where " + text.substring(pos), this);
else
query = new OSQLAsynchQuery<ODocument>("select from " + subjectName, this);
return this;
}
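// Syntax sketch (illustrative statements only) of what this parser accepts:
//   UPDATE Profile SET nick = 'Lucas', salary = 300.2 WHERE name = 'Bill'
//   UPDATE Profile ADD tags = 'java'
//   UPDATE Profile PUT prefs = 'theme', 'dark'
//   UPDATE Profile REMOVE oldField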
public Object execute(final Map<Object, Object> iArgs) {
if (subjectName == null)
throw new OCommandExecutionException("Can't execute the command because it hasn't been parsed yet");
parameters = iArgs;
Map<Object, Object> queryArgs = new HashMap<Object, Object>();
for (int i = parameterCounter; parameters != null && i < parameters.size(); i++) {
if (parameters.get(i) != null)
queryArgs.put(i - parameterCounter, parameters.get(i));
}
database.query(query, queryArgs);
return recordCount;
}
/**
* Update current record.
*/
@SuppressWarnings("unchecked")
public boolean result(final Object iRecord) {
final ODocument record = (ODocument) iRecord;
boolean recordUpdated = false;
// BIND VALUES TO UPDATE
Object v;
if (setEntries.size() > 0) {
OSQLHelper.bindParameters(record, setEntries, parameters);
recordUpdated = true;
}
// BIND VALUES TO ADD
Collection<Object> coll;
Object fieldValue;
for (Map.Entry<String, Object> entry : addEntries.entrySet()) {
coll = null;
if (!record.containsField(entry.getKey())) {
// GET THE TYPE IF ANY
if (record.getSchemaClass() != null) {
OProperty prop = record.getSchemaClass().getProperty(entry.getKey());
if (prop != null && prop.getType() == OType.LINKSET)
// SET TYPE
coll = new HashSet<Object>();
}
if (coll == null)
// IN ALL OTHER CASES USE A LIST
coll = new ArrayList<Object>();
record.field(entry.getKey(), coll);
} else {
fieldValue = record.field(entry.getKey());
if (fieldValue instanceof Collection<?>)
coll = (Collection<Object>) fieldValue;
else
continue;
}
v = entry.getValue();
if (v instanceof OSQLFilterItem)
v = ((OSQLFilterItem) v).getValue(record);
else if (v instanceof OSQLFunctionRuntime)
v = ((OSQLFunctionRuntime) v).execute(record);
coll.add(v);
recordUpdated = true;
}
// BIND VALUES TO PUT (AS MAP)
Map<String, Object> map;
OPair<String, Object> pair;
for (Entry<String, OPair<String, Object>> entry : putEntries.entrySet()) {
fieldValue = record.field(entry.getKey());
if (fieldValue == null) {
if (record.getSchemaClass() != null) {
final OProperty property = record.getSchemaClass().getProperty(entry.getKey());
if (property != null
&& (property.getType() != null && (!property.getType().equals(OType.EMBEDDEDMAP) && !property.getType().equals(
OType.LINKMAP)))) {
throw new OCommandExecutionException("field " + entry.getKey() + " is not defined as a map");
}
}
fieldValue = new HashMap<String, Object>();
record.field(entry.getKey(), fieldValue);
}
if (fieldValue instanceof Map<?, ?>) {
map = (Map<String, Object>) fieldValue;
pair = entry.getValue();
if (pair.getValue() instanceof OSQLFilterItem)
pair.setValue(((OSQLFilterItem) pair.getValue()).getValue(record));
else if (pair.getValue() instanceof OSQLFunctionRuntime)
pair.setValue(((OSQLFunctionRuntime) pair.getValue()).execute(record));
map.put(pair.getKey(), pair.getValue());
recordUpdated = true;
}
}
// REMOVE FIELD IF ANY
for (Map.Entry<String, Object> entry : removeEntries.entrySet()) {
v = entry.getValue();
if (v == EMPTY_VALUE) {
record.removeField(entry.getKey());
recordUpdated = true;
} else {
fieldValue = record.field(entry.getKey());
if (fieldValue instanceof Collection<?>) {
coll = (Collection<Object>) fieldValue;
if (coll.remove(v))
recordUpdated = true;
} else if (fieldValue instanceof Map<?, ?>) {
map = (Map<String, Object>) fieldValue;
if (map.remove(v) != null)
recordUpdated = true;
}
}
}
if (recordUpdated) {
record.setDirty();
record.save();
recordCount++;
}
return true;
}
private int parseSetFields(final StringBuilder word, int pos) {
String fieldName;
String fieldValue;
int newPos = pos;
while (pos != -1 && (setEntries.size() == 0 || word.toString().equals(","))) {
newPos = OSQLHelper.nextWord(text, textUpperCase, pos, word, false);
if (newPos == -1)
throw new OCommandSQLParsingException("Field name expected", text, pos);
pos = newPos;
fieldName = word.toString();
newPos = OStringParser.jumpWhiteSpaces(text, pos);
if (newPos == -1 || text.charAt(newPos) != '=')
throw new OCommandSQLParsingException("Character '=' was expected", text, pos);
pos = newPos;
newPos = OSQLHelper.nextWord(text, textUpperCase, pos + 1, word, false, " =><");
if (newPos == -1)
throw new OCommandSQLParsingException("Value expected", text, pos);
fieldValue = word.toString();
if (fieldValue.endsWith(",")) {
pos = newPos - 1;
fieldValue = fieldValue.substring(0, fieldValue.length() - 1);
} else
pos = newPos;
// INSERT TRANSFORMED FIELD VALUE
setEntries.put(fieldName, getFieldValueCountingParameters(fieldValue));
pos = OSQLHelper.nextWord(text, textUpperCase, pos, word, true);
}
if (setEntries.size() == 0)
throw new OCommandSQLParsingException("Entries to set <field> = <value> are missed. Example: name = 'Bill', salary = 300.2",
text, pos);
return pos;
}
private int parseAddFields(final StringBuilder word, int pos) {
String fieldName;
String fieldValue;
int newPos = pos;
while (pos != -1 && (addEntries.size() == 0 || word.toString().equals(",")) && !word.toString().equals(KEYWORD_WHERE)) {
newPos = OSQLHelper.nextWord(text, textUpperCase, pos, word, false);
if (newPos == -1)
throw new OCommandSQLParsingException("Field name expected", text, pos);
pos = newPos;
fieldName = word.toString();
newPos = OStringParser.jumpWhiteSpaces(text, pos);
if (newPos == -1 || text.charAt(newPos) != '=')
throw new OCommandSQLParsingException("Character '=' was expected", text, pos);
pos = newPos;
newPos = OSQLHelper.nextWord(text, textUpperCase, pos + 1, word, false, " =><");
if (newPos == -1)
throw new OCommandSQLParsingException("Value expected", text, pos);
fieldValue = word.toString();
if (fieldValue.endsWith(",")) {
pos = newPos - 1;
fieldValue = fieldValue.substring(0, fieldValue.length() - 1);
} else
pos = newPos;
// INSERT TRANSFORMED FIELD VALUE
addEntries.put(fieldName, getFieldValueCountingParameters(fieldValue));
pos = OSQLHelper.nextWord(text, textUpperCase, pos, word, true);
}
if (addEntries.size() == 0)
throw new OCommandSQLParsingException("Entries to add <field> = <value> are missed. Example: name = 'Bill', salary = 300.2",
text, pos);
return pos;
}
private int parsePutFields(final StringBuilder word, int pos) {
String fieldName;
String fieldKey;
String fieldValue;
int newPos = pos;
while (pos != -1 && (putEntries.size() == 0 || word.toString().equals(",")) && !word.toString().equals(KEYWORD_WHERE)) {
newPos = OSQLHelper.nextWord(text, textUpperCase, pos, word, false);
if (newPos == -1)
throw new OCommandSQLParsingException("Field name expected", text, pos);
pos = newPos;
fieldName = word.toString();
newPos = OStringParser.jumpWhiteSpaces(text, pos);
if (newPos == -1 || text.charAt(newPos) != '=')
throw new OCommandSQLParsingException("Character '=' was expected", text, pos);
pos = newPos;
newPos = OSQLHelper.nextWord(text, textUpperCase, pos + 1, word, false, " =><,");
if (newPos == -1)
throw new OCommandSQLParsingException("Key expected", text, pos);
fieldKey = word.toString();
if (fieldKey.endsWith(",")) {
pos = newPos + 1;
fieldKey = fieldKey.substring(0, fieldKey.length() - 1);
} else {
pos = newPos;
newPos = OStringParser.jumpWhiteSpaces(text, pos);
if (newPos == -1 || text.charAt(newPos) != ',')
throw new OCommandSQLParsingException("',' expected", text, pos);
pos = newPos;
}
newPos = OSQLHelper.nextWord(text, textUpperCase, pos + 1, word, false, " =><,");
if (newPos == -1)
throw new OCommandSQLParsingException("Value expected", text, pos);
fieldValue = word.toString();
if (fieldValue.endsWith(",")) {
pos = newPos - 1;
fieldValue = fieldValue.substring(0, fieldValue.length() - 1);
} else
pos = newPos;
// INSERT TRANSFORMED FIELD VALUE
putEntries.put(fieldName, new OPair<String, Object>((String) getFieldValueCountingParameters(fieldKey),
getFieldValueCountingParameters(fieldValue)));
pos = OSQLHelper.nextWord(text, textUpperCase, pos, word, true);
}
if (putEntries.size() == 0)
throw new OCommandSQLParsingException("Entries to put <field> = <key>, <value> are missed. Example: name = 'Bill', 30", text,
pos);
return pos;
}
private int parseRemoveFields(final StringBuilder word, int pos) {
String fieldName;
String fieldValue;
Object value;
int newPos = pos;
while (pos != -1 && (removeEntries.size() == 0 || word.toString().equals(",")) && !word.toString().equals(KEYWORD_WHERE)) {
newPos = OSQLHelper.nextWord(text, textUpperCase, pos, word, false);
if (newPos == -1)
throw new OCommandSQLParsingException("Field name expected", text, pos);
fieldName = word.toString();
pos = OStringParser.jumpWhiteSpaces(text, newPos);
if (pos > -1 && text.charAt(pos) == '=') {
pos = OSQLHelper.nextWord(text, textUpperCase, pos + 1, word, false, " =><,");
if (pos == -1)
throw new OCommandSQLParsingException("Value expected", text, pos);
fieldValue = word.toString();
if (fieldValue.endsWith(",")) {
pos = newPos - 1;
fieldValue = fieldValue.substring(0, fieldValue.length() - 1);
} else
pos = newPos;
value = getFieldValueCountingParameters(fieldValue);
} else
value = EMPTY_VALUE;
// INSERT FIELD NAME TO BE REMOVED
removeEntries.put(fieldName, value);
pos = OSQLHelper.nextWord(text, textUpperCase, pos, word, true);
}
if (removeEntries.size() == 0)
throw new OCommandSQLParsingException("Field(s) to remove are missing. Example: name, salary", text, pos);
return pos;
}
private Object getFieldValueCountingParameters(String fieldValue) {
if (fieldValue.trim().equals("?"))
parameterCounter++;
return OSQLHelper.parseValue(database, this, fieldValue);
}
}
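/*
 * Editor's sketch (not part of the original file): a minimal illustration of the three UPDATE
 * clauses handled by parseAddFields/parsePutFields/parseRemoveFields above. It assumes the
 * classic OrientDB document API (ODatabaseDocumentTx / OCommandSQL); the "Account" class and
 * field names in the statements are illustrative only.
 */
class UpdateClauseSketch {
static void run(com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx db) {
// ADD <field> = <value>: appends a value to a collection field (parseAddFields)
db.command(new com.orientechnologies.orient.core.sql.OCommandSQL("UPDATE Account ADD addresses = #13:1")).execute();
// PUT <field> = <key>, <value>: puts an entry into a map field (parsePutFields)
db.command(new com.orientechnologies.orient.core.sql.OCommandSQL("UPDATE Account PUT properties = 'vip', true")).execute();
// REMOVE <field> [= <value>]: removes a field, or a single entry from a collection (parseRemoveFields)
db.command(new com.orientechnologies.orient.core.sql.OCommandSQL("UPDATE Account REMOVE addresses = #13:1")).execute();
}
}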
|
|
package com.plexobject.dp.domain;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
import java.util.Arrays;
import org.junit.Before;
import org.junit.Test;
public class MetadataTest {
@Before
public void setup() {
MetaFieldFactory.reset();
}
@Test
public void testCreateEmpty() {
Metadata fields = new Metadata();
assertEquals(0, fields.size());
}
@Test
public void testGetMetaFieldsByKinds() {
MetaField field1 = MetaFieldFactory.createText("name", "Kind1", false);
MetaField field2 = MetaFieldFactory.createText("phone", "Kind2", false);
Metadata fields = new Metadata(field1, field2);
assertTrue(fields.hasMetaFieldsByAnyKinds("Kind1", "Kind2"));
assertEquals(1, fields.getMetaFieldsByKinds("Kind1").size());
assertEquals(1, fields.getMetaFieldsByKinds("Kind2").size());
assertEquals(2, fields.getMetaFieldsByKinds("Kind1", "Kind2").size());
}
@Test
public void testCreateVector() {
MetaField field1 = MetaFieldFactory.createText("name", "Test", false);
MetaField field2 = MetaFieldFactory.createText("phone", "Test", false);
Metadata fields = new Metadata(field1, field2);
assertEquals(2, fields.size());
assertTrue(fields.getMetaFields().contains(field1));
assertTrue(fields.getMetaFields().contains(field2));
}
@Test
public void testCreateCollection() {
MetaField field1 = MetaFieldFactory.createText("name", "Test", false);
MetaField field2 = MetaFieldFactory.createText("phone", "Test", false);
Metadata fields = new Metadata(Arrays.asList(field1, field2));
assertEquals(2, fields.size());
assertTrue(fields.getMetaFields().contains(field1));
assertTrue(fields.getMetaFields().contains(field2));
}
@Test
public void testAddMetaField() {
Metadata fields = new Metadata();
MetaField field1 = MetaFieldFactory.createText("name", "Test", false);
MetaField field2 = MetaFieldFactory.createText("phone", "Test", false);
fields.addMetaField(field1);
fields.addMetaField(field1);
fields.addMetaField(field2);
assertEquals(2, fields.size());
assertTrue(fields.getMetaFields().contains(field1));
assertTrue(fields.getMetaFields().contains(field2));
}
@Test
public void testRemoveMetaField() {
Metadata fields = new Metadata();
MetaField field1 = MetaFieldFactory.createText("name", "Test", false);
MetaField field2 = MetaFieldFactory.createText("phone", "Test", false);
fields.addMetaField(field1);
fields.addMetaField(field2);
assertEquals(2, fields.size());
assertTrue(fields.getMetaFields().contains(field1));
assertTrue(fields.getMetaFields().contains(field2));
fields.removeMetaField(field1);
assertEquals(1, fields.size());
assertFalse(fields.getMetaFields().contains(field1));
assertTrue(fields.getMetaFields().contains(field2));
}
@Test
public void testAddMetaFields() {
Metadata fields1 = new Metadata();
MetaField field1a = MetaFieldFactory.createText("name", "Test", false);
MetaField field1b = MetaFieldFactory.createText("phone", "Test", false);
fields1.addMetaField(field1a);
fields1.addMetaField(field1b);
Metadata fields2 = new Metadata();
fields2.merge(fields1);
assertEquals(2, fields2.size());
assertTrue(fields2.getMetaFields().contains(field1a));
assertTrue(fields2.getMetaFields().contains(field1b));
}
@Test
public void testRemoveMetaFields() {
Metadata fields1 = new Metadata();
MetaField field1a = MetaFieldFactory.createText("name", "Test", false);
MetaField field1b = MetaFieldFactory.createText("phone", "Test", false);
fields1.addMetaField(field1a);
fields1.addMetaField(field1b);
Metadata fields2 = new Metadata();
fields2.merge(fields1);
assertEquals(2, fields2.size());
assertTrue(fields2.getMetaFields().contains(field1a));
assertTrue(fields2.getMetaFields().contains(field1b));
fields2.removeMetadata(fields1);
assertEquals(0, fields2.size());
}
@Test
public void testContains() {
Metadata fields1 = new Metadata();
MetaField field1a = MetaFieldFactory.createText("name", "Test", false);
MetaField field1b = MetaFieldFactory.createText("phone", "Test", false);
fields1.addMetaField(field1a);
fields1.addMetaField(field1b);
Metadata fields2 = new Metadata();
fields2.merge(fields1);
assertTrue(fields1.containsAll(fields2));
fields1.removeMetaField(field1a);
assertFalse(fields1.containsAll(fields2));
//
assertTrue(fields1.contains(field1b));
assertFalse(fields1.contains(field1a));
}
@Test
public void testGetMissingCount() {
MetaField field1 = MetaFieldFactory.createText("name", "Test", false);
MetaField field2 = MetaFieldFactory.createText("phone", "Test", false);
MetaField field3 = MetaFieldFactory
.createText("address", "Test", false);
Metadata fields1 = Metadata.from(field1, field2);
Metadata fields2 = Metadata.from(field1, field2, field3);
//
assertEquals(1, fields1.getMissingCount(fields2));
Metadata missing = fields1.getMissingMetadata(fields2);
assertEquals(1, missing.size());
}
@Test
public void testGetMatchingCount() {
MetaField field1 = MetaFieldFactory.createText("name", "Test", false);
MetaField field2 = MetaFieldFactory.createText("phone", "Test", false);
MetaField field3 = MetaFieldFactory
.createText("address", "Test", false);
Metadata fields1 = Metadata.from(field1, field2);
Metadata fields2 = Metadata.from(field1, field2, field3);
//
assertEquals(2, fields1.getMatchingCount(fields2));
}
@Test
public void testOf() {
MetaField field1 = MetaFieldFactory.createText("name", "Test", false);
MetaField field2 = MetaFieldFactory.createText("phone", "Test", false);
Metadata fields = Metadata.from(field1, field2);
assertEquals(2, fields.size());
assertTrue(fields.getMetaFields().contains(field1));
assertTrue(fields.getMetaFields().contains(field2));
}
@Test
public void testOfString() {
MetaField field1 = MetaFieldFactory.createText("name", "Test", false);
MetaField field2 = MetaFieldFactory.createText("phone", "Test", false);
Metadata fields = Metadata.from(field1, field2);
assertEquals(2, fields.size());
}
@Test
public void testHashcode() {
MetaField field1 = MetaFieldFactory.createText("name", "Test", false);
MetaField field2 = MetaFieldFactory.createText("phone", "Test", false);
Metadata fields = Metadata.from(field1, field2);
assertTrue(fields.hashCode() != 0);
}
@Test
public void testEquals() {
MetaField field1 = MetaFieldFactory.createText("name", "Test", false);
MetaField field2 = MetaFieldFactory.createText("phone", "Test", false);
MetaField field3 = MetaFieldFactory
.createText("address", "Test", false);
Metadata fields1 = Metadata.from(field1, field2);
Metadata fields2 = Metadata.from(field1, field2);
Metadata fields3 = Metadata.from(field1, field3);
Metadata fields4 = Metadata.from(field1, field2, field3);
assertEquals(fields1, fields1);
assertEquals(fields1, fields2);
assertNotEquals(fields1, fields3);
assertNotEquals(fields1, fields4);
assertNotEquals(fields1, 1);
assertNotEquals(fields1, null);
}
@Test
public void testToString() {
MetaField field1 = MetaFieldFactory.createText("name", "Test", false);
MetaField field2 = MetaFieldFactory.createText("phone", "Test", false);
Metadata fields = Metadata.from(field1, field2);
assertTrue(fields.toString().contains("name"));
}
}
|
|
/*
* Copyright (C) 2014 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.cassandra.lucene.schema.mapping;
import com.stratio.cassandra.lucene.IndexException;
import com.stratio.cassandra.lucene.schema.mapping.builder.TextMapperBuilder;
import org.apache.lucene.document.Field;
import org.junit.Test;
import java.util.UUID;
import static com.stratio.cassandra.lucene.schema.SchemaBuilders.textMapper;
import static org.junit.Assert.*;
/**
* @author Andres de la Pena {@literal <adelapena@stratio.com>}
*/
public class TextMapperTest extends AbstractMapperTest {
@Test
public void testConstructorWithoutArgs() {
TextMapper mapper = textMapper().build("field");
assertEquals("Field is not set", "field", mapper.field);
assertEquals("Column is not set to default value", "field", mapper.column);
assertEquals("Mapped columns are not set", 1, mapper.mappedColumns.size());
assertTrue("Mapped columns are not set", mapper.mappedColumns.contains("field"));
assertNull("Analyzer is not set to default value", mapper.analyzer);
}
@Test
public void testConstructorWithAllArgs() {
TextMapper mapper = textMapper().column("column").analyzer("spanish").build("field");
assertEquals("Field is not set", "field", mapper.field);
assertEquals("Column is not set", "column", mapper.column);
assertEquals("Mapped columns are not set", 1, mapper.mappedColumns.size());
assertTrue("Mapped columns are not set", mapper.mappedColumns.contains("column"));
assertEquals("Analyzer is not set", "spanish", mapper.analyzer);
}
@Test
public void testJsonSerialization() {
TextMapperBuilder builder = textMapper().column("column").analyzer("spanish");
testJson(builder, "{type:\"text\",column:\"column\",analyzer:\"spanish\"}");
}
@Test
public void testJsonSerializationDefaults() {
TextMapperBuilder builder = textMapper();
testJson(builder, "{type:\"text\"}");
}
@Test
public void testBaseClass() {
TextMapper mapper = textMapper().analyzer("SpanishAnalyzer").build("field");
assertEquals("Base class is wrong", String.class, mapper.base);
}
@Test(expected = IndexException.class)
public void testSortField() {
TextMapper mapper = textMapper().analyzer("SpanishAnalyzer").build("field");
mapper.sortField("field", true);
}
@Test
public void testValueNull() {
TextMapper mapper = textMapper().build("field");
assertNull("Base for nulls is wrong", mapper.base("test", null));
}
@Test
public void testValueInteger() {
TextMapper mapper = textMapper().analyzer("org.apache.lucene.analysis.en.EnglishAnalyzer").build("field");
String parsed = mapper.base("test", 3);
assertEquals("Base for integers is wrong", "3", parsed);
}
@Test
public void testValueLong() {
TextMapper mapper = textMapper().analyzer("org.apache.lucene.analysis.en.EnglishAnalyzer").build("field");
String parsed = mapper.base("test", 3L);
assertEquals("Base for longs is wrong", "3", parsed);
}
@Test
public void testValueShort() {
TextMapper mapper = textMapper().analyzer("org.apache.lucene.analysis.en.EnglishAnalyzer").build("field");
String parsed = mapper.base("test", new Short("3"));
assertEquals("Base for longs is wrong", "3", parsed);
}
@Test
public void testValueByte() {
TextMapper mapper = textMapper().analyzer("org.apache.lucene.analysis.en.EnglishAnalyzer").build("field");
String parsed = mapper.base("test", new Byte("3"));
assertEquals("Base for longs is wrong", "3", parsed);
}
@Test
public void testValueFloatWithoutDecimal() {
TextMapper mapper = textMapper().analyzer("org.apache.lucene.analysis.en.EnglishAnalyzer").build("field");
String parsed = mapper.base("test", 3f);
assertEquals("Base for floats is wrong", "3.0", parsed);
}
@Test
public void testValueFloatWithDecimalFloor() {
TextMapper mapper = textMapper().analyzer("org.apache.lucene.analysis.en.EnglishAnalyzer").build("field");
String parsed = mapper.base("test", 3.5f);
assertEquals("Base for floats is wrong", "3.5", parsed);
}
@Test
public void testValueFloatWithDecimalCeil() {
TextMapper mapper = textMapper().analyzer("org.apache.lucene.analysis.en.EnglishAnalyzer").build("field");
String parsed = mapper.base("test", 3.6f);
assertEquals("Base for floats is wrong", "3.6", parsed);
}
@Test
public void testValueDoubleWithoutDecimal() {
TextMapper mapper = textMapper().analyzer("org.apache.lucene.analysis.en.EnglishAnalyzer").build("field");
String parsed = mapper.base("test", 3d);
assertEquals("Base for doubles is wrong", "3.0", parsed);
}
@Test
public void testValueDoubleWithDecimalFloor() {
TextMapper mapper = textMapper().analyzer("org.apache.lucene.analysis.en.EnglishAnalyzer").build("field");
String parsed = mapper.base("test", 3.5d);
assertEquals("Base for doubles is wrong", "3.5", parsed);
}
@Test
public void testValueDoubleWithDecimalCeil() {
TextMapper mapper = textMapper().analyzer("org.apache.lucene.analysis.en.EnglishAnalyzer").build("field");
String parsed = mapper.base("test", 3.6d);
assertEquals("Base for doubles is wrong", "3.6", parsed);
}
@Test
public void testValueStringWithoutDecimal() {
TextMapper mapper = textMapper().analyzer("org.apache.lucene.analysis.en.EnglishAnalyzer").build("field");
String parsed = mapper.base("test", "3");
assertEquals("Base for strings is wrong", "3", parsed);
}
@Test
public void testValueStringWithDecimalFloor() {
TextMapper mapper = textMapper().analyzer("org.apache.lucene.analysis.en.EnglishAnalyzer").build("field");
String parsed = mapper.base("test", "3.2");
assertEquals("Base for strings is wrong", "3.2", parsed);
}
@Test
public void testValueStringWithDecimalCeil() {
TextMapper mapper = textMapper().analyzer("org.apache.lucene.analysis.en.EnglishAnalyzer").build("field");
String parsed = mapper.base("test", "3.6");
assertEquals("Base for strings is wrong", "3.6", parsed);
}
@Test
public void testValueUUID() {
TextMapper mapper = textMapper().analyzer("org.apache.lucene.analysis.en.EnglishAnalyzer").build("field");
String parsed = mapper.base("test", UUID.fromString("550e8400-e29b-41d4-a716-446655440000"));
assertEquals("Base for UUIDs is wrong", "550e8400-e29b-41d4-a716-446655440000", parsed);
}
@Test
public void testIndexedField() {
TextMapper mapper = textMapper().analyzer("org.apache.lucene.analysis.en.EnglishAnalyzer").build("field");
Field field = mapper.indexedField("name", "hello")
.orElseThrow(() -> new AssertionError("Indexed field is not created"));
assertEquals("Indexed field name is wrong", "name", field.name());
assertEquals("Indexed field value is wrong", "hello", field.stringValue());
assertFalse("Indexed field type is wrong", field.fieldType().stored());
}
@Test
public void testSortedField() {
TextMapper mapper = textMapper().analyzer("org.apache.lucene.analysis.en.EnglishAnalyzer").build("field");
assertFalse("Sorted field should not be created", mapper.sortedField("name", "hello").isPresent());
}
@Test
public void testExtractAnalyzers() {
TextMapper mapper = textMapper().analyzer("org.apache.lucene.analysis.en.EnglishAnalyzer").build("field");
assertEquals("Method #analyzer is wrong", "org.apache.lucene.analysis.en.EnglishAnalyzer", mapper.analyzer);
}
@Test
public void testToString() {
TextMapper mapper = textMapper().validated(true).analyzer("English").build("field");
assertEquals("Method #toString is wrong",
"TextMapper{field=field, validated=true, column=field, analyzer=English}",
mapper.toString());
}
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.node.tasks.list;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.action.support.tasks.BaseTasksResponse;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.tasks.TaskInfo;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* Returns the list of tasks currently running on the nodes
*/
public class ListTasksResponse extends BaseTasksResponse implements ToXContent {
private List<TaskInfo> tasks;
private Map<String, List<TaskInfo>> perNodeTasks;
private List<TaskGroup> groups;
private DiscoveryNodes discoveryNodes;
public ListTasksResponse() {
}
public ListTasksResponse(List<TaskInfo> tasks, List<TaskOperationFailure> taskFailures,
List<? extends FailedNodeException> nodeFailures) {
super(taskFailures, nodeFailures);
this.tasks = tasks == null ? Collections.emptyList() : Collections.unmodifiableList(new ArrayList<>(tasks));
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
tasks = Collections.unmodifiableList(in.readList(TaskInfo::new));
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeList(tasks);
}
/**
* Returns the list of tasks by node
*/
public Map<String, List<TaskInfo>> getPerNodeTasks() {
if (perNodeTasks == null) {
perNodeTasks = tasks.stream().collect(Collectors.groupingBy(t -> t.getTaskId().getNodeId()));
}
return perNodeTasks;
}
public List<TaskGroup> getTaskGroups() {
if (groups == null) {
buildTaskGroups();
}
return groups;
}
private void buildTaskGroups() {
Map<TaskId, TaskGroup.Builder> taskGroups = new HashMap<>();
List<TaskGroup.Builder> topLevelTasks = new ArrayList<>();
// First populate all tasks
for (TaskInfo taskInfo : this.tasks) {
taskGroups.put(taskInfo.getTaskId(), TaskGroup.builder(taskInfo));
}
// Now go through all task group builders and add children to their parents
for (TaskGroup.Builder taskGroup : taskGroups.values()) {
TaskId parentTaskId = taskGroup.getTaskInfo().getParentTaskId();
if (parentTaskId.isSet()) {
TaskGroup.Builder parentTask = taskGroups.get(parentTaskId);
if (parentTask != null) {
// we found parent in the list of tasks - add it to the parent list
parentTask.addGroup(taskGroup);
} else {
// the parent is unknown (zombie) or was filtered out - add it to the top task list
topLevelTasks.add(taskGroup);
}
} else {
// top level task - add it to the top task list
topLevelTasks.add(taskGroup);
}
}
this.groups = Collections.unmodifiableList(topLevelTasks.stream().map(TaskGroup.Builder::build).collect(Collectors.toList()));
}
public List<TaskInfo> getTasks() {
return tasks;
}
/**
* Set a reference to the {@linkplain DiscoveryNodes}. Used for calling {@link #toXContent(XContentBuilder, ToXContent.Params)} with
* {@code group_by=nodes}.
*/
public void setDiscoveryNodes(DiscoveryNodes discoveryNodes) {
// Not set by default; the caller must supply the discovery nodes before rendering with group_by=nodes (see toXContent).
this.discoveryNodes = discoveryNodes;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (getTaskFailures() != null && getTaskFailures().size() > 0) {
builder.startArray("task_failures");
for (TaskOperationFailure ex : getTaskFailures()){
builder.startObject();
builder.value(ex);
builder.endObject();
}
builder.endArray();
}
if (getNodeFailures() != null && getNodeFailures().size() > 0) {
builder.startArray("node_failures");
for (FailedNodeException ex : getNodeFailures()) {
builder.startObject();
ex.toXContent(builder, params);
builder.endObject();
}
builder.endArray();
}
String groupBy = params.param("group_by", "nodes");
if ("nodes".equals(groupBy)) {
if (discoveryNodes == null) {
throw new IllegalStateException("discoveryNodes must be set before calling toXContent with group_by=nodes");
}
builder.startObject("nodes");
for (Map.Entry<String, List<TaskInfo>> entry : getPerNodeTasks().entrySet()) {
DiscoveryNode node = discoveryNodes.get(entry.getKey());
builder.startObject(entry.getKey());
if (node != null) {
// If the node is no longer part of the cluster, we simply skip its node-level information.
builder.field("name", node.getName());
builder.field("transport_address", node.getAddress().toString());
builder.field("host", node.getHostName());
builder.field("ip", node.getAddress());
builder.startArray("roles");
for (DiscoveryNode.Role role : node.getRoles()) {
builder.value(role.getRoleName());
}
builder.endArray();
if (!node.getAttributes().isEmpty()) {
builder.startObject("attributes");
for (Map.Entry<String, String> attrEntry : node.getAttributes().entrySet()) {
builder.field(attrEntry.getKey(), attrEntry.getValue());
}
builder.endObject();
}
}
builder.startObject("tasks");
for(TaskInfo task : entry.getValue()) {
builder.field(task.getTaskId().toString());
task.toXContent(builder, params);
}
builder.endObject();
builder.endObject();
}
builder.endObject();
} else if ("parents".equals(groupBy)) {
builder.startObject("tasks");
for (TaskGroup group : getTaskGroups()) {
builder.field(group.getTaskInfo().getTaskId().toString());
group.toXContent(builder, params);
}
builder.endObject();
}
return builder;
}
@Override
public String toString() {
return Strings.toString(this);
}
}
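/*
 * Editor's sketch (not part of the original file): renders a ListTasksResponse, showing the
 * contract noted on setDiscoveryNodes() - the discovery nodes must be supplied before using the
 * default group_by=nodes rendering, otherwise toXContent throws IllegalStateException. The
 * XContentFactory helper is assumed from the same Elasticsearch version as this class.
 */
class ListTasksResponseRenderSketch {
static XContentBuilder render(ListTasksResponse response, DiscoveryNodes nodes) throws IOException {
// Required for the default grouping; params.param("group_by", "nodes") above defaults to "nodes".
response.setDiscoveryNodes(nodes);
XContentBuilder builder = org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder();
builder.startObject();
response.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
// For parent/child grouping pass new ToXContent.MapParams(singletonMap("group_by", "parents"))
// instead; that path does not need the discovery nodes.
return builder;
}
}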
|
|
/*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apiman.common.es.util;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.util.Properties;
/**
* Some useful elasticsearch impl constants.
*
* @author eric.wittmann@redhat.com
*/
public class EsConstants {
public static final String GATEWAY_INDEX_NAME = "apiman_gateway"; //$NON-NLS-1$
public static final String METRICS_INDEX_NAME = "apiman_metrics"; //$NON-NLS-1$
public static final String CACHE_INDEX_NAME = "apiman_cache"; //$NON-NLS-1$
public static final String MANAGER_INDEX_NAME = "apiman_manager"; //$NON-NLS-1$
//cache indices
public static final String INDEX_CACHE_CACHE_ENTRY = "cacheEntry";
//gateway indices (indices have to be lowercase)
public static final String INDEX_APIS = "apis"; //$NON-NLS-1$
public static final String INDEX_CLIENTS = "clients"; //$NON-NLS-1$
public static final String INDEX_RATE_BUCKET = "ratebucket"; //$NON-NLS-1$
public static final String INDEX_SHARED_STATE_PROPERTY = "sharedstateproperty"; //$NON-NLS-1$
public static final String INDEX_DATA_VERSION = "dataversion"; //$NON-NLS-1$
// manager indices (indices have to be lowercase)
public static final String INDEX_MANAGER_POSTFIX_GATEWAY = "gateway";
public static final String INDEX_MANAGER_POSTFIX_DOWNLOAD = "download";
public static final String INDEX_MANAGER_POSTFIX_POLICY_DEF = "policydef";
public static final String INDEX_MANAGER_POSTFIX_PLUGIN = "plugin";
public static final String INDEX_MANAGER_POSTFIX_ROLE = "role";
public static final String INDEX_MANAGER_POSTFIX_USER = "user";
public static final String INDEX_MANAGER_POSTFIX_ROLE_MEMBERSHIP = "rolemembership";
public static final String INDEX_MANAGER_POSTFIX_ORGANIZATION = "organization";
public static final String INDEX_MANAGER_POSTFIX_AUDIT_ENTRY = "auditentry";
public static final String INDEX_MANAGER_POSTFIX_PLAN = "plan";
public static final String INDEX_MANAGER_POSTFIX_PLAN_VERSION = "planversion";
public static final String INDEX_MANAGER_POSTFIX_PLAN_POLICIES = "planpolicies";
public static final String INDEX_MANAGER_POSTFIX_API = "api";
public static final String INDEX_MANAGER_POSTFIX_API_DEFINITION = "apidefinition";
public static final String INDEX_MANAGER_POSTFIX_API_VERSION = "apiversion";
public static final String INDEX_MANAGER_POSTFIX_API_POLICIES = "apipolicies";
public static final String INDEX_MANAGER_POSTFIX_CLIENT = "client";
public static final String INDEX_MANAGER_POSTFIX_CLIENT_VERSION = "clientversion";
public static final String INDEX_MANAGER_POSTFIX_CLIENT_POLICIES = "clientpolicies";
public static final String INDEX_MANAGER_POSTFIX_CONTRACT = "contract";
public static final String INDEX_MANAGER_POSTFIX_DEVELOPER = "developer";
public static final String INDEX_MANAGER_POSTFIX_METADATA = "metadata";
public static final String[] MANAGER_INDEX_POSTFIXES = {
INDEX_MANAGER_POSTFIX_GATEWAY,
INDEX_MANAGER_POSTFIX_DOWNLOAD,
INDEX_MANAGER_POSTFIX_POLICY_DEF,
INDEX_MANAGER_POSTFIX_PLUGIN,
INDEX_MANAGER_POSTFIX_ROLE,
INDEX_MANAGER_POSTFIX_USER,
INDEX_MANAGER_POSTFIX_ROLE_MEMBERSHIP,
INDEX_MANAGER_POSTFIX_ORGANIZATION,
INDEX_MANAGER_POSTFIX_AUDIT_ENTRY,
INDEX_MANAGER_POSTFIX_PLAN,
INDEX_MANAGER_POSTFIX_PLAN_VERSION,
INDEX_MANAGER_POSTFIX_PLAN_POLICIES,
INDEX_MANAGER_POSTFIX_API,
INDEX_MANAGER_POSTFIX_API_DEFINITION,
INDEX_MANAGER_POSTFIX_API_VERSION,
INDEX_MANAGER_POSTFIX_API_POLICIES,
INDEX_MANAGER_POSTFIX_CLIENT,
INDEX_MANAGER_POSTFIX_CLIENT_VERSION,
INDEX_MANAGER_POSTFIX_CLIENT_POLICIES,
INDEX_MANAGER_POSTFIX_CONTRACT,
INDEX_MANAGER_POSTFIX_DEVELOPER,
INDEX_MANAGER_POSTFIX_METADATA
};
// es fields (field names could be camelcase)
public static final String ES_FIELD_APIMAN_VERSION = "apimanVersion";
public static final String ES_FIELD_APIMAN_VERSION_AT_IMPORT = "apimanVersionAtImport";
public static final String ES_FIELD_API_DESCRIPTION = "apiDescription";
public static final String ES_FIELD_API_DURATION = "apiDuration";
public static final String ES_FIELD_API_END = "apiEnd";
public static final String ES_FIELD_API_ID = "apiId";
public static final String ES_FIELD_API_KEY = "apiKey";
public static final String ES_FIELD_API_NAME = "apiName";
public static final String ES_FIELD_API_ORGANIZATION_ID = "apiOrganizationId";
public static final String ES_FIELD_API_ORGANIZATION_NAME = "apiOrganizationName";
public static final String ES_FIELD_API_ORG_ID = "apiOrgId";
public static final String ES_FIELD_API_START = "apiStart";
public static final String ES_FIELD_API_VERSION = "apiVersion";
public static final String ES_FIELD_ARTIFACT_ID = "artifactId";
public static final String ES_FIELD_AUTO_GRANT = "autoGrant";
public static final String ES_FIELD_BYTES_DOWNLOADED = "bytesDownloaded";
public static final String ES_FIELD_BYTES_UPLOADED = "bytesUploaded";
public static final String ES_FIELD_CLIENT_DESCRIPTION = "clientDescription";
public static final String ES_FIELD_CLIENT_ID = "clientId";
public static final String ES_FIELD_CLIENT_NAME = "clientName";
public static final String ES_FIELD_CLIENT_ORGANIZATION_ID = "clientOrganizationId";
public static final String ES_FIELD_CLIENT_ORGANIZATION_NAME = "clientOrganizationName";
public static final String ES_FIELD_CLIENT_ORG_ID = "clientOrgId";
public static final String ES_FIELD_CLIENT_VERSION = "clientVersion";
public static final String ES_FIELD_CONFIGURATION = "configuration";
public static final String ES_FIELD_CONTRACT_ID = "contractId";
public static final String ES_FIELD_COUNT = "count";
public static final String ES_FIELD_CREATED_BY = "createdBy";
public static final String ES_FIELD_CREATED_ON = "createdOn";
public static final String ES_FIELD_DATA = "data";
public static final String ES_FIELD_DEFINITION_ID = "definitionId";
public static final String ES_FIELD_DEFINITION_TYPE = "definitionType";
public static final String ES_FIELD_DEFINITION_URL = "definitionUrl";
public static final String ES_FIELD_DELETED = "deleted";
public static final String ES_FIELD_DESCRIPTION = "description";
public static final String ES_FIELD_DISABLE_KEYS_STRIP = "disableKeysStrip";
public static final String ES_FIELD_EMAIL = "email";
public static final String ES_FIELD_ENDPOINT = "endpoint";
public static final String ES_FIELD_ENDPOINT_CONTENT_TYPE = "endpointContentType";
public static final String ES_FIELD_ENDPOINT_PROPERTIES = "endpointProperties";
public static final String ES_FIELD_ENDPOINT_TYPE = "endpointType";
public static final String ES_FIELD_ENTITY_ID = "entityId";
public static final String ES_FIELD_ENTITY_TYPE = "entityType";
public static final String ES_FIELD_ENTITY_VERSION = "entityVersion";
public static final String ES_FIELD_ERROR = "error";
public static final String ES_FIELD_ERROR_MESSAGE = "errorMessage";
public static final String ES_FIELD_EXPIRES = "expires";
public static final String ES_FIELD_EXPORTED_ON = "exportedOn";
public static final String ES_FIELD_FAILURE = "failure";
public static final String ES_FIELD_FAILURE_CODE = "failureCode";
public static final String ES_FIELD_FAILURE_REASON = "failureReason";
public static final String ES_FIELD_FORM = "form";
public static final String ES_FIELD_FORM_TYPE = "formType";
public static final String ES_FIELD_FULL_NAME = "fullName";
public static final String ES_FIELD_GATEWAY_ID = "gatewayId";
public static final String ES_FIELD_GROUP_ID = "groupId";
public static final String ES_FIELD_ICON = "icon";
public static final String ES_FIELD_ID = "id";
public static final String ES_FIELD_IMPORTED_ON = "importedOn";
public static final String ES_FIELD_JOINED_ON = "joinedOn";
public static final String ES_FIELD_KEYS_STRIPPING_DISABLED = "keysStrippingDisabled";
public static final String ES_FIELD_LAST = "last";
public static final String ES_FIELD_LOCKED_ON = "lockedOn";
public static final String ES_FIELD_METHOD = "method";
public static final String ES_FIELD_MODIFIED_BY = "modifiedBy";
public static final String ES_FIELD_MODIFIED_ON = "modifiedOn";
public static final String ES_FIELD_NAME = "name";
public static final String ES_FIELD_NUM_PUBLISHED = "numPublished";
public static final String ES_FIELD_ORDER_INDEX = "orderIndex";
public static final String ES_FIELD_ORGANIZATION_ID = "organizationId";
public static final String ES_FIELD_ORGANIZATION_NAME = "organizationName";
public static final String ES_FIELD_PARSE_PAYLOAD = "parsePayload";
public static final String ES_FIELD_PATH = "path";
public static final String ES_FIELD_PERMISSIONS = "permissions";
public static final String ES_FIELD_PLAN = "plan";
public static final String ES_FIELD_PLAN_DESCRIPTION = "planDescription";
public static final String ES_FIELD_PLAN_ID = "planId";
public static final String ES_FIELD_PLAN_NAME = "planName";
public static final String ES_FIELD_PLAN_VERSION = "planVersion";
public static final String ES_FIELD_PLUGIN_ID = "pluginId";
public static final String ES_FIELD_POLICY_IMPL = "policyImpl";
public static final String ES_FIELD_POLICY_JSON_CONFIG = "policyJsonConfig";
public static final String ES_FIELD_PUBLIC_API = "publicAPI";
public static final String ES_FIELD_PUBLISHED_ON = "publishedOn";
public static final String ES_FIELD_REMOTE_ADDR = "remoteAddr";
public static final String ES_FIELD_REQUEST_DURATION = "requestDuration";
public static final String ES_FIELD_REQUEST_END = "requestEnd";
public static final String ES_FIELD_REQUEST_START = "requestStart";
public static final String ES_FIELD_RESOURCE = "resource";
public static final String ES_FIELD_RESPONSE_CODE = "responseCode";
public static final String ES_FIELD_RESPONSE_MESSAGE = "responseMessage";
public static final String ES_FIELD_RETIRED_ON = "retiredOn";
public static final String ES_FIELD_ROLE_ID = "roleId";
public static final String ES_FIELD_STATUS = "status";
public static final String ES_FIELD_SUCCESS = "success";
public static final String ES_FIELD_TEMPLATE = "template";
public static final String ES_FIELD_TYPE = "type";
public static final String ES_FIELD_UPDATED_ON = "updatedOn";
public static final String ES_FIELD_URL = "url";
public static final String ES_FIELD_USER = "user";
public static final String ES_FIELD_USERNAME = "username";
public static final String ES_FIELD_USER_ID = "userId";
public static final String ES_FIELD_VALUE = "value";
public static final String ES_FIELD_VERSION = "version";
public static final String ES_FIELD_WHAT = "what";
public static final String ES_FIELD_WHO = "who";
// nested es field prefixes
public static final String ES_NESTED_FIELD_PREFIX_API_POLICIES = "apiPolicies.";
public static final String ES_NESTED_FIELD_PREFIX_CLIENTS = "clients.";
public static final String ES_NESTED_FIELD_PREFIX_CONTRACTS = "contracts.";
public static final String ES_NESTED_FIELD_PREFIX_GATEWAYS = "gateways.";
public static final String ES_NESTED_FIELD_PREFIX_PLANS = "plans.";
public static final String ES_NESTED_FIELD_PREFIX_POLICIES = "policies.";
public static final String ES_NESTED_FIELD_PREFIX_TEMPLATES = "templates.";
// nested es fields
public static final String ES_NESTED_FIELD_API_POLICIES_POLICY_IMPL = ES_NESTED_FIELD_PREFIX_API_POLICIES + ES_FIELD_POLICY_IMPL;
public static final String ES_NESTED_FIELD_API_POLICIES_POLICY_JSON_CONFIG = ES_NESTED_FIELD_PREFIX_API_POLICIES + ES_FIELD_POLICY_JSON_CONFIG;
public static final String ES_NESTED_FIELD_POLICIES_CONFIGURATION = ES_NESTED_FIELD_PREFIX_POLICIES + ES_FIELD_CONFIGURATION;
public static final String ES_NESTED_FIELD_POLICIES_CREATED_BY = ES_NESTED_FIELD_PREFIX_POLICIES + ES_FIELD_CREATED_BY;
public static final String ES_NESTED_FIELD_POLICIES_CREATED_ON = ES_NESTED_FIELD_PREFIX_POLICIES + ES_FIELD_CREATED_ON;
public static final String ES_NESTED_FIELD_POLICIES_DEFINITION_ID = ES_NESTED_FIELD_PREFIX_POLICIES + ES_FIELD_DEFINITION_ID;
public static final String ES_NESTED_FIELD_POLICIES_ID = ES_NESTED_FIELD_PREFIX_POLICIES + ES_FIELD_ID;
public static final String ES_NESTED_FIELD_POLICIES_MODIFIED_BY = ES_NESTED_FIELD_PREFIX_POLICIES + ES_FIELD_MODIFIED_BY;
public static final String ES_NESTED_FIELD_POLICIES_MODIFIED_ON = ES_NESTED_FIELD_PREFIX_POLICIES + ES_FIELD_MODIFIED_ON;
public static final String ES_NESTED_FIELD_POLICIES_NAME = ES_NESTED_FIELD_PREFIX_POLICIES + ES_FIELD_NAME;
public static final String ES_NESTED_FIELD_POLICIES_ORDER_INDEX = ES_NESTED_FIELD_PREFIX_POLICIES + ES_FIELD_ORDER_INDEX;
public static final String ES_NESTED_FIELD_POLICIES_POLICY_IMPL = ES_NESTED_FIELD_PREFIX_POLICIES + ES_FIELD_POLICY_IMPL;
public static final String ES_NESTED_FIELD_POLICIES_POLICY_JSON_CONFIG = ES_NESTED_FIELD_PREFIX_POLICIES + ES_FIELD_POLICY_JSON_CONFIG;
public static final String ES_NESTED_FIELD_CLIENTS_CLIENT_ID = ES_NESTED_FIELD_PREFIX_CLIENTS + ES_FIELD_CLIENT_ID;
public static final String ES_NESTED_FIELD_CLIENTS_ORGANIZATION_ID = ES_NESTED_FIELD_PREFIX_CLIENTS + ES_FIELD_ORGANIZATION_ID;
public static final String ES_NESTED_FIELD_GATEWAYS_GATEWAY_ID = ES_NESTED_FIELD_PREFIX_GATEWAYS + ES_FIELD_GATEWAY_ID;
public static final String ES_NESTED_FIELD_PLANS_PLAN_ID = ES_NESTED_FIELD_PREFIX_PLANS + ES_FIELD_PLAN_ID;
public static final String ES_NESTED_FIELD_PLANS_VERSION = ES_NESTED_FIELD_PREFIX_PLANS + ES_FIELD_VERSION;
public static final String ES_NESTED_FIELD_CONTRACTS_API_ID = ES_NESTED_FIELD_PREFIX_CONTRACTS + ES_FIELD_API_ID;
public static final String ES_NESTED_FIELD_CONTRACTS_API_ORGANIZATION_ID = ES_NESTED_FIELD_PREFIX_CONTRACTS + ES_FIELD_API_ORGANIZATION_ID;
public static final String ES_NESTED_FIELD_CONTRACTS_API_VERSION = ES_NESTED_FIELD_PREFIX_CONTRACTS + ES_FIELD_API_VERSION;
public static final String ES_NESTED_FIELD_CONTRACTS_PLAN = ES_NESTED_FIELD_PREFIX_CONTRACTS + ES_FIELD_PLAN;
public static final String ES_NESTED_FIELD_CONTRACTS_POLICIES_POLICY_IMPL = ES_NESTED_FIELD_PREFIX_CONTRACTS + ES_NESTED_FIELD_POLICIES_POLICY_IMPL;
public static final String ES_NESTED_FIELD_CONTRACTS_POLICIES_POLICY_JSON_CONFIG = ES_NESTED_FIELD_PREFIX_CONTRACTS + ES_NESTED_FIELD_POLICIES_POLICY_JSON_CONFIG;
public static final String ES_NESTED_FIELD_TEMPLATES_TEMPLATE = ES_NESTED_FIELD_PREFIX_TEMPLATES + ES_FIELD_TEMPLATE;
public static final String ES_MAPPING_TYPE_BINARY = "binary";
public static final String ES_MAPPING_TYPE_BOOLEAN = "boolean";
public static final String ES_MAPPING_TYPE_DATE = "date";
public static final String ES_MAPPING_TYPE_IP = "ip";
public static final String ES_MAPPING_TYPE_KEYWORD = "keyword";
public static final String ES_MAPPING_TYPE_LONG = "long";
public static final String ES_MAPPING_TYPE_OBJECT = "object";
public static final String ES_MAPPING_TYPE_TEXT = "text";
// caches the es version read from the property file
private static Properties esVersions;
/**
* Reads the elasticsearch version properties from the maven-generated properties file.
*
* @return the elasticsearch version properties
*/
public static Properties getEsVersion() {
if (esVersions == null) {
Properties p = new Properties();
try (InputStream is = EsConstants.class.getResourceAsStream("apiman-es.properties")) {
p.load(is);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
esVersions = p;
}
return esVersions;
}
}
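/*
 * Editor's sketch (not part of the original file): prints a few values composed from the
 * constants above, purely to make the nested-field naming scheme and the manager index
 * postfixes visible. Nothing here is an additional apiman API.
 */
class EsConstantsNamingSketch {
public static void main(String[] args) {
// Nested fields are "<prefix><field>", e.g. "policies.name" or "contracts.apiId".
System.out.println(EsConstants.ES_NESTED_FIELD_POLICIES_NAME);      // policies.name
System.out.println(EsConstants.ES_NESTED_FIELD_CONTRACTS_API_ID);   // contracts.apiId
System.out.println(EsConstants.ES_NESTED_FIELD_TEMPLATES_TEMPLATE); // templates.template
// The manager index postfixes enumerate the per-entity indices that accompany apiman_manager.
for (String postfix : EsConstants.MANAGER_INDEX_POSTFIXES) {
System.out.println(EsConstants.MANAGER_INDEX_NAME + " -> " + postfix);
}
}
}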
|
|
/**
* Copyright 2013-2019 the original author or authors from the Jeddict project (https://jeddict.github.io/).
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package io.github.jeddict.relation.mapper.widget.column;
import io.github.jeddict.jpa.modeler.rules.attribute.AttributeValidator;
import io.github.jeddict.jpa.modeler.rules.entity.SQLKeywords;
import io.github.jeddict.jpa.modeler.widget.FlowPinWidget;
import io.github.jeddict.relation.mapper.initializer.RelationMapperScene;
import io.github.jeddict.relation.mapper.initializer.RelationMapperUtil;
import static io.github.jeddict.relation.mapper.initializer.RelationMapperUtil.COLUMN;
import static io.github.jeddict.relation.mapper.initializer.RelationMapperUtil.COLUMN_ICON_PATH;
import io.github.jeddict.relation.mapper.spec.DBColumn;
import io.github.jeddict.relation.mapper.spec.DBTable;
import io.github.jeddict.relation.mapper.widget.api.IColumnWidget;
import io.github.jeddict.relation.mapper.widget.flow.ReferenceFlowWidget;
import io.github.jeddict.relation.mapper.widget.table.TableWidget;
import io.github.jeddict.settings.diagram.ClassDiagramSettings;
import io.github.jeddict.settings.view.AttributeViewAs;
import java.awt.Image;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import javax.swing.JMenuItem;
import io.github.jeddict.util.StringUtils;
import org.netbeans.modeler.widget.context.ContextPaletteModel;
import org.netbeans.modeler.widget.node.IPNodeWidget;
import static org.netbeans.modeler.widget.node.IWidgetStateHandler.StateType.ERROR;
import org.netbeans.modeler.widget.pin.info.PinWidgetInfo;
/**
*
* @author Gaurav Gupta
*/
public abstract class ColumnWidget<E extends DBColumn> extends FlowPinWidget<E, RelationMapperScene> implements IColumnWidget {
private final List<ReferenceFlowWidget> referenceFlowWidget = new ArrayList<>();
public ColumnWidget(RelationMapperScene scene, IPNodeWidget nodeWidget, PinWidgetInfo pinWidgetInfo) {
super(scene, nodeWidget, pinWidgetInfo);
this.setImage(RelationMapperUtil.COLUMN);
}
public void visualizeDataType() {
AttributeViewAs viewAs = ClassDiagramSettings.getDataType();
DBColumn column = this.getBaseElementSpec();
String dataType = column.getDataType();
if (null != viewAs) {
switch (viewAs) {
case CLASS_FQN:
case SIMPLE_CLASS_NAME:
dataType = dataType + "(" + column.getSize() + ")";
break;
case SHORT_CLASS_NAME:
// skip the size suffix
break;
case NONE:
return;
default:
break;
}
}
visualizeDataType(dataType);
}
public void setDatatypeTooltip() {
DBColumn column = this.getBaseElementSpec();
StringBuilder writer = new StringBuilder();
writer.append(column.getDataType());
if (column.getSize() != 0) {
writer.append('(').append(column.getSize()).append(')');
}
this.setToolTipText(writer.toString());
}
@Override
public void setLabel(String label) {
if (label != null && !label.trim().isEmpty()) {
this.setPinName(label.replaceAll("\\s+", ""));
}
}
@Override
public void init() {
validateName(this.getName());
visualizeDataType();
setDatatypeTooltip();
}
@Override
public void destroy() {
}
public TableWidget getTableWidget() {
return (TableWidget) this.getPNodeWidget();
}
public boolean addReferenceFlowWidget(ReferenceFlowWidget flowWidget) {
return getReferenceFlowWidget().add(flowWidget);
}
public boolean removeReferenceFlowWidget(ReferenceFlowWidget flowWidget) {
return getReferenceFlowWidget().remove(flowWidget);
}
@Override
public ContextPaletteModel getContextPaletteModel() {
return null;
}
/**
* @return the referenceFlowWidget
*/
public List<ReferenceFlowWidget> getReferenceFlowWidget() {
return referenceFlowWidget;
}
@Override
public void setName(String name) {
if (StringUtils.isNotBlank(name)) {
this.name = name.replaceAll("\\s+", "");
if (this.getModelerScene().getModelerFile().isLoaded()) {
updateName(this.name);
}
} else {
setDefaultName();
}
validateName(this.name);
}
/**
* Called when the developer deletes the value.
*/
protected void setDefaultName() {
if (!prePersistName()) {
return;
}
this.name = evaluateName();
if (this.getModelerScene().getModelerFile().isLoaded()) {
updateName(null);
}
setLabel(name);
}
abstract protected String evaluateName();
abstract protected void updateName(String newName);
/**
* Listener called before the name is persisted; useful when the process
* should be skipped.
*/
protected boolean prePersistName() {
return true;
}
/**
* Called when the value is changed via the property panel. Override it if
* multiple name properties are available.
*/
protected void setPropertyName(String name) {
if (!prePersistName()) {
return;
}
this.name = name;
validateName(name);
setLabel(name);
}
protected void setMultiPropertyName(String name) {
if (!prePersistName()) {
return;
}
this.name = evaluateName();
validateName(name);
setLabel(name);
}
protected void validateName(String name) {
if (SQLKeywords.isSQL99ReservedKeyword(name)) {
getSignalManager().fire(ERROR, AttributeValidator.ATTRIBUTE_COLUMN_NAME_WITH_RESERVED_SQL_KEYWORD);
} else {
getSignalManager().clear(ERROR, AttributeValidator.ATTRIBUTE_COLUMN_NAME_WITH_RESERVED_SQL_KEYWORD);
}
DBTable tableSpec = (DBTable) this.getTableWidget().getBaseElementSpec();
if (tableSpec.findColumns(name).size() > 1) {
getSignalManager().fire(ERROR, AttributeValidator.NON_UNIQUE_COLUMN_NAME);
} else {
getSignalManager().clear(ERROR, AttributeValidator.NON_UNIQUE_COLUMN_NAME);
}
}
protected void validateTableName(String name) {
if (name != null && !name.trim().isEmpty()) {
if (SQLKeywords.isSQL99ReservedKeyword(name)) {
getSignalManager().fire(ERROR, AttributeValidator.ATTRIBUTE_TABLE_NAME_WITH_RESERVED_SQL_KEYWORD);
} else {
getSignalManager().clear(ERROR, AttributeValidator.ATTRIBUTE_TABLE_NAME_WITH_RESERVED_SQL_KEYWORD);
}
} else {
getSignalManager().clear(ERROR, AttributeValidator.ATTRIBUTE_TABLE_NAME_WITH_RESERVED_SQL_KEYWORD);
}
}
@Override
protected List<JMenuItem> getPopupMenuItemList() {
List<JMenuItem> menuItemList = new LinkedList<>();
menuItemList.add(getPropertyMenu());
return menuItemList;
}
@Override
public String getIconPath() {
return COLUMN_ICON_PATH;
}
@Override
public Image getIcon() {
return COLUMN;
}
}
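/*
 * Editor's sketch (not part of the original file): a minimal concrete subclass showing the two
 * hooks ColumnWidget leaves abstract - evaluateName() for the default name and updateName() to
 * push a rename into the underlying spec. It assumes FlowPinWidget/IColumnWidget impose no
 * further abstract methods, so treat it as an outline rather than working Jeddict code.
 */
class BasicColumnWidget extends ColumnWidget<DBColumn> {
BasicColumnWidget(RelationMapperScene scene, IPNodeWidget nodeWidget, PinWidgetInfo pinWidgetInfo) {
super(scene, nodeWidget, pinWidgetInfo);
}
@Override
protected String evaluateName() {
// Placeholder default; real widgets derive this from the mapped attribute spec.
return "column";
}
@Override
protected void updateName(String newName) {
// Push the rename into the underlying DBColumn spec here; the concrete setter is not
// shown in this file, so it is intentionally left out of the sketch.
}
}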
|
|
/*
* Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership. Crate licenses
* this file to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial agreement.
*/
package org.elasticsearch.action.bulk;
import com.carrotsearch.hppc.cursors.IntCursor;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import io.crate.Constants;
import io.crate.exceptions.Exceptions;
import io.crate.metadata.settings.CrateSettings;
import io.crate.planner.symbol.Symbol;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.create.BulkCreateIndicesRequest;
import org.elasticsearch.action.admin.indices.create.BulkCreateIndicesResponse;
import org.elasticsearch.action.admin.indices.create.TransportBulkCreateIndicesAction;
import org.elasticsearch.action.support.AutoCreateIndex;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.IndexMissingException;
import javax.annotation.Nullable;
import java.util.*;
import java.util.concurrent.CancellationException;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
/**
* Processor to do bulk inserts, similar to {@link org.elasticsearch.action.bulk.BulkProcessor}
* but less flexible (it only supports IndexRequests).
*
* If the bulk thread pool queue is full, retries are made and
* the {@link #add} method will start to block.
*/
public class SymbolBasedBulkShardProcessor<Request extends BulkProcessorRequest, Response extends BulkProcessorResponse<?>> {
public static final int MAX_CREATE_INDICES_BULK_SIZE = 100;
private final boolean autoCreateIndices;
private final Predicate<String> shouldAutocreateIndexPredicate;
private final int bulkSize;
private final int createIndicesBulkSize;
private final Map<ShardId, Request> requestsByShard = new HashMap<>();
private final AtomicInteger globalCounter = new AtomicInteger(0);
private final AtomicInteger requestItemCounter = new AtomicInteger(0);
private final AtomicInteger pending = new AtomicInteger(0);
private final Semaphore executeLock = new Semaphore(1);
private final SettableFuture<BitSet> result;
private final AtomicReference<Throwable> failure = new AtomicReference<>();
private final BitSet responses;
private final Object responsesLock = new Object();
private volatile boolean closed = false;
private final ClusterService clusterService;
private final TransportBulkCreateIndicesAction transportBulkCreateIndicesAction;
private final AutoCreateIndex autoCreateIndex;
private final AtomicInteger pendingNewIndexRequests = new AtomicInteger(0);
private final Map<String, List<PendingRequest>> requestsForNewIndices = new HashMap<>();
private final Set<String> indicesCreated = new HashSet<>();
private final BulkRetryCoordinatorPool bulkRetryCoordinatorPool;
private final BulkRequestBuilder<Request> requestBuilder;
private final BulkRequestExecutor<Request, Response> requestExecutor;
private static final ESLogger LOGGER = Loggers.getLogger(SymbolBasedBulkShardProcessor.class);
public SymbolBasedBulkShardProcessor(ClusterService clusterService,
TransportBulkCreateIndicesAction transportBulkCreateIndicesAction,
Settings settings,
BulkRetryCoordinatorPool bulkRetryCoordinatorPool,
boolean autoCreateIndices,
int bulkSize,
BulkRequestBuilder<Request> requestBuilder,
BulkRequestExecutor<Request, Response> requestExecutor) {
this.bulkRetryCoordinatorPool = bulkRetryCoordinatorPool;
this.clusterService = clusterService;
this.autoCreateIndices = autoCreateIndices;
this.bulkSize = bulkSize;
this.createIndicesBulkSize = Math.min(bulkSize, MAX_CREATE_INDICES_BULK_SIZE);
this.autoCreateIndex = new AutoCreateIndex(settings);
this.transportBulkCreateIndicesAction = transportBulkCreateIndicesAction;
this.shouldAutocreateIndexPredicate = new Predicate<String>() {
@Override
public boolean apply(@Nullable String input) {
assert input != null;
return autoCreateIndex.shouldAutoCreate(input, SymbolBasedBulkShardProcessor.this.clusterService.state());
}
};
responses = new BitSet();
result = SettableFuture.create();
this.requestExecutor = requestExecutor;
this.requestBuilder = requestBuilder;
}
public boolean add(String indexName,
String id,
@Nullable Symbol[] assignments,
@Nullable Object[] missingAssignments,
@Nullable String routing,
@Nullable Long version) {
assert id != null : "id must not be null";
pending.incrementAndGet();
Throwable throwable = failure.get();
if (throwable != null) {
result.setException(throwable);
return false;
}
ShardId shardId = shardId(indexName, id, routing);
if (shardId == null) {
addRequestForNewIndex(indexName, id, assignments, missingAssignments, routing, version);
} else {
try {
// will only block if retries/writer are active
bulkRetryCoordinatorPool.coordinator(shardId).retryLock().acquireReadLock();
} catch (InterruptedException e) {
Thread.interrupted();
} catch (Throwable e) {
setFailure(e);
return false;
}
partitionRequestByShard(shardId, id, assignments, missingAssignments, routing, version);
}
executeIfNeeded();
return true;
}
public boolean add(String indexName,
String id,
Object[] missingAssignments,
@Nullable String routing,
@Nullable Long version) {
return add(indexName, id, null, missingAssignments, routing, version);
}
public boolean addForExistingShard(ShardId shardId,
String id,
@Nullable Symbol[] assignments,
@Nullable Object[] missingAssignments,
@Nullable String routing,
@Nullable Long version) {
assert id != null : "id must not be null";
pending.incrementAndGet();
Throwable throwable = failure.get();
if (throwable != null) {
result.setException(throwable);
return false;
}
// will only block if retries/writer are active
try {
bulkRetryCoordinatorPool.coordinator(shardId).retryLock().acquireReadLock();
} catch (InterruptedException e) {
Thread.interrupted();
} catch (Throwable e) {
setFailure(e);
return false;
}
partitionRequestByShard(shardId, id, assignments, missingAssignments, routing, version);
executeIfNeeded();
return true;
}
@Nullable
private ShardId shardId(String indexName,
String id,
@Nullable String routing) {
ShardId shardId = null;
try {
shardId = clusterService.operationRouting().indexShards(
clusterService.state(),
indexName,
Constants.DEFAULT_MAPPING_TYPE,
id,
routing
).shardId();
} catch (IndexMissingException e) {
if (!autoCreateIndices) {
throw e;
}
}
return shardId;
}
private void addRequestForNewIndex(String indexName,
String id,
@Nullable Symbol[] assignments,
@Nullable Object[] missingAssignments,
@Nullable String routing,
@Nullable Long version) {
synchronized (requestsForNewIndices) {
List<PendingRequest> pendingRequestList = requestsForNewIndices.get(indexName);
if (pendingRequestList == null) {
pendingRequestList = new ArrayList<>();
requestsForNewIndices.put(indexName, pendingRequestList);
}
pendingRequestList.add(new PendingRequest(indexName, id, assignments,
missingAssignments, routing, version));
pendingNewIndexRequests.incrementAndGet();
}
}
private void partitionRequestByShard(ShardId shardId,
String id,
@Nullable Symbol[] assignments,
@Nullable Object[] missingAssignments,
@Nullable String routing,
@Nullable Long version) {
try {
executeLock.acquire();
Request request = requestsByShard.get(shardId);
if (request == null) {
request = requestBuilder.newRequest(shardId);
requestsByShard.put(shardId, request);
}
requestItemCounter.getAndIncrement();
requestBuilder.addItem(
request,
shardId,
globalCounter.getAndIncrement(),
id,
assignments,
missingAssignments,
routing,
version
);
} catch (InterruptedException e) {
Thread.interrupted();
} finally {
executeLock.release();
}
}
private void executeRequests() {
try {
executeLock.acquire();
for (Iterator<Map.Entry<ShardId, Request>> it = requestsByShard.entrySet().iterator(); it.hasNext(); ) {
if (failure.get() != null) {
return;
}
Map.Entry<ShardId, Request> entry = it.next();
final Request request = entry.getValue();
final ShardId shardId = entry.getKey();
requestExecutor.execute(request, new ActionListener<Response>() {
@Override
public void onResponse(Response response) {
processResponse(response);
}
@Override
public void onFailure(Throwable e) {
processFailure(e, shardId, request, false);
}
});
it.remove();
}
} catch (InterruptedException e) {
Thread.interrupted();
} catch (Throwable e) {
setFailure(e);
} finally {
requestItemCounter.set(0);
executeLock.release();
}
}
private void createPendingIndices() {
final List<PendingRequest> pendings = new ArrayList<>();
final Set<String> indices;
synchronized (requestsForNewIndices) {
indices = ImmutableSet.copyOf(
Iterables.filter(
Sets.difference(requestsForNewIndices.keySet(), indicesCreated),
shouldAutocreateIndexPredicate)
);
for (Map.Entry<String, List<PendingRequest>> entry : requestsForNewIndices.entrySet()) {
pendings.addAll(entry.getValue());
}
requestsForNewIndices.clear();
pendingNewIndexRequests.set(0);
}
if (pendings.size() > 0 || indices.size() > 0) {
LOGGER.debug("create {} pending indices in bulk...", indices.size());
TimeValue timeout = CrateSettings.BULK_PARTITION_CREATION_TIMEOUT.extractTimeValue(clusterService.state().metaData().settings());
if (timeout.millis() == 0L) {
// apply default
// wait up to 10 seconds for every single create index request
timeout = new TimeValue(indices.size() * 10L, TimeUnit.SECONDS);
}
BulkCreateIndicesRequest bulkCreateIndicesRequest = new BulkCreateIndicesRequest(indices)
.timeout(timeout);
FutureCallback<Void> indicesCreatedCallback = new FutureCallback<Void>() {
@Override
public void onSuccess(@Nullable Void result) {
if (failure.get() != null) {
return;
}
trace("applying pending requests for created indices...");
for (final PendingRequest pendingRequest : pendings) {
// add pending requests for created indices
ShardId shardId = shardId(pendingRequest.indexName, pendingRequest.id, pendingRequest.routing);
partitionRequestByShard(shardId, pendingRequest.id,
pendingRequest.assignments,
pendingRequest.missingAssignments,
pendingRequest.routing, pendingRequest.version);
}
trace("added %d pending requests, lets see if we can execute them", pendings.size());
executeRequestsIfNeeded();
}
@Override
public void onFailure(Throwable t) {
setFailure(t);
}
};
if (indices.isEmpty()) {
indicesCreatedCallback.onSuccess(null);
} else {
// initialize callback for when all indices are created
IndicesCreatedObserver.waitForIndicesCreated(clusterService, LOGGER, indices, indicesCreatedCallback, timeout);
// initiate the request
transportBulkCreateIndicesAction.execute(bulkCreateIndicesRequest, new ActionListener<BulkCreateIndicesResponse>() {
@Override
public void onResponse(BulkCreateIndicesResponse response) {
indicesCreated.addAll(indices);
}
@Override
public void onFailure(Throwable t) {
LOGGER.error("error when creating pending indices in bulk", t);
setFailure(ExceptionsHelper.unwrapCause(t));
}
});
}
}
}
public ListenableFuture<BitSet> result() {
return result;
}
public void close() {
trace("close");
closed = true;
executeIfNeeded();
if (pending.get() == 0) {
setResult();
}
}
public void kill() {
failure.compareAndSet(null, new CancellationException());
result.cancel(true);
}
private void setFailure(Throwable e) {
failure.compareAndSet(null, e);
result.setException(e);
}
private void setResult() {
Throwable throwable = failure.get();
if (throwable == null) {
result.set(responses);
} else {
result.setException(throwable);
}
}
private void setResultIfDone(int successes) {
if (pending.addAndGet(-successes) == 0 && closed) {
setResult();
}
}
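// Flush conditions: requests are sent once the processor is closed, once enough requests
// for not-yet-created indices have accumulated (createIndicesBulkSize / bulkSize), or once
// a shard-level request holds bulkSize items, and only while no failure has been recorded.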
private void executeIfNeeded() {
if ((closed
|| requestsForNewIndices.size() >= createIndicesBulkSize
|| pendingNewIndexRequests.get() >= bulkSize) && failure.get() == null) {
createPendingIndices();
}
executeRequestsIfNeeded();
}
private void executeRequestsIfNeeded() {
if ((closed || requestItemCounter.get() >= bulkSize) && failure.get() == null) {
executeRequests();
}
}
private void processResponse(Response response) {
trace("executing response...");
for (int i = 0; i < response.itemIndices().size(); i++) {
int location = response.itemIndices().get(i);
synchronized (responsesLock) {
responses.set(location, response.responses().get(i) != null);
}
}
setResultIfDone(response.itemIndices().size());
trace("response executed.");
}
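// Failure handling: rejected executions (EsRejectedExecutionException) are retried through the
// shard's BulkRetryCoordinator; any other failure marks the affected item locations as failed
// and records the error for the overall result.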
private void processFailure(Throwable e, final ShardId shardId, final Request request, boolean repeatingRetry) {
trace("execute failure");
e = Exceptions.unwrap(e);
BulkRetryCoordinator coordinator;
try {
coordinator = bulkRetryCoordinatorPool.coordinator(shardId);
} catch (Throwable coordinatorException) {
setFailure(coordinatorException);
return;
}
if (e instanceof EsRejectedExecutionException) {
LOGGER.trace("{}, retrying", e.getMessage());
coordinator.retry(request, requestExecutor, repeatingRetry, new ActionListener<Response>() {
@Override
public void onResponse(Response response) {
processResponse(response);
}
@Override
public void onFailure(Throwable e) {
processFailure(e, shardId, request, true);
}
});
} else {
if (repeatingRetry) {
// release failed retry
try {
coordinator.retryLock().releaseWriteLock();
} catch (InterruptedException ex) {
Thread.currentThread().interrupt(); // restore the interrupt flag instead of swallowing it
}
}
for (IntCursor intCursor : request.itemIndices()) {
synchronized (responsesLock) {
responses.set(intCursor.value, false);
}
}
setFailure(e);
}
}
private void trace(String message, Object ... args) {
if (LOGGER.isTraceEnabled()) {
LOGGER.trace("SymbolBasedBulkShardProcessor: pending: {}; {}",
pending.get(),
String.format(Locale.ENGLISH, message, args));
}
}
static class PendingRequest {
private final String indexName;
private final String id;
@Nullable
private final Symbol[] assignments;
@Nullable
private final Object[] missingAssignments;
@Nullable
private final String routing;
@Nullable
private final Long version;
PendingRequest(String indexName,
String id,
@Nullable Symbol[] assignments,
@Nullable Object[] missingAssignments,
@Nullable String routing,
@Nullable Long version) {
this.indexName = indexName;
this.id = id;
this.assignments = assignments;
this.missingAssignments = missingAssignments;
this.routing = routing;
this.version = version;
}
}
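/**
 * Builds the shard-level bulk requests used by this processor: {@link #newRequest(ShardId)}
 * creates an empty request for a shard and {@code addItem} appends a single item, identified
 * by its location in the overall bulk, together with its assignments, routing and version.
 */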
public interface BulkRequestBuilder<Request extends BulkProcessorRequest> {
Request newRequest(ShardId shardId);
void addItem(Request existingRequest,
ShardId shardId,
int location,
String id,
@Nullable Symbol[] assignments,
@Nullable Object[] missingAssignments,
@Nullable String routing,
@Nullable Long version);
}
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.searchafter;
import org.apache.lucene.document.LatLonDocValuesField;
import org.apache.lucene.search.FieldComparator;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.SortedNumericSortField;
import org.apache.lucene.search.SortedSetSortField;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.math.BigDecimal;
import java.util.Collections;
import static org.elasticsearch.search.searchafter.SearchAfterBuilder.extractSortType;
import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
public class SearchAfterBuilderTests extends ESTestCase {
private static final int NUMBER_OF_TESTBUILDERS = 20;
private static SearchAfterBuilder randomSearchAfterBuilder() throws IOException {
int numSearchFrom = randomIntBetween(1, 10);
SearchAfterBuilder searchAfterBuilder = new SearchAfterBuilder();
Object[] values = new Object[numSearchFrom];
for (int i = 0; i < numSearchFrom; i++) {
int branch = randomInt(10);
switch (branch) {
case 0:
values[i] = randomInt();
break;
case 1:
values[i] = randomFloat();
break;
case 2:
values[i] = randomLong();
break;
case 3:
values[i] = randomDouble();
break;
case 4:
values[i] = randomAlphaOfLengthBetween(5, 20);
break;
case 5:
values[i] = randomBoolean();
break;
case 6:
values[i] = randomByte();
break;
case 7:
values[i] = randomShort();
break;
case 8:
values[i] = new Text(randomAlphaOfLengthBetween(5, 20));
break;
case 9:
values[i] = null;
break;
case 10:
values[i] = randomBigInteger();
break;
}
}
searchAfterBuilder.setSortValues(values);
return searchAfterBuilder;
}
// We build a json version of the search_after first in order to
// ensure that every number type remains the same before/after xcontent (de)serialization.
// This is not a problem because the final type of each field value is extracted from the associated sort field.
// This little trick ensures that equals and hashCode are the same when using the xcontent serialization.
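// For illustration (the values vary per run), the generated body looks like:
// { "search_after" : [ 3, 0.25, "foo", null ] }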
private SearchAfterBuilder randomJsonSearchFromBuilder() throws IOException {
int numSearchAfter = randomIntBetween(1, 10);
XContentBuilder jsonBuilder = XContentFactory.jsonBuilder();
jsonBuilder.startObject();
jsonBuilder.startArray("search_after");
for (int i = 0; i < numSearchAfter; i++) {
int branch = randomInt(9);
switch (branch) {
case 0:
jsonBuilder.value(randomInt());
break;
case 1:
jsonBuilder.value(randomFloat());
break;
case 2:
jsonBuilder.value(randomLong());
break;
case 3:
jsonBuilder.value(randomDouble());
break;
case 4:
jsonBuilder.value(randomAlphaOfLengthBetween(5, 20));
break;
case 5:
jsonBuilder.value(randomBoolean());
break;
case 6:
jsonBuilder.value(randomByte());
break;
case 7:
jsonBuilder.value(randomShort());
break;
case 8:
jsonBuilder.value(new Text(randomAlphaOfLengthBetween(5, 20)));
break;
case 9:
jsonBuilder.nullValue();
break;
}
}
jsonBuilder.endArray();
jsonBuilder.endObject();
try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(jsonBuilder))) {
parser.nextToken();
parser.nextToken();
parser.nextToken();
return SearchAfterBuilder.fromXContent(parser);
}
}
private static SearchAfterBuilder serializedCopy(SearchAfterBuilder original) throws IOException {
return copyWriteable(original, new NamedWriteableRegistry(Collections.emptyList()), SearchAfterBuilder::new);
}
public void testSerialization() throws Exception {
for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
SearchAfterBuilder original = randomSearchAfterBuilder();
SearchAfterBuilder deserialized = serializedCopy(original);
assertEquals(deserialized, original);
assertEquals(deserialized.hashCode(), original.hashCode());
assertNotSame(deserialized, original);
}
}
public void testEqualsAndHashcode() throws Exception {
for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
// TODO add equals tests with mutating the original object
checkEqualsAndHashCode(randomSearchAfterBuilder(), SearchAfterBuilderTests::serializedCopy);
}
}
public void testFromXContent() throws Exception {
for (int runs = 0; runs < 20; runs++) {
SearchAfterBuilder searchAfterBuilder = randomJsonSearchFromBuilder();
XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
if (randomBoolean()) {
builder.prettyPrint();
}
builder.startObject();
searchAfterBuilder.innerToXContent(builder);
builder.endObject();
try (XContentParser parser = createParser(shuffleXContent(builder))) {
parser.nextToken();
parser.nextToken();
parser.nextToken();
SearchAfterBuilder secondSearchAfterBuilder = SearchAfterBuilder.fromXContent(parser);
assertNotSame(searchAfterBuilder, secondSearchAfterBuilder);
assertEquals(searchAfterBuilder, secondSearchAfterBuilder);
assertEquals(searchAfterBuilder.hashCode(), secondSearchAfterBuilder.hashCode());
}
}
}
public void testFromXContentIllegalType() throws Exception {
for (XContentType type : XContentType.values()) {
// BIG_DECIMAL
// ignore json and yaml, they parse floating point numbers as floats/doubles
if (type == XContentType.JSON || type == XContentType.YAML) {
continue;
}
XContentBuilder xContent = XContentFactory.contentBuilder(type);
xContent.startObject()
.startArray("search_after")
.value(new BigDecimal("9223372036854776003.3"))
.endArray()
.endObject();
try (XContentParser parser = createParser(xContent)) {
parser.nextToken();
parser.nextToken();
parser.nextToken();
IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> SearchAfterBuilder.fromXContent(parser));
assertThat(exc.getMessage(), containsString("BIG_DECIMAL"));
}
}
}
public void testWithNullArray() throws Exception {
SearchAfterBuilder builder = new SearchAfterBuilder();
try {
builder.setSortValues(null);
fail("Should fail on null array.");
} catch (NullPointerException e) {
assertThat(e.getMessage(), equalTo("Values cannot be null."));
}
}
public void testWithEmptyArray() throws Exception {
SearchAfterBuilder builder = new SearchAfterBuilder();
try {
builder.setSortValues(new Object[0]);
fail("Should fail on empty array.");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("Values must contains at least one value."));
}
}
/**
* Explicitly tests what you can't list as a sortValue. What you can list is tested by {@link #randomSearchAfterBuilder()}.
*/
public void testBadTypes() throws IOException {
randomSearchFromBuilderWithSortValueThrows(new Object());
randomSearchFromBuilderWithSortValueThrows(new GeoPoint(0, 0));
randomSearchFromBuilderWithSortValueThrows(randomSearchAfterBuilder());
randomSearchFromBuilderWithSortValueThrows(this);
}
private static void randomSearchFromBuilderWithSortValueThrows(Object containing) throws IOException {
// Get a valid one
SearchAfterBuilder builder = randomSearchAfterBuilder();
// Now replace its values with one containing the passed in object
Object[] values = builder.getSortValues();
values[between(0, values.length - 1)] = containing;
Exception e = expectThrows(IllegalArgumentException.class, () -> builder.setSortValues(values));
assertEquals(e.getMessage(), "Can't handle search_after field value of type [" + containing.getClass() + "]");
}
public void testExtractSortType() throws Exception {
SortField.Type type = extractSortType(LatLonDocValuesField.newDistanceSort("field", 0.0, 180.0));
assertThat(type, equalTo(SortField.Type.DOUBLE));
IndexFieldData.XFieldComparatorSource source = new IndexFieldData.XFieldComparatorSource(null, MultiValueMode.MIN, null) {
@Override
public SortField.Type reducedType() {
return SortField.Type.STRING;
}
@Override
public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) {
return null;
}
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format,
int bucketSize, BucketedSort.ExtraData extra) {
return null;
}
};
type = extractSortType(new SortField("field", source));
assertThat(type, equalTo(SortField.Type.STRING));
type = extractSortType(new SortedNumericSortField("field", SortField.Type.DOUBLE));
assertThat(type, equalTo(SortField.Type.DOUBLE));
type = extractSortType(new SortedSetSortField("field", false));
assertThat(type, equalTo(SortField.Type.STRING));
}
}
|
|
package de.danoeh.antennapod.dialog;
import android.content.res.TypedArray;
import android.graphics.Color;
import android.os.Bundle;
import android.support.v4.app.ActivityCompat;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ListView;
import android.widget.Toast;
import com.joanzapata.iconify.Icon;
import com.joanzapata.iconify.IconDrawable;
import com.joanzapata.iconify.fonts.FontAwesomeIcons;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import de.danoeh.antennapod.R;
import de.danoeh.antennapod.core.dialog.DownloadRequestErrorDialogCreator;
import de.danoeh.antennapod.core.feed.FeedItem;
import de.danoeh.antennapod.core.storage.DBTasks;
import de.danoeh.antennapod.core.storage.DBWriter;
import de.danoeh.antennapod.core.storage.DownloadRequestException;
import de.danoeh.antennapod.core.util.LongList;
public class EpisodesApplyActionFragment extends Fragment {
private static final String TAG = "EpisodeActionFragment";
private ListView mListView;
private ArrayAdapter<String> mAdapter;
private Button btnAddToQueue;
private Button btnMarkAsPlayed;
private Button btnMarkAsUnplayed;
private Button btnDownload;
private Button btnDelete;
private final Map<Long,FeedItem> idMap;
private final List<FeedItem> episodes;
private final List<String> titles = new ArrayList<>();
private final LongList checkedIds = new LongList();
private MenuItem mSelectToggle;
private int textColor;
public EpisodesApplyActionFragment(List<FeedItem> episodes) {
this.episodes = episodes;
this.idMap = new HashMap<>(episodes.size());
for(FeedItem episode : episodes) {
this.idMap.put(episode.getId(), episode);
}
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setHasOptionsMenu(true);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.episodes_apply_action_fragment, container, false);
mListView = (ListView) view.findViewById(android.R.id.list);
mListView.setChoiceMode(ListView.CHOICE_MODE_MULTIPLE);
mListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long rowId) {
long id = episodes.get(position).getId();
if (checkedIds.contains(id)) {
checkedIds.remove(id);
} else {
checkedIds.add(id);
}
refreshCheckboxes();
}
});
for(FeedItem episode : episodes) {
titles.add(episode.getTitle());
}
mAdapter = new ArrayAdapter<>(getActivity(),
android.R.layout.simple_list_item_multiple_choice, titles);
mListView.setAdapter(mAdapter);
checkAll();
btnAddToQueue = (Button) view.findViewById(R.id.btnAddToQueue);
btnAddToQueue.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
queueChecked();
}
});
btnMarkAsPlayed = (Button) view.findViewById(R.id.btnMarkAsPlayed);
btnMarkAsPlayed.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
markedCheckedPlayed();
}
});
btnMarkAsUnplayed = (Button) view.findViewById(R.id.btnMarkAsUnplayed);
btnMarkAsUnplayed.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
markedCheckedUnplayed();
}
});
btnDownload = (Button) view.findViewById(R.id.btnDownload);
btnDownload.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
downloadChecked();
}
});
btnDelete = (Button) view.findViewById(R.id.btnDelete);
btnDelete.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
deleteChecked();
}
});
return view;
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
super.onCreateOptionsMenu(menu, inflater);
inflater.inflate(R.menu.episodes_apply_action_options, menu);
int[] attrs = { android.R.attr.textColor };
TypedArray ta = getActivity().obtainStyledAttributes(attrs);
textColor = ta.getColor(0, Color.GRAY);
ta.recycle();
menu.findItem(R.id.sort).setIcon(new IconDrawable(getActivity(),
FontAwesomeIcons.fa_sort).color(textColor).actionBarSize());
mSelectToggle = menu.findItem(R.id.select_toggle);
mSelectToggle.setOnMenuItemClickListener(new MenuItem.OnMenuItemClickListener() {
@Override
public boolean onMenuItemClick(MenuItem item) {
if (checkedIds.size() == episodes.size()) {
checkNone();
} else {
checkAll();
}
return true;
}
});
menu.findItem(R.id.select_options).setIcon(new IconDrawable(getActivity(),
FontAwesomeIcons.fa_caret_down).color(textColor).actionBarSize());
}
@Override
public void onPrepareOptionsMenu (Menu menu) {
Icon icon;
if(checkedIds.size() == episodes.size()) {
icon = FontAwesomeIcons.fa_check_square_o;
} else if(checkedIds.size() == 0) {
icon = FontAwesomeIcons.fa_square_o;
} else {
icon = FontAwesomeIcons.fa_minus_square_o;
}
mSelectToggle.setIcon(new IconDrawable(getActivity(), icon).color(textColor).actionBarSize());
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int resId = 0;
switch(item.getItemId()) {
case R.id.select_options:
return true;
case R.id.check_all:
checkAll();
resId = R.string.selected_all_label;
break;
case R.id.check_none:
checkNone();
resId = R.string.deselected_all_label;
break;
case R.id.check_played:
checkPlayed(true);
resId = R.string.selected_played_label;
break;
case R.id.check_unplayed:
checkPlayed(false);
resId = R.string.selected_unplayed_label;
break;
case R.id.check_downloaded:
checkDownloaded(true);
resId = R.string.selected_downloaded_label;
break;
case R.id.check_not_downloaded:
checkDownloaded(false);
resId = R.string.selected_not_downloaded_label;
break;
case R.id.sort_title_a_z:
sortByTitle(false);
return true;
case R.id.sort_title_z_a:
sortByTitle(true);
return true;
case R.id.sort_date_new_old:
sortByDate(true);
return true;
case R.id.sort_date_old_new:
sortByDate(false);
return true;
case R.id.sort_duration_long_short:
sortByDuration(true);
return true;
case R.id.sort_duration_short_long:
sortByDuration(false);
return true;
}
if(resId != 0) {
Toast.makeText(getActivity(), resId, Toast.LENGTH_SHORT).show();
return true;
} else {
return false;
}
}
private void sortByTitle(final boolean reverse) {
Collections.sort(episodes, new Comparator<FeedItem>() {
@Override
public int compare(FeedItem lhs, FeedItem rhs) {
if (reverse) {
return -1 * lhs.getTitle().compareTo(rhs.getTitle());
} else {
return lhs.getTitle().compareTo(rhs.getTitle());
}
}
});
refreshTitles();
refreshCheckboxes();
}
private void sortByDate(final boolean reverse) {
Collections.sort(episodes, new Comparator<FeedItem>() {
@Override
public int compare(FeedItem lhs, FeedItem rhs) {
if (lhs.getPubDate() == null) {
return -1;
} else if (rhs.getPubDate() == null) {
return 1;
}
int code = lhs.getPubDate().compareTo(rhs.getPubDate());
if (reverse) {
return -1 * code;
} else {
return code;
}
}
});
refreshTitles();
refreshCheckboxes();
}
private void sortByDuration(final boolean reverse) {
Collections.sort(episodes, new Comparator<FeedItem>() {
@Override
public int compare(FeedItem lhs, FeedItem rhs) {
int ordering;
if (!lhs.hasMedia()) {
ordering = 1;
} else if (!rhs.hasMedia()) {
ordering = -1;
} else {
ordering = lhs.getMedia().getDuration() - rhs.getMedia().getDuration();
}
if(reverse) {
return -1 * ordering;
} else {
return ordering;
}
}
});
refreshTitles();
refreshCheckboxes();
}
private void checkAll() {
for (FeedItem episode : episodes) {
if (!checkedIds.contains(episode.getId())) {
checkedIds.add(episode.getId());
}
}
refreshCheckboxes();
}
private void checkNone() {
checkedIds.clear();
refreshCheckboxes();
}
private void checkPlayed(boolean isPlayed) {
for (FeedItem episode : episodes) {
if(episode.isPlayed() == isPlayed) {
if(!checkedIds.contains(episode.getId())) {
checkedIds.add(episode.getId());
}
} else {
if(checkedIds.contains(episode.getId())) {
checkedIds.remove(episode.getId());
}
}
}
refreshCheckboxes();
}
private void checkDownloaded(boolean isDownloaded) {
for (FeedItem episode : episodes) {
if(episode.hasMedia() && episode.getMedia().isDownloaded() == isDownloaded) {
if(!checkedIds.contains(episode.getId())) {
checkedIds.add(episode.getId());
}
} else {
if(checkedIds.contains(episode.getId())) {
checkedIds.remove(episode.getId());
}
}
}
refreshCheckboxes();
}
private void refreshTitles() {
titles.clear();
for(FeedItem episode : episodes) {
titles.add(episode.getTitle());
}
mAdapter.notifyDataSetChanged();
}
private void refreshCheckboxes() {
for (int i = 0; i < episodes.size(); i++) {
FeedItem episode = episodes.get(i);
boolean checked = checkedIds.contains(episode.getId());
mListView.setItemChecked(i, checked);
}
ActivityCompat.invalidateOptionsMenu(EpisodesApplyActionFragment.this.getActivity());
}
private void queueChecked() {
LongList orderedIds = new LongList();
for(FeedItem episode : episodes) {
if(checkedIds.contains(episode.getId())) {
orderedIds.add(episode.getId());
}
}
DBWriter.addQueueItem(getActivity(), false, orderedIds.toArray());
close();
}
private void markedCheckedPlayed() {
DBWriter.markItemPlayed(FeedItem.PLAYED, checkedIds.toArray());
close();
}
private void markedCheckedUnplayed() {
DBWriter.markItemPlayed(FeedItem.UNPLAYED, checkedIds.toArray());
close();
}
private void downloadChecked() {
// download the checked episodes in the same order as they are currently displayed
List<FeedItem> toDownload = new ArrayList<FeedItem>(checkedIds.size());
for(FeedItem episode : episodes) {
if(checkedIds.contains(episode.getId())) {
toDownload.add(episode);
}
}
try {
DBTasks.downloadFeedItems(getActivity(), toDownload.toArray(new FeedItem[0]));
} catch (DownloadRequestException e) {
e.printStackTrace();
DownloadRequestErrorDialogCreator.newRequestErrorDialog(getActivity(), e.getMessage());
}
close();
}
private void deleteChecked() {
for(long id : checkedIds.toArray()) {
FeedItem episode = idMap.get(id);
if(episode.hasMedia()) {
DBWriter.deleteFeedMediaOfItem(getActivity(), episode.getMedia().getId());
}
}
close();
}
private void close() {
getActivity().getSupportFragmentManager().popBackStack();
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.infinispan.springboot;
import java.util.Map;
import java.util.Set;
import javax.annotation.Generated;
import org.apache.camel.component.infinispan.InfinispanComponent;
import org.apache.camel.component.infinispan.InfinispanCustomListener;
import org.apache.camel.component.infinispan.InfinispanOperation;
import org.apache.camel.component.infinispan.InfinispanQueryBuilder;
import org.apache.camel.spring.boot.ComponentConfigurationPropertiesCommon;
import org.infinispan.commons.api.BasicCacheContainer;
import org.infinispan.context.Flag;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.DeprecatedConfigurationProperty;
import org.springframework.boot.context.properties.NestedConfigurationProperty;
/**
* For reading/writing from/to Infinispan distributed key/value store and data
* grid.
*
* Generated by camel-package-maven-plugin - do not edit this file!
*/
@Generated("org.apache.camel.maven.packaging.SpringBootAutoConfigurationMojo")
@ConfigurationProperties(prefix = "camel.component.infinispan")
public class InfinispanComponentConfiguration
extends
ComponentConfigurationPropertiesCommon {
/**
* The default configuration shared among endpoints.
*/
private InfinispanConfigurationNestedConfiguration configuration;
/**
* The default cache container.
*/
@NestedConfigurationProperty
private BasicCacheContainer cacheContainer;
/**
* Whether the component should resolve property placeholders on itself when
* starting. Only properties which are of String type can use property
* placeholders.
*/
private Boolean resolvePropertyPlaceholders = true;
public InfinispanConfigurationNestedConfiguration getConfiguration() {
return configuration;
}
public void setConfiguration(
InfinispanConfigurationNestedConfiguration configuration) {
this.configuration = configuration;
}
public BasicCacheContainer getCacheContainer() {
return cacheContainer;
}
public void setCacheContainer(BasicCacheContainer cacheContainer) {
this.cacheContainer = cacheContainer;
}
public Boolean getResolvePropertyPlaceholders() {
return resolvePropertyPlaceholders;
}
public void setResolvePropertyPlaceholders(
Boolean resolvePropertyPlaceholders) {
this.resolvePropertyPlaceholders = resolvePropertyPlaceholders;
}
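// Illustrative only (not part of the generated source): with the prefix declared above,
// Spring Boot binds properties such as
//
//   camel.component.infinispan.resolve-property-placeholders=true
//   camel.component.infinispan.configuration.hosts=localhost:11222
//   camel.component.infinispan.configuration.operation=PUT
//
// where the nested "configuration.*" keys map onto the fields of
// InfinispanConfigurationNestedConfiguration below; the host/port value is a placeholder.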
public static class InfinispanConfigurationNestedConfiguration {
public static final Class CAMEL_NESTED_CLASS = org.apache.camel.component.infinispan.InfinispanConfiguration.class;
/**
* The operation to perform.
*
* @deprecated replaced by {@link #setOperation(InfinispanOperation)}
*/
@Deprecated
private String command = "PUT";
/**
* The operation to perform.
*/
private InfinispanOperation operation = InfinispanOperation.PUT;
private String hosts;
private BasicCacheContainer cacheContainer;
private Boolean sync = true;
private Boolean clusteredListener = false;
/**
* Specifies the set of event types to be registered by the consumer.
* Multiple events can be separated by comma.
* <p/>
* The possible event types are: CACHE_ENTRY_ACTIVATED,
* CACHE_ENTRY_PASSIVATED, CACHE_ENTRY_VISITED, CACHE_ENTRY_LOADED,
* CACHE_ENTRY_EVICTED, CACHE_ENTRY_CREATED, CACHE_ENTRY_REMOVED,
* CACHE_ENTRY_MODIFIED, TRANSACTION_COMPLETED, TRANSACTION_REGISTERED,
* CACHE_ENTRY_INVALIDATED, DATA_REHASHED, TOPOLOGY_CHANGED,
* PARTITION_STATUS_CHANGED
*/
private Set eventTypes;
private InfinispanCustomListener customListener;
/**
* Specifies the query builder.
*/
private InfinispanQueryBuilder queryBuilder;
private Flag[] flags;
private String configurationUri;
/**
* Implementation specific properties for the CacheManager
*/
private Map configurationProperties;
/**
* The CacheContainer configuration
*/
private Object cacheContainerConfiguration;
/**
* Store the operation result in a header instead of the message body.
* By default, resultHeader == null and the query result is stored in
* the message body; any existing content in the message body is
* discarded. If resultHeader is set, the value is used as the name of
* the header to store the query result and the original message body is
* preserved. This value can be overridden by an in message header
* named: CamelInfinispanOperationResultHeader
*/
private Object resultHeader;
@Deprecated
@DeprecatedConfigurationProperty
public String getCommand() {
return command;
}
@Deprecated
public void setCommand(String command) {
this.command = command;
}
public InfinispanOperation getOperation() {
return operation;
}
public void setOperation(InfinispanOperation operation) {
this.operation = operation;
}
public String getHosts() {
return hosts;
}
public void setHosts(String hosts) {
this.hosts = hosts;
}
public BasicCacheContainer getCacheContainer() {
return cacheContainer;
}
public void setCacheContainer(BasicCacheContainer cacheContainer) {
this.cacheContainer = cacheContainer;
}
public Boolean getSync() {
return sync;
}
public void setSync(Boolean sync) {
this.sync = sync;
}
public Boolean getClusteredListener() {
return clusteredListener;
}
public void setClusteredListener(Boolean clusteredListener) {
this.clusteredListener = clusteredListener;
}
public Set getEventTypes() {
return eventTypes;
}
public void setEventTypes(Set eventTypes) {
this.eventTypes = eventTypes;
}
public InfinispanCustomListener getCustomListener() {
return customListener;
}
public void setCustomListener(InfinispanCustomListener customListener) {
this.customListener = customListener;
}
public InfinispanQueryBuilder getQueryBuilder() {
return queryBuilder;
}
public void setQueryBuilder(InfinispanQueryBuilder queryBuilder) {
this.queryBuilder = queryBuilder;
}
public Flag[] getFlags() {
return flags;
}
public void setFlags(Flag[] flags) {
this.flags = flags;
}
public String getConfigurationUri() {
return configurationUri;
}
public void setConfigurationUri(String configurationUri) {
this.configurationUri = configurationUri;
}
public Map getConfigurationProperties() {
return configurationProperties;
}
public void setConfigurationProperties(Map configurationProperties) {
this.configurationProperties = configurationProperties;
}
public Object getCacheContainerConfiguration() {
return cacheContainerConfiguration;
}
public void setCacheContainerConfiguration(
Object cacheContainerConfiguration) {
this.cacheContainerConfiguration = cacheContainerConfiguration;
}
public Object getResultHeader() {
return resultHeader;
}
public void setResultHeader(Object resultHeader) {
this.resultHeader = resultHeader;
}
}
}
|
|
package edu.isi.karma.modeling.steiner.topk;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.Queue;
import java.util.Set;
import java.util.TreeSet;
/**
* This class represents an implementation of the Distance Network Heuristic for
* Steiner tree approximation.
*
*
* @author kasneci
*
*/
public class DNHfromMM extends TopKSteinertrees{
public DNHfromMM(){
}
/**
*
* @param terminals given terminal nodes
* @throws Exception
*/
public DNHfromMM(TreeSet<SteinerNode> terminals) throws Exception{
super(terminals);
}
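// Usage sketch (illustrative, not part of the original class): the caller supplies the
// terminal nodes of the underlying graph and asks for the approximate Steiner tree, e.g.
//
//   TreeSet<SteinerNode> terminals = ...; // terminals present in the loaded graph
//   DNHfromMM dnh = new DNHfromMM(terminals);
//   ApprSteinerTree approxTree = dnh.getDNHApprTree(); // or dnh.getBestTree() to fill the result queue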
/**
* builds the Distance Network ...
* @return Distance Network (graph that contains shortest paths between
* the terminals)
*/
public Map<String, SteinerNode> getDistanceNetworkOnTerminals(){
Map<String, SteinerNode> distanceNetwork = new HashMap<String, SteinerNode>();
Map<String, SteinerNode> processedNodes = new HashMap<String, SteinerNode>();
Map<String, SteinerNode> visitedNodes = new HashMap<String, SteinerNode>();
Queue<SteinerNode> queue= new LinkedList<SteinerNode>();
Set<SteinerNode> processedTerminals = new TreeSet<SteinerNode>();
Set<SteinerNode> copyOfTerminals= new TreeSet<SteinerNode>();
for(SteinerNode n: terminalNodes){
queue = new LinkedList<SteinerNode>();
processedNodes = new HashMap<String, SteinerNode>();
visitedNodes = new HashMap<String, SteinerNode>();
copyOfTerminals= new TreeSet<SteinerNode>();
processedTerminals.add(n);
for(SteinerNode node: terminalNodes)
if(!processedTerminals.contains(node))
copyOfTerminals.add(node.copy());
copyOfTerminals.remove(n);
queue.offer(n);
while (!queue.isEmpty() && !copyOfTerminals.isEmpty()) {
SteinerNode node =queue.poll();
visitedNodes.put(node.name(), node);
processedNodes.put(node.name(), node);
if(copyOfTerminals.contains(node)){
copyOfTerminals.remove(node);
continue;
}
for(SteinerEdge e: graph.get(node)){
accessedEdges++;
SteinerNode newNode = null;
if(!e.sourceNode.equals(node)){
newNode=new SteinerNode(e.sourceNode.name());
newNode.wasArg1=true;
}
else{
newNode=new SteinerNode(e.sinkNode.name());
newNode.wasArg1=false;
}
// skip neighbors that have already been settled
if(processedNodes.containsKey(newNode.name())) continue;
newNode.relationToPredecessor=e.getEdgeLabel();
newNode.weightToPredecessor=e.getWeight();
//check whether the neighbor has been visited
SteinerNode v = visitedNodes.get(newNode.name());
if(v!=null){
newNode=v;
if(newNode.distancesToSources[0]>node.distancesToSources[0]+e.getWeight()){
newNode.distancesToSources[0]=node.distancesToSources[0]+e.getWeight();
newNode.predecessor= node;
newNode.relationToPredecessor=e.getEdgeLabel();
newNode.weightToPredecessor=e.getWeight();
}
}
// in case n has not been visited
else{
newNode.distancesToSources[0]=node.distancesToSources[0]+e.getWeight();
newNode.predecessor= node;
queue.offer(newNode);
visitedNodes.put(newNode.name(), newNode);
}
}
}
stabilizeNodes(visitedNodes, distanceNetwork);
}
return distanceNetwork;
}
/**
* reconstructs the paths of the distance network
* @param visitedNodes nodes visited by the single source shortest paths iterator
* @param dnhGraph the distance network with reconstructed paths
*/
protected void stabilizeNodes(Map<String, SteinerNode> visitedNodes, Map<String, SteinerNode>dnhGraph){
for(SteinerNode node: terminalNodes){
if(visitedNodes.containsKey(node.name())&&
visitedNodes.get(node.name()).predecessor!=null){
SteinerNode startNode = visitedNodes.get(node.name());
while(startNode.predecessor!=null){
if(!dnhGraph.containsKey(startNode.name())){
dnhGraph.put(startNode.name(), startNode);
}
if(!dnhGraph.containsKey(startNode.predecessor.name())){
dnhGraph.put(startNode.predecessor.name(), startNode.predecessor);
}
dnhGraph.get(startNode.name()).addEdge(dnhGraph.get(
startNode.predecessor.name()),
!startNode.wasArg1, startNode.relationToPredecessor,
startNode.weightToPredecessor);
startNode=startNode.predecessor;
if(terminalNodes.contains(startNode))break;
}
}
}
}
/**
* retrieves the induced graph of a subgraph of an underlying graph :-)
* (i.e. given a subgraph G' of an underlying graph G all edges of G that are connected
* to nodes of G' are included in G')
* @param dnhGraph subgraph that is going to be extended (induced w.r.t.
* the underlying graph)
* @return induced graph
*/
public Map<String, SteinerNode> getInducedGraph (Map<String, SteinerNode> dnhGraph){
Map<String, SteinerNode> copyOfCompleteGraph = new HashMap<String, SteinerNode>();
for(String name: dnhGraph.keySet()){
copyOfCompleteGraph.put(name, dnhGraph.get(name).copy());
}
for(SteinerNode n: copyOfCompleteGraph.values())
for(SteinerEdge e: graph.get(n)){
accessedEdges++;
if(!dnhGraph.containsKey(e.sourceNode.name())){
dnhGraph.put(e.sourceNode.name(), new SteinerNode(e.sourceNode.name()));
}
if(!dnhGraph.containsKey(e.sinkNode.name())){
dnhGraph.put(e.sinkNode.name(), new SteinerNode(e.sinkNode.name()));
}
dnhGraph.get(e.sourceNode.name()).addEdge(dnhGraph.get(e.sinkNode.name()),
false, e.getEdgeLabel(), e.getWeight());
}
clean(dnhGraph);
return dnhGraph;
}
/**
* finds the MST of a given graph by exploiting Prim's algorithm
* @param inducedGraph the given graph
* @return MST
*/
public Map<String, SteinerNode> getMinSpanningTree(Map<String, SteinerNode> inducedGraph){
//alg by prim
// initialize an empty MST
Map <String, SteinerNode> minSpanningTree = new HashMap<String, SteinerNode>();
// start with any node of the induced graph
SteinerNode startNode = new SteinerNode(((TreeSet<SteinerNode>)terminalNodes).first().name());
minSpanningTree.put(startNode.name(), startNode);
/** Input: A connected weighted graph with vertices V and edges E.
* Initialize: Vnew = {x}, where x is an arbitrary node (starting point) from V, Enew= {}
* repeat until Vnew=V:
Choose edge (u,v) from E with minimal weight such that u is in Vnew and v is not (if there are multiple edges with the same weight, choose arbitrarily)
Add v to Vnew, add (u,v) to Enew
* Output: Vnew and Enew describe the minimal spanning tree
*/
while(minSpanningTree.size()<inducedGraph.size()){
Queue<SteinerEdge> edgesToConsider= new PriorityQueue<SteinerEdge>(3000, new Comparator<SteinerEdge>(){
public int compare(SteinerEdge e1, SteinerEdge e2){
if(e1.getWeight()<e2.getWeight())return -1;
if(e1.getWeight()==e2.getWeight())return 0;
else return 1;
}
});
for(SteinerNode n: minSpanningTree.values()){
for(SteinerEdge e: inducedGraph.get(n.name()).edges){
accessedEdges++;
if(!minSpanningTree.containsKey(n.getNeighborInEdge(e).name())){
edgesToConsider.offer(e);
}
}
}
SteinerEdge e = edgesToConsider.poll();
SteinerNode neighbor=null;
boolean isNeighborArg1=false;
if(minSpanningTree.containsKey(e.sourceNode.name())
&& !minSpanningTree.containsKey(e.sinkNode.name())){
neighbor=new SteinerNode(e.sinkNode.name());
isNeighborArg1=false;
}
if(minSpanningTree.containsKey(e.sinkNode.name())
&& !minSpanningTree.containsKey(e.sourceNode.name())){
neighbor=new SteinerNode(e.sourceNode.name());
isNeighborArg1=true;
}
minSpanningTree.put(neighbor.name(), neighbor);
minSpanningTree.get(neighbor.getNeighborInEdge(e).name()).addEdge(
minSpanningTree.get(neighbor.name()), isNeighborArg1,
e.getEdgeLabel(), e.getWeight());
edgesToConsider=null;
}
clean(minSpanningTree);
return minSpanningTree;
}
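/**
 * Prunes dangling paths: repeatedly removes non-terminal nodes that have only one incident
 * edge (together with that edge) until every remaining non-terminal node has degree of at
 * least two.
 * @param map the graph to clean, keyed by node name
 */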
public void clean (Map<String, SteinerNode> map){
LinkedList<SteinerNode> outliers = new LinkedList<SteinerNode>();
for(SteinerNode n: map.values())
if(!terminalNodes.contains(n)&& n.edges.size()==1)
outliers.add(n);
for(SteinerNode n: outliers)
while(n.edges.size()<2 && !terminalNodes.contains(n)){
SteinerNode node=n.getNeighbors().first();
node.edges.remove(node.getEdgeToNode(n));
n.edges.remove(n.getEdgeToNode(node));
map.remove(n.getNodeId());
n=node;
}
}
/**
*
* @return the approximation Steiner tree by the Distance Network Heuristic
*/
public ApprSteinerTree getDNHApprTree(){
Map<String, SteinerNode> tree= getMinSpanningTree(
getInducedGraph(getMinSpanningTree(getDistanceNetworkOnTerminals())));
TreeSet<SteinerNode> steinerNodes= new TreeSet<SteinerNode>();
steinerNodes.addAll(tree.values());
return new ApprSteinerTree(terminalNodes, steinerNodes);
}
/**
* finds the best tree and puts it into the resultQueue as a resultGraph
*
*/
public void getBestTree(){
resultQueue.add(getDNHApprTree().toResultGraph());
}
@Override
public void buildTaxonomicGraph() throws Exception {
// TODO Auto-generated method stub
}
@Override
public Queue<ResultGraph> getTopKTrees(int k) throws Exception {
// TODO Auto-generated method stub
return null;
}
}
|
|
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.mergejoin;
import org.pentaho.di.core.CheckResult;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.injection.Injection;
import org.pentaho.di.core.injection.InjectionSupported;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.TransMeta.TransformationType;
import org.pentaho.di.trans.step.*;
import org.pentaho.di.trans.step.errorhandling.Stream;
import org.pentaho.di.trans.step.errorhandling.StreamIcon;
import org.pentaho.di.trans.step.errorhandling.StreamInterface;
import org.pentaho.di.trans.step.errorhandling.StreamInterface.StreamType;
import org.pentaho.di.trans.steps.StreamingSteps;
import org.pentaho.metastore.api.IMetaStore;
import org.w3c.dom.Node;
import java.util.List;
/*
* @author Biswapesh
* @since 24-nov-2006
*/
@InjectionSupported(localizationPrefix = "MergeJoin.Injection.")
public class MergeJoinMeta extends BaseStepMeta implements StepMetaInterface {
private static Class<?> PKG = MergeJoinMeta.class; // for i18n purposes, needed by Translator2!!
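// The three arrays below are parallel: for join_types[i], one_optionals[i] / two_optionals[i]
// indicate whether a match in the first / second input stream is optional
// (e.g. for "LEFT OUTER" the second stream is the optional side).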
public static final String[] join_types = {"INNER", "LEFT OUTER", "RIGHT OUTER", "FULL OUTER"};
public static final boolean[] one_optionals = {false, false, true, true};
public static final boolean[] two_optionals = {false, true, false, true};
@Injection(name = "JOIN_TYPE")
private String joinType;
@Injection(name = "KEY_FIELD1")
private String[] keyFields1;
@Injection(name = "KEY_FIELD2")
private String[] keyFields2;
private StreamingSteps inputSteps;
/**
* The supported join types are INNER, LEFT OUTER, RIGHT OUTER and FULL OUTER
*
* @return The type of join
*/
public String getJoinType() {
return joinType;
}
/**
* Sets the type of join
*
* @param joinType The type of join, e.g. INNER/FULL OUTER
*/
public void setJoinType(String joinType) {
this.joinType = joinType;
}
/**
* @return Returns the keyFields1.
*/
public String[] getKeyFields1() {
return keyFields1;
}
/**
* @param keyFields1 The keyFields1 to set.
*/
public void setKeyFields1(String[] keyFields1) {
this.keyFields1 = keyFields1;
}
/**
* @return Returns the keyFields2.
*/
public String[] getKeyFields2() {
return keyFields2;
}
/**
* @param keyFields2 The keyFields2 to set.
*/
public void setKeyFields2(String[] keyFields2) {
this.keyFields2 = keyFields2;
}
public boolean excludeFromRowLayoutVerification() {
return true;
}
public MergeJoinMeta() {
super(); // allocate BaseStepMeta
}
public void loadXML(Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore) throws KettleXMLException {
readData(stepnode);
}
public void allocate(int nrKeys1, int nrKeys2) {
keyFields1 = new String[nrKeys1];
keyFields2 = new String[nrKeys2];
}
public Object clone() {
MergeJoinMeta retval = (MergeJoinMeta) super.clone();
int nrKeys1 = keyFields1.length;
int nrKeys2 = keyFields2.length;
retval.allocate(nrKeys1, nrKeys2);
System.arraycopy(keyFields1, 0, retval.keyFields1, 0, nrKeys1);
System.arraycopy(keyFields2, 0, retval.keyFields2, 0, nrKeys2);
StepIOMetaInterface stepIOMeta = new StepIOMeta(true, true, false, false, false, false);
List<StreamInterface> infoStreams = getStepIOMeta().getInfoStreams();
for (StreamInterface infoStream : infoStreams) {
stepIOMeta.addStream(new Stream(infoStream));
}
retval.ioMeta = stepIOMeta;
return retval;
}
public String getXML() {
StringBuilder retval = new StringBuilder();
List<StreamInterface> infoStreams = getStepIOMeta().getInfoStreams();
retval.append(XMLHandler.addTagValue("join_type", getJoinType()));
// retval.append(XMLHandler.addTagValue("step1", infoStreams.get(0).getStepname()));
// retval.append(XMLHandler.addTagValue("step2", infoStreams.get(1).getStepname()));
retval.append(XMLHandler.addTagValue("step1",
inputSteps == null ? infoStreams.get(0).getStepname() : inputSteps.getStepName()));
retval.append(XMLHandler.addTagValue("step2",
inputSteps == null ? infoStreams.get(1).getStepname() : inputSteps.getStepName(1)));
retval.append(" <keys_1>" + Const.CR);
for (int i = 0; i < keyFields1.length; i++) {
retval.append(" " + XMLHandler.addTagValue("key", keyFields1[i]));
}
retval.append(" </keys_1>" + Const.CR);
retval.append(" <keys_2>" + Const.CR);
for (int i = 0; i < keyFields2.length; i++) {
retval.append(" " + XMLHandler.addTagValue("key", keyFields2[i]));
}
retval.append(" </keys_2>" + Const.CR);
return retval.toString();
}
private void readData(Node stepnode) throws KettleXMLException {
try {
Node keysNode1 = XMLHandler.getSubNode(stepnode, "keys_1");
Node keysNode2 = XMLHandler.getSubNode(stepnode, "keys_2");
int nrKeys1 = XMLHandler.countNodes(keysNode1, "key");
int nrKeys2 = XMLHandler.countNodes(keysNode2, "key");
allocate(nrKeys1, nrKeys2);
for (int i = 0; i < nrKeys1; i++) {
Node keynode = XMLHandler.getSubNodeByNr(keysNode1, "key", i);
keyFields1[i] = XMLHandler.getNodeValue(keynode);
}
for (int i = 0; i < nrKeys2; i++) {
Node keynode = XMLHandler.getSubNodeByNr(keysNode2, "key", i);
keyFields2[i] = XMLHandler.getNodeValue(keynode);
}
List<StreamInterface> infoStreams = getStepIOMeta().getInfoStreams();
infoStreams.get(0).setSubject(XMLHandler.getTagValue(stepnode, "step1"));
infoStreams.get(1).setSubject(XMLHandler.getTagValue(stepnode, "step2"));
inputSteps = new StreamingSteps(this);
joinType = XMLHandler.getTagValue(stepnode, "join_type");
} catch (Exception e) {
throw new KettleXMLException(
BaseMessages.getString(PKG, "MergeJoinMeta.Exception.UnableToLoadStepInfo"), e);
}
}
public void setDefault() {
joinType = join_types[0];
allocate(0, 0);
}
public void readRep(Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases) throws KettleException {
try {
int nrKeys1 = rep.countNrStepAttributes(id_step, "keys_1");
int nrKeys2 = rep.countNrStepAttributes(id_step, "keys_2");
allocate(nrKeys1, nrKeys2);
for (int i = 0; i < nrKeys1; i++) {
keyFields1[i] = rep.getStepAttributeString(id_step, i, "keys_1");
}
for (int i = 0; i < nrKeys2; i++) {
keyFields2[i] = rep.getStepAttributeString(id_step, i, "keys_2");
}
List<StreamInterface> infoStreams = getStepIOMeta().getInfoStreams();
infoStreams.get(0).setSubject(rep.getStepAttributeString(id_step, "step1"));
infoStreams.get(1).setSubject(rep.getStepAttributeString(id_step, "step2"));
inputSteps = new StreamingSteps(this);
joinType = rep.getStepAttributeString(id_step, "join_type");
} catch (Exception e) {
throw new KettleException(BaseMessages.getString(
PKG, "MergeJoinMeta.Exception.UnexpectedErrorReadingStepInfo"), e);
}
}
@Override
public void searchInfoAndTargetSteps(List<StepMeta> steps) {
for (StreamInterface stream : getStepIOMeta().getInfoStreams()) {
stream.setStepMeta(StepMeta.findStep(steps, (String) stream.getSubject()));
}
}
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step) throws KettleException {
try {
for (int i = 0; i < keyFields1.length; i++) {
rep.saveStepAttribute(id_transformation, id_step, i, "keys_1", keyFields1[i]);
}
for (int i = 0; i < keyFields2.length; i++) {
rep.saveStepAttribute(id_transformation, id_step, i, "keys_2", keyFields2[i]);
}
List<StreamInterface> infoStreams = getStepIOMeta().getInfoStreams();
// rep.saveStepAttribute(id_transformation, id_step, "step1", infoStreams.get(0).getStepname());
// rep.saveStepAttribute(id_transformation, id_step, "step2", infoStreams.get(1).getStepname());
rep.saveStepAttribute(id_transformation, id_step, "step1",
inputSteps == null ? infoStreams.get(0).getStepname() : inputSteps.getStepName());
rep.saveStepAttribute(id_transformation, id_step, "step2",
inputSteps == null ? infoStreams.get(1).getStepname() : inputSteps.getStepName(1));
rep.saveStepAttribute(id_transformation, id_step, "join_type", getJoinType());
} catch (Exception e) {
throw new KettleException(BaseMessages.getString(PKG, "MergeJoinMeta.Exception.UnableToSaveStepInfo")
+ id_step, e);
}
}
public void check(List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore) {
/*
* @todo Need to check for the following: 1) Join type must be one of INNER / LEFT OUTER / RIGHT OUTER / FULL OUTER
* 2) Number of input streams must be two (for now at least) 3) The field names of input streams must be unique
*/
CheckResult cr =
new CheckResult(CheckResultInterface.TYPE_RESULT_WARNING, BaseMessages.getString(
PKG, "MergeJoinMeta.CheckResult.StepNotVerified"), stepMeta);
remarks.add(cr);
}
public void getFields(RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore) throws KettleStepException {
// We don't have any input fields here in "r" as they are all info fields.
// So we just merge in the info fields.
//
if (info != null) {
for (int i = 0; i < info.length; i++) {
if (info[i] != null) {
r.mergeRowMeta(info[i], name);
}
}
}
for (int i = 0; i < r.size(); i++) {
ValueMetaInterface vmi = r.getValueMeta(i);
if (vmi != null && Utils.isEmpty(vmi.getName())) {
vmi.setOrigin(name);
}
}
return;
}
public StepInterface getStep(StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta tr,
Trans trans) {
return new MergeJoin(stepMeta, stepDataInterface, cnr, tr, trans);
}
public StepDataInterface getStepData() {
return new MergeJoinData();
}
/**
* Returns the Input/Output metadata for this step. The merge join step reads two info streams as input and produces one output stream.
*/
public StepIOMetaInterface getStepIOMeta() {
if (ioMeta == null) {
ioMeta = new StepIOMeta(true, true, false, false, false, false);
ioMeta.addStream(new Stream(StreamType.INFO, null, BaseMessages.getString(
PKG, "MergeJoinMeta.InfoStream.FirstStream.Description"), StreamIcon.INFO, null));
ioMeta.addStream(new Stream(StreamType.INFO, null, BaseMessages.getString(
PKG, "MergeJoinMeta.InfoStream.SecondStream.Description"), StreamIcon.INFO, null));
}
return ioMeta;
}
public void resetStepIoMeta() {
// Don't reset!
}
public TransformationType[] getSupportedTransformationTypes() {
return new TransformationType[]{TransformationType.Normal,};
}
}
|
|
/*===========================================================================
* Licensed Materials - Property of IBM
* "Restricted Materials of IBM"
*
* IBM SDK, Java(tm) Technology Edition, v8
* (C) Copyright IBM Corp. 1997, 2012. All Rights Reserved
*
* US Government Users Restricted Rights - Use, duplication or disclosure
* restricted by GSA ADP Schedule Contract with IBM Corp.
*===========================================================================
*/
/*
* Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*/
package java.util;
/**
* This class provides a skeletal implementation of the {@link List}
* interface to minimize the effort required to implement this interface
* backed by a "random access" data store (such as an array). For sequential
* access data (such as a linked list), {@link AbstractSequentialList} should
* be used in preference to this class.
*
* <p>To implement an unmodifiable list, the programmer needs only to extend
* this class and provide implementations for the {@link #get(int)} and
* {@link List#size() size()} methods.
*
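* <p>For example (an illustrative sketch, not part of the original documentation;
* the {@code ArrayView} name is hypothetical), an unmodifiable view over an array
* needs nothing more than:
* <pre>{@code
* class ArrayView<E> extends AbstractList<E> {
*     private final E[] data;                        // backing "random access" store
*     ArrayView(E[] data) { this.data = data; }
*     public E get(int index) { return data[index]; }
*     public int size() { return data.length; }
* }
* }</pre>
*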
* <p>To implement a modifiable list, the programmer must additionally
* override the {@link #set(int, Object) set(int, E)} method (which otherwise
* throws an {@code UnsupportedOperationException}). If the list is
* variable-size the programmer must additionally override the
* {@link #add(int, Object) add(int, E)} and {@link #remove(int)} methods.
*
* <p>The programmer should generally provide a void (no argument) and collection
* constructor, as per the recommendation in the {@link Collection} interface
* specification.
*
* <p>Unlike the other abstract collection implementations, the programmer does
* <i>not</i> have to provide an iterator implementation; the iterator and
* list iterator are implemented by this class, on top of the "random access"
* methods:
* {@link #get(int)},
* {@link #set(int, Object) set(int, E)},
* {@link #add(int, Object) add(int, E)} and
* {@link #remove(int)}.
*
* <p>The documentation for each non-abstract method in this class describes its
* implementation in detail. Each of these methods may be overridden if the
* collection being implemented admits a more efficient implementation.
*
* <p>This class is a member of the
* <a href="{@docRoot}/../technotes/guides/collections/index.html">
* Java Collections Framework</a>.
*
* @author Josh Bloch
* @author Neal Gafter
* @since 1.2
*/
public abstract class AbstractList<E> extends AbstractCollection<E> implements List<E> {
/**
* Sole constructor. (For invocation by subclass constructors, typically
* implicit.)
*/
protected AbstractList() {
}
/**
* Appends the specified element to the end of this list (optional
* operation).
*
* <p>Lists that support this operation may place limitations on what
* elements may be added to this list. In particular, some
* lists will refuse to add null elements, and others will impose
* restrictions on the type of elements that may be added. List
* classes should clearly specify in their documentation any restrictions
* on what elements may be added.
*
* <p>This implementation calls {@code add(size(), e)}.
*
* <p>Note that this implementation throws an
* {@code UnsupportedOperationException} unless
* {@link #add(int, Object) add(int, E)} is overridden.
*
* @param e element to be appended to this list
* @return {@code true} (as specified by {@link Collection#add})
* @throws UnsupportedOperationException if the {@code add} operation
* is not supported by this list
* @throws ClassCastException if the class of the specified element
* prevents it from being added to this list
* @throws NullPointerException if the specified element is null and this
* list does not permit null elements
* @throws IllegalArgumentException if some property of this element
* prevents it from being added to this list
*/
public boolean add(E e) {
add(size(), e);
return true;
}
/**
* {@inheritDoc}
*
* @throws IndexOutOfBoundsException {@inheritDoc}
*/
abstract public E get(int index);
/**
* {@inheritDoc}
*
* <p>This implementation always throws an
* {@code UnsupportedOperationException}.
*
* @throws UnsupportedOperationException {@inheritDoc}
* @throws ClassCastException {@inheritDoc}
* @throws NullPointerException {@inheritDoc}
* @throws IllegalArgumentException {@inheritDoc}
* @throws IndexOutOfBoundsException {@inheritDoc}
*/
public E set(int index, E element) {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*
* <p>This implementation always throws an
* {@code UnsupportedOperationException}.
*
* @throws UnsupportedOperationException {@inheritDoc}
* @throws ClassCastException {@inheritDoc}
* @throws NullPointerException {@inheritDoc}
* @throws IllegalArgumentException {@inheritDoc}
* @throws IndexOutOfBoundsException {@inheritDoc}
*/
public void add(int index, E element) {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*
* <p>This implementation always throws an
* {@code UnsupportedOperationException}.
*
* @throws UnsupportedOperationException {@inheritDoc}
* @throws IndexOutOfBoundsException {@inheritDoc}
*/
public E remove(int index) {
throw new UnsupportedOperationException();
}
// Search Operations
/**
* {@inheritDoc}
*
* <p>This implementation first gets a list iterator (with
* {@code listIterator()}). Then, it iterates over the list until the
* specified element is found or the end of the list is reached.
*
* @throws ClassCastException {@inheritDoc}
* @throws NullPointerException {@inheritDoc}
*/
public int indexOf(Object o) {
ListIterator<E> it = listIterator();
if (o==null) {
while (it.hasNext())
if (it.next()==null)
return it.previousIndex();
} else {
while (it.hasNext())
if (o.equals(it.next()))
return it.previousIndex();
}
return -1;
}
/**
* {@inheritDoc}
*
* <p>This implementation first gets a list iterator that points to the end
* of the list (with {@code listIterator(size())}). Then, it iterates
* backwards over the list until the specified element is found, or the
* beginning of the list is reached.
*
* @throws ClassCastException {@inheritDoc}
* @throws NullPointerException {@inheritDoc}
*/
public int lastIndexOf(Object o) {
ListIterator<E> it = listIterator(size());
if (o==null) {
while (it.hasPrevious())
if (it.previous()==null)
return it.nextIndex();
} else {
while (it.hasPrevious())
if (o.equals(it.previous()))
return it.nextIndex();
}
return -1;
}
// Bulk Operations
/**
* Removes all of the elements from this list (optional operation).
* The list will be empty after this call returns.
*
* <p>This implementation calls {@code removeRange(0, size())}.
*
* <p>Note that this implementation throws an
* {@code UnsupportedOperationException} unless {@code remove(int
* index)} or {@code removeRange(int fromIndex, int toIndex)} is
* overridden.
*
* @throws UnsupportedOperationException if the {@code clear} operation
* is not supported by this list
*/
public void clear() {
removeRange(0, size());
}
/**
* {@inheritDoc}
*
* <p>This implementation gets an iterator over the specified collection
* and iterates over it, inserting the elements obtained from the
* iterator into this list at the appropriate position, one at a time,
* using {@code add(int, E)}.
* Many implementations will override this method for efficiency.
*
* <p>Note that this implementation throws an
* {@code UnsupportedOperationException} unless
* {@link #add(int, Object) add(int, E)} is overridden.
*
* @throws UnsupportedOperationException {@inheritDoc}
* @throws ClassCastException {@inheritDoc}
* @throws NullPointerException {@inheritDoc}
* @throws IllegalArgumentException {@inheritDoc}
* @throws IndexOutOfBoundsException {@inheritDoc}
*/
public boolean addAll(int index, Collection<? extends E> c) {
rangeCheckForAdd(index);
boolean modified = false;
for (E e : c) {
add(index++, e);
modified = true;
}
return modified;
}
// Iterators
/**
* Returns an iterator over the elements in this list in proper sequence.
*
* <p>This implementation returns a straightforward implementation of the
* iterator interface, relying on the backing list's {@code size()},
* {@code get(int)}, and {@code remove(int)} methods.
*
* <p>Note that the iterator returned by this method will throw an
* {@link UnsupportedOperationException} in response to its
* {@code remove} method unless the list's {@code remove(int)} method is
* overridden.
*
* <p>This implementation can be made to throw runtime exceptions in the
* face of concurrent modification, as described in the specification
* for the (protected) {@link #modCount} field.
*
* @return an iterator over the elements in this list in proper sequence
*/
public Iterator<E> iterator() {
return new Itr();
}
/**
* {@inheritDoc}
*
* <p>This implementation returns {@code listIterator(0)}.
*
* @see #listIterator(int)
*/
public ListIterator<E> listIterator() {
return listIterator(0);
}
/**
* {@inheritDoc}
*
* <p>This implementation returns a straightforward implementation of the
* {@code ListIterator} interface that extends the implementation of the
* {@code Iterator} interface returned by the {@code iterator()} method.
* The {@code ListIterator} implementation relies on the backing list's
* {@code get(int)}, {@code set(int, E)}, {@code add(int, E)}
* and {@code remove(int)} methods.
*
* <p>Note that the list iterator returned by this implementation will
* throw an {@link UnsupportedOperationException} in response to its
* {@code remove}, {@code set} and {@code add} methods unless the
* list's {@code remove(int)}, {@code set(int, E)}, and
* {@code add(int, E)} methods are overridden.
*
* <p>This implementation can be made to throw runtime exceptions in the
* face of concurrent modification, as described in the specification for
* the (protected) {@link #modCount} field.
*
* @throws IndexOutOfBoundsException {@inheritDoc}
*/
public ListIterator<E> listIterator(final int index) {
rangeCheckForAdd(index);
return new ListItr(index);
}
private class Itr implements Iterator<E> {
/**
* Index of element to be returned by subsequent call to next.
*/
int cursor = 0;
/**
* Index of element returned by most recent call to next or
* previous. Reset to -1 if this element is deleted by a call
* to remove.
*/
int lastRet = -1;
/**
* The modCount value that the iterator believes that the backing
* List should have. If this expectation is violated, the iterator
* has detected concurrent modification.
*/
int expectedModCount = modCount;
public boolean hasNext() {
return cursor != size();
}
public E next() {
checkForComodification();
try {
int i = cursor;
E next = get(i);
lastRet = i;
cursor = i + 1;
return next;
} catch (IndexOutOfBoundsException e) {
checkForComodification();
throw new NoSuchElementException();
}
}
public void remove() {
if (lastRet < 0)
throw new IllegalStateException();
checkForComodification();
try {
AbstractList.this.remove(lastRet);
if (lastRet < cursor)
cursor--;
lastRet = -1;
expectedModCount = modCount;
} catch (IndexOutOfBoundsException e) {
throw new ConcurrentModificationException();
}
}
final void checkForComodification() {
if (modCount != expectedModCount)
throw new ConcurrentModificationException();
}
}
private class ListItr extends Itr implements ListIterator<E> {
ListItr(int index) {
cursor = index;
}
public boolean hasPrevious() {
return cursor != 0;
}
public E previous() {
checkForComodification();
try {
int i = cursor - 1;
E previous = get(i);
lastRet = cursor = i;
return previous;
} catch (IndexOutOfBoundsException e) {
checkForComodification();
throw new NoSuchElementException();
}
}
public int nextIndex() {
return cursor;
}
public int previousIndex() {
return cursor-1;
}
public void set(E e) {
if (lastRet < 0)
throw new IllegalStateException();
checkForComodification();
try {
AbstractList.this.set(lastRet, e);
expectedModCount = modCount;
} catch (IndexOutOfBoundsException ex) {
throw new ConcurrentModificationException();
}
}
public void add(E e) {
checkForComodification();
try {
int i = cursor;
AbstractList.this.add(i, e);
lastRet = -1;
cursor = i + 1;
expectedModCount = modCount;
} catch (IndexOutOfBoundsException ex) {
throw new ConcurrentModificationException();
}
}
}
/**
* {@inheritDoc}
*
* <p>This implementation returns a list that subclasses
* {@code AbstractList}. The subclass stores, in private fields, the
* offset of the subList within the backing list, the size of the subList
* (which can change over its lifetime), and the expected
* {@code modCount} value of the backing list. There are two variants
* of the subclass, one of which implements {@code RandomAccess}.
* If this list implements {@code RandomAccess} the returned list will
* be an instance of the subclass that implements {@code RandomAccess}.
*
* <p>The subclass's {@code set(int, E)}, {@code get(int)},
* {@code add(int, E)}, {@code remove(int)}, {@code addAll(int,
* Collection)} and {@code removeRange(int, int)} methods all
* delegate to the corresponding methods on the backing abstract list,
* after bounds-checking the index and adjusting for the offset. The
* {@code addAll(Collection c)} method merely returns {@code addAll(size,
* c)}.
*
* <p>The {@code listIterator(int)} method returns a "wrapper object"
* over a list iterator on the backing list, which is created with the
* corresponding method on the backing list. The {@code iterator} method
* merely returns {@code listIterator()}, and the {@code size} method
* merely returns the subclass's {@code size} field.
*
* <p>All methods first check to see if the actual {@code modCount} of
* the backing list is equal to its expected value, and throw a
* {@code ConcurrentModificationException} if it is not.
*
* @throws IndexOutOfBoundsException if an endpoint index value is out of range
* {@code (fromIndex < 0 || toIndex > size)}
* @throws IllegalArgumentException if the endpoint indices are out of order
* {@code (fromIndex > toIndex)}
*/
public List<E> subList(int fromIndex, int toIndex) {
return (this instanceof RandomAccess ?
new RandomAccessSubList<>(this, fromIndex, toIndex) :
new SubList<>(this, fromIndex, toIndex));
}
// Comparison and hashing
/**
* Compares the specified object with this list for equality. Returns
* {@code true} if and only if the specified object is also a list, both
* lists have the same size, and all corresponding pairs of elements in
* the two lists are <i>equal</i>. (Two elements {@code e1} and
* {@code e2} are <i>equal</i> if {@code (e1==null ? e2==null :
* e1.equals(e2))}.) In other words, two lists are defined to be
* equal if they contain the same elements in the same order.<p>
*
* This implementation first checks if the specified object is this
* list. If so, it returns {@code true}; if not, it checks if the
* specified object is a list. If not, it returns {@code false}; if so,
* it iterates over both lists, comparing corresponding pairs of elements.
* If any comparison returns {@code false}, this method returns
* {@code false}. If either iterator runs out of elements before the
* other it returns {@code false} (as the lists are of unequal length);
* otherwise it returns {@code true} when the iterations complete.
*
* @param o the object to be compared for equality with this list
* @return {@code true} if the specified object is equal to this list
*/
public boolean equals(Object o) {
if (o == this)
return true;
if (!(o instanceof List))
return false;
ListIterator<E> e1 = listIterator();
ListIterator<?> e2 = ((List<?>) o).listIterator();
while (e1.hasNext() && e2.hasNext()) {
E o1 = e1.next();
Object o2 = e2.next();
if (!(o1==null ? o2==null : o1.equals(o2)))
return false;
}
return !(e1.hasNext() || e2.hasNext());
}
/**
* Returns the hash code value for this list.
*
* <p>This implementation uses exactly the code that is used to define the
* list hash function in the documentation for the {@link List#hashCode}
* method.
*
* @return the hash code value for this list
*/
public int hashCode() {
int hashCode = 1;
for (E e : this)
hashCode = 31*hashCode + (e==null ? 0 : e.hashCode());
return hashCode;
}
/**
* Removes from this list all of the elements whose index is between
* {@code fromIndex}, inclusive, and {@code toIndex}, exclusive.
* Shifts any succeeding elements to the left (reduces their index).
* This call shortens the list by {@code (toIndex - fromIndex)} elements.
* (If {@code toIndex==fromIndex}, this operation has no effect.)
*
* <p>This method is called by the {@code clear} operation on this list
* and its subLists. Overriding this method to take advantage of
* the internals of the list implementation can <i>substantially</i>
* improve the performance of the {@code clear} operation on this list
* and its subLists.
*
* <p>This implementation gets a list iterator positioned before
* {@code fromIndex}, and repeatedly calls {@code ListIterator.next}
* followed by {@code ListIterator.remove} until the entire range has
* been removed. <b>Note: if {@code ListIterator.remove} requires linear
* time, this implementation requires quadratic time.</b>
*
* @param fromIndex index of first element to be removed
* @param toIndex index after last element to be removed
*/
protected void removeRange(int fromIndex, int toIndex) {
ListIterator<E> it = listIterator(fromIndex);
for (int i=0, n=toIndex-fromIndex; i<n; i++) {
it.next();
it.remove();
}
}
/**
* The number of times this list has been <i>structurally modified</i>.
* Structural modifications are those that change the size of the
* list, or otherwise perturb it in such a fashion that iterations in
* progress may yield incorrect results.
*
* <p>This field is used by the iterator and list iterator implementation
* returned by the {@code iterator} and {@code listIterator} methods.
* If the value of this field changes unexpectedly, the iterator (or list
* iterator) will throw a {@code ConcurrentModificationException} in
* response to the {@code next}, {@code remove}, {@code previous},
* {@code set} or {@code add} operations. This provides
* <i>fail-fast</i> behavior, rather than non-deterministic behavior in
* the face of concurrent modification during iteration.
*
* <p><b>Use of this field by subclasses is optional.</b> If a subclass
* wishes to provide fail-fast iterators (and list iterators), then it
* merely has to increment this field in its {@code add(int, E)} and
* {@code remove(int)} methods (and any other methods that it overrides
* that result in structural modifications to the list). A single call to
* {@code add(int, E)} or {@code remove(int)} must add no more than
* one to this field, or the iterators (and list iterators) will throw
* bogus {@code ConcurrentModificationExceptions}. If an implementation
* does not wish to provide fail-fast iterators, this field may be
* ignored.
*/
protected transient int modCount = 0;
private void rangeCheckForAdd(int index) {
if (index < 0 || index > size())
throw new IndexOutOfBoundsException(outOfBoundsMsg(index));
}
private String outOfBoundsMsg(int index) {
return "Index: "+index+", Size: "+size();
}
}
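/*
 * Illustrative sketch (not part of the JDK sources): a minimal growable list
 * backed by an array that follows the fail-fast contract described in the
 * modCount documentation above. The structural methods add(int, E) and
 * remove(int) increment modCount exactly once per call, so the iterators
 * inherited from AbstractList detect concurrent modification; set(int, E) is
 * not structural and leaves modCount untouched. The class name SimpleArrayList
 * is an assumption made for this example.
 */
class SimpleArrayList<E> extends AbstractList<E> {
    private Object[] elements = new Object[8];
    private int size;
    @SuppressWarnings("unchecked")
    public E get(int index) {
        if (index < 0 || index >= size)
            throw new IndexOutOfBoundsException("Index: " + index + ", Size: " + size);
        return (E) elements[index];
    }
    public int size() {
        return size;
    }
    public void add(int index, E element) {
        if (index < 0 || index > size)
            throw new IndexOutOfBoundsException("Index: " + index + ", Size: " + size);
        if (size == elements.length)
            elements = Arrays.copyOf(elements, size * 2);
        System.arraycopy(elements, index, elements, index + 1, size - index);
        elements[index] = element;
        size++;
        modCount++;            // one structural modification => exactly one increment
    }
    @SuppressWarnings("unchecked")
    public E remove(int index) {
        E old = get(index);    // get(int) also performs the range check
        System.arraycopy(elements, index + 1, elements, index, size - index - 1);
        elements[--size] = null;
        modCount++;            // keeps the inherited iterators fail-fast
        return old;
    }
    public E set(int index, E element) {
        E old = get(index);
        elements[index] = element;   // not a structural modification: modCount unchanged
        return old;
    }
}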
class SubList<E> extends AbstractList<E> {
private final AbstractList<E> l;
private final int offset;
private int size;
SubList(AbstractList<E> list, int fromIndex, int toIndex) {
if (fromIndex < 0)
throw new IndexOutOfBoundsException("fromIndex = " + fromIndex);
if (toIndex > list.size())
throw new IndexOutOfBoundsException("toIndex = " + toIndex);
if (fromIndex > toIndex)
throw new IllegalArgumentException("fromIndex(" + fromIndex +
") > toIndex(" + toIndex + ")");
l = list;
offset = fromIndex;
size = toIndex - fromIndex;
this.modCount = l.modCount;
}
public E set(int index, E element) {
rangeCheck(index);
checkForComodification();
return l.set(index+offset, element);
}
public E get(int index) {
rangeCheck(index);
checkForComodification();
return l.get(index+offset);
}
public int size() {
checkForComodification();
return size;
}
public void add(int index, E element) {
rangeCheckForAdd(index);
checkForComodification();
l.add(index+offset, element);
this.modCount = l.modCount;
size++;
}
public E remove(int index) {
rangeCheck(index);
checkForComodification();
E result = l.remove(index+offset);
this.modCount = l.modCount;
size--;
return result;
}
protected void removeRange(int fromIndex, int toIndex) {
checkForComodification();
l.removeRange(fromIndex+offset, toIndex+offset);
this.modCount = l.modCount;
size -= (toIndex-fromIndex);
}
public boolean addAll(Collection<? extends E> c) {
return addAll(size, c);
}
public boolean addAll(int index, Collection<? extends E> c) {
rangeCheckForAdd(index);
int cSize = c.size();
if (cSize==0)
return false;
checkForComodification();
l.addAll(offset+index, c);
this.modCount = l.modCount;
size += cSize;
return true;
}
public Iterator<E> iterator() {
return listIterator();
}
public ListIterator<E> listIterator(final int index) {
checkForComodification();
rangeCheckForAdd(index);
return new ListIterator<E>() {
private final ListIterator<E> i = l.listIterator(index+offset);
public boolean hasNext() {
return nextIndex() < size;
}
public E next() {
if (hasNext())
return i.next();
else
throw new NoSuchElementException();
}
public boolean hasPrevious() {
return previousIndex() >= 0;
}
public E previous() {
if (hasPrevious())
return i.previous();
else
throw new NoSuchElementException();
}
public int nextIndex() {
return i.nextIndex() - offset;
}
public int previousIndex() {
return i.previousIndex() - offset;
}
public void remove() {
i.remove();
SubList.this.modCount = l.modCount;
size--;
}
public void set(E e) {
i.set(e);
}
public void add(E e) {
i.add(e);
SubList.this.modCount = l.modCount;
size++;
}
};
}
public List<E> subList(int fromIndex, int toIndex) {
return new SubList<>(this, fromIndex, toIndex);
}
private void rangeCheck(int index) {
if (index < 0 || index >= size)
throw new IndexOutOfBoundsException(outOfBoundsMsg(index));
}
private void rangeCheckForAdd(int index) {
if (index < 0 || index > size)
throw new IndexOutOfBoundsException(outOfBoundsMsg(index));
}
private String outOfBoundsMsg(int index) {
return "Index: "+index+", Size: "+size;
}
private void checkForComodification() {
if (this.modCount != l.modCount)
throw new ConcurrentModificationException();
}
}
class RandomAccessSubList<E> extends SubList<E> implements RandomAccess {
RandomAccessSubList(AbstractList<E> list, int fromIndex, int toIndex) {
super(list, fromIndex, toIndex);
}
public List<E> subList(int fromIndex, int toIndex) {
return new RandomAccessSubList<>(this, fromIndex, toIndex);
}
}
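/*
 * Illustrative sketch (not part of the JDK sources): the minimal effort needed
 * for an unmodifiable random-access list. Only get(int) and size() are
 * supplied; iterator(), listIterator(), subList(), indexOf(), equals() and
 * hashCode() are inherited, and the inherited add(E), add(int, E), set(int, E)
 * and remove(int) throw UnsupportedOperationException exactly as their
 * documentation above states. The class name ImmutableRangeList is an
 * assumption made for this example.
 */
class ImmutableRangeList extends AbstractList<Integer> implements RandomAccess {
    private final int start;
    private final int size;
    ImmutableRangeList(int start, int size) {
        if (size < 0)
            throw new IllegalArgumentException("size = " + size);
        this.start = start;
        this.size = size;
    }
    public Integer get(int index) {
        if (index < 0 || index >= size)
            throw new IndexOutOfBoundsException("Index: " + index + ", Size: " + size);
        return start + index;    // element i is simply start + i
    }
    public int size() {
        return size;
    }
}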
|
|
/**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.xml.ingest;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import org.apache.commons.lang3.StringUtils;
import org.kuali.common.util.log.LoggerUtils;
import org.kuali.common.util.runonce.smart.RunOnce;
import org.kuali.common.util.runonce.smart.RunOnceState;
import org.kuali.rice.core.api.util.Truth;
import org.kuali.rice.coreservice.api.parameter.Parameter;
import org.kuali.rice.coreservice.api.parameter.ParameterType;
import org.kuali.rice.coreservice.framework.CoreFrameworkServiceLocator;
import org.kuali.rice.coreservice.framework.parameter.ParameterService;
import org.slf4j.Logger;
import com.google.common.base.Optional;
public final class ParameterServiceRunOnce implements RunOnce {
private static final Logger logger = LoggerUtils.make();
private final String applicationId;
private final String namespace;
private final String component;
private final String name;
private final Optional<String> description;
private final boolean runOnMissingParameter;
//
private boolean initialized = false;
private boolean runonce = false;
private ParameterService service;
private static final String CONFIGURATION_PARAMETER_TYPE = "CONFG";
private static final String YES = "Y";
@Override
public synchronized void initialize() {
checkState(!initialized, "Already initialized");
this.service = CoreFrameworkServiceLocator.getParameterService();
Optional<Parameter> parameter = Optional.fromNullable(service.getParameter(namespace, component, name));
if (!parameter.isPresent() && runOnMissingParameter) {
parameter = Optional.of(createParameter());
}
this.runonce = isRunOnce(parameter);
showConfig(parameter);
this.initialized = true;
}
@Override
public synchronized boolean isTrue() {
checkState(initialized, "Not initialized");
return runonce;
}
@Override
public synchronized void changeState(RunOnceState state) {
// Ensure things are as they should be
checkState(initialized, "Not initialized");
checkNotNull(state, "'state' cannot be null");
// Get the existing parameter
Parameter existingParameter = service.getParameter(namespace, component, name);
// Can't change the state of a non-existent parameter
        // The isRunOnce() method called during initialization cannot return true unless a parameter exists and its value is set to 'Y'
checkNotNull(existingParameter, "'existingParameter' cannot be null");
// Update the parameter
logger.info("Updating parameter: [{}]", name);
Parameter.Builder builder = Parameter.Builder.create(existingParameter);
builder.setValue(state.name());
Parameter updatedParameter = service.updateParameter(builder.build());
// This must always return false here
this.runonce = isRunOnce(updatedParameter);
checkState(!isTrue(), "isTrue() must return false");
// Emit a log message indicating the change in state
logger.info("Transitioned RunOnce to - [{}]", updatedParameter.getValue());
}
protected boolean isRunOnce(Optional<Parameter> parameter) {
if (parameter.isPresent()) {
return isRunOnce(parameter.get());
} else {
return false;
}
}
protected boolean isRunOnce(Parameter parameter) {
        return Boolean.TRUE.equals(Truth.strToBooleanIgnoreCase(parameter.getValue()));
}
protected Parameter createParameter() {
logger.info("Creating parameter: [{}]=[{}]", name, YES);
Parameter.Builder builder = Parameter.Builder.create(applicationId, namespace, component, name, ParameterType.Builder.create(CONFIGURATION_PARAMETER_TYPE));
builder.setValue(YES);
if (description.isPresent()) {
builder.setDescription(description.get());
}
return service.createParameter(builder.build());
}
protected void showConfig(Optional<Parameter> optional) {
logger.info(String.format("Parameter Metadata: [%s:%s:%s]", applicationId, namespace, component));
if (optional.isPresent()) {
Parameter parameter = optional.get();
logger.info("Parameter: [{}]=[{}]", name, parameter.getValue());
} else {
logger.info("Parameter [{}] does not exist", name);
}
logger.info("RunOnce: [{}]", runonce);
}
private ParameterServiceRunOnce(Builder builder) {
this.applicationId = builder.applicationId;
this.namespace = builder.namespace;
this.component = builder.component;
this.name = builder.name;
this.description = builder.description;
this.runOnMissingParameter = builder.runOnMissingParameter;
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private String applicationId;
private String namespace;
private String component;
private String name;
private Optional<String> description;
private boolean runOnMissingParameter;
public Builder runOnMissingParameter(boolean runOnMissingParameter) {
this.runOnMissingParameter = runOnMissingParameter;
return this;
}
public Builder applicationId(String applicationId) {
this.applicationId = applicationId;
return this;
}
public Builder namespace(String namespace) {
this.namespace = namespace;
return this;
}
public Builder component(String component) {
this.component = component;
return this;
}
public Builder name(String name) {
this.name = name;
return this;
}
public Builder description(String name) {
this.description = Optional.fromNullable(name);
return this;
}
public ParameterServiceRunOnce build() {
ParameterServiceRunOnce instance = new ParameterServiceRunOnce(this);
validate(instance);
return instance;
}
private static void validate(ParameterServiceRunOnce instance) {
            checkArgument(!StringUtils.isBlank(instance.getApplicationId()), "'applicationId' cannot be blank");
            checkArgument(!StringUtils.isBlank(instance.getNamespace()), "'namespace' cannot be blank");
            checkArgument(!StringUtils.isBlank(instance.getComponent()), "'component' cannot be blank");
            checkArgument(!StringUtils.isBlank(instance.getName()), "'name' cannot be blank");
checkNotNull(instance.description, "'description' cannot be null");
}
public String getApplicationId() {
return applicationId;
}
public void setApplicationId(String applicationId) {
this.applicationId = applicationId;
}
public String getNamespace() {
return namespace;
}
public void setNamespace(String namespace) {
this.namespace = namespace;
}
public String getComponent() {
return component;
}
public void setComponent(String component) {
this.component = component;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Optional<String> getDescription() {
return description;
}
public void setDescription(Optional<String> description) {
this.description = description;
}
}
public String getApplicationId() {
return applicationId;
}
public String getNamespace() {
return namespace;
}
public String getComponent() {
return component;
}
public String getName() {
return name;
}
public Optional<String> getDescription() {
return description;
}
public boolean isRunOnMissingParameter() {
return runOnMissingParameter;
}
}
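/*
 * Illustrative usage sketch (not part of the original sources): wiring up
 * ParameterServiceRunOnce with its builder and driving the RunOnce life cycle.
 * The parameter coordinates (application id, namespace, component, name) are
 * made-up example values, and `finishedState` stands for whatever RunOnceState
 * constant the caller uses to mark completion.
 */
class ParameterServiceRunOnceExample {
    void runOnceIfRequested(RunOnceState finishedState) {
        RunOnce runOnce = ParameterServiceRunOnce.builder()
                .applicationId("RICE")                      // example values, not real configuration
                .namespace("KR-WKFLW")
                .component("All")
                .name("INGEST_XML_RUN_ONCE")
                .description("Controls one-time XML ingestion")
                .runOnMissingParameter(true)                // create the parameter with value 'Y' if it is absent
                .build();
        runOnce.initialize();                               // reads (or creates) the parameter
        if (runOnce.isTrue()) {
            // ... perform the one-time work here ...
            runOnce.changeState(finishedState);             // updates the parameter so the work is not repeated
        }
    }
}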
|
|
/*
* Copyright (C) 2014 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.cassandra.lucene.testsAT.util;
import com.stratio.cassandra.lucene.builder.index.Partitioner;
import com.stratio.cassandra.lucene.builder.index.schema.analysis.Analyzer;
import com.stratio.cassandra.lucene.builder.index.schema.mapping.Mapper;
import com.stratio.cassandra.lucene.builder.index.schema.mapping.SingleColumnMapper;
import java.util.*;
import static com.stratio.cassandra.lucene.builder.Builder.*;
import static com.stratio.cassandra.lucene.testsAT.util.CassandraConfig.*;
/**
* @author Andres de la Pena {@literal <adelapena@stratio.com>}
*/
public class CassandraUtilsBuilder {
private final String keyspace;
private String table = TABLE;
private String indexName = INDEX;
private String indexColumn = COLUMN;
private Boolean useNewQuerySyntax = USE_NEW_QUERY_SYNTAX;
private Map<String, String> columns;
private Map<String, Mapper> mappers;
private Map<String, Analyzer> analyzers;
private List<String> partitionKey;
private List<String> clusteringKey;
private String clusteringOrderColumn;
private boolean clusteringOrderAscending;
private Partitioner partitioner = PARTITIONER;
private final Map<String, Map<String, String>> udts;
CassandraUtilsBuilder(String keyspacePrefix) {
this.keyspace = keyspacePrefix + "_" + Math.abs(new Random().nextLong());
this.columns = new HashMap<>();
this.mappers = new HashMap<>();
this.analyzers = new HashMap<>();
this.partitionKey = new ArrayList<>();
this.clusteringKey = new ArrayList<>();
this.udts = new LinkedHashMap<>();
}
public CassandraUtilsBuilder withTable(String table) {
this.table = table;
return this;
}
public CassandraUtilsBuilder withIndexName(String index) {
this.indexName = index;
return this;
}
public CassandraUtilsBuilder withIndexColumn(String indexedColumn) {
this.indexColumn = indexedColumn;
return this;
}
public CassandraUtilsBuilder withUseNewQuerySyntax(Boolean useNewQuerySyntax) {
this.useNewQuerySyntax = useNewQuerySyntax;
return this;
}
public CassandraUtilsBuilder withColumn(String name, String type, Mapper mapper) {
columns.put(name, type);
if (mapper != null) {
mappers.put(name, mapper);
}
return this;
}
public CassandraUtilsBuilder withColumn(String name, String type) {
columns.put(name, type);
String baseType = type.replaceAll("(.*)(<|,)", "").replace(">", "");
SingleColumnMapper<?> mapper = defaultMapper(baseType);
if (mapper != null) {
mappers.put(name, mapper);
}
return this;
}
public CassandraUtilsBuilder withStaticColumn(String name, String type, boolean createMapper) {
columns.put(name, type + " static");
if (createMapper) {
String baseType = type.replaceAll("(.*)(<|,)", "").replace(">", "");
SingleColumnMapper<?> mapper = defaultMapper(baseType);
mappers.put(name, mapper);
}
return this;
}
public CassandraUtilsBuilder withUDT(String column, String field, String type) {
Map<String, String> udt = udts.get(column);
if (udt == null) {
udt = new HashMap<>();
udts.put(column, udt);
}
udt.put(field, type);
return this;
}
public CassandraUtilsBuilder withPartitionKey(String... columns) {
partitionKey.addAll(Arrays.asList(columns));
return this;
}
public CassandraUtilsBuilder withClusteringKey(String... columns) {
clusteringKey.addAll(Arrays.asList(columns));
return this;
}
public CassandraUtilsBuilder withMapper(String name, Mapper mapper) {
mappers.put(name, mapper);
return this;
}
public CassandraUtilsBuilder withAnalyzer(String name, Analyzer analyzer) {
analyzers.put(name, analyzer);
return this;
}
public CassandraUtilsBuilder withClusteringOrder(String columnName, boolean ascending) {
clusteringOrderColumn = columnName;
clusteringOrderAscending = ascending;
return this;
}
public CassandraUtilsBuilder withPartitioner(Partitioner partitioner) {
this.partitioner = partitioner;
return this;
}
private SingleColumnMapper<?> defaultMapper(String name) {
switch (name) {
case "ascii":
return stringMapper();
case "bigint":
return longMapper();
case "blob":
return blobMapper();
case "boolean":
return booleanMapper();
case "counter":
return longMapper();
case "decimal":
return bigDecimalMapper().integerDigits(10).decimalDigits(10);
case "double":
return doubleMapper();
case "float":
return floatMapper();
case "inet":
return inetMapper();
case "int":
return integerMapper();
case "smallint":
return integerMapper();
case "text":
return textMapper();
case "timestamp":
return dateMapper().pattern("yyyy/MM/dd HH:mm:ss.SSS");
case "timeuuid":
return uuidMapper();
case "tinyint":
return integerMapper();
case "uuid":
return uuidMapper();
case "varchar":
return stringMapper();
case "varint":
return bigIntegerMapper().digits(10);
default:
return null;
}
}
public CassandraUtils build() {
return new CassandraUtils(keyspace,
table,
indexName,
indexColumn,
useNewQuerySyntax,
columns,
mappers,
analyzers,
partitionKey,
clusteringKey,
udts,
clusteringOrderColumn,
clusteringOrderAscending,
partitioner);
}
}
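/*
 * Illustrative usage sketch (not part of the original sources): building a
 * test fixture for a simple table. The CQL column types drive the default
 * Lucene mappers chosen by defaultMapper(String), e.g. "text" -> textMapper()
 * and "bigint" -> longMapper(). The keyspace prefix, table and column names
 * are made up for this example.
 */
class CassandraUtilsBuilderExample {
    CassandraUtils buildFixture() {
        return new CassandraUtilsBuilder("example_ks")
                .withTable("users")
                .withIndexName("users_index")
                .withPartitionKey("id")
                .withClusteringKey("created")
                .withColumn("id", "uuid")               // default mapper: uuidMapper()
                .withColumn("created", "timestamp")     // default mapper: dateMapper() with the builder's pattern
                .withColumn("name", "text")             // default mapper: textMapper()
                .withColumn("score", "bigint")          // default mapper: longMapper()
                .build();
    }
}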
|
|
package org.rmatil.sync.network.core;
import net.tomp2p.futures.FutureDirect;
import net.tomp2p.peers.PeerAddress;
import org.rmatil.sync.network.api.*;
import org.rmatil.sync.network.config.Config;
import org.rmatil.sync.network.core.exception.ConnectionException;
import org.rmatil.sync.network.core.exception.ConnectionFailedException;
import org.rmatil.sync.network.core.exception.ObjectSendFailedException;
import org.rmatil.sync.network.core.exception.SecurityException;
import org.rmatil.sync.network.core.messaging.EncryptedDataReplyHandler;
import org.rmatil.sync.network.core.messaging.ObjectDataReplyHandler;
import org.rmatil.sync.network.core.model.EncryptedData;
import org.rmatil.sync.network.core.model.NodeLocation;
import org.rmatil.sync.network.core.security.encryption.asymmetric.rsa.RsaEncryption;
import org.rmatil.sync.network.core.security.encryption.symmetric.aes.AesEncryption;
import org.rmatil.sync.network.core.security.encryption.symmetric.aes.AesKeyFactory;
import org.rmatil.sync.network.core.security.sign.rsa.RsaSign;
import org.rmatil.sync.network.core.serialize.ByteSerializer;
import org.rmatil.sync.persistence.core.dht.secured.SecuredDhtStorageAdapter;
import org.rmatil.sync.persistence.exceptions.InputOutputException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.crypto.SecretKey;
import java.io.IOException;
import java.security.InvalidKeyException;
import java.security.interfaces.RSAPrivateKey;
import java.security.interfaces.RSAPublicKey;
import java.util.UUID;
public class Node implements INode {
protected final static Logger logger = LoggerFactory.getLogger(Node.class);
protected ConnectionConfiguration config;
protected Connection connection;
protected IUser user;
protected UUID clientDeviceId;
protected ObjectDataReplyHandler objectDataReplyHandler;
protected NodeManager nodeManager;
protected IUserManager userManager;
protected IIdentifierManager<String, UUID> identifierManager;
protected RsaEncryption rsaEncryption;
protected AesEncryption aesEncryption;
protected RsaSign rsaSign;
public Node(ConnectionConfiguration config, IUser user, UUID uuid) {
this.config = config;
this.user = user;
this.clientDeviceId = uuid;
this.rsaEncryption = new RsaEncryption();
this.aesEncryption = new AesEncryption();
this.rsaSign = new RsaSign();
}
@Override
public boolean start()
throws ConnectionException, InvalidKeyException {
return this.start(null, null);
}
@Override
public boolean start(String bootstrapIpAddress, Integer bootstrapPort)
throws ConnectionException, ConnectionFailedException, InvalidKeyException {
        // The encrypted data reply handler requires a node manager, but the
        // node manager in turn requires a DHT. We therefore create the manager
        // here without a storage adapter and set the adapter once the
        // connection (and with it the DHT) is available.
this.nodeManager = new NodeManager(
null,
Config.DEFAULT.getLocationsContentKey(),
Config.DEFAULT.getPrivateKeyContentKey(),
Config.DEFAULT.getPublicKeyContentKey(),
Config.DEFAULT.getSaltContentKey(),
Config.DEFAULT.getDomainKey()
);
this.connection = new Connection(
this.config,
new EncryptedDataReplyHandler(
this.objectDataReplyHandler,
this.nodeManager,
(RSAPrivateKey) this.user.getPrivateKey()
)
);
this.connection.open(this.user.getKeyPair());
if (null != bootstrapIpAddress && null != bootstrapPort) {
this.connection.connect(bootstrapIpAddress, bootstrapPort);
}
logger.info("Successfully started node on address " + this.getPeerAddress().inetAddress().getHostAddress() + ":" + this.config.getPort());
SecuredDhtStorageAdapter dhtStorageAdapter = new SecuredDhtStorageAdapter(
this.connection.getPeerDHT(),
this.config.getCacheTtl()
);
NodeLocation nodeLocation = new NodeLocation(
this.user.getUserName(),
this.clientDeviceId,
this.connection.getPeerDHT().peerAddress()
);
this.nodeManager.setStorageAdapter(dhtStorageAdapter);
this.identifierManager = new IdentifierManager(
dhtStorageAdapter,
this.user.getUserName(),
Config.DEFAULT.getIdentifierContentKey(),
Config.DEFAULT.getDomainKey()
);
this.userManager = new UserManager(this.nodeManager);
if (! this.userManager.login(this.user, nodeLocation)) {
logger.error("Failed to login the user " + this.user.getUserName());
this.connection.close();
return false;
}
logger.debug("Bootstrap peer succeeded to bootstrap at " + this.getPeerAddress().inetAddress().getHostAddress() + ":" + this.getPeerAddress().tcpPort());
return true;
}
@Override
public boolean shutdown() {
if (null == this.connection || this.connection.isClosed()) {
// reset the connection
this.connection = null;
return true;
}
logger.info("Shutting node down");
// remove the node location
NodeLocation nodeLocation = new NodeLocation(
this.user.getUserName(),
this.clientDeviceId,
this.connection.getPeerDHT().peerAddress()
);
this.userManager.logout(this.user, nodeLocation);
        // close the connection, announcing the shutdown of this node to its peers
try {
this.connection.close();
} catch (ConnectionException e) {
logger.error("Failed to shut down this node: " + e.getMessage());
return false;
}
// reset the connection
this.connection = null;
return true;
}
@Override
public boolean isConnected() {
return null != this.connection && ! this.connection.isClosed();
}
@Override
public void setObjectDataReplyHandler(ObjectDataReplyHandler objectDataReplyHandler) {
if (null != this.connection) {
throw new IllegalStateException("Can not set the object data reply handler after the connection has been set up");
}
        // the connection is not yet established, so the object data reply handler can still be set
this.objectDataReplyHandler = objectDataReplyHandler;
}
@Override
public ObjectDataReplyHandler getObjectDataReplyHandler() {
return this.objectDataReplyHandler;
}
@Override
public UUID getClientDeviceId() {
return clientDeviceId;
}
@Override
public IUser getUser() {
return user;
}
@Override
public INodeManager getNodeManager() {
return this.nodeManager;
}
@Override
public IUserManager getUserManager() {
return userManager;
}
@Override
public IIdentifierManager<String, UUID> getIdentifierManager() {
return this.identifierManager;
}
@Override
public PeerAddress getPeerAddress() {
return this.connection.getPeerDHT().peerAddress();
}
@Override
public FutureDirect sendDirect(NodeLocation receiverAddress, Object data) {
logger.info("Sending request to "
+ receiverAddress.getUsername()
+ " ("
+ receiverAddress.getIpAddress()
+ ":"
+ receiverAddress.getPort()
+ ")"
);
// get public key from receiver to encrypt
RSAPublicKey publicKey;
try {
publicKey = (RSAPublicKey) this.nodeManager.getPublicKey(receiverAddress.getUsername());
} catch (InputOutputException e) {
throw new ObjectSendFailedException(
"Could not use public key of user "
+ receiverAddress.getUsername()
+ " to encrypt data. Aborting to send request for this receiver. Message: "
+ e.getMessage()
);
}
if (null == publicKey) {
throw new ObjectSendFailedException("Can not encrypt message. No public key found for receiver " + receiverAddress.getUsername());
}
try {
byte[] plainData = ByteSerializer.toBytes(data);
// encrypt the actual data using the AES key
byte[] initVector = AesEncryption.generateInitializationVector();
SecretKey aesKey = AesKeyFactory.generateSecretKey();
byte[] aesEncryptedData = this.aesEncryption.encrypt(aesKey, initVector, plainData);
// encrypt the AES key with RSA
byte[] encodedAesKey = aesKey.getEncoded();
byte[] symmetricKey = new byte[AesEncryption.INIT_VECTOR_LENGTH + encodedAesKey.length];
System.arraycopy(initVector, 0, symmetricKey, 0, initVector.length);
System.arraycopy(encodedAesKey, 0, symmetricKey, initVector.length, encodedAesKey.length);
byte[] rsaEncryptedData = this.rsaEncryption.encrypt(publicKey, symmetricKey);
byte[] signature = this.rsaSign.sign((RSAPrivateKey) this.user.getPrivateKey(), plainData);
return this.connection.sendDirect(
receiverAddress.getPeerAddress(),
new EncryptedData(
signature,
rsaEncryptedData,
aesEncryptedData
)
);
} catch (IOException | SecurityException e) {
throw new ObjectSendFailedException(
"Failed to encrypt data for receiver "
+ receiverAddress.getUsername()
+ " ("
+ receiverAddress.getIpAddress()
+ ":"
+ receiverAddress.getPort()
+ "). Aborting to send request for this receiver. Message: "
+ e.getMessage(),
e
);
}
}
}
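/*
 * Illustrative sketch (not part of the original sources): the same hybrid
 * envelope that sendDirect() builds above, expressed with the plain JCE API
 * instead of the project's AesEncryption/RsaEncryption/RsaSign helpers. The
 * payload is AES-encrypted with a fresh key and IV, the IV plus the raw AES
 * key are RSA-encrypted for the receiver, and the plaintext is signed with the
 * sender's private key. Fully qualified class names are used so that no extra
 * imports have to be added to this file; the algorithm and padding choices are
 * assumptions for the example and not necessarily those of the helpers.
 */
class HybridEnvelopeSketch {
    EncryptedData seal(byte[] plainData, RSAPublicKey receiverPublicKey, RSAPrivateKey senderPrivateKey)
            throws java.security.GeneralSecurityException {
        // 1. symmetric part: fresh AES key + IV, encrypt the payload
        javax.crypto.KeyGenerator keyGen = javax.crypto.KeyGenerator.getInstance("AES");
        keyGen.init(128);
        SecretKey aesKey = keyGen.generateKey();
        byte[] initVector = new byte[16];
        new java.security.SecureRandom().nextBytes(initVector);
        javax.crypto.Cipher aes = javax.crypto.Cipher.getInstance("AES/CBC/PKCS5Padding");
        aes.init(javax.crypto.Cipher.ENCRYPT_MODE, aesKey, new javax.crypto.spec.IvParameterSpec(initVector));
        byte[] aesEncryptedData = aes.doFinal(plainData);
        // 2. asymmetric part: RSA-encrypt "IV || raw AES key" for the receiver
        byte[] encodedAesKey = aesKey.getEncoded();
        byte[] symmetricKey = new byte[initVector.length + encodedAesKey.length];
        System.arraycopy(initVector, 0, symmetricKey, 0, initVector.length);
        System.arraycopy(encodedAesKey, 0, symmetricKey, initVector.length, encodedAesKey.length);
        javax.crypto.Cipher rsa = javax.crypto.Cipher.getInstance("RSA/ECB/PKCS1Padding");
        rsa.init(javax.crypto.Cipher.ENCRYPT_MODE, receiverPublicKey);
        byte[] rsaEncryptedData = rsa.doFinal(symmetricKey);
        // 3. detached signature over the plaintext with the sender's private key
        java.security.Signature signer = java.security.Signature.getInstance("SHA256withRSA");
        signer.initSign(senderPrivateKey);
        signer.update(plainData);
        byte[] signature = signer.sign();
        return new EncryptedData(signature, rsaEncryptedData, aesEncryptedData);
    }
}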
|
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.kinesis.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
* <p>
* Represents the output for <code>ListTagsForStream</code>.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/kinesis-2013-12-02/ListTagsForStream" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListTagsForStreamResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
/**
* <p>
* A list of tags associated with <code>StreamName</code>, starting with the first tag after
* <code>ExclusiveStartTagKey</code> and up to the specified <code>Limit</code>.
* </p>
*/
private com.amazonaws.internal.SdkInternalList<Tag> tags;
/**
* <p>
* If set to <code>true</code>, more tags are available. To request additional tags, set
* <code>ExclusiveStartTagKey</code> to the key of the last tag returned.
* </p>
*/
private Boolean hasMoreTags;
/**
* <p>
* A list of tags associated with <code>StreamName</code>, starting with the first tag after
* <code>ExclusiveStartTagKey</code> and up to the specified <code>Limit</code>.
* </p>
*
* @return A list of tags associated with <code>StreamName</code>, starting with the first tag after
* <code>ExclusiveStartTagKey</code> and up to the specified <code>Limit</code>.
*/
public java.util.List<Tag> getTags() {
if (tags == null) {
tags = new com.amazonaws.internal.SdkInternalList<Tag>();
}
return tags;
}
/**
* <p>
* A list of tags associated with <code>StreamName</code>, starting with the first tag after
* <code>ExclusiveStartTagKey</code> and up to the specified <code>Limit</code>.
* </p>
*
* @param tags
* A list of tags associated with <code>StreamName</code>, starting with the first tag after
* <code>ExclusiveStartTagKey</code> and up to the specified <code>Limit</code>.
*/
public void setTags(java.util.Collection<Tag> tags) {
if (tags == null) {
this.tags = null;
return;
}
this.tags = new com.amazonaws.internal.SdkInternalList<Tag>(tags);
}
/**
* <p>
* A list of tags associated with <code>StreamName</code>, starting with the first tag after
* <code>ExclusiveStartTagKey</code> and up to the specified <code>Limit</code>.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setTags(java.util.Collection)} or {@link #withTags(java.util.Collection)} if you want to override the
* existing values.
* </p>
*
* @param tags
* A list of tags associated with <code>StreamName</code>, starting with the first tag after
* <code>ExclusiveStartTagKey</code> and up to the specified <code>Limit</code>.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListTagsForStreamResult withTags(Tag... tags) {
if (this.tags == null) {
setTags(new com.amazonaws.internal.SdkInternalList<Tag>(tags.length));
}
for (Tag ele : tags) {
this.tags.add(ele);
}
return this;
}
/**
* <p>
* A list of tags associated with <code>StreamName</code>, starting with the first tag after
* <code>ExclusiveStartTagKey</code> and up to the specified <code>Limit</code>.
* </p>
*
* @param tags
* A list of tags associated with <code>StreamName</code>, starting with the first tag after
* <code>ExclusiveStartTagKey</code> and up to the specified <code>Limit</code>.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListTagsForStreamResult withTags(java.util.Collection<Tag> tags) {
setTags(tags);
return this;
}
/**
* <p>
* If set to <code>true</code>, more tags are available. To request additional tags, set
* <code>ExclusiveStartTagKey</code> to the key of the last tag returned.
* </p>
*
* @param hasMoreTags
* If set to <code>true</code>, more tags are available. To request additional tags, set
* <code>ExclusiveStartTagKey</code> to the key of the last tag returned.
*/
public void setHasMoreTags(Boolean hasMoreTags) {
this.hasMoreTags = hasMoreTags;
}
/**
* <p>
* If set to <code>true</code>, more tags are available. To request additional tags, set
* <code>ExclusiveStartTagKey</code> to the key of the last tag returned.
* </p>
*
* @return If set to <code>true</code>, more tags are available. To request additional tags, set
* <code>ExclusiveStartTagKey</code> to the key of the last tag returned.
*/
public Boolean getHasMoreTags() {
return this.hasMoreTags;
}
/**
* <p>
* If set to <code>true</code>, more tags are available. To request additional tags, set
* <code>ExclusiveStartTagKey</code> to the key of the last tag returned.
* </p>
*
* @param hasMoreTags
* If set to <code>true</code>, more tags are available. To request additional tags, set
* <code>ExclusiveStartTagKey</code> to the key of the last tag returned.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListTagsForStreamResult withHasMoreTags(Boolean hasMoreTags) {
setHasMoreTags(hasMoreTags);
return this;
}
/**
* <p>
* If set to <code>true</code>, more tags are available. To request additional tags, set
* <code>ExclusiveStartTagKey</code> to the key of the last tag returned.
* </p>
*
* @return If set to <code>true</code>, more tags are available. To request additional tags, set
* <code>ExclusiveStartTagKey</code> to the key of the last tag returned.
*/
public Boolean isHasMoreTags() {
return this.hasMoreTags;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getTags() != null)
sb.append("Tags: ").append(getTags()).append(",");
if (getHasMoreTags() != null)
sb.append("HasMoreTags: ").append(getHasMoreTags());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof ListTagsForStreamResult == false)
return false;
ListTagsForStreamResult other = (ListTagsForStreamResult) obj;
if (other.getTags() == null ^ this.getTags() == null)
return false;
if (other.getTags() != null && other.getTags().equals(this.getTags()) == false)
return false;
if (other.getHasMoreTags() == null ^ this.getHasMoreTags() == null)
return false;
if (other.getHasMoreTags() != null && other.getHasMoreTags().equals(this.getHasMoreTags()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode());
hashCode = prime * hashCode + ((getHasMoreTags() == null) ? 0 : getHasMoreTags().hashCode());
return hashCode;
}
@Override
public ListTagsForStreamResult clone() {
try {
return (ListTagsForStreamResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
}
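/*
 * Illustrative usage sketch (not part of the generated SDK sources): paging
 * through all tags of a stream with the classic (v1) Kinesis client. It relies
 * on the contract documented above: while HasMoreTags is true, the key of the
 * last returned tag is passed as ExclusiveStartTagKey of the next request.
 * The stream name is a made-up example value.
 */
class ListTagsForStreamPagingExample {
    java.util.List<Tag> listAllTags(com.amazonaws.services.kinesis.AmazonKinesis kinesis) {
        java.util.List<Tag> allTags = new java.util.ArrayList<Tag>();
        String exclusiveStartTagKey = null;
        ListTagsForStreamResult result;
        do {
            ListTagsForStreamRequest request = new ListTagsForStreamRequest()
                    .withStreamName("example-stream")
                    .withExclusiveStartTagKey(exclusiveStartTagKey);
            result = kinesis.listTagsForStream(request);
            allTags.addAll(result.getTags());
            if (!result.getTags().isEmpty()) {
                // continue after the last tag we have already seen
                exclusiveStartTagKey = result.getTags().get(result.getTags().size() - 1).getKey();
            }
        } while (Boolean.TRUE.equals(result.getHasMoreTags()));
        return allTags;
    }
}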
|
|
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.cmmn.rest.service.api.runtime.planitem;
import java.util.Date;
import java.util.List;
import org.flowable.cmmn.rest.service.api.engine.variable.QueryVariable;
import org.flowable.common.rest.api.PaginateRequest;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeInfo.Id;
/**
* @author Tijs Rademakers
*/
public class PlanItemInstanceQueryRequest extends PaginateRequest {
private String id;
private String elementId;
private String name;
private String caseInstanceId;
private String caseDefinitionId;
private String stageInstanceId;
private String planItemDefinitionId;
private String planItemDefinitionType;
private List<String> planItemDefinitionTypes;
private String state;
private Date createdBefore;
private Date createdAfter;
private String startUserId;
private String referenceId;
private String referenceType;
private List<QueryVariable> variables;
private List<QueryVariable> caseInstanceVariables;
private String tenantId;
private Boolean withoutTenantId;
@JsonTypeInfo(use = Id.CLASS, defaultImpl = QueryVariable.class)
public List<QueryVariable> getVariables() {
return variables;
}
public void setVariables(List<QueryVariable> variables) {
this.variables = variables;
}
public List<QueryVariable> getCaseInstanceVariables() {
return caseInstanceVariables;
}
@JsonTypeInfo(use = Id.CLASS, defaultImpl = QueryVariable.class)
public void setCaseInstanceVariables(List<QueryVariable> caseInstanceVariables) {
this.caseInstanceVariables = caseInstanceVariables;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getElementId() {
return elementId;
}
public void setElementId(String elementId) {
this.elementId = elementId;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getCaseInstanceId() {
return caseInstanceId;
}
public void setCaseInstanceId(String caseInstanceId) {
this.caseInstanceId = caseInstanceId;
}
public String getCaseDefinitionId() {
return caseDefinitionId;
}
public void setCaseDefinitionId(String caseDefinitionId) {
this.caseDefinitionId = caseDefinitionId;
}
public String getStageInstanceId() {
return stageInstanceId;
}
public void setStageInstanceId(String stageInstanceId) {
this.stageInstanceId = stageInstanceId;
}
public String getPlanItemDefinitionId() {
return planItemDefinitionId;
}
public void setPlanItemDefinitionId(String planItemDefinitionId) {
this.planItemDefinitionId = planItemDefinitionId;
}
public String getPlanItemDefinitionType() {
return planItemDefinitionType;
}
public void setPlanItemDefinitionType(String planItemDefinitionType) {
this.planItemDefinitionType = planItemDefinitionType;
}
public List<String> getPlanItemDefinitionTypes() {
return planItemDefinitionTypes;
}
public void setPlanItemDefinitionTypes(List<String> planItemDefinitionTypes) {
this.planItemDefinitionTypes = planItemDefinitionTypes;
}
public String getState() {
return state;
}
public void setState(String state) {
this.state = state;
}
public Date getCreatedBefore() {
return createdBefore;
}
public void setCreatedBefore(Date createdBefore) {
this.createdBefore = createdBefore;
}
public Date getCreatedAfter() {
return createdAfter;
}
public void setCreatedAfter(Date createdAfter) {
this.createdAfter = createdAfter;
}
public String getStartUserId() {
return startUserId;
}
public void setStartUserId(String startUserId) {
this.startUserId = startUserId;
}
public String getReferenceId() {
return referenceId;
}
public void setReferenceId(String referenceId) {
this.referenceId = referenceId;
}
public String getReferenceType() {
return referenceType;
}
public void setReferenceType(String referenceType) {
this.referenceType = referenceType;
}
public String getTenantId() {
return tenantId;
}
public void setTenantId(String tenantId) {
this.tenantId = tenantId;
}
public Boolean getWithoutTenantId() {
return withoutTenantId;
}
public void setWithoutTenantId(Boolean withoutTenantId) {
this.withoutTenantId = withoutTenantId;
}
}
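/*
 * Illustrative usage sketch (not part of the original sources): populating the
 * query request the way a REST client payload would be built before posting it
 * to the plan item instance query endpoint. Only setters declared in the class
 * above are used; the case instance id, plan item definition type and state are
 * made-up example values.
 */
class PlanItemInstanceQueryRequestExample {
    PlanItemInstanceQueryRequest buildQuery() {
        PlanItemInstanceQueryRequest request = new PlanItemInstanceQueryRequest();
        request.setCaseInstanceId("example-case-instance-id");  // restrict to one case instance
        request.setPlanItemDefinitionType("humantask");         // e.g. only human task plan items
        request.setState("active");                             // only active plan item instances
        request.setCreatedAfter(new Date());                    // typically a real lower bound, not "now"
        request.setWithoutTenantId(Boolean.TRUE);                // only instances without a tenant id
        return request;
    }
}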
|
|
/*
* Copyright 2014 Guidewire Software, Inc.
*/
package gw.internal.gosu.parser;
import gw.config.BaseService;
import gw.config.CommonServices;
import gw.fs.IDirectory;
import gw.lang.parser.IAttributeSource;
import gw.lang.parser.IParseIssue;
import gw.lang.parser.ITypeUsesMap;
import gw.lang.parser.ILanguageLevel;
import gw.lang.parser.exceptions.IncompatibleTypeException;
import gw.lang.parser.exceptions.ParseIssue;
import gw.lang.reflect.IEntityAccess;
import gw.lang.reflect.IGosuClassLoadingObserver;
import gw.lang.reflect.IPropertyInfo;
import gw.lang.reflect.IType;
import gw.lang.reflect.ITypeLoader;
import gw.lang.reflect.AbstractTypeSystemListener;
import gw.lang.reflect.RefreshRequest;
import gw.lang.reflect.java.IJavaType;
import gw.lang.reflect.TypeSystem;
import gw.lang.reflect.gs.GosuClassTypeLoader;
import gw.lang.reflect.gs.ICompilableType;
import gw.lang.reflect.module.IModule;
import gw.util.ILogger;
import gw.util.SystemOutLogger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
/**
*/
public class DefaultEntityAccess extends BaseService implements IEntityAccess
{
public static final SystemOutLogger DEFAULT_LOGGER = new SystemOutLogger(SystemOutLogger.LoggingLevel.WARN);
private static DefaultEntityAccess g_instance;
private static final ITypeUsesMap EMPTY_TYPE_USES = new TypeUsesMap( Collections.<String>emptyList() ).lock();
private List<IGosuClassLoadingObserver> _classLoadingObservers;
public static DefaultEntityAccess instance()
{
return g_instance == null ? g_instance = new DefaultEntityAccess() : g_instance;
}
private Map _scopes = new HashMap();
/**
*/
public DefaultEntityAccess()
{
_classLoadingObservers = Collections.synchronizedList(new ArrayList<IGosuClassLoadingObserver>());
}
public ITypeLoader getDefaultTypeLoader()
{
return DefaultTypeLoader.instance();
}
@Override
public ITypeUsesMap getDefaultTypeUses()
{
return EMPTY_TYPE_USES;
}
/**
   * @return {@code false}; the default implementation does not recognize any object as a domain instance
*/
@Override
public boolean isDomainInstance( Object value )
{
return false;
}
/**
   * @return {@code false}; the default implementation does not treat any type as an entity class
*/
@Override
public boolean isEntityClass( IType cls )
{
return false;
}
/**
   * @return {@code null}; the default implementation provides no entity instances
*/
@Override
public Object getEntityInstanceFrom( Object entity, IType classEntity )
{
return null;
}
/**
   * @return {@code true} if {@code bean1.equals( bean2 )}
*/
@Override
public boolean areBeansEqual( Object bean1, Object bean2 )
{
return bean1.equals( bean2 );
}
/**
* @param type
* @param value
   * @return {@code true} if the value's type is compatible with the given symbol type
* @throws RuntimeException
*/
@Override
public boolean verifyValueForType( IType type, Object value ) throws RuntimeException
{
try
{
IType valueType = TypeLoaderAccess.instance().getIntrinsicTypeFromObject(value);
CommonServices.getCoercionManager().verifyTypesComparable( type, valueType, false );
}
catch( ParseIssue pe )
{
throw new IncompatibleTypeException( "Value of type: " + TypeLoaderAccess.instance().getIntrinsicTypeFromObject( value ).getName() +
" is not compatible with symbol type: " + type.getName() );
}
return true;
}
@Override
public String makeStringFrom( Object obj )
{
if( obj == null )
{
return null;
}
return obj.toString();
}
/**
   * @return {@code -1}; the default implementation does not support hashed entity ids
*/
@Override
public long getHashedEntityId( String strId, IType classEntity )
{
return -1;
}
/**
   * @return {@code false}; the default implementation does not mark any type as internal
*/
@Override
public boolean isInternal( IType cls )
{
return false;
}
@Override
public ILogger getLogger()
{
return DEFAULT_LOGGER;
}
@Override
public Locale getLocale()
{
return Locale.getDefault();
}
@Override
public Date getCurrentTime()
{
return new Date();
}
@Override
public TimeZone getTimeZone() {
return TimeZone.getDefault();
}
@Override
public void addEnhancementMethods(IType typeToEnhance, Collection methodsToAddTo)
{
IModule module = TypeSystem.getCurrentModule();
addEnhancementMethods(typeToEnhance, methodsToAddTo, module, new HashSet<IModule>());
}
private void addEnhancementMethods(IType typeToEnhance, Collection methodsToAddTo, IModule module, Set<IModule> visited)
{
if(visited.contains(module))
{
return;
}
visited.add(module);
if( GosuClassTypeLoader.getDefaultClassLoader(module) != null )
{
GosuClassTypeLoader.getDefaultClassLoader(module).getEnhancementIndex().addEnhancementMethods( typeToEnhance, methodsToAddTo);
}
for(IModule dep : module.getModuleTraversalList())
{
addEnhancementMethods(typeToEnhance, methodsToAddTo, dep, visited);
}
}
@Override
public void addEnhancementProperties(IType typeToEnhance, Map propertyInfosToAddTo, boolean caseSensitive)
{
IModule module = TypeSystem.getCurrentModule();
addEnhancementProperties(typeToEnhance, propertyInfosToAddTo, caseSensitive, module, new HashSet<IModule>());
}
private void addEnhancementProperties(IType typeToEnhance, Map propertyInfosToAddTo, boolean caseSensitive, IModule module, Set<IModule> visited)
{
if(visited.contains(module))
{
return;
}
visited.add(module);
if( GosuClassTypeLoader.getDefaultClassLoader(module) != null )
{
GosuClassTypeLoader.getDefaultClassLoader(module).getEnhancementIndex().addEnhancementProperties( typeToEnhance, propertyInfosToAddTo, caseSensitive);
}
for(IModule dep : module.getModuleTraversalList())
{
addEnhancementProperties(typeToEnhance, propertyInfosToAddTo, caseSensitive, dep, visited);
}
}
@Override
public ClassLoader getPluginClassLoader()
{
return DefaultEntityAccess.class.getClassLoader();
}
@Override
public StringBuilder getPluginRepositories()
{
return new StringBuilder();
}
@Override
public String getWebServerPaths()
{
return "";
}
@Override
public boolean isUnreachableCodeDetectionOn()
{
return true;
}
@Override
public IType getKeyType()
{
return null;
}
@Override
public IPropertyInfo getEntityIdProperty( IType rootType )
{
return null;
}
@Override
public boolean shouldAddWarning( IType type, IParseIssue warning )
{
return true;
}
@Override
public ILanguageLevel getLanguageLevel()
{
return new StandardLanguageLevel();
}
@Override
public List<IGosuClassLoadingObserver> getGosuClassLoadingObservers() {
return _classLoadingObservers;
}
@Override
public boolean areUsesStatementsAllowedInStatementLists(ICompilableType gosuClass) {
return false;
}
@Override
public List<IDirectory> getAdditionalSourceRoots() {
return Collections.EMPTY_LIST;
}
@Override
public void reloadedTypes(String[] types) {
//nothing to do
}
@Override
public ExtendedTypeDataFactory getExtendedTypeDataFactory(String typeName) {
return null;
}
@Override
public String getLocalizedTypeName(IType type) {
return type.getName();
}
@Override
public String getLocalizedTypeInfoName(IType type) {
String result;
if (type instanceof IJavaType) {
result = ((IJavaType) type).getBackingClassInfo().getDisplayName();
} else {
result = getLocalizedTypeName(type);
}
return result;
}
}
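/*
 * Illustrative sketch (not part of the original sources): the traversal pattern
 * used by addEnhancementMethods/addEnhancementProperties above, factored into a
 * generic "visit each module exactly once" walk. Starting from a module it
 * follows getModuleTraversalList() recursively, and the visited set guarantees
 * that a module reachable over several dependency paths is processed only once.
 * The ModuleVisitor interface is an assumption made for this example.
 */
class ModuleWalkSketch
{
  interface ModuleVisitor
  {
    void visit( IModule module );
  }
  static void walk( IModule module, ModuleVisitor visitor, Set<IModule> visited )
  {
    if( visited.contains( module ) )
    {
      return; // already processed via another dependency path
    }
    visited.add( module );
    visitor.visit( module );
    for( IModule dep : module.getModuleTraversalList() )
    {
      walk( dep, visitor, visited );
    }
  }
  static void walkFromCurrentModule( ModuleVisitor visitor )
  {
    walk( TypeSystem.getCurrentModule(), visitor, new HashSet<IModule>() );
  }
}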
|
|
package com.gentics.mesh.core.branch;
import static com.gentics.mesh.core.rest.job.JobStatus.COMPLETED;
import static com.gentics.mesh.core.rest.job.JobStatus.FAILED;
import static com.gentics.mesh.test.ClientHelper.call;
import static com.gentics.mesh.test.TestDataProvider.PROJECT_NAME;
import static com.gentics.mesh.test.TestSize.FULL;
import static org.assertj.core.api.Assertions.assertThat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.stream.Collectors;
import org.junit.Before;
import org.junit.Test;
import com.gentics.mesh.FieldUtil;
import com.gentics.mesh.core.data.branch.HibBranch;
import com.gentics.mesh.core.data.dao.NodeDao;
import com.gentics.mesh.core.data.node.HibNode;
import com.gentics.mesh.core.data.project.HibProject;
import com.gentics.mesh.core.db.Tx;
import com.gentics.mesh.core.rest.branch.BranchCreateRequest;
import com.gentics.mesh.core.rest.common.ContainerType;
import com.gentics.mesh.core.rest.job.JobListResponse;
import com.gentics.mesh.core.rest.job.JobStatus;
import com.gentics.mesh.core.rest.node.NodeCreateRequest;
import com.gentics.mesh.core.rest.node.NodeResponse;
import com.gentics.mesh.core.rest.schema.impl.SchemaCreateRequest;
import com.gentics.mesh.core.rest.schema.impl.SchemaReferenceImpl;
import com.gentics.mesh.core.rest.schema.impl.SchemaResponse;
import com.gentics.mesh.event.EventQueueBatch;
import com.gentics.mesh.parameter.impl.PublishParametersImpl;
import com.gentics.mesh.test.MeshTestSetting;
import com.gentics.mesh.test.context.AbstractMeshTest;
import io.vertx.core.DeploymentOptions;
@MeshTestSetting(testSize = FULL, startServer = true)
public class BranchMigrationEndpointTest extends AbstractMeshTest {
@Before
public void setupVerticleTest() throws Exception {
DeploymentOptions options = new DeploymentOptions();
options.setWorker(true);
vertx().deployVerticle(meshDagger().jobWorkerVerticle(), options);
grantAdmin();
}
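// Takes the base node offline, publishes a few folders, creates a new branch and verifies
// that draft/initial (and, where applicable, published) containers plus parent relations
// are migrated into the new branch.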
@Test
public void testStartBranchMigration() throws Throwable {
EventQueueBatch batch = createBatch();
HibBranch newBranch;
List<? extends HibNode> nodes;
List<? extends HibNode> published;
HibProject project = project();
try (Tx tx = tx()) {
assertThat(project.getInitialBranch().isMigrated()).as("Initial branch migration status").isTrue();
}
call(() -> client().takeNodeOffline(PROJECT_NAME, tx(() -> project().getBaseNode().getUuid()),
new PublishParametersImpl().setRecursive(true)));
published = Arrays.asList(folder("news"), folder("2015"), folder("2014"), folder("march"));
try (Tx tx = tx()) {
NodeDao nodeDao = tx.nodeDao();
nodes = nodeDao.findAll(project).stream().filter(node -> nodeDao.getParentNode(node, project.getLatestBranch().getUuid()) != null)
.collect(Collectors.toList());
assertThat(nodes).as("Nodes list").isNotEmpty();
}
// publish some nodes
published.forEach(node -> {
call(() -> client().publishNode(PROJECT_NAME, tx(() -> node.getUuid())));
});
try (Tx tx = tx()) {
newBranch = tx.branchDao().create(project, "newbranch", user(), batch);
assertThat(newBranch.isMigrated()).as("Branch migration status").isFalse();
tx.success();
}
nodes.forEach(node -> {
Arrays.asList(ContainerType.INITIAL, ContainerType.DRAFT, ContainerType.PUBLISHED).forEach(type -> {
assertThat(tx(() -> boot().contentDao().getFieldContainers(node, newBranch, type).list()))
.as(type + " Field Containers before Migration").isNotNull()
.isEmpty();
});
});
triggerAndWaitForJob(requestBranchMigration(newBranch));
try (Tx tx = tx()) {
NodeDao nodeDao = tx.nodeDao();
HibBranch newBranchReloaded = reloadBranch(newBranch);
assertThat(newBranchReloaded.isMigrated()).as("Branch migration status").isTrue();
nodes.forEach(node -> {
Arrays.asList(ContainerType.INITIAL, ContainerType.DRAFT).forEach(type -> {
assertThat(boot().contentDao().getFieldContainers(node, newBranchReloaded, type)).as(type + " Field Containers after Migration")
.isNotNull()
.isNotEmpty();
});
if (published.contains(node)) {
assertThat(boot().contentDao().getFieldContainers(node, newBranchReloaded, ContainerType.PUBLISHED))
.as("Published field containers after migration")
.isNotNull().isNotEmpty();
} else {
assertThat(boot().contentDao().getFieldContainers(node, newBranchReloaded, ContainerType.PUBLISHED))
.as("Published field containers after migration")
.isNotNull().isEmpty();
}
HibNode initialParent = nodeDao.getParentNode(node, initialBranchUuid());
if (initialParent == null) {
assertThat(nodeDao.getParentNode(node, newBranchReloaded.getUuid())).as("Parent in new branch").isNull();
} else {
assertThat(nodeDao.getParentNode(node, newBranchReloaded.getUuid())).as("Parent in new branch").isNotNull()
.isEqualToComparingOnlyGivenFields(initialParent, "uuid");
}
// TODO assert tags
});
}
}
@Test
public void testStartForInitial() throws Throwable {
try (Tx tx = tx()) {
triggerAndWaitForJob(requestBranchMigration(initialBranch()), FAILED);
}
}
@Test
public void testStartAgain() throws Throwable {
EventQueueBatch batch = createBatch();
HibBranch newBranch = tx(tx -> {
return tx.branchDao().create(project(), "newbranch", user(), batch);
});
String jobUuidA = requestBranchMigration(newBranch);
triggerAndWaitForJob(jobUuidA, COMPLETED);
// The second job should fail because the branch has already been migrated.
String jobUuidB = requestBranchMigration(newBranch);
JobListResponse response = triggerAndWaitForJob(jobUuidB, FAILED);
List<JobStatus> status = response.getData().stream().map(e -> e.getStatus()).collect(Collectors.toList());
assertThat(status).contains(COMPLETED, FAILED);
}
@Test
public void testStartOrder() throws Throwable {
HibProject project = project();
EventQueueBatch batch = createBatch();
HibBranch newBranch = tx(tx -> {
return tx.branchDao().create(project, "newbranch", user(), batch);
});
HibBranch newestBranch = tx(tx -> {
return tx.branchDao().create(project, "newestbranch", user(), batch);
});
try (Tx tx = tx()) {
triggerAndWaitForJob(requestBranchMigration(newestBranch), FAILED);
JobListResponse response = triggerAndWaitForJob(requestBranchMigration(newBranch), COMPLETED);
List<JobStatus> status = response.getData().stream().map(e -> e.getStatus()).collect(Collectors.toList());
assertThat(status).contains(FAILED, COMPLETED);
response = triggerAndWaitForJob(requestBranchMigration(newestBranch), COMPLETED);
status = response.getData().stream().map(e -> e.getStatus()).collect(Collectors.toList());
assertThat(status).contains(FAILED, COMPLETED, COMPLETED);
}
}
@Test
public void testMigrateNodesWithoutSegment() {
String baseNodeUuid = tx(() -> project().getBaseNode().getUuid());
SchemaCreateRequest request = new SchemaCreateRequest();
request.setName("dummyData");
request.addField(FieldUtil.createStringFieldSchema("test"));
SchemaResponse response = call(() -> client().createSchema(request));
String schemaUuid = response.getUuid();
call(() -> client().assignSchemaToProject(PROJECT_NAME, schemaUuid));
for (int i = 0; i < 2; i++) {
NodeCreateRequest nodeCreateRequest = new NodeCreateRequest();
nodeCreateRequest.setSchemaName("dummyData");
nodeCreateRequest.setLanguage("en");
nodeCreateRequest.setParentNodeUuid(baseNodeUuid);
NodeResponse node = call(() -> client().createNode(PROJECT_NAME, nodeCreateRequest));
call(() -> client().publishNode(PROJECT_NAME, node.getUuid()));
}
grantAdmin();
waitForJobs(() -> {
call(() -> client().createBranch(PROJECT_NAME, new BranchCreateRequest().setName("branch1")));
}, COMPLETED, 1);
}
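// Creates a large number of folder nodes (submitted through an executor) and verifies that
// the migration job for a newly created branch still completes.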
@Test
public void testBigData() throws Throwable {
EventQueueBatch batch = createBatch();
String baseNodeUuid = tx(() -> project().getBaseNode().getUuid());
createNode(baseNodeUuid);
HibBranch newBranch;
try (Tx tx = tx()) {
int numThreads = 1;
int numFolders = 1000;
ExecutorService service = Executors.newFixedThreadPool(numThreads);
List<Future<Boolean>> futures = new ArrayList<>();
for (int i = 0; i < numFolders; i++) {
futures.add(service.submit(() -> {
createNode(baseNodeUuid);
return true;
}));
}
for (Future<Boolean> future : futures) {
future.get();
}
newBranch = tx.branchDao().create(project(), "newbranch", user(), batch);
tx.success();
}
String jobUuid = requestBranchMigration(newBranch);
triggerAndWaitForJob(jobUuid);
}
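// Helper that creates a single English "folder" node beneath the given parent node.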
private void createNode(String baseNodeUuid) {
NodeCreateRequest create = new NodeCreateRequest();
create.setLanguage("en");
create.setSchema(new SchemaReferenceImpl().setName("folder"));
create.setParentNodeUuid(baseNodeUuid);
call(() -> client().createNode(PROJECT_NAME, create));
}
/**
 * Enqueue a branch migration job for the given branch and return its UUID.
 *
 * @param branch branch to migrate
 * @return UUID of the enqueued branch migration job
 */
protected String requestBranchMigration(HibBranch branch) {
return tx(tx -> {
return tx.jobDao().enqueueBranchMigration(user(), branch).getUuid();
});
}
}
|
|
/*
* Copyright (C) 2010-2012 The Async HBase Authors. All rights reserved.
* This file is part of Async HBase.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* - Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* - Neither the name of the StumbleUpon nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package org.hbase.async;
import java.util.ArrayList;
import java.util.HashMap;
import org.hbase.async.AtomicIncrementRequest;
import org.hbase.async.DeleteRequest;
import org.hbase.async.GetRequest;
import org.hbase.async.HBaseClient;
import org.hbase.async.KeyValue;
import org.hbase.async.PutRequest;
import org.hbase.async.RowLock;
import org.hbase.async.RowLockRequest;
import org.hbase.async.Scanner;
import org.slf4j.Logger;
import org.hbase.async.Common;
import org.junit.Ignore;
/**
* Simple command-line interface to quickly test async HBase.
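 * <p>Example invocation (placeholder values): {@code Test localhost get mytable somekey}</p>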
*/
@Ignore // ignore for test runners
final class Test {
private static final Logger LOG = Common.logger(Test.class);
private static final HashMap<String, Cmd> commands;
static {
commands = new HashMap<String, Cmd>();
commands.put("icv", new icv());
commands.put("scan", new scan());
commands.put("mscan", new mscan());
final get get = new get(); // get get get!!
commands.put("get", get);
commands.put("lget", get);
final put put = new put(); // put put put!!
commands.put("put", put);
commands.put("lput", put);
final delete delete = new delete(); // delete delete delete!!
commands.put("delete", delete);
commands.put("ldelete", delete);
final cas cas = new cas(); // cas cas cas!!
commands.put("cas", cas);
commands.put("lcas", cas);
}
private static void printUsage() {
System.err.println("Usage: " + Test.class.getSimpleName()
+ " <zk quorum> <cmd> <table> [args]\n"
+ "Available commands:\n"
+ " get <table> <key> [family] [qualifiers ...]\n"
+ " icv <table> <key> <family> <qualifier> [amount]\n"
+ " put <table> <key> <family> <qualifier> <value>\n"
+ " delete <table> <key> [<family> [<qualifier>]]\n"
+ " scan <table> [start] [family] [qualifier] [stop] [regexp]\n"
+ " mscan <table> [start] [family[:qualifier[,qualifier]]/[family...]] [stop] [regexp]\n"
+ " cas <table> <key> <family> <qualifier> <expected> <value>\n"
+ "Variants that acquire an explicit row-lock:\n"
+ " lget <table> <key> [family] [qualifiers ...]\n"
+ " lput <table> <key> <family> <qualifier> <value>\n"
+ " ldelete <table> <key> <family> <qualifier>\n"
+ " lcas <table> <key> <family> <qualifier> <expected> <value>\n"
);
}
private static void fatalUsage(final String msg, final int rv) {
System.err.println(msg);
printUsage();
System.exit(rv);
}
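// Validates the argument count; a max that is not greater than min (e.g. -1) disables the
// upper-bound check.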
private static void ensureArguments(final String[] args,
final int min,
final int max) {
if (args.length < min) {
fatalUsage("Not enough arguments, need at least " + min, 1);
} else if (args.length > max && max > min) {
fatalUsage("Too many arguments, need at most " + max, 1);
}
}
public static void main(final String[] args) throws Exception {
ensureArguments(args, 3, -1);
final Cmd cmd = commands.get(args[1]);
if (cmd == null) {
fatalUsage("Unknown command: " + args[1], 2);
}
final HBaseClient client = new HBaseClient(args[0]);
try {
cmd.execute(client, args);
} catch (Exception e) {
LOG.error("Unexpected exception caught in main", e);
}
System.out.println("Starting shutdown...");
LOG.debug("Shutdown returned " + client.shutdown().joinUninterruptibly());
System.out.println("Exiting...");
}
private static interface Cmd {
void execute(HBaseClient client, String[] args) throws Exception;
}
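// One Cmd implementation per command registered in the static block above; the "l"-prefixed
// variants (lget, lput, ldelete, lcas) first acquire an explicit row lock.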
private static final class get implements Cmd {
public void execute(final HBaseClient client, String[] args) throws Exception {
ensureArguments(args, 4, 64);
final GetRequest get = new GetRequest(args[2], args[3]);
if (args.length > 4) {
get.family(args[4]);
}
if (args.length > 5) {
if (args.length == 6) {
get.qualifier(args[5]);
} else {
final byte[][] qualifiers = new byte[args.length - 5][];
for (int i = 5; i < args.length; i++) {
qualifiers[i - 5] = args[i].getBytes();
}
get.qualifiers(qualifiers);
}
}
RowLock lock = null;
if (args[1].charAt(0) == 'l') { // locked version of the command
final RowLockRequest rlr = new RowLockRequest(args[2], args[3]);
lock = client.lockRow(rlr).joinUninterruptibly();
LOG.info("Acquired explicit row lock: " + lock);
}
args = null;
try {
final ArrayList<KeyValue> result = client.get(get).joinUninterruptibly();
LOG.info("Get result=" + result);
} catch (Exception e) {
LOG.error("Get failed", e);
} finally {
if (lock != null) {
client.unlockRow(lock).joinUninterruptibly();
LOG.info("Released explicit row lock: " + lock);
}
}
}
}
private static final class icv implements Cmd {
public void execute(final HBaseClient client, String[] args) {
ensureArguments(args, 6, 7);
final AtomicIncrementRequest icv =
new AtomicIncrementRequest(args[2], args[3], args[4], args[5]);
if (args.length > 6) {
icv.setAmount(Long.parseLong(args[6]));
}
args = null;
try {
final long result = client.atomicIncrement(icv).joinUninterruptibly();
LOG.info("ICV result=" + result);
} catch (Exception e) {
LOG.error("ICV failed", e);
}
}
}
private static final class put implements Cmd {
public void execute(final HBaseClient client, String[] args) throws Exception {
ensureArguments(args, 7, 7);
RowLock lock = null;
if (args[1].charAt(0) == 'l') { // locked version of the command
final RowLockRequest rlr = new RowLockRequest(args[2], args[3]);
lock = client.lockRow(rlr).joinUninterruptibly();
LOG.info("Acquired explicit row lock: " + lock);
}
final PutRequest put = lock == null
? new PutRequest(args[2], args[3], args[4], args[5], args[6])
: new PutRequest(args[2], args[3], args[4], args[5], args[6], lock);
args = null;
try {
final Object result = client.put(put).joinUninterruptibly();
LOG.info("Put result=" + result);
} catch (Exception e) {
LOG.error("Put failed", e);
} finally {
if (lock != null) {
client.unlockRow(lock).joinUninterruptibly();
LOG.info("Released explicit row lock: " + lock);
}
}
}
}
private static final class delete implements Cmd {
public void execute(final HBaseClient client, String[] args) throws Exception {
ensureArguments(args, 4, 6);
RowLock lock = null;
if (args[1].charAt(0) == 'l') { // locked version of the command
ensureArguments(args, 6, 6);
final RowLockRequest rlr = new RowLockRequest(args[2], args[3]);
lock = client.lockRow(rlr).joinUninterruptibly();
LOG.info("Acquired explicit row lock: " + lock);
}
final DeleteRequest delete;
if (lock == null) {
switch (args.length) {
case 4: delete = new DeleteRequest(args[2], args[3]); break;
case 5: delete = new DeleteRequest(args[2], args[3], args[4]); break;
case 6: delete = new DeleteRequest(args[2], args[3], args[4], args[5]); break;
default: throw new AssertionError("Should never be here");
}
} else {
delete = new DeleteRequest(args[2], args[3], args[4], args[5], lock);
}
args = null;
try {
final Object result = client.delete(delete).joinUninterruptibly();
LOG.info("Delete result=" + result);
} catch (Exception e) {
LOG.error("Delete failed", e);
} finally {
if (lock != null) {
client.unlockRow(lock).joinUninterruptibly();
LOG.info("Released explicit row lock: " + lock);
}
}
}
}
private static final class scan implements Cmd {
@SuppressWarnings("fallthrough")
public void execute(final HBaseClient client, String[] args) {
ensureArguments(args, 3, 8);
final Scanner scanner = client.newScanner(args[2]);
switch (args.length) {
case 8: scanner.setKeyRegexp(args[7]);
case 7: scanner.setStopKey(args[6]);
case 6: scanner.setQualifier(args[5]);
case 5: scanner.setFamily(args[4]);
case 4: scanner.setStartKey(args[3]);
}
args = null;
LOG.info("Start scanner=" + scanner);
try {
ArrayList<ArrayList<KeyValue>> rows;
while ((rows = scanner.nextRows().joinUninterruptibly()) != null) {
LOG.info("scanned results=" + rows + " from " + scanner);
}
} catch (Exception e) {
LOG.error("Scan failed", e);
}
}
}
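// Scans several column families at once; the column spec in args[4] follows the
// family[:qualifier[,qualifier]]/family... form shown in the usage text.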
private static final class mscan implements Cmd {
@SuppressWarnings("fallthrough")
public void execute(final HBaseClient client, String[] args) {
ensureArguments(args, 3, 7);
final Scanner scanner = client.newScanner(args[2]);
switch (args.length) {
case 7: scanner.setKeyRegexp(args[6]);
case 6: scanner.setStopKey(args[5]);
case 5:
final String columns = args[4];
final ArrayList<byte[]> families = new ArrayList<byte[]>();
final ArrayList<byte[][]> qualifiers = new ArrayList<byte[][]>();
for (String spec : columns.split("/")) {
final String[] family = spec.split(":");
families.add(family[0].getBytes());
if (family.length == 1) {
qualifiers.add(null);
} else {
final String[] quals = family[1].split(",");
final byte[][] qb = new byte[quals.length][];
for (int i = 0; i < qb.length; i++) {
qb[i] = quals[i].getBytes();
}
qualifiers.add(qb);
}
}
scanner.setFamilies(families.toArray(new byte[families.size()][]),
qualifiers.toArray(new byte[qualifiers.size()][][]));
case 4: scanner.setStartKey(args[3]);
}
LOG.info("Start scanner=" + scanner);
try {
ArrayList<ArrayList<KeyValue>> rows;
while ((rows = scanner.nextRows().joinUninterruptibly()) != null) {
LOG.info("scanned results=" + rows + " from " + scanner);
}
} catch (Exception e) {
LOG.error("Scan failed", e);
}
}
}
private static final class cas implements Cmd {
public void execute(final HBaseClient client, String[] args) throws Exception {
ensureArguments(args, 8, 8);
RowLock lock = null;
if (args[1].charAt(0) == 'l') { // locked version of the command
final RowLockRequest rlr = new RowLockRequest(args[2], args[3]);
lock = client.lockRow(rlr).joinUninterruptibly();
LOG.info("Acquired explicit row lock: " + lock);
}
final PutRequest put = lock == null
? new PutRequest(args[2], args[3], args[4], args[5], args[7])
: new PutRequest(args[2], args[3], args[4], args[5], args[7], lock);
final String expected = args[6];
args = null;
try {
final boolean ok = client.compareAndSet(put, expected).joinUninterruptibly();
LOG.info("CAS "
+ (ok ? "succeeded" : "failed: value wasn't " + expected));
} catch (Exception e) {
LOG.error("CAS failed", e);
} finally {
if (lock != null) {
client.unlockRow(lock).joinUninterruptibly();
LOG.info("Released explicit row lock: " + lock);
}
}
}
}
}
|