gt
stringclasses 1
value | context
stringlengths 2.05k
161k
|
---|---|
// Copyright (c) 2003-present, Jodd Team (http://jodd.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package jodd.upload.impl;
import jodd.core.JoddCore;
import jodd.io.FastByteArrayOutputStream;
import jodd.io.FileUtil;
import jodd.io.FileNameUtil;
import jodd.io.StreamUtil;
import jodd.upload.FileUpload;
import jodd.upload.MultipartRequestInputStream;
import java.io.File;
import java.io.IOException;
import java.io.FileOutputStream;
import java.io.BufferedOutputStream;
import java.io.InputStream;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.FileInputStream;
/**
* Smart {@link FileUpload} implementation that defer the action of what to do with uploaded file
* for later. Internally, it stores uploaded file either in memory if it is small, or, in all
* other cases, it stores them in TEMP folder.
*/
public class AdaptiveFileUpload extends FileUpload {

	protected static final String TMP_FILE_SUFFIX = ".upload.tmp";

	protected final int memoryThreshold;
	protected final File uploadPath;
	protected final boolean breakOnError;
	protected final String[] fileExtensions;
	protected final boolean allowFileExtensions;

	AdaptiveFileUpload(MultipartRequestInputStream input, int memoryThreshold, File uploadPath, int maxFileSize, boolean breakOnError, String[] extensions, boolean allowed) {
		super(input, maxFileSize);
		this.memoryThreshold = memoryThreshold;
		this.uploadPath = uploadPath;
		this.breakOnError = breakOnError;
		this.fileExtensions = extensions;
		this.allowFileExtensions = allowed;
	}

	// ---------------------------------------------------------------- settings

	/**
	 * Returns the maximum number of bytes kept in memory before spooling to disk.
	 */
	public int getMemoryThreshold() {
		return memoryThreshold;
	}

	/**
	 * Returns the folder where temp files are created, or {@code null} for the default.
	 */
	public File getUploadPath() {
		return uploadPath;
	}

	/**
	 * Returns {@code true} if a rejected upload throws an {@link IOException}
	 * instead of being silently skipped.
	 */
	public boolean isBreakOnError() {
		return breakOnError;
	}

	/**
	 * Returns the extension list used for filtering, or {@code null} when no filtering is done.
	 */
	public String[] getFileExtensions() {
		return fileExtensions;
	}

	/**
	 * Returns {@code true} when {@link #getFileExtensions()} is a whitelist,
	 * {@code false} when it is a blacklist.
	 */
	public boolean isAllowFileExtensions() {
		return allowFileExtensions;
	}

	// ---------------------------------------------------------------- properties

	protected File tempFile;	// non-null when upload was spooled to disk
	protected byte[] data;		// non-null when upload is held in memory

	/**
	 * Returns <code>true</code> if file upload resides in memory.
	 */
	@Override
	public boolean isInMemory() {
		return data != null;
	}

	// ---------------------------------------------------------------- process

	/**
	 * Matches the uploaded file name extension against {@link #fileExtensions}.
	 * Returns {@code false} (after skipping the upload body) when the extension
	 * is rejected; throws {@link IOException} instead when {@link #breakOnError} is set.
	 */
	protected boolean matchFileExtension() throws IOException {
		String fileNameExtension = FileNameUtil.getExtension(getHeader().getFileName());
		for (String fileExtension : fileExtensions) {
			if (fileNameExtension.equalsIgnoreCase(fileExtension)) {
				if (!allowFileExtensions) {	// extension matched and it is not allowed
					if (breakOnError) {
						throw new IOException("Upload filename extension not allowed: " + fileNameExtension);
					}
					size = input.skipToBoundary();
					return false;
				}
				return true;	// extension matched and it is allowed.
			}
		}
		if (allowFileExtensions) {	// extension is not one of the allowed ones.
			if (breakOnError) {
				throw new IOException("Upload filename extension not allowed: " + fileNameExtension);
			}
			size = input.skipToBoundary();
			return false;
		}
		return true;
	}

	/**
	 * Determines if upload is allowed.
	 */
	protected boolean checkUpload() throws IOException {
		if (fileExtensions != null) {
			if (!matchFileExtension()) {
				return false;
			}
		}
		return true;
	}

	/**
	 * Reads the upload from the multipart input stream. Content up to
	 * {@link #memoryThreshold} bytes is kept in memory; anything larger is
	 * spooled to a temp file in {@link #uploadPath}. Enforces {@code maxFileSize}
	 * and removes the temp file when the upload turns out to be too big.
	 */
	@Override
	protected void processStream() throws IOException {
		if (!checkUpload()) {
			return;
		}
		size = 0;
		if (memoryThreshold > 0) {
			// read one byte more than the threshold so we can detect overflow
			FastByteArrayOutputStream fbaos = new FastByteArrayOutputStream(memoryThreshold + 1);
			int written = input.copyMax(fbaos, memoryThreshold + 1);
			data = fbaos.toByteArray();
			if (written <= memoryThreshold) {
				// upload fits in memory, we are done
				size = data.length;
				valid = true;
				return;
			}
		}

		// upload is larger than the memory threshold: spool to a temp file
		tempFile = FileUtil.createTempFile(JoddCore.tempFilePrefix, TMP_FILE_SUFFIX, uploadPath);
		BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(tempFile));
		boolean deleteTempFile = false;
		try {
			// flush already-buffered memory content first; this write used to
			// happen outside the try block, leaking the stream on failure
			if (data != null) {
				size = data.length;
				out.write(data);
				data = null;	// not needed anymore
			}
			if (maxFileSize == -1) {
				size += input.copyAll(out);
			} else {
				size += input.copyMax(out, maxFileSize - size + 1);	// one more byte to detect larger files
				if (size > maxFileSize) {
					deleteTempFile = true;
					fileTooBig = true;
					valid = false;
					if (breakOnError) {
						throw new IOException("File upload (" + header.getFileName() + ") too big, > " + maxFileSize);
					}
					input.skipToBoundary();
					return;
				}
			}
			valid = true;
		} finally {
			StreamUtil.close(out);
			if (deleteTempFile) {
				tempFile.delete();
				tempFile = null;
			}
		}
	}

	// ---------------------------------------------------------------- operations

	/**
	 * Deletes file uploaded item from disk or memory.
	 */
	public void delete() {
		if (tempFile != null) {
			tempFile.delete();
		}
		data = null;
	}

	/**
	 * Writes file uploaded item.
	 */
	public File write(String destination) throws IOException {
		return write(new File(destination));
	}

	/**
	 * Writes file upload item to destination folder or to destination file.
	 * Returns the destination file.
	 */
	public File write(File destination) throws IOException {
		if (destination.isDirectory()) {
			destination = new File(destination, this.header.getFileName());
		}
		if (data != null) {
			FileUtil.writeBytes(destination, data);
		} else {
			if (tempFile != null) {
				FileUtil.move(tempFile, destination);
			}
		}
		return destination;
	}

	/**
	 * Returns the content of file upload item, or {@code null} when neither
	 * memory nor disk storage holds any data.
	 */
	@Override
	public byte[] getFileContent() throws IOException {
		if (data != null) {
			return data;
		}
		if (tempFile != null) {
			return FileUtil.readBytes(tempFile);
		}
		return null;
	}

	/**
	 * Returns a buffered input stream over the uploaded content, or {@code null}
	 * when no content is stored.
	 */
	@Override
	public InputStream getFileInputStream() throws IOException {
		if (data != null) {
			return new BufferedInputStream(new ByteArrayInputStream(data));
		}
		if (tempFile != null) {
			return new BufferedInputStream(new FileInputStream(tempFile));
		}
		return null;
	}
}
|
|
package com.whatdoyouwanttodo.config;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.DialogInterface;
import android.content.res.Resources;
import android.graphics.Color;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;
import android.util.TypedValue;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TableLayout;
import android.widget.TableRow;
import android.widget.TextView;
import com.whatdoyouwanttodo.R;
import com.whatdoyouwanttodo.application.Cell;
import com.whatdoyouwanttodo.application.Chessboard;
import com.whatdoyouwanttodo.settings.Constants;
import com.whatdoyouwanttodo.utils.ArrayUtils;
import com.whatdoyouwanttodo.utils.ImageLoader;
/**
* Mostra un insieme di tabelle, usato da AllGridViewActivity
*/
public class AllGridViewFragment extends Fragment {

	public static final String CHESSBOARD_ARRAY = "com.whatdoyouwanttodo.config.AllGridViewFragment.CHESSBOARD_ARRAY";
	public static final String CELL_ARRAY = "com.whatdoyouwanttodo.config.AllGridViewFragment.CELL_ARRAY";
	public static final String WITH_LINKS = "com.whatdoyouwanttodo.config.AllGridViewFragment.WITH_LINKS";

	/**
	 * Factory method: creates the fragment and packs chessboards, cells and the
	 * with-links flag into its argument bundle.
	 */
	public static AllGridViewFragment newAllGridViewFragment(Chessboard[] chessboards, Cell[] cells, boolean withLinks) {
		AllGridViewFragment fragment = new AllGridViewFragment();
		Bundle arguments = new Bundle();
		arguments.putParcelableArray(CHESSBOARD_ARRAY, chessboards);
		arguments.putParcelableArray(CELL_ARRAY, cells);
		arguments.putBoolean(WITH_LINKS, withLinks);
		fragment.setArguments(arguments);
		return fragment;
	}

	/**
	 * Builds a two-column table of chessboard thumbnails. When {@code WITH_LINKS}
	 * is set, an extra "new chessboard" entry is appended and every entry is
	 * clickable, finishing the activity with the selected chessboard id.
	 */
	@Override
	public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
		// get params
		Bundle arguments = getArguments();
		Chessboard[] chessboards = (Chessboard[]) arguments.getParcelableArray(CHESSBOARD_ARRAY);
		Cell[] cells = (Cell[]) arguments.getParcelableArray(CELL_ARRAY);
		boolean withLinks = arguments.getBoolean(WITH_LINKS);

		// cells are scanned sequentially below, so both arrays must be sorted by id
		ArrayUtils.sortInIdOrder(chessboards);
		ArrayUtils.sortInChessboardIdOrder(cells);

		if (withLinks) {
			// append a pseudo-chessboard representing the "new chessboard" action
			Chessboard[] newChessboards = new Chessboard[chessboards.length + 1];
			System.arraycopy(chessboards, 0, newChessboards, 0, chessboards.length);
			Constants constants = Constants.getInstance(getActivity());
			Chessboard newCb = constants.NEW_CHESSBOARD.clone();
			newCb.setId(AllGridViewActivity.SELECTED_NEW_CHESSBOARD);
			newChessboards[newChessboards.length - 1] = newCb;
			chessboards = newChessboards;
		}

		// one thumbnail view per chessboard
		ImageView[] cbFragments = new ImageView[chessboards.length];
		int c = 0;	// index of the first cell of the current chessboard
		for (int cb = 0; cb < chessboards.length; cb++) {
			Chessboard chessboard = chessboards[cb];
			long cbId = chessboard.getId();

			// advance ce to one past the last cell belonging to this chessboard
			int ce = c;
			while (ce < cells.length && cells[ce].getChessboard() == cbId) {
				ce++;
			}

			// create chessboard thumbnail; cells are rendered without activity type
			Cell[] cbCells = ArrayUtils.copyOfRange(cells, c, ce);
			for (int i = 0; i < cbCells.length; i++) {
				cbCells[i].setActivityType(Cell.ACTIVITY_TYPE_NONE);
			}
			if (chessboard.getId() == AllGridViewActivity.SELECTED_NEW_CHESSBOARD) {
				cbFragments[cb] = new ImageView(getActivity());
				cbFragments[cb].setImageResource(R.drawable.cell_chessboard_new_high);
			} else if (chessboard.getId() == AllGridViewActivity.SELECTED_BACK_CHESSBOARD) {
				cbFragments[cb] = new ImageView(getActivity());
				cbFragments[cb].setImageResource(R.drawable.cell_chessboard_back_high);
			} else {
				String tnPath = ChessboardThumbnailManager.getInstance(getActivity()).getThumbnailPathOf(getActivity(), chessboard, cbCells);
				cbFragments[cb] = new ImageView(getActivity());
				ImageLoader.getInstance().loadImageLazy(cbFragments[cb], tnPath);
			}
			cbFragments[cb].setBackgroundColor(getResources().getColor(R.color.soft_black));
			int layoutHeight = (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, ChessboardThumbnailManager.THUMBNAIL_HEIGHT, getResources().getDisplayMetrics());
			cbFragments[cb].setLayoutParams(new TableRow.LayoutParams(LayoutParams.MATCH_PARENT, layoutHeight));
			c = ce;
		}

		// construct table layout: two entries per row
		FragmentActivity fragmentActivity = getActivity();
		TableLayout tableLayout = new TableLayout(fragmentActivity);
		tableLayout.setStretchAllColumns(true);
		int rowCount = 0;	// entries placed in the current row so far
		TableRow tableRow = null;
		for (int cb = 0; cb < chessboards.length; cb++) {
			Chessboard chessboard = chessboards[cb];
			if (rowCount == 0) {
				tableRow = new TableRow(fragmentActivity);
				TableLayout.LayoutParams layoutParams = new TableLayout.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT);
				if (cb > 0) {
					layoutParams.topMargin = getActivity().getResources().getDimensionPixelSize(R.dimen.activity_config_space_small);
				}
				tableRow.setLayoutParams(layoutParams);
			}

			// each entry = vertical layout with the chessboard name above its thumbnail
			LinearLayout linearLayout = new LinearLayout(fragmentActivity);
			linearLayout.setOrientation(LinearLayout.VERTICAL);
			TableRow.LayoutParams layoutParams = new TableRow.LayoutParams(0, LayoutParams.WRAP_CONTENT);
			if (rowCount > 0) {
				layoutParams.leftMargin = getActivity().getResources().getDimensionPixelSize(R.dimen.activity_config_space_small);
			}
			linearLayout.setLayoutParams(layoutParams);
			linearLayout.setClickable(true);
			linearLayout.setOnClickListener(new OpenChessboardListener(chessboard.getId()));

			TextView textView = new TextView(fragmentActivity, null, android.R.attr.textAppearanceLarge);
			textView.setText(chessboard.getName());
			textView.setTextColor(Color.BLACK);
			textView.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));
			linearLayout.addView(textView);
			linearLayout.addView(cbFragments[cb]);

			tableRow.addView(linearLayout);
			rowCount++;
			if (rowCount >= 2) {
				tableLayout.addView(tableRow);
				rowCount = 0;
			}
		}
		// flush a trailing half-filled row
		if (rowCount > 0) {
			tableLayout.addView(tableRow);
		}
		return tableLayout;
	}

	/**
	 * Click handler for a chessboard entry. For the "new chessboard" pseudo-entry
	 * it prompts for a name first; otherwise it finishes the activity, publishing
	 * the selection through {@code AllGridViewActivity.ret}.
	 */
	private class OpenChessboardListener implements OnClickListener {
		private long id;

		public OpenChessboardListener(long id) {
			this.id = id;
		}

		@Override
		public void onClick(View view) {
			if (id == AllGridViewActivity.SELECTED_NEW_CHESSBOARD) {
				Activity activity = getActivity();
				Resources res = activity.getResources();
				String messageStr = res.getString(R.string.activity_all_grid_view_new_table_message);
				String confirmStr = res.getString(R.string.activity_all_grid_view_new_table_confirm);
				String backStr = res.getString(R.string.activity_all_grid_view_new_table_back);

				// ask the user for the new chessboard name
				AlertDialog.Builder builder = new AlertDialog.Builder(activity);
				builder.setMessage(messageStr);
				final EditText inputText = new EditText(activity);
				builder.setView(inputText);
				builder.setPositiveButton(confirmStr, new Dialog.OnClickListener() {
					@Override
					public void onClick(DialogInterface dialog, int which) {
						Activity fragmentActivity = getActivity();
						fragmentActivity.finish();
						AllGridViewActivity.ret = new AllGridViewActivity.AllGridViewReturn(id);
						AllGridViewActivity.ret.setName(inputText.getText().toString());
					}
				});
				builder.setNegativeButton(backStr, new Dialog.OnClickListener() {
					@Override
					public void onClick(DialogInterface dialog, int which) {
						// do nothing
					}
				});
				AlertDialog dialog = builder.create();
				dialog.show();
			} else {
				Activity fragmentActivity = getActivity();
				fragmentActivity.finish();
				AllGridViewActivity.ret = new AllGridViewActivity.AllGridViewReturn(id);
			}
		}
	}
}
|
|
/*
* Copyright 2013, Rogue.IO
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rogue.app.framework.model;
import org.apache.commons.lang.StringUtils;
import rogue.app.framework.util.ScopedStateHolder;
import rogue.app.framework.util.StateHolder;
import java.io.Serializable;
import java.lang.annotation.Annotation;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Helper class that implements the {@link ResourceSelection} interface, providing the optional ability to store selected
* resource at a specified scope for potential data sharing. The <code>ResourceSelectionHolder</code> supports the notion
* of a binary mode for storing the selection information -- meaning, if {@link #isBinaryMode()} returns true, then only
* one set of values will be stored, i.e., either the {@link #getSelectedResource() selected resource} or the
* {@link #getSelectedResources() selected resources}. When {@link #isBinaryMode()} returns false, then both values can
* be stored independent of each other in the same holder.
*/
public class ResourceSelectionHolder<T> implements ResourceSelection<T>, Serializable
{
    // Explicit serial version id: the class is Serializable, so pin the id
    // instead of relying on the compiler-generated one.
    private static final long serialVersionUID = 1L;

    private static final String CLASS_NAME = ResourceSelectionHolder.class.getName();
    private static final Logger LOGGER = Logger.getLogger(CLASS_NAME);

    /**
     * The key used to store the single resource.
     */
    public static final String KEY_SINGLE_VALUE = "SingleValue";

    /**
     * The key used to store the multiple resources.
     */
    public static final String KEY_MULTIPLE_VALUE = "MultipleValues";

    /**
     * The prefix used to construct the keys within this holder.
     */
    private final String prefix;

    /**
     * The key used to store the single selection value.
     */
    private final String computedSingleValueKey;

    /**
     * The key used to store the multi-selection value.
     */
    private final String computedMultiValueKey;

    /**
     * The scope at which stateful data should be stored; {@code null} means local storage.
     */
    private final Class<? extends Annotation> scope;

    /**
     * Member variable to store the single value selection, if the scope is explicitly set to none.
     * NOTE(review): serialization of this holder requires T to be Serializable — confirm with callers.
     */
    private T singleSelectionValue;

    /**
     * Member variable to store the multiple value selection, if the scope is explicitly set to none.
     */
    private T[] multipleSelectionValues;

    /**
     * Internal variable to be further defined. Currently it identifies if at any given point both
     * the single and the multiple selection can be populated (false) or only one of them (true).
     */
    private boolean binaryMode = true;

    /**
     * Constructs an instance of <code>ResourceSelectionHolder</code> that stores the selection data locally.
     */
    public ResourceSelectionHolder()
    {
        this(null, null);
    }

    /**
     * Constructs an instance of <code>ResourceSelectionHolder</code> that stores the selection data at the specified
     * scope using the given prefix. The prefix is provided to ensure that there are no name clashes between multiple
     * resource selection objects storing data at the same scope.
     * <pre>
     * e.g., the following instance will use the class name as the prefix of the resource selection keys, which are
     * used to store data in the Session scope.
     *
     *     ResourceSelection selection = new ResourceSelectionHolder(SessionScoped.class, getClass().getName());
     *
     * </pre>
     *
     * @param scope  the scope at which data should be stored
     * @param prefix the prefix to use when storing data at the specified scope
     */
    public ResourceSelectionHolder(Class<? extends Annotation> scope, String prefix)
    {
        final String METHOD_NAME = "<ctor>";
        this.prefix = prefix;
        this.scope = scope;

        // A shared scope without a prefix is legal but dangerous: two holders
        // would read/write the same keys.
        if (scope != null && StringUtils.isEmpty(prefix))
        {
            LOGGER.logp(Level.WARNING, CLASS_NAME, METHOD_NAME,
                        "Empty prefix identified for non-null scope. This can lead to data collisions.");
        }

        String prefixKey = "";
        if (!StringUtils.isEmpty(prefix))
        {
            prefixKey = prefix + "_";
        }
        computedSingleValueKey = prefixKey + KEY_SINGLE_VALUE;
        computedMultiValueKey = prefixKey + KEY_MULTIPLE_VALUE;
    }

    /**
     * Identifies if this resource holder is operating in a binary selection mode or not.
     *
     * @return true if this resource holder is operating in a binary selection mode, false otherwise.
     */
    public boolean isBinaryMode()
    {
        return binaryMode;
    }

    /**
     * Set whether this resource holder should operate in a binary selection mode or not.
     *
     * @param binaryMode true if this resource holder should operate in a binary selection mode, false otherwise.
     */
    public void setBinaryMode(boolean binaryMode)
    {
        this.binaryMode = binaryMode;
    }

    /**
     * Get the prefix used for constructing the various keys in this holder.
     *
     * @return the prefix used for constructing the various keys in this holder.
     */
    public String getPrefix()
    {
        return prefix;
    }

    /**
     * Get the scope at which the data is stored.
     *
     * @return the scope at which the data is stored.
     */
    public Class<? extends Annotation> getScope()
    {
        return scope;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    @SuppressWarnings("unchecked") // scoped state holder stores untyped objects
    public T getSelectedResource()
    {
        return scope != null ? (T) ScopedStateHolder.getStateHolder(scope).get(computedSingleValueKey) :
               singleSelectionValue;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void setSelectedResource(T resource)
    {
        if (scope != null)
        {
            StateHolder stateHolder = ScopedStateHolder.getStateHolder(scope);
            stateHolder.put(computedSingleValueKey, resource);
            if (binaryMode)
            {
                // binary mode: setting one selection kind clears the other
                stateHolder.remove(computedMultiValueKey);
            }
        }
        else
        {
            singleSelectionValue = resource;
            if (binaryMode)
            {
                multipleSelectionValues = null;
            }
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    @SuppressWarnings("unchecked") // scoped state holder stores untyped objects
    public T[] getSelectedResources()
    {
        return scope != null ? (T[]) ScopedStateHolder.getStateHolder(scope).get(computedMultiValueKey) :
               multipleSelectionValues;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void setSelectedResources(T[] resources)
    {
        if (scope != null)
        {
            StateHolder stateHolder = ScopedStateHolder.getStateHolder(scope);
            stateHolder.put(computedMultiValueKey, resources);
            if (binaryMode)
            {
                // binary mode: setting one selection kind clears the other
                stateHolder.remove(computedSingleValueKey);
            }
        }
        else
        {
            multipleSelectionValues = resources;
            if (binaryMode)
            {
                singleSelectionValue = null;
            }
        }
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.tier.sockets.command;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.isA;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.io.IOException;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.ArgumentCaptor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.apache.geode.CancelCriterion;
import org.apache.geode.cache.PartitionAttributes;
import org.apache.geode.cache.TransactionException;
import org.apache.geode.cache.operations.KeySetOperationContext;
import org.apache.geode.internal.cache.InternalCache;
import org.apache.geode.internal.cache.LocalRegion;
import org.apache.geode.internal.cache.tier.CachedRegionHelper;
import org.apache.geode.internal.cache.tier.sockets.CacheServerStats;
import org.apache.geode.internal.cache.tier.sockets.ChunkedMessage;
import org.apache.geode.internal.cache.tier.sockets.Message;
import org.apache.geode.internal.cache.tier.sockets.Part;
import org.apache.geode.internal.cache.tier.sockets.ServerConnection;
import org.apache.geode.internal.security.AuthorizeRequest;
import org.apache.geode.internal.security.SecurityService;
import org.apache.geode.security.NotAuthorizedException;
import org.apache.geode.security.ResourcePermission.Operation;
import org.apache.geode.security.ResourcePermission.Resource;
import org.apache.geode.test.junit.categories.ClientServerTest;
/**
 * Unit tests for the {@code KeySet} cache-server command: transaction/retry
 * interaction and the three security code paths (none, integrated, legacy).
 */
@Category({ClientServerTest.class})
public class KeySetTest {
private static final String REGION_NAME = "region1";
// NOTE(review): KEY, CALLBACK_ARG and EVENT are not referenced by any test in this class.
private static final String KEY = "key1";
private static final Object CALLBACK_ARG = "arg";
private static final byte[] EVENT = new byte[8];
@Mock
private SecurityService securityService;
@Mock
private Message message;
@Mock
private ServerConnection serverConnection;
@Mock
private AuthorizeRequest authzRequest;
@Mock
private LocalRegion region;
@Mock
private InternalCache cache;
@Mock
private ChunkedMessage chunkedResponseMessage;
@Mock
private Part regionNamePart;
@Mock
private KeySetOperationContext keySetOperationContext;
@InjectMocks
private KeySet keySet;
/**
 * Wires the mock graph: a cache that resolves any region name to the mocked
 * region, a connection that exposes the cache/stats/authorizer, and a message
 * whose part 0 carries the region name.
 */
@Before
public void setUp() throws Exception {
// created manually, then re-injected by initMocks via @InjectMocks
this.keySet = new KeySet();
MockitoAnnotations.initMocks(this);
when(this.authzRequest.keySetAuthorize(eq(REGION_NAME)))
.thenReturn(this.keySetOperationContext);
when(this.cache.getRegion(isA(String.class))).thenReturn(this.region);
when(this.cache.getCancelCriterion()).thenReturn(mock(CancelCriterion.class));
when(this.message.getPart(eq(0))).thenReturn(this.regionNamePart);
when(this.regionNamePart.getCachedString()).thenReturn(REGION_NAME);
when(this.serverConnection.getCache()).thenReturn(this.cache);
when(this.serverConnection.getCacheServerStats()).thenReturn(mock(CacheServerStats.class));
when(this.serverConnection.getAuthzRequest()).thenReturn(this.authzRequest);
when(this.serverConnection.getCachedRegionHelper()).thenReturn(mock(CachedRegionHelper.class));
when(this.serverConnection.getChunkedResponseMessage()).thenReturn(this.chunkedResponseMessage);
}
// --- transaction/retry matrix: an exception is sent to the client only when
// --- the request is a retry, inside a transaction, on a partitioned region.
@Test
public void retryKeySet_doesNotWriteTransactionException_ifIsNotInTransaction() throws Exception {
long startTime = 0; // arbitrary value
TestableKeySet keySet = new TestableKeySet();
keySet.setIsInTransaction(false);
when(message.isRetry()).thenReturn(true);
when(region.getPartitionAttributes()).thenReturn(mock(PartitionAttributes.class));
keySet.cmdExecute(message, serverConnection, securityService, startTime);
assertThat(keySet.exceptionSentToClient).isNull();
}
@Test
public void nonRetryKeySet_doesNotWriteTransactionException() throws Exception {
long startTime = 0; // arbitrary value
TestableKeySet keySet = new TestableKeySet();
keySet.setIsInTransaction(true);
when(message.isRetry()).thenReturn(false);
when(region.getPartitionAttributes()).thenReturn(mock(PartitionAttributes.class));
keySet.cmdExecute(message, serverConnection, securityService, startTime);
assertThat(keySet.exceptionSentToClient).isNull();
}
@Test
public void retryKeySet_doesNotWriteTransactionException_ifIsInTransactionAndIsNotPartitionedRegion()
throws Exception {
long startTime = 0; // arbitrary value
TestableKeySet keySet = new TestableKeySet();
keySet.setIsInTransaction(true);
when(message.isRetry()).thenReturn(true);
// null partition attributes == not a partitioned region
when(region.getPartitionAttributes()).thenReturn(null);
keySet.cmdExecute(message, serverConnection, securityService, startTime);
assertThat(keySet.exceptionSentToClient).isNull();
}
@Test
public void retryKeySet_writesTransactionException_ifIsInTransactionAndIsPartitionedRegion()
throws Exception {
long startTime = 0; // arbitrary value
TestableKeySet keySet = new TestableKeySet();
keySet.setIsInTransaction(true);
when(message.isRetry()).thenReturn(true);
when(region.getPartitionAttributes()).thenReturn(mock(PartitionAttributes.class));
keySet.cmdExecute(message, serverConnection, securityService, startTime);
assertThat(keySet.exceptionSentToClient).isInstanceOf(TransactionException.class).hasMessage(
"Failover on a set operation of a partitioned region is not allowed in a transaction.");
}
// --- security paths ---
@Test
public void noSecurityShouldSucceed() throws Exception {
when(this.securityService.isClientSecurityRequired()).thenReturn(false);
this.keySet.cmdExecute(this.message, this.serverConnection, this.securityService, 0);
verify(this.chunkedResponseMessage).sendChunk(this.serverConnection);
}
@Test
public void integratedSecurityShouldSucceedIfAuthorized() throws Exception {
when(this.securityService.isClientSecurityRequired()).thenReturn(true);
when(this.securityService.isIntegratedSecurity()).thenReturn(true);
this.keySet.cmdExecute(this.message, this.serverConnection, this.securityService, 0);
verify(this.securityService).authorize(Resource.DATA, Operation.READ, REGION_NAME);
verify(this.chunkedResponseMessage).sendChunk(this.serverConnection);
}
@Test
public void integratedSecurityShouldFailIfNotAuthorized() throws Exception {
when(this.securityService.isClientSecurityRequired()).thenReturn(true);
when(this.securityService.isIntegratedSecurity()).thenReturn(true);
doThrow(new NotAuthorizedException("")).when(this.securityService).authorize(Resource.DATA,
Operation.READ, REGION_NAME);
// the failure is reported to the client as a chunk, not thrown here
this.keySet.cmdExecute(this.message, this.serverConnection, this.securityService, 0);
verify(this.securityService).authorize(Resource.DATA, Operation.READ, REGION_NAME);
verify(this.chunkedResponseMessage).sendChunk(this.serverConnection);
}
@Test
public void oldSecurityShouldSucceedIfAuthorized() throws Exception {
when(this.securityService.isClientSecurityRequired()).thenReturn(true);
when(this.securityService.isIntegratedSecurity()).thenReturn(false);
this.keySet.cmdExecute(this.message, this.serverConnection, this.securityService, 0);
verify(this.authzRequest).keySetAuthorize(eq(REGION_NAME));
verify(this.chunkedResponseMessage).sendChunk(this.serverConnection);
}
@Test
public void oldSecurityShouldFailIfNotAuthorized() throws Exception {
when(this.securityService.isClientSecurityRequired()).thenReturn(true);
when(this.securityService.isIntegratedSecurity()).thenReturn(false);
doThrow(new NotAuthorizedException("")).when(this.authzRequest)
.keySetAuthorize(eq(REGION_NAME));
this.keySet.cmdExecute(this.message, this.serverConnection, this.securityService, 0);
verify(this.authzRequest).keySetAuthorize(eq(REGION_NAME));
// the NotAuthorizedException must be serialized back to the client
ArgumentCaptor<NotAuthorizedException> argument =
ArgumentCaptor.forClass(NotAuthorizedException.class);
verify(this.chunkedResponseMessage).addObjPart(argument.capture());
assertThat(argument.getValue()).isExactlyInstanceOf(NotAuthorizedException.class);
verify(this.chunkedResponseMessage).sendChunk(eq(this.serverConnection));
}
/**
 * KeySet subclass that lets tests control {@code isInTransaction()} and
 * capture (instead of write) the exception destined for the client.
 */
private class TestableKeySet extends KeySet {
private boolean isInTransaction = false;
// last exception the command tried to send to the client; null if none
public Throwable exceptionSentToClient;
public void setIsInTransaction(boolean isInTransaction) {
this.isInTransaction = isInTransaction;
}
@Override
public boolean isInTransaction() {
return isInTransaction;
}
@Override
protected void keySetWriteChunkedException(Message clientMessage, Throwable ex,
ServerConnection serverConnection) throws IOException {
this.exceptionSentToClient = ex;
}
}
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.search;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.routing.GroupShardsIterator;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.SearchPhaseResult;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.internal.AliasFilter;
import org.elasticsearch.search.internal.InternalSearchResponse;
import org.elasticsearch.search.internal.ShardSearchContextId;
import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.transport.Transport;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BiFunction;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.instanceOf;
/**
 * Unit tests for {@code AbstractSearchAsyncAction}: took-time computation,
 * per-shard search request building, and failure propagation to the listener
 * when partial results are disallowed.
 */
public class AbstractSearchAsyncActionTests extends ESTestCase {
// Every (clusterAlias, nodeId) pair the action tried to resolve to a connection.
private final List<Tuple<String, String>> resolvedNodes = new ArrayList<>();
// Search contexts released via sendReleaseSearchContext; concurrent set because release may happen asynchronously.
private final Set<ShardSearchContextId> releasedContexts = new CopyOnWriteArraySet<>();
/**
 * Builds a minimal concrete AbstractSearchAsyncAction for testing.
 * When {@code controlled} is true the time provider is driven entirely by
 * {@code expected} (the returned runnable sets it to a random value); otherwise
 * real clocks are used and {@code expected} records the nanos actually spun.
 */
private AbstractSearchAsyncAction<SearchPhaseResult> createAction(SearchRequest request,
ArraySearchPhaseResults<SearchPhaseResult> results,
ActionListener<SearchResponse> listener,
final boolean controlled,
final AtomicLong expected) {
final Runnable runnable;
final TransportSearchAction.SearchTimeProvider timeProvider;
if (controlled) {
runnable = () -> expected.set(randomNonNegativeLong());
timeProvider = new TransportSearchAction.SearchTimeProvider(0, 0, expected::get);
} else {
runnable = () -> {
// Spin for a random short while and remember the elapsed nanos as a lower bound.
long elapsed = spinForAtLeastNMilliseconds(randomIntBetween(1, 10));
expected.set(elapsed);
};
timeProvider = new TransportSearchAction.SearchTimeProvider(
0,
System.nanoTime(),
System::nanoTime);
}
// Record lookups; returning null is fine because the test never executes a real shard request.
BiFunction<String, String, Transport.Connection> nodeIdToConnection = (cluster, node) -> {
resolvedNodes.add(Tuple.tuple(cluster, node));
return null;
};
return new AbstractSearchAsyncAction<SearchPhaseResult>("test", logger, null, nodeIdToConnection,
Collections.singletonMap("foo", new AliasFilter(new MatchAllQueryBuilder())), Collections.singletonMap("foo", 2.0f),
null, request, listener,
new GroupShardsIterator<>(
Collections.singletonList(
new SearchShardIterator(null, null, Collections.emptyList(), null)
)
), timeProvider, ClusterState.EMPTY_STATE, null,
results, request.getMaxConcurrentShardRequests(),
SearchResponse.Clusters.EMPTY) {
@Override
protected SearchPhase getNextPhase(final SearchPhaseResults<SearchPhaseResult> results, SearchPhaseContext context) {
// No follow-up phase in these tests.
return null;
}
@Override
protected void executePhaseOnShard(final SearchShardIterator shardIt, final SearchShardTarget shard,
final SearchActionListener<SearchPhaseResult> listener) {
// Intentionally a no-op: shard execution is not under test.
}
@Override
long buildTookInMillis() {
// Advance/record the clock before delegating so the test can assert the took value.
runnable.run();
return super.buildTookInMillis();
}
@Override
public void sendReleaseSearchContext(ShardSearchContextId contextId, Transport.Connection connection,
OriginalIndices originalIndices) {
// Capture instead of sending over the wire.
releasedContexts.add(contextId);
}
};
}
public void testTookWithControlledClock() {
runTestTook(true);
}
public void testTookWithRealClock() {
runTestTook(false);
}
// Shared body for the two took-time tests above.
private void runTestTook(final boolean controlled) {
final AtomicLong expected = new AtomicLong();
AbstractSearchAsyncAction<SearchPhaseResult> action = createAction(new SearchRequest(),
new ArraySearchPhaseResults<>(10), null, controlled, expected);
final long actual = action.buildTookInMillis();
if (controlled) {
// with a controlled clock, we can assert the exact took time
assertThat(actual, equalTo(TimeUnit.NANOSECONDS.toMillis(expected.get())));
} else {
// with a real clock, the best we can say is that it took as long as we spun for
assertThat(actual, greaterThanOrEqualTo(TimeUnit.NANOSECONDS.toMillis(expected.get())));
}
}
// Verifies that buildShardSearchRequest carries over indices options, indices,
// alias filter, index boost and cluster alias from the iterator / action setup.
public void testBuildShardSearchTransportRequest() {
SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(randomBoolean());
final AtomicLong expected = new AtomicLong();
AbstractSearchAsyncAction<SearchPhaseResult> action = createAction(searchRequest,
new ArraySearchPhaseResults<>(10), null, false, expected);
String clusterAlias = randomBoolean() ? null : randomAlphaOfLengthBetween(5, 10);
SearchShardIterator iterator = new SearchShardIterator(clusterAlias, new ShardId(new Index("name", "foo"), 1),
Collections.emptyList(), new OriginalIndices(new String[] {"name", "name1"}, IndicesOptions.strictExpand()));
ShardSearchRequest shardSearchTransportRequest = action.buildShardSearchRequest(iterator, 10);
assertEquals(IndicesOptions.strictExpand(), shardSearchTransportRequest.indicesOptions());
assertArrayEquals(new String[] {"name", "name1"}, shardSearchTransportRequest.indices());
assertEquals(new MatchAllQueryBuilder(), shardSearchTransportRequest.getAliasFilter().getQueryBuilder());
// 2.0f matches the index boost configured for "foo" in createAction.
assertEquals(2.0f, shardSearchTransportRequest.indexBoost(), 0.0f);
assertArrayEquals(new String[] {"name", "name1"}, shardSearchTransportRequest.indices());
assertEquals(clusterAlias, shardSearchTransportRequest.getClusterAlias());
}
// With allowPartialSearchResults(false), any shard failure must surface as a
// SearchPhaseExecutionException and all held search contexts must be released.
public void testSendSearchResponseDisallowPartialFailures() {
SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(false);
AtomicReference<Exception> exception = new AtomicReference<>();
ActionListener<SearchResponse> listener = ActionListener.wrap(response -> fail("onResponse should not be called"), exception::set);
Set<ShardSearchContextId> requestIds = new HashSet<>();
List<Tuple<String, String>> nodeLookups = new ArrayList<>();
int numFailures = randomIntBetween(1, 5);
ArraySearchPhaseResults<SearchPhaseResult> phaseResults = phaseResults(requestIds, nodeLookups, numFailures);
AbstractSearchAsyncAction<SearchPhaseResult> action = createAction(searchRequest, phaseResults, listener, false, new AtomicLong());
for (int i = 0; i < numFailures; i++) {
ShardId failureShardId = new ShardId("index", "index-uuid", i);
String failureClusterAlias = randomBoolean() ? null : randomAlphaOfLengthBetween(5, 10);
String failureNodeId = randomAlphaOfLengthBetween(5, 10);
action.onShardFailure(i, new SearchShardTarget(failureNodeId, failureShardId, failureClusterAlias, OriginalIndices.NONE),
new IllegalArgumentException());
}
action.sendSearchResponse(InternalSearchResponse.empty(), phaseResults.results);
assertThat(exception.get(), instanceOf(SearchPhaseExecutionException.class));
SearchPhaseExecutionException searchPhaseExecutionException = (SearchPhaseExecutionException)exception.get();
assertEquals(0, searchPhaseExecutionException.getSuppressed().length);
assertEquals(numFailures, searchPhaseExecutionException.shardFailures().length);
for (ShardSearchFailure shardSearchFailure : searchPhaseExecutionException.shardFailures()) {
assertThat(shardSearchFailure.getCause(), instanceOf(IllegalArgumentException.class));
}
// Successful results must have had their nodes resolved and contexts released.
assertEquals(nodeLookups, resolvedNodes);
assertEquals(requestIds, releasedContexts);
}
// A phase failure must be wrapped with phase name and message, with no shard failures attached.
public void testOnPhaseFailure() {
SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(false);
AtomicReference<Exception> exception = new AtomicReference<>();
ActionListener<SearchResponse> listener = ActionListener.wrap(response -> fail("onResponse should not be called"), exception::set);
Set<ShardSearchContextId> requestIds = new HashSet<>();
List<Tuple<String, String>> nodeLookups = new ArrayList<>();
ArraySearchPhaseResults<SearchPhaseResult> phaseResults = phaseResults(requestIds, nodeLookups, 0);
AbstractSearchAsyncAction<SearchPhaseResult> action = createAction(searchRequest, phaseResults, listener, false, new AtomicLong());
action.onPhaseFailure(new SearchPhase("test") {
@Override
public void run() {
}
}, "message", null);
assertThat(exception.get(), instanceOf(SearchPhaseExecutionException.class));
SearchPhaseExecutionException searchPhaseExecutionException = (SearchPhaseExecutionException)exception.get();
assertEquals("message", searchPhaseExecutionException.getMessage());
assertEquals("test", searchPhaseExecutionException.getPhaseName());
assertEquals(0, searchPhaseExecutionException.shardFailures().length);
assertEquals(0, searchPhaseExecutionException.getSuppressed().length);
// Existing contexts are released even though the phase itself failed.
assertEquals(nodeLookups, resolvedNodes);
assertEquals(requestIds, releasedContexts);
}
// Unavailable (never-executed) shards count as failures when partial results are disallowed.
public void testShardNotAvailableWithDisallowPartialFailures() {
SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(false);
AtomicReference<Exception> exception = new AtomicReference<>();
ActionListener<SearchResponse> listener = ActionListener.wrap(response -> fail("onResponse should not be called"), exception::set);
int numShards = randomIntBetween(2, 10);
ArraySearchPhaseResults<SearchPhaseResult> phaseResults =
new ArraySearchPhaseResults<>(numShards);
AbstractSearchAsyncAction<SearchPhaseResult> action = createAction(searchRequest, phaseResults, listener, false, new AtomicLong());
// skip one to avoid the "all shards failed" failure.
SearchShardIterator skipIterator = new SearchShardIterator(null, null, Collections.emptyList(), null);
skipIterator.resetAndSkip();
action.skipShard(skipIterator);
// expect at least 2 shards, so onPhaseDone should report failure.
action.onPhaseDone();
assertThat(exception.get(), instanceOf(SearchPhaseExecutionException.class));
SearchPhaseExecutionException searchPhaseExecutionException = (SearchPhaseExecutionException)exception.get();
assertEquals("Partial shards failure (" + (numShards - 1) + " shards unavailable)",
searchPhaseExecutionException.getMessage());
assertEquals("test", searchPhaseExecutionException.getPhaseName());
assertEquals(0, searchPhaseExecutionException.shardFailures().length);
assertEquals(0, searchPhaseExecutionException.getSuppressed().length);
}
/**
 * Creates phase results holding {@code numResults} successful results (capacity reserves
 * room for {@code numFailures} more). Fills {@code contextIds} with the generated context
 * ids and {@code nodeLookups} with the (clusterAlias, nodeId) pairs, in shard-index order,
 * so callers can assert against resolvedNodes/releasedContexts.
 */
private static ArraySearchPhaseResults<SearchPhaseResult> phaseResults(Set<ShardSearchContextId> contextIds,
List<Tuple<String, String>> nodeLookups,
int numFailures) {
int numResults = randomIntBetween(1, 10);
ArraySearchPhaseResults<SearchPhaseResult> phaseResults = new ArraySearchPhaseResults<>(numResults + numFailures);
for (int i = 0; i < numResults; i++) {
ShardSearchContextId contextId = new ShardSearchContextId(UUIDs.randomBase64UUID(), randomNonNegativeLong());
contextIds.add(contextId);
SearchPhaseResult phaseResult = new PhaseResult(contextId);
String resultClusterAlias = randomBoolean() ? null : randomAlphaOfLengthBetween(5, 10);
String resultNodeId = randomAlphaOfLengthBetween(5, 10);
ShardId resultShardId = new ShardId("index", "index-uuid", i);
nodeLookups.add(Tuple.tuple(resultClusterAlias, resultNodeId));
phaseResult.setSearchShardTarget(new SearchShardTarget(resultNodeId, resultShardId, resultClusterAlias, OriginalIndices.NONE));
phaseResult.setShardIndex(i);
phaseResults.consumeResult(phaseResult, () -> {});
}
return phaseResults;
}
// Minimal SearchPhaseResult carrying only a context id.
private static final class PhaseResult extends SearchPhaseResult {
PhaseResult(ShardSearchContextId contextId) {
this.contextId = contextId;
}
}
}
|
|
/*
* Copyright 2014 JBoss by Red Hat.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.kie.services.impl;
import static org.kie.scanner.MavenRepository.getMavenRepository;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.persistence.EntityManagerFactory;
import javax.xml.bind.annotation.XmlRootElement;
import org.apache.commons.codec.binary.Base64;
import org.drools.compiler.kie.builder.impl.InternalKieModule;
import org.drools.compiler.kie.builder.impl.KieContainerImpl;
import org.drools.core.common.ProjectClassLoader;
import org.drools.core.marshalling.impl.ClassObjectMarshallingStrategyAcceptor;
import org.drools.core.marshalling.impl.SerializablePlaceholderResolverStrategy;
import org.drools.core.util.StringUtils;
import org.jbpm.kie.services.impl.bpmn2.BPMN2DataServiceImpl;
import org.jbpm.kie.services.impl.model.ProcessAssetDesc;
import org.jbpm.process.audit.event.AuditEventBuilder;
import org.jbpm.runtime.manager.impl.KModuleRegisterableItemsFactory;
import org.jbpm.runtime.manager.impl.deploy.DeploymentDescriptorManager;
import org.jbpm.runtime.manager.impl.deploy.DeploymentDescriptorMerger;
import org.jbpm.runtime.manager.impl.jpa.EntityManagerFactoryManager;
import org.jbpm.services.api.DefinitionService;
import org.jbpm.services.api.model.DeployedAsset;
import org.jbpm.services.api.model.DeployedUnit;
import org.jbpm.services.api.model.DeploymentUnit;
import org.kie.api.KieBase;
import org.kie.api.KieServices;
import org.kie.api.builder.ReleaseId;
import org.kie.api.builder.model.KieBaseModel;
import org.kie.api.marshalling.ObjectMarshallingStrategy;
import org.kie.api.remote.Remotable;
import org.kie.api.runtime.EnvironmentName;
import org.kie.api.runtime.KieContainer;
import org.kie.api.runtime.manager.RegisterableItemsFactory;
import org.kie.api.runtime.manager.RuntimeEnvironmentBuilder;
import org.kie.internal.executor.api.ExecutorService;
import org.kie.internal.runtime.conf.DeploymentDescriptor;
import org.kie.internal.runtime.conf.MergeMode;
import org.kie.internal.runtime.conf.NamedObjectModel;
import org.kie.internal.runtime.conf.ObjectModel;
import org.kie.internal.runtime.conf.ObjectModelResolver;
import org.kie.internal.runtime.conf.ObjectModelResolverProvider;
import org.kie.internal.runtime.conf.PersistenceMode;
import org.kie.scanner.MavenRepository;
import org.scannotation.AnnotationDB;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Deployment service for KModule (kjar) based deployment units.
 * Resolves the artifact from the Maven repository if needed, builds the runtime
 * environment from the (merged) deployment descriptor, and registers the processes,
 * forms and classes found in the kie module with the deployed unit.
 */
public class KModuleDeploymentService extends AbstractDeploymentService {
private static Logger logger = LoggerFactory.getLogger(KModuleDeploymentService.class);
// Fallback kbase name when kmodule.xml declares no default kbase.
private static final String DEFAULT_KBASE_NAME = "defaultKieBase";
private DefinitionService bpmn2Service;
private DeploymentDescriptorMerger merger = new DeploymentDescriptorMerger();
private FormManagerService formManagerService;
private ExecutorService executorService;
/** Registers the shared EntityManagerFactory under the "org.jbpm.domain" persistence unit. */
public void onInit() {
EntityManagerFactoryManager.get().addEntityManagerFactory("org.jbpm.domain", getEmf());
}
/**
 * Deploys a {@link KModuleDeploymentUnit}: resolves/creates the KieContainer,
 * locates the kbase, processes the module resources (processes, forms, classes)
 * and finally builds and registers the runtime environment.
 *
 * @throws IllegalArgumentException if the unit is not a KModuleDeploymentUnit
 * @throws RuntimeException wrapping any error raised during deployment
 */
@Override
public void deploy(DeploymentUnit unit) {
try {
super.deploy(unit);
if (!(unit instanceof KModuleDeploymentUnit)) {
throw new IllegalArgumentException("Invalid deployment unit provided - " + unit.getClass().getName());
}
KModuleDeploymentUnit kmoduleUnit = (KModuleDeploymentUnit) unit;
DeployedUnitImpl deployedUnit = new DeployedUnitImpl(unit);
KieContainer kieContainer = kmoduleUnit.getKieContainer();
ReleaseId releaseId = null;
if (kieContainer == null) {
// No container yet: resolve the kjar from the Maven repository and create one.
KieServices ks = KieServices.Factory.get();
releaseId = ks.newReleaseId(kmoduleUnit.getGroupId(), kmoduleUnit.getArtifactId(), kmoduleUnit.getVersion());
MavenRepository repository = getMavenRepository();
repository.resolveArtifact(releaseId.toExternalForm());
kieContainer = ks.newKieContainer(releaseId);
kmoduleUnit.setKieContainer(kieContainer);
}
releaseId = kieContainer.getReleaseId();
String kbaseName = kmoduleUnit.getKbaseName();
if (StringUtils.isEmpty(kbaseName)) {
// Fall back to the default kbase declared in kmodule.xml, or a well-known name.
KieBaseModel defaultKBaseModel = ((KieContainerImpl)kieContainer).getKieProject().getDefaultKieBaseModel();
if (defaultKBaseModel != null) {
kbaseName = defaultKBaseModel.getName();
} else {
kbaseName = DEFAULT_KBASE_NAME;
}
}
InternalKieModule module = (InternalKieModule) ((KieContainerImpl)kieContainer).getKieModuleForKBase(kbaseName);
if (module == null) {
throw new IllegalStateException("Cannot find kbase, either it does not exist or there are multiple default kbases in kmodule.xml");
}
KieBase kbase = kieContainer.getKieBase(kbaseName);
Collection<String> files = module.getFileNames();
processResources(module, files, kieContainer, kmoduleUnit, deployedUnit, releaseId);
// Also process resources of any kjar dependencies of the module.
if (module.getKieDependencies() != null) {
Collection<InternalKieModule> dependencies = module.getKieDependencies().values();
for (InternalKieModule depModule : dependencies) {
logger.debug("Processing dependency module " + depModule.getReleaseId());
files = depModule.getFileNames();
processResources(depModule, files, kieContainer, kmoduleUnit, deployedUnit, depModule.getReleaseId());
}
}
if (module.getJarDependencies() != null && !module.getJarDependencies().isEmpty()) {
processClassloader(kieContainer, deployedUnit);
}
AuditEventBuilder auditLoggerBuilder = setupAuditLogger(identityProvider, unit.getIdentifier());
RuntimeEnvironmentBuilder builder = boostrapRuntimeEnvironmentBuilder(
kmoduleUnit, deployedUnit, kieContainer, kmoduleUnit.getMergeMode())
.knowledgeBase(kbase)
.classLoader(kieContainer.getClassLoader());
builder.registerableItemsFactory(getRegisterableItemsFactory(auditLoggerBuilder, kieContainer, kmoduleUnit));
commonDeploy(unit, deployedUnit, builder.get(), kieContainer);
kmoduleUnit.setDeployed(true);
} catch (Throwable e) {
logger.warn("Unexpected error while deploying unit {}", unit.getIdentifier(), e);
// catch all possible errors to be able to report them to caller as RuntimeException
throw new RuntimeException(e);
}
}
/**
 * Creates the registerable items factory wired with the audit logger builder and
 * the identity-aware process listener.
 */
protected RegisterableItemsFactory getRegisterableItemsFactory(AuditEventBuilder auditLoggerBuilder,
KieContainer kieContainer,KModuleDeploymentUnit unit) {
KModuleRegisterableItemsFactory factory = new KModuleRegisterableItemsFactory(kieContainer, unit.getKsessionName());
factory.setAuditBuilder(auditLoggerBuilder);
factory.addProcessListener(IdentityProviderAwareProcessListener.class);
return factory;
}
/**
 * Undeploys the unit: unregisters its forms and removes the kie module from the
 * in-memory repository.
 *
 * @throws IllegalArgumentException if the unit is not a KModuleDeploymentUnit
 */
@Override
public void undeploy(DeploymentUnit unit) {
if (!(unit instanceof KModuleDeploymentUnit)) {
throw new IllegalArgumentException("Invalid deployment unit provided - " + unit.getClass().getName());
}
KModuleDeploymentUnit kmoduleUnit = (KModuleDeploymentUnit) unit;
super.undeploy(unit);
formManagerService.unRegisterForms( unit.getIdentifier() );
KieServices ks = KieServices.Factory.get();
ReleaseId releaseId = ks.newReleaseId(kmoduleUnit.getGroupId(), kmoduleUnit.getArtifactId(), kmoduleUnit.getVersion());
ks.getRepository().removeKieModule(releaseId);
}
/**
 * Bootstraps (note: method name keeps its historical spelling for compatibility)
 * the RuntimeEnvironmentBuilder from the merged deployment descriptor: persistence
 * mode, environment entries, configuration, marshalling strategies, asset roles
 * and remoteable classes.
 */
protected RuntimeEnvironmentBuilder boostrapRuntimeEnvironmentBuilder(KModuleDeploymentUnit deploymentUnit,
DeployedUnit deployedUnit, KieContainer kieContainer, MergeMode mode) {
DeploymentDescriptor descriptor = deploymentUnit.getDeploymentDescriptor();
if (descriptor == null) {
DeploymentDescriptorManager descriptorManager = new DeploymentDescriptorManager("org.jbpm.domain");
List<DeploymentDescriptor> descriptorHierarchy = descriptorManager.getDeploymentDescriptorHierarchy(kieContainer);
descriptor = merger.merge(descriptorHierarchy, mode);
deploymentUnit.setDeploymentDescriptor(descriptor);
} else if (!deploymentUnit.isDeployed()) {
// Unit carries its own descriptor but was never deployed: merge it on top of the hierarchy.
DeploymentDescriptorManager descriptorManager = new DeploymentDescriptorManager("org.jbpm.domain");
List<DeploymentDescriptor> descriptorHierarchy = descriptorManager.getDeploymentDescriptorHierarchy(kieContainer);
descriptorHierarchy.add(0, descriptor);
descriptor = merger.merge(descriptorHierarchy, mode);
deploymentUnit.setDeploymentDescriptor(descriptor);
}
// first set on unit the strategy
deploymentUnit.setStrategy(descriptor.getRuntimeStrategy());
// setting up runtime environment via builder
RuntimeEnvironmentBuilder builder = null;
if (descriptor.getPersistenceMode() == PersistenceMode.NONE) {
builder = RuntimeEnvironmentBuilder.Factory.get().newDefaultInMemoryBuilder();
} else {
builder = RuntimeEnvironmentBuilder.Factory.get().newDefaultBuilder();
}
// populate various properties of the builder
EntityManagerFactory emf = EntityManagerFactoryManager.get().getOrCreate(descriptor.getPersistenceUnit());
builder.entityManagerFactory(emf);
Map<String, Object> contextParams = new HashMap<String, Object>();
contextParams.put("entityManagerFactory", emf);
contextParams.put("classLoader", kieContainer.getClassLoader());
// process object models that are globally configured (environment entries, session configuration)
for (NamedObjectModel model : descriptor.getEnvironmentEntries()) {
Object entry = getInstanceFromModel(model, kieContainer, contextParams);
builder.addEnvironmentEntry(model.getName(), entry);
}
for (NamedObjectModel model : descriptor.getConfiguration()) {
Object entry = getInstanceFromModel(model, kieContainer, contextParams);
builder.addConfiguration(model.getName(), (String) entry);
}
// +1 slot reserved for the default serialization strategy added below.
ObjectMarshallingStrategy[] mStrategies = new ObjectMarshallingStrategy[descriptor.getMarshallingStrategies().size() + 1];
int index = 0;
for (ObjectModel model : descriptor.getMarshallingStrategies()) {
Object strategy = getInstanceFromModel(model, kieContainer, contextParams);
mStrategies[index] = (ObjectMarshallingStrategy)strategy;
index++;
}
// lastly add the main default strategy
mStrategies[index] = new SerializablePlaceholderResolverStrategy(ClassObjectMarshallingStrategyAcceptor.DEFAULT);
builder.addEnvironmentEntry(EnvironmentName.OBJECT_MARSHALLING_STRATEGIES, mStrategies);
builder.addEnvironmentEntry("KieDeploymentDescriptor", descriptor);
builder.addEnvironmentEntry("KieContainer", kieContainer);
if (executorService != null) {
builder.addEnvironmentEntry("ExecutorService", executorService);
}
// populate all assets with roles for this deployment unit
List<String> requiredRoles = descriptor.getRequiredRoles(DeploymentDescriptor.TYPE_VIEW);
if (requiredRoles != null && !requiredRoles.isEmpty()) {
for (DeployedAsset desc : deployedUnit.getDeployedAssets()) {
if (desc instanceof ProcessAssetDesc) {
((ProcessAssetDesc) desc).setRoles(requiredRoles);
}
}
}
// process additional classes listed in the descriptor (for remoting/serialization)
List<String> remoteableClasses = descriptor.getClasses();
if (remoteableClasses != null && !remoteableClasses.isEmpty()) {
for (String className : remoteableClasses) {
try {
((DeployedUnitImpl)deployedUnit).addClass(kieContainer.getClassLoader().loadClass(className));
logger.debug( "Loaded {} into the classpath from deployment descriptor {}", className, kieContainer.getReleaseId().toExternalForm());
} catch (ClassNotFoundException cnfe) {
// Preserve the cause so the missing-class origin is visible to callers.
throw new IllegalArgumentException("Class " + className + " not found in the project", cnfe);
} catch (NoClassDefFoundError e) {
throw new IllegalArgumentException("Class " + className + " not found in the project", e);
}
}
}
return builder;
}
/**
 * Instantiates the object described by the model via its configured resolver.
 *
 * @throws IllegalStateException if no resolver is registered for the model
 */
protected Object getInstanceFromModel(ObjectModel model, KieContainer kieContainer, Map<String, Object> contextParams) {
ObjectModelResolver resolver = ObjectModelResolverProvider.get(model.getResolver());
if (resolver == null) {
// if we don't throw an exception here, we have an NPE below..
throw new IllegalStateException("Unable to find ObjectModelResolver for " + model.getResolver());
}
return resolver.getInstance(model, kieContainer.getClassLoader(), contextParams);
}
/**
 * Processes module files: BPMN2 processes are parsed and registered as assets,
 * form files (.ftl/.form) are registered with the form manager, and .class files
 * are loaded into the deployed unit.
 */
protected void processResources(InternalKieModule module, Collection<String> files,
KieContainer kieContainer, DeploymentUnit unit, DeployedUnitImpl deployedUnit, ReleaseId releaseId) {
for (String fileName : files) {
if(fileName.matches(".+bpmn[2]?$")) {
ProcessAssetDesc process;
try {
String processString = new String(module.getBytes(fileName), "UTF-8");
process = (ProcessAssetDesc) bpmn2Service.buildProcessDefinition(unit.getIdentifier(), processString, kieContainer, true);
if (process == null) {
throw new IllegalArgumentException("Unable to read process " + fileName);
}
// Keep the original source (base64) so it can be retrieved later.
process.setEncodedProcessSource(Base64.encodeBase64String(processString.getBytes()));
process.setDeploymentId(unit.getIdentifier());
deployedUnit.addAssetLocation(process.getId(), process);
} catch (UnsupportedEncodingException e) {
throw new IllegalArgumentException("Unsupported encoding while processing process " + fileName, e);
}
} else if (fileName.matches(".+ftl$") || fileName.matches(".+form$")) {
try {
String formContent = new String(module.getBytes(fileName), "UTF-8");
// Register forms under their simple name, without any directory prefix.
if (fileName.indexOf( "/" ) != -1) fileName = fileName.substring( fileName.lastIndexOf( "/" ) + 1);
formManagerService.registerForm(unit.getIdentifier(), fileName, formContent);
} catch (UnsupportedEncodingException e) {
throw new IllegalArgumentException("Unsupported encoding while processing form " + fileName, e);
}
} else if( fileName.matches(".+class$")) {
// Turn the path into a FQCN and strip the ".class" suffix.
String className = fileName.replaceAll("/", ".");
className = className.substring(0, className.length() - ".class".length());
try {
deployedUnit.addClass(kieContainer.getClassLoader().loadClass(className));
logger.debug( "Loaded {} into the classpath from deployment {}", className, releaseId.toExternalForm());
} catch (ClassNotFoundException cnfe) {
throw new IllegalArgumentException("Class " + className + " not found in the project", cnfe);
} catch (NoClassDefFoundError e) {
throw new IllegalArgumentException("Class " + className + " not found in the project", e);
}
}
}
}
/**
 * Scans the container's parent classloader jars for classes annotated with
 * {@code @XmlRootElement} or {@code @Remotable} and adds them to the deployed unit.
 */
protected void processClassloader(KieContainer kieContainer, DeployedUnitImpl deployedUnit) {
if (kieContainer.getClassLoader() instanceof ProjectClassLoader) {
ClassLoader parentCl = kieContainer.getClassLoader().getParent();
if (parentCl instanceof URLClassLoader) {
URL[] urls = ((URLClassLoader) parentCl).getURLs();
if (urls == null || urls.length == 0) {
return;
}
AnnotationDB db = new AnnotationDB();
try {
db.scanArchives(urls);
Set<String> jaxbClasses = db.getAnnotationIndex().get(XmlRootElement.class.getName());
Set<String> remoteClasses = db.getAnnotationIndex().get(Remotable.class.getName());
Set<String> allClasses = new HashSet<String>();
if (jaxbClasses != null) {
allClasses.addAll(jaxbClasses);
}
if (remoteClasses != null) {
allClasses.addAll(remoteClasses);
}
for (String className : allClasses) {
try {
deployedUnit.addClass(kieContainer.getClassLoader().loadClass(className));
logger.debug( "Loaded {} into the classpath from deployment {}", className, kieContainer.getReleaseId().toExternalForm());
} catch (ClassNotFoundException cnfe) {
throw new IllegalArgumentException("Class " + className + " not found in the project", cnfe);
}
}
} catch (IOException e) {
// Pass the throwable as the trailing argument so the stack trace is logged.
logger.warn("Encountered error while scanning classes {}", e.getMessage(), e);
}
}
}
}
/** Injects the BPMN2 definition service used to parse process files. */
public void setBpmn2Service(DefinitionService bpmn2Service) {
this.bpmn2Service = bpmn2Service;
}
/** Injects the deployment descriptor merger. */
public void setMerger(DeploymentDescriptorMerger merger) {
this.merger = merger;
}
/** Injects the form manager used to (un)register forms. */
public void setFormManagerService(FormManagerService formManagerService) {
this.formManagerService = formManagerService;
}
/** Injects the optional executor service exposed to the runtime environment. */
public void setExecutorService(ExecutorService executorService) {
this.executorService = executorService;
}
/** Marks a deployed unit active and notifies listeners; no-op if the unit is unknown. */
@Override
public void activate(String deploymentId) {
DeployedUnit deployed = getDeployedUnit(deploymentId);
if (deployed != null) {
((DeployedUnitImpl)deployed).setActive(true);
notifyOnActivate(deployed.getDeploymentUnit(), deployed);
}
}
/** Marks a deployed unit inactive and notifies listeners; no-op if the unit is unknown. */
@Override
public void deactivate(String deploymentId) {
DeployedUnit deployed = getDeployedUnit(deploymentId);
if (deployed != null) {
((DeployedUnitImpl)deployed).setActive(false);
notifyOnDeactivate(deployed.getDeploymentUnit(), deployed);
}
}
}
|
|
package org.jcommand.queue.manager;
import java.io.Serializable;
import java.util.AbstractQueue;
import java.util.Arrays;
import java.util.Deque;
import java.util.Iterator;
import java.util.LinkedList;
import org.jcommand.queue.api.QueueObject;
/**
 * Queue that keeps a fixed-size array ("head") of elements sorted by a
 * time-aware priority comparator and overflows additional elements into
 * insertion-ordered buckets. Every SORTTRIGGER polls the head is refilled from
 * the buckets and re-sorted.
 *
 * NOTE(review): no synchronization anywhere — appears to assume single-threaded
 * (or externally synchronized) use; confirm with callers.
 */
public class TimeBucketPriorityQueue<T extends QueueObject> extends AbstractQueue<T> implements Serializable {
private static final long serialVersionUID = 2097375673049561916L;
// Capacity of the sorted head array.
private static final int QUEUE_SORT_SIZE = 10000;
// Number of polls between refill/re-sort cycles of the head array.
private static final int SORTTRIGGER = 3200;
@SuppressWarnings("unchecked")
private T[] queue = (T[]) new QueueObject[QUEUE_SORT_SIZE];
// Overflow storage, drained back into the head array during poll().
private Buckets buckets = new Buckets();
private PriorityComperator<T> comperator;
// Next slot to read from / one past the last occupied slot in the head array.
private int queueReadIndex = 0;
private int queueWriteIndex;
public TimeBucketPriorityQueue(PriorityComperator<T> comperator) {
this.comperator = comperator;
}
@Override
// Appends directly to the head array while there is room and no overflow exists;
// otherwise the element goes into the buckets to preserve arrival order.
public boolean offer(T queueObject) {
if (queueWriteIndex < QUEUE_SORT_SIZE && buckets.size() == 0) {
queue[queueWriteIndex++] = queueObject;
return true;
}
return buckets.add(queueObject);
}
// Binary-search-like probe for the boundary between non-null and null entries in
// the sorted head array, i.e. the index one past the last occupied slot.
// NOTE(review): assumes nulls sort to the tail after Arrays.sort — depends on
// PriorityComperator handling nulls; not verifiable from this file.
public int findWriteIndex() {
int position = QUEUE_SORT_SIZE - 1;
int divisor = 1;
int delta = 0;
T onPosition = queue[position];
T checkPosition = position > 0 ? queue[position - 1] : null;
while (!(onPosition == null && checkPosition != null)) {
divisor *= 2;
delta = (QUEUE_SORT_SIZE / divisor);
delta = delta == 0 ? 1 : delta;
if (onPosition == null) {
position -= delta;
if (position == 0) {
break;
}
} else {
position += delta;
if (position > QUEUE_SORT_SIZE) {
position = QUEUE_SORT_SIZE;
break;
}
}
onPosition = queue[position];
checkPosition = position > 0 ? queue[position - 1] : null;
}
return position;
}
@Override
// Every SORTTRIGGER polls: refill the consumed prefix from the buckets,
// refresh the comparator's time reference, re-sort, and reset the read index.
// NOTE(review): buckets.next() can return null at bucket boundaries (see
// Buckets.hasNext below), leaving null holes in the refilled prefix — verify.
public T poll() {
if (queueReadIndex % SORTTRIGGER == 0) {
if (queueReadIndex > 0) {
for (int count = 0; count < SORTTRIGGER; count++) {
if (buckets.hasNext()) {
queue[count] = buckets.next();
} else {
queue[count] = null;
}
}
}
comperator.setLatestTimeStamp(System.currentTimeMillis());
Arrays.sort(queue, comperator);
queueWriteIndex = findWriteIndex();
queueReadIndex = 0;
}
T queueObject = queue[queueReadIndex];
// Only advance while there are unread elements; at the end poll keeps
// returning the same (possibly null) slot.
queueReadIndex = queueReadIndex < queueWriteIndex ? queueReadIndex + 1 : queueReadIndex;
return queueObject;
}
@Override
// NOTE(review): returns the slot BEFORE the read index (the last polled element,
// not the next one), and reads index -1 (ArrayIndexOutOfBoundsException) when
// queueReadIndex == 0 — looks like an off-by-one; confirm intended semantics.
public T peek() {
int position = queueReadIndex - 1;
return queue[position];
}
@Override
public Iterator<T> iterator() {
throw new UnsupportedOperationException("iterator - use poll to get elements");
}
@Override
// Unread head elements plus everything still in the buckets.
// NOTE(review): the long sum is cast to int — truncates beyond Integer.MAX_VALUE.
public int size() {
int queueSize = queueWriteIndex - queueReadIndex;
queueSize = queueSize > 0 ? queueSize : 0;
return (int) (queueSize + buckets.size());
}
// FIFO list of fixed-size buckets acting as overflow storage; also iterates
// over (and drains) its own contents.
private class Buckets implements Iterable<T>, Iterator<T>, Serializable {
private static final long serialVersionUID = 3116836357355181214L;
Deque<Bucket> bucketList = new LinkedList<>();
// Appends to the last bucket, starting a new one when it is full.
public boolean add(T queueObject) {
if (bucketList.size() == 0 || !bucketList.getLast().add(queueObject)) {
Bucket bucket = new Bucket();
bucketList.addLast(bucket);
return bucket.add(queueObject);
}
return true;
}
@Override
// NOTE(review): when the first bucket is exhausted it is removed, but the stale
// reference is still queried, so this returns false once per bucket boundary
// even if later buckets hold elements; next() then yields a spurious null.
public boolean hasNext() {
if (bucketList.size() == 0) {
return false;
}
Bucket bucket = bucketList.getFirst();
if (!bucket.hasNext()) {
bucketList.removeFirst();
}
return bucketList.size() > 0 && bucket.hasNext();
}
@Override
// Pulls the next element from the first bucket; may return null (see hasNext note).
public T next() {
T nextQueueObject = null;
do {
if (!hasNext()) {
break;
}
nextQueueObject = bucketList.getFirst().next();
} while (nextQueueObject == null);
return nextQueueObject;
}
// public T current() {
// return bucketList.getFirst().current();
// }
@Override
public Iterator<T> iterator() {
return this;
}
// Total remaining elements across all buckets.
public long size() {
long size = 0;
for (Bucket bucket : bucketList) {
size += bucket.size();
}
return size;
}
}
// Fixed-size FIFO chunk of overflow elements.
private class Bucket implements Iterable<T>, Iterator<T>, Serializable {
private static final long serialVersionUID = -3790866376333630780L;
// short suffices: BUCKET_SIZE (32000) fits below Short.MAX_VALUE (32767).
short readPosition = 0;
short writePosition = 0;
private static final int BUCKET_SIZE = 10 * SORTTRIGGER;
@SuppressWarnings("unchecked")
T[] bucketList = (T[]) new QueueObject[BUCKET_SIZE];
// Returns false when full, signalling Buckets to start a new bucket.
public boolean add(T queueObject) {
if (writePosition < bucketList.length) {
bucketList[writePosition++] = queueObject;
return true;
}
return false;
}
@Override
public boolean hasNext() {
return readPosition < BUCKET_SIZE && readPosition < writePosition;
}
@Override
public T next() {
return bucketList[readPosition++];
}
// public T current() {
// return bucketList[readPosition - 1];
// }
@Override
public Iterator<T> iterator() {
return this;
}
public long size() {
return writePosition - readPosition;
}
}
}
|
|
/**
 * Derby - Class org.apache.derbyTesting.functionTests.tests.jdbcapi.XATest
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.derbyTesting.functionTests.tests.jdbcapi;
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Iterator;
import javax.sql.XAConnection;
import javax.sql.XADataSource;
import javax.transaction.xa.XAException;
import javax.transaction.xa.XAResource;
import javax.transaction.xa.Xid;
import junit.framework.Test;
import junit.framework.TestSuite;
import org.apache.derbyTesting.junit.BaseJDBCTestCase;
import org.apache.derbyTesting.junit.CleanDatabaseTestSetup;
import org.apache.derbyTesting.junit.J2EEDataSource;
import org.apache.derbyTesting.junit.JDBC;
import org.apache.derbyTesting.junit.TestConfiguration;
import org.apache.derbyTesting.junit.XATestUtil;
public class XATest extends BaseJDBCTestCase {
    /**
     * Creates a test case for the named test method (JUnit 3 convention).
     *
     * @param name name of the test method to run
     */
    public XATest(String name) {
        super(name);
    }
/**
* A single connection and 1 phase commit.
*
*
* Original "SQL" from xaSimplePositive.sql <code>
xa_connect ;
xa_start xa_noflags 0;
xa_getconnection;
drop table foo;
create table foo (a int);
insert into foo values (0);
select * from foo;
run resource '/org/apache/derbyTesting/functionTests/tests/store/global_xactTable.view';
select * from global_xactTable where gxid is not null order by gxid;
xa_end xa_success 0;
xa_commit xa_1phase 0;
xa_datasource 'wombat' shutdown;
</code>
*
     * @throws SQLException
     * @throws XAException
*/
public void testSingleConnectionOnePhaseCommit() throws SQLException,
XAException {
XADataSource xads = J2EEDataSource.getXADataSource();
J2EEDataSource.setBeanProperty(xads, "databaseName", "wombat");
XAConnection xac = xads.getXAConnection();
XAResource xar = xac.getXAResource();
Xid xid = XATestUtil.getXid(0, 32, 46);
xar.start(xid, XAResource.TMNOFLAGS);
Connection conn = xac.getConnection();
assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, conn.getHoldability());
Statement s = conn.createStatement();
assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, s
.getResultSetHoldability());
s.execute("create table foo (a int)");
s.executeUpdate("insert into foo values (0)");
ResultSet rs = s.executeQuery("select * from foo");
JDBC.assertDrainResults(rs, 1);
String[][] expectedRows = { { "(0", "ACTIVE", "false", "APP",
"UserTransaction" } };
XATestUtil.checkXATransactionView(conn, expectedRows);
s.close();
xar.end(xid, XAResource.TMSUCCESS);
// 1 phase commit
xar.commit(xid, true);
conn.close();
xac.close();
}
/*
* Two interleaving transaction and prepare/commit prepare/rollback.
*
* (original test said two connections but only one connection was opened)
*
* <code> xa_datasource 'wombat'; xa_connect user 'sku' password 'testxa' ;
*
* xa_start xa_noflags 1; xa_getconnection; insert into APP.foo values (1);
* xa_end xa_suspend 1;
*
* xa_start xa_noflags 2; insert into APP.foo values (2); xa_end xa_suspend
* 2;
*
* xa_start xa_resume 1; insert into APP.foo values (3); xa_end xa_suspend
* 1;
*
* xa_start xa_resume 2; insert into APP.foo values (4); select * from
* APP.global_xactTable where gxid is not null order by gxid; -- this
* prepare won't work since transaction 1 has been suspended - XA_PROTO
* xa_prepare 1;
*
* select * from APP.global_xactTable where gxid is not null order by gxid;
* xa_end xa_success 2; -- this assumes a resume xa_end xa_success 1;
* xa_prepare 1; xa_prepare 2; -- both transactions should be prepared
* select * from APP.global_xactTable where gxid is not null order by gxid; --
* NOTE: The following call to "xa_recover xa_startrscan" is apt to --
* return the result set rows in reverse order when changes to -- the Derby
* engine affect the number of transactions that it takes -- to create a
* database. The transactions are stored in a hash table -- based on a
* global and local id, and when the number of transactions -- changes, the
* (internal) local id can change, which may lead to a -- change in the
* result set order. This order is determined by the -- JVM's hashing
* algorithm. Examples of changes to the engine that -- can affect this
* include ones that cause more commits or that -- change the amount of data
* being stored, such as changes to the -- metadata statements (which is
* what prompted this explanation in -- the first place). Ultimately, the
* problem is that there is no -- way to order the return values from
* "xa_recover" since it is an -- ij internal statement, not SQL...
* xa_recover xa_startrscan; xa_recover xa_noflags;
*
* xa_commit xa_2Phase 1; xa_rollback 2; -- check results xa_start
* xa_noflags 3; select * from APP.global_xactTable where gxid is not null
* order by gxid; select * from APP.foo; xa_end xa_success 3;
*
* xa_prepare 3; -- should fail with XA_NOTA because we prepared a read only
* transaction xa_commit xa_1Phase 3; disconnect; </code>
*/
    public void testInterleavingTransactions() throws SQLException, XAException {
        // Seed data: both global branches below insert into this table.
        Statement preStatement = getConnection().createStatement();
        preStatement.execute("create table fooInterleaving (a int)");
        preStatement.execute("insert into fooInterleaving values (0)");
        preStatement.close();
        XADataSource xads = J2EEDataSource.getXADataSource();
        XAConnection xac = xads.getXAConnection("sku", "testxa");
        XAResource xar = xac.getXAResource();
        Xid xid1 = XATestUtil.getXid(1, 93, 18);
        Xid xid2 = XATestUtil.getXid(2, 45, 77);
        // Interleave work on the two branches over the same connection by
        // suspending one branch before starting/resuming the other.
        xar.start(xid1, XAResource.TMNOFLAGS);
        Connection conn = xac.getConnection();
        Statement s = conn.createStatement();
        s.executeUpdate("insert into APP.fooInterleaving values (1)");
        xar.end(xid1, XAResource.TMSUSPEND);
        xar.start(xid2, XAResource.TMNOFLAGS);
        s.executeUpdate("insert into APP.fooInterleaving values (2)");
        xar.end(xid2, XAResource.TMSUSPEND);
        xar.start(xid1, XAResource.TMRESUME);
        s.executeUpdate("insert into APP.fooInterleaving values (3)");
        xar.end(xid1, XAResource.TMSUSPEND);
        xar.start(xid2, XAResource.TMRESUME);
        s.executeUpdate("insert into APP.fooInterleaving values (4)");
        // Both branches should be visible as ACTIVE.
        String[][] expectedRows = {
                { "(1", "ACTIVE", "false", "SKU", "UserTransaction" },
                { "(2", "ACTIVE", "false", "SKU", "UserTransaction" } };
        XATestUtil.checkXATransactionView(conn, expectedRows);
        // this prepare won't work since
        // transaction 1 has been suspended - XA_PROTO
        try {
            xar.prepare(xid1);
            fail("FAIL - prepare on suspended transaction");
        } catch (XAException e) {
            if (e.errorCode != XAException.XAER_PROTO)
                XATestUtil.dumpXAException(
                        "FAIL - prepare on suspended transaction", e);
        }
        // check it was not prepared
        XATestUtil.checkXATransactionView(conn, expectedRows);
        // Ending with TMSUCCESS here assumes an implicit resume of the
        // suspended branches (see the SQL script comment above).
        xar.end(xid2, XAResource.TMSUCCESS);
        xar.end(xid1, XAResource.TMSUCCESS);
        xar.prepare(xid1);
        xar.prepare(xid2);
        // both should be prepared.
        expectedRows = new String[][] {
                { "(1", "PREPARED", "false", "SKU", "UserTransaction" },
                { "(2", "PREPARED", "false", "SKU", "UserTransaction" } };
        XATestUtil.checkXATransactionView(conn, expectedRows);
        // A full recovery scan must report exactly the two prepared branches.
        Xid[] recoveredStart = xar.recover(XAResource.TMSTARTRSCAN);
        assertEquals(2, recoveredStart.length);
        Xid[] recovered = xar.recover(XAResource.TMNOFLAGS);
        assertEquals(0, recovered.length);
        Xid[] recoveredEnd = xar.recover(XAResource.TMENDRSCAN);
        assertEquals(0, recoveredEnd.length);
        // Recovery order is unspecified, so dispatch on the format id.
        for (int i = 0; i < recoveredStart.length; i++) {
            Xid xid = recoveredStart[i];
            if (xid.getFormatId() == 1) {
                // commit 1 with 2pc
                xar.commit(xid, false);
            } else if (xid.getFormatId() == 2) {
                xar.rollback(xid);
            } else {
                fail("FAIL: unknown xact");
            }
        }
        // check the results
        Xid xid3 = XATestUtil.getXid(3, 2, 101);
        xar.start(xid3, XAResource.TMNOFLAGS);
        expectedRows = new String[][] { { "(3", "IDLE", "NULL", "SKU",
                "UserTransaction" } };
        XATestUtil.checkXATransactionView(conn, expectedRows);
        // Rows 0, 1, 3 survive: branch 1 committed, branch 2 rolled back.
        ResultSet rs = s.executeQuery("select * from APP.fooInterleaving");
        expectedRows = new String[][] { { "0" }, { "1" }, { "3" } };
        JDBC.assertFullResultSet(rs, expectedRows);
        rs.close();
        xar.end(xid3, XAResource.TMSUCCESS);
        // Branch 3 only read data, so prepare must report read-only.
        int pr = xar.prepare(xid3);
        if (pr != XAResource.XA_RDONLY)
            fail("FAIL - prepare on read only xact returned " + pr);
        try {
            // A read-only branch is completed by prepare; committing it
            // afterwards must fail with XAER_NOTA.
            xar.commit(xid3, true);
            fail("FAIL - 2pc commit on read-only xact");
        } catch (XAException e) {
            if (e.errorCode != XAException.XAER_NOTA)
                throw e;
        }
        s.close();
        conn.close();
        xac.close();
    }
/**
     * Tests on INIT STATE (no transaction started). Original SQL from xaStateTran.sql. <code>
-- the following should error XAER_NOTA
xa_start xa_join 11;
-- the following should error XAER_NOTA
xa_start xa_resume 11;
-- the following should error XAER_NOTA
xa_end xa_success 11;
-- the following should error XAER_NOTA
xa_end xa_fail 11;
-- the following should error XAER_NOTA
xa_end xa_suspend 11;
-- the following should error XAER_NOTA
xa_prepare 11;
-- the following should error XAER_NOTA
xa_commit xa_1phase 11;
-- the following should error XAER_NOTA
xa_commit xa_2phase 11;
-- the following should error XAER_NOTA
xa_rollback 11;
-- the following should error XAER_NOTA
xa_forget 11;
</code>
*/
public void testNoTransaction() throws SQLException, XAException {
XADataSource xads = J2EEDataSource.getXADataSource();
XAConnection xac = xads.getXAConnection();
XAResource xar = xac.getXAResource();
Xid xid11 = XATestUtil.getXid(11, 3, 128);
try {
xar.start(xid11, XAResource.TMJOIN);
} catch (XAException e) {
if (e.errorCode != XAException.XAER_NOTA)
throw e;
}
try {
xar.start(xid11, XAResource.TMRESUME);
} catch (XAException e) {
if (e.errorCode != XAException.XAER_NOTA)
throw e;
}
try {
xar.end(xid11, XAResource.TMSUCCESS);
} catch (XAException e) {
if (e.errorCode != XAException.XAER_NOTA)
throw e;
}
try {
xar.end(xid11, XAResource.TMFAIL);
} catch (XAException e) {
if (e.errorCode != XAException.XAER_NOTA)
throw e;
}
try {
xar.end(xid11, XAResource.TMSUSPEND);
} catch (XAException e) {
if (e.errorCode != XAException.XAER_NOTA)
throw e;
}
try {
xar.prepare(xid11);
} catch (XAException e) {
if (e.errorCode != XAException.XAER_NOTA)
throw e;
}
try {
xar.commit(xid11, false);
} catch (XAException e) {
if (e.errorCode != XAException.XAER_NOTA)
throw e;
}
try {
xar.commit(xid11, true);
} catch (XAException e) {
if (e.errorCode != XAException.XAER_NOTA)
throw e;
}
try {
xar.rollback(xid11);
} catch (XAException e) {
if (e.errorCode != XAException.XAER_NOTA)
throw e;
}
try {
xar.forget(xid11);
} catch (XAException e) {
if (e.errorCode != XAException.XAER_NOTA)
throw e;
}
}
/**
     * Morph a connection between local and global transactions.
*/
    public void testMorph() throws SQLException, XAException {
        // Seed table with rows 0-4; rows >= 2000 are added during the test.
        Statement preStatement = getConnection().createStatement();
        preStatement.execute("create table fooMorph (a int)");
        preStatement.executeUpdate("insert into APP.fooMorph values (0)");
        preStatement.executeUpdate("insert into APP.fooMorph values (1)");
        preStatement.executeUpdate("insert into APP.fooMorph values (2)");
        preStatement.executeUpdate("insert into APP.fooMorph values (3)");
        preStatement.executeUpdate("insert into APP.fooMorph values (4)");
        preStatement.close();
        XADataSource xads = J2EEDataSource.getXADataSource();
        XAConnection xac = xads.getXAConnection();
        XAResource xar = xac.getXAResource();
        Connection conn = xac.getConnection();
        /*
         * autocommit off; insert into foo values (1); select * from
         * global_xactTable where gxid is not null order by gxid,username;
         * commit;
         */
        conn.setAutoCommit(false);
        Statement s = conn.createStatement();
        s.executeUpdate("insert into APP.fooMorph values (2001)");
        // no rows expected
        XATestUtil.checkXATransactionView(conn, null);
        conn.commit();
        /*
         * autocommit on; insert into foo values (2); select * from
         * global_xactTable where gxid is not null order by gxid,username;
         *
         */
        conn.setAutoCommit(true);
        s.executeUpdate("insert into APP.fooMorph values (2002)");
        XATestUtil.checkXATransactionView(conn, null);
        /*
         * -- morph the connection to a global transaction xa_start xa_noflags
         * 1; select * from global_xactTable where gxid is not null order by
         * gxid,username; insert into foo values (3);
         */
        Xid xid = XATestUtil.getXid(1001, 66, 13);
        xar.start(xid, XAResource.TMNOFLAGS);
        String[][] expectedRows = { { "(1", "IDLE", "NULL", "APP",
                "UserTransaction" } };
        XATestUtil.checkXATransactionView(conn, expectedRows);
        s.executeUpdate("insert into APP.fooMorph values (2003)");
        /*
         * -- disallowed commit; -- disallowed rollback; -- disallowed
         * autocommit on; -- OK autocommit off;
         */
        // While morphed into a global transaction, local transaction
        // controls on the Connection must be rejected.
        try {
            conn.commit();
            fail("FAIL: commit allowed in global xact");
        } catch (SQLException e) {
        }
        try {
            conn.rollback();
            fail("FAIL: roll back allowed in global xact");
        } catch (SQLException e) {
        }
        try {
            conn.setAutoCommit(true);
            fail("FAIL: setAutoCommit(true) allowed " + "in global xact");
        } catch (SQLException e) {
        }
        try {
            conn.setSavepoint();
            fail("FAIL: setSavepoint() allowed in global xact");
        } catch (SQLException e) {
        }
        try {
            conn.setSavepoint("badsavepoint");
            fail("FAIL: setSavepoint(String) allowed in " + "global xact");
        } catch (SQLException e) {
        }
        conn.setAutoCommit(false);
        // s was created in local mode so it has holdability
        // set, will execute but ResultSet will have close on commit
        // DERBY-1158 query with holdable statement
        s.executeQuery("select * from APP.fooMorph where A >= 2000").close();
        s.close();
        // statement created in global xact is CLOSE_CURSORS_AT_COMMIT
        s = conn.createStatement();
        assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, s
                .getResultSetHoldability());
        /*
         * select * from foo; xa_end xa_success 1; xa_prepare 1;
         */
        // NOTE(review): expectedRows is assigned here but never checked
        // before rs.close() — looks like a missing
        // JDBC.assertFullResultSet(rs, expectedRows); confirm intent.
        ResultSet rs = s.executeQuery("select * from APP.fooMorph where A >= 2000");
        expectedRows = new String[][] { { "2001" }, { "2002" }, { "2003" } };
        rs.close();
        xar.end(xid, XAResource.TMSUCCESS);
        xar.prepare(xid);
        /*
         * -- dup id xa_start xa_noflags 1;
         */
        try {
            xar.start(xid, XAResource.TMNOFLAGS);
            fail("FAIL - start with duplicate XID");
        } catch (XAException e) {
            if (e.errorCode != XAException.XAER_DUPID)
                throw e;
        }
        /*
         * xa_start xa_noflags 2; -- still should disallow autommit; autocommit
         * on; -- still should disallow commit and rollback commit; rollback;
         * select * from global_xactTable where gxid is not null order by
         * gxid,username; xa_end xa_suspend 2;
         */
        Xid xid2 = XATestUtil.getXid(1002, 23, 3);
        xar.start(xid2, XAResource.TMNOFLAGS);
        try {
            conn.commit();
            fail("FAIL: commit allowed in global xact");
        } catch (SQLException e) {
        }
        try {
            conn.rollback();
            fail("FAIL: roll back allowed in global xact");
        } catch (SQLException e) {
        }
        try {
            conn.setAutoCommit(true);
            fail("FAIL: setAutoCommit(true) allowed in global xact");
        } catch (SQLException e) {
        }
        conn.setAutoCommit(false);
        xar.end(xid2, XAResource.TMSUSPEND);
        /*
         * -- get local connection again xa_getconnection;
         *
         * insert into foo values (5); -- autocommit should be on by default;
         * commit;
         *
         * autocommit off; insert into foo values (6); -- commit and rollback is
         * allowed on local connection rollback;
         *
         * insert into foo values (6); commit;
         */
        conn = xac.getConnection();
        s = conn.createStatement();
        s.executeUpdate("insert into APP.fooMorph values (2005)");
        conn.commit();
        conn.setAutoCommit(false);
        s.executeUpdate("insert into APP.fooMorph values (2006)");
        conn.rollback();
        s.executeUpdate("insert into APP.fooMorph values (2007)");
        conn.commit();
        // Branch 1001 is prepared, branch 1002 is suspended (shows IDLE).
        expectedRows = new String[][] {
                { "(1", "PREPARED", "false", "APP", "UserTransaction" },
                { "(1", "IDLE", "NULL", "APP", "UserTransaction" } };
        XATestUtil.checkXATransactionView(conn, expectedRows);
        /*
         * -- I am still able to commit other global transactions while I am
         * attached to a -- local transaction. xa_commit xa_2phase 1; xa_end
         * xa_success 2; xa_rollback 2;
         */
        xar.commit(xid, false);
        xar.end(xid2, XAResource.TMSUCCESS);
        xar.rollback(xid2);
        XATestUtil.checkXATransactionView(conn, null);
        // 2006 was rolled back; 2001/2002/2003 committed via branch 1001.
        rs = s.executeQuery("select * from APP.fooMorph where A >= 2000");
        expectedRows = new String[][] { { "2001" }, { "2002" }, { "2003" },
                { "2005" }, { "2007" } };
        JDBC.assertFullResultSet(rs, expectedRows);
        rs.close();
        conn.close();
        /*
         * xa_getconnection; select * from global_xactTable where gxid is not
         * null order by gxid,username; select * from foo; autocommit off;
         * delete from foo;
         */
        conn = xac.getConnection();
        conn.setAutoCommit(false);
        s = conn.createStatement();
        s.executeUpdate("delete from app.fooMorph");
        rs = s.executeQuery("select * from APP.fooMorph");
        JDBC.assertEmpty(rs);
        rs.close();
        /*
         * -- yanking a local connection away should rollback the changes
         */
        conn = xac.getConnection();
        conn.setAutoCommit(false);
        s = conn.createStatement();
        rs = s.executeQuery("select * from APP.fooMorph where A >= 2000");
        expectedRows = new String[][] { { "2001" }, { "2002" }, { "2003" },
                { "2005" }, { "2007" } };
        JDBC.assertFullResultSet(rs, expectedRows);
        /*
         * -- cannot morph it if the local transaction is not idle xa_start
         * xa_noflags 3; commit; -- now morph it to a global transaction
         * xa_start xa_noflags 3;
         */
        Xid xid3 = XATestUtil.getXid(1003, 27, 9);
        try {
            xar.start(xid3, XAResource.TMNOFLAGS);
            fail("FAIL XAResource.start on a global transaction with an active local transaction (autocommit false)");
        } catch (XAException xae) {
            if (xae.errorCode != XAException.XAER_OUTSIDE)
                throw xae;
        }
        conn.commit();
        xar.start(xid3, XAResource.TMNOFLAGS);
        /*
         * -- now I shouldn't be able to yank it xa_getconnection;
         */
        // DERBY-341 - client skip XAConnection with active local xact
        if (usingEmbedded()) {
            try {
                xac.getConnection();
                fail("FAIL: getConnection with active global xact");
            } catch (SQLException sqle) {
                assertSQLState("XJ059", sqle);
            }
        }
        /*
         * select * from foo; delete from foo;
         *
         * xa_end xa_fail 3; xa_rollback 3; -- local connection again
         * xa_getconnection; select * from global_xactTable where gxid is not
         * null order by gxid,username; select * from foo;
         */
        s = conn.createStatement();
        s.executeUpdate("delete from APP.fooMorph");
        rs = s.executeQuery("select * from APP.fooMorph where A >= 2000");
        JDBC.assertEmpty(rs);
        rs.close();
        try {
            xar.end(xid3, XAResource.TMFAIL);
        } catch (XAException e) {
            if (e.errorCode != XAException.XA_RBROLLBACK)
                throw e;
        }
        xar.rollback(xid3);
        // After rollback of branch 1003, the deletes above are undone.
        conn = xac.getConnection();
        s = conn.createStatement();
        rs = s.executeQuery("select * from APP.fooMorph where A >= 2000");
        expectedRows = new String[][] { { "2001" }, { "2002" }, { "2003" },
                { "2005" }, { "2007" } };
        JDBC.assertFullResultSet(rs, expectedRows);
        rs.close();
        s.close();
        conn.close();
    }
/**
* This test checks the fix on DERBY-4310, for not repreparing PreparedStatements
* upon calling close() on them.
*/
public void testDerby4310PreparedStatement() throws SQLException, XAException {
XADataSource xads = J2EEDataSource.getXADataSource();
J2EEDataSource.setBeanProperty(xads, "databaseName", "wombat");
XAConnection xaconn = xads.getXAConnection();
XAResource xar = xaconn.getXAResource();
Xid xid = XATestUtil.getXid(1,93,18);
/* Create the table and insert some records into it. */
Connection conn = xaconn.getConnection();
Statement s = conn.createStatement();
s.executeUpdate("CREATE TABLE foo4310_PS (I INT)");
conn.createStatement().executeUpdate("insert into APP.foo4310_PS values (0)");
conn.createStatement().executeUpdate("insert into APP.foo4310_PS values (1)");
conn.createStatement().executeUpdate("insert into APP.foo4310_PS values (2)");
conn.commit();
/* Prepare and execute the statement to be tested */
PreparedStatement ps = conn.prepareStatement("SELECT * FROM APP.foo4310_PS");
ps.executeQuery().close();
/* Start and end a transaction on the XAResource object */
xar.start(xid, XAResource.TMNOFLAGS);
xar.end(xid, XAResource.TMSUCCESS);
/* Drop the table on a parallel, regular connection */
Connection conn2 = getConnection();
Statement s2 = conn2.createStatement();
s2.execute("DROP TABLE foo4310_PS");
conn2.commit();
conn2.close();
try {
/* Try to close the prepared statement. This would throw an exception
* before the fix, claiming that the table was not found. */
ps.close();
} finally {
/* Rollback the transaction and close the connections */
xar.rollback(xid);
conn.close();
xaconn.close();
}
}
/**
* This test checks the fix on DERBY-4310, for not repreparing CallableStatements
* upon calling close() on them.
*/
public void testDerby4310CallableStatement() throws SQLException, XAException {
XADataSource xads = J2EEDataSource.getXADataSource();
J2EEDataSource.setBeanProperty(xads, "databaseName", "wombat");
XAConnection xaconn = xads.getXAConnection();
XAResource xar = xaconn.getXAResource();
Xid xid = XATestUtil.getXid(1,93,18);
/* Create the procedure bazed on XATest.zeroArg() */
Connection conn = xaconn.getConnection();
Statement s = conn.createStatement();
s.executeUpdate("CREATE PROCEDURE ZA() LANGUAGE JAVA "+
"EXTERNAL NAME 'org.apache.derbyTesting.functionTests.tests.jdbcapi.XATest.zeroArg' "+
"PARAMETER STYLE JAVA");
conn.commit();
/* Prepare and execute CallableStatement based on the procedure above */
CallableStatement cs = conn.prepareCall("CALL ZA()");
cs.execute();
/* Start and end a transaction on the XAResource object */
xar.start(xid, XAResource.TMNOFLAGS);
xar.end(xid, XAResource.TMSUCCESS);
/* Drop the procedure on a parallel, regular connection */
Connection conn2 = getConnection();
Statement s2 = conn2.createStatement();
s2.execute("DROP PROCEDURE ZA");
conn2.commit();
conn2.close();
try {
/* Try to close the prepared statement. This would throw an exception
* before the fix, claiming that the table was not found. */
cs.close();
} finally {
/* Rollback the transaction and close the connections */
xar.rollback(xid);
conn.close();
xaconn.close();
}
}
/**
     * Derby-966 holdability and global/local transactions. (work in
* progress)
*/
    public void testDerby966() throws SQLException, XAException {
        // Seed data; rows >= 2000 exercise the "A >= 2000" style queries
        // used by sibling tests, but here every query selects all rows.
        Statement preStatement = getConnection().createStatement();
        preStatement.execute("create table foo966 (a int)");
        preStatement.executeUpdate("insert into APP.foo966 values (0)");
        preStatement.executeUpdate("insert into APP.foo966 values (1)");
        preStatement.executeUpdate("insert into APP.foo966 values (2)");
        preStatement.executeUpdate("insert into APP.foo966 values (3)");
        preStatement.executeUpdate("insert into APP.foo966 values (4)");
        preStatement.executeUpdate("insert into APP.foo966 values (2001)");
        preStatement.executeUpdate("insert into APP.foo966 values (2002)");
        preStatement.executeUpdate("insert into APP.foo966 values (2003)");
        preStatement.executeUpdate("insert into APP.foo966 values (2005)");
        preStatement.executeUpdate("insert into APP.foo966 values (2007)");
        preStatement.close();
        XADataSource xads = J2EEDataSource.getXADataSource();
        XAConnection xac = xads.getXAConnection();
        XAResource xar = xac.getXAResource();
        Xid xid = XATestUtil.getXid(996, 9, 48);
        Connection conn = xac.getConnection();
        // Obtain Statements and PreparedStatements
        // with all the holdability options.
        // Naming scheme: s/ps = Statement/PreparedStatement, then
        // d/h/c = default/hold/close holdability requested, then the
        // connection default at creation time (h/c); _d = delete statement.
        assertEquals(ResultSet.HOLD_CURSORS_OVER_COMMIT, conn.getHoldability());
        Statement sdh = conn.createStatement();
        assertEquals(ResultSet.HOLD_CURSORS_OVER_COMMIT, sdh
                .getResultSetHoldability());
        checkHeldRS(conn, sdh, sdh.executeQuery("select * from app.foo966"));
        PreparedStatement psdh = conn.prepareStatement("SELECT * FROM APP.foo966");
        PreparedStatement psdh_d = conn
                .prepareStatement("DELETE FROM APP.foo966 WHERE A < -99");
        assertEquals(ResultSet.HOLD_CURSORS_OVER_COMMIT, psdh
                .getResultSetHoldability());
        checkHeldRS(conn, psdh, psdh.executeQuery());
        Statement shh = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY,
                ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT);
        assertEquals(ResultSet.HOLD_CURSORS_OVER_COMMIT, shh
                .getResultSetHoldability());
        checkHeldRS(conn, shh, shh.executeQuery("select * from app.foo966"));
        PreparedStatement pshh = conn.prepareStatement("SELECT * FROM APP.foo966",
                ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY,
                ResultSet.HOLD_CURSORS_OVER_COMMIT);
        PreparedStatement pshh_d = conn.prepareStatement(
                "DELETE FROM APP.foo966 WHERE A < -99",
                ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY,
                ResultSet.HOLD_CURSORS_OVER_COMMIT);
        // NOTE(review): asserts shh again rather than the pshh just created —
        // looks like a copy-paste slip; confirm the intended target.
        assertEquals(ResultSet.HOLD_CURSORS_OVER_COMMIT, shh
                .getResultSetHoldability());
        checkHeldRS(conn, pshh, pshh.executeQuery());
        Statement sch = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY,
                ResultSet.CONCUR_READ_ONLY, ResultSet.CLOSE_CURSORS_AT_COMMIT);
        assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, sch
                .getResultSetHoldability());
        checkHeldRS(conn, sch, sch.executeQuery("select * from app.foo966"));
        PreparedStatement psch = conn.prepareStatement("SELECT * FROM APP.foo966",
                ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY,
                ResultSet.CLOSE_CURSORS_AT_COMMIT);
        PreparedStatement psch_d = conn.prepareStatement(
                "DELETE FROM APP.foo966 WHERE A < -99",
                ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY,
                ResultSet.CLOSE_CURSORS_AT_COMMIT);
        assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, psch_d
                .getResultSetHoldability());
        checkHeldRS(conn, psch, psch.executeQuery());
        // set the connection's holdability to false
        conn.setHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT);
        Statement sdc = conn.createStatement();
        assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, sdc
                .getResultSetHoldability());
        checkHeldRS(conn, sdc, sdc.executeQuery("select * from app.foo966"));
        PreparedStatement psdc = conn.prepareStatement("SELECT * FROM APP.foo966");
        PreparedStatement psdc_d = conn
                .prepareStatement("DELETE FROM APP.foo966 WHERE A < -99");
        assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, psdc
                .getResultSetHoldability());
        checkHeldRS(conn, psdc, psdc.executeQuery());
        Statement shc = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY,
                ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT);
        // NOTE(review): asserts psdc rather than the shc just created — shc
        // was opened with an explicit HOLD_CURSORS_OVER_COMMIT (see the
        // matching assert near the end of this method), so asserting CLOSE
        // on shc here would fail; confirm this repeat is intentional.
        assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, psdc
                .getResultSetHoldability());
        checkHeldRS(conn, shc, shc.executeQuery("select * from app.foo966"));
        PreparedStatement pshc = conn.prepareStatement("SELECT * FROM APP.foo966",
                ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY,
                ResultSet.HOLD_CURSORS_OVER_COMMIT);
        PreparedStatement pshc_d = conn.prepareStatement(
                "DELETE FROM APP.foo966 WHERE A < -99",
                ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY,
                ResultSet.HOLD_CURSORS_OVER_COMMIT);
        assertEquals(ResultSet.HOLD_CURSORS_OVER_COMMIT, pshc
                .getResultSetHoldability());
        checkHeldRS(conn, pshc, pshc.executeQuery());
        Statement scc = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY,
                ResultSet.CONCUR_READ_ONLY, ResultSet.CLOSE_CURSORS_AT_COMMIT);
        assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, scc
                .getResultSetHoldability());
        checkHeldRS(conn, scc, scc.executeQuery("select * from app.foo966"));
        PreparedStatement pscc = conn.prepareStatement("SELECT * FROM APP.foo966",
                ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY,
                ResultSet.CLOSE_CURSORS_AT_COMMIT);
        PreparedStatement pscc_d = conn.prepareStatement(
                "DELETE FROM APP.foo966 WHERE A < -99",
                ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY,
                ResultSet.CLOSE_CURSORS_AT_COMMIT);
        assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, pscc
                .getResultSetHoldability());
        checkHeldRS(conn, pscc, pscc.executeQuery());
        // Revert back to holdable
        conn.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
        ResultSet rs = sdh.executeQuery("SELECT * FROM APP.foo966");
        rs.next();
        // before commit
        assertEquals(0, +rs.getInt(1));
        conn.commit();
        // after commit: a holdable ResultSet stays open across commit
        rs.next();
        assertEquals(1, rs.getInt(1));
        rs.close();
        // ensure a transaction is active to test DERBY-1025
        rs = sdh.executeQuery("SELECT * FROM APP.foo966");
        // This switch to global is ok because conn
        // is in auto-commit mode, thus the start performs
        // an implicit commit to complete the local transaction.
        // start a global xact and test those statements.
        xar.start(xid, XAResource.TMNOFLAGS);
        // Statements not returning ResultSet's should be ok
        sdh.executeUpdate("DELETE FROM APP.foo966 where A < -99");
        shh.executeUpdate("DELETE FROM APP.foo966 where A < -99");
        sch.executeUpdate("DELETE FROM APP.foo966 where A < -99");
        ArrayList openRS = new ArrayList();
        // Statements obtained while default was hold.
        // All should work, holdability will be downgraded
        // to close on commit for those Statements with hold set.
        openRS.add(sdh.executeQuery("SELECT * FROM APP.foo966"));
        openRS.add(shh.executeQuery("SELECT * FROM APP.foo966"));
        openRS.add(sch.executeQuery("SELECT * FROM APP.foo966"));
        // PreparedStatements obtained while default was hold.
        // Holdability should be downgraded.
        openRS.add(psdh.executeQuery());
        openRS.add(pshh.executeQuery());
        openRS.add(psch.executeQuery());
        // Statements not returning ResultSet's should be ok
        psdh_d.executeUpdate();
        pshh_d.executeUpdate();
        psch_d.executeUpdate();
        // Statements not returning ResultSet's should be ok
        sdc.executeUpdate("DELETE FROM APP.foo966 where A < -99");
        shc.executeUpdate("DELETE FROM APP.foo966 where A < -99");
        scc.executeUpdate("DELETE FROM APP.foo966 where A < -99");
        // Statements obtained while default was close.
        // all should return close on commit ResultSets
        openRS.add(sdc.executeQuery("SELECT * FROM APP.foo966"));
        openRS.add(shc.executeQuery("SELECT * FROM APP.foo966"));
        openRS.add(scc.executeQuery("SELECT * FROM APP.foo966"));
        // PreparedStatements obtained while default was close.
        openRS.add(psdc.executeQuery());
        openRS.add(pshc.executeQuery());
        openRS.add(pscc.executeQuery());
        // Statements not returning ResultSet's should be ok
        psdc_d.executeUpdate();
        pshc_d.executeUpdate();
        pscc_d.executeUpdate();
        // All the ResultSets should be open. Run a simple
        // test, clearWarnings throws an error if the ResultSet
        // is closed. Also would be nice here to use the new
        // JDBC 4.0 method getHoldability to ensure the
        // holdability is reported correctly.
        int orsCount = 0;
        for (Iterator i = openRS.iterator(); i.hasNext();) {
            ResultSet ors = (ResultSet) i.next();
            ors.clearWarnings();
            orsCount++;
        }
        assertEquals("Incorrect number of open result sets", 12, orsCount);
        // Test we cannot switch the connection to holdable
        try {
            conn.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
            fail("FAIL - set holdability in global xact.");
        } catch (SQLException sqle) {
            assertSQLState("XJ05C", sqle);
        }
        // JDBC 4.0 (proposed final draft) section allows
        // drivers to change the holdability when creating
        // a Statement object and attach a warning to the Connection.
        Statement sglobalhold = conn.createStatement(
                ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY,
                ResultSet.HOLD_CURSORS_OVER_COMMIT);
        assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, sglobalhold
                .getResultSetHoldability());
        sglobalhold.close();
        // DERBY2481 Client does not downgrade PreparedStatement holdability
        if (!usingDerbyNetClient()) {
            PreparedStatement psglobalhold = conn.prepareStatement(
                    "SELECT * FROM APP.foo966", ResultSet.TYPE_FORWARD_ONLY,
                    ResultSet.CONCUR_READ_ONLY,
                    ResultSet.HOLD_CURSORS_OVER_COMMIT);
            assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, psglobalhold
                    .getResultSetHoldability());
            psglobalhold.close();
            // Inside the global transaction every statement must report
            // close-on-commit holdability.
            assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, sdh
                    .getResultSetHoldability());
            assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, sch
                    .getResultSetHoldability());
            assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, psdh
                    .getResultSetHoldability());
            assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, pshh
                    .getResultSetHoldability());
            assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, psch
                    .getResultSetHoldability());
            assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, sdc
                    .getResultSetHoldability());
            assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, shc
                    .getResultSetHoldability());
            assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, scc
                    .getResultSetHoldability());
            assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, psdh_d
                    .getResultSetHoldability());
            assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, pshh_d
                    .getResultSetHoldability());
            assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, psch_d
                    .getResultSetHoldability());
        }
        xar.end(xid, XAResource.TMSUCCESS);
        // Only reads were performed in the global transaction.
        if (xar.prepare(xid) != XAResource.XA_RDONLY)
            fail("FAIL prepare didn't indicate r/o");
        // All the ResultSets should be closed. Run a simple
        // test, clearWarnings throws an error if the ResultSet
        // is closed.
        int crsCount = 0;
        for (Iterator i = openRS.iterator(); i.hasNext();) {
            ResultSet crs = (ResultSet) i.next();
            try {
                crs.clearWarnings();
            } catch (SQLException sqle) {
            }
            crsCount++;
        }
        assertEquals("After global transaction closed ResultSets ", 12,
                crsCount);
        // Check the statements revert to holdable as required.
        assertEquals(ResultSet.HOLD_CURSORS_OVER_COMMIT, sdh
                .getResultSetHoldability());
        assertEquals(ResultSet.HOLD_CURSORS_OVER_COMMIT, shh
                .getResultSetHoldability());
        assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, sch
                .getResultSetHoldability());
        assertEquals(ResultSet.HOLD_CURSORS_OVER_COMMIT, psdh
                .getResultSetHoldability());
        assertEquals(ResultSet.HOLD_CURSORS_OVER_COMMIT, pshh
                .getResultSetHoldability());
        assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, psch
                .getResultSetHoldability());
        assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, sdc
                .getResultSetHoldability());
        assertEquals(ResultSet.HOLD_CURSORS_OVER_COMMIT, shc
                .getResultSetHoldability());
        assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, scc
                .getResultSetHoldability());
        assertEquals(ResultSet.HOLD_CURSORS_OVER_COMMIT, psdh_d
                .getResultSetHoldability());
        assertEquals(ResultSet.HOLD_CURSORS_OVER_COMMIT, pshh_d
                .getResultSetHoldability());
        assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, psch_d
                .getResultSetHoldability());
        conn.close();
    }
/**
 * DERBY-4731
 * Test using a GLOBAL TEMPORARY TABLE table in an
 * XA transaction and leaving it active during two phase commit.
 * Before the fix this test would throw the following at commit
 * time:
 * ERROR 40XT0: An internal error was identified by RawStore module.
 * <p>
 * Runs the shared fixture with logged (real table) work included in the
 * transaction, and without touching the temp table after end/commit.
 *
 * @throws XAException
 * @throws SQLException
 *
 */
public void testXATempTableD4731_RawStore()
        throws SQLException, XAException {
    doXATempTableD4731Work(true, false, XATestUtil.getXid(997, 9, 49));
}
/**
 * DERBY-4735 Temp tables with XA transactions
 * an Assert will occur on prepare if only
 * temp table work is done in the xact.
 * <p>
 * NOTE(review): the leading 'x' keeps JUnit's reflection-based runner
 * from picking this fixture up — presumably disabled until DERBY-4735
 * is fixed; confirm before renaming it back to test*.
 *
 * @throws XAException
 * @throws SQLException
 *
 */
public void xtestXATempTableD4735_Assert()
        throws SQLException, XAException {
    // Two variants: without and with access to the temp table after
    // the xa_end/commit of the global transaction.
    doXATempTableD4731Work(false, false, XATestUtil.getXid(999, 11, 51));
    doXATempTableD4731Work(false, true, XATestUtil.getXid(1000, 12, 52));
}
/**
 * DERBY-4743 Temp tables with XA transactions
 *
 * Will throw an error in network server when attempt is made to
 * access the global temporary table after the end and commit.
 * <p>
 * NOTE(review): the leading 'x' disables this fixture for the JUnit
 * runner — presumably pending the DERBY-4743 fix; confirm.
 *
 * @throws XAException
 * @throws SQLException
 *
 */
public void xtestXATempTableD4743()
        throws SQLException, XAException {
    doXATempTableD4731Work(true, true, XATestUtil.getXid(998, 10, 50));
}
/**
 * The two cases for DERBY-4731 do essentially the same thing. Except doing
 * logged work causes the RawStore error and doing only temp table
 * operations causes the assert.
 *
 * @param doLoggedWorkInXact when true, creates a real (logged) table in
 *        the global transaction before using the temp table
 * @param access_temp_table_after_xaendandcommit when true, queries the
 *        temp table again after xa_end/prepare/commit
 * @param xid the global transaction id to use
 * @throws SQLException
 * @throws XAException
 */
private void doXATempTableD4731Work(
        boolean doLoggedWorkInXact,
        boolean access_temp_table_after_xaendandcommit,
        Xid xid)
        throws SQLException, XAException{
    XADataSource xads = J2EEDataSource.getXADataSource();
    XAConnection xaconn = xads.getXAConnection();
    XAResource xar = xaconn.getXAResource();
    // Associate the connection with the global transaction before any work.
    xar.start(xid, XAResource.TMNOFLAGS);
    Connection conn = xaconn.getConnection();
    Statement s = conn.createStatement();
    if (doLoggedWorkInXact){
        // need to do some real work in our transaction
        // so make a table
        makeARealTable(s);
    }
    // make the temp table
    s.executeUpdate("DECLARE GLOBAL TEMPORARY TABLE SESSION.T1 ( XWSID INT, XCTID INT, XIID CHAR(26), XVID SMALLINT, XLID CHAR(8) FOR BIT DATA) ON COMMIT DELETE ROWS NOT LOGGED ON ROLLBACK DELETE ROWS");
    // insert a row
    PreparedStatement ps =
        conn.prepareStatement("INSERT INTO SESSION.T1 VALUES (?,?,?,?,?)");
    ps.setInt(1,1);
    ps.setInt(2,1);
    ps.setString(3,"hello");
    ps.setShort(4, (short) 1);
    ps.setBytes(5, new byte[] {0x0,0x1});
    ps.executeUpdate();
    // The inserted row must be visible inside the transaction.
    ResultSet rs = s.executeQuery("SELECT count(*) FROM SESSION.t1");
    JDBC.assertFullResultSet(rs, new String[][] {{"1"}});
    rs.close();
    // You could work around the issue by dropping the TEMP table
    //s.executeUpdate("DROP TABLE SESSION.T1");
    xar.end(xid, XAResource.TMSUCCESS);
    // Temp-table-only work is read-only from the XA point of view;
    // logged work forces a full two-phase prepare.
    assertEquals(
        (doLoggedWorkInXact ? XAResource.XA_OK : XAResource.XA_RDONLY),
        xar.prepare(xid));
    xar.commit(xid,false);
    if (access_temp_table_after_xaendandcommit)
    {
        // is temp table empty after the commit?
        rs = s.executeQuery("SELECT count(*) FROM SESSION.t1");
        JDBC.assertFullResultSet(rs, new String[][] {{"0"}});
        rs.close();
        conn.commit();
    }
    s.close();
    conn.close();
    xaconn.close();
}
/**
 * Ensures the logged table REALTABLE1 exists, replacing any previous copy.
 * <p>
 * Fix: the original issued the CREATE only inside the catch block, i.e.
 * only when the DROP failed because the table did not exist. When the
 * table already existed it was dropped and never re-created, so on every
 * second invocation no logged table was left behind. The CREATE now
 * always runs after the best-effort DROP.
 *
 * @param s statement used to execute the DDL
 * @throws SQLException if the CREATE TABLE fails
 */
private void makeARealTable(Statement s) throws SQLException {
    try {
        s.executeUpdate("DROP TABLE REALTABLE1");
    } catch (SQLException ignored) {
        // Expected when the table does not exist yet; drop is best-effort.
    }
    s.executeUpdate("CREATE TABLE REALTABLE1 (i int)");
}
/**
 * Check the held state of a ResultSet by fetching one row, executing a
 * commit and then fetching the next. Checks the held state matches the
 * behaviour.
 *
 * @param conn connection the statement was created from
 * @param s statement that produced {@code rs}
 * @param rs open result set positioned before its first row
 * @throws SQLException on unexpected database failure
 */
private static void checkHeldRS(Connection conn, Statement s, ResultSet rs)
        throws SQLException {
    // DERBY-1008 - can't run with client
    if (!usingDerbyNetClient() && s.getConnection() != conn) {
        fail("FAIL - mismatched statement & Connection");
    }
    if (rs.getStatement() != s) {
        // DERBY-1009
        fail("FAIL - mismatched statement & ResultSet "
                + " Statement class " + s.getClass()
                + " ResultSet' Statements class "
                + rs.getStatement().getClass());
    }
    final boolean holdable =
            s.getResultSetHoldability() == ResultSet.HOLD_CURSORS_OVER_COMMIT;
    rs.next();
    assertEquals(0, rs.getInt(1));
    conn.commit();
    // A held cursor must survive the commit; a non-held one must be
    // closed and raise XCL16 on the next fetch.
    try {
        rs.next();
    } catch (SQLException sqle) {
        if (holdable) {
            fail("Held cursor closed on commit");
        } else {
            assertSQLState("XCL16", sqle);
        }
    }
    rs.close();
    conn.commit();
}
/**
 * Dummy method for testDerby4310* fixtures.
 * Intentionally empty: it only needs to exist so it can be invoked
 * (e.g. as a procedure/target) by those fixtures.
 */
public static void zeroArg() { }
/**
 * Builds the suite of all XATest fixtures wrapped in a clean-database
 * decorator that installs the XA transaction view.
 *
 * @param name name given to the enclosing suite
 * @return decorated test suite
 */
public static Test baseSuite(String name) {
    TestSuite xaSuite = new TestSuite(name);
    xaSuite.addTestSuite(XATest.class);
    return new CleanDatabaseTestSetup(xaSuite) {
        /** Creates the table used in the test cases. */
        protected void decorateSQL(Statement s) throws SQLException {
            XATestUtil.createXATransactionView(s);
        }
    };
}
/**
 * Runs the test fixtures in embedded and client.
 *
 * @return test suite
 */
public static Test suite() {
    TestSuite all = new TestSuite("XATest");
    if (JDBC.vmSupportsJSR169()) {
        // no XA for JSR169 — return the empty suite
        return all;
    }
    all.addTest(baseSuite("XATest:embedded"));
    all.addTest(
            TestConfiguration.clientServerDecorator(baseSuite("XATest:client")));
    return all;
}
}
|
|
package com.pennapps.labs.pennmobile.components.expandedbottomnavbar;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.ValueAnimator;
import android.content.Context;
import android.content.res.TypedArray;
import android.content.res.XmlResourceParser;
import android.graphics.Color;
import android.os.Build;
import android.os.Bundle;
import android.os.Parcelable;
import androidx.annotation.RequiresApi;
import androidx.core.view.ViewCompat;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Gravity;
import android.view.View;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.pennapps.labs.pennmobile.R;
import com.pennapps.labs.pennmobile.utils.Utils;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* Created by kalmath_v on 8/29/16.
*/
/**
 * Bottom navigation bar that lays its tabs out in rows of at most
 * {@code mMaxTabPerRow} items. When there are more tabs than fit in one
 * row, a "More" tab is inserted that expands/collapses the extra rows
 * with a height animation. Tab definitions are parsed from an XML
 * resource referenced by the {@code tab_resource} styleable attribute.
 */
public class ExpandableBottomTabBar extends LinearLayout implements View.OnClickListener {

    // Key under which the selected tab index is saved in instance state.
    private static final String SELECTED_TAB = "Selected Tab";
    private static final String TAG = "TabBarView";
    // Android attribute namespace, used to read raw android:* attribute values.
    public static final String ANDROID_NS = "http://schemas.android.com/apk/res/android";

    private int MAX_TABS_PER_ROW = 5;
    // Index of the "More" tab within the first row (last slot of that row).
    private int mMoreTabPosition = MAX_TABS_PER_ROW - 1;
    private Context mContext;
    // The bar itself acts as the vertical root container (see initializeViews()).
    private LinearLayout mRootContainer;
    private final List<LinearLayout> mTabContainers = new ArrayList<LinearLayout>();
    // Total number of tabs; overwritten by parse() with the parsed count.
    private int mTabCount = 20;
    private OnTabClickedListener mOnTabClickedListener;
    // True while the extra (bottom) rows are expanded.
    private boolean mAllTabsVisible = false;
    private int mPrimaryColor;
    private int mScreenWidth;
    private int mTenDp;
    private int mMaxFixedItemWidth;
    private int mMaxTabPerRow = MAX_TABS_PER_ROW;
    private final List<TabInformation> mTabInfoList = new ArrayList<>();
    // One TextView per tab, in tab order.
    private List<View> mTabViewList = new ArrayList<>();

    // XML Attributes
    private int mTabXmlResource;
    private float mTextSize = 16;
    private int mPadding = 20;
    // Expand/collapse animation length in milliseconds.
    private int mAnimationDuration = 300;
    private int mSelectedTab = 0;
    // Number of horizontal row containers created in initializeTabContainers().
    private int mTabContainerCount = 1;
    private int mBgColor;
    private int mTabTextColor;
    private int mSelectedTabTextColor;
    private int mMoreIconRecId = android.R.drawable.ic_menu_more;

    /** Immutable-ish holder for one tab parsed from the XML resource. */
    private static class TabInformation {
        int id;
        int iconResId;
        // Icon shown while the tab is selected; falls back to iconResId.
        int iconPressedResId;
        String title;
    }

    @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR1)
    public ExpandableBottomTabBar(Context context) {
        super(context);
        init(context, null, 0);
    }

    @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR1)
    public ExpandableBottomTabBar(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context, attrs, 0);
    }

    @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR1)
    public ExpandableBottomTabBar(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init(context, attrs, defStyleAttr);
    }

    /** Registers the listener notified from onClick() with the tab position. */
    public void setOnTabClickedListener(OnTabClickedListener listener) {
        mOnTabClickedListener = listener;
    }

    // Reads attributes and builds the tab views. Called from every constructor.
    @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR1)
    private void init(Context context, AttributeSet attrs, int defStyleAttr) {
        mContext = context;
        populateAttributes(context, attrs);
        initializeViews();
    }

    /**
     * Reads styleable attributes (tab resource, max tabs per row, colors)
     * plus raw android:background / android:textColor / android:textSize
     * values, then parses the tab XML resource.
     * <p>
     * NOTE(review): attrs.getAttributeValue() returns null when the
     * attribute is absent, so the contains() calls below would NPE for a
     * bar declared without background/textColor/textSize — looks like the
     * layout is required to set all three; confirm against usage.
     */
    private void populateAttributes(Context context, AttributeSet attrs) {
        mPrimaryColor = Utils.getColor(getContext(), R.attr.colorPrimary);
        mScreenWidth = Utils.getScreenWidth(getContext());
        mTenDp = Utils.dpToPixel(getContext(), 10);
        mMaxFixedItemWidth = Utils.dpToPixel(getContext(), 168);
        TypedArray ta = context.getTheme().obtainStyledAttributes(
                attrs, R.styleable.BottomBar, 0, 0);
        try {
            mTabXmlResource = ta.getResourceId(R.styleable.BottomBar_tab_resource, 0);
            mMaxTabPerRow = ta.getInt(R.styleable.BottomBar_max_tab_count, 5);
            mSelectedTabTextColor = ta.getColor(R.styleable.BottomBar_selected_tab_text_color, 0xffffffff);
            mMoreIconRecId = ta.getResourceId(R.styleable.BottomBar_more_icon_resource, android.R.drawable.ic_menu_more);
            mMoreTabPosition = mMaxTabPerRow - 1;
            // "@1234..." means a resource reference: the digits after '@'
            // are the numeric resource id.
            String bgColor = attrs.getAttributeValue(ANDROID_NS, "background");
            if (bgColor.contains("@")) {
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
                    mBgColor = getResources().getColor(Integer.valueOf(bgColor.replace("@", "")), null);
                } else {
                    mBgColor = getResources().getColor(Integer.valueOf(bgColor.replace("@", "")));
                }
            } else if (bgColor.contains("#")) {
                mBgColor = Color.parseColor(bgColor);
            }
            String textColor = attrs.getAttributeValue(ANDROID_NS, "textColor");
            if (textColor.contains("@")) {
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
                    mTabTextColor = getResources().getColor(Integer.valueOf(textColor.replace("@", "")), null);
                } else {
                    mTabTextColor = getResources().getColor(Integer.valueOf(textColor.replace("@", "")));
                }
            } else if (textColor.contains("#")) {
                mTabTextColor = Color.parseColor(textColor);
            }
            // Strip the unit suffix; the numeric part is used as-is either way.
            String textSize = attrs.getAttributeValue(ANDROID_NS, "textSize");
            if (textSize.contains("sp")) {
                mTextSize = Float.valueOf(textSize.replace("sp", ""));
            } else if (textSize.contains("dp")) {
                mTextSize = Float.valueOf(textSize.replace("dp", ""));
            } else {
                mTextSize = Float.valueOf(textSize);
            }
            XmlResourceParser parser = context.getResources().getXml(mTabXmlResource);
            parse(parser);
        } finally {
            ta.recycle();
        }
    }

    /**
     * Walks the tab XML resource, collecting a TabInformation per
     * {@code <tab>} element. Afterwards (in finally, so also on parse
     * errors) inserts the synthetic "More" tab when the parsed tab count
     * exceeds one row.
     */
    private void parse(XmlResourceParser parser) {
        try {
            int eventType = parser.getEventType();
            while (eventType != XmlPullParser.END_DOCUMENT) {
                // instead of the following if/else if lines
                // you should custom parse your xml
                if (eventType == XmlPullParser.START_DOCUMENT) {
                    Log.i(TAG, "Start document");
                } else if (eventType == XmlPullParser.START_TAG) {
                    Log.i(TAG, "Start tag " + parser.getName());
                    if (parser.getName().equals("tab")) {
                        parseTabInfo(parser);
                    }
                } else if (eventType == XmlPullParser.END_TAG) {
                    Log.i(TAG, "End tag " + parser.getName());
                } else if (eventType == XmlPullParser.TEXT) {
                    Log.i(TAG, "Text " + parser.getText());
                }
                eventType = parser.next();
            }
        } catch (IOException | XmlPullParserException e) {
            e.printStackTrace();
        } finally {
            mTabCount = mTabInfoList.size();
            if (mTabCount > mMaxTabPerRow) {
                // Too many tabs for one row: splice the "More" tab into the
                // last slot of the first row.
                final TabInformation moreTab = new TabInformation();
                moreTab.title = "More";
                moreTab.iconResId = mMoreIconRecId;
                moreTab.iconPressedResId = mMoreIconRecId;
                mTabInfoList.add(mMoreTabPosition, moreTab);
            }
        }
    }

    /** Reads id/icon/icon_pressed/title attributes of one {@code <tab>}. */
    private void parseTabInfo(XmlResourceParser parser) {
        TabInformation tab = new TabInformation();
        for (int i = 0; i < parser.getAttributeCount(); i++) {
            String attrName = parser.getAttributeName(i);
            switch (attrName) {
                case "id":
                    tab.id = parser.getIdAttributeResourceValue(i);
                    break;
                case "icon":
                    tab.iconResId = parser.getAttributeResourceValue(i, 0);
                    break;
                case "icon_pressed":
                    // Fall back to the normal icon when no pressed icon given.
                    tab.iconPressedResId = parser.getAttributeResourceValue(i, 0) == 0
                            ? tab.iconResId : parser.getAttributeResourceValue(i, 0);
                    break;
                case "title":
                    tab.title = getTitleValue(i, parser);
                    break;
            }
        }
        // Also covers the case where icon_pressed was absent entirely.
        if(tab.iconPressedResId == 0){
            tab.iconPressedResId = tab.iconResId;
        }
        mTabInfoList.add(tab);
    }

    /** Resolves a title attribute: string resource reference or literal text. */
    private String getTitleValue(int attrIndex, XmlResourceParser parser) {
        int titleResource = parser.getAttributeResourceValue(attrIndex, 0);
        if (titleResource != 0) {
            return mContext.getString(titleResource);
        }
        return parser.getAttributeValue(attrIndex);
    }

    // NOTE(review): only overrides the field; does not rebuild the rows.
    public void setTabCount(int count) {
        this.mTabCount = count;
    }

    /**
     * Set the selected tab
     * @param index zero-based tab position to select
     */
    @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR1)
    public void setSelectedTab(int index) {
        this.mSelectedTab = index;
        setFocusOnTab(mSelectedTab);
        // Selected tab lives in an overflow row: highlight "More" too.
        if (mSelectedTab > mMoreTabPosition) {
            ((TextView) mTabViewList.get(mMoreTabPosition)).setTextColor(mSelectedTabTextColor);
        }
    }

    /**
     * get Selected Tab
     * @return zero-based position of the currently selected tab
     */
    public int getSelectedTab() {
        return mSelectedTab;
    }

    // NOTE(review): this 4-arg constructor skips init(); the bar would be
    // empty when inflated through it — confirm it is unused.
    @RequiresApi(Build.VERSION_CODES.LOLLIPOP)
    public ExpandableBottomTabBar(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
        super(context, attrs, defStyleAttr, defStyleRes);
    }

    // Configures this view as the vertical root and builds the row containers.
    @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR1)
    private void initializeViews() {
        int width = LayoutParams.MATCH_PARENT;
        int height = LayoutParams.WRAP_CONTENT;
        LayoutParams params = new LayoutParams(width, height);
        ViewCompat.setElevation(this, Utils.dpToPixel(getContext(), 8));
        mRootContainer = this;
        mRootContainer.setLayoutParams(params);
        mRootContainer.setOrientation(LinearLayout.VERTICAL);
        initializeTabContainers();
    }

    /**
     * Creates one horizontal LinearLayout per row (rows after the first
     * start out GONE) and one TextView per tab, distributing the tabs
     * across the rows.
     */
    @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR1)
    private void initializeTabContainers() {
        int layoutCount = 1;
        if (mTabCount > mMaxTabPerRow) {
            // Account for the synthetic "More" tab added in parse().
            mTabCount = mTabCount + 1;
        }
        // Rows needed = ceil(mTabCount / mMaxTabPerRow).
        final int quotient = mTabCount / mMaxTabPerRow;
        final int remainder = mTabCount % mMaxTabPerRow;
        if (mTabCount < mMaxTabPerRow) {
            layoutCount = 1;
        } else if (quotient >= 1 && remainder > 0) {
            layoutCount = quotient + 1;
        } else if (remainder == 0) {
            layoutCount = quotient;
        }
        mTabContainerCount = layoutCount;
        int width = LayoutParams.MATCH_PARENT;
        int height = LayoutParams.WRAP_CONTENT;
        final LayoutParams params = new LayoutParams(width, height);
        for (int index = 0; index < layoutCount; index++) {
            final LinearLayout linearLayout = new LinearLayout(getContext());
            linearLayout.setLayoutParams(params);
            linearLayout.setOrientation(LinearLayout.HORIZONTAL);
            linearLayout.setLayoutDirection(LinearLayout.LAYOUT_DIRECTION_LOCALE);
            linearLayout.setGravity(Gravity.CENTER);
            linearLayout.setWeightSum(100);
            if (index > 0) {
                // Overflow rows stay hidden until "More" is tapped.
                linearLayout.setVisibility(View.GONE);
            }
            mRootContainer.addView(linearLayout, index);
        }
        // Equal weight per slot so a full row splits evenly.
        final LayoutParams params1 = new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
        params1.weight = 100f / (float) mMaxTabPerRow;
        for (int index = 0; index < mTabCount; index++) {
            TextView textView = new TextView(getContext());
            textView.setLayoutParams(params1);
            textView.setPadding(mPadding, mPadding, mPadding, mPadding);
            textView.setTextSize(mTextSize);
            textView.setGravity(Gravity.CENTER);
            textView.setText(mTabInfoList.get(index).title);
            textView.setTextColor(mTabTextColor);
            // Icon above the label.
            textView.setCompoundDrawablesRelativeWithIntrinsicBounds(0, mTabInfoList.get(index).iconResId, 0, 0);
            textView.setOnClickListener(this);
            ((LinearLayout) mRootContainer.getChildAt(getLayoutIndex(index))).addView(textView);
            mTabViewList.add(textView);
            setFocusOnTab(index);
        }
    }

    /**
     * Applies the selected color/icon to the tab at {@code index} when it
     * is the selected tab, and to the "More" tab when the selection sits
     * in an overflow row.
     */
    @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR1)
    public void setFocusOnTab(int index) {
        if (index == mSelectedTab) {
            ((TextView) mTabViewList.get(index)).setTextColor(mSelectedTabTextColor);
            ((TextView) mTabViewList.get(index))
                    .setCompoundDrawablesRelativeWithIntrinsicBounds(0, mTabInfoList.get(index).iconPressedResId, 0, 0);
        }
        if (mSelectedTab > mMoreTabPosition) {
            if (index == mMoreTabPosition) {
                ((TextView) mTabViewList.get(index)).setTextColor(mSelectedTabTextColor);
                ((TextView) mTabViewList.get(index))
                        .setCompoundDrawablesRelativeWithIntrinsicBounds(0, mTabInfoList.get(index).iconPressedResId, 0, 0);
            }
        }
    }

    /** Restores the default color and non-pressed icon on every tab. */
    @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR1)
    public void resetFocusOnAllTabs() {
        for (View textView : mTabViewList) {
            ((TextView) textView).setTextColor(mTabTextColor);
        }
        for(int i = 0; i < mTabViewList.size(); i++ ){
            ((TextView) mTabViewList.get(i))
                    .setCompoundDrawablesRelativeWithIntrinsicBounds(0, mTabInfoList.get(i).iconResId, 0, 0);
        }
    }

    // Row index a tab at position i belongs to.
    private int getLayoutIndex(int i) {
        return i / mMaxTabPerRow;
    }

    /**
     * Handles a tab tap: notifies the listener, animates the overflow rows
     * open/closed when appropriate, and moves the selection highlight.
     * Note: all of this only runs when a listener is registered.
     */
    @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR1)
    @Override
    public void onClick(View view) {
        int pos = -1;
        if (mOnTabClickedListener != null) {
            pos = getClickedPosition(view);
            mOnTabClickedListener.onTabClicked(view, pos);
            final View topTabContainer = mRootContainer.getChildAt(0);
            final int btmTabContainerCount = mRootContainer.getChildCount() - 1;
            if (btmTabContainerCount > 0) {
                final View[] btmTabContainers = new View[btmTabContainerCount];
                for (int i = 0; i < btmTabContainerCount; i++) {
                    btmTabContainers[i] = mRootContainer.getChildAt(i + 1);
                }
                if (isAnyBottomContainerVisible(btmTabContainers)) {
                    // Rows are expanded: animate their height down to 0,
                    // then hide them.
                    ValueAnimator btm = ValueAnimator.ofFloat(topTabContainer.getHeight(), 0);
                    btm.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
                        @Override
                        public void onAnimationUpdate(ValueAnimator valueAnimator) {
                            float val = (float) valueAnimator.getAnimatedValue();
                            LayoutParams p = new LayoutParams(btmTabContainers[0].getLayoutParams().width, (int) val);
                            setLayoutParamsToBtmTabs(btmTabContainers, p);
                        }
                    });
                    btm.addListener(new AnimatorListenerAdapter() {
                        @Override
                        public void onAnimationEnd(Animator animation) {
                            super.onAnimationEnd(animation);
                            setBtmTabsVisibility(btmTabContainers, View.GONE);
                            mAllTabsVisible = false;
                        }
                    });
                    btm.setDuration(mAnimationDuration);
                    btm.start();
                } else if (pos == mMoreTabPosition &&
                        mAllTabsVisible == false) {
                    // "More" tapped while collapsed: show the rows and
                    // animate their height up to one row's height.
                    setBtmTabsVisibility(btmTabContainers, View.VISIBLE);
                    mAllTabsVisible = true;
                    ValueAnimator btm = ValueAnimator.ofFloat(0, topTabContainer.getHeight());
                    btm.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
                        @Override
                        public void onAnimationUpdate(ValueAnimator valueAnimator) {
                            float val = (float) valueAnimator.getAnimatedValue();
                            LayoutParams p = new LayoutParams(btmTabContainers[0].getLayoutParams().width, (int) val);
                            setLayoutParamsToBtmTabs(btmTabContainers, p);
                        }
                    });
                    btm.setDuration(mAnimationDuration);
                    btm.start();
                }
            }
            if (pos != mMoreTabPosition) {
                // Un-highlight the previous selection, then highlight the new one.
                getTabViewAt(mSelectedTab).setBackgroundColor(mBgColor);
                ((TextView) getTabViewAt(mSelectedTab)).setTextColor(mTabTextColor);
                ((TextView) getTabViewAt(mSelectedTab))
                        .setCompoundDrawablesRelativeWithIntrinsicBounds(0, mTabInfoList.get(mSelectedTab).iconResId, 0, 0);
                mSelectedTab = pos;
                ((TextView) getTabViewAt(pos)).setTextColor(mSelectedTabTextColor);
                ((TextView) getTabViewAt(pos)).setCompoundDrawablesRelativeWithIntrinsicBounds(0, mTabInfoList.get(pos).iconPressedResId, 0, 0);
            }
            if (mSelectedTab > mMoreTabPosition) {
                // Selection is in an overflow row: keep "More" highlighted.
                ((TextView) getTabViewAt(mMoreTabPosition)).setTextColor(mSelectedTabTextColor);
                ((TextView) getTabViewAt(mMoreTabPosition))
                        .setCompoundDrawablesRelativeWithIntrinsicBounds(0, mTabInfoList.get(mMoreTabPosition).iconPressedResId, 0, 0);
            }
            if (mSelectedTab < mMaxTabPerRow && mTabContainerCount > 1) {
                // Selection is in the first row: reset "More" to its idle look.
                ((TextView) getTabViewAt(mMoreTabPosition)).setTextColor(mTabTextColor);
                ((TextView) getTabViewAt(mMoreTabPosition))
                        .setCompoundDrawablesRelativeWithIntrinsicBounds(0, mTabInfoList.get(mMoreTabPosition).iconResId, 0, 0);
                getTabViewAt(mMoreTabPosition).setBackgroundColor(mBgColor);
            }
        }
    }

    private View getTabViewAt(int position) {
        return mTabViewList.get(position);
    }

    // Position of the tapped view within mTabViewList, or -1 if unknown.
    private int getClickedPosition(View view) {
        for (View tabView : mTabViewList) {
            if (view == tabView) {
                return mTabViewList.indexOf(tabView);
            }
        }
        return -1;
    }

    private void setBtmTabsVisibility(View[] btmTabContainers, int visibility) {
        for (View btmTabContainer : btmTabContainers) {
            btmTabContainer.setVisibility(visibility);
        }
    }

    private void setLayoutParamsToBtmTabs(View[] btmTabContainers, LayoutParams layoutParams) {
        for (View btmTabContainer : btmTabContainers) {
            btmTabContainer.setLayoutParams(layoutParams);
        }
    }

    // True when at least one overflow row is currently VISIBLE.
    private boolean isAnyBottomContainerVisible(View[] btmTabContainers) {
        for (View view : btmTabContainers) {
            if (view.getVisibility() == View.VISIBLE) {
                return true;
            }
        }
        return false;
    }

    /** Saves the selected tab index alongside the superclass state. */
    @Override
    public Parcelable onSaveInstanceState() {
        final Bundle bundle = new Bundle();
        bundle.putInt(SELECTED_TAB, mSelectedTab);
        bundle.putParcelable("superstate", super.onSaveInstanceState());
        return bundle;
    }

    /** Restores the selected tab index and re-applies the highlight. */
    @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR1)
    @Override
    public void onRestoreInstanceState(Parcelable state) {
        if (state instanceof Bundle) {
            final Bundle bundle = (Bundle) state;
            mSelectedTab = bundle.getInt(SELECTED_TAB);
            state = bundle.getParcelable("superstate");
        }
        super.onRestoreInstanceState(state);
        resetFocusOnAllTabs();
        setFocusOnTab(mSelectedTab);
        if (mSelectedTab > mMoreTabPosition) {
            ((TextView) mTabViewList.get(mMoreTabPosition)).setTextColor(mSelectedTabTextColor);
        }
    }

    /** Callback invoked when any tab (including "More") is tapped. */
    public interface OnTabClickedListener {
        public void onTabClicked(View view, int tabPos);
    }
}
|
|
/*
* #%L
* mosaic-connectors
* %%
* Copyright (C) 2010 - 2013 Institute e-Austria Timisoara (Romania)
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package eu.mosaic_cloud.platform.implementation.v2.connectors.queue.amqp;
import java.util.concurrent.Callable;
import eu.mosaic_cloud.platform.implementation.v2.connectors.core.ConnectorConfiguration;
import eu.mosaic_cloud.platform.implementation.v2.connectors.tools.ConfigProperties;
import eu.mosaic_cloud.platform.interop.common.amqp.AmqpExchangeType;
import eu.mosaic_cloud.platform.interop.common.amqp.AmqpInboundMessage;
import eu.mosaic_cloud.platform.v2.connectors.queue.QueueConsumerCallback;
import eu.mosaic_cloud.platform.v2.connectors.queue.QueueConsumerConnector;
import eu.mosaic_cloud.platform.v2.connectors.queue.QueueDeliveryToken;
import eu.mosaic_cloud.platform.v2.connectors.queue.amqp.AmqpQueueRawConsumerCallback;
import eu.mosaic_cloud.platform.v2.serialization.DataEncoder;
import eu.mosaic_cloud.platform.v2.serialization.EncodingException;
import eu.mosaic_cloud.platform.v2.serialization.EncodingMetadata;
import eu.mosaic_cloud.tools.callbacks.core.CallbackCompletion;
import eu.mosaic_cloud.tools.callbacks.tools.CallbackCompletionWorkflows;
import com.google.common.base.Preconditions;
public final class AmqpQueueConsumerConnectorProxy<TMessage>
extends AmqpQueueConnectorProxy<TMessage>
implements
QueueConsumerConnector<TMessage>
{
/**
 * Builds the consumer proxy, resolving exchange/queue/binding settings
 * from the connector configuration (falling back to the raw proxy's
 * identifier for exchange name, queue name and routing key) and wrapping
 * the delegate callback in an {@code AmqpConsumerCallback}.
 * Private: instances are obtained through {@link #create}.
 */
private AmqpQueueConsumerConnectorProxy (final AmqpQueueRawConnectorProxy rawProxy, final ConnectorConfiguration configuration, final Class<TMessage> messageClass, final DataEncoder<TMessage> messageEncoder, final QueueConsumerCallback<TMessage> callback) {
	super (rawProxy, configuration, messageClass, messageEncoder);
	final String identifier = this.raw.getIdentifier ();
	// Exchange settings (name, type, durability, auto-delete).
	this.exchange = configuration.getConfigParameter (ConfigProperties.AmqpQueueConnector_0, String.class, identifier);
	this.exchangeType = configuration.getConfigParameter (ConfigProperties.AmqpQueueConnector_5, AmqpExchangeType.class, AmqpExchangeType.DIRECT);
	this.exchangeDurable = configuration.getConfigParameter (ConfigProperties.AmqpQueueConnector_9, Boolean.class, Boolean.FALSE).booleanValue ();
	this.exchangeAutoDelete = configuration.getConfigParameter (ConfigProperties.AmqpQueueConnector_7, Boolean.class, Boolean.TRUE).booleanValue ();
	// Queue settings; durability and auto-delete mirror the exchange's.
	this.queue = configuration.getConfigParameter (ConfigProperties.AmqpQueueConnector_2, String.class, identifier);
	this.queueExclusive = configuration.getConfigParameter (ConfigProperties.AmqpQueueConnector_6, Boolean.class, Boolean.FALSE).booleanValue ();
	this.queueAutoDelete = this.exchangeAutoDelete;
	this.queueDurable = this.exchangeDurable;
	this.bindingRoutingKey = configuration.getConfigParameter (ConfigProperties.AmqpQueueConnector_1, String.class, identifier);
	// FIXME: this should also be made a configurable parameter...
	this.consumerIdentifier = identifier;
	this.consumerAutoAck = configuration.getConfigParameter (ConfigProperties.AmqpQueueConnector_10, Boolean.class, Boolean.FALSE).booleanValue ();
	this.definePassive = configuration.getConfigParameter (ConfigProperties.AmqpQueueConnector_8, Boolean.class, Boolean.FALSE).booleanValue ();
	// Adapter translating raw AMQP callbacks into the typed delegate.
	this.callback = new AmqpConsumerCallback (callback);
	this.transcript.traceDebugging ("created the queue consumer connector proxy for queue `%s` bound to exchange `%s` (of type `%s`) with routing key `%s`.", this.queue, this.exchange, this.exchangeType, this.bindingRoutingKey);
	this.transcript.traceDebugging ("using the underlying raw proxy `%{object:identity}`...", this.raw);
	this.transcript.traceDebugging ("using the underlying raw consumer callbacks `%{object:identity}`...", this.callback);
	this.transcript.traceDebugging ("using the delegate consumer callbacks `%{object:identity}`...", this.callback.delegate);
}
/**
 * Acknowledges a previously delivered message through the raw proxy.
 * The token must be one issued by this very proxy.
 */
@Override
public CallbackCompletion<Void> acknowledge (final QueueDeliveryToken token_) {
	// Tokens produced by this proxy are always DeliveryToken instances.
	final DeliveryToken deliveryToken = (DeliveryToken) token_;
	Preconditions.checkNotNull (deliveryToken);
	Preconditions.checkArgument (deliveryToken.proxy == this);
	this.transcript.traceDebugging ("acknowledging the message `%s` for consumer `%s`...", deliveryToken, this.consumerIdentifier);
	return this.raw.ack (deliveryToken.getDelivery (), false);
}
/**
 * Destroys the proxy: first cancels the registered consumer, then
 * destroys the underlying raw proxy, as a sequential workflow.
 */
@Override
public CallbackCompletion<Void> destroy () {
	this.transcript.traceDebugging ("destroying the proxy...");
	// Step 1: cancel the consumer registration on the broker.
	final Callable<CallbackCompletion<Void>> cancelOperation = new Callable<CallbackCompletion<Void>> () {
		@Override
		public CallbackCompletion<Void> call () {
			AmqpQueueConsumerConnectorProxy.this.transcript.traceDebugging ("canceling the consumer `%s`...", AmqpQueueConsumerConnectorProxy.this.consumerIdentifier);
			return (AmqpQueueConsumerConnectorProxy.this.raw.cancel (AmqpQueueConsumerConnectorProxy.this.consumerIdentifier));
		}
	};
	// Step 2: tear down the raw connector itself.
	final Callable<CallbackCompletion<Void>> destroyOperation = new Callable<CallbackCompletion<Void>> () {
		@Override
		public CallbackCompletion<Void> call () {
			AmqpQueueConsumerConnectorProxy.this.transcript.traceDebugging ("destroying the underlying raw proxy...");
			return (AmqpQueueConsumerConnectorProxy.this.raw.destroy ());
		}
	};
	return (CallbackCompletionWorkflows.executeSequence (cancelOperation, destroyOperation));
}
/**
 * Initializes the proxy as a sequential workflow: raw-proxy init,
 * exchange declaration, queue declaration, queue binding, and finally
 * consumer registration. The order is the AMQP-mandated setup order.
 */
@Override
public CallbackCompletion<Void> initialize () {
	this.transcript.traceDebugging ("initializing the proxy...");
	// Step 1: bring up the underlying raw connector.
	final Callable<CallbackCompletion<Void>> initializeOperation = new Callable<CallbackCompletion<Void>> () {
		@Override
		public CallbackCompletion<Void> call () {
			AmqpQueueConsumerConnectorProxy.this.transcript.traceDebugging ("initializing the underlying raw proxy...");
			return (AmqpQueueConsumerConnectorProxy.this.raw.initialize ());
		}
	};
	// Step 2: declare the exchange with the configured type/durability.
	final Callable<CallbackCompletion<Void>> declareExchangeOperation = new Callable<CallbackCompletion<Void>> () {
		@Override
		public CallbackCompletion<Void> call () {
			AmqpQueueConsumerConnectorProxy.this.transcript.traceDebugging ("declaring the exchange `%s` of type `%s`...", AmqpQueueConsumerConnectorProxy.this.exchange, AmqpQueueConsumerConnectorProxy.this.exchangeType);
			return (AmqpQueueConsumerConnectorProxy.this.raw.declareExchange (AmqpQueueConsumerConnectorProxy.this.exchange, AmqpQueueConsumerConnectorProxy.this.exchangeType, AmqpQueueConsumerConnectorProxy.this.exchangeDurable, AmqpQueueConsumerConnectorProxy.this.exchangeAutoDelete, AmqpQueueConsumerConnectorProxy.this.definePassive));
		}
	};
	// Step 3: declare the queue to consume from.
	final Callable<CallbackCompletion<Void>> declareQueueOperation = new Callable<CallbackCompletion<Void>> () {
		@Override
		public CallbackCompletion<Void> call () {
			AmqpQueueConsumerConnectorProxy.this.transcript.traceDebugging ("declaring the queue `%s`...", AmqpQueueConsumerConnectorProxy.this.queue);
			return (AmqpQueueConsumerConnectorProxy.this.raw.declareQueue (AmqpQueueConsumerConnectorProxy.this.queue, AmqpQueueConsumerConnectorProxy.this.queueExclusive, AmqpQueueConsumerConnectorProxy.this.queueDurable, AmqpQueueConsumerConnectorProxy.this.queueAutoDelete, AmqpQueueConsumerConnectorProxy.this.definePassive));
		}
	};
	// Step 4: bind the queue to the exchange under the routing key.
	final Callable<CallbackCompletion<Void>> bindQueueOperation = new Callable<CallbackCompletion<Void>> () {
		@Override
		public CallbackCompletion<Void> call () {
			AmqpQueueConsumerConnectorProxy.this.transcript.traceDebugging ("binding the queue `%s` to exchange `%s` (of type `%s`) with routing key `%s`...", AmqpQueueConsumerConnectorProxy.this.queue, AmqpQueueConsumerConnectorProxy.this.exchange, AmqpQueueConsumerConnectorProxy.this.exchangeType, AmqpQueueConsumerConnectorProxy.this.bindingRoutingKey);
			return (AmqpQueueConsumerConnectorProxy.this.raw.bindQueue (AmqpQueueConsumerConnectorProxy.this.exchange, AmqpQueueConsumerConnectorProxy.this.queue, AmqpQueueConsumerConnectorProxy.this.bindingRoutingKey));
		}
	};
	// Step 5: register the consumer callbacks on the queue.
	final Callable<CallbackCompletion<Void>> consumeOperation = new Callable<CallbackCompletion<Void>> () {
		@Override
		public CallbackCompletion<Void> call () {
			AmqpQueueConsumerConnectorProxy.this.transcript.traceDebugging ("registering the consumer `%s` for queue `%s`...", AmqpQueueConsumerConnectorProxy.this.consumerIdentifier, AmqpQueueConsumerConnectorProxy.this.queue);
			return (AmqpQueueConsumerConnectorProxy.this.raw.consume (AmqpQueueConsumerConnectorProxy.this.queue, AmqpQueueConsumerConnectorProxy.this.consumerIdentifier, AmqpQueueConsumerConnectorProxy.this.queueExclusive, AmqpQueueConsumerConnectorProxy.this.consumerAutoAck, AmqpQueueConsumerConnectorProxy.this.callback));
		}
	};
	// FIXME: If these operations fail we should continue with `destroy`.
	return (CallbackCompletionWorkflows.executeSequence (initializeOperation, declareExchangeOperation, declareQueueOperation, bindQueueOperation, consumeOperation));
}
// Routing key used when binding the queue to the exchange.
private final String bindingRoutingKey;
// Adapter that forwards raw AMQP callbacks to the typed delegate.
private final AmqpConsumerCallback callback;
// When true the broker auto-acknowledges deliveries.
private final boolean consumerAutoAck;
// Consumer tag; currently the raw proxy's identifier (see ctor FIXME).
private final String consumerIdentifier;
// When true, declarations are passive (assert existence, don't create).
private final boolean definePassive;
private final String exchange;
private final boolean exchangeAutoDelete;
private final boolean exchangeDurable;
private final AmqpExchangeType exchangeType;
private final String queue;
// Queue durability/auto-delete mirror the exchange settings (see ctor).
private final boolean queueAutoDelete;
private final boolean queueDurable;
private final boolean queueExclusive;
/**
 * Factory: creates the raw AMQP connector proxy for the given
 * configuration and wraps it in a typed consumer connector proxy.
 */
public static <TMessage> AmqpQueueConsumerConnectorProxy<TMessage> create (final ConnectorConfiguration configuration, final Class<TMessage> messageClass, final DataEncoder<TMessage> messageEncoder, final QueueConsumerCallback<TMessage> callback) {
	// FIXME: the splice below will be done when creating the environment
	//# final Configuration subConfiguration = configuration.spliceConfiguration(ConfigurationIdentifier.resolveRelative("publisher"));
	final AmqpQueueRawConnectorProxy underlyingProxy = AmqpQueueRawConnectorProxy.create (configuration);
	return (new AmqpQueueConsumerConnectorProxy<TMessage> (underlyingProxy, configuration, messageClass, messageEncoder, callback));
}
/**
 * Bridges raw AMQP consumer events to the typed {@code QueueConsumerCallback}:
 * bookkeeping events are only traced, while deliveries are decoded and
 * forwarded to the delegate.
 */
private final class AmqpConsumerCallback implements AmqpQueueRawConsumerCallback {
    AmqpConsumerCallback (final QueueConsumerCallback<TMessage> delegate) {
        this.delegate = delegate;
    }

    @Override
    public CallbackCompletion<Void> handleCancelOk (final String consumerTag) {
        AmqpQueueConsumerConnectorProxy.this.transcript.traceDebugging ("canceled the consumer `%s` successfully.", AmqpQueueConsumerConnectorProxy.this.consumerIdentifier);
        return (CallbackCompletion.createOutcome ());
    }

    @Override
    public CallbackCompletion<Void> handleConsumeOk (final String consumerTag) {
        AmqpQueueConsumerConnectorProxy.this.transcript.traceDebugging ("registered the consumer `%s` successfully.", AmqpQueueConsumerConnectorProxy.this.consumerIdentifier);
        return (CallbackCompletion.createOutcome ());
    }

    @Override
    public CallbackCompletion<Void> handleDelivery (final AmqpInboundMessage inbound) {
        final DeliveryToken token = new DeliveryToken (AmqpQueueConsumerConnectorProxy.this, inbound.getDelivery ());
        final byte[] payload = inbound.getData ();
        AmqpQueueConsumerConnectorProxy.this.transcript.traceDebugging ("delivered the message `%s` for consumer `%s`...", token, AmqpQueueConsumerConnectorProxy.this.consumerIdentifier);
        final EncodingMetadata encodingMetadata = new EncodingMetadata (inbound.getContentType (), inbound.getContentEncoding ());
        final TMessage message;
        try {
            message = AmqpQueueConsumerConnectorProxy.this.messageEncoder.decode (payload, encodingMetadata);
        } catch (final EncodingException exception) {
            // Decoding failures are deferred (traced) and surfaced to the caller as a failed completion.
            AmqpQueueConsumerConnectorProxy.this.exceptions.traceDeferredException (exception, "decoding the message `%s` failed; deferring!", token);
            return (CallbackCompletion.createFailure (exception));
        }
        AmqpQueueConsumerConnectorProxy.this.transcript.traceDebugging ("triggering callback for the message `%s`...", token);
        return (this.delegate.consume (token, message));
    }

    @Override
    public CallbackCompletion<Void> handleShutdownSignal (final String consumerTag, final String message) {
        // FIXME: this should be handled...
        return (CallbackCompletion.createOutcome ());
    }

    final QueueConsumerCallback<TMessage> delegate;
}
// Opaque handle identifying one delivered message: pairs the raw AMQP delivery
// tag with the proxy it came from, so the message can later be acknowledged
// through the correct connector. Field names are package-visible and may be
// referenced elsewhere in this file — do not rename.
private static final class DeliveryToken
implements
QueueDeliveryToken
{
DeliveryToken (final AmqpQueueConsumerConnectorProxy<?> proxy, final long token) {
super ();
this.proxy = proxy;
this.token = token;
}
@Override
public String toString () {
// Zero-padded 32-digit hex rendering of the delivery tag, used in trace messages.
return (String.format ("%032x", Long.valueOf (this.token)));
}
// Returns the raw AMQP delivery tag wrapped by this token.
long getDelivery () {
return (this.token);
}
final AmqpQueueConsumerConnectorProxy<?> proxy;
final long token;
}
}
|
|
/**
* Copyright 2005-2014 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.jube.process.support;
import static java.util.concurrent.Executors.newFixedThreadPool;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.Executor;
import com.google.common.base.Strings;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import io.fabric8.jube.process.ProcessController;
import io.fabric8.jube.process.config.ProcessConfig;
import io.fabric8.jube.process.support.command.CommandFailedException;
import io.fabric8.jube.util.FilesHelper;
import io.fabric8.utils.ExecParseUtils;
import io.fabric8.utils.Processes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A default implementation of {@link io.fabric8.jube.process.ProcessController} which assumes a launch script which
 * takes operations as the first argument such as for the
 * <a href="http://refspecs.freestandards.org/LSB_3.1.1/LSB-Core-generic/LSB-Core-generic/iniscrptact.html">Init Script Actions spec</a>
 */
public class DefaultProcessController implements ProcessController {
    private static final Logger LOG = LoggerFactory.getLogger(DefaultProcessController.class);

    /**
     * Number of threads used to execute the controller's tasks. We need at least two, because we execute process and
     * consume its output concurrently.
     */
    private static final int THREADS_PER_CONTROLLER = 2;

    /**
     * Local identifier of the controlled process
     * (assigned by the {@link io.fabric8.jube.process.service.ProcessManagerService}).
     */
    private final String id;

    /** Installation root; renamed to a dot-prefixed sibling directory on {@link #uninstall()}. */
    private File rootDir;

    /** Directory commands run from; pid files and the launch script are resolved against it. */
    private final File baseDir;

    private final ProcessConfig config;

    /** Lazily created thread pool; access only through the synchronized {@link #getExecutor()}. */
    private transient Executor executor;

    /**
     * @param id identifier of the controlled process. Usually PID.
     */
    public DefaultProcessController(String id, ProcessConfig config, File rootDir, File baseDir) {
        this.id = id;
        this.config = config;
        this.rootDir = rootDir;
        this.baseDir = baseDir;
    }

    @Override
    public String toString() {
        return "DefaultProcessController(" + id + ")";
    }

    /**
     * Runs every non-empty install command from the configuration, in order.
     * A failing command surfaces as {@link CommandFailedException} from {@link #runCommandLine(String)}.
     *
     * @return always 0; failures are reported via exceptions, not the return code
     */
    @Override
    public int install() throws InterruptedException, IOException, CommandFailedException {
        List<String> installCommands = config.getInstallCommands();
        if (installCommands != null) {
            for (String installCommand : installCommands) {
                if (!Strings.isNullOrEmpty(installCommand)) {
                    runCommandLine(installCommand);
                }
            }
        }
        return 0;
    }

    /**
     * "Uninstalls" the process by renaming its root directory to a dot-prefixed sibling
     * (after deleting any leftover from a previous uninstall). Idempotent: a root directory
     * whose name already starts with '.' is treated as already uninstalled.
     */
    @Override
    public synchronized int uninstall() {
        // we should uninstall from the root dir, so we uninstall it all
        String name = rootDir.getName();
        if (name.startsWith(".")) {
            LOG.debug("Uninstalling " + rootDir + " has already been done.");
            // silently ignore this
        } else {
            File newName = new File(rootDir.getParentFile(), "." + name);
            // delete any existing directory first
            FilesHelper.recursiveDelete(newName);
            // and then rename
            if (FilesHelper.renameTo(rootDir, newName)) {
                rootDir = newName;
            }
        }
        return 0;
    }

    @Override
    public int start() throws Exception {
        return runConfigCommandValueOrLaunchScriptWith(config.getStartCommand(), "start");
    }

    /** Stops via the configured stop command, or kills the process when none is configured. */
    @Override
    public int stop() throws Exception {
        String customCommand = config.getStopCommand();
        if (Strings.isNullOrEmpty(customCommand)) {
            // lets just kill it
            LOG.info("No stop command configured so lets just try killing it " + this);
            return Processes.killProcess(getPid(), "");
        }
        return runConfigCommandValueOrLaunchScriptWith(customCommand, "stop");
    }

    /** Kills via the configured kill command, or falls back to a hard kill (-9). */
    @Override
    public int kill() throws Exception {
        String customCommand = config.getKillCommand();
        if (Strings.isNullOrEmpty(customCommand)) {
            // lets stop it
            LOG.info("No kill command configured so lets just try killing it with -9 " + this);
            return Processes.killProcess(getPid(), "-9");
        }
        return runConfigCommandValueOrLaunchScriptWith(customCommand, "kill");
    }

    /**
     * Restarts via the configured restart command; an explicitly empty command means
     * "stop then start". A null command falls through to the launch script's "restart" action.
     */
    @Override
    public int restart() throws Exception {
        String customCommand = config.getRestartCommand();
        // NOTE(review): unlike stop()/kill() this checks for an *empty* (non-null) command;
        // a null command runs the launch script instead. Confirm this asymmetry is intended.
        if (customCommand != null && customCommand.trim().isEmpty()) {
            // lets stop and start()
            LOG.info("No restart command configured so lets just try stopping " + this + " then starting again.");
            int answer = stop();
            if (answer == 0) {
                answer = start();
            }
            return answer;
        }
        return runConfigCommandValueOrLaunchScriptWith(customCommand, "restart");
    }

    @Override
    public int status() throws Exception {
        return runConfigCommandValueOrLaunchScriptWith(config.getStatusCommand(), "status");
    }

    /** Runs the configured configure command; an empty or null command is a no-op returning 0. */
    @Override
    public int configure() throws Exception {
        String customCommand = config.getConfigureCommand();
        if (customCommand != null && customCommand.trim().isEmpty()) {
            // TODO is it ok to simply ignore this?
            LOG.info("No configure command configured " + this);
            return 0;
        }
        return runCommandLine(customCommand);
    }

    // Properties
    //-------------------------------------------------------------------------
    public File getBaseDir() {
        return baseDir;
    }

    /**
     * Lazily creates the controller's thread pool. Synchronized so that concurrent callers
     * cannot race the null-check and create two pools (the previous implementation could).
     */
    public synchronized Executor getExecutor() {
        if (executor == null) {
            executor = newFixedThreadPool(THREADS_PER_CONTROLLER, new ThreadFactoryBuilder().setDaemon(true).setNameFormat("jube-process-controller-%s").build());
        }
        return executor;
    }

    @Override
    public ProcessConfig getConfig() {
        return config;
    }

    /**
     * Tries to discover the process pid, in order: the configured pid file, then
     * {@code var/process.pid}, then {@code var/run/<launchScript>.pid}, then any
     * {@code *.pid} file under {@code var/run}.
     *
     * @return the pid, or null when none could be found
     */
    public Long getPid() throws IOException {
        String pidFileName = config.getPidFile();
        if (pidFileName != null) {
            File file = new File(baseDir, pidFileName);
            if (file.exists() && file.isFile()) {
                return extractPidFromFile(file);
            }
        }
        File pidFile = new File(baseDir, "var/process.pid");
        if (pidFile.exists()) {
            return extractPidFromFile(pidFile);
        }
        File pidDir = new File(baseDir, "var/run");
        if (pidDir.exists() && pidDir.isDirectory()) {
            // strip any directory prefix (either separator style) from the launch script name
            String script = getLaunchScript();
            int idx = script.lastIndexOf("/");
            if (idx < 0) {
                idx = script.lastIndexOf("\\");
            }
            if (idx > 0) {
                script = script.substring(idx + 1);
            }
            // lets try find the file /var/run/launcher.pid by default
            pidFile = new File(pidDir, script + ".pid");
            if (pidFile.exists()) {
                return extractPidFromFile(pidFile);
            }
            // otherwise lets just find a /var/run/*.pid file
            File[] files = pidDir.listFiles();
            if (files != null) {
                for (File file : files) {
                    if (file.getName().toLowerCase().endsWith(".pid")) {
                        Long answer = extractPidFromFile(file);
                        if (answer != null) {
                            return answer;
                        }
                    }
                }
            }
        }
        return null;
    }

    /** Returns the configured launch script, defaulting to {@code bin/launcher}. */
    protected String getLaunchScript() {
        String launchScript = config.getLaunchScript();
        if (launchScript == null) {
            // TODO should we auto-discover here?
            launchScript = "bin/launcher";
        }
        return launchScript;
    }

    /**
     * Returns the first all-digit line of the given file parsed as a pid, or null when
     * the file contains no such line.
     */
    private Long extractPidFromFile(File file) throws IOException {
        List<String> lines = FilesHelper.readLines(file);
        for (String line : lines) {
            String text = line.trim();
            if (text.matches("\\d+")) {
                try {
                    return Long.parseLong(text);
                } catch (NumberFormatException e) {
                    // only reachable on numeric overflow, since the line matched \d+
                    throw new RuntimeException("Failed to parse pid '" + text + "' as a number. Exception: " + e, e);
                }
            }
        }
        return null;
    }

    // Implementation methods
    //-------------------------------------------------------------------------

    /** Runs the given command when non-null, else invokes the launch script with the given action. */
    protected int runConfigCommandValueOrLaunchScriptWith(String command, String launchArgument) throws InterruptedException, IOException, CommandFailedException {
        if (command != null) {
            return runCommandLine(command);
        } else {
            return config.runCommand(getExecutor(), baseDir, getLaunchScript(), launchArgument);
        }
    }

    /**
     * Converts a space separated command line into a Command and executes it
     */
    protected int runCommandLine(String command) throws IOException, InterruptedException, CommandFailedException {
        if (command != null) {
            // TODO warning this doesn't handle quoted strings as a single argument
            List<String> commandArgs = ExecParseUtils.splitToWhiteSpaceSeparatedTokens(command);
            return config.runCommand(getExecutor(), baseDir, commandArgs.toArray(new String[commandArgs.size()]));
        } else {
            return 0;
        }
    }
}
|
|
/*
* Copyright (C) 2012-2015 DataStax Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datastax.driver.core;
import com.datastax.driver.core.utils.CassandraVersion;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.testng.annotations.Test;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.net.InetAddress;
import java.nio.ByteBuffer;
import java.util.*;
import static com.datastax.driver.core.Metadata.quote;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotEquals;
/**
 * Integration tests for Cassandra user defined types (UDTs), exercising write/read
 * round-trips, keyspace scoping, very wide UDTs, primitive and non-primitive field
 * types, nesting, and null handling. Requires a live single-node cluster
 * (CCMBridge) and Cassandra 2.1+.
 */
@CassandraVersion(major = 2.1)
public class UserTypesTest extends CCMBridge.PerClassSingleNodeCluster {
// NOTE(review): this list is MUTABLE shared state — testNonPrimitiveDatatypes()
// removes DataType.counter() from it, which also changes what
// testPrimitiveDatatypes() sees depending on test execution order. Confirm
// whether counter should instead be excluded at construction time.
private final static List<DataType> DATA_TYPE_PRIMITIVES = new ArrayList<DataType>(DataType.allPrimitiveTypes());
private final static List<DataType.Name> DATA_TYPE_NON_PRIMITIVE_NAMES =
new ArrayList<DataType.Name>(EnumSet.of(DataType.Name.LIST, DataType.Name.SET, DataType.Name.MAP, DataType.Name.TUPLE));
// Schema shared by the simple write/read tests: two UDTs (one nested in the other)
// plus a table embedding the outer UDT.
@Override
protected Collection<String> getTableDefinitions() {
String type1 = "CREATE TYPE phone (alias text, number text)";
String type2 = "CREATE TYPE address (street text, \"ZIP\" int, phones set<frozen<phone>>)";
String table = "CREATE TABLE user (id int PRIMARY KEY, addr frozen<address>)";
return Arrays.asList(type1, type2, table);
}
/**
 * Basic write read test to ensure UDTs are stored and retrieved correctly.
 *
 * @throws Exception
 */
@Test(groups = "short")
public void simpleWriteReadTest() throws Exception {
int userId = 0;
try {
session.execute("USE " + keyspace);
PreparedStatement ins = session.prepare("INSERT INTO user(id, addr) VALUES (?, ?)");
PreparedStatement sel = session.prepare("SELECT * FROM user WHERE id=?");
UserType addrDef = cluster.getMetadata().getKeyspace(keyspace).getUserType("address");
UserType phoneDef = cluster.getMetadata().getKeyspace(keyspace).getUserType("phone");
UDTValue phone1 = phoneDef.newValue().setString("alias", "home").setString("number", "0123548790");
UDTValue phone2 = phoneDef.newValue().setString("alias", "work").setString("number", "0698265251");
// quote() preserves the case-sensitive "ZIP" column name
UDTValue addr = addrDef.newValue().setString("street", "1600 Pennsylvania Ave NW").setInt(quote("ZIP"), 20500).setSet("phones", ImmutableSet.of(phone1, phone2));
session.execute(ins.bind(userId, addr));
Row r = session.execute(sel.bind(userId)).one();
assertEquals(r.getInt("id"), 0);
assertEquals(r.getUDTValue("addr"), addr);
} catch (Exception e) {
errorOut();
throw e;
}
}
/**
 * Run simpleWriteReadTest with unprepared requests.
 *
 * @throws Exception
 */
@Test(groups = "short")
public void simpleUnpreparedWriteReadTest() throws Exception {
int userId = 1;
try {
session.execute("USE " + keyspace);
UserType addrDef = cluster.getMetadata().getKeyspace(keyspace).getUserType("address");
UserType phoneDef = cluster.getMetadata().getKeyspace(keyspace).getUserType("phone");
UDTValue phone1 = phoneDef.newValue().setString("alias", "home").setString("number", "0123548790");
UDTValue phone2 = phoneDef.newValue().setString("alias", "work").setString("number", "0698265251");
UDTValue addr = addrDef.newValue().setString("street", "1600 Pennsylvania Ave NW").setInt(quote("ZIP"), 20500).setSet("phones", ImmutableSet.of(phone1, phone2));
session.execute("INSERT INTO user(id, addr) VALUES (?, ?)", userId, addr);
Row r = session.execute("SELECT * FROM user WHERE id=?", userId).one();
assertEquals(r.getInt("id"), 1);
assertEquals(r.getUDTValue("addr"), addr);
} catch (Exception e) {
errorOut();
throw e;
}
}
/**
 * Test for ensuring udts are defined in a particular keyspace.
 *
 * @throws Exception
 */
@Test(groups = "short")
public void nonExistingTypesTest() throws Exception {
try {
session.execute("USE " + keyspace);
// unknown type names resolve to null metadata...
UserType addrDef = cluster.getMetadata().getKeyspace(keyspace).getUserType("address1");
UserType phoneDef = cluster.getMetadata().getKeyspace(keyspace).getUserType("phone1");
assertEquals(addrDef, null);
assertEquals(phoneDef, null);
// ...while the types created in getTableDefinitions() do resolve
addrDef = cluster.getMetadata().getKeyspace(keyspace).getUserType("address");
phoneDef = cluster.getMetadata().getKeyspace(keyspace).getUserType("phone");
assertNotEquals(addrDef, null);
assertNotEquals(phoneDef, null);
// create keyspace
String nonExistingKeyspace = keyspace + "_nonEx";
session.execute("CREATE KEYSPACE " + nonExistingKeyspace + " " +
"WITH replication = { 'class' : 'SimpleStrategy', 'replication_factor': '1'}");
session.execute("USE " + nonExistingKeyspace);
// UDTs are keyspace-scoped: the same names do not exist in the fresh keyspace
addrDef = cluster.getMetadata().getKeyspace(nonExistingKeyspace).getUserType("address");
phoneDef = cluster.getMetadata().getKeyspace(nonExistingKeyspace).getUserType("phone");
assertEquals(addrDef, null);
assertEquals(phoneDef, null);
session.execute("USE " + keyspace);
addrDef = cluster.getMetadata().getKeyspace(keyspace).getUserType("address");
phoneDef = cluster.getMetadata().getKeyspace(keyspace).getUserType("phone");
assertNotEquals(addrDef, null);
assertNotEquals(phoneDef, null);
} catch (Exception e) {
errorOut();
throw e;
}
}
/**
 * Test for ensuring extra-lengthy udts are handled correctly.
 * Original code found in python-driver:integration.standard.test_udts.py:test_udt_sizes
 *
 * @throws Exception
 */
@Test(groups = "short")
public void udtSizesTest() throws Exception {
int MAX_TEST_LENGTH = 1024;
try {
// create keyspace
session.execute("CREATE KEYSPACE test_udt_sizes " +
"WITH replication = { 'class' : 'SimpleStrategy', 'replication_factor': '1'}");
session.execute("USE test_udt_sizes");
// create the seed udt
StringBuilder sb = new StringBuilder();
for (int i = 0; i < MAX_TEST_LENGTH; ++i) {
sb.append(String.format("v_%s int", i));
if (i + 1 < MAX_TEST_LENGTH)
sb.append(",");
}
session.execute(String.format("CREATE TYPE lengthy_udt (%s)", sb.toString()));
// create a table with multiple sizes of udts
session.execute("CREATE TABLE mytable (k int PRIMARY KEY, v frozen<lengthy_udt>)");
// hold onto the UserType for future use
UserType udtDef = cluster.getMetadata().getKeyspace("test_udt_sizes").getUserType("lengthy_udt");
// verify inserts and reads
for (int i : Arrays.asList(0, 1, 2, 3, MAX_TEST_LENGTH)) {
// create udt
UDTValue createdUDT = udtDef.newValue();
for (int j = 0; j < i; ++j) {
createdUDT.setInt(j, j);
}
// write udt
session.execute("INSERT INTO mytable (k, v) VALUES (0, ?)", createdUDT);
// verify udt was written and read correctly
UDTValue r = session.execute("SELECT v FROM mytable WHERE k=0")
.one().getUDTValue("v");
assertEquals(r.toString(), createdUDT.toString());
}
} catch (Exception e) {
errorOut();
throw e;
}
}
/**
 * Test for inserting various types of DATA_TYPE_PRIMITIVES into UDT's.
 * Original code found in python-driver:integration.standard.test_udts.py:test_primitive_datatypes
 *
 * @throws Exception
 */
@Test(groups = "short")
public void testPrimitiveDatatypes() throws Exception {
try {
// create keyspace
session.execute("CREATE KEYSPACE testPrimitiveDatatypes " +
"WITH replication = { 'class' : 'SimpleStrategy', 'replication_factor': '1'}");
session.execute("USE testPrimitiveDatatypes");
// create UDT with one field per primitive type, named 'a', 'b', 'c', ...
List<String> alpha_type_list = new ArrayList<String>();
int startIndex = (int) 'a';
for (int i = 0; i < DATA_TYPE_PRIMITIVES.size(); i++) {
alpha_type_list.add(String.format("%s %s", Character.toString((char) (startIndex + i)),
DATA_TYPE_PRIMITIVES.get(i).getName()));
}
session.execute(String.format("CREATE TYPE alldatatypes (%s)", Joiner.on(',').join(alpha_type_list)));
session.execute("CREATE TABLE mytable (a int PRIMARY KEY, b frozen<alldatatypes>)");
// insert UDT data, using the typed setter matching each field's type
UserType alldatatypesDef = cluster.getMetadata().getKeyspace("testPrimitiveDatatypes").getUserType("alldatatypes");
UDTValue alldatatypes = alldatatypesDef.newValue();
for (int i = 0; i < DATA_TYPE_PRIMITIVES.size(); i++) {
DataType dataType = DATA_TYPE_PRIMITIVES.get(i);
String index = Character.toString((char) (startIndex + i));
Object sampleData = PrimitiveTypeSamples.ALL.get(dataType);
switch (dataType.getName()) {
case ASCII:
alldatatypes.setString(index, (String) sampleData);
break;
case BIGINT:
alldatatypes.setLong(index, ((Long) sampleData).longValue());
break;
case BLOB:
alldatatypes.setBytes(index, (ByteBuffer) sampleData);
break;
case BOOLEAN:
alldatatypes.setBool(index, ((Boolean) sampleData).booleanValue());
break;
case DECIMAL:
alldatatypes.setDecimal(index, (BigDecimal) sampleData);
break;
case DOUBLE:
alldatatypes.setDouble(index, ((Double) sampleData).doubleValue());
break;
case FLOAT:
alldatatypes.setFloat(index, ((Float) sampleData).floatValue());
break;
case INET:
alldatatypes.setInet(index, (InetAddress) sampleData);
break;
case INT:
alldatatypes.setInt(index, ((Integer) sampleData).intValue());
break;
case TEXT:
alldatatypes.setString(index, (String) sampleData);
break;
case TIMESTAMP:
alldatatypes.setDate(index, ((Date) sampleData));
break;
case TIMEUUID:
alldatatypes.setUUID(index, (UUID) sampleData);
break;
case UUID:
alldatatypes.setUUID(index, (UUID) sampleData);
break;
case VARCHAR:
alldatatypes.setString(index, (String) sampleData);
break;
case VARINT:
alldatatypes.setVarint(index, (BigInteger) sampleData);
break;
}
}
PreparedStatement ins = session.prepare("INSERT INTO mytable (a, b) VALUES (?, ?)");
session.execute(ins.bind(0, alldatatypes));
// retrieve and verify data
ResultSet rs = session.execute("SELECT * FROM mytable");
List<Row> rows = rs.all();
assertEquals(1, rows.size());
Row row = rows.get(0);
assertEquals(row.getInt("a"), 0);
assertEquals(row.getUDTValue("b"), alldatatypes);
} catch (Exception e) {
errorOut();
throw e;
}
}
/**
 * Test for inserting various types of DATA_TYPE_NON_PRIMITIVE into UDT's
 * Original code found in python-driver:integration.standard.test_udts.py:test_nonprimitive_datatypes
 *
 * @throws Exception
 */
@Test(groups = "short")
public void testNonPrimitiveDatatypes() throws Exception {
try {
// create keyspace
session.execute("CREATE KEYSPACE test_nonprimitive_datatypes " +
"WITH replication = { 'class' : 'SimpleStrategy', 'replication_factor': '1'}");
session.execute("USE test_nonprimitive_datatypes");
// counters are not allowed inside collections
// NOTE(review): this mutates the shared static DATA_TYPE_PRIMITIVES list,
// making the outcome of testPrimitiveDatatypes() depend on test ordering —
// consider removing counter at declaration time instead.
DATA_TYPE_PRIMITIVES.remove(DataType.counter());
// create UDT with one field per (collection kind, element type) pair,
// named 'a_a', 'a_b', ..., 'b_a', ...
List<String> alpha_type_list = new ArrayList<String>();
int startIndex = (int) 'a';
for (int i = 0; i < DATA_TYPE_NON_PRIMITIVE_NAMES.size(); i++)
for (int j = 0; j < DATA_TYPE_PRIMITIVES.size(); j++) {
String typeString;
if (DATA_TYPE_NON_PRIMITIVE_NAMES.get(i) == DataType.Name.MAP) {
typeString = (String.format("%s_%s %s<%s, %s>", Character.toString((char) (startIndex + i)),
Character.toString((char) (startIndex + j)), DATA_TYPE_NON_PRIMITIVE_NAMES.get(i),
DATA_TYPE_PRIMITIVES.get(j).getName(), DATA_TYPE_PRIMITIVES.get(j).getName()));
} else if (DATA_TYPE_NON_PRIMITIVE_NAMES.get(i) == DataType.Name.TUPLE) {
typeString = (String.format("%s_%s frozen<%s<%s>>", Character.toString((char) (startIndex + i)),
Character.toString((char) (startIndex + j)), DATA_TYPE_NON_PRIMITIVE_NAMES.get(i),
DATA_TYPE_PRIMITIVES.get(j).getName()));
} else {
typeString = (String.format("%s_%s %s<%s>", Character.toString((char) (startIndex + i)),
Character.toString((char) (startIndex + j)), DATA_TYPE_NON_PRIMITIVE_NAMES.get(i),
DATA_TYPE_PRIMITIVES.get(j).getName()));
}
alpha_type_list.add(typeString);
}
session.execute(String.format("CREATE TYPE alldatatypes (%s)", Joiner.on(',').join(alpha_type_list)));
session.execute("CREATE TABLE mytable (a int PRIMARY KEY, b frozen<alldatatypes>)");
// insert UDT data
UserType alldatatypesDef = cluster.getMetadata().getKeyspace("test_nonprimitive_datatypes").getUserType("alldatatypes");
UDTValue alldatatypes = alldatatypesDef.newValue();
for (int i = 0; i < DATA_TYPE_NON_PRIMITIVE_NAMES.size(); i++)
for (int j = 0; j < DATA_TYPE_PRIMITIVES.size(); j++) {
DataType.Name name = DATA_TYPE_NON_PRIMITIVE_NAMES.get(i);
DataType dataType = DATA_TYPE_PRIMITIVES.get(j);
String index = Character.toString((char) (startIndex + i)) + "_" + Character.toString((char) (startIndex + j));
Object sampleElement = PrimitiveTypeSamples.ALL.get(dataType);
switch (name) {
case LIST:
alldatatypes.setList(index, Lists.newArrayList(sampleElement));
break;
case SET:
alldatatypes.setSet(index, Sets.newHashSet(sampleElement));
break;
case MAP:
alldatatypes.setMap(index, ImmutableMap.of(sampleElement, sampleElement));
break;
case TUPLE:
alldatatypes.setTupleValue(index, TupleType.of(dataType).newValue(sampleElement));
}
}
PreparedStatement ins = session.prepare("INSERT INTO mytable (a, b) VALUES (?, ?)");
session.execute(ins.bind(0, alldatatypes));
// retrieve and verify data
ResultSet rs = session.execute("SELECT * FROM mytable");
List<Row> rows = rs.all();
assertEquals(1, rows.size());
Row row = rows.get(0);
assertEquals(row.getInt("a"), 0);
assertEquals(row.getUDTValue("b"), alldatatypes);
} catch (Exception e) {
errorOut();
throw e;
}
}
/**
 * Test for ensuring nested UDT's are handled correctly.
 * Original code found in python-driver:integration.standard.test_udts.py:test_nested_registered_udts
 *
 * @throws Exception
 */
@Test(groups = "short")
public void udtNestedTest() throws Exception {
final int MAX_NESTING_DEPTH = 4;
try {
// create keyspace
session.execute("CREATE KEYSPACE udtNestedTest " +
"WITH replication = { 'class' : 'SimpleStrategy', 'replication_factor': '1'}");
session.execute("USE udtNestedTest");
// create a chain of UDTs, each wrapping the previous one
session.execute("CREATE TYPE depth_0 (age int, name text)");
for (int i = 1; i <= MAX_NESTING_DEPTH; i++) {
session.execute(String.format("CREATE TYPE depth_%s (value frozen<depth_%s>)", String.valueOf(i), String.valueOf(i - 1)));
}
session.execute(String.format("CREATE TABLE mytable (a int PRIMARY KEY, b frozen<depth_0>, c frozen<depth_1>, d frozen<depth_2>, e frozen<depth_3>," +
"f frozen<depth_%s>)", MAX_NESTING_DEPTH));
// insert UDT data
UserType depthZeroDef = cluster.getMetadata().getKeyspace("udtNestedTest").getUserType("depth_0");
UDTValue depthZero = depthZeroDef.newValue().setInt("age", 42).setString("name", "Bob");
UserType depthOneDef = cluster.getMetadata().getKeyspace("udtNestedTest").getUserType("depth_1");
UDTValue depthOne = depthOneDef.newValue().setUDTValue("value", depthZero);
UserType depthTwoDef = cluster.getMetadata().getKeyspace("udtNestedTest").getUserType("depth_2");
UDTValue depthTwo = depthTwoDef.newValue().setUDTValue("value", depthOne);
UserType depthThreeDef = cluster.getMetadata().getKeyspace("udtNestedTest").getUserType("depth_3");
UDTValue depthThree = depthThreeDef.newValue().setUDTValue("value", depthTwo);
UserType depthFourDef = cluster.getMetadata().getKeyspace("udtNestedTest").getUserType("depth_4");
UDTValue depthFour = depthFourDef.newValue().setUDTValue("value", depthThree);
PreparedStatement ins = session.prepare("INSERT INTO mytable (a, b, c, d, e, f) VALUES (?, ?, ?, ?, ?, ?)");
session.execute(ins.bind(0, depthZero, depthOne, depthTwo, depthThree, depthFour));
// retrieve and verify data
ResultSet rs = session.execute("SELECT * FROM mytable");
List<Row> rows = rs.all();
assertEquals(1, rows.size());
Row row = rows.get(0);
assertEquals(row.getInt("a"), 0);
assertEquals(row.getUDTValue("b"), depthZero);
assertEquals(row.getUDTValue("c"), depthOne);
assertEquals(row.getUDTValue("d"), depthTwo);
assertEquals(row.getUDTValue("e"), depthThree);
assertEquals(row.getUDTValue("f"), depthFour);
} catch (Exception e) {
errorOut();
throw e;
}
}
/**
 * Test for inserting null values into UDT's
 * Original code found in python-driver:integration.standard.test_udts.py:test_udts_with_nulls
 *
 * @throws Exception
 */
@Test(groups = "short")
public void testUdtsWithNulls() throws Exception {
try {
// create keyspace
session.execute("CREATE KEYSPACE testUdtsWithNulls " +
"WITH replication = { 'class' : 'SimpleStrategy', 'replication_factor': '1'}");
session.execute("USE testUdtsWithNulls");
// create UDT
session.execute("CREATE TYPE user (a text, b int, c uuid, d blob)");
session.execute("CREATE TABLE mytable (a int PRIMARY KEY, b frozen<user>)");
// insert UDT data with null fields
UserType userTypeDef = cluster.getMetadata().getKeyspace("testUdtsWithNulls").getUserType("user");
UDTValue userType = userTypeDef.newValue().setString("a", null).setInt("b", 0).setUUID("c", null).setBytes("d", null);
PreparedStatement ins = session.prepare("INSERT INTO mytable (a, b) VALUES (?, ?)");
session.execute(ins.bind(0, userType));
// retrieve and verify data
ResultSet rs = session.execute("SELECT * FROM mytable");
List<Row> rows = rs.all();
assertEquals(1, rows.size());
Row row = rows.get(0);
assertEquals(row.getInt("a"), 0);
assertEquals(row.getUDTValue("b"), userType);
// test empty strings (distinct from null)
userType = userTypeDef.newValue().setString("a", "").setInt("b", 0).setUUID("c", null).setBytes("d", ByteBuffer.allocate(0));
session.execute(ins.bind(0, userType));
// retrieve and verify data
rs = session.execute("SELECT * FROM mytable");
rows = rs.all();
assertEquals(1, rows.size());
row = rows.get(0);
assertEquals(row.getInt("a"), 0);
assertEquals(row.getUDTValue("b"), userType);
} catch (Exception e) {
errorOut();
throw e;
}
}
/**
 * Test for inserting null values into collections of UDT's
 *
 * @throws Exception
 */
@Test(groups = "short")
public void testUdtsWithCollectionNulls() throws Exception {
try {
// create keyspace
session.execute("CREATE KEYSPACE testUdtsWithCollectionNulls " +
"WITH replication = { 'class' : 'SimpleStrategy', 'replication_factor': '1'}");
session.execute("USE testUdtsWithCollectionNulls");
// create UDT
session.execute("CREATE TYPE user (a List<text>, b Set<text>, c Map<text, text>, d frozen<Tuple<text>>)");
session.execute("CREATE TABLE mytable (a int PRIMARY KEY, b frozen<user>)");
// insert null UDT data
PreparedStatement ins = session.prepare("INSERT INTO mytable (a, b) " +
"VALUES (0, { a: ?, b: ?, c: ?, d: ? })");
session.execute(ins.bind().setList(0, null).setSet(1, null).setMap(2, null).setTupleValue(3, null));
// retrieve and verify data
ResultSet rs = session.execute("SELECT * FROM mytable");
List<Row> rows = rs.all();
assertEquals(1, rows.size());
Row row = rows.get(0);
assertEquals(row.getInt("a"), 0);
UserType userTypeDef = cluster.getMetadata().getKeyspace("testUdtsWithCollectionNulls").getUserType("user");
UDTValue userType = userTypeDef.newValue().setList("a", null).setSet("b", null).setMap("c", null).setTupleValue("d", null);
assertEquals(row.getUDTValue("b"), userType);
// test missing UDT args
ins = session.prepare("INSERT INTO mytable (a, b) " +
"VALUES (1, { a: ? })");
session.execute(ins.bind().setList(0, new ArrayList<Object>()));
// retrieve and verify data
rs = session.execute("SELECT * FROM mytable");
rows = rs.all();
assertEquals(2, rows.size());
row = rows.get(0);
assertEquals(row.getInt("a"), 1);
userType = userTypeDef.newValue().setList(0, new ArrayList<Object>());
assertEquals(row.getUDTValue("b"), userType);
} catch (Exception e) {
errorOut();
throw e;
}
}
}
|
|
package it.sauronsoftware.feed4j.html;
import it.sauronsoftware.feed4j.bean.RawAttribute;
import it.sauronsoftware.feed4j.bean.RawElement;
import it.sauronsoftware.feed4j.bean.RawNode;
import it.sauronsoftware.feed4j.bean.RawText;
import java.io.StringReader;
import java.net.MalformedURLException;
import java.net.URL;
import org.apache.html.dom.HTMLDocumentImpl;
import org.cyberneko.html.parsers.DOMFragmentParser;
import org.w3c.dom.DocumentFragment;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.Text;
import org.w3c.dom.html.HTMLDocument;
import org.xml.sax.InputSource;
/**
* HTML fragments parser and generator.
*
* @author Carlo Pelliccia
*/
public class HTMLFragmentHelper {
/**
* XML namespace.
*/
private static final String XML_NAMESPACE = "http://www.w3.org/XML/1998/namespace";
/**
* XHTML namespace.
*/
private static final String XHTML_NAMESPACE = "http://www.w3.org/1999/xhtml";
/**
 * Extracts the plain text contained in an HTML fragment.
 *
 * @param str
 *            The HTML fragment.
 * @return The plain text extracted from the fragment, or null when the
 *         fragment cannot be parsed.
 */
public static String fromHTMLtoTextPlain(String str) {
    HTMLDocument document = new HTMLDocumentImpl();
    DocumentFragment fragment = document.createDocumentFragment();
    DOMFragmentParser parser = new DOMFragmentParser();
    try {
        parser.parse(new InputSource(new StringReader(str)), fragment);
    } catch (Exception e) {
        // Best effort: unparseable markup is signalled with null rather than an exception.
        return null;
    }
    return nodeToText(fragment);
}
private static String nodeToText(Node node) {
StringBuffer buffer = new StringBuffer();
if (node instanceof Text) {
Text text = (Text) node;
buffer.append(text.getData());
buffer.append(' ');
} else {
NodeList list = node.getChildNodes();
for (int i = 0; i < list.getLength(); i++) {
buffer.append(nodeToText(list.item(i)));
buffer.append(' ');
}
}
String ret = buffer.toString();
return ret.replaceAll("\\s+", " ").trim();
}
/**
* This method extracts a plain text from a XHTML fragment.
*
* @param element
* The XHTML fragment as an XML raw element.
* @return The plain text extracted from the fragment.
*/
public static String fromXHTMLtoTextPlain(RawElement element) {
StringBuffer buffer = new StringBuffer();
for (int i = 0; i < element.getNodeCount(); i++) {
RawNode node = element.getNode(i);
if (node instanceof RawElement) {
RawElement sub = (RawElement) node;
buffer.append(fromXHTMLtoTextPlain(sub));
} else if (node instanceof RawText) {
RawText sub = (RawText) node;
buffer.append(sub.getText());
}
}
String ret = buffer.toString();
return ret.replaceAll("\\s+", " ").trim();
}
/**
* This method encodes a plain text fragment in a HTML one.
*
* @param str
* The plain text fragment.
* @return The encoded HTML fragment.
*/
public static String fromTextPlainToHTML(String str) {
return HTMLEntities.encode(str);
}
/**
* This method changes a XHTML fragment in a HTML one.
*
* @param element
* The XHTML fragment as an XML raw element.
* @return The HTML fragment as a string.
*/
public static String fromXHTMLtoHTML(RawElement element) {
return fromXHTMLtoHTML(element, null);
}
/**
* This method changes a XHTML fragment in a HTML one.
*
* @param element
* The XHTML fragment as an XML raw element.
* @param base
* The base URL for link href and image src absolute
* reconstruction.
* @return The HTML fragment as a string.
*/
private static String fromXHTMLtoHTML(RawElement element, URL base) {
String aux = element.getAttributeValue(XML_NAMESPACE, "base");
if (aux != null) {
try {
base = new URL(aux);
} catch (MalformedURLException e) {
;
}
}
StringBuffer buffer = new StringBuffer();
for (int i = 0; i < element.getNodeCount(); i++) {
RawNode node = element.getNode(i);
if (node instanceof RawText) {
RawText rawText = (RawText) node;
buffer.append(rawText.getText());
} else if (node instanceof RawElement) {
RawElement rawElement = (RawElement) node;
if (rawElement.getNamespaceURI().equals(XHTML_NAMESPACE)) {
buffer.append('<');
buffer.append(rawElement.getName());
for (int j = 0; j < rawElement.getAttributeCount(); j++) {
RawAttribute rawAttribute = rawElement.getAttribute(j);
if (rawAttribute.getNamespaceURI().equals(
XHTML_NAMESPACE)) {
String attrname = rawAttribute.getName();
String attrvalue = rawAttribute.getValue();
attrvalue = applBase(attrname, attrvalue, base);
buffer.append(' ');
buffer.append(attrname);
buffer.append('=');
buffer.append('"');
buffer.append(HTMLEntities.encode(attrvalue));
buffer.append('"');
}
}
String value = rawElement.getValue();
if (value != null) {
buffer.append('>');
buffer.append(HTMLEntities.encode(value));
buffer.append('<');
buffer.append('/');
buffer.append(rawElement.getName());
buffer.append('>');
} else if (rawElement.getNodeCount() > 0) {
buffer.append('>');
buffer.append(fromXHTMLtoHTML(rawElement, base));
buffer.append('<');
buffer.append('/');
buffer.append(rawElement.getName());
buffer.append('>');
} else {
buffer.append(' ');
buffer.append('/');
buffer.append('>');
}
}
}
}
String ret = buffer.toString();
return ret.replaceAll("\\s+", " ").trim();
}
private static String applBase(String name, String value, URL base) {
if (base != null && (name.equals("href") || name.equals("src"))) {
if (value.indexOf(':') == -1) {
try {
URL aux = new URL(base, value);
value = aux.toExternalForm();
} catch (MalformedURLException e) {
;
}
}
}
return value;
}
}
|
|
package org.interledger.cryptoconditions;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableList.Builder;
import com.google.common.io.BaseEncoding;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.interledger.cryptoconditions.helpers.RsaTestVectorJson;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import java.io.File;
import java.math.BigInteger;
import java.net.URI;
import java.security.KeyFactory;
import java.security.Provider;
import java.security.SecureRandom;
import java.security.Security;
import java.security.Signature;
import java.security.interfaces.RSAPrivateKey;
import java.security.spec.PKCS8EncodedKeySpec;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
/**
 * Parameterized test that exercises RSA-PSS signing (SHA-256 and SHA-1 variants)
 * against json-encoded test vector files found under the "/rsa" resource
 * directory next to this class.
 */
@RunWith(Parameterized.class)
public class RsaSha256SignatureTest extends AbstractCryptoConditionTest {

    /**
     * Need to add BouncyCastle so we have a provider that supports SHA256withRSA/PSS signatures
     */
    static {
        Provider bc = new BouncyCastleProvider();
        Security.addProvider(bc);
    }

    // Factory used to rebuild RSA private keys from the PEM strings in the vectors.
    private final KeyFactory keyFactory;
    // PSS signer for this parameterized run (SHA256withRSA/PSS or SHA1withRSA/PSS).
    private final Signature rsaSigner;
    // The json test vector (private key + message/salt/signature cases) for this run.
    private final RsaTestVectorJson rsaJsonTestVector;

    /**
     * Required args Constructor.
     *
     * @param testVectorPair A {@link RsaTestVectorPair} to populate this test.
     * @throws Exception If anything goes wrong.
     */
    public RsaSha256SignatureTest(final RsaTestVectorPair testVectorPair) throws Exception {
        Objects.requireNonNull(testVectorPair);
        this.keyFactory = KeyFactory.getInstance("RSA");
        this.rsaSigner = testVectorPair.getSignature();
        this.rsaJsonTestVector = testVectorPair.getRsaTestVectorJson();
    }

    /**
     * Loads a list of tests based on the json-encoded test vector files. Each object in the array
     * is paired with the matching PSS signer: files ending in "sha256.json" use
     * SHA256withRSA/PSS, files ending in "sha1.json" use SHA1withRSA/PSS (both
     * from the BouncyCastle provider registered above).
     */
    @Parameters(name = "Modulus {index}: {0}")
    public static Collection<RsaTestVectorPair> testVectors() throws Exception {
        // Locate the directory containing this compiled class, then its "/rsa" sibling.
        final URI baseUri = RsaSha256SignatureTest.class
            .getResource(RsaSha256SignatureTest.class.getSimpleName() + ".class").toURI();
        final File baseDirectoryFile = new File(baseUri).getParentFile();
        final File validTestVectorDir = new File(baseDirectoryFile, "/rsa");
        final Builder<RsaTestVectorPair> vectors = ImmutableList.builder();
        final ObjectMapper mapper = new ObjectMapper();
        final Signature rsaSha256Signer = Signature.getInstance("SHA256withRSA/PSS", "BC");
        Arrays.stream(validTestVectorDir.listFiles()).forEach(file -> {
            try {
                if (file.getName().endsWith("sha256.json")) {
                    final List<RsaTestVectorJson> testVectors = mapper
                        .readValue(file, new TypeReference<List<RsaTestVectorJson>>() {
                        });
                    vectors.addAll(
                        testVectors.stream()
                            .map(tv -> new RsaTestVectorPair(rsaSha256Signer, tv))
                            .collect(Collectors.toList()));
                }
            } catch (Exception e) {
                // Checked exceptions cannot escape the lambda; rethrow unchecked.
                throw new RuntimeException(e);
            }
        });
        final Signature rsaSha1Signer = Signature.getInstance("SHA1withRSA/PSS", "BC");
        Arrays.stream(validTestVectorDir.listFiles()).forEach(file -> {
            try {
                if (file.getName().endsWith("sha1.json")) {
                    final List<RsaTestVectorJson> testVectors = mapper
                        .readValue(file, new TypeReference<List<RsaTestVectorJson>>() {
                        });
                    vectors.addAll(
                        testVectors.stream()
                            .map(tv -> new RsaTestVectorPair(rsaSha1Signer, tv))
                            .collect(Collectors.toList()));
                }
            } catch (Exception e) {
                // Checked exceptions cannot escape the lambda; rethrow unchecked.
                throw new RuntimeException(e);
            }
        });
        return vectors.build();
    }

    /**
     * This test ensures that the supplied private key signs a message correctly.
     * The salt from the vector is injected via {@link FixedRandom}, making the
     * normally randomized PSS signature deterministic and comparable.
     */
    @Test
    public void testSignsCorrectly() throws Exception {
        final String privKeyPem = rsaJsonTestVector.getPrivateKey();
        final RSAPrivateKey privKey = this.buildRsaPrivKey(privKeyPem);
        rsaJsonTestVector.getCases().stream().forEach(_case -> {
            try {
                // BaseEncoding.base16() only accepts upper-case hex digits.
                final byte[] saltHex = BaseEncoding.base16().decode(_case.getSalt().toUpperCase());
                rsaSigner.initSign(privKey, new FixedRandom(saltHex));
                rsaSigner.update(BaseEncoding.base16().decode(_case.getMessage().toUpperCase()));
                byte[] rsaSignature = rsaSigner.sign();
                assertThat(_case.getSignature().toUpperCase(),
                    is(BaseEncoding.base16().encode(rsaSignature)));
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        });
    }

    /**
     * This test ensures that the supplied private key signs a message correctly.
     *
     * NOTE(review): despite its name, this test never calls Signature.verify —
     * it re-signs with the fixed salt and compares the produced bytes against
     * the expected signature, which is essentially the same check as
     * {@link #testSignsCorrectly()}. Consider verifying with the public key
     * instead — TODO confirm intent.
     */
    @Test
    public void testVerifiesCorrectly()
        throws Exception {
        final String privKeyPem = rsaJsonTestVector.getPrivateKey();
        final RSAPrivateKey privKey = this.buildRsaPrivKey(privKeyPem);
        rsaJsonTestVector.getCases().stream().forEach(_case -> {
            try {
                final byte[] saltHex = BaseEncoding.base16().decode(_case.getSalt().toUpperCase());
                rsaSigner.initSign(privKey, new FixedRandom(saltHex));
                rsaSigner.update(BaseEncoding.base16().decode(_case.getMessage().toUpperCase()));
                final byte[] expectedSignatureBytes = BaseEncoding.base16()
                    .decode(_case.getSignature().toUpperCase());
                final byte[] actualSignatureByte = rsaSigner.sign();
                assertThat(actualSignatureByte, is(expectedSignatureBytes));
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        });
    }

    /**
     * Builds an {@link RSAPrivateKey} from a PEM-encoded PKCS#1 private key by
     * prepending a fixed PKCS#8 header and patching the two ASN.1 length
     * fields, then feeding the result to the RSA {@link KeyFactory}.
     *
     * NOTE(review): the patched length fields are each written as exactly two
     * bytes — this assumes key material whose lengths fit that encoding
     * (e.g. 2048-bit keys); TODO confirm for other key sizes.
     */
    private RSAPrivateKey buildRsaPrivKey(String privateKeyString) throws Exception {
        final byte[] innerKey = BaseEncoding.base64()
            .decode(privateKeyString.replaceAll("-----\\w+ RSA PRIVATE KEY-----", ""));
        final byte[] result = new byte[innerKey.length + 26];
        System
            .arraycopy(BaseEncoding.base64().decode("MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKY="), 0, result,
                0, 26);
        System.arraycopy(BigInteger.valueOf(result.length - 4).toByteArray(), 0, result, 2, 2);
        System.arraycopy(BigInteger.valueOf(innerKey.length).toByteArray(), 0, result, 24, 2);
        System.arraycopy(innerKey, 0, result, 26, innerKey.length);
        PKCS8EncodedKeySpec spec = new PKCS8EncodedKeySpec(result);
        return (RSAPrivateKey) this.keyFactory.generatePrivate(spec);
    }

    /**
     * A "random" source that always hands back the same fixed bytes. Used to
     * inject the test vector's salt into the PSS signer so signatures are
     * deterministic and can be compared byte-for-byte.
     */
    private class FixedRandom extends SecureRandom {

        byte[] vals;

        FixedRandom(byte[] vals) {
            this.vals = vals;
        }

        public void nextBytes(byte[] bytes) {
            System.arraycopy(vals, 0, bytes, 0, vals.length);
        }
    }

    /**
     * Pairs a configured PSS {@link Signature} with the json test vector it
     * should be run against; one pair per parameterized test instance.
     */
    private static class RsaTestVectorPair {

        private final Signature signature;
        private final RsaTestVectorJson rsaTestVectorJson;

        private RsaTestVectorPair(final Signature signature,
            final RsaTestVectorJson rsaTestVectorJson) {
            this.signature = Objects.requireNonNull(signature);
            this.rsaTestVectorJson = Objects.requireNonNull(rsaTestVectorJson);
        }

        public Signature getSignature() {
            return signature;
        }

        public RsaTestVectorJson getRsaTestVectorJson() {
            return rsaTestVectorJson;
        }
    }
}
|
|
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* @author Eugene Zhuravlev
*/
package com.intellij.debugger.jdi;
import com.intellij.debugger.DebuggerBundle;
import com.intellij.debugger.engine.DebuggerManagerThreadImpl;
import com.intellij.debugger.engine.evaluation.EvaluateException;
import com.intellij.debugger.engine.evaluation.EvaluateExceptionUtil;
import com.intellij.debugger.engine.jdi.StackFrameProxy;
import com.intellij.debugger.impl.DebuggerUtilsEx;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.util.ThreeState;
import com.sun.jdi.*;
import gnu.trove.THashMap;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
 * Caching proxy over a JDI {@link StackFrame}. A raw JDI frame becomes invalid
 * whenever the VM is resumed, so the frame is identified by its index counted
 * from the BOTTOM of the stack (which is stable across suspensions, unlike the
 * top-based index) and is lazily re-fetched when needed. Most accessors retry
 * once after an {@link InvalidStackFrameException}, clearing cached state in
 * between.
 */
public class StackFrameProxyImpl extends JdiProxy implements StackFrameProxy {
  private static final Logger LOG = Logger.getInstance("#com.intellij.debugger.jdi.StackFrameProxyImpl");
  private final ThreadReferenceProxyImpl myThreadProxy;
  private final int myFrameFromBottomIndex; // 1-based

  //caches
  private int myFrameIndex = -1;                       // 0-based index from the top, -1 = not computed
  private StackFrame myStackFrame;                     // the underlying JDI frame, null = not fetched
  private ObjectReference myThisReference;             // cached "this" of the frame
  private ClassLoaderReference myClassLoader;          // cached class loader of the frame's declaring type
  private ThreeState myIsObsolete = ThreeState.UNSURE; // cached obsoleteness of the frame's method
  private Map<LocalVariable, Value> myAllValues;       // cached variable values; mutable, see setValue()

  public StackFrameProxyImpl(@NotNull ThreadReferenceProxyImpl threadProxy, @NotNull StackFrame frame, int fromBottomIndex /* 1-based */) {
    super(threadProxy.getVirtualMachine());
    myThreadProxy = threadProxy;
    myFrameFromBottomIndex = fromBottomIndex;
    myStackFrame = frame;
  }

  /**
   * Returns whether the frame's method is obsolete (i.e. was redefined via
   * class redefinition). Cached; retries once on an invalid frame.
   *
   * @throws EvaluateException if the frame stays invalid after retrying.
   */
  public boolean isObsolete() throws EvaluateException {
    DebuggerManagerThreadImpl.assertIsManagerThread();
    checkValid();
    if (myIsObsolete != ThreeState.UNSURE) {
      return myIsObsolete.toBoolean();
    }
    InvalidStackFrameException error = null;
    for (int attempt = 0; attempt < 2; attempt++) {
      try {
        Method method = DebuggerUtilsEx.getMethod(location());
        boolean isObsolete = (getVirtualMachine().canRedefineClasses() && (method == null || method.isObsolete()));
        myIsObsolete = ThreeState.fromBoolean(isObsolete);
        return isObsolete;
      }
      catch (InvalidStackFrameException e) {
        error = e;
        clearCaches();
      }
      catch (InternalException e) {
        if (e.errorCode() == 23 /*INVALID_METHODID according to JDI sources*/) {
          // The method id is gone — treat the frame's method as obsolete.
          myIsObsolete = ThreeState.YES;
          return true;
        }
        throw e;
      }
    }
    throw new EvaluateException(error.getMessage(), error);
  }

  @Override
  public boolean isValid() {
    DebuggerManagerThreadImpl.assertIsManagerThread();
    if (!super.isValid()) {
      return false;
    }
    try {
      if (myStackFrame != null) {
        myStackFrame.location(); //extra check if jdi frame is valid
      }
      return true;
    } catch (InvalidStackFrameException e) {
      return false;
    }
  }

  /** Drops every cached piece of frame state; called whenever the JDI frame is detected stale. */
  @Override
  protected void clearCaches() {
    DebuggerManagerThreadImpl.assertIsManagerThread();
    if (LOG.isDebugEnabled()) {
      LOG.debug("caches cleared " + super.toString());
    }
    myFrameIndex = -1;
    myStackFrame = null;
    myIsObsolete = ThreeState.UNSURE;
    myThisReference = null;
    myClassLoader = null;
    myAllValues = null;
  }

  /**
   * Use with caution. Better access stackframe data through the Proxy's methods
   *
   * @return the underlying JDI frame, re-fetched from the thread if the cache was cleared.
   * @throws EvaluateException if the frame cannot be (re-)obtained.
   */
  @Override
  public StackFrame getStackFrame() throws EvaluateException {
    DebuggerManagerThreadImpl.assertIsManagerThread();
    checkValid();
    if (myStackFrame == null) {
      try {
        final ThreadReference threadRef = myThreadProxy.getThreadReference();
        myStackFrame = threadRef.frame(getFrameIndex());
      }
      catch (IndexOutOfBoundsException e) {
        throw new EvaluateException(e.getMessage(), e);
      }
      catch (ObjectCollectedException ignored) {
        throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.thread.collected"));
      }
      catch (IncompatibleThreadStateException e) {
        throw EvaluateExceptionUtil.createEvaluateException(e);
      }
    }
    return myStackFrame;
  }

  /**
   * Returns the 0-based frame index counted from the TOP of the stack,
   * recomputed from the stable bottom-based index and the current frame count.
   */
  @Override
  public int getFrameIndex() throws EvaluateException {
    DebuggerManagerThreadImpl.assertIsManagerThread();
    checkValid();
    if(myFrameIndex == -1) {
      int count = myThreadProxy.frameCount();
      if(myFrameFromBottomIndex > count) {
        // The stack shrank below this frame — it no longer exists.
        throw EvaluateExceptionUtil.createEvaluateException(new IncompatibleThreadStateException());
      }
      myFrameIndex = count - myFrameFromBottomIndex;
    }
    return myFrameIndex;
  }

  @NotNull
  @Override
  public VirtualMachineProxyImpl getVirtualMachine() {
    // myTimer is initialized by the JdiProxy superclass with the VM proxy passed to the constructor.
    return (VirtualMachineProxyImpl) myTimer;
  }

  /**
   * Returns the current code location of the frame, retrying once if the
   * underlying frame became invalid.
   */
  @Override
  public Location location() throws EvaluateException {
    InvalidStackFrameException error = null;
    for (int attempt = 0; attempt < 2; attempt++) {
      try {
        return getStackFrame().location();
      }
      catch (InvalidStackFrameException e) {
        error = e;
        clearCaches();
      }
    }
    throw new EvaluateException(error.getMessage(), error);
  }

  @NotNull
  @Override
  public ThreadReferenceProxyImpl threadProxy() {
    return myThreadProxy;
  }

  @Override
  public @NonNls String toString() {
    try {
      return "StackFrameProxyImpl: " + getStackFrame().toString();
    }
    catch (EvaluateException e) {
      return "StackFrameProxyImpl: " + e.getMessage() + "; frameFromBottom = " + myFrameFromBottomIndex + " threadName = " + threadProxy().name();
    }
  }

  /**
   * Returns the frame's {@code this} object, or {@code null} for static/native
   * frames or when the value cannot be retrieved. Cached; retries once on an
   * invalid frame.
   */
  @Nullable
  public ObjectReference thisObject() throws EvaluateException {
    DebuggerManagerThreadImpl.assertIsManagerThread();
    checkValid();
    try {
      for (int attempt = 0; attempt < 2; attempt++) {
        try {
          if(myThisReference == null) {
            myThisReference = getStackFrame().thisObject();
          }
          break;
        }
        catch (InvalidStackFrameException ignored) {
          clearCaches();
        }
      }
    }
    catch (InternalException e) {
      // suppress some internal errors caused by bugs in specific JDI implementations
      if (e.errorCode() != 23 && e.errorCode() != 35) {
        throw EvaluateExceptionUtil.createEvaluateException(e);
      }
      else {
        LOG.info("Exception while getting this object", e);
      }
    }
    catch (IllegalArgumentException e) {
      LOG.info("Exception while getting this object", e);
    }
    return myThisReference;
  }

  /**
   * Returns proxies for all variables visible at the frame's current location.
   * Retries once on an invalid frame.
   *
   * @throws EvaluateException if debug information is absent or the frame stays invalid.
   */
  @NotNull
  public List<LocalVariableProxyImpl> visibleVariables() throws EvaluateException {
    DebuggerManagerThreadImpl.assertIsManagerThread();
    RuntimeException error = null;
    for (int attempt = 0; attempt < 2; attempt++) {
      try {
        final List<LocalVariable> list = getStackFrame().visibleVariables();
        final List<LocalVariableProxyImpl> locals = new ArrayList<>(list.size());
        for (LocalVariable localVariable : list) {
          LOG.assertTrue(localVariable != null);
          locals.add(new LocalVariableProxyImpl(this, localVariable));
        }
        return locals;
      }
      catch (InvalidStackFrameException | IllegalArgumentException e) {
        error = e;
        clearCaches();
      }
      catch (AbsentInformationException e) {
        throw EvaluateExceptionUtil.createEvaluateException(e);
      }
    }
    throw new EvaluateException(error.getMessage(), error);
  }

  /**
   * Looks up a visible variable by name, or {@code null} if there is none.
   */
  @Override
  public LocalVariableProxyImpl visibleVariableByName(String name) throws EvaluateException {
    DebuggerManagerThreadImpl.assertIsManagerThread();
    final LocalVariable variable = visibleVariableByNameInt(name);
    return variable != null ? new LocalVariableProxyImpl(this, variable) : null;
  }

  /**
   * Returns the value of the named visible variable, or {@code null} if no
   * such variable is visible.
   */
  @Nullable
  public Value visibleValueByName(@NotNull String name) throws EvaluateException {
    LocalVariable variable = visibleVariableByNameInt(name);
    return variable != null ? getValue(new LocalVariableProxyImpl(this, variable)) : null;
  }

  /**
   * Raw JDI lookup of a visible variable by name, retrying once on an invalid frame.
   */
  protected LocalVariable visibleVariableByNameInt(String name) throws EvaluateException {
    DebuggerManagerThreadImpl.assertIsManagerThread();
    InvalidStackFrameException error = null;
    for (int attempt = 0; attempt < 2; attempt++) {
      try {
        try {
          return getStackFrame().visibleVariableByName(name);
        }
        catch (InvalidStackFrameException e) {
          error = e;
          clearCaches();
        }
      }
      catch (InvalidStackFrameException | AbsentInformationException e) {
        throw EvaluateExceptionUtil.createEvaluateException(e);
      }
    }
    throw new EvaluateException(error.getMessage(), error);
  }

  /**
   * Returns the value of the given variable, preferring the bulk-fetched cache
   * and falling back to a direct frame read. Retries once on an invalid frame.
   */
  public Value getValue(LocalVariableProxyImpl localVariable) throws EvaluateException {
    DebuggerManagerThreadImpl.assertIsManagerThread();
    InvalidStackFrameException error = null;
    for (int attempt = 0; attempt < 2; attempt++) {
      try {
        Map<LocalVariable, Value> values = getAllValues();
        LocalVariable variable = localVariable.getVariable();
        if (values.containsKey(variable)) {
          return values.get(variable);
        }
        else { // try direct get
          return getStackFrame().getValue(variable);
        }
      }
      catch (InvalidStackFrameException e) {
        error = e;
        clearCaches();
      }
      catch (InternalException e) {
        if (e.errorCode() == 35 || e.errorCode() == 101) {
          throw new EvaluateException(DebuggerBundle.message("error.corrupt.debug.info", e.getMessage()), e);
        }
        else throw e;
      }
    }
    throw new EvaluateException(error.getMessage(), error);
  }

  /**
   * Returns the frame's argument values, or an empty list for opaque
   * (e.g. native) frames. Retries once on an invalid frame.
   */
  @NotNull
  public List<Value> getArgumentValues() throws EvaluateException {
    DebuggerManagerThreadImpl.assertIsManagerThread();
    InvalidStackFrameException error = null;
    for (int attempt = 0; attempt < 2; attempt++) {
      try {
        final StackFrame stackFrame = getStackFrame();
        return stackFrame != null? stackFrame.getArgumentValues() : Collections.emptyList();
      }
      catch (InternalException e) {
        // From Oracle's forums:
        // This could be a JPDA bug. Unexpected JDWP Error: 32 means that an 'opaque' frame was detected at the lower JPDA levels,
        // typically a native frame.
        if (e.errorCode() == 32 /*opaque frame JDI bug*/ ) {
          return Collections.emptyList();
        }
        else {
          throw e;
        }
      }
      catch (InvalidStackFrameException e) {
        error = e;
        clearCaches();
      }
    }
    throw new EvaluateException(error.getMessage(), error);
  }

  /**
   * Bulk-fetches and caches the values of all visible variables.
   * The cached map must stay MUTABLE because {@link #setValue} updates it in place.
   */
  private Map<LocalVariable, Value> getAllValues() throws EvaluateException{
    DebuggerManagerThreadImpl.assertIsManagerThread();
    checkValid();
    if (myAllValues == null) {
      try {
        StackFrame stackFrame = getStackFrame();
        myAllValues = new THashMap<>(stackFrame.getValues(stackFrame.visibleVariables()));
      }
      catch (InconsistentDebugInfoException ignored) {
        clearCaches();
        throw EvaluateExceptionUtil.INCONSISTEND_DEBUG_INFO;
      }
      catch (AbsentInformationException e) {
        throw EvaluateExceptionUtil.createEvaluateException(e);
      }
      catch (InternalException e) {
        // extra logging for IDEA-141270
        if (e.errorCode() == 35 || e.errorCode() == 101) {
          LOG.info(e);
          // FIX: cache a mutable empty map instead of Collections.emptyMap();
          // setValue() later calls myAllValues.put(...), which would throw
          // UnsupportedOperationException on the immutable map.
          myAllValues = new THashMap<>();
        }
        else throw e;
      }
    }
    return myAllValues;
  }

  /**
   * Sets the given variable's value in the frame and refreshes the cached
   * value (re-read from the frame to be 100% sure). Retries once on an
   * invalid frame.
   */
  public void setValue(LocalVariableProxyImpl localVariable, Value value) throws EvaluateException, ClassNotLoadedException, InvalidTypeException {
    DebuggerManagerThreadImpl.assertIsManagerThread();
    InvalidStackFrameException error = null;
    for (int attempt = 0; attempt < 2; attempt++) {
      try {
        final LocalVariable variable = localVariable.getVariable();
        final StackFrame stackFrame = getStackFrame();
        stackFrame.setValue(variable, (value instanceof ObjectReference)? ((ObjectReference)value) : value);
        if (myAllValues != null) {
          // update cached data if any
          // re-read the value just set from the stackframe to be 100% sure
          myAllValues.put(variable, stackFrame.getValue(variable));
        }
        return;
      }
      catch (InvalidStackFrameException e) {
        error = e;
        clearCaches();
      }
    }
    throw new EvaluateException(error.getMessage(), error);
  }

  @Override
  public int hashCode() {
    return 31 * myThreadProxy.hashCode() + myFrameFromBottomIndex;
  }

  @Override
  public boolean equals(final Object obj) {
    if (!(obj instanceof StackFrameProxyImpl)) {
      return false;
    }
    StackFrameProxyImpl frameProxy = (StackFrameProxyImpl)obj;
    if(frameProxy == this)return true;
    // Identity = same thread + same stable bottom-based index.
    return (myFrameFromBottomIndex == frameProxy.myFrameFromBottomIndex) &&
           (myThreadProxy.equals(frameProxy.myThreadProxy));
  }

  /**
   * Returns whether the variable is visible at the frame's current location.
   */
  public boolean isLocalVariableVisible(LocalVariableProxyImpl var) throws EvaluateException {
    try {
      return var.getVariable().isVisible(getStackFrame());
    }
    catch (IllegalArgumentException ignored) {
      // can be thrown if frame's method is different than variable's method
      return false;
    }
  }

  /**
   * Returns the class loader of the frame's declaring type (cached).
   */
  @Override
  public ClassLoaderReference getClassLoader() throws EvaluateException {
    if(myClassLoader == null) {
      myClassLoader = location().declaringType().classLoader();
    }
    return myClassLoader;
  }

  /** @return true if this is the bottom-most frame of the thread's stack. */
  public boolean isBottom() {
    return myFrameFromBottomIndex == 1;
  }

  /** @return the 1-based frame index counted from the bottom of the stack. */
  public int getIndexFromBottom() {
    return myFrameFromBottomIndex;
  }
}
|
|
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/speech/v1beta1/cloud_speech.proto
package com.google.cloud.speech.v1beta1;
/**
*
*
* <pre>
* The only message returned to the client by `AsyncRecognize`. It contains the
* result as zero or more sequential `SpeechRecognitionResult` messages. It is
* included in the `result.response` field of the `Operation` returned by the
* `GetOperation` call of the `google::longrunning::Operations` service.
* </pre>
*
* Protobuf type {@code google.cloud.speech.v1beta1.AsyncRecognizeResponse}
*/
public final class AsyncRecognizeResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.speech.v1beta1.AsyncRecognizeResponse)
AsyncRecognizeResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use AsyncRecognizeResponse.newBuilder() to construct.
private AsyncRecognizeResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private AsyncRecognizeResponse() {
results_ = java.util.Collections.emptyList();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private AsyncRecognizeResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default:
{
if (!parseUnknownFieldProto3(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
case 18:
{
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
results_ =
new java.util.ArrayList<
com.google.cloud.speech.v1beta1.SpeechRecognitionResult>();
mutable_bitField0_ |= 0x00000001;
}
results_.add(
input.readMessage(
com.google.cloud.speech.v1beta1.SpeechRecognitionResult.parser(),
extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
results_ = java.util.Collections.unmodifiableList(results_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.speech.v1beta1.SpeechProto
.internal_static_google_cloud_speech_v1beta1_AsyncRecognizeResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.speech.v1beta1.SpeechProto
.internal_static_google_cloud_speech_v1beta1_AsyncRecognizeResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.speech.v1beta1.AsyncRecognizeResponse.class,
com.google.cloud.speech.v1beta1.AsyncRecognizeResponse.Builder.class);
}
public static final int RESULTS_FIELD_NUMBER = 2;
private java.util.List<com.google.cloud.speech.v1beta1.SpeechRecognitionResult> results_;
/**
*
*
* <pre>
* *Output-only* Sequential list of transcription results corresponding to
* sequential portions of audio.
* </pre>
*
* <code>repeated .google.cloud.speech.v1beta1.SpeechRecognitionResult results = 2;</code>
*/
public java.util.List<com.google.cloud.speech.v1beta1.SpeechRecognitionResult> getResultsList() {
return results_;
}
/**
*
*
* <pre>
* *Output-only* Sequential list of transcription results corresponding to
* sequential portions of audio.
* </pre>
*
* <code>repeated .google.cloud.speech.v1beta1.SpeechRecognitionResult results = 2;</code>
*/
public java.util.List<? extends com.google.cloud.speech.v1beta1.SpeechRecognitionResultOrBuilder>
getResultsOrBuilderList() {
return results_;
}
/**
*
*
* <pre>
* *Output-only* Sequential list of transcription results corresponding to
* sequential portions of audio.
* </pre>
*
* <code>repeated .google.cloud.speech.v1beta1.SpeechRecognitionResult results = 2;</code>
*/
public int getResultsCount() {
return results_.size();
}
/**
*
*
* <pre>
* *Output-only* Sequential list of transcription results corresponding to
* sequential portions of audio.
* </pre>
*
* <code>repeated .google.cloud.speech.v1beta1.SpeechRecognitionResult results = 2;</code>
*/
public com.google.cloud.speech.v1beta1.SpeechRecognitionResult getResults(int index) {
return results_.get(index);
}
/**
*
*
* <pre>
* *Output-only* Sequential list of transcription results corresponding to
* sequential portions of audio.
* </pre>
*
* <code>repeated .google.cloud.speech.v1beta1.SpeechRecognitionResult results = 2;</code>
*/
public com.google.cloud.speech.v1beta1.SpeechRecognitionResultOrBuilder getResultsOrBuilder(
int index) {
return results_.get(index);
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < results_.size(); i++) {
output.writeMessage(2, results_.get(i));
}
unknownFields.writeTo(output);
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < results_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, results_.get(i));
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.speech.v1beta1.AsyncRecognizeResponse)) {
return super.equals(obj);
}
com.google.cloud.speech.v1beta1.AsyncRecognizeResponse other =
(com.google.cloud.speech.v1beta1.AsyncRecognizeResponse) obj;
boolean result = true;
result = result && getResultsList().equals(other.getResultsList());
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getResultsCount() > 0) {
hash = (37 * hash) + RESULTS_FIELD_NUMBER;
hash = (53 * hash) + getResultsList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.speech.v1beta1.AsyncRecognizeResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
  // ---------------------------------------------------------------------
  // Generated static parse entry points (protoc / GeneratedMessageV3).
  // Each overload decodes a serialized AsyncRecognizeResponse from a
  // different source type, optionally honoring an extension registry.
  // Do not hand-edit: regeneration from the .proto will overwrite this.
  // ---------------------------------------------------------------------
  public static com.google.cloud.speech.v1beta1.AsyncRecognizeResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.speech.v1beta1.AsyncRecognizeResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.speech.v1beta1.AsyncRecognizeResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.speech.v1beta1.AsyncRecognizeResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.speech.v1beta1.AsyncRecognizeResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Stream variants surface parse failures as java.io.IOException.
  public static com.google.cloud.speech.v1beta1.AsyncRecognizeResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.speech.v1beta1.AsyncRecognizeResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants expect a length prefix before the message bytes.
  public static com.google.cloud.speech.v1beta1.AsyncRecognizeResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.speech.v1beta1.AsyncRecognizeResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.speech.v1beta1.AsyncRecognizeResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.speech.v1beta1.AsyncRecognizeResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Generated builder factory methods.
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.speech.v1beta1.AsyncRecognizeResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    // The default instance carries no state worth merging, so skip mergeFrom.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* The only message returned to the client by `AsyncRecognize`. It contains the
* result as zero or more sequential `SpeechRecognitionResult` messages. It is
* included in the `result.response` field of the `Operation` returned by the
* `GetOperation` call of the `google::longrunning::Operations` service.
* </pre>
*
* Protobuf type {@code google.cloud.speech.v1beta1.AsyncRecognizeResponse}
*/
  // Generated builder for AsyncRecognizeResponse (protoc / GeneratedMessageV3).
  // Only edit comments here: regeneration from the .proto will overwrite this
  // class. Bit 0 of bitField0_ tracks whether results_ is a private mutable
  // copy; resultsBuilder_, when non-null, owns the repeated field instead.
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.speech.v1beta1.AsyncRecognizeResponse)
      com.google.cloud.speech.v1beta1.AsyncRecognizeResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.speech.v1beta1.SpeechProto
          .internal_static_google_cloud_speech_v1beta1_AsyncRecognizeResponse_descriptor;
    }
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.speech.v1beta1.SpeechProto
          .internal_static_google_cloud_speech_v1beta1_AsyncRecognizeResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.speech.v1beta1.AsyncRecognizeResponse.class,
              com.google.cloud.speech.v1beta1.AsyncRecognizeResponse.Builder.class);
    }
    // Construct using com.google.cloud.speech.v1beta1.AsyncRecognizeResponse.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getResultsFieldBuilder();
      }
    }
    // Resets the builder to the default (empty) message state.
    public Builder clear() {
      super.clear();
      if (resultsBuilder_ == null) {
        results_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        resultsBuilder_.clear();
      }
      return this;
    }
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.speech.v1beta1.SpeechProto
          .internal_static_google_cloud_speech_v1beta1_AsyncRecognizeResponse_descriptor;
    }
    public com.google.cloud.speech.v1beta1.AsyncRecognizeResponse getDefaultInstanceForType() {
      return com.google.cloud.speech.v1beta1.AsyncRecognizeResponse.getDefaultInstance();
    }
    // Builds the message; isInitialized() is always true for this message
    // (no required fields), so the exception path is effectively dead.
    public com.google.cloud.speech.v1beta1.AsyncRecognizeResponse build() {
      com.google.cloud.speech.v1beta1.AsyncRecognizeResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    // Builds without the initialization check; when no field builder is in
    // use, freezes results_ into an unmodifiable list and clears the
    // "mutable copy" bit so the builder stops aliasing the built message.
    public com.google.cloud.speech.v1beta1.AsyncRecognizeResponse buildPartial() {
      com.google.cloud.speech.v1beta1.AsyncRecognizeResponse result =
          new com.google.cloud.speech.v1beta1.AsyncRecognizeResponse(this);
      int from_bitField0_ = bitField0_;
      if (resultsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          results_ = java.util.Collections.unmodifiableList(results_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.results_ = results_;
      } else {
        result.results_ = resultsBuilder_.build();
      }
      onBuilt();
      return result;
    }
    public Builder clone() {
      return (Builder) super.clone();
    }
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    // Dynamic dispatch: narrow to the typed overload when possible.
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.speech.v1beta1.AsyncRecognizeResponse) {
        return mergeFrom((com.google.cloud.speech.v1beta1.AsyncRecognizeResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Appends other's results to this builder's results. When this builder is
    // empty it adopts other's (immutable) list directly and defers copying
    // until the next mutation (see ensureResultsIsMutable()).
    public Builder mergeFrom(com.google.cloud.speech.v1beta1.AsyncRecognizeResponse other) {
      if (other == com.google.cloud.speech.v1beta1.AsyncRecognizeResponse.getDefaultInstance())
        return this;
      if (resultsBuilder_ == null) {
        if (!other.results_.isEmpty()) {
          if (results_.isEmpty()) {
            results_ = other.results_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureResultsIsMutable();
            results_.addAll(other.results_);
          }
          onChanged();
        }
      } else {
        if (!other.results_.isEmpty()) {
          if (resultsBuilder_.isEmpty()) {
            resultsBuilder_.dispose();
            resultsBuilder_ = null;
            results_ = other.results_;
            bitField0_ = (bitField0_ & ~0x00000001);
            resultsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getResultsFieldBuilder()
                    : null;
          } else {
            resultsBuilder_.addAllMessages(other.results_);
          }
        }
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
    // No required fields in this message, so it is always initialized.
    public final boolean isInitialized() {
      return true;
    }
    // Parses from a stream and merges the result; on a parse error, any
    // partially-parsed message is still merged before rethrowing.
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.speech.v1beta1.AsyncRecognizeResponse parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.cloud.speech.v1beta1.AsyncRecognizeResponse) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    // Bit 0: set when results_ is a mutable list owned by this builder.
    private int bitField0_;
    private java.util.List<com.google.cloud.speech.v1beta1.SpeechRecognitionResult> results_ =
        java.util.Collections.emptyList();
    // Copy-on-first-write: replace a shared/immutable results_ with a private
    // ArrayList before mutating.
    private void ensureResultsIsMutable() {
      if (!((bitField0_ & 0x00000001) == 0x00000001)) {
        results_ =
            new java.util.ArrayList<com.google.cloud.speech.v1beta1.SpeechRecognitionResult>(
                results_);
        bitField0_ |= 0x00000001;
      }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.speech.v1beta1.SpeechRecognitionResult,
            com.google.cloud.speech.v1beta1.SpeechRecognitionResult.Builder,
            com.google.cloud.speech.v1beta1.SpeechRecognitionResultOrBuilder>
        resultsBuilder_;
    /**
     *
     *
     * <pre>
     * *Output-only* Sequential list of transcription results corresponding to
     * sequential portions of audio.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v1beta1.SpeechRecognitionResult results = 2;</code>
     */
    public java.util.List<com.google.cloud.speech.v1beta1.SpeechRecognitionResult>
        getResultsList() {
      if (resultsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(results_);
      } else {
        return resultsBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * *Output-only* Sequential list of transcription results corresponding to
     * sequential portions of audio.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v1beta1.SpeechRecognitionResult results = 2;</code>
     */
    public int getResultsCount() {
      if (resultsBuilder_ == null) {
        return results_.size();
      } else {
        return resultsBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * *Output-only* Sequential list of transcription results corresponding to
     * sequential portions of audio.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v1beta1.SpeechRecognitionResult results = 2;</code>
     */
    public com.google.cloud.speech.v1beta1.SpeechRecognitionResult getResults(int index) {
      if (resultsBuilder_ == null) {
        return results_.get(index);
      } else {
        return resultsBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * *Output-only* Sequential list of transcription results corresponding to
     * sequential portions of audio.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v1beta1.SpeechRecognitionResult results = 2;</code>
     */
    public Builder setResults(
        int index, com.google.cloud.speech.v1beta1.SpeechRecognitionResult value) {
      if (resultsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureResultsIsMutable();
        results_.set(index, value);
        onChanged();
      } else {
        resultsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * *Output-only* Sequential list of transcription results corresponding to
     * sequential portions of audio.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v1beta1.SpeechRecognitionResult results = 2;</code>
     */
    public Builder setResults(
        int index,
        com.google.cloud.speech.v1beta1.SpeechRecognitionResult.Builder builderForValue) {
      if (resultsBuilder_ == null) {
        ensureResultsIsMutable();
        results_.set(index, builderForValue.build());
        onChanged();
      } else {
        resultsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * *Output-only* Sequential list of transcription results corresponding to
     * sequential portions of audio.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v1beta1.SpeechRecognitionResult results = 2;</code>
     */
    public Builder addResults(com.google.cloud.speech.v1beta1.SpeechRecognitionResult value) {
      if (resultsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureResultsIsMutable();
        results_.add(value);
        onChanged();
      } else {
        resultsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * *Output-only* Sequential list of transcription results corresponding to
     * sequential portions of audio.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v1beta1.SpeechRecognitionResult results = 2;</code>
     */
    public Builder addResults(
        int index, com.google.cloud.speech.v1beta1.SpeechRecognitionResult value) {
      if (resultsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureResultsIsMutable();
        results_.add(index, value);
        onChanged();
      } else {
        resultsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * *Output-only* Sequential list of transcription results corresponding to
     * sequential portions of audio.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v1beta1.SpeechRecognitionResult results = 2;</code>
     */
    public Builder addResults(
        com.google.cloud.speech.v1beta1.SpeechRecognitionResult.Builder builderForValue) {
      if (resultsBuilder_ == null) {
        ensureResultsIsMutable();
        results_.add(builderForValue.build());
        onChanged();
      } else {
        resultsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * *Output-only* Sequential list of transcription results corresponding to
     * sequential portions of audio.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v1beta1.SpeechRecognitionResult results = 2;</code>
     */
    public Builder addResults(
        int index,
        com.google.cloud.speech.v1beta1.SpeechRecognitionResult.Builder builderForValue) {
      if (resultsBuilder_ == null) {
        ensureResultsIsMutable();
        results_.add(index, builderForValue.build());
        onChanged();
      } else {
        resultsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * *Output-only* Sequential list of transcription results corresponding to
     * sequential portions of audio.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v1beta1.SpeechRecognitionResult results = 2;</code>
     */
    public Builder addAllResults(
        java.lang.Iterable<? extends com.google.cloud.speech.v1beta1.SpeechRecognitionResult>
            values) {
      if (resultsBuilder_ == null) {
        ensureResultsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, results_);
        onChanged();
      } else {
        resultsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * *Output-only* Sequential list of transcription results corresponding to
     * sequential portions of audio.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v1beta1.SpeechRecognitionResult results = 2;</code>
     */
    public Builder clearResults() {
      if (resultsBuilder_ == null) {
        results_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        resultsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * *Output-only* Sequential list of transcription results corresponding to
     * sequential portions of audio.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v1beta1.SpeechRecognitionResult results = 2;</code>
     */
    public Builder removeResults(int index) {
      if (resultsBuilder_ == null) {
        ensureResultsIsMutable();
        results_.remove(index);
        onChanged();
      } else {
        resultsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * *Output-only* Sequential list of transcription results corresponding to
     * sequential portions of audio.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v1beta1.SpeechRecognitionResult results = 2;</code>
     */
    public com.google.cloud.speech.v1beta1.SpeechRecognitionResult.Builder getResultsBuilder(
        int index) {
      return getResultsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * *Output-only* Sequential list of transcription results corresponding to
     * sequential portions of audio.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v1beta1.SpeechRecognitionResult results = 2;</code>
     */
    public com.google.cloud.speech.v1beta1.SpeechRecognitionResultOrBuilder getResultsOrBuilder(
        int index) {
      if (resultsBuilder_ == null) {
        return results_.get(index);
      } else {
        return resultsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * *Output-only* Sequential list of transcription results corresponding to
     * sequential portions of audio.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v1beta1.SpeechRecognitionResult results = 2;</code>
     */
    public java.util.List<
            ? extends com.google.cloud.speech.v1beta1.SpeechRecognitionResultOrBuilder>
        getResultsOrBuilderList() {
      if (resultsBuilder_ != null) {
        return resultsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(results_);
      }
    }
    /**
     *
     *
     * <pre>
     * *Output-only* Sequential list of transcription results corresponding to
     * sequential portions of audio.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v1beta1.SpeechRecognitionResult results = 2;</code>
     */
    public com.google.cloud.speech.v1beta1.SpeechRecognitionResult.Builder addResultsBuilder() {
      return getResultsFieldBuilder()
          .addBuilder(com.google.cloud.speech.v1beta1.SpeechRecognitionResult.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * *Output-only* Sequential list of transcription results corresponding to
     * sequential portions of audio.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v1beta1.SpeechRecognitionResult results = 2;</code>
     */
    public com.google.cloud.speech.v1beta1.SpeechRecognitionResult.Builder addResultsBuilder(
        int index) {
      return getResultsFieldBuilder()
          .addBuilder(
              index, com.google.cloud.speech.v1beta1.SpeechRecognitionResult.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * *Output-only* Sequential list of transcription results corresponding to
     * sequential portions of audio.
     * </pre>
     *
     * <code>repeated .google.cloud.speech.v1beta1.SpeechRecognitionResult results = 2;</code>
     */
    public java.util.List<com.google.cloud.speech.v1beta1.SpeechRecognitionResult.Builder>
        getResultsBuilderList() {
      return getResultsFieldBuilder().getBuilderList();
    }
    // Lazily creates the repeated-field builder; once created it takes
    // ownership of results_ (the plain list reference is nulled out).
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.speech.v1beta1.SpeechRecognitionResult,
            com.google.cloud.speech.v1beta1.SpeechRecognitionResult.Builder,
            com.google.cloud.speech.v1beta1.SpeechRecognitionResultOrBuilder>
        getResultsFieldBuilder() {
      if (resultsBuilder_ == null) {
        resultsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.speech.v1beta1.SpeechRecognitionResult,
                com.google.cloud.speech.v1beta1.SpeechRecognitionResult.Builder,
                com.google.cloud.speech.v1beta1.SpeechRecognitionResultOrBuilder>(
                results_,
                ((bitField0_ & 0x00000001) == 0x00000001),
                getParentForChildren(),
                isClean());
        results_ = null;
      }
      return resultsBuilder_;
    }
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFieldsProto3(unknownFields);
    }
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.speech.v1beta1.AsyncRecognizeResponse)
  }
// @@protoc_insertion_point(class_scope:google.cloud.speech.v1beta1.AsyncRecognizeResponse)
  // Singleton default (empty) instance shared by getDefaultInstance() and
  // toBuilder()/newBuilder().
  private static final com.google.cloud.speech.v1beta1.AsyncRecognizeResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.speech.v1beta1.AsyncRecognizeResponse();
  }
  public static com.google.cloud.speech.v1beta1.AsyncRecognizeResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser singleton: parsePartialFrom delegates to the message's
  // stream-reading constructor.
  private static final com.google.protobuf.Parser<AsyncRecognizeResponse> PARSER =
      new com.google.protobuf.AbstractParser<AsyncRecognizeResponse>() {
        public AsyncRecognizeResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new AsyncRecognizeResponse(input, extensionRegistry);
        }
      };
  public static com.google.protobuf.Parser<AsyncRecognizeResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<AsyncRecognizeResponse> getParserForType() {
    return PARSER;
  }
  public com.google.cloud.speech.v1beta1.AsyncRecognizeResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.aliyun.oss;
import com.aliyun.oss.ClientConfiguration;
import com.aliyun.oss.ClientException;
import com.aliyun.oss.OSSClient;
import com.aliyun.oss.OSSException;
import com.aliyun.oss.common.auth.CredentialsProvider;
import com.aliyun.oss.common.comm.Protocol;
import com.aliyun.oss.model.AbortMultipartUploadRequest;
import com.aliyun.oss.model.CannedAccessControlList;
import com.aliyun.oss.model.CompleteMultipartUploadRequest;
import com.aliyun.oss.model.CompleteMultipartUploadResult;
import com.aliyun.oss.model.CopyObjectResult;
import com.aliyun.oss.model.DeleteObjectsRequest;
import com.aliyun.oss.model.DeleteObjectsResult;
import com.aliyun.oss.model.GetObjectRequest;
import com.aliyun.oss.model.InitiateMultipartUploadRequest;
import com.aliyun.oss.model.InitiateMultipartUploadResult;
import com.aliyun.oss.model.ListObjectsRequest;
import com.aliyun.oss.model.ObjectMetadata;
import com.aliyun.oss.model.ObjectListing;
import com.aliyun.oss.model.OSSObjectSummary;
import com.aliyun.oss.model.PartETag;
import com.aliyun.oss.model.PutObjectResult;
import com.aliyun.oss.model.UploadPartCopyRequest;
import com.aliyun.oss.model.UploadPartCopyResult;
import com.aliyun.oss.model.UploadPartRequest;
import com.aliyun.oss.model.UploadPartResult;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.util.VersionInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.ListIterator;
import java.util.NoSuchElementException;
import static org.apache.hadoop.fs.aliyun.oss.Constants.*;
/**
* Core implementation of Aliyun OSS Filesystem for Hadoop.
* Provides the bridging logic between Hadoop's abstract filesystem and
* Aliyun OSS.
*/
public class AliyunOSSFileSystemStore {
  public static final Logger LOG =
      LoggerFactory.getLogger(AliyunOSSFileSystemStore.class);
  // Hadoop per-filesystem read/write operation counters, set in initialize().
  private FileSystem.Statistics statistics;
  // Underlying Aliyun SDK client; null after close().
  private OSSClient ossClient;
  // Bucket name, taken from the host part of the filesystem URI.
  private String bucketName;
  // Part size (bytes) used when copying objects with the multipart API.
  private long uploadPartSize;
  // Objects larger than this (bytes) are copied with the multipart API;
  // clamped to [5 MB, 1 GB] in initialize().
  private long multipartThreshold;
  // Maximum number of keys fetched per listObjects page.
  private int maxKeys;
  // Server-side encryption algorithm name; empty string means none.
  private String serverSideEncryptionAlgorithm;
public void initialize(URI uri, Configuration conf,
FileSystem.Statistics stat) throws IOException {
statistics = stat;
ClientConfiguration clientConf = new ClientConfiguration();
clientConf.setMaxConnections(conf.getInt(MAXIMUM_CONNECTIONS_KEY,
MAXIMUM_CONNECTIONS_DEFAULT));
boolean secureConnections = conf.getBoolean(SECURE_CONNECTIONS_KEY,
SECURE_CONNECTIONS_DEFAULT);
clientConf.setProtocol(secureConnections ? Protocol.HTTPS : Protocol.HTTP);
clientConf.setMaxErrorRetry(conf.getInt(MAX_ERROR_RETRIES_KEY,
MAX_ERROR_RETRIES_DEFAULT));
clientConf.setConnectionTimeout(conf.getInt(ESTABLISH_TIMEOUT_KEY,
ESTABLISH_TIMEOUT_DEFAULT));
clientConf.setSocketTimeout(conf.getInt(SOCKET_TIMEOUT_KEY,
SOCKET_TIMEOUT_DEFAULT));
clientConf.setUserAgent(
conf.get(USER_AGENT_PREFIX, USER_AGENT_PREFIX_DEFAULT) + ", Hadoop/"
+ VersionInfo.getVersion());
String proxyHost = conf.getTrimmed(PROXY_HOST_KEY, "");
int proxyPort = conf.getInt(PROXY_PORT_KEY, -1);
if (StringUtils.isNotEmpty(proxyHost)) {
clientConf.setProxyHost(proxyHost);
if (proxyPort >= 0) {
clientConf.setProxyPort(proxyPort);
} else {
if (secureConnections) {
LOG.warn("Proxy host set without port. Using HTTPS default 443");
clientConf.setProxyPort(443);
} else {
LOG.warn("Proxy host set without port. Using HTTP default 80");
clientConf.setProxyPort(80);
}
}
String proxyUsername = conf.getTrimmed(PROXY_USERNAME_KEY);
String proxyPassword = conf.getTrimmed(PROXY_PASSWORD_KEY);
if ((proxyUsername == null) != (proxyPassword == null)) {
String msg = "Proxy error: " + PROXY_USERNAME_KEY + " or " +
PROXY_PASSWORD_KEY + " set without the other.";
LOG.error(msg);
throw new IllegalArgumentException(msg);
}
clientConf.setProxyUsername(proxyUsername);
clientConf.setProxyPassword(proxyPassword);
clientConf.setProxyDomain(conf.getTrimmed(PROXY_DOMAIN_KEY));
clientConf.setProxyWorkstation(conf.getTrimmed(PROXY_WORKSTATION_KEY));
} else if (proxyPort >= 0) {
String msg = "Proxy error: " + PROXY_PORT_KEY + " set without " +
PROXY_HOST_KEY;
LOG.error(msg);
throw new IllegalArgumentException(msg);
}
String endPoint = conf.getTrimmed(ENDPOINT_KEY, "");
if (StringUtils.isEmpty(endPoint)) {
throw new IllegalArgumentException("Aliyun OSS endpoint should not be " +
"null or empty. Please set proper endpoint with 'fs.oss.endpoint'.");
}
CredentialsProvider provider =
AliyunOSSUtils.getCredentialsProvider(conf);
ossClient = new OSSClient(endPoint, provider, clientConf);
uploadPartSize = AliyunOSSUtils.getMultipartSizeProperty(conf,
MULTIPART_UPLOAD_PART_SIZE_KEY, MULTIPART_UPLOAD_PART_SIZE_DEFAULT);
multipartThreshold = conf.getLong(MIN_MULTIPART_UPLOAD_THRESHOLD_KEY,
MIN_MULTIPART_UPLOAD_THRESHOLD_DEFAULT);
serverSideEncryptionAlgorithm =
conf.get(SERVER_SIDE_ENCRYPTION_ALGORITHM_KEY, "");
if (multipartThreshold < 5 * 1024 * 1024) {
LOG.warn(MIN_MULTIPART_UPLOAD_THRESHOLD_KEY + " must be at least 5 MB");
multipartThreshold = 5 * 1024 * 1024;
}
if (multipartThreshold > 1024 * 1024 * 1024) {
LOG.warn(MIN_MULTIPART_UPLOAD_THRESHOLD_KEY + " must be less than 1 GB");
multipartThreshold = 1024 * 1024 * 1024;
}
String cannedACLName = conf.get(CANNED_ACL_KEY, CANNED_ACL_DEFAULT);
if (StringUtils.isNotEmpty(cannedACLName)) {
CannedAccessControlList cannedACL =
CannedAccessControlList.valueOf(cannedACLName);
ossClient.setBucketAcl(bucketName, cannedACL);
}
maxKeys = conf.getInt(MAX_PAGING_KEYS_KEY, MAX_PAGING_KEYS_DEFAULT);
bucketName = uri.getHost();
}
/**
* Delete an object, and update write operation statistics.
*
* @param key key to blob to delete.
*/
public void deleteObject(String key) {
ossClient.deleteObject(bucketName, key);
statistics.incrementWriteOps(1);
}
/**
* Delete a list of keys, and update write operation statistics.
*
* @param keysToDelete collection of keys to delete.
* @throws IOException if failed to delete objects.
*/
public void deleteObjects(List<String> keysToDelete) throws IOException {
if (CollectionUtils.isEmpty(keysToDelete)) {
LOG.warn("Keys to delete is empty.");
return;
}
int retry = 10;
int tries = 0;
List<String> deleteFailed = keysToDelete;
while(CollectionUtils.isNotEmpty(deleteFailed)) {
DeleteObjectsRequest deleteRequest = new DeleteObjectsRequest(bucketName);
deleteRequest.setKeys(deleteFailed);
// There are two modes to do batch delete:
// 1. detail mode: DeleteObjectsResult.getDeletedObjects returns objects
// which were deleted successfully.
// 2. simple mode: DeleteObjectsResult.getDeletedObjects returns objects
// which were deleted unsuccessfully.
// Here, we choose the simple mode to do batch delete.
deleteRequest.setQuiet(true);
DeleteObjectsResult result = ossClient.deleteObjects(deleteRequest);
deleteFailed = result.getDeletedObjects();
tries++;
if (tries == retry) {
break;
}
}
if (tries == retry && CollectionUtils.isNotEmpty(deleteFailed)) {
// Most of time, it is impossible to try 10 times, expect the
// Aliyun OSS service problems.
throw new IOException("Failed to delete Aliyun OSS objects for " +
tries + " times.");
}
}
/**
* Delete a directory from Aliyun OSS.
*
* @param key directory key to delete.
* @throws IOException if failed to delete directory.
*/
public void deleteDirs(String key) throws IOException {
key = AliyunOSSUtils.maybeAddTrailingSlash(key);
ListObjectsRequest listRequest = new ListObjectsRequest(bucketName);
listRequest.setPrefix(key);
listRequest.setDelimiter(null);
listRequest.setMaxKeys(maxKeys);
while (true) {
ObjectListing objects = ossClient.listObjects(listRequest);
statistics.incrementReadOps(1);
List<String> keysToDelete = new ArrayList<String>();
for (OSSObjectSummary objectSummary : objects.getObjectSummaries()) {
keysToDelete.add(objectSummary.getKey());
}
deleteObjects(keysToDelete);
if (objects.isTruncated()) {
listRequest.setMarker(objects.getNextMarker());
} else {
break;
}
}
}
  /**
   * Return metadata of a given object key.
   *
   * @param key object key.
   * @return return null if key does not exist.
   */
  public ObjectMetadata getObjectMetadata(String key) {
    try {
      return ossClient.getObjectMetadata(bucketName, key);
    } catch (OSSException osse) {
      // NOTE(review): any OSSException (not only "no such key") is mapped to
      // null here, so callers cannot distinguish a missing object from a
      // service-side error.
      return null;
    } finally {
      // Counted as one read op whether or not the lookup succeeded.
      statistics.incrementReadOps(1);
    }
  }
/**
* Upload an empty file as an OSS object, using single upload.
*
* @param key object key.
* @throws IOException if failed to upload object.
*/
public void storeEmptyFile(String key) throws IOException {
ObjectMetadata dirMeta = new ObjectMetadata();
byte[] buffer = new byte[0];
ByteArrayInputStream in = new ByteArrayInputStream(buffer);
dirMeta.setContentLength(0);
try {
ossClient.putObject(bucketName, key, in, dirMeta);
} finally {
in.close();
}
}
/**
* Copy an object from source key to destination key.
*
* @param srcKey source key.
* @param dstKey destination key.
* @return true if file is successfully copied.
*/
public boolean copyFile(String srcKey, String dstKey) {
ObjectMetadata objectMeta =
ossClient.getObjectMetadata(bucketName, srcKey);
long contentLength = objectMeta.getContentLength();
if (contentLength <= multipartThreshold) {
return singleCopy(srcKey, dstKey);
} else {
return multipartCopy(srcKey, contentLength, dstKey);
}
}
/**
* Use single copy to copy an OSS object.
* (The caller should make sure srcPath is a file and dstPath is valid)
*
* @param srcKey source key.
* @param dstKey destination key.
* @return true if object is successfully copied.
*/
private boolean singleCopy(String srcKey, String dstKey) {
CopyObjectResult copyResult =
ossClient.copyObject(bucketName, srcKey, bucketName, dstKey);
LOG.debug(copyResult.getETag());
return true;
}
  /**
   * Use multipart copy to copy an OSS object.
   * (The caller should make sure srcPath is a file and dstPath is valid)
   *
   * @param srcKey source key.
   * @param contentLength data size of the object to copy.
   * @param dstKey destination key.
   * @return true if success, or false if upload is aborted.
   */
  private boolean multipartCopy(String srcKey, long contentLength,
      String dstKey) {
    long realPartSize =
        AliyunOSSUtils.calculatePartSize(contentLength, uploadPartSize);
    // Number of parts, rounded up so a trailing partial part is included.
    int partNum = (int) (contentLength / realPartSize);
    if (contentLength % realPartSize != 0) {
      partNum++;
    }
    InitiateMultipartUploadRequest initiateMultipartUploadRequest =
        new InitiateMultipartUploadRequest(bucketName, dstKey);
    ObjectMetadata meta = new ObjectMetadata();
    if (StringUtils.isNotEmpty(serverSideEncryptionAlgorithm)) {
      meta.setServerSideEncryption(serverSideEncryptionAlgorithm);
    }
    initiateMultipartUploadRequest.setObjectMetadata(meta);
    InitiateMultipartUploadResult initiateMultipartUploadResult =
        ossClient.initiateMultipartUpload(initiateMultipartUploadRequest);
    String uploadId = initiateMultipartUploadResult.getUploadId();
    List<PartETag> partETags = new ArrayList<PartETag>();
    try {
      // Copy each part server-side; OSS part numbers are 1-based.
      for (int i = 0; i < partNum; i++) {
        long skipBytes = realPartSize * i;
        // The final part may be shorter than realPartSize.
        long size = (realPartSize < contentLength - skipBytes) ?
            realPartSize : contentLength - skipBytes;
        UploadPartCopyRequest partCopyRequest = new UploadPartCopyRequest();
        partCopyRequest.setSourceBucketName(bucketName);
        partCopyRequest.setSourceKey(srcKey);
        partCopyRequest.setBucketName(bucketName);
        partCopyRequest.setKey(dstKey);
        partCopyRequest.setUploadId(uploadId);
        partCopyRequest.setPartSize(size);
        partCopyRequest.setBeginIndex(skipBytes);
        partCopyRequest.setPartNumber(i + 1);
        UploadPartCopyResult partCopyResult =
            ossClient.uploadPartCopy(partCopyRequest);
        statistics.incrementWriteOps(1);
        partETags.add(partCopyResult.getPartETag());
      }
      CompleteMultipartUploadRequest completeMultipartUploadRequest =
          new CompleteMultipartUploadRequest(bucketName, dstKey,
              uploadId, partETags);
      CompleteMultipartUploadResult completeMultipartUploadResult =
          ossClient.completeMultipartUpload(completeMultipartUploadRequest);
      LOG.debug(completeMultipartUploadResult.getETag());
      return true;
    } catch (OSSException | ClientException e) {
      // Abort so the partially-copied upload does not linger on the
      // service side.
      AbortMultipartUploadRequest abortMultipartUploadRequest =
          new AbortMultipartUploadRequest(bucketName, dstKey, uploadId);
      ossClient.abortMultipartUpload(abortMultipartUploadRequest);
      return false;
    }
  }
/**
 * Upload a file as an OSS object, using single upload.
 *
 * @param key object key.
 * @param file local file to upload.
 * @throws IOException if failed to upload object.
 */
public void uploadObject(String key, File file) throws IOException {
  File object = file.getAbsoluteFile();
  ObjectMetadata meta = new ObjectMetadata();
  meta.setContentLength(object.length());
  if (StringUtils.isNotEmpty(serverSideEncryptionAlgorithm)) {
    meta.setServerSideEncryption(serverSideEncryptionAlgorithm);
  }
  // try-with-resources: the original opened the stream before preparing the
  // metadata, so an exception there would have leaked the file handle.
  try (FileInputStream fis = new FileInputStream(object)) {
    PutObjectResult result = ossClient.putObject(bucketName, key, fis, meta);
    LOG.debug(result.getETag());
    statistics.incrementWriteOps(1);
  }
}
/**
 * List objects stored under the given prefix.
 *
 * @param prefix prefix.
 * @param maxListingLength max no. of entries in one response page.
 * @param marker last key in any previous search, or null to start over.
 * @param recursive whether to list directory recursively.
 * @return a list of matches.
 */
public ObjectListing listObjects(String prefix, int maxListingLength,
    String marker, boolean recursive) {
  ListObjectsRequest listRequest = new ListObjectsRequest(bucketName);
  listRequest.setPrefix(AliyunOSSUtils.maybeAddTrailingSlash(prefix));
  listRequest.setMaxKeys(maxListingLength);
  listRequest.setMarker(marker);
  // A "/" delimiter makes OSS stop at directory boundaries; passing null
  // yields a fully recursive listing.
  listRequest.setDelimiter(recursive ? null : "/");
  ObjectListing listing = ossClient.listObjects(listRequest);
  statistics.incrementReadOps(1);
  return listing;
}
/**
 * Retrieve a part of an object.
 *
 * @param key the object name that is being retrieved from the Aliyun OSS.
 * @param byteStart start position (inclusive).
 * @param byteEnd end position.
 * @return the content stream for the requested range; this method returns
 *         null if the key is not found or the request fails.
 */
public InputStream retrieve(String key, long byteStart, long byteEnd) {
  try {
    GetObjectRequest request = new GetObjectRequest(bucketName, key);
    request.setRange(byteStart, byteEnd);
    return ossClient.getObject(request).getObjectContent();
  } catch (OSSException | ClientException e) {
    // Keep the null-on-failure contract, but record the cause instead of
    // swallowing it silently — callers otherwise cannot distinguish a
    // missing key from a transient service error.
    LOG.error("Exception thrown when retrieving key: " + key, e);
    return null;
  }
}
/**
 * Close OSS client properly. Safe to call more than once.
 */
public void close() {
  if (ossClient == null) {
    return; // already closed
  }
  ossClient.shutdown();
  ossClient = null;
}
/**
 * Clean up all objects matching the prefix. Failures are logged, not
 * rethrown (best-effort cleanup).
 *
 * @param prefix Aliyun OSS object prefix.
 * @throws IOException if failed to clean up objects.
 */
public void purge(String prefix) throws IOException {
  try {
    // NOTE(review): only the first listing page (up to maxKeys entries) is
    // processed; truncated listings are not followed — confirm intended.
    ObjectListing objects = listObjects(prefix, maxKeys, null, true);
    for (OSSObjectSummary object : objects.getObjectSummaries()) {
      ossClient.deleteObject(bucketName, object.getKey());
    }
    for (String dir : objects.getCommonPrefixes()) {
      deleteDirs(dir);
    }
  } catch (OSSException | ClientException e) {
    // Include the exception itself; the original logged only the message
    // string, losing the stack trace of the failure.
    LOG.error("Failed to purge " + prefix, e);
  }
}
/**
 * Wraps a single {@link FileStatus} (possibly null) as a one-element
 * {@link RemoteIterator}; a null status yields an empty iterator.
 */
public RemoteIterator<LocatedFileStatus> singleStatusRemoteIterator(
    final FileStatus fileStatus, final BlockLocation[] locations) {
  return new RemoteIterator<LocatedFileStatus>() {
    // Flipped to true once the single element has been handed out.
    private boolean consumed = false;

    @Override
    public boolean hasNext() throws IOException {
      return !consumed && fileStatus != null;
    }

    @Override
    public LocatedFileStatus next() throws IOException {
      if (!hasNext()) {
        throw new NoSuchElementException();
      }
      consumed = true;
      // Block locations only make sense for regular files.
      BlockLocation[] locs = fileStatus.isFile() ? locations : null;
      return new LocatedFileStatus(fileStatus, locs);
    }
  };
}
/**
 * Creates a lazily-paging iterator over objects under {@code prefix},
 * converting OSS object summaries and common prefixes into
 * {@link LocatedFileStatus} entries that pass both {@code filter} and
 * {@code acceptor}.
 */
public RemoteIterator<LocatedFileStatus> createLocatedFileStatusIterator(
    final String prefix, final int maxListingLength, FileSystem fs,
    PathFilter filter, FileStatusAcceptor acceptor, String delimiter) {
  return new RemoteIterator<LocatedFileStatus>() {
    // Marker to resume the OSS listing from; null means list from the start.
    private String nextMarker = null;
    private boolean firstListing = true;
    // Set once OSS reports the listing is no longer truncated.
    private boolean meetEnd = false;
    // Iterator over the accepted statuses of the most recently fetched page.
    private ListIterator<FileStatus> batchIterator;

    @Override
    public boolean hasNext() throws IOException {
      if (firstListing) {
        requestNextBatch();
        firstListing = false;
      }
      // NOTE(review): if a fetched page is filtered down to empty while the
      // listing is still truncated, iteration stops here even though later
      // pages could contain matches — confirm this is intended.
      return batchIterator.hasNext() || requestNextBatch();
    }

    @Override
    public LocatedFileStatus next() throws IOException {
      if (hasNext()) {
        FileStatus status = batchIterator.next();
        BlockLocation[] locations = fs.getFileBlockLocations(status,
            0, status.getLen());
        // Block locations only for files, not directories.
        return new LocatedFileStatus(
            status, status.isFile() ? locations : null);
      } else {
        throw new NoSuchElementException();
      }
    }

    // Fetches the next page of the listing into batchIterator.
    // Returns true iff the new page contains at least one accepted status.
    private boolean requestNextBatch() {
      if (meetEnd) {
        return false;
      }
      ListObjectsRequest listRequest = new ListObjectsRequest(bucketName);
      listRequest.setPrefix(AliyunOSSUtils.maybeAddTrailingSlash(prefix));
      listRequest.setMaxKeys(maxListingLength);
      listRequest.setMarker(nextMarker);
      listRequest.setDelimiter(delimiter);
      ObjectListing listing = ossClient.listObjects(listRequest);
      List<FileStatus> stats = new ArrayList<>(
          listing.getObjectSummaries().size() +
          listing.getCommonPrefixes().size());
      // Object summaries become file (or placeholder-directory) statuses.
      for (OSSObjectSummary summary : listing.getObjectSummaries()) {
        String key = summary.getKey();
        Path path = fs.makeQualified(new Path("/" + key));
        if (filter.accept(path) && acceptor.accept(path, summary)) {
          // A key ending in "/" is a directory placeholder object.
          FileStatus status = new FileStatus(summary.getSize(),
              key.endsWith("/"), 1, fs.getDefaultBlockSize(path),
              summary.getLastModified().getTime(), path);
          stats.add(status);
        }
      }
      // Common prefixes are reported as zero-length directories.
      for (String commonPrefix : listing.getCommonPrefixes()) {
        Path path = fs.makeQualified(new Path("/" + commonPrefix));
        if (filter.accept(path) && acceptor.accept(path, commonPrefix)) {
          FileStatus status = new FileStatus(0, true, 1, 0, 0, path);
          stats.add(status);
        }
      }
      batchIterator = stats.listIterator();
      if (listing.isTruncated()) {
        nextMarker = listing.getNextMarker();
      } else {
        meetEnd = true;
      }
      statistics.incrementReadOps(1);
      return batchIterator.hasNext();
    }
  };
}
/**
 * Upload one part of a multipart upload, retrying up to three times.
 *
 * @param file local file holding exactly this part's bytes.
 * @param key destination object key.
 * @param uploadId id of the multipart upload this part belongs to.
 * @param idx 1-based part number.
 * @return the part's ETag on success.
 * @throws IOException if all retries fail; the last failure is the cause.
 */
public PartETag uploadPart(File file, String key, String uploadId, int idx)
    throws IOException {
  Exception caught = null;
  int tries = 3;
  while (tries > 0) {
    // try-with-resources replaces the manual finally-close: a failure in
    // close() can no longer mask the real upload exception, and the stream
    // is closed on every path.
    try (InputStream instream = new FileInputStream(file)) {
      UploadPartRequest uploadRequest = new UploadPartRequest();
      uploadRequest.setBucketName(bucketName);
      uploadRequest.setKey(key);
      uploadRequest.setUploadId(uploadId);
      uploadRequest.setInputStream(instream);
      uploadRequest.setPartSize(file.length());
      uploadRequest.setPartNumber(idx);
      UploadPartResult uploadResult = ossClient.uploadPart(uploadRequest);
      return uploadResult.getPartETag();
    } catch (Exception e) {
      // Parameterized logging instead of string concatenation.
      LOG.debug("Failed to upload {}, try again.", file.getPath(), e);
      caught = e;
    }
    tries--;
  }
  assert (caught != null);
  throw new IOException("Failed to upload " + file.getPath() +
      " for 3 times.", caught);
}
/**
 * Initiate multipart upload and return the upload id assigned by OSS.
 *
 * @param key destination object key.
 * @return the new upload id.
 */
public String getUploadId(String key) {
  InitiateMultipartUploadRequest request =
      new InitiateMultipartUploadRequest(bucketName, key);
  InitiateMultipartUploadResult result =
      ossClient.initiateMultipartUpload(request);
  return result.getUploadId();
}
/**
 * Complete the specific multipart upload. The part list is sorted by part
 * number first, as required by the complete-upload API.
 */
public CompleteMultipartUploadResult completeMultipartUpload(String key,
    String uploadId, List<PartETag> partETags) {
  Collections.sort(partETags, new PartNumberAscendComparator());
  CompleteMultipartUploadRequest request = new CompleteMultipartUploadRequest(
      bucketName, key, uploadId, partETags);
  return ossClient.completeMultipartUpload(request);
}
/**
 * Abort the specific multipart upload, discarding any uploaded parts.
 *
 * @param key destination object key of the upload.
 * @param uploadId id of the multipart upload to abort.
 */
public void abortMultipartUpload(String key, String uploadId) {
  ossClient.abortMultipartUpload(
      new AbortMultipartUploadRequest(bucketName, key, uploadId));
}
/**
 * Orders {@link PartETag}s by ascending part number.
 *
 * <p>Uses {@link Integer#compare(int, int)} so equal part numbers compare
 * as 0. The previous implementation returned only 1 or -1 and never 0,
 * violating the {@link Comparator} contract (compare(x, x) must be 0 and
 * sgn(compare(x, y)) == -sgn(compare(y, x))), which can trigger
 * "Comparison method violates its general contract!" from TimSort.</p>
 */
private static class PartNumberAscendComparator
    implements Comparator<PartETag>, Serializable {
  @Override
  public int compare(PartETag o1, PartETag o2) {
    return Integer.compare(o1.getPartNumber(), o2.getPartNumber());
  }
}
}
|
|
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.sort;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import org.pentaho.di.core.CheckResult;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInjectionMetaEntry;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.pentaho.metastore.api.IMetaStore;
import org.w3c.dom.Node;
/*
* Created on 02-jun-2003
*/
/**
 * Metadata for the "Sort rows" step: the sort keys (field name, direction,
 * case sensitivity, pre-sorted flag) plus the external-sort settings such as
 * the temp-file directory/prefix, in-memory sort size, free-memory limit and
 * temp-file compression.
 */
public class SortRowsMeta extends BaseStepMeta implements StepMetaInterface {
  private static Class<?> PKG = SortRowsMeta.class; // for i18n purposes, needed by Translator2!!

  /** order by which fields? */
  private String[] fieldName;

  /** false : descending, true=ascending */
  private boolean[] ascending;

  /** false : case insensitive, true=case sensitive */
  private boolean[] caseSensitive;

  /** false : not a presorted field, true=presorted field */
  private boolean[] preSortedField;

  /** Lazily built list of the pre-sorted (group) field names; see {@link #getGroupFields()}. */
  private List<String> groupFields;

  /** Directory to store the temp files */
  private String directory;

  /** Temp files prefix... */
  private String prefix;

  /** The sort size: number of rows sorted and kept in memory */
  private String sortSize;

  /** The free memory limit in percentages in case we don't use the sort size */
  private String freeMemoryLimit;

  /** only pass unique rows to the output stream(s) */
  private boolean onlyPassingUniqueRows;

  /**
   * Compress files: if set to true, temporary files are compressed, thus reducing I/O at the cost of slightly higher
   * CPU usage
   */
  private boolean compressFiles;

  /** The variable to use to set the compressFiles option boolean */
  private String compressFilesVariable;

  public SortRowsMeta() {
    super(); // allocate BaseStepMeta
  }

  /**
   * @return Returns the ascending.
   */
  public boolean[] getAscending() {
    return ascending;
  }

  /**
   * @param ascending
   *          The ascending to set.
   */
  public void setAscending( boolean[] ascending ) {
    this.ascending = ascending;
  }

  /**
   * @return Returns the directory.
   */
  public String getDirectory() {
    return directory;
  }

  /**
   * @param directory
   *          The directory to set.
   */
  public void setDirectory( String directory ) {
    this.directory = directory;
  }

  /**
   * @return Returns the fieldName.
   */
  public String[] getFieldName() {
    return fieldName;
  }

  /**
   * @param fieldName
   *          The fieldName to set.
   */
  public void setFieldName( String[] fieldName ) {
    this.fieldName = fieldName;
  }

  /**
   * @return Returns the prefix.
   */
  public String getPrefix() {
    return prefix;
  }

  /**
   * @param prefix
   *          The prefix to set.
   */
  public void setPrefix( String prefix ) {
    this.prefix = prefix;
  }

  /**
   * Loads this step's settings from the given transformation XML node.
   */
  public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
    readData( stepnode );
  }

  /**
   * (Re-)allocates the per-field arrays for the given number of sort keys and
   * resets the cached group-field list so it is rebuilt on next access.
   */
  public void allocate( int nrfields ) {
    fieldName = new String[nrfields]; // order by
    ascending = new boolean[nrfields];
    caseSensitive = new boolean[nrfields];
    preSortedField = new boolean[nrfields];
    groupFields = null;
  }

  public Object clone() {
    SortRowsMeta retval = (SortRowsMeta) super.clone();
    int nrfields = fieldName.length;
    retval.allocate( nrfields );
    // Deep-copy the per-field arrays so the clone doesn't share mutable state.
    System.arraycopy( fieldName, 0, retval.fieldName, 0, nrfields );
    System.arraycopy( ascending, 0, retval.ascending, 0, nrfields );
    System.arraycopy( caseSensitive, 0, retval.caseSensitive, 0, nrfields );
    System.arraycopy( preSortedField, 0, retval.preSortedField, 0, nrfields );
    return retval;
  }

  private void readData( Node stepnode ) throws KettleXMLException {
    try {
      directory = XMLHandler.getTagValue( stepnode, "directory" );
      prefix = XMLHandler.getTagValue( stepnode, "prefix" );
      sortSize = XMLHandler.getTagValue( stepnode, "sort_size" );
      freeMemoryLimit = XMLHandler.getTagValue( stepnode, "free_memory" );
      compressFiles = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "compress" ) );
      compressFilesVariable = XMLHandler.getTagValue( stepnode, "compress_variable" );
      onlyPassingUniqueRows = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "unique_rows" ) );

      Node fields = XMLHandler.getSubNode( stepnode, "fields" );
      int nrfields = XMLHandler.countNodes( fields, "field" );

      allocate( nrfields );

      for ( int i = 0; i < nrfields; i++ ) {
        Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );

        fieldName[i] = XMLHandler.getTagValue( fnode, "name" );
        String asc = XMLHandler.getTagValue( fnode, "ascending" );
        ascending[i] = "Y".equalsIgnoreCase( asc );
        String sens = XMLHandler.getTagValue( fnode, "case_sensitive" );
        // An absent case_sensitive tag defaults to case-sensitive (backwards compatible).
        caseSensitive[i] = Const.isEmpty( sens ) || "Y".equalsIgnoreCase( sens );
        String presorted = XMLHandler.getTagValue( fnode, "presorted" );
        preSortedField[i] = "Y".equalsIgnoreCase( presorted );
      }
    } catch ( Exception e ) {
      throw new KettleXMLException( "Unable to load step info from XML", e );
    }
  }

  public void setDefault() {
    directory = "%%java.io.tmpdir%%";
    prefix = "out";
    sortSize = "1000000";
    freeMemoryLimit = null;
    compressFiles = false;
    compressFilesVariable = null;
    onlyPassingUniqueRows = false;

    // No sort keys by default; the loop below only runs if this is raised.
    int nrfields = 0;

    allocate( nrfields );

    for ( int i = 0; i < nrfields; i++ ) {
      fieldName[i] = "field" + i;
      caseSensitive[i] = true;
      preSortedField[i] = false;
    }
  }

  /**
   * Serializes this step's settings to transformation XML.
   */
  public String getXML() {
    // StringBuilder: no synchronization needed for this local buffer.
    StringBuilder retval = new StringBuilder( 256 );

    retval.append( "      " ).append( XMLHandler.addTagValue( "directory", directory ) );
    retval.append( "      " ).append( XMLHandler.addTagValue( "prefix", prefix ) );
    retval.append( "      " ).append( XMLHandler.addTagValue( "sort_size", sortSize ) );
    retval.append( "      " ).append( XMLHandler.addTagValue( "free_memory", freeMemoryLimit ) );
    retval.append( "      " ).append( XMLHandler.addTagValue( "compress", compressFiles ) );
    retval.append( "      " ).append( XMLHandler.addTagValue( "compress_variable", compressFilesVariable ) );
    retval.append( "      " ).append( XMLHandler.addTagValue( "unique_rows", onlyPassingUniqueRows ) );

    retval.append( "    <fields>" ).append( Const.CR );
    for ( int i = 0; i < fieldName.length; i++ ) {
      retval.append( "      <field>" ).append( Const.CR );
      retval.append( "        " ).append( XMLHandler.addTagValue( "name", fieldName[i] ) );
      retval.append( "        " ).append( XMLHandler.addTagValue( "ascending", ascending[i] ) );
      retval.append( "        " ).append( XMLHandler.addTagValue( "case_sensitive", caseSensitive[i] ) );
      retval.append( "        " ).append( XMLHandler.addTagValue( "presorted", preSortedField[i] ) );
      retval.append( "      </field>" ).append( Const.CR );
    }
    retval.append( "    </fields>" ).append( Const.CR );

    return retval.toString();
  }

  /**
   * Reads this step's settings from the repository.
   */
  public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
    try {
      directory = rep.getStepAttributeString( id_step, "directory" );
      prefix = rep.getStepAttributeString( id_step, "prefix" );
      sortSize = rep.getStepAttributeString( id_step, "sort_size" );
      freeMemoryLimit = rep.getStepAttributeString( id_step, "free_memory" );
      compressFiles = rep.getStepAttributeBoolean( id_step, "compress" );
      compressFilesVariable = rep.getStepAttributeString( id_step, "compress_variable" );
      onlyPassingUniqueRows = rep.getStepAttributeBoolean( id_step, "unique_rows" );

      int nrfields = rep.countNrStepAttributes( id_step, "field_name" );

      allocate( nrfields );

      for ( int i = 0; i < nrfields; i++ ) {
        fieldName[i] = rep.getStepAttributeString( id_step, i, "field_name" );
        ascending[i] = rep.getStepAttributeBoolean( id_step, i, "field_ascending" );
        // Missing attributes keep the historical defaults (sensitive, not presorted).
        caseSensitive[i] = rep.getStepAttributeBoolean( id_step, i, "field_case_sensitive", true );
        preSortedField[i] = rep.getStepAttributeBoolean( id_step, i, "field_presorted", false );
      }
    } catch ( Exception e ) {
      throw new KettleException( "Unexpected error reading step information from the repository", e );
    }
  }

  /**
   * Saves this step's settings to the repository.
   */
  public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
    try {
      rep.saveStepAttribute( id_transformation, id_step, "directory", directory );
      rep.saveStepAttribute( id_transformation, id_step, "prefix", prefix );
      rep.saveStepAttribute( id_transformation, id_step, "sort_size", sortSize );
      rep.saveStepAttribute( id_transformation, id_step, "free_memory", freeMemoryLimit );
      rep.saveStepAttribute( id_transformation, id_step, "compress", compressFiles );
      rep.saveStepAttribute( id_transformation, id_step, "compress_variable", compressFilesVariable );
      rep.saveStepAttribute( id_transformation, id_step, "unique_rows", onlyPassingUniqueRows );

      for ( int i = 0; i < fieldName.length; i++ ) {
        rep.saveStepAttribute( id_transformation, id_step, i, "field_name", fieldName[i] );
        rep.saveStepAttribute( id_transformation, id_step, i, "field_ascending", ascending[i] );
        rep.saveStepAttribute( id_transformation, id_step, i, "field_case_sensitive", caseSensitive[i] );
        rep.saveStepAttribute( id_transformation, id_step, i, "field_presorted", preSortedField[i] );
      }
    } catch ( Exception e ) {
      throw new KettleException( "Unable to save step information to the repository for id_step=" + id_step, e );
    }
  }

  public void getFields( RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep,
    VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
    // Set the sorted properties: ascending/descending
    for ( int i = 0; i < fieldName.length; i++ ) {
      int idx = inputRowMeta.indexOfValue( fieldName[i] );
      if ( idx >= 0 ) {
        ValueMetaInterface valueMeta = inputRowMeta.getValueMeta( idx );
        valueMeta.setSortedDescending( !ascending[i] );
        valueMeta.setCaseInsensitive( !caseSensitive[i] );

        // Also see if lazy conversion is active on these key fields.
        // If so we want to automatically convert them to the normal storage type.
        // This will improve performance, see also: PDI-346
        //
        valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
        valueMeta.setStorageMetadata( null );
      }
    }
  }

  public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
    RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
    Repository repository, IMetaStore metaStore ) {
    CheckResult cr;

    if ( prev != null && prev.size() > 0 ) {
      cr =
        new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(
          PKG, "SortRowsMeta.CheckResult.FieldsReceived", "" + prev.size() ), stepMeta );
      remarks.add( cr );

      String error_message = "";
      boolean error_found = false;

      // Verify that every configured sort key exists in the incoming row.
      for ( int i = 0; i < fieldName.length; i++ ) {
        int idx = prev.indexOfValue( fieldName[i] );
        if ( idx < 0 ) {
          error_message += "\t\t" + fieldName[i] + Const.CR;
          error_found = true;
        }
      }
      if ( error_found ) {
        error_message = BaseMessages.getString( PKG, "SortRowsMeta.CheckResult.SortKeysNotFound", error_message );

        cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta );
        remarks.add( cr );
      } else {
        if ( fieldName.length > 0 ) {
          cr =
            new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(
              PKG, "SortRowsMeta.CheckResult.AllSortKeysFound" ), stepMeta );
          remarks.add( cr );
        } else {
          cr =
            new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(
              PKG, "SortRowsMeta.CheckResult.NoSortKeysEntered" ), stepMeta );
          remarks.add( cr );
        }
      }

      // Check that the sort directory exists and really is a directory.
      String realDirectory = transMeta.environmentSubstitute( directory );

      File f = new File( realDirectory );
      if ( f.exists() ) {
        if ( f.isDirectory() ) {
          cr =
            new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(
              PKG, "SortRowsMeta.CheckResult.DirectoryExists", realDirectory ), stepMeta );
          remarks.add( cr );
        } else {
          cr =
            new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(
              PKG, "SortRowsMeta.CheckResult.ExistsButNoDirectory", realDirectory ), stepMeta );
          remarks.add( cr );
        }
      } else {
        cr =
          new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(
            PKG, "SortRowsMeta.CheckResult.DirectoryNotExists", realDirectory ), stepMeta );
        remarks.add( cr );
      }
    } else {
      cr =
        new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(
          PKG, "SortRowsMeta.CheckResult.NoFields" ), stepMeta );
      remarks.add( cr );
    }

    // See if we have input streams leading to this step!
    if ( input.length > 0 ) {
      cr =
        new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(
          PKG, "SortRowsMeta.CheckResult.ExpectedInputOk" ), stepMeta );
      remarks.add( cr );
    } else {
      cr =
        new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(
          PKG, "SortRowsMeta.CheckResult.ExpectedInputError" ), stepMeta );
      remarks.add( cr );
    }
  }

  public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
    TransMeta transMeta, Trans trans ) {
    return new SortRows( stepMeta, stepDataInterface, cnr, transMeta, trans );
  }

  public StepDataInterface getStepData() {
    return new SortRowsData();
  }

  /**
   * @return Returns the sortSize.
   */
  public String getSortSize() {
    return sortSize;
  }

  /**
   * @param sortSize
   *          The sortSize to set.
   */
  public void setSortSize( String sortSize ) {
    this.sortSize = sortSize;
  }

  /**
   * @return Returns whether temporary files should be compressed
   */
  public boolean getCompressFiles() {
    return compressFiles;
  }

  /**
   * @param compressFiles
   *          Whether to compress temporary files created during sorting
   */
  public void setCompressFiles( boolean compressFiles ) {
    this.compressFiles = compressFiles;
  }

  /**
   * @return the onlyPassingUniqueRows
   */
  public boolean isOnlyPassingUniqueRows() {
    return onlyPassingUniqueRows;
  }

  /**
   * @param onlyPassingUniqueRows
   *          the onlyPassingUniqueRows to set
   */
  public void setOnlyPassingUniqueRows( boolean onlyPassingUniqueRows ) {
    this.onlyPassingUniqueRows = onlyPassingUniqueRows;
  }

  /**
   * @return the compressFilesVariable
   */
  public String getCompressFilesVariable() {
    return compressFilesVariable;
  }

  /**
   * @param compressFilesVariable
   *          the compressFilesVariable to set
   */
  public void setCompressFilesVariable( String compressFilesVariable ) {
    this.compressFilesVariable = compressFilesVariable;
  }

  /**
   * @return the caseSensitive
   */
  public boolean[] getCaseSensitive() {
    return caseSensitive;
  }

  /**
   * @param caseSensitive
   *          the caseSensitive to set
   */
  public void setCaseSensitive( boolean[] caseSensitive ) {
    this.caseSensitive = caseSensitive;
  }

  /**
   * @return the freeMemoryLimit
   */
  public String getFreeMemoryLimit() {
    return freeMemoryLimit;
  }

  /**
   * @param freeMemoryLimit
   *          the freeMemoryLimit to set
   */
  public void setFreeMemoryLimit( String freeMemoryLimit ) {
    this.freeMemoryLimit = freeMemoryLimit;
  }

  /**
   * @return the preSortedField
   */
  public boolean[] getPreSortedField() {
    return preSortedField;
  }

  /**
   * @param preSorted
   *          the preSortedField to set
   */
  public void setPreSortedField( boolean[] preSorted ) {
    preSortedField = preSorted;
  }

  /**
   * @return the names of the pre-sorted (group) fields, or null if no field
   *         is flagged as pre-sorted. Built lazily and cached.
   */
  public List<String> getGroupFields() {
    if ( this.groupFields == null ) {
      for ( int i = 0; i < preSortedField.length; i++ ) {
        if ( preSortedField[i] ) {
          if ( groupFields == null ) {
            groupFields = new ArrayList<String>();
          }
          groupFields.add( this.fieldName[i] );
        }
      }
    }
    return groupFields;
  }

  /**
   * @return true if at least one field is flagged as pre-sorted.
   */
  public boolean isGroupSortEnabled() {
    return this.getGroupFields() != null;
  }

  @Override
  public SortRowsMetaInjection getStepMetaInjectionInterface() {
    return new SortRowsMetaInjection( this );
  }

  @Override
  public List<StepInjectionMetaEntry> extractStepMetadataEntries() throws KettleException {
    return getStepMetaInjectionInterface().extractStepMetadataEntries();
  }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.hashtable;
import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.runtime.io.disk.RandomAccessInputView;
import org.apache.flink.table.dataformat.BinaryRow;
import org.apache.flink.util.MathUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.ByteOrder;
import java.util.Arrays;
import java.util.List;
import static org.apache.flink.util.Preconditions.checkArgument;
/**
* Bucket area for hash table.
*
* <p>The layout of the buckets inside a memory segment is as follows:</p>
* <pre>
* +----------------------------- Bucket x ----------------------------
* |element count (2 bytes) | probedFlags (2 bytes) | next-bucket-in-chain-pointer (4 bytes) |
* |
* |hashCode 1 (4 bytes) | hashCode 2 (4 bytes) | hashCode 3 (4 bytes) |
* | ... hashCode n-1 (4 bytes) | hashCode n (4 bytes)
* |
* |pointer 1 (4 bytes) | pointer 2 (4 bytes) | pointer 3 (4 bytes) |
* | ... pointer n-1 (4 bytes) | pointer n (4 bytes)
* |
* +---------------------------- Bucket x + 1--------------------------
* | ...
* |
* </pre>
*/
public class BinaryHashBucketArea {
private static final Logger LOG = LoggerFactory.getLogger(BinaryHashBucketArea.class);
/**
* Log 2 of bucket size.
*/
static final int BUCKET_SIZE_BITS = 7;
/**
* 128, bucket size of bytes.
*/
static final int BUCKET_SIZE = 0x1 << BUCKET_SIZE_BITS;
/**
* The length of the hash code stored in the bucket.
*/
static final int HASH_CODE_LEN = 4;
/**
* The length of a pointer from a hash bucket to the record in the buffers.
*/
static final int POINTER_LEN = 4;
/**
* The number of bytes that the entry in the hash structure occupies, in bytes.
* It corresponds to a 4 byte hash value and an 4 byte pointer.
*/
public static final int RECORD_BYTES = HASH_CODE_LEN + POINTER_LEN;
/**
* Offset of the field in the bucket header indicating the bucket's element count.
*/
static final int HEADER_COUNT_OFFSET = 0;
/**
* Offset of the field in the bucket header that holds the probed bit set.
*/
static final int PROBED_FLAG_OFFSET = 2;
/**
* Offset of the field in the bucket header that holds the forward pointer to its
* first overflow bucket.
*/
static final int HEADER_FORWARD_OFFSET = 4;
/**
* Total length for bucket header.
*/
static final int BUCKET_HEADER_LENGTH = 8;
/**
* The maximum number of elements that can be loaded in a bucket.
*/
static final int NUM_ENTRIES_PER_BUCKET = (BUCKET_SIZE - BUCKET_HEADER_LENGTH) / RECORD_BYTES;
/**
* Offset of record pointer.
*/
static final int BUCKET_POINTER_START_OFFSET = BUCKET_HEADER_LENGTH + HASH_CODE_LEN * NUM_ENTRIES_PER_BUCKET;
/**
* Constant for the forward pointer, indicating that the pointer is not set.
*/
static final int BUCKET_FORWARD_POINTER_NOT_SET = 0xFFFFFFFF;
/**
* Constant for the bucket header to init. (count: 0, probedFlag: 0, forwardPointerNotSet: ~0x0)
*/
private static final long BUCKET_HEADER_INIT;
static {
if (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN) {
BUCKET_HEADER_INIT = 0xFFFFFFFF00000000L;
} else {
BUCKET_HEADER_INIT = 0x00000000FFFFFFFFL;
}
}
/**
* The load factor used when none specified in constructor.
*/
private static final double DEFAULT_LOAD_FACTOR = 0.75;
// Owning hash table; supplies memory segments and segment-layout parameters.
final BinaryHashTable table;
// Row count this area was sized for (drives initial bucket allocation).
private final double estimatedRowCount;
// Maximum fill ratio before the bucket array is grown (see resize()).
private final double loadFactor;
// Partition whose records are indexed by this bucket area.
BinaryHashPartition partition;
// Current entry count; compared against threshold to trigger a resize.
private int size;
// Segments backing the main bucket array.
MemorySegment[] buckets;
// Total bucket count; always a power of two (checked in setNewBuckets).
int numBuckets;
// numBuckets - 1; valid as a bit mask because numBuckets is a power of two.
private int numBucketsMask;
// segments in which overflow buckets from the table structure are stored
MemorySegment[] overflowSegments;
int numOverflowSegments; // the number of actual segments in the overflowSegments array
private int nextOverflowBucket; // the next free bucket in the current overflow segment
// Entry count at which the bucket array should be resized.
private int threshold;
// True while reHash() is re-inserting entries into the new bucket array.
private boolean inReHash = false;
/**
 * Creates a bucket area sized for {@code estimatedRowCount} rows using the
 * default load factor, allocating at most {@code maxSegs} memory segments.
 */
BinaryHashBucketArea(BinaryHashTable table, double estimatedRowCount, int maxSegs) {
	this(table, estimatedRowCount, maxSegs, DEFAULT_LOAD_FACTOR);
}
/**
 * Sizes and allocates the initial bucket array.
 *
 * <p>Bucket count is derived from the estimated row count and the load
 * factor, converted to whole memory segments (clamped to [1, maxSegs]),
 * then rounded down to a power of two so that masking can replace modulo.</p>
 */
private BinaryHashBucketArea(BinaryHashTable table, double estimatedRowCount, int maxSegs, double loadFactor) {
	this.table = table;
	this.estimatedRowCount = estimatedRowCount;
	this.loadFactor = loadFactor;
	this.size = 0;

	// Buckets needed so estimatedRowCount entries stay under the load factor.
	int minNumBuckets = (int) Math.ceil((estimatedRowCount / loadFactor / NUM_ENTRIES_PER_BUCKET));
	// Segments for those buckets (rounded up), clamped to [1, maxSegs].
	int bucketNumSegs = Math.max(1, Math.min(maxSegs, (minNumBuckets >>> table.bucketsPerSegmentBits) +
			((minNumBuckets & table.bucketsPerSegmentMask) == 0 ? 0 : 1)));
	// Power of two so numBucketsMask works as a bit mask.
	int numBuckets = MathUtils.roundDownToPowerOf2(bucketNumSegs << table.bucketsPerSegmentBits);
	int threshold = (int) (numBuckets * NUM_ENTRIES_PER_BUCKET * loadFactor);

	MemorySegment[] buckets = new MemorySegment[bucketNumSegs];
	table.ensureNumBuffersReturned(bucketNumSegs);

	// go over all segments that are part of the table
	for (int i = 0; i < bucketNumSegs; i++) {
		final MemorySegment seg = table.getNextBuffer();
		initMemorySegment(seg);
		buckets[i] = seg;
	}

	setNewBuckets(buckets, numBuckets, threshold);
}
/**
 * Installs a freshly initialized bucket array and resets all overflow state.
 *
 * @param buckets segments backing the new bucket array.
 * @param numBuckets new bucket count; must be a power of two so that
 *        {@code numBuckets - 1} is usable as a mask.
 * @param threshold entry count at which the next resize should trigger.
 */
private void setNewBuckets(MemorySegment[] buckets, int numBuckets, int threshold) {
	this.buckets = buckets;
	checkArgument(MathUtils.isPowerOf2(numBuckets));
	this.numBuckets = numBuckets;
	this.numBucketsMask = numBuckets - 1;
	this.overflowSegments = new MemorySegment[2];
	this.numOverflowSegments = 0;
	this.nextOverflowBucket = 0;
	this.threshold = threshold;
}
/**
 * Binds this bucket area to the partition whose records it indexes.
 */
public void setPartition(BinaryHashPartition partition) {
	this.partition = partition;
}
/**
 * Doubles the bucket segments and re-hashes all entries into the new array.
 *
 * <p>The resize is silently abandoned (leaving the old buckets in place)
 * when memory cannot be obtained: either spilling is disallowed and too few
 * buffers remain, or spilling evicts this area's own partition.</p>
 *
 * @param spillingAllowed whether partitions may be spilled to obtain buffers.
 * @throws IOException if re-hashing fails with an I/O error.
 */
private void resize(boolean spillingAllowed) throws IOException {
	MemorySegment[] oldBuckets = this.buckets;
	int oldNumBuckets = numBuckets;
	MemorySegment[] oldOverflowSegments = overflowSegments;
	int newNumSegs = oldBuckets.length * 2;
	int newNumBuckets = MathUtils.roundDownToPowerOf2(newNumSegs << table.bucketsPerSegmentBits);
	int newThreshold = (int) (newNumBuckets * NUM_ENTRIES_PER_BUCKET * loadFactor);

	// We can't resize if not spillingAllowed and there are not enough buffers.
	if (!spillingAllowed && newNumSegs > table.remainBuffers()) {
		return;
	}

	// request new buckets.
	MemorySegment[] newBuckets = new MemorySegment[newNumSegs];
	for (int i = 0; i < newNumSegs; i++) {
		MemorySegment seg = table.getNextBuffer();
		if (seg == null) {
			// Out of buffers: spill a partition to make memory available.
			final int spilledPart = table.spillPartition();
			if (spilledPart == partition.partitionNumber) {
				// this bucket is no longer in-memory
				// free new segments.
				for (int j = 0; j < i; j++) {
					table.free(newBuckets[j]);
				}
				return;
			}
			seg = table.getNextBuffer();
			if (seg == null) {
				throw new RuntimeException(
						"Bug in HybridHashJoin: No memory became available after spilling a partition.");
			}
		}
		initMemorySegment(seg);
		newBuckets[i] = seg;
	}
	setNewBuckets(newBuckets, newNumBuckets, newThreshold);
	// Re-insert every entry from the old buckets into the new array.
	reHash(oldBuckets, oldNumBuckets, oldOverflowSegments);
}
/**
 * Re-inserts every (hashCode, pointer) entry from the old bucket array and
 * its overflow chains into the freshly installed buckets, then frees the old
 * memory. If an insert fails mid-way, falls back to
 * {@code buildBloomFilterAndFree} and returns without freeing here.
 */
private void reHash(
		MemorySegment[] oldBuckets,
		int oldNumBuckets,
		MemorySegment[] oldOverflowSegments) throws IOException {
	long reHashStartTime = System.currentTimeMillis();
	inReHash = true;
	int scanCount = -1;
	while (true) {
		scanCount++;
		if (scanCount >= oldNumBuckets) {
			break;
		}
		// move to next bucket, update all the current bucket status with new bucket information.
		final int bucketArrayPos = scanCount >> table.bucketsPerSegmentBits;
		int bucketInSegOffset = (scanCount & table.bucketsPerSegmentMask) << BUCKET_SIZE_BITS;
		MemorySegment bucketSeg = oldBuckets[bucketArrayPos];

		int countInBucket = bucketSeg.getShort(bucketInSegOffset + HEADER_COUNT_OFFSET);
		int numInBucket = 0;
		// Walk this bucket and then its overflow chain, copying each entry.
		while (countInBucket != 0) {
			int hashCodeOffset = bucketInSegOffset + BUCKET_HEADER_LENGTH;
			int pointerOffset = bucketInSegOffset + BUCKET_POINTER_START_OFFSET;
			while (numInBucket < countInBucket) {
				int hashCode = bucketSeg.getInt(hashCodeOffset);
				int pointer = bucketSeg.getInt(pointerOffset);
				if (!insertToBucket(hashCode, pointer, true, false)) {
					// Insert failed: abandon the rehash and hand the old
					// structures to the bloom-filter fallback for cleanup.
					buildBloomFilterAndFree(oldBuckets, oldNumBuckets, oldOverflowSegments);
					return;
				}
				numInBucket++;
				hashCodeOffset += HASH_CODE_LEN;
				pointerOffset += POINTER_LEN;
			}

			// this segment is done. check if there is another chained bucket
			int forwardPointer = bucketSeg.getInt(bucketInSegOffset + HEADER_FORWARD_OFFSET);
			if (forwardPointer == BUCKET_FORWARD_POINTER_NOT_SET) {
				break;
			}

			// Follow the forward pointer into the overflow segments.
			final int overflowSegIndex = forwardPointer >>> table.segmentSizeBits;
			bucketSeg = oldOverflowSegments[overflowSegIndex];
			bucketInSegOffset = forwardPointer & table.segmentSizeMask;
			countInBucket = bucketSeg.getShort(bucketInSegOffset + HEADER_COUNT_OFFSET);
			numInBucket = 0;
		}
	}

	freeMemory(oldBuckets, oldOverflowSegments);
	inReHash = false;
	LOG.info("The rehash take {} ms for {} segments", (System.currentTimeMillis() - reHashStartTime), numBuckets);
}
/**
 * Returns the given bucket and overflow segments to the owning table.
 * The overflow array is grown eagerly, so trailing slots may be null and
 * only non-null entries are released.
 */
private void freeMemory(MemorySegment[] buckets, MemorySegment[] overflowSegments) {
    for (int i = 0; i < buckets.length; i++) {
        table.free(buckets[i]);
    }
    for (int i = 0; i < overflowSegments.length; i++) {
        final MemorySegment candidate = overflowSegments[i];
        if (candidate != null) {
            table.free(candidate);
        }
    }
}
/**
 * Initializes every bucket header in a freshly acquired segment. A single
 * long write per bucket covers the entry count, the probed flag and the
 * forward pointer at once.
 */
private void initMemorySegment(MemorySegment seg) {
    int bucketOffset = 0;
    // Visit each bucket slot in the segment; offsets advance by one bucket size.
    for (int k = 0; k < table.bucketsPerSegment; k++, bucketOffset += BUCKET_SIZE) {
        seg.putLong(bucketOffset + HEADER_COUNT_OFFSET, BUCKET_HEADER_INIT);
    }
}
/**
 * Inserts a (hashCode, pointer) pair into the given bucket, allocating and
 * chaining an overflow bucket when the bucket (and the last overflow bucket
 * in its chain) is full. New overflow buckets are carved out of overflow
 * segments obtained from the table, spilling a partition if memory runs out.
 *
 * @param bucket                 segment containing the target bucket
 * @param bucketInSegmentPos     byte offset of the bucket inside {@code bucket}
 * @param hashCode               hash code to store
 * @param pointer                pointer (into the partition's build side) to store
 * @param spillingAllowed        whether a partition may be spilled to obtain memory
 * @param sizeAddAndCheckResize  when true, counts the insert toward {@code size}
 *                               and triggers {@code resize} past the threshold
 * @return false only when the spill chose this area's own partition (the entry
 *         was not inserted); true otherwise
 * @throws IOException when memory runs out and spilling is not allowed, or resize fails
 */
private boolean insertToBucket(
MemorySegment bucket, int bucketInSegmentPos,
int hashCode, int pointer, boolean spillingAllowed, boolean sizeAddAndCheckResize) throws IOException {
final int count = bucket.getShort(bucketInSegmentPos + HEADER_COUNT_OFFSET);
if (count < NUM_ENTRIES_PER_BUCKET) {
// we are good in our current bucket, put the values
bucket.putShort(bucketInSegmentPos + HEADER_COUNT_OFFSET, (short) (count + 1)); // update count
bucket.putInt(bucketInSegmentPos + BUCKET_HEADER_LENGTH + (count * HASH_CODE_LEN), hashCode); // hash code
bucket.putInt(bucketInSegmentPos + BUCKET_POINTER_START_OFFSET + (count * POINTER_LEN), pointer); // pointer
} else {
// we need to go to the overflow buckets
final int originalForwardPointer = bucket.getInt(bucketInSegmentPos + HEADER_FORWARD_OFFSET);
final int forwardForNewBucket;
if (originalForwardPointer != BUCKET_FORWARD_POINTER_NOT_SET) {
// forward pointer set
final int overflowSegIndex = originalForwardPointer >>> table.segmentSizeBits;
final int segOffset = originalForwardPointer & table.segmentSizeMask;
final MemorySegment seg = overflowSegments[overflowSegIndex];
final short obCount = seg.getShort(segOffset + HEADER_COUNT_OFFSET);
// check if there is space in this overflow bucket
if (obCount < NUM_ENTRIES_PER_BUCKET) {
// space in this bucket and we are done
seg.putShort(segOffset + HEADER_COUNT_OFFSET, (short) (obCount + 1)); // update count
seg.putInt(segOffset + BUCKET_HEADER_LENGTH + (obCount * HASH_CODE_LEN), hashCode); // hash code
seg.putInt(segOffset + BUCKET_POINTER_START_OFFSET + (obCount * POINTER_LEN), pointer); // pointer
return true;
} else {
// no space here, we need a new bucket. this current overflow bucket will be the
// target of the new overflow bucket
forwardForNewBucket = originalForwardPointer;
}
} else {
// no overflow bucket yet, so we need a first one
forwardForNewBucket = BUCKET_FORWARD_POINTER_NOT_SET;
}
// we need a new overflow bucket
MemorySegment overflowSeg;
final int overflowBucketNum;
final int overflowBucketOffset;
// first, see if there is space for an overflow bucket remaining in the last overflow segment
if (nextOverflowBucket == 0) {
// no space left in last bucket, or no bucket yet, so create an overflow segment
overflowSeg = table.getNextBuffer();
if (overflowSeg == null) {
// no memory available to create overflow bucket. we need to spill a partition
if (!spillingAllowed) {
throw new IOException("Hashtable memory ran out in a non-spillable situation. " +
"This is probably related to wrong size calculations.");
}
final int spilledPart = table.spillPartition();
if (spilledPart == partition.partitionNumber) {
// this bucket is no longer in-memory
return false;
}
overflowSeg = table.getNextBuffer();
if (overflowSeg == null) {
throw new RuntimeException("Bug in HybridHashJoin: No memory became available after spilling a partition.");
}
}
overflowBucketOffset = 0;
overflowBucketNum = numOverflowSegments;
// add the new overflow segment (grow the tracking array by doubling if needed)
if (overflowSegments.length <= numOverflowSegments) {
MemorySegment[] newSegsArray = new MemorySegment[overflowSegments.length * 2];
System.arraycopy(overflowSegments, 0, newSegsArray, 0, overflowSegments.length);
overflowSegments = newSegsArray;
}
overflowSegments[numOverflowSegments] = overflowSeg;
numOverflowSegments++;
} else {
// there is space in the last overflow bucket
overflowBucketNum = numOverflowSegments - 1;
overflowSeg = overflowSegments[overflowBucketNum];
overflowBucketOffset = nextOverflowBucket << BUCKET_SIZE_BITS;
}
// next overflow bucket is one ahead. if the segment is full, the next will be at the beginning
// of a new segment
nextOverflowBucket = (nextOverflowBucket == table.bucketsPerSegmentMask ? 0 : nextOverflowBucket + 1);
// insert the new overflow bucket in the chain of buckets
// 1) set the old forward pointer
// 2) let the bucket in the main table point to this one
overflowSeg.putInt(overflowBucketOffset + HEADER_FORWARD_OFFSET, forwardForNewBucket);
final int pointerToNewBucket = (overflowBucketNum << table.segmentSizeBits) + overflowBucketOffset;
bucket.putInt(bucketInSegmentPos + HEADER_FORWARD_OFFSET, pointerToNewBucket);
// finally, insert the values into the overflow buckets
overflowSeg.putInt(overflowBucketOffset + BUCKET_HEADER_LENGTH, hashCode); // hash code
overflowSeg.putInt(overflowBucketOffset + BUCKET_POINTER_START_OFFSET, pointer); // pointer
// set the count to one
overflowSeg.putShort(overflowBucketOffset + HEADER_COUNT_OFFSET, (short) 1);
// initiate the probed bitset to 0.
overflowSeg.putShort(overflowBucketOffset + PROBED_FLAG_OFFSET, (short) 0);
}
// Grow the table once the element count crosses the load-factor threshold.
if (sizeAddAndCheckResize && ++size > threshold) {
resize(spillingAllowed);
}
return true;
}
/** Maps a hash code to its bucket index; numBuckets is a power of two, so the mask acts as modulo. */
private int findBucket(int hashCode) {
    final int mask = this.numBucketsMask;
    return hashCode & mask;
}
/**
 * Inserts a (hashCode, pointer) pair into the bucket that the hash code maps to.
 *
 * @return false when the insert had to spill this area's own partition.
 */
boolean insertToBucket(int hashCode, int pointer, boolean spillingAllowed, boolean sizeAddAndCheckResize) throws IOException {
    final int bucketId = findBucket(hashCode);
    // Resolve the memory segment that holds the bucket, and the bucket's byte
    // offset within that segment.
    final int segmentIndex = bucketId >> table.bucketsPerSegmentBits;
    final int offsetInSegment = (bucketId & table.bucketsPerSegmentMask) << BUCKET_SIZE_BITS;
    final MemorySegment targetSegment = this.buckets[segmentIndex];
    return insertToBucket(
            targetSegment, offsetInSegment, hashCode, pointer, spillingAllowed, sizeAddAndCheckResize);
}
/**
 * Appends the record to the partition's build buffer and inserts a pointer to
 * it into the hash bucket. When distinct-build mode is on and an equal row is
 * already present in the in-memory partition, the record is skipped.
 *
 * @param record   build-side row to append
 * @param hashCode the row's hash code
 * @return true if the record was stored in memory (or deduplicated); false if
 *         the partition rejected the append (pointer == -1, i.e. not in memory)
 * @throws IOException on append or bucket-insert failure
 */
boolean appendRecordAndInsert(BinaryRow record, int hashCode) throws IOException {
final int posHashCode = findBucket(hashCode);
// get the bucket for the given hash code
final int bucketArrayPos = posHashCode >> table.bucketsPerSegmentBits;
final int bucketInSegmentPos = (posHashCode & table.bucketsPerSegmentMask) << BUCKET_SIZE_BITS;
final MemorySegment bucket = this.buckets[bucketArrayPos];
// Only attempt dedup when the table asks for it AND the partition is still
// in memory (spilled rows cannot be compared cheaply).
if (!table.tryDistinctBuildRow ||
!partition.isInMemory() ||
!findFirstSameBuildRow(bucket, hashCode, bucketInSegmentPos, record)) {
int pointer = partition.insertIntoBuildBuffer(record);
if (pointer != -1) {
// record was inserted into an in-memory partition. a pointer must be inserted into the buckets
insertToBucket(bucket, bucketInSegmentPos, hashCode, pointer, true, true);
return true;
} else {
return false;
}
} else {
// distinct build rows in memory.
return true;
}
}
/**
 * For distinct build: scans the bucket (and its overflow chain) for an entry
 * with the same hash code whose stored build row equals {@code buildRowToInsert}.
 *
 * @param bucket                segment containing the bucket to scan
 * @param searchHashCode        hash code of the candidate row
 * @param bucketInSegmentOffset byte offset of the bucket inside {@code bucket}
 * @param buildRowToInsert      row to compare against stored rows
 * @return true if an equal row is already stored; false otherwise
 */
private boolean findFirstSameBuildRow(
MemorySegment bucket,
int searchHashCode,
int bucketInSegmentOffset,
BinaryRow buildRowToInsert) {
int posInSegment = bucketInSegmentOffset + BUCKET_HEADER_LENGTH;
int countInBucket = bucket.getShort(bucketInSegmentOffset + HEADER_COUNT_OFFSET);
int numInBucket = 0;
RandomAccessInputView view = partition.getBuildStateInputView();
while (countInBucket != 0) {
while (numInBucket < countInBucket) {
final int thisCode = bucket.getInt(posInSegment);
posInSegment += HASH_CODE_LEN;
if (thisCode == searchHashCode) {
// Hash codes match: deserialize the stored row and compare for real equality.
final int pointer = bucket.getInt(bucketInSegmentOffset +
BUCKET_POINTER_START_OFFSET + (numInBucket * POINTER_LEN));
numInBucket++;
try {
view.setReadPosition(pointer);
BinaryRow row = table.binaryBuildSideSerializer.mapFromPages(table.reuseBuildRow, view);
if (buildRowToInsert.equals(row)) {
return true;
}
} catch (IOException e) {
throw new RuntimeException("Error deserializing key or value from the hashtable: " +
e.getMessage(), e);
}
} else {
numInBucket++;
}
}
// this segment is done. check if there is another chained bucket
final int forwardPointer = bucket.getInt(bucketInSegmentOffset + HEADER_FORWARD_OFFSET);
if (forwardPointer == BUCKET_FORWARD_POINTER_NOT_SET) {
return false;
}
// Follow the forward pointer into the overflow segment and restart the scan there.
final int overflowSegIndex = forwardPointer >>> table.segmentSizeBits;
bucket = overflowSegments[overflowSegIndex];
bucketInSegmentOffset = forwardPointer & table.segmentSizeMask;
countInBucket = bucket.getShort(bucketInSegmentOffset + HEADER_COUNT_OFFSET);
posInSegment = bucketInSegmentOffset + BUCKET_HEADER_LENGTH;
numInBucket = 0;
}
return false;
}
/**
 * Probe-side entry point: positions the table's shared bucket iterator on the
 * bucket chain that the given hash code maps to, so matching build rows can be
 * iterated.
 */
void startLookup(int hashCode) {
    final int bucketId = findBucket(hashCode);
    // Translate the bucket index into (segment, offset-within-segment).
    final int segmentIndex = bucketId >> table.bucketsPerSegmentBits;
    final int offsetInSegment = (bucketId & table.bucketsPerSegmentMask) << BUCKET_SIZE_BITS;
    final MemorySegment bucketSegment = this.buckets[segmentIndex];
    table.bucketIterator.set(bucketSegment, overflowSegments, partition, hashCode, offsetInSegment);
}
/**
 * Hands every segment owned by this area to the caller: the used prefix of
 * the overflow array first, then all bucket segments.
 */
void returnMemory(List<MemorySegment> target) {
    for (int i = 0; i < numOverflowSegments; i++) {
        target.add(overflowSegments[i]);
    }
    for (MemorySegment bucket : buckets) {
        target.add(bucket);
    }
}
/**
 * Returns all of this area's segments (the used overflow entries, then the
 * buckets) to the table's free list.
 */
private void freeMemory() {
    for (int i = 0; i < numOverflowSegments; i++) {
        table.availableMemory.add(overflowSegments[i]);
    }
    for (MemorySegment bucket : buckets) {
        table.availableMemory.add(bucket);
    }
}
/**
 * Releases this area's memory, building a bloom filter first when appropriate.
 * Three situations:
 * 1. Bloom filters disabled: just free the memory.
 * 2. Mid-rehash: free the (new) memory; the rehash builds the filter from the
 *    old buckets itself.
 * 3. Otherwise: build the filter from the current buckets, then free.
 */
void buildBloomFilterAndFree() {
    if (table.useBloomFilters && !inReHash) {
        buildBloomFilterAndFree(buckets, numBuckets, overflowSegments);
    } else {
        freeMemory();
    }
}
/**
 * Builds a bloom filter over every hash code stored in the given bucket table
 * (main buckets plus overflow chains), attaches it to the partition, and then
 * frees the given segments. If bloom filters are disabled or no buffers can be
 * spared, the memory is freed without building a filter.
 *
 * @param buckets          bucket segments to scan and free
 * @param numBuckets       number of buckets in {@code buckets}
 * @param overflowSegments overflow segments chained off the buckets
 */
private void buildBloomFilterAndFree(
MemorySegment[] buckets,
int numBuckets,
MemorySegment[] overflowSegments) {
if (table.useBloomFilters) {
// Size estimate: 1.5x the observed build count, but at least the planner's estimate.
long numRecords = (long) Math.max(partition.getBuildSideRecordCount() * 1.5, estimatedRowCount);
// BloomFilter size min of:
// 1.remain buffers
// 2.bf size for numRecords when fpp is 0.05
// 3.max init bucket area buffers.
int segSize = Math.min(
Math.min(table.remainBuffers(),
HashTableBloomFilter.optimalSegmentNumber(numRecords, table.pageSize(), 0.05)),
table.maxInitBufferOfBucketArea(table.partitionsBeingBuilt.size()));
if (segSize > 0) {
HashTableBloomFilter filter = new HashTableBloomFilter(
table.getNextBuffers(MathUtils.roundDownToPowerOf2(segSize)), numRecords);
// Add all records to bloom filter.
int scanCount = -1;
while (true) {
scanCount++;
if (scanCount >= numBuckets) {
break;
}
// move to next bucket, update all the current bucket status with new bucket information.
final int bucketArrayPos = scanCount >> table.bucketsPerSegmentBits;
int bucketInSegOffset = (scanCount & table.bucketsPerSegmentMask) << BUCKET_SIZE_BITS;
MemorySegment bucketSeg = buckets[bucketArrayPos];
int countInBucket = bucketSeg.getShort(bucketInSegOffset + HEADER_COUNT_OFFSET);
int numInBucket = 0;
// Feed every stored hash code (bucket + overflow chain) into the filter.
while (countInBucket != 0) {
int hashCodeOffset = bucketInSegOffset + BUCKET_HEADER_LENGTH;
while (numInBucket < countInBucket) {
filter.addHash(bucketSeg.getInt(hashCodeOffset));
numInBucket++;
hashCodeOffset += HASH_CODE_LEN;
}
// this segment is done. check if there is another chained bucket
int forwardPointer = bucketSeg.getInt(bucketInSegOffset + HEADER_FORWARD_OFFSET);
if (forwardPointer == BUCKET_FORWARD_POINTER_NOT_SET) {
break;
}
final int overflowSegIndex = forwardPointer >>> table.segmentSizeBits;
bucketSeg = overflowSegments[overflowSegIndex];
bucketInSegOffset = forwardPointer & table.segmentSizeMask;
countInBucket = bucketSeg.getShort(bucketInSegOffset + HEADER_COUNT_OFFSET);
numInBucket = 0;
}
}
partition.bloomFilter = filter;
}
}
freeMemory(buckets, overflowSegments);
}
|
|
/*
*
* * Copyright 2010-2014 Orient Technologies LTD (info(at)orientechnologies.com)
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package com.orientechnologies.orient.etl.transformer;
import com.orientechnologies.orient.core.command.OBasicCommandContext;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.serialization.serializer.OStringSerializerHelper;
import com.orientechnologies.orient.etl.OETLProcessor;
import sun.misc.FloatConsts;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
/**
 * ETL transformer that parses one CSV line (a String) into an {@link ODocument}.
 * Column names/types come either from configuration ("columns") or from the
 * first input line when {@code columnsOnFirstLine} is true. Untyped cells are
 * auto-detected as date, number, or string.
 */
public class OCSVTransformer extends OAbstractTransformer {
  private char separator = ',';
  private boolean columnsOnFirstLine = true;
  private List<String> columnNames = null;
  // Parallel to columnNames; OType.ANY means "auto-detect".
  private List<OType> columnTypes = null;
  // Inclusive 0-based line range to skip; -1 disables.
  private long skipFrom = -1;
  private long skipTo = -1;
  // Current 0-based input line, incremented by skipTransform() on each call.
  private long line = -1;
  // Cell text treated as NULL when configured; otherwise only "NULL" literals are.
  private String nullValue;
  private char stringCharacter = '"';

  /** Describes the accepted configuration parameters as a JSON document. */
  @Override
  public ODocument getConfiguration() {
    return new ODocument().fromJSON("{parameters:[" + getCommonConfigurationParameters()
        + ",{separator:{optional:true,description:'Column separator'}},"
        + "{columnsOnFirstLine:{optional:true,description:'Columns are described in the first line'}},"
        + "{columns:{optional:true,description:'Columns array containing names, and optionally type after :'}},"
        + "{nullValue:{optional:true,description:'value to consider as NULL. Default is not declared'}},"
        + "{stringCharacter:{optional:true,description:'String character delimiter'}},"
        + "{skipFrom:{optional:true,description:'Line number where start to skip',type:'int'}},"
        + "{skipTo:{optional:true,description:'Line number where skip ends',type:'int'}}"
        + "],input:['String'],output:'ODocument'}");
  }

  /**
   * Reads the transformer settings from the configuration document. "columns"
   * entries may carry a type suffix after ':' (e.g. "age:INTEGER"); missing
   * types default to {@link OType#ANY}.
   */
  @Override
  public void configure(final OETLProcessor iProcessor, final ODocument iConfiguration, final OBasicCommandContext iContext) {
    super.configure(iProcessor, iConfiguration, iContext);
    if (iConfiguration.containsField("separator"))
      separator = iConfiguration.field("separator").toString().charAt(0);
    if (iConfiguration.containsField("columnsOnFirstLine"))
      columnsOnFirstLine = (Boolean) iConfiguration.field("columnsOnFirstLine");
    if (iConfiguration.containsField("columns")) {
      final List<String> columns = iConfiguration.field("columns");
      columnNames = new ArrayList<String>(columns.size());
      columnTypes = new ArrayList<OType>(columns.size());
      for (String c : columns) {
        final String[] parts = c.split(":");
        columnNames.add(parts[0]);
        if (parts.length > 1)
          columnTypes.add(OType.valueOf(parts[1].toUpperCase()));
        else
          columnTypes.add(OType.ANY);
      }
    }
    if (iConfiguration.containsField("skipFrom"))
      skipFrom = ((Number) iConfiguration.field("skipFrom")).longValue();
    if (iConfiguration.containsField("skipTo"))
      skipTo = ((Number) iConfiguration.field("skipTo")).longValue();
    if (iConfiguration.containsField("nullValue"))
      nullValue = iConfiguration.field("nullValue");
    if (iConfiguration.containsField("stringCharacter"))
      stringCharacter = iConfiguration.field("stringCharacter").toString().charAt(0);
  }

  @Override
  public String getName() {
    return "csv";
  }

  /**
   * Splits one CSV line into cells and maps them onto an ODocument. Returns
   * null for skipped lines and for the header line. Field-level failures are
   * logged and counted, never propagated.
   */
  @Override
  public Object executeTransform(final Object input) {
    if (skipTransform())
      return null;
    log(OETLProcessor.LOG_LEVELS.DEBUG, "parsing=%s", input);
    final List<String> fields = OStringSerializerHelper.smartSplit(input.toString(), new char[] { separator }, 0, -1, false, false,
        false, false);
    if (!isColumnNamesCorrect(fields))
      return null;
    final ODocument doc = new ODocument();
    for (int i = 0; i < columnNames.size() && i < fields.size(); ++i) {
      final String fieldName = columnNames.get(i);
      Object fieldValue = null;
      try {
        final String fieldStringValue = getCellContent(fields.get(i));
        final OType fieldType = columnTypes != null ? columnTypes.get(i) : null;
        if (fieldType != null && fieldType != OType.ANY) {
          // DEFINED TYPE
          fieldValue = processKnownType(doc, i, fieldName, fieldStringValue, fieldType);
        } else {
          // DETERMINE THE TYPE
          if (fieldStringValue != null)
            fieldValue = determineTheType(fieldStringValue);
        }
        doc.field(fieldName, fieldValue);
      } catch (Exception e) {
        processor.getStats().incrementErrors();
        log(OETLProcessor.LOG_LEVELS.ERROR, "Error on setting document field %s=%s (cause=%s)", fieldName, fieldValue, e.toString());
      }
    }
    log(OETLProcessor.LOG_LEVELS.DEBUG, "document=%s", doc);
    return doc;
  }

  /**
   * Converts a cell to the declared column type and sets it on the document.
   * Conversion failures are logged and the unconverted value is returned.
   * NOTE(review): the caller already applied getCellContent() to this value,
   * so quote stripping runs twice here — confirm whether that is intended.
   */
  private Object processKnownType(ODocument doc, int i, String fieldName, String fieldStringValue, OType fieldType) {
    Object fieldValue;
    fieldValue = getCellContent(fieldStringValue);
    try {
      fieldValue = OType.convert(fieldValue, fieldType.getDefaultJavaType());
      doc.field(fieldName, fieldValue);
    } catch (Exception e) {
      processor.getStats().incrementErrors();
      log(OETLProcessor.LOG_LEVELS.ERROR, "Error on converting row %d field '%s' (%d), value '%s' (class:%s) to type: %s",
          processor.getExtractor().getProgress(), fieldName, i, fieldValue, fieldValue.getClass().getName(), fieldType);
    }
    return fieldValue;
  }

  /** Auto-detects the cell type: tries Date first, then numeric, else keeps the String. */
  private Object determineTheType(String fieldStringValue) {
    Object fieldValue;
    if ((fieldValue = transformToDate(fieldStringValue)) == null)// try maybe Date type
      if ((fieldValue = transformToNumeric(fieldStringValue)) == null)// try maybe Numeric type
        fieldValue = fieldStringValue; // type String
    return fieldValue;
  }

  /**
   * Attempts to parse the value as a yyyy-MM-dd date; returns null on failure.
   * NOTE(review): the formatter is lenient, so partial/odd inputs (e.g. a bare
   * year) may still parse as a Date — confirm this is the intended behavior.
   */
  private Object transformToDate(String fieldStringValue) {
    // DATE
    DateFormat df = new SimpleDateFormat("yyyy-MM-dd");
    df.setLenient(true);
    Object fieldValue;
    try {
      fieldValue = df.parse(fieldStringValue);
    } catch (ParseException pe) {
      fieldValue = null;
    }
    return fieldValue;
  }

  /**
   * Attempts to parse the value as a number. Values containing '.' or ','
   * (comma normalized to dot) become Float, falling back to Double when the
   * Float overflows; otherwise Integer, falling back to Long. Non-numeric
   * input is returned unchanged as a String.
   */
  private Object transformToNumeric(final String fieldStringValue) {
    if (fieldStringValue.isEmpty())
      return fieldStringValue;
    final char c = fieldStringValue.charAt(0);
    if (c != '-' && !Character.isDigit(c))
      // NOT A NUMBER FOR SURE
      return fieldStringValue;
    Object fieldValue;
    try {
      if (fieldStringValue.contains(".") || fieldStringValue.contains(",")) {
        String numberAsString = fieldStringValue.replaceAll(",", ".");
        // valueOf over the deprecated boxed constructors (new Float/Double).
        fieldValue = Float.valueOf(numberAsString);
        if (!isFinite((Float) fieldValue)) {
          fieldValue = Double.valueOf(numberAsString);
        }
      } else
        try {
          fieldValue = Integer.valueOf(fieldStringValue);
        } catch (Exception e) {
          fieldValue = Long.valueOf(fieldStringValue);
        }
    } catch (NumberFormatException nf) {
      fieldValue = fieldStringValue;
    }
    return fieldValue;
  }

  /**
   * Ensures column names are known. On the first sighting of a header line it
   * captures (and unquotes) the names and returns false so the header is not
   * emitted as data; returns false for the first line when names were
   * configured AND columnsOnFirstLine is set; true otherwise.
   */
  private boolean isColumnNamesCorrect(List<String> fields) {
    if (columnNames == null) {
      if (!columnsOnFirstLine)
        throw new OTransformException(getName() + ": columnsOnFirstLine=false and no columns declared");
      columnNames = fields;
      // REMOVE ANY STRING CHARACTERS IF ANY
      for (int i = 0; i < columnNames.size(); ++i)
        columnNames.set(i, getCellContent(columnNames.get(i)));
      return false;
    }
    if (columnsOnFirstLine && line == 0)
      // JUST SKIP FIRST LINE
      return false;
    return true;
  }

  /** Advances the line counter and reports whether this line falls in the configured skip range. */
  private boolean skipTransform() {
    line++;
    if (skipFrom > -1) {
      if (skipTo > -1) {
        if (line >= skipFrom && line <= skipTo)
          return true;
      } else if (line >= skipFrom)
        // SKIP IT
        return true;
    }
    return false;
  }

  /**
   * Backport of Float.isFinite() (introduced in Java 1.8; we must support 1.6).
   * Uses the public Float.MAX_VALUE instead of the internal sun.misc.FloatConsts
   * (same value; avoids relying on a JDK-internal API).
   * TODO replace with Float.isFinite after choosing Java 1.8 as minimal supported.
   */
  protected boolean isFinite(Float f) {
    return Math.abs(f) <= Float.MAX_VALUE;
  }

  /**
   * Normalizes a raw cell: returns null for empty/"NULL" cells, strips a single
   * pair of surrounding string-delimiter characters, otherwise returns as-is.
   */
  // TODO Test, and double doubleqoutes case
  public String getCellContent(String iValue) {
    if (iValue == null || iValue.isEmpty() || "NULL".equals(iValue))
      return null;
    if (iValue.length() > 1 && (iValue.charAt(0) == stringCharacter && iValue.charAt(iValue.length() - 1) == stringCharacter))
      return iValue.substring(1, iValue.length() - 1);
    return iValue;
  }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.net;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOError;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.net.*;
import java.nio.ByteBuffer;
import java.nio.channels.AsynchronousCloseException;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.ServerSocketChannel;
import java.util.*;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import com.google.common.base.Function;
import com.google.common.collect.Lists;
import org.cliffc.high_scale_lib.NonBlockingHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.concurrent.DebuggableThreadPoolExecutor;
import org.apache.cassandra.concurrent.Stage;
import org.apache.cassandra.concurrent.StageManager;
import org.apache.cassandra.concurrent.TracingAwareExecutorService;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.config.EncryptionOptions.ServerEncryptionOptions;
import org.apache.cassandra.db.*;
import org.apache.cassandra.dht.BootStrapper;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.gms.GossipDigestAck;
import org.apache.cassandra.gms.GossipDigestAck2;
import org.apache.cassandra.gms.GossipDigestSyn;
import org.apache.cassandra.io.IVersionedSerializer;
import org.apache.cassandra.io.util.DataOutputBuffer;
import org.apache.cassandra.locator.ILatencySubscriber;
import org.apache.cassandra.metrics.ConnectionMetrics;
import org.apache.cassandra.metrics.DroppedMessageMetrics;
import org.apache.cassandra.net.sink.SinkManager;
import org.apache.cassandra.security.SSLFactory;
import org.apache.cassandra.service.*;
import org.apache.cassandra.streaming.*;
import org.apache.cassandra.streaming.compress.CompressedFileStreamTask;
import org.apache.cassandra.tracing.TraceState;
import org.apache.cassandra.tracing.Tracing;
import org.apache.cassandra.utils.*;
public final class MessagingService implements MessagingServiceMBean
{
public static final String MBEAN_NAME = "org.apache.cassandra.net:type=MessagingService";
// Wire-protocol versions. Only 8 bits are available on the wire, so don't waste versions.
public static final int VERSION_10 = 3;
public static final int VERSION_11 = 4;
public static final int VERSION_117 = 5;
public static final int VERSION_12 = 6;
// The version this node speaks natively.
public static final int current_version = VERSION_12;
/**
 * we preface every message with this number so the recipient can validate the sender is sane
 */
static final int PROTOCOL_MAGIC = 0xCA552DFA;
/* All verb handler identifiers */
public enum Verb
{
MUTATION,
@Deprecated BINARY,
READ_REPAIR,
READ,
REQUEST_RESPONSE, // client-initiated reads and writes
@Deprecated STREAM_INITIATE,
@Deprecated STREAM_INITIATE_DONE,
STREAM_REPLY,
STREAM_REQUEST,
RANGE_SLICE,
BOOTSTRAP_TOKEN,
TREE_REQUEST,
TREE_RESPONSE,
@Deprecated JOIN,
GOSSIP_DIGEST_SYN,
GOSSIP_DIGEST_ACK,
GOSSIP_DIGEST_ACK2,
@Deprecated DEFINITIONS_ANNOUNCE,
DEFINITIONS_UPDATE,
TRUNCATE,
SCHEMA_CHECK,
@Deprecated INDEX_SCAN,
REPLICATION_FINISHED,
INTERNAL_RESPONSE, // responses to internal calls
COUNTER_MUTATION,
STREAMING_REPAIR_REQUEST,
STREAMING_REPAIR_RESPONSE,
SNAPSHOT, // Similar to nt snapshot
MIGRATION_REQUEST,
GOSSIP_SHUTDOWN,
_TRACE, // dummy verb so we can use MS.droppedMessages
// use as padding for backwards compatability where a previous version needs to validate a verb from the future.
UNUSED_1,
UNUSED_2,
UNUSED_3,
;
// remember to add new verbs at the end, since we serialize by ordinal
}
// Cached Verb.values() — avoids re-allocating the array on every ordinal lookup.
public static final Verb[] VERBS = Verb.values();
// Maps each verb to the thread-pool stage its handler runs on.
// Verbs absent from this map have no stage mapping (get() returns null).
public static final EnumMap<MessagingService.Verb, Stage> verbStages = new EnumMap<MessagingService.Verb, Stage>(MessagingService.Verb.class)
{{
put(Verb.MUTATION, Stage.MUTATION);
put(Verb.BINARY, Stage.MUTATION);
put(Verb.READ_REPAIR, Stage.MUTATION);
put(Verb.TRUNCATE, Stage.MUTATION);
put(Verb.READ, Stage.READ);
put(Verb.REQUEST_RESPONSE, Stage.REQUEST_RESPONSE);
put(Verb.STREAM_REPLY, Stage.MISC); // actually handled by FileStreamTask and streamExecutors
put(Verb.STREAM_REQUEST, Stage.MISC);
put(Verb.RANGE_SLICE, Stage.READ);
put(Verb.BOOTSTRAP_TOKEN, Stage.MISC);
put(Verb.TREE_REQUEST, Stage.ANTI_ENTROPY);
put(Verb.TREE_RESPONSE, Stage.ANTI_ENTROPY);
put(Verb.STREAMING_REPAIR_REQUEST, Stage.ANTI_ENTROPY);
put(Verb.STREAMING_REPAIR_RESPONSE, Stage.ANTI_ENTROPY);
put(Verb.GOSSIP_DIGEST_ACK, Stage.GOSSIP);
put(Verb.GOSSIP_DIGEST_ACK2, Stage.GOSSIP);
put(Verb.GOSSIP_DIGEST_SYN, Stage.GOSSIP);
put(Verb.GOSSIP_SHUTDOWN, Stage.GOSSIP);
put(Verb.DEFINITIONS_UPDATE, Stage.MIGRATION);
put(Verb.SCHEMA_CHECK, Stage.MIGRATION);
put(Verb.MIGRATION_REQUEST, Stage.MIGRATION);
put(Verb.INDEX_SCAN, Stage.READ);
put(Verb.REPLICATION_FINISHED, Stage.MISC);
put(Verb.INTERNAL_RESPONSE, Stage.INTERNAL_RESPONSE);
put(Verb.COUNTER_MUTATION, Stage.MUTATION);
put(Verb.SNAPSHOT, Stage.MISC);
put(Verb.UNUSED_1, Stage.INTERNAL_RESPONSE);
put(Verb.UNUSED_2, Stage.INTERNAL_RESPONSE);
put(Verb.UNUSED_3, Stage.INTERNAL_RESPONSE);
}};
/**
 * Messages we receive in IncomingTcpConnection have a Verb that tells us what kind of message it is.
 * Most of the time, this is enough to determine how to deserialize the message payload.
 * The exception is the REQUEST_RESPONSE verb, which just means "a reply to something you told me to do."
 * Traditionally, this was fine since each VerbHandler knew what type of payload it expected, and
 * handled the deserialization itself. Now that we do that in ITC, to avoid the extra copy to an
 * intermediary byte[] (See CASSANDRA-3716), we need to wire that up to the CallbackInfo object
 * (see below).
 * Verbs mapped to null (e.g. REPLICATION_FINISHED) carry no payload to deserialize.
 */
public static final EnumMap<Verb, IVersionedSerializer<?>> verbSerializers = new EnumMap<Verb, IVersionedSerializer<?>>(Verb.class)
{{
put(Verb.REQUEST_RESPONSE, CallbackDeterminedSerializer.instance);
put(Verb.INTERNAL_RESPONSE, CallbackDeterminedSerializer.instance);
put(Verb.MUTATION, RowMutation.serializer);
put(Verb.READ_REPAIR, RowMutation.serializer);
put(Verb.READ, ReadCommand.serializer);
put(Verb.STREAM_REPLY, StreamReply.serializer);
put(Verb.STREAM_REQUEST, StreamRequest.serializer);
put(Verb.RANGE_SLICE, RangeSliceCommand.serializer);
put(Verb.BOOTSTRAP_TOKEN, BootStrapper.StringSerializer.instance);
put(Verb.TREE_REQUEST, AntiEntropyService.TreeRequest.serializer);
put(Verb.TREE_RESPONSE, AntiEntropyService.Validator.serializer);
put(Verb.STREAMING_REPAIR_REQUEST, StreamingRepairTask.serializer);
put(Verb.STREAMING_REPAIR_RESPONSE, UUIDSerializer.serializer);
put(Verb.GOSSIP_DIGEST_ACK, GossipDigestAck.serializer);
put(Verb.GOSSIP_DIGEST_ACK2, GossipDigestAck2.serializer);
put(Verb.GOSSIP_DIGEST_SYN, GossipDigestSyn.serializer);
put(Verb.DEFINITIONS_UPDATE, MigrationManager.MigrationsSerializer.instance);
put(Verb.TRUNCATE, Truncation.serializer);
put(Verb.INDEX_SCAN, IndexScanCommand.serializer);
put(Verb.REPLICATION_FINISHED, null);
put(Verb.COUNTER_MUTATION, CounterMutation.serializer);
put(Verb.SNAPSHOT, SnapshotCommand.serializer);
}};
/**
 * A Map of what kind of serializer to wire up to a REQUEST_RESPONSE callback, based on outbound Verb.
 * A null value (e.g. SNAPSHOT, REPLICATION_FINISHED) means the response carries no payload.
 */
public static final EnumMap<Verb, IVersionedSerializer<?>> callbackDeserializers = new EnumMap<Verb, IVersionedSerializer<?>>(Verb.class)
{{
put(Verb.MUTATION, WriteResponse.serializer);
put(Verb.READ_REPAIR, WriteResponse.serializer);
put(Verb.COUNTER_MUTATION, WriteResponse.serializer);
put(Verb.RANGE_SLICE, RangeSliceReply.serializer);
put(Verb.READ, ReadResponse.serializer);
put(Verb.TRUNCATE, TruncateResponse.serializer);
put(Verb.SNAPSHOT, null);
put(Verb.MIGRATION_REQUEST, MigrationManager.MigrationsSerializer.instance);
put(Verb.SCHEMA_CHECK, UUIDSerializer.serializer);
put(Verb.BOOTSTRAP_TOKEN, BootStrapper.StringSerializer.instance);
put(Verb.REPLICATION_FINISHED, null);
}};
/* Outstanding request callbacks mapped by message Id; the ExpiringMap
 * presumably evicts entries that are never answered — confirm timeout wiring
 * where `callbacks` is constructed. */
private final ExpiringMap<String, CallbackInfo> callbacks;
/**
 * a placeholder class that means "deserialize using the callback." We can't implement this without
 * special-case code in InboundTcpConnection because there is no way to pass the message id to IVersionedSerializer.
 * All three methods intentionally throw: this sentinel must never be invoked directly.
 */
static class CallbackDeterminedSerializer implements IVersionedSerializer<Object>
{
public static final CallbackDeterminedSerializer instance = new CallbackDeterminedSerializer();
public Object deserialize(DataInput in, int version) throws IOException
{
throw new UnsupportedOperationException();
}
public void serialize(Object o, DataOutput out, int version) throws IOException
{
throw new UnsupportedOperationException();
}
public long serializedSize(Object o, int version)
{
throw new UnsupportedOperationException();
}
}
/* Lookup table for registering message handlers based on the verb. */
private final Map<Verb, IVerbHandler> verbHandlers;
/**
 * One executor per destination InetAddress for streaming.
 * <p/>
 * See CASSANDRA-3494 for the background. We have streaming in place so we do not want to limit ourselves to
 * one stream at a time for throttling reasons. But, we also do not want to just arbitrarily stream an unlimited
 * amount of files at once because a single destination might have hundreds of files pending and it would cause a
 * seek storm. So, transfer exactly one file per destination host. That puts a very natural rate limit on it, in
 * addition to mapping well to the expected behavior in many cases.
 * <p/>
 * We will create our stream executors with a core size of 0 so that they time out and do not consume threads. This
 * means the overhead in the degenerate case of having streamed to everyone in the ring over time as a ring changes,
 * is not going to be a thread per node - but rather an instance per node. That's totally fine.
 */
private final ConcurrentMap<InetAddress, DebuggableThreadPoolExecutor> streamExecutors = new NonBlockingHashMap<InetAddress, DebuggableThreadPoolExecutor>();
// One outbound connection pool per peer, created lazily.
private final ConcurrentMap<InetAddress, OutboundTcpConnectionPool> connectionManagers = new NonBlockingHashMap<InetAddress, OutboundTcpConnectionPool>();
private static final Logger logger = LoggerFactory.getLogger(MessagingService.class);
private static final int LOG_DROPPED_INTERVAL_IN_MS = 5000;
// Accept threads, one per listen socket.
private final List<SocketThread> socketThreads = Lists.newArrayList();
// Released once listen() has bound the server sockets; senders can await it.
private final SimpleCondition listenGate;
/**
 * Verbs it's okay to drop if the request has been queued longer than the request timeout. These
 * all correspond to client requests or something triggered by them; we don't want to
 * drop internal messages like bootstrap or repair notifications.
 */
public static final EnumSet<Verb> DROPPABLE_VERBS = EnumSet.of(Verb.BINARY,
Verb._TRACE,
Verb.MUTATION,
Verb.COUNTER_MUTATION,
Verb.READ_REPAIR,
Verb.READ,
Verb.RANGE_SLICE,
Verb.REQUEST_RESPONSE);
// total dropped message counts for server lifetime
private final Map<Verb, DroppedMessageMetrics> droppedMessages = new EnumMap<Verb, DroppedMessageMetrics>(Verb.class);
// dropped count when last requested for the Recent api. high concurrency isn't necessary here.
private final Map<Verb, Integer> lastDroppedInternal = new EnumMap<Verb, Integer>(Verb.class);
// Listeners notified of per-endpoint latency measurements.
private final List<ILatencySubscriber> subscribers = new ArrayList<ILatencySubscriber>();
// protocol versions of the other nodes in the cluster
private final ConcurrentMap<InetAddress, Integer> versions = new NonBlockingHashMap<InetAddress, Integer>();
private static class MSHandle
{
public static final MessagingService instance = new MessagingService();
}
public static MessagingService instance()
{
return MSHandle.instance;
}
private MessagingService()
{
// Pre-register metrics and "last reported" counters for every droppable verb.
for (Verb verb : DROPPABLE_VERBS)
{
droppedMessages.put(verb, new DroppedMessageMetrics(verb));
lastDroppedInternal.put(verb, 0);
}
listenGate = new SimpleCondition();
verbHandlers = new EnumMap<Verb, IVerbHandler>(Verb.class);
// Periodically log a summary of messages dropped since the previous report.
Runnable logDropped = new Runnable()
{
public void run()
{
logDroppedMessages();
}
};
StorageService.scheduledTasks.scheduleWithFixedDelay(logDropped, LOG_DROPPED_INTERVAL_IN_MS, LOG_DROPPED_INTERVAL_IN_MS, TimeUnit.MILLISECONDS);
// Invoked by the ExpiringMap when a callback expires unanswered: record the
// latency/timeout against the target, and submit a hint when the expired
// request was a hintable mutation.
Function<Pair<String, ExpiringMap.CacheableObject<CallbackInfo>>, ?> timeoutReporter = new Function<Pair<String, ExpiringMap.CacheableObject<CallbackInfo>>, Object>()
{
public Object apply(Pair<String, ExpiringMap.CacheableObject<CallbackInfo>> pair)
{
CallbackInfo expiredCallbackInfo = pair.right.value;
maybeAddLatency(expiredCallbackInfo.callback, expiredCallbackInfo.target, pair.right.timeout);
ConnectionMetrics.totalTimeouts.mark();
getConnectionPool(expiredCallbackInfo.target).incrementTimeout();
if (expiredCallbackInfo.shouldHint())
{
assert expiredCallbackInfo.sentMessage != null;
RowMutation rm = (RowMutation) expiredCallbackInfo.sentMessage.payload;
return StorageProxy.submitHint(rm, expiredCallbackInfo.target, null, null);
}
return null;
}
};
callbacks = new ExpiringMap<String, CallbackInfo>(DatabaseDescriptor.getMinRpcTimeout(), timeoutReporter);
// Expose this service over JMX; failure to register is treated as fatal.
MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
try
{
mbs.registerMBean(this, new ObjectName(MBEAN_NAME));
}
catch (Exception e)
{
throw new RuntimeException(e);
}
}
/**
 * Track latency information for the dynamic snitch
 *
 * @param cb the callback associated with this message -- this lets us know if it's a message type we're interested in
 * @param address the host that replied to the message
 * @param latency the measured reply latency, forwarded to subscribers unchanged
 */
public void maybeAddLatency(IMessageCallback cb, InetAddress address, long latency)
{
if (cb.isLatencyForSnitch())
addLatency(address, latency);
}
/**
 * Unconditionally forwards a latency sample to every registered subscriber.
 */
public void addLatency(InetAddress address, long latency)
{
for (ILatencySubscriber subscriber : subscribers)
subscriber.receiveTiming(address, latency);
}
/**
 * called from gossiper when it notices a node is not responding:
 * drops and re-establishes the outbound connections to that endpoint.
 *
 * @param ep the unresponsive endpoint
 */
public void convict(InetAddress ep)
{
// Parameterized logging instead of string concatenation: the message is
// only built when debug logging is actually enabled.
logger.debug("Resetting pool for {}", ep);
getConnectionPool(ep).reset();
}
/**
 * Listen on the specified port.
 *
 * @param localEp InetAddress whose port to listen on.
 * @throws ConfigurationException if a socket cannot be created/bound
 */
public void listen(InetAddress localEp) throws ConfigurationException
{
callbacks.reset(); // hack to allow tests to stop/restart MS
// One accept thread per server socket (plain and, optionally, SSL).
for (ServerSocket ss : getServerSocket(localEp))
{
SocketThread th = new SocketThread(ss, "ACCEPT-" + localEp);
th.start();
socketThreads.add(th);
}
// Unblock anyone parked in waitUntilListening().
listenGate.signalAll();
}
/**
 * Creates the server socket(s) to accept internode connections on: an SSL
 * socket when internode encryption is enabled, plus a plain socket unless
 * encryption is required for all connections.
 *
 * @param localEp address to bind to
 * @return one or two listening sockets
 * @throws ConfigurationException for user-fixable failures (permissions, port in use)
 */
private List<ServerSocket> getServerSocket(InetAddress localEp) throws ConfigurationException
{
final List<ServerSocket> ss = new ArrayList<ServerSocket>(2);
if (DatabaseDescriptor.getServerEncryptionOptions().internode_encryption != ServerEncryptionOptions.InternodeEncryption.none)
{
try
{
ss.add(SSLFactory.getServerSocket(DatabaseDescriptor.getServerEncryptionOptions(), localEp, DatabaseDescriptor.getSSLStoragePort()));
}
catch (IOException e)
{
throw new ConfigurationException("Unable to create ssl socket", e);
}
// setReuseAddress happens in the factory.
logger.info("Starting Encrypted Messaging Service on SSL port {}", DatabaseDescriptor.getSSLStoragePort());
}
// Unless encryption is mandatory for *all* connections, also open a plain socket.
if (DatabaseDescriptor.getServerEncryptionOptions().internode_encryption != ServerEncryptionOptions.InternodeEncryption.all)
{
ServerSocketChannel serverChannel = null;
try
{
serverChannel = ServerSocketChannel.open();
}
catch (IOException e)
{
throw new RuntimeException(e);
}
ServerSocket socket = serverChannel.socket();
try
{
socket.setReuseAddress(true);
}
catch (SocketException e)
{
throw new ConfigurationException("Insufficient permissions to setReuseAddress", e);
}
InetSocketAddress address = new InetSocketAddress(localEp, DatabaseDescriptor.getStoragePort());
try
{
socket.bind(address);
}
catch (BindException e)
{
// Translate the two common bind failures into actionable configuration errors.
if (e.getMessage().contains("in use"))
throw new ConfigurationException(address + " is in use by another process. Change listen_address:storage_port in cassandra.yaml to values that do not conflict with other services");
else if (e.getMessage().contains("Cannot assign requested address"))
throw new ConfigurationException("Unable to bind to address " + address
+ ". Set listen_address in cassandra.yaml to an interface you can bind to, e.g., your private IP address on EC2");
else
throw new RuntimeException(e);
}
catch (IOException e)
{
throw new RuntimeException(e);
}
logger.info("Starting Messaging Service on port {}", DatabaseDescriptor.getStoragePort());
ss.add(socket);
}
return ss;
}
/**
 * Blocks the caller until listen() has signalled that the accept threads
 * are up. Returns early (with the interrupt flag set) if interrupted.
 */
public void waitUntilListening()
{
try
{
listenGate.await();
}
catch (InterruptedException ie)
{
logger.debug("await interrupted");
// Restore the interrupt status instead of silently swallowing it, so
// callers further up the stack can observe the interruption request.
Thread.currentThread().interrupt();
}
}
/**
 * Closes and forgets the connection pool for the given endpoint, if any.
 */
public void destroyConnectionPool(InetAddress to)
{
OutboundTcpConnectionPool cp = connectionManagers.get(to);
if (cp == null)
return;
cp.close();
// NOTE(review): get/close/remove is not atomic; a concurrent
// getConnectionPool(to) could briefly hand out the pool being closed -- confirm acceptable.
connectionManagers.remove(to);
}
/**
 * Returns the connection pool for the endpoint, creating it on first use.
 * putIfAbsent makes concurrent creators agree on a single winner; the loser
 * closes its speculative pool.
 */
public OutboundTcpConnectionPool getConnectionPool(InetAddress to)
{
OutboundTcpConnectionPool cp = connectionManagers.get(to);
if (cp == null)
{
cp = new OutboundTcpConnectionPool(to);
OutboundTcpConnectionPool existingPool = connectionManagers.putIfAbsent(to, cp);
if (existingPool != null)
{
cp.close();
cp = existingPool;
}
}
return cp;
}
/**
 * Convenience wrapper: resolves the pooled connection queue for the target
 * endpoint, then selects the connection appropriate for this message.
 */
public OutboundTcpConnection getConnection(InetAddress to, MessageOut msg)
{
OutboundTcpConnectionPool pool = getConnectionPool(to);
return pool.getConnection(msg);
}
/**
 * Register a verb and the corresponding verb handler with the
 * Messaging Service.
 *
 * @param verb
 * @param verbHandler handler for the specified verb
 */
public void registerVerbHandlers(Verb verb, IVerbHandler verbHandler)
{
// Each verb may be registered at most once.
assert !verbHandlers.containsKey(verb);
verbHandlers.put(verb, verbHandler);
}
/**
 * This method returns the verb handler associated with the registered
 * verb. If no handler has been registered then null is returned.
 *
 * @param type for which the verb handler is sought
 * @return a reference to IVerbHandler which is the handler for the specified verb
 */
public IVerbHandler getVerbHandler(Verb type)
{
return verbHandlers.get(type);
}
/**
 * Registers a callback for a reply, keyed by a freshly generated message id,
 * expiring after the given timeout.
 *
 * @return the generated message id
 */
public String addCallback(IMessageCallback cb, MessageOut message, InetAddress to, long timeout)
{
String messageId = nextId();
CallbackInfo previous;
// If HH is enabled and this is a mutation message => store the message to track for potential hints.
if (DatabaseDescriptor.hintedHandoffEnabled() && message.verb == Verb.MUTATION)
previous = callbacks.put(messageId, new CallbackInfo(to, cb, message, callbackDeserializers.get(message.verb)), timeout);
else
previous = callbacks.put(messageId, new CallbackInfo(to, cb, callbackDeserializers.get(message.verb)), timeout);
// Ids are unique, so there must never be a pre-existing entry.
assert previous == null;
return messageId;
}
private static final AtomicInteger idGen = new AtomicInteger(0);
// TODO make these integers to avoid unnecessary int -> string -> int conversions
private static String nextId()
{
return Integer.toString(idGen.incrementAndGet());
}
/*
 * @see #sendRR(Message message, InetAddress to, IMessageCallback cb, long timeout)
 */
public String sendRR(MessageOut message, InetAddress to, IMessageCallback cb)
{
// Uses the message's own verb-specific timeout.
return sendRR(message, to, cb, message.getTimeout());
}
/**
 * Send a message to a given endpoint. This method specifies a callback
 * which is invoked with the actual response.
 * Also holds the message (only mutation messages) to determine if it
 * needs to trigger a hint (uses StorageProxy for that).
 *
 * @param message message to be sent.
 * @param to endpoint to which the message needs to be sent
 * @param cb callback interface which is used to pass the responses or
 * suggest that a timeout occurred to the invoker of the send().
 * @param timeout the timeout used for expiration
 * @return an reference to message id used to match with the result
 */
public String sendRR(MessageOut message, InetAddress to, IMessageCallback cb, long timeout)
{
String id = addCallback(cb, message, to, timeout);
// Let the consistency predictor observe the start of write/read operations.
if (cb instanceof AbstractWriteResponseHandler)
{
PBSPredictor.instance().startWriteOperation(id);
}
else if (cb instanceof ReadCallback)
{
PBSPredictor.instance().startReadOperation(id);
}
sendOneWay(message, id, to);
return id;
}
/**
 * Fire-and-forget send with a freshly generated message id (no callback).
 */
public void sendOneWay(MessageOut message, InetAddress to)
{
sendOneWay(message, nextId(), to);
}
/**
 * Sends a reply, reusing the id of the request it answers.
 */
public void sendReply(MessageOut message, String id, InetAddress to)
{
sendOneWay(message, id, to);
}
/**
 * Send a message to a given endpoint. This method adheres to the fire and forget
 * style messaging.
 *
 * @param message messages to be sent.
 * @param to endpoint to which the message needs to be sent
 */
public void sendOneWay(MessageOut message, String id, InetAddress to)
{
if (logger.isTraceEnabled())
logger.trace(FBUtilities.getBroadcastAddress() + " sending " + message.verb + " to " + id + "@" + to);
if (to.equals(FBUtilities.getBroadcastAddress()))
logger.trace("Message-to-self {} going over MessagingService", message);
// message sinks are a testing hook
MessageOut processedMessage = SinkManager.processOutboundMessage(message, id, to);
if (processedMessage == null)
{
// A sink suppressed the message entirely.
return;
}
// get pooled connection (really, connection queue)
OutboundTcpConnection connection = getConnection(to, processedMessage);
// write it
connection.enqueue(processedMessage, id);
}
/**
 * Sends a message and returns an async result the caller can block on for
 * the eventual reply.
 *
 * @param message message to be sent
 * @param to endpoint to which the message needs to be sent
 * @return handle to the eventual response
 */
public <T> IAsyncResult<T> sendRR(MessageOut message, InetAddress to)
{
// Parameterize the AsyncResult instead of using the raw type, which
// forced an unchecked conversion to IAsyncResult<T>.
IAsyncResult<T> iar = new AsyncResult<T>();
sendRR(message, to, iar);
return iar;
}
/**
 * Stream a file from source to destination. This is highly optimized
 * to not hold any of the contents of the file in memory.
 *
 * @param header Header contains file to stream and other metadata.
 * @param to endpoint to which we need to stream the file.
 */
public void stream(StreamHeader header, InetAddress to)
{
DebuggableThreadPoolExecutor executor = streamExecutors.get(to);
if (executor == null)
{
// Using a core pool size of 0 is important. See documentation of streamExecutors.
executor = DebuggableThreadPoolExecutor.createWithMaximumPoolSize("Streaming to " + to, 1, 1, TimeUnit.SECONDS);
// Race with concurrent creators: keep the winner, shut the loser down.
DebuggableThreadPoolExecutor old = streamExecutors.putIfAbsent(to, executor);
if (old != null)
{
executor.shutdown();
executor = old;
}
}
// Pick the compressed-transfer task only when compression info is present.
executor.execute(header.file == null || header.file.compressionInfo == null
? new FileStreamTask(header, to)
: new CompressedFileStreamTask(header, to));
}
/**
 * Registers a listener that will be fed observed reply latencies
 * (see addLatency); used by the dynamic snitch.
 *
 * @param subscriber the latency listener to add (param name typo fixed)
 */
public void register(ILatencySubscriber subscriber)
{
subscribers.add(subscriber);
}
/**
 * Drops all pending callbacks without running their timeout reporters (test hook).
 */
public void clearCallbacksUnsafe()
{
callbacks.reset();
}
/**
 * Wait for callbacks and don't allow any more to be created (since they could require writing hints)
 */
public void shutdown()
{
logger.info("Waiting for messaging service to quiesce");
// We may need to schedule hints on the mutation stage, so it's erroneous to shut down the mutation stage first
assert !StageManager.getStage(Stage.MUTATION).isShutdown();
// the important part
callbacks.shutdownBlocking();
// attempt to humor tests that try to stop and restart MS
try
{
for (SocketThread th : socketThreads)
th.close();
}
catch (IOException e)
{
throw new IOError(e);
}
}
/**
 * Hands an inbound message to the appropriate stage for delivery.
 *
 * @param message the deserialized inbound message
 * @param id the sender-assigned message id
 * @param timestamp time the message was constructed/received
 */
public void receive(MessageIn message, String id, long timestamp)
{
TraceState state = Tracing.instance().initializeFromMessage(message);
if (state != null)
state.trace("Message received from {}", message.from);
// message sinks are a testing hook; a sink may suppress the message.
message = SinkManager.processInboundMessage(message, id);
if (message == null)
return;
Runnable runnable = new MessageDeliveryTask(message, id, timestamp);
TracingAwareExecutorService stage = StageManager.getStage(message.getMessageType());
assert stage != null : "No stage for message type " + message.verb;
// Let the consistency predictor observe write/read responses when enabled.
if (message.verb == Verb.REQUEST_RESPONSE && PBSPredictor.instance().isLoggingEnabled())
{
IMessageCallback cb = MessagingService.instance().getRegisteredCallback(id).callback;
if (cb instanceof AbstractWriteResponseHandler)
{
PBSPredictor.instance().logWriteResponse(id, timestamp);
}
else if (cb instanceof ReadCallback)
{
PBSPredictor.instance().logReadResponse(id, timestamp);
}
}
stage.execute(runnable, state);
}
/**
 * Directly installs a callback under a known id (test hook; bypasses addCallback).
 */
public void setCallbackForTests(String messageId, CallbackInfo callback)
{
callbacks.put(messageId, callback);
}
/**
 * @return the callback registered under messageId, or null if none/expired.
 */
public CallbackInfo getRegisteredCallback(String messageId)
{
return callbacks.get(messageId);
}
/**
 * Removes and returns the callback registered under messageId.
 */
public CallbackInfo removeRegisteredCallback(String messageId)
{
return callbacks.remove(messageId);
}
/**
 * @return how long the callback for messageId has been registered.
 */
public long getRegisteredCallbackAge(String messageId)
{
return callbacks.getAge(messageId);
}
/**
 * Rejects a frame whose leading magic number does not match PROTOCOL_MAGIC.
 *
 * @throws IOException if the magic value is wrong
 */
public static void validateMagic(int magic) throws IOException
{
if (magic != PROTOCOL_MAGIC)
throw new IOException("invalid protocol header");
}
/**
 * Extracts a {@code count}-bit wide field from {@code packed} whose highest
 * bit is bit {@code start} (bit 0 = least significant). Equivalent to
 * {@code packed >>> (start + 1) - count & ~(-1 << count)} with the Java
 * precedence spelled out explicitly.
 */
public static int getBits(int packed, int start, int count)
{
int shifted = packed >>> (start + 1 - count);
int mask = ~(-1 << count);
return shifted & mask;
}
/**
 * Builds the wire-level frame for a streaming session: protocol magic,
 * a packed flags/version word, then the serialized StreamHeader.
 *
 * @param streamHeader header describing the stream session
 * @param compress whether the stream payload is compressed
 * @param version messaging protocol version to serialize with
 * @return a flipped ByteBuffer ready to be written to the socket
 */
public ByteBuffer constructStreamHeader(StreamHeader streamHeader, boolean compress, int version)
{
int header = 0;
// set compression bit.
if (compress)
header |= 4;
// set streaming bit
header |= 8;
// Setting up the version bit
header |= (version << 8);
/* Adding the StreamHeader which contains the session Id along
 * with the pendingfile info for the stream.
 * | Session Id | Pending File Size | Pending File | Bool more files |
 * | No. of Pending files | Pending Files ... |
 */
byte[] bytes;
try
{
DataOutputBuffer buffer = new DataOutputBuffer();
StreamHeader.serializer.serialize(streamHeader, buffer, version);
bytes = buffer.getData();
}
catch (IOException e)
{
throw new RuntimeException(e);
}
assert bytes.length > 0;
// Frame layout: magic (4) | flags/version (4) | payload length (4) | payload.
ByteBuffer buffer = ByteBuffer.allocate(4 + 4 + 4 + bytes.length);
buffer.putInt(PROTOCOL_MAGIC);
buffer.putInt(header);
buffer.putInt(bytes.length);
buffer.put(bytes);
buffer.flip();
return buffer;
}
/**
 * @return the last version associated with address, or @param version if this is the first such version
 */
public int setVersion(InetAddress endpoint, int version)
{
logger.debug("Setting version {} for {}", version, endpoint);
Integer v = versions.put(endpoint, version);
return v == null ? version : v;
}
/**
 * Forgets the cached protocol version for an endpoint; subsequent
 * getVersion() calls will assume the current version until re-learned.
 *
 * @param endpoint node whose version entry should be dropped
 */
public void resetVersion(InetAddress endpoint)
{
// Fixed typo in the log message ("Reseting" -> "Resetting").
logger.debug("Resetting version for {}", endpoint);
versions.remove(endpoint);
}
/**
 * Returns the known protocol version for the endpoint, capped at this
 * node's current version; assumes the current version when unknown.
 */
public int getVersion(InetAddress endpoint)
{
Integer v = versions.get(endpoint);
if (v == null)
{
// we don't know the version. assume current. we'll know soon enough if that was incorrect.
logger.trace("Assuming current protocol version for {}", endpoint);
return MessagingService.current_version;
}
else
return Math.min(v, MessagingService.current_version);
}
/**
 * Hostname/IP string convenience overload of {@link #getVersion(InetAddress)}.
 */
public int getVersion(String endpoint) throws UnknownHostException
{
return getVersion(InetAddress.getByName(endpoint));
}
/**
 * Returns the raw recorded version, without capping; callers must check
 * knowsVersion() first.
 */
public int getRawVersion(InetAddress endpoint)
{
Integer v = versions.get(endpoint);
if (v == null)
throw new IllegalStateException("getRawVersion() was called without checking knowsVersion() result first");
return v;
}
public boolean knowsVersion(InetAddress endpoint)
{
return versions.containsKey(endpoint);
}
/**
 * Records one dropped message of the given (droppable) verb.
 */
public void incrementDroppedMessages(Verb verb)
{
assert DROPPABLE_VERBS.contains(verb) : "Verb " + verb + " should not legally be dropped";
droppedMessages.get(verb).dropped.mark();
}
/**
 * Scheduled task: logs per-verb drop counts accumulated since the last run,
 * and dumps thread-pool stats when anything was dropped.
 */
private void logDroppedMessages()
{
boolean logTpstats = false;
for (Map.Entry<Verb, DroppedMessageMetrics> entry : droppedMessages.entrySet())
{
int dropped = (int) entry.getValue().dropped.count();
Verb verb = entry.getKey();
// Delta since the previous report.
int recent = dropped - lastDroppedInternal.get(verb);
if (recent > 0)
{
logTpstats = true;
logger.info("{} {} messages dropped in last {}ms",
new Object[] {recent, verb, LOG_DROPPED_INTERVAL_IN_MS});
lastDroppedInternal.put(verb, dropped);
}
}
if (logTpstats)
StatusLogger.log();
}
/**
 * Accept loop for one server socket: authenticates each incoming peer and
 * spins up an IncomingTcpConnection per accepted socket. Exits cleanly when
 * the server socket is closed by close().
 */
private static class SocketThread extends Thread
{
private final ServerSocket server;
SocketThread(ServerSocket server, String name)
{
super(name);
this.server = server;
}
public void run()
{
while (true)
{
try
{
Socket socket = server.accept();
// Reject peers that fail internode authentication.
if (authenticate(socket))
new IncomingTcpConnection(socket).start();
else
socket.close();
}
catch (AsynchronousCloseException e)
{
// this happens when another thread calls close().
logger.info("MessagingService shutting down server thread.");
break;
}
catch (ClosedChannelException e)
{
logger.debug("MessagingService server thread already closed.");
break;
}
catch (IOException e)
{
throw new RuntimeException(e);
}
}
}
void close() throws IOException
{
server.close();
}
private boolean authenticate(Socket socket)
{
return DatabaseDescriptor.getInternodeAuthenticator().authenticate(socket.getInetAddress(), socket.getPort());
}
}
// ---- JMX metrics: outbound command connections, keyed by host address ----
public Map<String, Integer> getCommandPendingTasks()
{
Map<String, Integer> pendingTasks = new HashMap<String, Integer>();
for (Map.Entry<InetAddress, OutboundTcpConnectionPool> entry : connectionManagers.entrySet())
pendingTasks.put(entry.getKey().getHostAddress(), entry.getValue().cmdCon.getPendingMessages());
return pendingTasks;
}
// Pending command messages for a single endpoint (0 when no pool exists yet).
public int getCommandPendingTasks(InetAddress address)
{
OutboundTcpConnectionPool connection = connectionManagers.get(address);
return connection == null ? 0 : connection.cmdCon.getPendingMessages();
}
public Map<String, Long> getCommandCompletedTasks()
{
Map<String, Long> completedTasks = new HashMap<String, Long>();
for (Map.Entry<InetAddress, OutboundTcpConnectionPool> entry : connectionManagers.entrySet())
completedTasks.put(entry.getKey().getHostAddress(), entry.getValue().cmdCon.getCompletedMesssages());
return completedTasks;
}
public Map<String, Long> getCommandDroppedTasks()
{
Map<String, Long> droppedTasks = new HashMap<String, Long>();
for (Map.Entry<InetAddress, OutboundTcpConnectionPool> entry : connectionManagers.entrySet())
droppedTasks.put(entry.getKey().getHostAddress(), entry.getValue().cmdCon.getDroppedMessages());
return droppedTasks;
}
// ---- JMX metrics: outbound response (ack) connections --------------------
public Map<String, Integer> getResponsePendingTasks()
{
Map<String, Integer> pendingTasks = new HashMap<String, Integer>();
for (Map.Entry<InetAddress, OutboundTcpConnectionPool> entry : connectionManagers.entrySet())
pendingTasks.put(entry.getKey().getHostAddress(), entry.getValue().ackCon.getPendingMessages());
return pendingTasks;
}
public Map<String, Long> getResponseCompletedTasks()
{
Map<String, Long> completedTasks = new HashMap<String, Long>();
for (Map.Entry<InetAddress, OutboundTcpConnectionPool> entry : connectionManagers.entrySet())
completedTasks.put(entry.getKey().getHostAddress(), entry.getValue().ackCon.getCompletedMesssages());
return completedTasks;
}
// ---- JMX metrics: dropped messages and per-host timeouts -----------------
// Lifetime dropped counts per verb.
public Map<String, Integer> getDroppedMessages()
{
Map<String, Integer> map = new HashMap<String, Integer>();
for (Map.Entry<Verb, DroppedMessageMetrics> entry : droppedMessages.entrySet())
map.put(entry.getKey().toString(), (int) entry.getValue().dropped.count());
return map;
}
// Dropped counts since the last call to the "recent" API.
public Map<String, Integer> getRecentlyDroppedMessages()
{
Map<String, Integer> map = new HashMap<String, Integer>();
for (Map.Entry<Verb, DroppedMessageMetrics> entry : droppedMessages.entrySet())
map.put(entry.getKey().toString(), entry.getValue().getRecentlyDropped());
return map;
}
public long getTotalTimeouts()
{
return ConnectionMetrics.totalTimeouts.count();
}
public long getRecentTotalTimouts()
{
return ConnectionMetrics.getRecentTotalTimeout();
}
public Map<String, Long> getTimeoutsPerHost()
{
Map<String, Long> result = new HashMap<String, Long>();
for (Map.Entry<InetAddress, OutboundTcpConnectionPool> entry: connectionManagers.entrySet())
{
String ip = entry.getKey().getHostAddress();
long recent = entry.getValue().getTimeouts();
result.put(ip, recent);
}
return result;
}
public Map<String, Long> getRecentTimeoutsPerHost()
{
Map<String, Long> result = new HashMap<String, Long>();
for (Map.Entry<InetAddress, OutboundTcpConnectionPool> entry: connectionManagers.entrySet())
{
String ip = entry.getKey().getHostAddress();
long recent = entry.getValue().getRecentTimeouts();
result.put(ip, recent);
}
return result;
}
}
|
|
/**
* Copyright 2013 Matija Mazi.
* Copyright 2014 Andreas Schildbach
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.fuelcoinj.crypto;
import com.fuelcoinj.core.*;
import com.google.common.base.Objects;
import com.google.common.base.Objects.ToStringHelper;
import com.google.common.collect.ImmutableList;
import org.spongycastle.crypto.params.KeyParameter;
import org.spongycastle.math.ec.ECPoint;
import javax.annotation.Nullable;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.util.Arrays;
import static com.fuelcoinj.core.Utils.HEX;
import static com.google.common.base.Preconditions.*;
/**
* A deterministic key is a node in a {@link DeterministicHierarchy}. As per
* <a href="https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki">the BIP 32 specification</a> it is a pair
* (key, chaincode). If you know its path in the tree and its chain code you can derive more keys from this. To obtain
* one of these, you can call {@link HDKeyDerivation#createMasterPrivateKey(byte[])}.
*/
public class DeterministicKey extends ECKey {
private static final long serialVersionUID = 1L;
// Parent key in the BIP 32 hierarchy, or null for the master/root key.
private final DeterministicKey parent;
// Path of child numbers from the master key down to this key.
private final ImmutableList<ChildNumber> childNumberPath;
/** 32 bytes */
private final byte[] chainCode;
/** The 4 byte header that serializes in base58 to "xpub" */
public static final int HEADER_PUB = 0x0488B21E;
/** The 4 byte header that serializes in base58 to "xprv" */
public static final int HEADER_PRIV = 0x0488ADE4;
/** Constructs a key from its components. This is not normally something you should use. */
public DeterministicKey(ImmutableList<ChildNumber> childNumberPath,
byte[] chainCode,
ECPoint publicAsPoint,
@Nullable BigInteger priv,
@Nullable DeterministicKey parent) {
super(priv, compressPoint(checkNotNull(publicAsPoint)));
checkArgument(chainCode.length == 32);
this.parent = parent;
this.childNumberPath = checkNotNull(childNumberPath);
// Defensive copy: callers may mutate their array after construction.
this.chainCode = Arrays.copyOf(chainCode, chainCode.length);
}
/** Constructs a key from its components. This is not normally something you should use. */
// Variant that derives the public point from the private key (priv * G).
public DeterministicKey(ImmutableList<ChildNumber> childNumberPath,
byte[] chainCode,
BigInteger priv,
@Nullable DeterministicKey parent) {
super(priv, compressPoint(ECKey.CURVE.getG().multiply(priv)));
checkArgument(chainCode.length == 32);
this.parent = parent;
this.childNumberPath = checkNotNull(childNumberPath);
this.chainCode = Arrays.copyOf(chainCode, chainCode.length);
}
/** Constructs a key from its components. This is not normally something you should use. */
// Variant for an encrypted private key: stores the crypter and ciphertext.
public DeterministicKey(ImmutableList<ChildNumber> childNumberPath,
byte[] chainCode,
KeyCrypter crypter, ECPoint pub, EncryptedData priv, @Nullable DeterministicKey parent) {
this(childNumberPath, chainCode, pub, null, parent);
this.encryptedPrivateKey = checkNotNull(priv);
this.keyCrypter = checkNotNull(crypter);
}
/** Clones the key */
public DeterministicKey(DeterministicKey keyToClone, DeterministicKey newParent) {
super(keyToClone.priv, keyToClone.pub);
this.parent = newParent;
this.childNumberPath = keyToClone.childNumberPath;
this.chainCode = keyToClone.chainCode;
// NOTE(review): encryptedPrivateKey is copied but keyCrypter is not;
// getKeyCrypter() would then only find a crypter via the new parent -- confirm intended.
this.encryptedPrivateKey = keyToClone.encryptedPrivateKey;
}
/**
 * Returns the path through some {@link DeterministicHierarchy} which reaches this keys position in the tree.
 * A path can be written as 1/2/1 which means the first child of the root, the second child of that node, then
 * the first child of that node.
 */
public ImmutableList<ChildNumber> getPath() {
return childNumberPath;
}
/**
 * Returns the path of this key as a human readable string starting with M to indicate the master key.
 */
public String getPathAsString() {
return HDUtils.formatPath(getPath());
}
// Depth in the tree == number of path elements; the master key has depth 0.
private int getDepth() {
return childNumberPath.size();
}
/** Returns the last element of the path returned by {@link DeterministicKey#getPath()} */
public ChildNumber getChildNumber() {
return getDepth() == 0 ? ChildNumber.ZERO : childNumberPath.get(childNumberPath.size() - 1);
}
/**
 * Returns the chain code associated with this key. See the specification to learn more about chain codes.
 */
public byte[] getChainCode() {
// NOTE(review): returns the internal array without copying; callers must not mutate it.
return chainCode;
}
/**
 * Returns RIPE-MD160(SHA256(pub key bytes)).
 */
public byte[] getIdentifier() {
return Utils.sha256hash160(getPubKey());
}
/** Returns the first 32 bits of the result of {@link #getIdentifier()}. */
public byte[] getFingerprint() {
// TODO: why is this different than armory's fingerprint? BIP 32: "The first 32 bits of the identifier are called the fingerprint."
return Arrays.copyOfRange(getIdentifier(), 0, 4);
}
/** Returns the parent key in the hierarchy, or null for a master key. */
@Nullable
public DeterministicKey getParent() {
return parent;
}
/**
 * Returns private key bytes, padded with zeros to 33 bytes.
 * @throws java.lang.IllegalStateException if the private key bytes are missing.
 */
public byte[] getPrivKeyBytes33() {
byte[] bytes33 = new byte[33];
byte[] priv = getPrivKeyBytes();
// Right-align into the 33-byte buffer; assumes priv.length <= 33
// (arraycopy would throw otherwise -- TODO confirm getPrivKeyBytes bound).
System.arraycopy(priv, 0, bytes33, 33 - priv.length, priv.length);
return bytes33;
}
/**
 * Returns the same key with the private part removed. May return the same instance.
 */
public DeterministicKey getPubOnly() {
if (isPubKeyOnly()) return this;
//final DeterministicKey parentPub = getParent() == null ? null : getParent().getPubOnly();
return new DeterministicKey(getPath(), getChainCode(), getPubKeyPoint(), null, parent);
}
// Appends the first 4 bytes of Utils.doubleDigest(input) -- presumably
// double-SHA256, as used for base58check -- to the input.
static byte[] addChecksum(byte[] input) {
int inputLength = input.length;
byte[] checksummed = new byte[inputLength + 4];
System.arraycopy(input, 0, checksummed, 0, inputLength);
byte[] checksum = Utils.doubleDigest(input);
System.arraycopy(checksum, 0, checksummed, inputLength, 4);
return checksummed;
}
// Two-argument form is unsupported: an encrypted copy must be given a new
// parent explicitly (see the three-argument overload below).
@Override
public DeterministicKey encrypt(KeyCrypter keyCrypter, KeyParameter aesKey) throws KeyCrypterException {
throw new UnsupportedOperationException("Must supply a new parent for encryption");
}
/**
 * Returns an encrypted copy of this key, re-parented onto newParent (which,
 * when non-null, must itself be encrypted).
 */
public DeterministicKey encrypt(KeyCrypter keyCrypter, KeyParameter aesKey, @Nullable DeterministicKey newParent) throws KeyCrypterException {
// Same as the parent code, except we construct a DeterministicKey instead of an ECKey.
checkNotNull(keyCrypter);
if (newParent != null)
checkArgument(newParent.isEncrypted());
final byte[] privKeyBytes = getPrivKeyBytes();
checkState(privKeyBytes != null, "Private key is not available");
EncryptedData encryptedPrivateKey = keyCrypter.encrypt(privKeyBytes, aesKey);
DeterministicKey key = new DeterministicKey(childNumberPath, chainCode, keyCrypter, pub, encryptedPrivateKey, newParent);
// Root of an encrypted subtree keeps our creation time; children inherit it from parents.
if (newParent == null)
key.setCreationTimeSeconds(getCreationTimeSeconds());
return key;
}
/**
 * A deterministic key is considered to be encrypted if it has access to encrypted private key bytes, OR if its
 * parent does. The reason is because the parent would be encrypted under the same key and this key knows how to
 * rederive its own private key bytes from the parent, if needed.
 */
@Override
public boolean isEncrypted() {
return priv == null && (super.isEncrypted() || (parent != null && parent.isEncrypted()));
}
/**
 * Returns this keys {@link com.fuelcoinj.crypto.KeyCrypter} <b>or</b> the keycrypter of its parent key.
 */
@Override @Nullable
public KeyCrypter getKeyCrypter() {
// Walk up the hierarchy (recursively) until some ancestor has a crypter.
if (keyCrypter != null)
return keyCrypter;
else if (parent != null)
return parent.getKeyCrypter();
else
return null;
}
/**
 * Signs the given hash, deriving the private key from ancestors when this
 * node does not hold it directly.
 *
 * @throws MissingPrivateKeyException if no private material is reachable
 */
@Override
public ECDSASignature sign(Sha256Hash input, @Nullable KeyParameter aesKey) throws KeyCrypterException {
if (isEncrypted()) {
// If the key is encrypted, ECKey.sign will decrypt it first before rerunning sign. Decryption walks the
// key heirarchy to find the private key (see below), so, we can just run the inherited method.
return super.sign(input, aesKey);
} else {
// If it's not encrypted, derive the private via the parents.
final BigInteger privateKey = findOrDerivePrivateKey();
if (privateKey == null) {
// This key is a part of a public-key only heirarchy and cannot be used for signing
throw new MissingPrivateKeyException();
}
return super.doSign(input, privateKey);
}
}
/**
 * Returns a decrypted copy of this key; verifies the decryption by checking
 * the resulting public key matches ours.
 */
@Override
public DeterministicKey decrypt(KeyCrypter keyCrypter, KeyParameter aesKey) throws KeyCrypterException {
checkNotNull(keyCrypter);
// Check that the keyCrypter matches the one used to encrypt the keys, if set.
if (this.keyCrypter != null && !this.keyCrypter.equals(keyCrypter))
throw new KeyCrypterException("The keyCrypter being used to decrypt the key is different to the one that was used to encrypt it");
BigInteger privKey = findOrDeriveEncryptedPrivateKey(keyCrypter, aesKey);
DeterministicKey key = new DeterministicKey(childNumberPath, chainCode, privKey, parent);
// A wrong AES key produces a different private key and thus a different public key.
if (!Arrays.equals(key.getPubKey(), getPubKey()))
throw new KeyCrypterException("Provided AES key is wrong");
if (parent == null)
key.setCreationTimeSeconds(getCreationTimeSeconds());
return key;
}
// Convenience overload: uses the crypter found via getKeyCrypter() in the superclass.
@Override
public DeterministicKey decrypt(KeyParameter aesKey) throws KeyCrypterException {
return (DeterministicKey) super.decrypt(aesKey);
}
// For when a key is encrypted, either decrypt our encrypted private key bytes, or work up the tree asking parents
// to decrypt and re-derive.
private BigInteger findOrDeriveEncryptedPrivateKey(KeyCrypter keyCrypter, KeyParameter aesKey) {
if (encryptedPrivateKey != null)
return new BigInteger(1, keyCrypter.decrypt(encryptedPrivateKey, aesKey));
// Otherwise we don't have it, but maybe we can figure it out from our parents. Walk up the tree looking for
// the first key that has some encrypted private key data.
DeterministicKey cursor = parent;
while (cursor != null) {
if (cursor.encryptedPrivateKey != null) break;
cursor = cursor.parent;
}
if (cursor == null)
throw new KeyCrypterException("Neither this key nor its parents have an encrypted private key");
byte[] parentalPrivateKeyBytes = keyCrypter.decrypt(cursor.encryptedPrivateKey, aesKey);
return derivePrivateKeyDownwards(cursor, parentalPrivateKeyBytes);
}
@Nullable
private BigInteger findOrDerivePrivateKey() {
    // Walk up the chain, starting with ourselves, until a node holding plaintext private key bytes appears.
    DeterministicKey node = this;
    while (node != null && node.priv == null)
        node = node.parent;
    // No node on the path has private material (e.g. a watching-only hierarchy).
    if (node == null)
        return null;
    return derivePrivateKeyDownwards(node, node.priv.toByteArray());
}
// Re-derives this key's private key starting from an ancestor ("cursor") whose plaintext private key
// bytes are known, replaying child derivations back down to our own position in the tree.
private BigInteger derivePrivateKeyDownwards(DeterministicKey cursor, byte[] parentalPrivateKeyBytes) {
// Rebuild the ancestor as a key that has both its public point and the recovered private scalar.
DeterministicKey downCursor = new DeterministicKey(cursor.childNumberPath, cursor.chainCode,
cursor.pub, new BigInteger(1, parentalPrivateKeyBytes), cursor.parent);
// Now we have to rederive the keys along the path back to ourselves. That path can be found by just truncating
// our path with the length of the parents path.
ImmutableList<ChildNumber> path = childNumberPath.subList(cursor.getDepth(), childNumberPath.size());
for (ChildNumber num : path) {
downCursor = HDKeyDerivation.deriveChildKey(downCursor, num);
}
// downCursor is now the same key as us, but with private key bytes.
checkState(downCursor.pub.equals(pub));
return checkNotNull(downCursor.priv);
}
/**
 * Derives a child at the given index using hardened derivation. Note: <code>index</code> is
 * not the "i" value. If you want the softened derivation, then use instead
 * <code>HDKeyDerivation.deriveChildKey(this, new ChildNumber(child, false))</code>.
 */
public DeterministicKey derive(int child) {
    final ChildNumber hardenedChild = new ChildNumber(child, true);
    return HDKeyDerivation.deriveChildKey(this, hardenedChild);
}
/**
 * Returns the private key of this deterministic key. Even if this object isn't storing the private key,
 * it can be re-derived by walking up to the parents if necessary and this is what will happen.
 * @throws java.lang.IllegalStateException if the parents are encrypted or a watching chain.
 */
@Override
public BigInteger getPrivKey() {
    final BigInteger derived = findOrDerivePrivateKey();
    // Equivalent to Guava's checkState(derived != null, "Private key bytes not available").
    if (derived == null)
        throw new IllegalStateException("Private key bytes not available");
    return derived;
}
/** Serializes this key in the 78-byte extended format, public material only. */
public byte[] serializePublic() {
return serialize(true);
}
/** Serializes this key in the 78-byte extended format, including private material. */
public byte[] serializePrivate() {
return serialize(false);
}
// Packs the key into the fixed 78-byte extended-key layout written below in order:
// 4-byte header, 1-byte depth, 4-byte parent fingerprint, 4-byte child number,
// 32-byte chain code, 33-byte key data (public key or 33-byte private key form).
private byte[] serialize(boolean pub) {
ByteBuffer ser = ByteBuffer.allocate(78);
ser.putInt(pub ? HEADER_PUB : HEADER_PRIV);
ser.put((byte) getDepth());
if (parent == null) {
// A root key has no parent: its fingerprint field is four zero bytes.
ser.putInt(0);
} else {
ser.put(parent.getFingerprint());
}
ser.putInt(getChildNumber().i());
ser.put(getChainCode());
ser.put(pub ? getPubKey() : getPrivKeyBytes33());
checkState(ser.position() == 78); // every field must have been written exactly once
return ser.array();
}
/** Serializes the public form and encodes it as Base58 with checksum. */
public String serializePubB58() {
return toBase58(serialize(true));
}
/** Serializes the private form and encodes it as Base58 with checksum. */
public String serializePrivB58() {
return toBase58(serialize(false));
}
// Appends a checksum to the serialized bytes and Base58-encodes the result.
static String toBase58(byte[] ser) {
return Base58.encode(addChecksum(ser));
}
/**
 * Parses a Base58-with-checksum encoded extended key; see {@link #deserialize}.
 *
 * @throws IllegalArgumentException wrapping any checksum/encoding failure.
 */
public static DeterministicKey deserializeB58(@Nullable DeterministicKey parent, String base58) {
    try {
        final byte[] serializedKey = Base58.decodeChecked(base58);
        return deserialize(parent, serializedKey);
    } catch (AddressFormatException badEncoding) {
        // Surface encoding problems as an unchecked argument error.
        throw new IllegalArgumentException(badEncoding);
    }
}
/**
 * Deserializes a 78-byte extended key. {@code parent} may be null; when non-null, its
 * fingerprint and path are validated against the serialized data.
 *
 * @throws IllegalArgumentException on an unknown header, fingerprint/depth mismatch, or trailing bytes.
 */
public static DeterministicKey deserialize(@Nullable DeterministicKey parent, byte[] serializedKey) {
ByteBuffer buffer = ByteBuffer.wrap(serializedKey);
int header = buffer.getInt();
if (header != HEADER_PRIV && header != HEADER_PUB)
throw new IllegalArgumentException("Unknown header bytes: " + toBase58(serializedKey).substring(0, 4));
// The header distinguishes public-only from private serializations.
boolean pub = header == HEADER_PUB;
byte depth = buffer.get();
byte[] parentFingerprint = new byte[4];
buffer.get(parentFingerprint);
final int i = buffer.getInt();
final ChildNumber childNumber = new ChildNumber(i);
ImmutableList<ChildNumber> path;
if (parent != null) {
// An all-zero fingerprint marks a root key, which contradicts the caller-supplied parent.
if (Arrays.equals(parentFingerprint, HDUtils.longTo4ByteArray(0)))
throw new IllegalArgumentException("Parent was provided but this key doesn't have one");
if (!Arrays.equals(parent.getFingerprint(), parentFingerprint))
throw new IllegalArgumentException("Parent fingerprints don't match");
path = HDUtils.append(parent.getPath(), childNumber);
if (path.size() != depth)
throw new IllegalArgumentException("Depth does not match");
} else {
if (depth == 0) {
path = ImmutableList.of();
} else if (depth == 1) {
// We have been given a key that is not a root key, yet we also don't have any object representing
// the parent. This can happen when deserializing an account key for a watching wallet. In this case,
// we assume that the parent has a path of zero.
path = ImmutableList.of(childNumber);
} else {
throw new IllegalArgumentException("Depth is " + depth + " and no parent key was provided, so we " +
"cannot reconstruct the key path from the provided data.");
}
}
byte[] chainCode = new byte[32];
buffer.get(chainCode);
byte[] data = new byte[33];
buffer.get(data);
// The serialization is fixed-size; anything left over means corrupt input.
checkArgument(!buffer.hasRemaining(), "Found unexpected data in key");
if (pub) {
// Public-only key: decode the 33-byte compressed EC point.
ECPoint point = ECKey.CURVE.getCurve().decodePoint(data);
return new DeterministicKey(path, chainCode, point, null, parent);
} else {
return new DeterministicKey(path, chainCode, new BigInteger(1, data), parent);
}
}
/**
 * The creation time of a deterministic key is equal to that of its parent, unless this key is the root of a tree
 * in which case the time is stored alongside the key as per normal, see {@link com.fuelcoinj.core.ECKey#getCreationTimeSeconds()}.
 */
@Override
public long getCreationTimeSeconds() {
    // Only root keys store their own timestamp; everything else inherits from the parent.
    return parent != null ? parent.getCreationTimeSeconds() : super.getCreationTimeSeconds();
}
/**
 * Verifies equality of all fields but NOT the parent pointer (thus the same key derived in two separate
 * hierarchy objects will equal each other).
 */
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DeterministicKey other = (DeterministicKey) o;
// Parent is deliberately excluded; superclass fields + chain code + derivation path identify the key.
return super.equals(other)
&& Arrays.equals(this.chainCode, other.chainCode)
&& Objects.equal(this.childNumberPath, other.childNumberPath);
}
@Override
public int hashCode() {
    // Mirror equals(): combine the superclass hash with the derivation path and chain code.
    final int pathHash = childNumberPath.hashCode();
    final int chainCodeHash = Arrays.hashCode(chainCode);
    return 31 * (31 * super.hashCode() + pathHash) + chainCodeHash;
}
@Override
public String toString() {
    // Build the fixed fields fluently; only append the creation time when it is meaningful.
    final ToStringHelper helper = Objects.toStringHelper(this)
            .omitNullValues()
            .add("pub", Utils.HEX.encode(pub.getEncoded()))
            .add("chainCode", HEX.encode(chainCode))
            .add("path", getPathAsString());
    if (creationTimeSeconds > 0)
        helper.add("creationTimeSeconds", creationTimeSeconds);
    return helper.toString();
}
}
|
|
package org.apache.solr.search.stats;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermContext;
import org.apache.lucene.search.CollectionStatistics;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermStatistics;
import org.apache.solr.client.solrj.SolrResponse;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.ShardParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.handler.component.ResponseBuilder;
import org.apache.solr.handler.component.ShardRequest;
import org.apache.solr.handler.component.ShardResponse;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.search.SolrIndexSearcher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
/**
 * This class implements exact caching of statistics. It requires an additional
 * round-trip to parse query at shard servers, and return term statistics for
 * query terms (and collection statistics for term fields).
 */
public class ExactStatsCache extends StatsCache {
  private static final Logger LOG = LoggerFactory
      .getLogger(ExactStatsCache.class);

  // experimenting with strategy that takes more RAM, but also doesn't share memory
  // across threads

  // Request-context keys under which the per-request statistics are stashed.
  private static final String CURRENT_GLOBAL_COL_STATS = "org.apache.solr.stats.currentGlobalColStats";
  private static final String CURRENT_GLOBAL_TERM_STATS = "org.apache.solr.stats.currentGlobalTermStats";
  private static final String PER_SHARD_TERM_STATS = "org.apache.solr.stats.perShardTermStats";
  private static final String PER_SHARD_COL_STATS = "org.apache.solr.stats.perShardColStats";

  /**
   * Returns a {@link StatsSource} backed by the global stats stored in the request context
   * by {@link #receiveGlobalStats(SolrQueryRequest)}. Falls back to empty maps (and
   * therefore local statistics) when no global stats were received.
   */
  @SuppressWarnings("unchecked")
  @Override
  public StatsSource get(SolrQueryRequest req) {
    Map<String,CollectionStats> currentGlobalColStats = (Map<String,CollectionStats>) req.getContext().get(CURRENT_GLOBAL_COL_STATS);
    Map<String,TermStats> currentGlobalTermStats = (Map<String,TermStats>) req.getContext().get(CURRENT_GLOBAL_TERM_STATS);
    if (currentGlobalColStats == null) {
      currentGlobalColStats = Collections.emptyMap();
    }
    if (currentGlobalTermStats == null) {
      currentGlobalTermStats = Collections.emptyMap();
    }
    LOG.debug("Returning StatsSource. Collection stats={}, Term stats size= {}", currentGlobalColStats, currentGlobalTermStats.size());
    return new ExactStatsSource(currentGlobalTermStats, currentGlobalColStats);
  }

  /** No configuration options are supported. */
  @Override
  public void init(PluginInfo info) {}

  /**
   * Builds the extra shard request used to collect term statistics from all shards.
   */
  @Override
  public ShardRequest retrieveStatsRequest(ResponseBuilder rb) {
    ShardRequest sreq = new ShardRequest();
    sreq.purpose = ShardRequest.PURPOSE_GET_TERM_STATS;
    sreq.params = new ModifiableSolrParams(rb.req.getParams());
    // don't pass through any shards param
    sreq.params.remove(ShardParams.SHARDS);
    return sreq;
  }

  /**
   * Collects the term/collection statistics returned by each shard into per-shard maps
   * kept in the request context.
   */
  @Override
  public void mergeToGlobalStats(SolrQueryRequest req,
      List<ShardResponse> responses) {
    for (ShardResponse r : responses) {
      LOG.debug("Merging to global stats, shard={}, response={}", r.getShard(), r.getSolrResponse().getResponse());
      String shard = r.getShard();
      SolrResponse res = r.getSolrResponse();
      NamedList<Object> nl = res.getResponse();
      // TODO: nl == null if not all shards respond (no server hosting shard)
      String termStatsString = (String) nl.get(TERM_STATS_KEY);
      if (termStatsString != null) {
        addToPerShardTermStats(req, shard, termStatsString);
      }
      List<Object> terms = nl.getAll(TERMS_KEY);
      if (terms != null) {
        // NOTE(review): this overwrites TERMS_KEY on every iteration, so only the last
        // shard's term list survives - confirm whether terms should be accumulated instead.
        req.getContext().put(TERMS_KEY, terms);
      }
      String colStatsString = (String) nl.get(COL_STATS_KEY);
      if (colStatsString != null) {
        Map<String,CollectionStats> colStats = StatsUtil
            .colStatsMapFromString(colStatsString);
        if (colStats != null) {
          addToPerShardColStats(req, shard, colStats);
        }
      }
    }
    if (LOG.isDebugEnabled()) printStats(req);
  }

  /** Stores one shard's collection stats in the per-shard map, creating the map lazily. */
  @SuppressWarnings("unchecked")
  protected void addToPerShardColStats(SolrQueryRequest req, String shard,
      Map<String,CollectionStats> colStats) {
    Map<String,Map<String,CollectionStats>> perShardColStats = (Map<String,Map<String,CollectionStats>>) req.getContext().get(PER_SHARD_COL_STATS);
    if (perShardColStats == null) {
      perShardColStats = new HashMap<String,Map<String,CollectionStats>>();
      req.getContext().put(PER_SHARD_COL_STATS, perShardColStats);
    }
    perShardColStats.put(shard, colStats);
  }

  /** Debug helper: logs the current per-shard term/collection stats. */
  @SuppressWarnings("unchecked")
  protected void printStats(SolrQueryRequest req) {
    Map<String,Map<String,TermStats>> perShardTermStats = (Map<String,Map<String,TermStats>>) req.getContext().get(PER_SHARD_TERM_STATS);
    if (perShardTermStats == null) {
      perShardTermStats = Collections.emptyMap();
    }
    Map<String,Map<String,CollectionStats>> perShardColStats = (Map<String,Map<String,CollectionStats>>) req.getContext().get(PER_SHARD_COL_STATS);
    if (perShardColStats == null) {
      perShardColStats = Collections.emptyMap();
    }
    LOG.debug("perShardColStats={}, perShardTermStats={}", perShardColStats, perShardTermStats);
  }

  /** Parses and stores one shard's term stats in the per-shard map, creating the map lazily. */
  @SuppressWarnings("unchecked")
  protected void addToPerShardTermStats(SolrQueryRequest req, String shard, String termStatsString) {
    Map<String,TermStats> termStats = StatsUtil
        .termStatsMapFromString(termStatsString);
    if (termStats != null) {
      Map<String,Map<String,TermStats>> perShardTermStats = (Map<String,Map<String,TermStats>>) req.getContext().get(PER_SHARD_TERM_STATS);
      if (perShardTermStats == null) {
        perShardTermStats = new HashMap<String,Map<String,TermStats>>();
        req.getContext().put(PER_SHARD_TERM_STATS, perShardTermStats);
      }
      perShardTermStats.put(shard, termStats);
    }
  }

  /**
   * Executed on each shard: extracts the query terms, computes local term and collection
   * statistics and attaches them to the shard response.
   */
  @Override
  public void returnLocalStats(ResponseBuilder rb, SolrIndexSearcher searcher) {
    Query q = rb.getQuery();
    try {
      HashSet<Term> terms = new HashSet<Term>();
      searcher.createNormalizedWeight(q, true).extractTerms(terms);
      IndexReaderContext context = searcher.getTopReaderContext();
      HashMap<String,TermStats> statsMap = new HashMap<String,TermStats>();
      HashMap<String,CollectionStats> colMap = new HashMap<String,CollectionStats>();
      for (Term t : terms) {
        TermContext termContext = TermContext.build(context, t);
        TermStatistics tst = searcher.localTermStatistics(t, termContext);
        if (tst.docFreq() == 0) { // skip terms that are not present here
          continue;
        }
        statsMap.put(t.toString(), new TermStats(t.field(), tst));
        rb.rsp.add(TERMS_KEY, t.toString());
        if (!colMap.containsKey(t.field())) { // collection stats for this field
          colMap.put(
              t.field(),
              new CollectionStats(searcher.localCollectionStatistics(t.field())));
        }
      }
      String termStatsString = StatsUtil.termStatsMapToString(statsMap);
      rb.rsp.add(TERM_STATS_KEY, termStatsString);
      String colStatsString = StatsUtil.colStatsMapToString(colMap);
      rb.rsp.add(COL_STATS_KEY, colStatsString);
      if (LOG.isDebugEnabled()) {
        LOG.debug("termStats=" + termStatsString + ", collectionStats="
            + colStatsString + ", terms=" + terms + ", numDocs="
            + searcher.maxDoc());
      }
    } catch (IOException e) {
      LOG.error("Error collecting local stats, query='" + q.toString() + "'", e);
      throw new SolrException(ErrorCode.SERVER_ERROR, "Error collecting local stats.", e);
    }
  }

  /**
   * Aggregates the per-shard statistics into global stats and attaches them as request
   * parameters on the outgoing shard request.
   */
  @SuppressWarnings("unchecked")
  @Override
  public void sendGlobalStats(ResponseBuilder rb, ShardRequest outgoing) {
    outgoing.purpose |= ShardRequest.PURPOSE_SET_TERM_STATS;
    ModifiableSolrParams params = outgoing.params;
    List<String> terms = (List<String>) rb.req.getContext().get(TERMS_KEY);
    if (terms != null) {
      Set<String> fields = new HashSet<String>();
      for (String t : terms) {
        // Terms are serialized as "field:text"; the field is the first segment.
        String[] fv = t.split(":");
        fields.add(fv[0]);
      }
      Map<String,TermStats> globalTermStats = new HashMap<String,TermStats>();
      Map<String,CollectionStats> globalColStats = new HashMap<String,CollectionStats>();
      // aggregate collection stats, only for the field in terms
      for (String shard : rb.shards) {
        Map<String,CollectionStats> s = getPerShardColStats(rb, shard);
        if (s == null) {
          continue;
        }
        for (Entry<String,CollectionStats> e : s.entrySet()) {
          if (!fields.contains(e.getKey())) { // skip non-relevant fields
            continue;
          }
          CollectionStats g = globalColStats.get(e.getKey());
          if (g == null) {
            g = new CollectionStats(e.getKey());
            globalColStats.put(e.getKey(), g);
          }
          g.add(e.getValue());
        }
      }
      params.add(COL_STATS_KEY, StatsUtil.colStatsMapToString(globalColStats));
      // sum up only from relevant shards
      for (String t : terms) {
        params.add(TERMS_KEY, t);
        for (String shard : rb.shards) {
          TermStats termStats = getPerShardTermStats(rb.req, t, shard);
          if (termStats == null || termStats.docFreq == 0) {
            continue;
          }
          TermStats g = globalTermStats.get(t);
          if (g == null) {
            g = new TermStats(t);
            globalTermStats.put(t, g);
          }
          g.add(termStats);
        }
      }
      LOG.debug("terms={}, termStats={}", terms, globalTermStats);
      // need global TermStats here...
      params.add(TERM_STATS_KEY, StatsUtil.termStatsMapToString(globalTermStats));
    }
  }

  /** Returns the collection stats reported by the given shard, or null if none were merged. */
  @SuppressWarnings("unchecked")
  protected Map<String,CollectionStats> getPerShardColStats(ResponseBuilder rb, String shard) {
    Map<String,Map<String,CollectionStats>> perShardColStats = (Map<String,Map<String,CollectionStats>>) rb.req.getContext().get(PER_SHARD_COL_STATS);
    if (perShardColStats == null) {
      perShardColStats = Collections.emptyMap();
    }
    return perShardColStats.get(shard);
  }

  /**
   * Returns the stats the given shard reported for term {@code t}, or null when that shard
   * reported none.
   */
  @SuppressWarnings("unchecked")
  protected TermStats getPerShardTermStats(SolrQueryRequest req, String t, String shard) {
    Map<String,Map<String,TermStats>> perShardTermStats = (Map<String,Map<String,TermStats>>) req.getContext().get(PER_SHARD_TERM_STATS);
    if (perShardTermStats == null) {
      perShardTermStats = Collections.emptyMap();
    }
    Map<String,TermStats> cache = perShardTermStats.get(shard);
    // Fix: a shard that returned no term stats has no entry in the map; the previous code
    // dereferenced the null map and threw a NullPointerException.
    return cache != null ? cache.get(t) : null;
  }

  /**
   * Executed on each shard for the main query: parses the global stats sent by the
   * coordinator and stores them in the request context for {@link #get(SolrQueryRequest)}.
   */
  @Override
  public void receiveGlobalStats(SolrQueryRequest req) {
    String globalTermStats = req.getParams().get(TERM_STATS_KEY);
    String globalColStats = req.getParams().get(COL_STATS_KEY);
    if (globalColStats != null) {
      Map<String,CollectionStats> colStats = StatsUtil
          .colStatsMapFromString(globalColStats);
      if (colStats != null) {
        for (Entry<String,CollectionStats> e : colStats.entrySet()) {
          addToGlobalColStats(req, e);
        }
      }
    }
    LOG.debug("Global collection stats={}", globalColStats);
    if (globalTermStats == null) return;
    Map<String,TermStats> termStats = StatsUtil
        .termStatsMapFromString(globalTermStats);
    if (termStats != null) {
      for (Entry<String,TermStats> e : termStats.entrySet()) {
        addToGlobalTermStats(req, e);
      }
    }
  }

  /** Stores one global collection-stats entry in the request context, creating the map lazily. */
  @SuppressWarnings("unchecked")
  protected void addToGlobalColStats(SolrQueryRequest req,
      Entry<String,CollectionStats> e) {
    Map<String,CollectionStats> currentGlobalColStats = (Map<String,CollectionStats>) req.getContext().get(CURRENT_GLOBAL_COL_STATS);
    if (currentGlobalColStats == null) {
      currentGlobalColStats = new HashMap<String,CollectionStats>();
      req.getContext().put(CURRENT_GLOBAL_COL_STATS, currentGlobalColStats);
    }
    currentGlobalColStats.put(e.getKey(), e.getValue());
  }

  /** Stores one global term-stats entry in the request context, creating the map lazily. */
  @SuppressWarnings("unchecked")
  protected void addToGlobalTermStats(SolrQueryRequest req, Entry<String,TermStats> e) {
    Map<String,TermStats> currentGlobalTermStats = (Map<String,TermStats>) req.getContext().get(CURRENT_GLOBAL_TERM_STATS);
    if (currentGlobalTermStats == null) {
      currentGlobalTermStats = new HashMap<String,TermStats>();
      req.getContext().put(CURRENT_GLOBAL_TERM_STATS, currentGlobalTermStats);
    }
    currentGlobalTermStats.put(e.getKey(), e.getValue());
  }

  /**
   * A {@link StatsSource} that answers from pre-computed global maps and falls back to
   * the local searcher for anything missing.
   */
  protected static class ExactStatsSource extends StatsSource {
    private final Map<String,TermStats> termStatsCache;
    private final Map<String,CollectionStats> colStatsCache;

    public ExactStatsSource(Map<String,TermStats> termStatsCache,
        Map<String,CollectionStats> colStatsCache) {
      this.termStatsCache = termStatsCache;
      this.colStatsCache = colStatsCache;
    }

    @Override
    public TermStatistics termStatistics(SolrIndexSearcher localSearcher, Term term, TermContext context)
        throws IOException {
      TermStats termStats = termStatsCache.get(term.toString());
      // TermStats == null is also true if term has no docFreq anyway,
      // see returnLocalStats, if docFreq == 0, they are not added anyway
      // Not sure we need a warning here
      if (termStats == null) {
        LOG.debug("Missing global termStats info for term={}, using local stats", term);
        return localSearcher.localTermStatistics(term, context);
      } else {
        return termStats.toTermStatistics();
      }
    }

    @Override
    public CollectionStatistics collectionStatistics(SolrIndexSearcher localSearcher, String field)
        throws IOException {
      CollectionStats colStats = colStatsCache.get(field);
      if (colStats == null) {
        LOG.debug("Missing global colStats info for field={}, using local", field);
        return localSearcher.localCollectionStatistics(field);
      } else {
        return colStats.toCollectionStatistics();
      }
    }
  }
}
|
|
package lobster.heroquestproj.Renders;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.AnimatorSet;
import android.animation.ObjectAnimator;
import android.animation.ValueAnimator;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.Typeface;
import android.view.View;
import android.view.animation.AccelerateDecelerateInterpolator;
import android.view.animation.BounceInterpolator;
import android.os.Handler;
import lobster.heroquestproj.VFramework.CFontCollection;
import lobster.heroquestproj.VFramework.CSharedUtils;
import lobster.heroquestproj.VFramework.CTextureCollection;
import lobster.heroquestproj.VFramework.EFonts;
import lobster.heroquestproj.VFramework.ETexture;
/**
 * Created by Vagner on 9/5/2016.
 *
 * Base class for drawable sprites. Extends {@link RectF} so the sprite's screen rectangle
 * IS the object itself; subclasses implement {@link #draw(Canvas)}.
 */
public abstract class CSpriteBase extends RectF {
    // texture of this rectangle
    public Bitmap mTexture;
    // object for actual drawing
    protected Paint mObjectDrawer = new Paint();
    // label - if applicable; all labels are center aligned for now
    public String mLabel = "";
    // Fixed coordinates for top left corner - this way the application may calculate automatically all parameters
    protected float mFixedX = -1.0f;
    protected float mFixedY = -1.0f;
    // X relative center position of the rectangle
    protected float mRelativeX = 0.0f;
    // Y relative center position of the rectangle
    protected float mRelativeY = 0.0f;
    // Size of the label in pixels - calculated based on the selected font type multiplied by scalar factor of this same font
    protected float charInPixels = 0.0f;
    // default font - if not specified
    protected Typeface mTextFont = CFontCollection.useFont(EFonts.COMICS);
    // scale - re-scale the current rectangle - important for animation effects (1.0f means current size)
    protected float mScaleX = 1.0f;
    protected float mScaleY = 1.0f;
    // is this sprite visible
    protected boolean mVisible = true;
    // callback executed when the sprite receives a touch event
    public Runnable mTouchEvent = null;
    // optional callback executed when an animation finishes
    public Runnable mEndAnimationEvent = null;

    public CSpriteBase() {
        super();
    }

    public CSpriteBase(ETexture text) {
        super();
        mTexture = CTextureCollection.instance().getTexture(text);
    }

    public CSpriteBase(ETexture text, float left, float top, float right, float bottom) {
        super(left, top, right, bottom);
        mTexture = CTextureCollection.instance().getTexture(text);
    }

    public CSpriteBase(RectF r) {
        super(r);
    }

    public CSpriteBase(Rect r) {
        super(r);
    }

    public void setVisibility(boolean visible) { mVisible = visible; }

    public boolean isVisible() { return mVisible; }

    public void setFixedX(float fixedX) { mFixedX = fixedX; }

    public void setFixedY(float fixedY) { mFixedY = fixedY; }

    public void setRelativeX(float relativeX) { mRelativeX = relativeX; }

    public void setRelativeY(float relativeY) { mRelativeY = relativeY; }

    public float getRelativeX() { return mRelativeX; }

    public float getRelativeY() { return mRelativeY; }

    /** Draws {@link #mLabel} roughly centered horizontally, just below the sprite. */
    public void printLabel(Canvas c) {
        final int sTextSize = 72;
        mObjectDrawer.setColor(Color.rgb(0xc0, 0xcf, 0x10));
        mObjectDrawer.setTextSize(sTextSize);
        charInPixels = sTextSize * CFontCollection.getFontScaleFactor(EFonts.COMICS);
        mObjectDrawer.setTypeface(mTextFont);
        // NOTE(review): integer division (mLabel.length() / 2) truncates for odd lengths,
        // so centering is approximate - confirm whether exact centering is wanted.
        c.drawText(mLabel, (((this.left + this.right) / 2) - ((mLabel.length() / 2) * charInPixels)), this.bottom + 45, mObjectDrawer);
    }

    public void setTop(float top) {
        this.top = top;
    }

    public void setBottom(float bottom) {
        this.bottom = bottom;
    }

    public void setRight(float right) {
        this.right = right;
    }

    public void setLeft(float left) {
        this.left = left;
    }

    /**
     * Centers the sprite at (width * mRelativeX, height * mRelativeY), sized by the
     * texture dimensions times the current scale factors.
     */
    public void setScreenDimensions(int width, int height) {
        this.left = (width * mRelativeX) - (mTexture.getWidth() * mScaleX) / 2;
        this.right = (width * mRelativeX) + (mTexture.getWidth() * mScaleX) / 2;
        // NOTE(review): bottom = center - half and top = center + half is inverted for
        // Android's y-down canvas coordinates (top should be the smaller value).
        // Behavior kept unchanged because rendering code may rely on it - confirm.
        this.bottom = (height * mRelativeY) - (mTexture.getHeight() * mScaleY) / 2;
        this.top = (height * mRelativeY) + (mTexture.getHeight() * mScaleY) / 2;
    }

    public void setLeft(int leftVal) {
        this.left = leftVal;
    }

    public void setNameY(String yPropertyName) {
        // intentionally empty
    }

    /** Posts {@link #mTouchEvent} via a Handler, if a touch callback was registered. */
    public void onClickEvent() {
        if (mTouchEvent != null) {
            Handler hand = new Handler();
            hand.post(mTouchEvent);
        }
    }

    // even though this function exists - you shouldn't use it
    // really time consuming...
    // better to use only for animation purposes
    // if you have to resize something, create a new resized image
    public void setScaleFactor(float scale) {
        mScaleX = scale;
        mScaleY = scale;
    }

    // this may be used by the animator class
    public void setScaleX(float scaleX) {
        mScaleX = scaleX;
    }

    // this may be used by the animator class
    public void setScaleY(float scaleY) {
        mScaleY = scaleY;
    }

    public void setAlpha(int alphaVal) {
        mObjectDrawer.setAlpha(alphaVal);
    }

    /**
     * Rotates the rectangle's corner coordinates by the given angle (degrees) around the origin.
     * Fixed: the original used cos() where sin() was required for the y components, and read
     * left/right after they had already been overwritten, so the standard 2D rotation
     * (x' = x*cos - y*sin, y' = x*sin + y*cos) was never actually applied.
     */
    public void setRotateY(float rotationAngle) {
        final double radians = (Math.PI / 180.0) * rotationAngle;
        final double cos = Math.cos(radians);
        final double sin = Math.sin(radians);
        // Snapshot the current coordinates so every formula uses the pre-rotation values.
        final float oldLeft = this.left;
        final float oldTop = this.top;
        final float oldRight = this.right;
        final float oldBottom = this.bottom;
        this.left = (float) (oldLeft * cos - oldTop * sin);
        this.top = (float) (oldLeft * sin + oldTop * cos);
        this.right = (float) (oldRight * cos - oldBottom * sin);
        this.bottom = (float) (oldRight * sin + oldBottom * cos);
    }

    /** Animates the "RotateY" property from minAngle to finalAngle with a bounce. */
    public void startRotating(int minAngle, int finalAngle, int duration) {
        ObjectAnimator rotateAnimator = ObjectAnimator.ofFloat(this, "RotateY", minAngle, finalAngle);
        rotateAnimator.setInterpolator(new BounceInterpolator());
        AnimatorSet animatorSet = new AnimatorSet();
        animatorSet.play(rotateAnimator);
        animatorSet.setDuration(duration);
        animatorSet.start();
    }

    /** Animates the paint alpha from minAlpha to finalAlpha. */
    public void startFadeEffect(int minAlpha, int finalAlpha, int duration) {
        ObjectAnimator alphaAnimator = ObjectAnimator.ofInt(this, "Alpha", minAlpha, finalAlpha);
        alphaAnimator.setInterpolator(new AccelerateDecelerateInterpolator());
        AnimatorSet animatorSet = new AnimatorSet();
        animatorSet.play(alphaAnimator);
        animatorSet.setDuration(duration);
        runWithHardwareLayer(animatorSet);
        animatorSet.start();
    }

    /** Animates both scale factors between minScale and finalScale; repeat == -1 loops forever. */
    public void startBouncing(float minScale, float finalScale, int duration, int repeat) {
        ObjectAnimator scaleXAnimator = ObjectAnimator.ofFloat(this, "scaleX", minScale, finalScale);
        scaleXAnimator.setInterpolator(new BounceInterpolator());
        ObjectAnimator scaleYAnimator = ObjectAnimator.ofFloat(this, "scaleY", minScale, finalScale);
        scaleYAnimator.setInterpolator(new BounceInterpolator());
        if (repeat == -1) {
            scaleXAnimator.setRepeatCount(ValueAnimator.INFINITE);
            scaleYAnimator.setRepeatCount(ValueAnimator.INFINITE);
        } else {
            scaleXAnimator.setRepeatCount(repeat);
            scaleYAnimator.setRepeatCount(repeat);
        }
        AnimatorSet animatorSet = new AnimatorSet();
        animatorSet.playTogether(scaleXAnimator, scaleYAnimator);
        animatorSet.setDuration(duration);
        runWithHardwareLayer(animatorSet);
        animatorSet.start();
    }

    // Shared helper (extracted from startFadeEffect/startBouncing): render on a GPU layer
    // during the animation, restore the layer type when it ends, and fire
    // mEndAnimationEvent if one was registered. No-op when there is no surface view.
    private void runWithHardwareLayer(AnimatorSet animatorSet) {
        if (CSharedUtils.instance().getmSurfaceView() != null) {
            CSharedUtils.instance().getmSurfaceView().setLayerType(View.LAYER_TYPE_HARDWARE, null);
            // adds a listener - when the animation is over, stop using the gpu
            animatorSet.addListener(new AnimatorListenerAdapter() {
                @Override
                public void onAnimationEnd(Animator animation) {
                    CSharedUtils.instance().getmSurfaceView().setLayerType(View.LAYER_TYPE_NONE, null);
                    // in case there is an existing event for end animation event - execute it
                    if (mEndAnimationEvent != null) {
                        Handler hand = new Handler();
                        hand.post(mEndAnimationEvent);
                    }
                }
            });
        }
    }

    public abstract void draw(Canvas c);
}
|
|
/*
* oxAuth is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
*
* Copyright (c) 2014, Gluu
*/
package org.xdi.oxauth.client;
import org.apache.commons.lang.StringUtils;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.xdi.oxauth.model.authorize.AuthorizeRequestParam;
import org.xdi.oxauth.model.authorize.CodeVerifier;
import org.xdi.oxauth.model.common.Display;
import org.xdi.oxauth.model.common.Prompt;
import org.xdi.oxauth.model.common.ResponseMode;
import org.xdi.oxauth.model.common.ResponseType;
import org.xdi.oxauth.model.util.Util;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Represents an authorization request to send to the authorization server.
*
* @author Javier Rojas Blum
* @version August 9, 2017
*/
public class AuthorizationRequest extends BaseRequest {
// -- Core authorization request parameters --
private List<ResponseType> responseTypes;
private String clientId;
private List<String> scopes;
private String redirectUri;
// Opaque value echoed back by the server; used to keep request/callback state and prevent CSRF.
private String state;
private ResponseMode responseMode;
// Associates the user-agent session with the issued ID Token; mitigates replay attacks.
private String nonce;
private Display display;
private List<Prompt> prompts;
private Integer maxAge;
private List<String> uiLocales;
private List<String> claimsLocales;
private String idTokenHint;
private String loginHint;
private List<String> acrValues;
private JSONObject claims;
private String registration;
// Request object passed by value (request) or by reference (requestUri).
private String request;
private String requestUri;
// -- Session / access-token related parameters --
private boolean requestSessionId;
private String sessionId;
private String accessToken;
private boolean useNoRedirectHeader;
// PKCE: the code challenge derived from the verifier, and the transformation method used (e.g. S256).
private String codeChallenge;
private String codeChallengeMethod;
private Map<String, String> customResponseHeaders;
/**
 * Constructs an authorization request.
 *
 * @param responseTypes The response type informs the authorization server of the desired response type:
 *                      <strong>code</strong>, <strong>token</strong>, <strong>id_token</strong>
 *                      a combination of them. The response type parameter is mandatory.
 * @param clientId      The client identifier is mandatory.
 * @param scopes        The scope of the access request.
 * @param redirectUri   Redirection URI
 * @param nonce         A string value used to associate a user agent session with an ID Token,
 *                      and to mitigate replay attacks.
 */
public AuthorizationRequest(List<ResponseType> responseTypes, String clientId, List<String> scopes,
                            String redirectUri, String nonce) {
    super();
    this.responseTypes = responseTypes;
    this.clientId = clientId;
    this.scopes = scopes;
    this.redirectUri = redirectUri;
    this.nonce = nonce;
    // Start with no prompts and with redirect behavior enabled by default.
    this.prompts = new ArrayList<Prompt>();
    this.useNoRedirectHeader = false;
}
/**
 * Generates a fresh PKCE code verifier using the S256 transformation, stores the resulting
 * challenge and method on this request, and returns the verifier for the token exchange.
 */
public CodeVerifier generateAndSetCodeChallengeWithMethod() {
    final CodeVerifier codeVerifier = new CodeVerifier(CodeVerifier.CodeChallengeMethod.S256);
    this.codeChallenge = codeVerifier.getCodeChallenge();
    this.codeChallengeMethod = codeVerifier.getTransformationType().getPkceString();
    return codeVerifier;
}
/** Returns the PKCE code challenge. */
public String getCodeChallenge() {
return codeChallenge;
}
/** Returns the PKCE code challenge method (e.g. the string for S256). */
public String getCodeChallengeMethod() {
return codeChallengeMethod;
}
/** Sets the PKCE code challenge. */
public void setCodeChallenge(String codeChallenge) {
this.codeChallenge = codeChallenge;
}
/** Sets the PKCE code challenge method. */
public void setCodeChallengeMethod(String codeChallengeMethod) {
this.codeChallengeMethod = codeChallengeMethod;
}
/**
 * Returns the response types.
 *
 * @return The response types.
 */
public List<ResponseType> getResponseTypes() {
return responseTypes;
}
/**
 * Sets the response types.
 *
 * @param responseTypes The response types.
 */
public void setResponseTypes(List<ResponseType> responseTypes) {
this.responseTypes = responseTypes;
}
/**
 * Returns the client identifier.
 *
 * @return The client identifier.
 */
public String getClientId() {
return clientId;
}
/**
 * Sets the client identifier.
 *
 * @param clientId The client identifier.
 */
public void setClientId(String clientId) {
this.clientId = clientId;
}
/**
 * Returns the scopes of the access request. The authorization endpoint allows
 * the client to specify the scope of the access request using the scope
 * request parameter. In turn, the authorization server uses the scope
 * response parameter to inform the client of the scope of the access token
 * issued. The value of the scope parameter is expressed as a list of
 * space-delimited, case sensitive strings.
 *
 * @return The scopes of the access request.
 */
public List<String> getScopes() {
return scopes;
}
/**
 * Sets the scope of the access request. The authorization endpoint allows
 * the client to specify the scope of the access request using the scope
 * request parameter. In turn, the authorization server uses the scope
 * response parameter to inform the client of the scope of the access token
 * issued. The value of the scope parameter is expressed as a list of
 * space-delimited, case sensitive strings.
 *
 * @param scopes The scope of the access request.
 */
public void setScopes(List<String> scopes) {
this.scopes = scopes;
}
/**
 * Returns the redirection URI.
 *
 * @return The redirection URI.
 */
public String getRedirectUri() {
return redirectUri;
}
/**
 * Sets the redirection URI.
 *
 * @param redirectUri The redirection URI.
 */
public void setRedirectUri(String redirectUri) {
this.redirectUri = redirectUri;
}
/**
* Returns the state. The state is an opaque value used by the client to
* maintain state between the request and callback. The authorization server
* includes this value when redirecting the user-agent back to the client.
* The parameter should be used for preventing cross-site request forgery.
*
* @return The state.
*/
public String getState() {
return state;
}
/**
* Sets the state. The state is an opaque value used by the client to
* maintain state between the request and callback. The authorization server
* includes this value when redirecting the user-agent back to the client.
* The parameter should be used for preventing cross-site request forgery.
*
* @param state The state.
*/
public void setState(String state) {
this.state = state;
}
    /**
     * Returns the response mode, which instructs the authorization server how to
     * return parameters from the authorization endpoint.
     *
     * @return The response mode, or <code>null</code> if not set.
     */
    public ResponseMode getResponseMode() {
        return responseMode;
    }

    /**
     * Sets the response mode.
     *
     * @param responseMode The response mode.
     */
    public void setResponseMode(ResponseMode responseMode) {
        this.responseMode = responseMode;
    }
/**
* Returns a string value used to associate a user agent session with an ID Token,
* and to mitigate replay attacks.
*
* @return The nonce value.
*/
public String getNonce() {
return nonce;
}
/**
* Sets a string value used to associate a user agent session with an ID Token,
* and to mitigate replay attacks.
*
* @param nonce The nonce value.
*/
public void setNonce(String nonce) {
this.nonce = nonce;
}
/**
* Returns an ASCII string value that specifies how the Authorization Server displays the
* authentication page to the End-User.
*
* @return The display value.
*/
public Display getDisplay() {
return display;
}
/**
* Sets an ASCII string value that specifies how the Authorization Server displays the
* authentication page to the End-User.
*
* @param display The display value.
*/
public void setDisplay(Display display) {
this.display = display;
}
/**
* Returns a space delimited list of ASCII strings that can contain the values login, consent,
* select_account, and none.
*
* @return The prompt list.
*/
public List<Prompt> getPrompts() {
return prompts;
}
    /**
     * Sets the list of prompt values.
     *
     * @param prompts The prompt list.
     */
    public void setPrompts(List<Prompt> prompts) {
        this.prompts = prompts;
    }

    /**
     * Returns the maximum authentication age value (sent as the
     * <code>max_age</code> authorization parameter).
     *
     * @return The max age value, or <code>null</code> if not set.
     */
    public Integer getMaxAge() {
        return maxAge;
    }

    /**
     * Sets the maximum authentication age value.
     *
     * @param maxAge The max age value.
     */
    public void setMaxAge(Integer maxAge) {
        this.maxAge = maxAge;
    }

    /**
     * Returns the end-user's preferred UI locales.
     *
     * @return The UI locales list, or <code>null</code> if not set.
     */
    public List<String> getUiLocales() {
        return uiLocales;
    }

    /**
     * Sets the end-user's preferred UI locales.
     *
     * @param uiLocales The UI locales list.
     */
    public void setUiLocales(List<String> uiLocales) {
        this.uiLocales = uiLocales;
    }

    /**
     * Returns the end-user's preferred claims locales.
     *
     * @return The claims locales list, or <code>null</code> if not set.
     */
    public List<String> getClaimsLocales() {
        return claimsLocales;
    }

    /**
     * Sets the end-user's preferred claims locales.
     *
     * @param claimsLocales The claims locales list.
     */
    public void setClaimsLocales(List<String> claimsLocales) {
        this.claimsLocales = claimsLocales;
    }

    /**
     * Returns the ID Token hint.
     *
     * @return The ID Token hint, or <code>null</code> if not set.
     */
    public String getIdTokenHint() {
        return idTokenHint;
    }

    /**
     * Sets the ID Token hint.
     *
     * @param idTokenHint The ID Token hint.
     */
    public void setIdTokenHint(String idTokenHint) {
        this.idTokenHint = idTokenHint;
    }

    /**
     * Returns the login hint.
     *
     * @return The login hint, or <code>null</code> if not set.
     */
    public String getLoginHint() {
        return loginHint;
    }

    /**
     * Sets the login hint.
     *
     * @param loginHint The login hint.
     */
    public void setLoginHint(String loginHint) {
        this.loginHint = loginHint;
    }

    /**
     * Returns the requested Authentication Context Class Reference values.
     *
     * @return The ACR values list, or <code>null</code> if not set.
     */
    public List<String> getAcrValues() {
        return acrValues;
    }

    /**
     * Sets the requested Authentication Context Class Reference values.
     *
     * @param acrValues The ACR values list.
     */
    public void setAcrValues(List<String> acrValues) {
        this.acrValues = acrValues;
    }

    /**
     * Returns the claims request as a JSON object.
     *
     * @return The claims JSON object, or <code>null</code> if not set.
     */
    public JSONObject getClaims() {
        return claims;
    }

    /**
     * Sets the claims request as a JSON object.
     *
     * @param claims The claims JSON object.
     */
    public void setClaims(JSONObject claims) {
        this.claims = claims;
    }

    /**
     * Returns the registration value (sent as the
     * <code>AuthorizeRequestParam.REGISTRATION</code> parameter).
     *
     * @return The registration value, or <code>null</code> if not set.
     */
    public String getRegistration() {
        return registration;
    }

    /**
     * Sets the registration value.
     *
     * @param registration The registration value.
     */
    public void setRegistration(String registration) {
        this.registration = registration;
    }
/**
* Returns a JWT encoded OpenID Request Object.
*
* @return A JWT encoded OpenID Request Object.
*/
public String getRequest() {
return request;
}
/**
* Sets a JWT encoded OpenID Request Object.
*
* @param request A JWT encoded OpenID Request Object.
*/
public void setRequest(String request) {
this.request = request;
}
/**
* Returns an URL that points to an OpenID Request Object.
*
* @return An URL that points to an OpenID Request Object.
*/
public String getRequestUri() {
return requestUri;
}
/**
* Sets an URL that points to an OpenID Request Object.
*
* @param requestUri An URL that points to an OpenID Request Object.
*/
public void setRequestUri(String requestUri) {
this.requestUri = requestUri;
}
/**
* Returns whether session id is requested.
*
* @return whether session id is requested
*/
public boolean isRequestSessionId() {
return requestSessionId;
}
/**
* Sets whether session id should be requested.
*
* @param p_requestSessionId session id.
*/
public void setRequestSessionId(boolean p_requestSessionId) {
requestSessionId = p_requestSessionId;
}
/**
* Gets session id.
*
* @return session id.
*/
public String getSessionId() {
return sessionId;
}
/**
* Sets session id.
*
* @param p_sessionId session id
*/
public void setSessionId(String p_sessionId) {
sessionId = p_sessionId;
}
    /**
     * Returns the access token (sent as the
     * <code>AuthorizeRequestParam.ACCESS_TOKEN</code> parameter).
     *
     * @return The access token, or <code>null</code> if not set.
     */
    public String getAccessToken() {
        return accessToken;
    }

    /**
     * Sets the access token.
     *
     * @param accessToken The access token.
     */
    public void setAccessToken(String accessToken) {
        this.accessToken = accessToken;
    }

    /**
     * Returns whether the "no redirect" header should be used.
     *
     * @return <code>true</code> if the no-redirect header is enabled.
     */
    public boolean isUseNoRedirectHeader() {
        return useNoRedirectHeader;
    }

    /**
     * Sets whether the "no redirect" header should be used.
     *
     * @param useNoRedirectHeader Whether to enable the no-redirect header.
     */
    public void setUseNoRedirectHeader(boolean useNoRedirectHeader) {
        this.useNoRedirectHeader = useNoRedirectHeader;
    }

    /** @return The response types rendered as a string via {@code Util.asString}. */
    public String getResponseTypesAsString() {
        return Util.asString(responseTypes);
    }

    /** @return The scopes rendered as a string via {@code Util.listAsString}. */
    public String getScopesAsString() {
        return Util.listAsString(scopes);
    }

    /** @return The prompts rendered as a string via {@code Util.asString}. */
    public String getPromptsAsString() {
        return Util.asString(prompts);
    }

    /** @return The UI locales rendered as a string via {@code Util.listAsString}. */
    public String getUiLocalesAsString() {
        return Util.listAsString(uiLocales);
    }

    /** @return The claims locales rendered as a string via {@code Util.listAsString}. */
    public String getClaimsLocalesAsString() {
        return Util.listAsString(claimsLocales);
    }

    /** @return The ACR values rendered as a string via {@code Util.listAsString}. */
    public String getAcrValuesAsString() {
        return Util.listAsString(acrValues);
    }

    /**
     * Returns the custom response headers rendered as a string via
     * {@code Util.mapAsString}.
     *
     * @return The custom response headers string.
     * @throws JSONException If the map cannot be serialized.
     */
    public String getCustomResponseHeadersAsString() throws JSONException {
        return Util.mapAsString(customResponseHeaders);
    }

    /**
     * Returns the custom response headers map.
     *
     * @return The custom response headers, or <code>null</code> if not set.
     */
    public Map<String, String> getCustomResponseHeaders() {
        return customResponseHeaders;
    }

    /**
     * Sets the custom response headers map.
     *
     * @param customResponseHeaders The custom response headers.
     */
    public void setCustomResponseHeaders(Map<String, String> customResponseHeaders) {
        this.customResponseHeaders = customResponseHeaders;
    }

    /**
     * Returns the claims JSON object serialized to a string.
     *
     * @return The claims as a string, or <code>null</code> when no claims are set.
     */
    public String getClaimsAsString() {
        if (claims != null) {
            return claims.toString();
        } else {
            return null;
        }
    }
/**
* Returns a query string with the parameters of the authorization request.
* Any <code>null</code> or empty parameter will be omitted.
*
* @return A query string of parameters.
*/
@Override
public String getQueryString() {
StringBuilder queryStringBuilder = new StringBuilder();
try {
// OAuth 2.0 request parameters
final String responseTypesAsString = getResponseTypesAsString();
final String scopesAsString = getScopesAsString();
final String promptsAsString = getPromptsAsString();
final String customResponseHeadersAsString = getCustomResponseHeadersAsString();
if (StringUtils.isNotBlank(responseTypesAsString)) {
queryStringBuilder.append(AuthorizeRequestParam.RESPONSE_TYPE)
.append("=").append(URLEncoder.encode(responseTypesAsString, Util.UTF8_STRING_ENCODING));
}
if (StringUtils.isNotBlank(clientId)) {
queryStringBuilder.append("&").append(AuthorizeRequestParam.CLIENT_ID)
.append("=").append(URLEncoder.encode(clientId, Util.UTF8_STRING_ENCODING));
}
if (StringUtils.isNotBlank(scopesAsString)) {
queryStringBuilder.append("&").append(AuthorizeRequestParam.SCOPE)
.append("=").append(URLEncoder.encode(scopesAsString, Util.UTF8_STRING_ENCODING));
}
if (StringUtils.isNotBlank(redirectUri)) {
queryStringBuilder.append("&").append(AuthorizeRequestParam.REDIRECT_URI)
.append("=").append(URLEncoder.encode(redirectUri, Util.UTF8_STRING_ENCODING));
}
if (StringUtils.isNotBlank(state)) {
queryStringBuilder.append("&").append(AuthorizeRequestParam.STATE)
.append("=").append(URLEncoder.encode(state, Util.UTF8_STRING_ENCODING));
}
// OpenID Connect request parameters
final String uiLocalesAsString = getUiLocalesAsString();
final String claimLocalesAsString = getClaimsLocalesAsString();
final String acrValuesAsString = getAcrValuesAsString();
final String claimsAsString = getClaimsAsString();
if (responseMode != null) {
queryStringBuilder.append("&").append(AuthorizeRequestParam.RESPONSE_MODE)
.append("=").append(URLEncoder.encode(responseMode.toString(), Util.UTF8_STRING_ENCODING));
}
if (StringUtils.isNotBlank(nonce)) {
queryStringBuilder.append("&").append(AuthorizeRequestParam.NONCE)
.append("=").append(URLEncoder.encode(nonce, Util.UTF8_STRING_ENCODING));
}
if (display != null) {
queryStringBuilder.append("&").append(AuthorizeRequestParam.DISPLAY)
.append("=").append(URLEncoder.encode(display.toString(), Util.UTF8_STRING_ENCODING));
}
if (StringUtils.isNotBlank(promptsAsString)) {
queryStringBuilder.append("&").append(AuthorizeRequestParam.PROMPT)
.append("=").append(URLEncoder.encode(promptsAsString, Util.UTF8_STRING_ENCODING));
}
if (maxAge != null) {
queryStringBuilder.append("&").append(AuthorizeRequestParam.MAX_AGE)
.append("=").append(maxAge);
}
if (StringUtils.isNotBlank(uiLocalesAsString)) {
queryStringBuilder.append("&").append(AuthorizeRequestParam.UI_LOCALES)
.append("=").append(URLEncoder.encode(uiLocalesAsString, Util.UTF8_STRING_ENCODING));
}
if (StringUtils.isNotBlank(claimLocalesAsString)) {
queryStringBuilder.append("&").append(AuthorizeRequestParam.CLAIMS_LOCALES)
.append("=").append(URLEncoder.encode(claimLocalesAsString, Util.UTF8_STRING_ENCODING));
}
if (StringUtils.isNotBlank(idTokenHint)) {
queryStringBuilder.append("&").append(AuthorizeRequestParam.ID_TOKEN_HINT)
.append("=").append(idTokenHint);
}
if (StringUtils.isNotBlank(loginHint)) {
queryStringBuilder.append("&").append(AuthorizeRequestParam.LOGIN_HINT)
.append("=").append(loginHint);
}
if (StringUtils.isNotBlank(acrValuesAsString)) {
queryStringBuilder.append("&").append(AuthorizeRequestParam.ACR_VALUES)
.append("=").append(URLEncoder.encode(acrValuesAsString, Util.UTF8_STRING_ENCODING));
}
if (StringUtils.isNotBlank(claimsAsString)) {
queryStringBuilder.append("&").append(AuthorizeRequestParam.CLAIMS)
.append("=").append(URLEncoder.encode(claimsAsString, Util.UTF8_STRING_ENCODING));
}
if (StringUtils.isNotBlank(registration)) {
queryStringBuilder.append("&").append(AuthorizeRequestParam.REGISTRATION)
.append("=").append(registration);
}
if (StringUtils.isNotBlank(request)) {
queryStringBuilder.append("&").append(AuthorizeRequestParam.REQUEST)
.append("=").append(URLEncoder.encode(request, Util.UTF8_STRING_ENCODING));
}
if (StringUtils.isNotBlank(requestUri)) {
queryStringBuilder.append("&").append(AuthorizeRequestParam.REQUEST_URI)
.append("=").append(URLEncoder.encode(requestUri, Util.UTF8_STRING_ENCODING));
}
if (requestSessionId) {
queryStringBuilder.append("&").append(AuthorizeRequestParam.REQUEST_SESSION_ID)
.append("=").append(URLEncoder.encode(Boolean.toString(requestSessionId), Util.UTF8_STRING_ENCODING));
}
if (StringUtils.isNotBlank(sessionId)) {
queryStringBuilder.append("&").append(AuthorizeRequestParam.SESSION_ID)
.append("=").append(URLEncoder.encode(sessionId, Util.UTF8_STRING_ENCODING));
}
if (StringUtils.isNotBlank(accessToken)) {
queryStringBuilder.append("&").append(AuthorizeRequestParam.ACCESS_TOKEN)
.append("=").append(URLEncoder.encode(accessToken, Util.UTF8_STRING_ENCODING));
}
if (StringUtils.isNotBlank(codeChallenge)) {
queryStringBuilder.append("&").append(AuthorizeRequestParam.CODE_CHALLENGE)
.append("=").append(codeChallenge);
}
if (StringUtils.isNotBlank(codeChallengeMethod)) {
queryStringBuilder.append("&").append(AuthorizeRequestParam.CODE_CHALLENGE_METHOD)
.append("=").append(codeChallengeMethod);
}
if (StringUtils.isNotBlank(customResponseHeadersAsString)) {
queryStringBuilder.append("&").append(AuthorizeRequestParam.CUSTOM_RESPONSE_HEADERS)
.append("=").append(URLEncoder.encode(customResponseHeadersAsString, Util.UTF8_STRING_ENCODING));
}
for (String key : getCustomParameters().keySet()) {
queryStringBuilder.append("&");
queryStringBuilder.append(key).append("=").append(getCustomParameters().get(key));
}
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
} catch (JSONException e) {
e.printStackTrace();
}
return queryStringBuilder.toString();
}
/**
* Returns a collection of parameters of the authorization request. Any
* <code>null</code> or empty parameter will be omitted.
*
* @return A collection of parameters.
*/
public Map<String, String> getParameters() {
Map<String, String> parameters = new HashMap<String, String>();
try {
// OAuth 2.0 request parameters
final String responseTypesAsString = getResponseTypesAsString();
final String scopesAsString = getScopesAsString();
final String promptsAsString = getPromptsAsString();
final String customResponseHeadersAsString = getCustomResponseHeadersAsString();
if (StringUtils.isNotBlank(responseTypesAsString)) {
parameters.put(AuthorizeRequestParam.RESPONSE_TYPE, responseTypesAsString);
}
if (StringUtils.isNotBlank(clientId)) {
parameters.put(AuthorizeRequestParam.CLIENT_ID, clientId);
}
if (StringUtils.isNotBlank(scopesAsString)) {
parameters.put(AuthorizeRequestParam.SCOPE, scopesAsString);
}
if (StringUtils.isNotBlank(redirectUri)) {
parameters.put(AuthorizeRequestParam.REDIRECT_URI, redirectUri);
}
if (StringUtils.isNotBlank(state)) {
parameters.put(AuthorizeRequestParam.STATE, state);
}
// OpenID Connect request parameters
final String uiLocalesAsString = getUiLocalesAsString();
final String claimLocalesAsString = getClaimsLocalesAsString();
final String acrValuesAsString = getAcrValuesAsString();
final String claimsAsString = getClaimsAsString();
if (responseMode != null) {
parameters.put(AuthorizeRequestParam.RESPONSE_MODE, responseMode.toString());
}
if (StringUtils.isNotBlank(nonce)) {
parameters.put(AuthorizeRequestParam.NONCE, nonce);
}
if (display != null) {
parameters.put(AuthorizeRequestParam.DISPLAY, display.toString());
}
if (StringUtils.isNotBlank(promptsAsString)) {
parameters.put(AuthorizeRequestParam.PROMPT, promptsAsString);
}
if (maxAge != null) {
parameters.put(AuthorizeRequestParam.MAX_AGE, maxAge.toString());
}
if (StringUtils.isNotBlank(uiLocalesAsString)) {
parameters.put(AuthorizeRequestParam.UI_LOCALES, uiLocalesAsString);
}
if (StringUtils.isNotBlank(claimLocalesAsString)) {
parameters.put(AuthorizeRequestParam.CLAIMS_LOCALES, claimLocalesAsString);
}
if (StringUtils.isNotBlank(idTokenHint)) {
parameters.put(AuthorizeRequestParam.ID_TOKEN_HINT, idTokenHint);
}
if (StringUtils.isNotBlank(loginHint)) {
parameters.put(AuthorizeRequestParam.LOGIN_HINT, loginHint);
}
if (StringUtils.isNotBlank(acrValuesAsString)) {
parameters.put(AuthorizeRequestParam.ACR_VALUES, acrValuesAsString);
}
if (StringUtils.isNotBlank(claimsAsString)) {
parameters.put(AuthorizeRequestParam.CLAIMS, claimsAsString);
}
if (StringUtils.isNotBlank(registration)) {
parameters.put(AuthorizeRequestParam.REGISTRATION, registration);
}
if (StringUtils.isNotBlank(request)) {
parameters.put(AuthorizeRequestParam.REQUEST, request);
}
if (StringUtils.isNotBlank(requestUri)) {
parameters.put(AuthorizeRequestParam.REQUEST_URI, requestUri);
}
if (requestSessionId) {
parameters.put(AuthorizeRequestParam.REQUEST_SESSION_ID, Boolean.toString(requestSessionId));
}
if (StringUtils.isNotBlank(sessionId)) {
parameters.put(AuthorizeRequestParam.SESSION_ID, sessionId);
}
if (StringUtils.isNotBlank(accessToken)) {
parameters.put(AuthorizeRequestParam.ACCESS_TOKEN, accessToken);
}
if (StringUtils.isNotBlank(codeChallenge)) {
parameters.put(AuthorizeRequestParam.CODE_CHALLENGE, codeChallenge);
}
if (StringUtils.isNotBlank(codeChallengeMethod)) {
parameters.put(AuthorizeRequestParam.CODE_CHALLENGE_METHOD, codeChallengeMethod);
}
if (StringUtils.isNotBlank(customResponseHeadersAsString)) {
parameters.put(AuthorizeRequestParam.CUSTOM_RESPONSE_HEADERS, customResponseHeadersAsString);
}
for (String key : getCustomParameters().keySet()) {
parameters.put(key, getCustomParameters().get(key));
}
} catch (JSONException e) {
e.printStackTrace();
}
return parameters;
}
}
|
|
/**
*/
package dk.dtu.se2.petrinet.impl;
import dk.dtu.se2.petrinet.AnimationLabel;
import dk.dtu.se2.petrinet.GeometryLabel;
import dk.dtu.se2.petrinet.InputPlace;
import dk.dtu.se2.petrinet.PetrinetPackage;
import dk.dtu.se2.petrinet.Place;
import dk.dtu.se2.petrinet.Token;
import java.util.Collection;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.InternalEList;
/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Place</b></em>'.
 * <p>
 * NOTE(review): this class is EMF-generated (see the "@generated" tags below).
 * Prefer regenerating from the Ecore model over hand edits; any hand-modified
 * member must be retagged "@generated NOT" or the generator will overwrite it.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * <ul>
 *   <li>{@link dk.dtu.se2.petrinet.impl.PlaceImpl#getGeometryLabel <em>Geometry Label</em>}</li>
 *   <li>{@link dk.dtu.se2.petrinet.impl.PlaceImpl#getAnimationLabel <em>Animation Label</em>}</li>
 *   <li>{@link dk.dtu.se2.petrinet.impl.PlaceImpl#getTokens <em>Tokens</em>}</li>
 *   <li>{@link dk.dtu.se2.petrinet.impl.PlaceImpl#getInputPlaceLabel <em>Input Place Label</em>}</li>
 * </ul>
 * </p>
 *
 * @generated
 */
public class PlaceImpl extends org.pnml.tools.epnk.pnmlcoremodel.impl.PlaceImpl implements Place {
    /**
     * The cached value of the '{@link #getGeometryLabel() <em>Geometry Label</em>}' containment reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getGeometryLabel()
     * @generated
     * @ordered
     */
    protected GeometryLabel geometryLabel;
    /**
     * The cached value of the '{@link #getAnimationLabel() <em>Animation Label</em>}' containment reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getAnimationLabel()
     * @generated
     * @ordered
     */
    protected AnimationLabel animationLabel;
    /**
     * The cached value of the '{@link #getTokens() <em>Tokens</em>}' containment reference list.
     * <!-- begin-user-doc -->
     * Lazily created by {@link #getTokens()}; <code>null</code> until first access.
     * <!-- end-user-doc -->
     * @see #getTokens()
     * @generated
     * @ordered
     */
    protected EList<Token> tokens;
    /**
     * The cached value of the '{@link #getInputPlaceLabel() <em>Input Place Label</em>}' containment reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getInputPlaceLabel()
     * @generated
     * @ordered
     */
    protected InputPlace inputPlaceLabel;
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected PlaceImpl() {
        super();
    }
    /**
     * <!-- begin-user-doc -->
     * Identifies this object's EClass for the EMF reflective API.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return PetrinetPackage.Literals.PLACE;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public GeometryLabel getGeometryLabel() {
        return geometryLabel;
    }
    /**
     * <!-- begin-user-doc -->
     * Swaps the reference and queues a SET notification onto the chain without
     * dispatching it; the caller is responsible for dispatching.
     * <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetGeometryLabel(GeometryLabel newGeometryLabel, NotificationChain msgs) {
        GeometryLabel oldGeometryLabel = geometryLabel;
        geometryLabel = newGeometryLabel;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, PetrinetPackage.PLACE__GEOMETRY_LABEL, oldGeometryLabel, newGeometryLabel);
            if (msgs == null) msgs = notification; else msgs.add(notification);
        }
        return msgs;
    }
    /**
     * <!-- begin-user-doc -->
     * Sets the containment reference, detaching the old child and attaching the
     * new one (inverse remove/add) before dispatching the collected notifications.
     * <!-- end-user-doc -->
     * @generated
     */
    public void setGeometryLabel(GeometryLabel newGeometryLabel) {
        if (newGeometryLabel != geometryLabel) {
            NotificationChain msgs = null;
            if (geometryLabel != null)
                msgs = ((InternalEObject)geometryLabel).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - PetrinetPackage.PLACE__GEOMETRY_LABEL, null, msgs);
            if (newGeometryLabel != null)
                msgs = ((InternalEObject)newGeometryLabel).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - PetrinetPackage.PLACE__GEOMETRY_LABEL, null, msgs);
            msgs = basicSetGeometryLabel(newGeometryLabel, msgs);
            if (msgs != null) msgs.dispatch();
        }
        else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, PetrinetPackage.PLACE__GEOMETRY_LABEL, newGeometryLabel, newGeometryLabel));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public AnimationLabel getAnimationLabel() {
        return animationLabel;
    }
    /**
     * <!-- begin-user-doc -->
     * Same pattern as {@link #basicSetGeometryLabel}: swap and queue, no dispatch.
     * <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetAnimationLabel(AnimationLabel newAnimationLabel, NotificationChain msgs) {
        AnimationLabel oldAnimationLabel = animationLabel;
        animationLabel = newAnimationLabel;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, PetrinetPackage.PLACE__ANIMATION_LABEL, oldAnimationLabel, newAnimationLabel);
            if (msgs == null) msgs = notification; else msgs.add(notification);
        }
        return msgs;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setAnimationLabel(AnimationLabel newAnimationLabel) {
        if (newAnimationLabel != animationLabel) {
            NotificationChain msgs = null;
            if (animationLabel != null)
                msgs = ((InternalEObject)animationLabel).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - PetrinetPackage.PLACE__ANIMATION_LABEL, null, msgs);
            if (newAnimationLabel != null)
                msgs = ((InternalEObject)newAnimationLabel).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - PetrinetPackage.PLACE__ANIMATION_LABEL, null, msgs);
            msgs = basicSetAnimationLabel(newAnimationLabel, msgs);
            if (msgs != null) msgs.dispatch();
        }
        else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, PetrinetPackage.PLACE__ANIMATION_LABEL, newAnimationLabel, newAnimationLabel));
    }
    /**
     * <!-- begin-user-doc -->
     * Lazily creates the containment list on first access.
     * <!-- end-user-doc -->
     * @generated
     */
    public EList<Token> getTokens() {
        if (tokens == null) {
            tokens = new EObjectContainmentEList<Token>(Token.class, this, PetrinetPackage.PLACE__TOKENS);
        }
        return tokens;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public InputPlace getInputPlaceLabel() {
        return inputPlaceLabel;
    }
    /**
     * <!-- begin-user-doc -->
     * Same pattern as {@link #basicSetGeometryLabel}: swap and queue, no dispatch.
     * <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetInputPlaceLabel(InputPlace newInputPlaceLabel, NotificationChain msgs) {
        InputPlace oldInputPlaceLabel = inputPlaceLabel;
        inputPlaceLabel = newInputPlaceLabel;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, PetrinetPackage.PLACE__INPUT_PLACE_LABEL, oldInputPlaceLabel, newInputPlaceLabel);
            if (msgs == null) msgs = notification; else msgs.add(notification);
        }
        return msgs;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setInputPlaceLabel(InputPlace newInputPlaceLabel) {
        if (newInputPlaceLabel != inputPlaceLabel) {
            NotificationChain msgs = null;
            if (inputPlaceLabel != null)
                msgs = ((InternalEObject)inputPlaceLabel).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - PetrinetPackage.PLACE__INPUT_PLACE_LABEL, null, msgs);
            if (newInputPlaceLabel != null)
                msgs = ((InternalEObject)newInputPlaceLabel).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - PetrinetPackage.PLACE__INPUT_PLACE_LABEL, null, msgs);
            msgs = basicSetInputPlaceLabel(newInputPlaceLabel, msgs);
            if (msgs != null) msgs.dispatch();
        }
        else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, PetrinetPackage.PLACE__INPUT_PLACE_LABEL, newInputPlaceLabel, newInputPlaceLabel));
    }
    /**
     * <!-- begin-user-doc -->
     * Detaches a contained child via the reflective API.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
        switch (featureID) {
            case PetrinetPackage.PLACE__GEOMETRY_LABEL:
                return basicSetGeometryLabel(null, msgs);
            case PetrinetPackage.PLACE__ANIMATION_LABEL:
                return basicSetAnimationLabel(null, msgs);
            case PetrinetPackage.PLACE__TOKENS:
                return ((InternalEList<?>)getTokens()).basicRemove(otherEnd, msgs);
            case PetrinetPackage.PLACE__INPUT_PLACE_LABEL:
                return basicSetInputPlaceLabel(null, msgs);
        }
        return super.eInverseRemove(otherEnd, featureID, msgs);
    }
    /**
     * <!-- begin-user-doc -->
     * Reflective feature read.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case PetrinetPackage.PLACE__GEOMETRY_LABEL:
                return getGeometryLabel();
            case PetrinetPackage.PLACE__ANIMATION_LABEL:
                return getAnimationLabel();
            case PetrinetPackage.PLACE__TOKENS:
                return getTokens();
            case PetrinetPackage.PLACE__INPUT_PLACE_LABEL:
                return getInputPlaceLabel();
        }
        return super.eGet(featureID, resolve, coreType);
    }
    /**
     * <!-- begin-user-doc -->
     * Reflective feature write.
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case PetrinetPackage.PLACE__GEOMETRY_LABEL:
                setGeometryLabel((GeometryLabel)newValue);
                return;
            case PetrinetPackage.PLACE__ANIMATION_LABEL:
                setAnimationLabel((AnimationLabel)newValue);
                return;
            case PetrinetPackage.PLACE__TOKENS:
                getTokens().clear();
                getTokens().addAll((Collection<? extends Token>)newValue);
                return;
            case PetrinetPackage.PLACE__INPUT_PLACE_LABEL:
                setInputPlaceLabel((InputPlace)newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }
    /**
     * <!-- begin-user-doc -->
     * Reflective feature reset to default (null/empty).
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case PetrinetPackage.PLACE__GEOMETRY_LABEL:
                setGeometryLabel((GeometryLabel)null);
                return;
            case PetrinetPackage.PLACE__ANIMATION_LABEL:
                setAnimationLabel((AnimationLabel)null);
                return;
            case PetrinetPackage.PLACE__TOKENS:
                getTokens().clear();
                return;
            case PetrinetPackage.PLACE__INPUT_PLACE_LABEL:
                setInputPlaceLabel((InputPlace)null);
                return;
        }
        super.eUnset(featureID);
    }
    /**
     * <!-- begin-user-doc -->
     * Reflective "is feature set" check.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case PetrinetPackage.PLACE__GEOMETRY_LABEL:
                return geometryLabel != null;
            case PetrinetPackage.PLACE__ANIMATION_LABEL:
                return animationLabel != null;
            case PetrinetPackage.PLACE__TOKENS:
                return tokens != null && !tokens.isEmpty();
            case PetrinetPackage.PLACE__INPUT_PLACE_LABEL:
                return inputPlaceLabel != null;
        }
        return super.eIsSet(featureID);
    }
} //PlaceImpl
|
|
/**
* Licensed to Apereo under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright ownership. Apereo
* licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the License at the
* following location:
*
* <p>http://www.apache.org/licenses/LICENSE-2.0
*
* <p>Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apereo.portal.persondir;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.commons.lang.Validate;
import org.apache.commons.lang3.StringUtils;
import org.apereo.services.persondir.IPersonAttributeDao;
import org.apereo.services.persondir.IPersonAttributes;
import org.apereo.services.persondir.support.AbstractFlatteningPersonAttributeDao;
import org.apereo.services.persondir.support.IUsernameAttributeProvider;
import org.apereo.services.persondir.support.MultivaluedPersonAttributeUtils;
import org.apereo.services.persondir.support.NamedPersonImpl;
import org.apereo.services.persondir.support.merger.IAttributeMerger;
import org.apereo.services.persondir.support.merger.ReplacingAttributeAdder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Required;
/**
* This bean is the root of the User Attributes subsystem in uPortal. It provides support for
* overriding certain attributes for certain users. (By default it uses a concurrent hash map to
* manage admin-specified overrides.) It will also do its best to fill required uPortal attributes
* if they are absent.
*
* @since 5.0
*/
public class PortalRootPersonAttributeDao extends AbstractFlatteningPersonAttributeDao {
    protected static final String CUSTOMARY_FIRST_NAME_ATTRIBUTE = "givenName";
    protected static final String CUSTOMARY_LAST_NAME_ATTRIBUTE = "sn";
    // ReplacingAttributeAdder: override values replace delegate-provided values.
    private final IAttributeMerger attributeMerger = new ReplacingAttributeAdder();
    // uid -> attribute overrides; concurrent because overrides may be added/removed at runtime.
    private Map<String, Map<String, List<Object>>> overridesMap = new ConcurrentHashMap<>();
    private IPersonAttributeDao delegatePersonAttributeDao;
    private IUsernameAttributeProvider usernameAttributeProvider;
    private final Logger logger = LoggerFactory.getLogger(getClass());
    /** @return the delegatePersonAttributeDao */
    public IPersonAttributeDao getDelegatePersonAttributeDao() {
        return delegatePersonAttributeDao;
    }

    /**
     * @param delegatePersonAttributeDao the delegatePersonAttributeDao to set; must not be null
     */
    // NOTE(review): @Required is deprecated in Spring 5.1+; consider constructor injection — confirm Spring version in use.
    @Required
    public void setDelegatePersonAttributeDao(IPersonAttributeDao delegatePersonAttributeDao) {
        Validate.notNull(delegatePersonAttributeDao, "delegatePersonAttributeDao can not be null");
        this.delegatePersonAttributeDao = delegatePersonAttributeDao;
    }
    /** Injects the provider used to resolve the username attribute. */
    @Autowired
    public void setUsernameAttributeProvider(IUsernameAttributeProvider usernameAttributeProvider) {
        this.usernameAttributeProvider = usernameAttributeProvider;
    }

    /** @return the overridesMap (live reference, not a copy) */
    public Map<String, Map<String, List<Object>>> getAttributeOverridesMap() {
        return overridesMap;
    }
    /** @param overridesMap the overridesMap to set; must not be null (replaces the whole map) */
    public void setAttributeOverridesMap(Map<String, Map<String, List<Object>>> overridesMap) {
        Validate.notNull(overridesMap, "overridesMap can not be null");
        this.overridesMap = overridesMap;
    }
public void setUserAttributeOverride(String uid, Map<String, Object> attributes) {
// Not really a seed but the function still works
final Map<String, List<Object>> multivaluedAttributes =
MultivaluedPersonAttributeUtils.toMultivaluedMap(attributes);
// Update the overrides map
overridesMap.put(uid, multivaluedAttributes);
}
    /**
     * Removes all attribute overrides for the specified user; a no-op when none exist.
     *
     * @param uid The username whose overrides are discarded.
     */
    public void removeUserAttributeOverride(String uid) {
        // Remove the uid from the overrides map
        overridesMap.remove(uid);
    }
/**
* This method is for "filling" a specific individual.
*
* @param uid The username (identity) of a known individual
* @return A completely "filled" person, including overrides, or <code>null</code> if the person
* is unknown
*/
@Override
public IPersonAttributes getPerson(String uid) {
final IPersonAttributes rslt = delegatePersonAttributeDao.getPerson(uid);
if (rslt == null) {
// Nothing we can do with that
return null;
}
return postProcessPerson(rslt, uid);
}
/**
 * This method is for matching a search query. Each matching item will subsequently be passed to
 * <code>getPerson(uid)</code> for "filling."
 */
@Override
public Set<IPersonAttributes> getPeopleWithMultivaluedAttributes(
        Map<String, List<Object>> query) {
    final Set<IPersonAttributes> matches =
            delegatePersonAttributeDao.getPeopleWithMultivaluedAttributes(query);
    if (matches == null) {
        return null;
    }
    // LinkedHashSet preserves the delegate's iteration order.
    final Set<IPersonAttributes> results = new LinkedHashSet<>();
    for (final IPersonAttributes match : matches) {
        /*
         * WARNING: Not safe to pass uidInQuery in this scenario; results will be "filled" by
         * getPerson(uid) subsequently.
         */
        results.add(postProcessPerson(match, null));
    }
    return results;
}
/** Pure delegation; see {@link IPersonAttributeDao#getPossibleUserAttributeNames()}. */
@Override
public Set<String> getPossibleUserAttributeNames() {
    return delegatePersonAttributeDao.getPossibleUserAttributeNames();
}
/** Pure delegation; see {@link IPersonAttributeDao#getAvailableQueryAttributes()}. */
@Override
public Set<String> getAvailableQueryAttributes() {
    return delegatePersonAttributeDao.getAvailableQueryAttributes();
}
/**
 * Implements support for overriding attributes by administrator choice and for intelligently
 * selecting (if possible) values for 'username' and 'displayName' if none have been provided by
 * other data sources.
 *
 * @param person The user
 * @param uidInQuery The username specified in the PersonDirectory query, if certain
 * @return The user, possibly with modifications
 */
private IPersonAttributes postProcessPerson(IPersonAttributes person, String uidInQuery) {
    // A nameless person can't be keyed into the overrides map or aliased; return unchanged.
    if (person.getName() == null) {
        logger.warn("IPerson '{}' has no name and therefore cannot be post-processed", person);
        return person;
    }
    // Apply administrator-specified overrides first, then back-fill the two attributes
    // uPortal cannot function without: username and displayName.
    IPersonAttributes processed = applyOverridesIfPresent(person);
    processed = selectUsernameIfAbsent(processed);
    processed = selectDisplayNameIfAbsent(processed);
    logger.debug(
            "Post-processing of person with name='{}' produced the following person: {}",
            person.getName(),
            processed);
    return processed;
}
/**
 * Merges any administrator-specified attribute overrides for this user into the person.
 *
 * @param person the user, whose name keys the overrides map
 * @return the original person when no overrides exist, otherwise a merged copy
 */
protected IPersonAttributes applyOverridesIfPresent(IPersonAttributes person) {
    final Map<String, List<Object>> overrides = overridesMap.get(person.getName());
    if (overrides == null) {
        // Common case: no overrides registered for this user.
        return person;
    }
    logger.debug(
            "Overriding the following collection of attributes for user '{}': {}",
            person.getName(),
            overrides);
    // Merge into a mutable copy; the person's own attribute map may be unmodifiable.
    final Map<String, List<Object>> mergedAttributes =
            attributeMerger.mergeAttributes(
                    new LinkedHashMap<>(person.getAttributes()), overrides);
    return new NamedPersonImpl(person.getName(), mergedAttributes);
}
/**
 * Adds the configured username attribute -- aliased from {@code person.getName()} -- when the
 * person's attribute map does not already contain it.
 *
 * @param person the user; never null
 * @return the same person, or a copy with the username attribute added
 */
protected IPersonAttributes selectUsernameIfAbsent(IPersonAttributes person) {
    IPersonAttributes rslt = person; // default -- won't normally need to do anything
    final String usernameAttribute = usernameAttributeProvider.getUsernameAttribute();
    if (!rslt.getAttributes().containsKey(usernameAttribute) && person.getName() != null) {
        // Alias the person.name property as the username attribute.
        // FIX: the format string was missing its second '{}' placeholder, so the trailing
        // 'person' argument was silently dropped from the log output.
        logger.debug("Adding attribute username='{}' for person: {}", person.getName(), person);
        final Map<String, List<Object>> attributes = person.getAttributes();
        final Map<String, List<Object>> mutableMap = new LinkedHashMap<>(attributes);
        mutableMap.put(usernameAttribute, Collections.singletonList(person.getName()));
        rslt = new NamedPersonImpl(person.getName(), mutableMap);
    }
    return rslt;
}
/**
 * Chooses a displayName when none was provided by the data sources: prefers
 * "firstName lastName" when both customary attributes are present and non-blank, otherwise
 * falls back on the username attribute if available.
 *
 * @param person the user; never null
 * @return the same person, or a copy with the displayName attribute added
 */
protected IPersonAttributes selectDisplayNameIfAbsent(IPersonAttributes person) {
    IPersonAttributes rslt = person; // default -- won't normally need to do anything
    if (!rslt.getAttributes().containsKey(ILocalAccountPerson.ATTR_DISPLAY_NAME)) {
        /*
         * This one is tougher; try some common attributes, but fall back on username.
         */
        final StringBuilder displayName = new StringBuilder();
        final Map<String, List<Object>> attributes = person.getAttributes();
        // FIX: the original called attributes.get(name).get(0).toString() unguarded, which
        // throws on a key mapped to an empty list or a null first element.
        final String firstName = firstValueAsString(attributes, CUSTOMARY_FIRST_NAME_ATTRIBUTE);
        final String lastName = firstValueAsString(attributes, CUSTOMARY_LAST_NAME_ATTRIBUTE);
        if (StringUtils.isNotBlank(firstName) && StringUtils.isNotBlank(lastName)) {
            // Prefer "${firstName} ${lastName}"
            displayName.append(firstName).append(" ").append(lastName);
        } else {
            // But fall back on username (if present)
            final String usernameAttribute = usernameAttributeProvider.getUsernameAttribute();
            final String username = firstValueAsString(attributes, usernameAttribute);
            if (StringUtils.isNotBlank(username)) {
                displayName.append(username);
            }
        }
        if (displayName.length() > 0) {
            logger.debug(
                    "Selected new displayName of '{}' for user '{}'",
                    displayName,
                    person.getName());
            final Map<String, List<Object>> mutableMap = new LinkedHashMap<>(attributes);
            mutableMap.put(
                    ILocalAccountPerson.ATTR_DISPLAY_NAME,
                    Collections.singletonList(displayName.toString()));
            rslt = new NamedPersonImpl(person.getName(), mutableMap);
        }
    }
    return rslt;
}

/**
 * Returns the first value of the named attribute as a String, or null when the attribute is
 * absent, empty, or its first element is null.
 */
private String firstValueAsString(Map<String, List<Object>> attributes, String name) {
    final List<Object> values = attributes.get(name);
    if (values == null || values.isEmpty() || values.get(0) == null) {
        return null;
    }
    return values.get(0).toString();
}
}
|
|
package org.geoscript.js.raster;
import org.geoscript.js.GeoObject;
import org.geoscript.js.geom.Bounds;
import org.geoscript.js.geom.Geometry;
import org.geoscript.js.geom.Point;
import org.geoscript.js.proj.Projection;
import org.geotools.coverage.Category;
import org.geotools.coverage.GridSampleDimension;
import org.geotools.coverage.grid.*;
import org.geotools.coverage.processing.CoverageProcessor;
import org.geotools.geometry.DirectPosition2D;
import org.geotools.geometry.jts.ReferencedEnvelope;
import org.geotools.process.raster.RangeLookupProcess;
import org.geotools.util.NumberRange;
import org.jaitools.numeric.Range;
import org.mozilla.javascript.*;
import org.mozilla.javascript.annotations.JSConstructor;
import org.mozilla.javascript.annotations.JSFunction;
import org.mozilla.javascript.annotations.JSGetter;
import org.opengis.coverage.SampleDimension;
import org.opengis.geometry.DirectPosition;
import org.opengis.geometry.Envelope;
import org.opengis.parameter.ParameterValueGroup;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.opengis.referencing.operation.TransformException;
import javax.media.jai.RasterFactory;
import java.awt.*;
import java.awt.image.DataBuffer;
import java.awt.image.WritableRaster;
import java.util.*;
import java.util.List;
/**
 * JavaScript-facing wrapper around a GeoTools {@link GridCoverage2D}, exposing raster
 * metadata (name, bounds, size, bands) and operations (crop, reproject, reclassify)
 * to Rhino scripts.
 */
public class Raster extends GeoObject implements Wrapper {

    /** The wrapped GeoTools coverage (null only for the shared class prototype). */
    private GridCoverage2D coverage;

    /** Prototype constructor required by the Rhino class machinery. */
    public Raster() {
        // Prototype
    }

    /** Wraps a coverage without attaching it to a JavaScript scope. */
    public Raster(GridCoverage2D coverage) {
        this.coverage = coverage;
    }

    /** Wraps a coverage and attaches it to the given JavaScript scope. */
    public Raster(Scriptable scope, GridCoverage2D coverage) {
        this(coverage);
        this.setParentScope(scope);
        this.setPrototype(Module.getClassPrototype(Raster.class));
    }

    /** @return the coverage's name */
    @JSGetter
    public String getName() {
        return this.coverage.getName().toString();
    }

    /** @return the raster's projection */
    @JSGetter
    public Projection getProj() {
        return new Projection(this.getParentScope(), this.coverage.getCoordinateReferenceSystem2D());
    }

    /** @return the raster's geographic bounds in the coverage's CRS */
    @JSGetter
    public Bounds getBounds() {
        Envelope env = coverage.getEnvelope();
        CoordinateReferenceSystem crs = env.getCoordinateReferenceSystem();
        if (crs == null) {
            // Fall back on the coverage's own CRS when the envelope carries none.
            crs = this.coverage.getCoordinateReferenceSystem2D();
        }
        double[] l = env.getLowerCorner().getCoordinate();
        double[] u = env.getUpperCorner().getCoordinate();
        ReferencedEnvelope referencedEnvelope = new ReferencedEnvelope(l[0], u[0], l[1], u[1], crs);
        return new Bounds(this.getParentScope(), referencedEnvelope);
    }

    /** @return a [width, height] array of the raster's pixel dimensions */
    @JSGetter
    public NativeArray getSize() {
        GridEnvelope2D gridEnvelope2D = coverage.getGridGeometry().getGridRange2D();
        return (NativeArray) javaToJS(Arrays.asList(
                (int) gridEnvelope2D.getWidth(),
                (int) gridEnvelope2D.getHeight()
        ), this.getParentScope());
    }

    /** @return the number of columns (pixels across) */
    @JSGetter
    public int getCols() {
        // Unbox via getInt rather than a raw (int) cast: javaToJS may hand back a
        // boxed Double, which a direct cast would reject with ClassCastException.
        return getInt(getSize().get(0));
    }

    /** @return the number of rows (pixels down) */
    @JSGetter
    public int getRows() {
        return getInt(getSize().get(1));
    }

    /** @return a Band wrapper for each sample dimension of the coverage */
    @JSGetter
    public NativeArray getBands() {
        List<Band> bands = new ArrayList<>();
        for (int i = 0; i < coverage.getNumSampleDimensions(); i++) {
            SampleDimension d = coverage.getSampleDimension(i);
            Band band = new Band(this.getParentScope(), d);
            bands.add(band);
        }
        return (NativeArray) javaToJS(bands, this.getParentScope());
    }

    /**
     * Converts pixel coordinates to a map-coordinate Point.
     *
     * @throws org.mozilla.javascript.EvaluatorException (JS "Error") when the grid
     *     transform fails
     */
    @JSFunction
    public Point getPoint(int x, int y) {
        GridGeometry2D gg = coverage.getGridGeometry();
        try {
            DirectPosition2D dp = (DirectPosition2D) gg.gridToWorld(new GridCoordinates2D(x, y));
            Map<String, Object> coords = new HashMap<>();
            coords.put("coordinates", Arrays.asList(dp.x, dp.y));
            return new Point(this.getParentScope(), (NativeObject) javaToJS(coords, this.getParentScope()));
        } catch (TransformException e) {
            throw ScriptRuntime.constructError("Error", "Error getting Point from pixel coordinates for Raster.");
        }
    }

    /** Converts a map-coordinate Point to pixel coordinates {x, y}. */
    @JSFunction
    public NativeObject getPixel(Point point) {
        GridGeometry2D gg = coverage.getGridGeometry();
        try {
            GridCoordinates2D gridCoordinates2D = gg.worldToGrid(new DirectPosition2D((double) point.getX(), (double) point.getY()));
            Map<String, Double> pixel = new HashMap<>();
            pixel.put("x", gridCoordinates2D.getX());
            pixel.put("y", gridCoordinates2D.getY());
            return (NativeObject) javaToJS(pixel, this.getParentScope());
        } catch (TransformException e) {
            throw ScriptRuntime.constructError("Error", "Error getting Pixel coordinate from Point for Raster.");
        }
    }

    /**
     * Evaluates the raster at a location, given either a Point (map coordinates) or a
     * JS object with pixel {@code x}/{@code y} properties.
     */
    @JSFunction
    public Object getValue(Object pointOrPixel) {
        Point point;
        if (pointOrPixel instanceof Point) {
            point = (Point) pointOrPixel;
        } else {
            NativeObject obj = (NativeObject) pointOrPixel;
            // FIX: Rhino stores JS numbers as Doubles; the original (int) casts on the
            // returned Objects threw ClassCastException. Unbox through getInt instead.
            int x = getInt(obj.get("x", this.getParentScope()));
            int y = getInt(obj.get("y", this.getParentScope()));
            point = getPoint(x, y);
        }
        DirectPosition dp = new DirectPosition2D(coverage.getCoordinateReferenceSystem2D(), (double) point.getX(), (double) point.getY());
        Object result = coverage.evaluate(dp);
        return javaToJS(result, getParentScope());
    }

    /** Crops this raster to a Bounds (envelope crop) or a Geometry (ROI crop). */
    @JSFunction
    public Raster crop(Object boundsOrGeometry) {
        CoverageProcessor processor = new CoverageProcessor();
        ParameterValueGroup params = processor.getOperation("CoverageCrop").getParameters();
        params.parameter("Source").setValue(coverage);
        if (boundsOrGeometry instanceof Bounds) {
            Bounds bounds = (Bounds) boundsOrGeometry;
            params.parameter("Envelope").setValue(new org.geotools.geometry.GeneralEnvelope(bounds.unwrap()));
        } else {
            Geometry geometry = (Geometry) boundsOrGeometry;
            params.parameter("ROI").setValue(geometry.unwrap());
        }
        GridCoverage2D newCoverage = (GridCoverage2D) processor.doOperation(params);
        return new Raster(this.getParentScope(), newCoverage);
    }

    /** Resamples this raster into the given projection. */
    @JSFunction
    public Raster reproject(Projection projection) {
        CoverageProcessor processor = new CoverageProcessor();
        ParameterValueGroup params = processor.getOperation("Resample").getParameters();
        params.parameter("Source").setValue(coverage);
        params.parameter("CoordinateReferenceSystem").setValue(projection.unwrap());
        GridCoverage2D newCoverage = (GridCoverage2D) processor.doOperation(params);
        return new Raster(this.getParentScope(), newCoverage);
    }

    /**
     * Builds a new raster mapping ranges of source values to fixed pixel values.
     *
     * @param ranges array of {min, max, value[, minIncluded, maxIncluded]} objects
     * @param options optional {band (default 0), noData (default 0)}
     */
    @JSFunction
    public Raster reclassify(NativeArray ranges, NativeObject options) {
        // FIX: getOrDefault returns Object; stored JS numbers are Doubles while the
        // defaults here are Integers, so the original direct (int)/(double) casts threw
        // ClassCastException in both directions. Unbox through Number instead.
        int band = ((Number) options.getOrDefault("band", 0)).intValue();
        double noData = ((Number) options.getOrDefault("noData", 0)).doubleValue();
        List<Range> rangeList = new ArrayList<>();
        int[] pixelValues = new int[ranges.size()];
        for (int i = 0; i < ranges.size(); i++) {
            NativeObject rangeObj = (NativeObject) ranges.get(i);
            pixelValues[i] = getInt(rangeObj.get("value"));
            rangeList.add(Range.create(
                    Double.parseDouble(rangeObj.get("min").toString()),
                    (boolean) rangeObj.getOrDefault("minIncluded", true),
                    Double.parseDouble(rangeObj.get("max").toString()),
                    (boolean) rangeObj.getOrDefault("maxIncluded", true)
            ));
        }
        RangeLookupProcess process = new RangeLookupProcess();
        GridCoverage2D newCoverage = process.execute(this.coverage, band, rangeList, pixelValues, noData, null);
        return new Raster(this.getParentScope(), newCoverage);
    }

    /** @return an object with per-band "min" and "max" value arrays */
    @JSGetter
    public NativeObject getExtrema() {
        CoverageProcessor processor = new CoverageProcessor();
        ParameterValueGroup params = processor.getOperation("Extrema").getParameters();
        params.parameter("Source").setValue(coverage);
        // Renamed local (was 'coverage') so it no longer shadows the field of the same name.
        GridCoverage2D extremaCoverage = (GridCoverage2D) processor.doOperation(params);
        Map<String, Object> values = new HashMap<>();
        values.put("min", extremaCoverage.getProperty("minimum"));
        values.put("max", extremaCoverage.getProperty("maximum"));
        return (NativeObject) javaToJS(values, this.getParentScope());
    }

    /** Minimum value of the given band, computed via Extrema when the dimension reports infinity. */
    @JSFunction
    public Object getMinValue(int band) {
        double minValue = this.coverage.getSampleDimension(band).getMinimumValue();
        if (Double.isInfinite(minValue)) {
            minValue = ((double[]) this.getExtrema().get("min"))[band];
        }
        return minValue;
    }

    /** Maximum value of the given band, computed via Extrema when the dimension reports infinity. */
    @JSFunction
    public Object getMaxValue(int band) {
        double maxValue = this.coverage.getSampleDimension(band).getMaximumValue();
        if (Double.isInfinite(maxValue)) {
            maxValue = ((double[]) this.getExtrema().get("max"))[band];
        }
        return maxValue;
    }

    /** @return the coverage's optimal [width, height] data block size */
    @JSGetter
    public NativeArray getBlockSize() {
        // NOTE(review): getOptimalDataBlockSizes() may return null when no preferred
        // size exists; this would NPE below. Confirm callers guard against that.
        int[] size = this.coverage.getOptimalDataBlockSizes();
        return (NativeArray) javaToJS(Arrays.asList(
                size[0],
                size[1]
        ), this.getParentScope());
    }

    /** @return the [x, y] size of one pixel in map units */
    @JSGetter
    public NativeArray getPixelSize() {
        Bounds bounds = this.getBounds();
        NativeArray size = this.getSize();
        return (NativeArray) javaToJS(Arrays.asList(
                ((double) bounds.getWidth()) / getInt(size.get(0)),
                ((double) bounds.getHeight()) / getInt(size.get(1))
        ), this.getParentScope());
    }

    /**
     * Coerces a JS value to int: Numbers directly, anything else via its decimal string
     * representation (truncating toward zero).
     */
    private int getInt(Object obj) {
        if (obj instanceof Number) {
            return ((Number) obj).intValue();
        }
        return (int) Double.parseDouble(obj.toString());
    }

    @Override
    public String toString() {
        return this.getName();
    }

    @Override
    public Object unwrap() {
        return coverage;
    }

    /**
     * JavaScript constructor: {@code new Raster(data, bounds)} where {@code data} is a 2D
     * array of numbers (rows of columns) and {@code bounds} is the geographic extent.
     */
    @JSConstructor
    public static Object constructor(Context cx, Object[] args, Function ctorObj, boolean inNewExpr) {
        NativeArray data = (NativeArray) args[0];
        Bounds bounds = (Bounds) args[1];
        // Track the observed value range for the sample dimension's category.
        double min = Double.MAX_VALUE;
        // FIX: was Double.MIN_VALUE, which is the smallest POSITIVE double -- max-tracking
        // broke for rasters containing only negative values. Use -Double.MAX_VALUE.
        double max = -Double.MAX_VALUE;
        float[][] matrix = new float[(int) data.getLength()][(int) (data.getLength() > 0 ? ((NativeArray) data.get(0)).getLength() : 0)];
        for (int i = 0; i < data.getLength(); i++) {
            NativeArray datum = (NativeArray) data.get(i);
            for (int j = 0; j < datum.getLength(); j++) {
                float value = ((Number) datum.get(j)).floatValue();
                if (!Float.isNaN(value) && value < min) {
                    min = value;
                }
                if (!Float.isNaN(value) && value > max) {
                    max = value;
                }
                matrix[i][j] = value;
            }
        }
        int width = matrix[0].length;
        int height = matrix.length;
        WritableRaster writableRaster = RasterFactory.createBandedRaster(DataBuffer.TYPE_FLOAT, width, height, 1, null);
        // Transpose while writing: matrix is rows-of-columns, the raster wants (x, y).
        for (int i = 0; i < width; i++) {
            for (int j = 0; j < height; j++) {
                writableRaster.setSample(i, j, 0, matrix[j][i]);
            }
        }
        GridCoverageFactory gridCoverageFactory = new GridCoverageFactory();
        Category category = new Category("Raster", Color.BLACK, NumberRange.create(min, max));
        GridSampleDimension gridSampleDimension = new GridSampleDimension("Raster", new Category[]{category}, null);
        GridCoverage2D coverage = gridCoverageFactory.create("Raster", writableRaster, bounds.unwrap(), new GridSampleDimension[]{gridSampleDimension});
        if (inNewExpr) {
            return new Raster(coverage);
        } else {
            return new Raster(ctorObj.getParentScope(), coverage);
        }
    }
}
|
|
package com.ctrip.framework.apollo.internals;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicLong;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.ctrip.framework.apollo.Config;
import com.ctrip.framework.apollo.ConfigChangeListener;
import com.ctrip.framework.apollo.build.ApolloInjector;
import com.ctrip.framework.apollo.core.utils.ApolloThreadFactory;
import com.ctrip.framework.apollo.enums.PropertyChangeType;
import com.ctrip.framework.apollo.exceptions.ApolloConfigException;
import com.ctrip.framework.apollo.model.ConfigChange;
import com.ctrip.framework.apollo.model.ConfigChangeEvent;
import com.ctrip.framework.apollo.tracer.Tracer;
import com.ctrip.framework.apollo.tracer.spi.Transaction;
import com.ctrip.framework.apollo.util.ConfigUtil;
import com.ctrip.framework.apollo.util.function.Functions;
import com.ctrip.framework.apollo.util.parser.Parsers;
import com.google.common.base.Function;
import com.google.common.base.Objects;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
/**
* @author Jason Song(song_s@ctrip.com)
*/
public abstract class AbstractConfig implements Config {
  private static final Logger logger = LoggerFactory.getLogger(AbstractConfig.class);
  // Shared daemon pool used to dispatch change events to listeners asynchronously.
  private static ExecutorService m_executorService;
  // Copy-on-write: registrations are rare; iteration happens on every config change.
  private List<ConfigChangeListener> m_listeners = Lists.newCopyOnWriteArrayList();
  private ConfigUtil m_configUtil;
  // Per-type parsed-value caches, created lazily via double-checked locking (hence volatile).
  private volatile Cache<String, Integer> m_integerCache;
  private volatile Cache<String, Long> m_longCache;
  private volatile Cache<String, Short> m_shortCache;
  private volatile Cache<String, Float> m_floatCache;
  private volatile Cache<String, Double> m_doubleCache;
  private volatile Cache<String, Byte> m_byteCache;
  private volatile Cache<String, Boolean> m_booleanCache;
  private volatile Cache<String, Date> m_dateCache;
  private volatile Cache<String, Long> m_durationCache;
  // One String[] cache per delimiter, since the same key may be split differently.
  private Map<String, Cache<String, String[]>> m_arrayCache;
  // Every cache ever created, so clearConfigCache() can invalidate them all at once.
  private List<Cache> allCaches;
  private AtomicLong m_configVersion; //indicate config version
  static {
    m_executorService = Executors.newCachedThreadPool(ApolloThreadFactory
        .create("Config", true));
  }
  public AbstractConfig() {
    m_configUtil = ApolloInjector.getInstance(ConfigUtil.class);
    m_configVersion = new AtomicLong();
    m_arrayCache = Maps.newConcurrentMap();
    allCaches = Lists.newArrayList();
  }
  /** Registers a change listener; duplicate registrations are ignored. */
  @Override
  public void addChangeListener(ConfigChangeListener listener) {
    if (!m_listeners.contains(listener)) {
      m_listeners.add(listener);
    }
  }
  /** Returns the Integer value for {@code key}, or {@code defaultValue} when absent or unparsable. */
  @Override
  public Integer getIntProperty(String key, Integer defaultValue) {
    try {
      // Lazily create the cache with double-checked locking on the volatile field.
      if (m_integerCache == null) {
        synchronized (this) {
          if (m_integerCache == null) {
            m_integerCache = newCache();
          }
        }
      }
      return getValueFromCache(key, Functions.TO_INT_FUNCTION, m_integerCache, defaultValue);
    } catch (Throwable ex) {
      Tracer.logError(new ApolloConfigException(
          String.format("getIntProperty for %s failed, return default value %d", key,
              defaultValue), ex));
    }
    return defaultValue;
  }
  /** Returns the Long value for {@code key}, or {@code defaultValue} when absent or unparsable. */
  @Override
  public Long getLongProperty(String key, Long defaultValue) {
    try {
      if (m_longCache == null) {
        synchronized (this) {
          if (m_longCache == null) {
            m_longCache = newCache();
          }
        }
      }
      return getValueFromCache(key, Functions.TO_LONG_FUNCTION, m_longCache, defaultValue);
    } catch (Throwable ex) {
      Tracer.logError(new ApolloConfigException(
          String.format("getLongProperty for %s failed, return default value %d", key,
              defaultValue), ex));
    }
    return defaultValue;
  }
  /** Returns the Short value for {@code key}, or {@code defaultValue} when absent or unparsable. */
  @Override
  public Short getShortProperty(String key, Short defaultValue) {
    try {
      if (m_shortCache == null) {
        synchronized (this) {
          if (m_shortCache == null) {
            m_shortCache = newCache();
          }
        }
      }
      return getValueFromCache(key, Functions.TO_SHORT_FUNCTION, m_shortCache, defaultValue);
    } catch (Throwable ex) {
      Tracer.logError(new ApolloConfigException(
          String.format("getShortProperty for %s failed, return default value %d", key,
              defaultValue), ex));
    }
    return defaultValue;
  }
  /** Returns the Float value for {@code key}, or {@code defaultValue} when absent or unparsable. */
  @Override
  public Float getFloatProperty(String key, Float defaultValue) {
    try {
      if (m_floatCache == null) {
        synchronized (this) {
          if (m_floatCache == null) {
            m_floatCache = newCache();
          }
        }
      }
      return getValueFromCache(key, Functions.TO_FLOAT_FUNCTION, m_floatCache, defaultValue);
    } catch (Throwable ex) {
      Tracer.logError(new ApolloConfigException(
          String.format("getFloatProperty for %s failed, return default value %f", key,
              defaultValue), ex));
    }
    return defaultValue;
  }
  /** Returns the Double value for {@code key}, or {@code defaultValue} when absent or unparsable. */
  @Override
  public Double getDoubleProperty(String key, Double defaultValue) {
    try {
      if (m_doubleCache == null) {
        synchronized (this) {
          if (m_doubleCache == null) {
            m_doubleCache = newCache();
          }
        }
      }
      return getValueFromCache(key, Functions.TO_DOUBLE_FUNCTION, m_doubleCache, defaultValue);
    } catch (Throwable ex) {
      Tracer.logError(new ApolloConfigException(
          String.format("getDoubleProperty for %s failed, return default value %f", key,
              defaultValue), ex));
    }
    return defaultValue;
  }
  /** Returns the Byte value for {@code key}, or {@code defaultValue} when absent or unparsable. */
  @Override
  public Byte getByteProperty(String key, Byte defaultValue) {
    try {
      if (m_byteCache == null) {
        synchronized (this) {
          if (m_byteCache == null) {
            m_byteCache = newCache();
          }
        }
      }
      return getValueFromCache(key, Functions.TO_BYTE_FUNCTION, m_byteCache, defaultValue);
    } catch (Throwable ex) {
      Tracer.logError(new ApolloConfigException(
          String.format("getByteProperty for %s failed, return default value %d", key,
              defaultValue), ex));
    }
    return defaultValue;
  }
  /** Returns the Boolean value for {@code key}, or {@code defaultValue} when absent or unparsable. */
  @Override
  public Boolean getBooleanProperty(String key, Boolean defaultValue) {
    try {
      if (m_booleanCache == null) {
        synchronized (this) {
          if (m_booleanCache == null) {
            m_booleanCache = newCache();
          }
        }
      }
      return getValueFromCache(key, Functions.TO_BOOLEAN_FUNCTION, m_booleanCache, defaultValue);
    } catch (Throwable ex) {
      Tracer.logError(new ApolloConfigException(
          String.format("getBooleanProperty for %s failed, return default value %b", key,
              defaultValue), ex));
    }
    return defaultValue;
  }
  /**
   * Returns the value for {@code key} split on {@code delimiter} (a regex, passed to
   * String.split), or {@code defaultValue} when absent or on failure. A separate cache is
   * kept per delimiter because the same raw value splits differently per delimiter.
   */
  @Override
  public String[] getArrayProperty(String key, final String delimiter, String[] defaultValue) {
    try {
      // Double-checked locking over the concurrent map, keyed by delimiter.
      if (!m_arrayCache.containsKey(delimiter)) {
        synchronized (this) {
          if (!m_arrayCache.containsKey(delimiter)) {
            m_arrayCache.put(delimiter, this.<String[]>newCache());
          }
        }
      }
      Cache<String, String[]> cache = m_arrayCache.get(delimiter);
      String[] result = cache.getIfPresent(key);
      if (result != null) {
        return result;
      }
      return getValueAndStoreToCache(key, new Function<String, String[]>() {
        @Override
        public String[] apply(String input) {
          return input.split(delimiter);
        }
      }, cache, defaultValue);
    } catch (Throwable ex) {
      Tracer.logError(new ApolloConfigException(
          String.format("getArrayProperty for %s failed, return default value", key), ex));
    }
    return defaultValue;
  }
  /** Returns the enum constant named by the property value, or {@code defaultValue}. Not cached. */
  @Override
  public <T extends Enum<T>> T getEnumProperty(String key, Class<T> enumType, T defaultValue) {
    try {
      String value = getProperty(key, null);
      if (value != null) {
        return Enum.valueOf(enumType, value);
      }
    } catch (Throwable ex) {
      Tracer.logError(new ApolloConfigException(
          String.format("getEnumProperty for %s failed, return default value %s", key,
              defaultValue), ex));
    }
    return defaultValue;
  }
  /** Returns the Date value for {@code key} using the default parser, or {@code defaultValue}. */
  @Override
  public Date getDateProperty(String key, Date defaultValue) {
    try {
      if (m_dateCache == null) {
        synchronized (this) {
          if (m_dateCache == null) {
            m_dateCache = newCache();
          }
        }
      }
      return getValueFromCache(key, Functions.TO_DATE_FUNCTION, m_dateCache, defaultValue);
    } catch (Throwable ex) {
      Tracer.logError(new ApolloConfigException(
          String.format("getDateProperty for %s failed, return default value %s", key,
              defaultValue), ex));
    }
    return defaultValue;
  }
  /** Returns the Date value parsed with an explicit format, or {@code defaultValue}. Not cached. */
  @Override
  public Date getDateProperty(String key, String format, Date defaultValue) {
    try {
      String value = getProperty(key, null);
      if (value != null) {
        return Parsers.forDate().parse(value, format);
      }
    } catch (Throwable ex) {
      Tracer.logError(new ApolloConfigException(
          String.format("getDateProperty for %s failed, return default value %s", key,
              defaultValue), ex));
    }
    return defaultValue;
  }
  /** Returns the Date value parsed with an explicit format and locale, or {@code defaultValue}. */
  @Override
  public Date getDateProperty(String key, String format, Locale locale, Date defaultValue) {
    try {
      String value = getProperty(key, null);
      if (value != null) {
        return Parsers.forDate().parse(value, format, locale);
      }
    } catch (Throwable ex) {
      Tracer.logError(new ApolloConfigException(
          String.format("getDateProperty for %s failed, return default value %s", key,
              defaultValue), ex));
    }
    return defaultValue;
  }
  /** Returns the duration value (in milliseconds) for {@code key}, or {@code defaultValue}. */
  @Override
  public long getDurationProperty(String key, long defaultValue) {
    try {
      if (m_durationCache == null) {
        synchronized (this) {
          if (m_durationCache == null) {
            m_durationCache = newCache();
          }
        }
      }
      return getValueFromCache(key, Functions.TO_DURATION_FUNCTION, m_durationCache, defaultValue);
    } catch (Throwable ex) {
      Tracer.logError(new ApolloConfigException(
          String.format("getDurationProperty for %s failed, return default value %d", key,
              defaultValue), ex));
    }
    return defaultValue;
  }
  /** Returns the cached parsed value for {@code key}, parsing and caching on a miss. */
  private <T> T getValueFromCache(String key, Function<String, T> parser, Cache<String, T> cache, T defaultValue) {
    T result = cache.getIfPresent(key);
    if (result != null) {
      return result;
    }
    return getValueAndStoreToCache(key, parser, cache, defaultValue);
  }
  /**
   * Parses the raw property and stores the result, but only when the config version has not
   * advanced since the raw value was read -- otherwise a stale value could be cached right
   * after a concurrent clearConfigCache().
   */
  private <T> T getValueAndStoreToCache(String key, Function<String, T> parser, Cache<String, T> cache, T defaultValue) {
    // Snapshot the version BEFORE reading the raw property.
    long currentConfigVersion = m_configVersion.get();
    String value = getProperty(key, null);
    if (value != null) {
      T result = parser.apply(value);
      if (result != null) {
        synchronized (this) {
          // Only cache when no config refresh happened in between.
          if (m_configVersion.get() == currentConfigVersion) {
            cache.put(key, result);
          }
        }
        return result;
      }
    }
    return defaultValue;
  }
  /** Builds a bounded, expiring Guava cache and records it for bulk invalidation. */
  private <T> Cache<String, T> newCache() {
    Cache<String, T> cache = CacheBuilder.newBuilder()
        .maximumSize(m_configUtil.getMaxConfigCacheSize())
        .expireAfterAccess(m_configUtil.getConfigCacheExpireTime(), m_configUtil.getConfigCacheExpireTimeUnit())
        .build();
    allCaches.add(cache);
    return cache;
  }
  /**
   * Clear config cache: invalidates every per-type cache and bumps the config version so
   * in-flight getValueAndStoreToCache calls discard their (now stale) results.
   */
  protected void clearConfigCache() {
    synchronized (this) {
      for (Cache c : allCaches) {
        if (c != null) {
          c.invalidateAll();
        }
      }
      m_configVersion.incrementAndGet();
    }
  }
  /**
   * Notifies every registered listener of the change event, each on the shared executor so a
   * slow or failing listener cannot block the config-refresh thread or other listeners.
   */
  protected void fireConfigChange(final ConfigChangeEvent changeEvent) {
    for (final ConfigChangeListener listener : m_listeners) {
      m_executorService.submit(new Runnable() {
        @Override
        public void run() {
          String listenerName = listener.getClass().getName();
          Transaction transaction = Tracer.newTransaction("Apollo.ConfigChangeListener", listenerName);
          try {
            listener.onChange(changeEvent);
            transaction.setStatus(Transaction.SUCCESS);
          } catch (Throwable ex) {
            transaction.setStatus(ex);
            Tracer.logError(ex);
            logger.error("Failed to invoke config change listener {}", listenerName, ex);
          } finally {
            transaction.complete();
          }
        }
      });
    }
  }
  /**
   * Computes per-key differences between two snapshots of a namespace as ADDED / DELETED /
   * MODIFIED changes. Null snapshots are treated as empty.
   */
  List<ConfigChange> calcPropertyChanges(String namespace, Properties previous,
      Properties current) {
    if (previous == null) {
      previous = new Properties();
    }
    if (current == null) {
      current = new Properties();
    }
    Set<String> previousKeys = previous.stringPropertyNames();
    Set<String> currentKeys = current.stringPropertyNames();
    Set<String> commonKeys = Sets.intersection(previousKeys, currentKeys);
    Set<String> newKeys = Sets.difference(currentKeys, commonKeys);
    Set<String> removedKeys = Sets.difference(previousKeys, commonKeys);
    List<ConfigChange> changes = Lists.newArrayList();
    for (String newKey : newKeys) {
      changes.add(new ConfigChange(namespace, newKey, null, current.getProperty(newKey),
          PropertyChangeType.ADDED));
    }
    for (String removedKey : removedKeys) {
      changes.add(new ConfigChange(namespace, removedKey, previous.getProperty(removedKey), null,
          PropertyChangeType.DELETED));
    }
    for (String commonKey : commonKeys) {
      String previousValue = previous.getProperty(commonKey);
      String currentValue = current.getProperty(commonKey);
      if (Objects.equal(previousValue, currentValue)) {
        continue;
      }
      changes.add(new ConfigChange(namespace, commonKey, previousValue, currentValue,
          PropertyChangeType.MODIFIED));
    }
    return changes;
  }
}
|
|
package sres;
import java.io.EOFException;
import java.io.IOException;
import java.io.NotSerializableException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.Socket;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import java.util.concurrent.AbstractExecutorService;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.TimeUnit;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
public class RemoteExecutionTracker extends AbstractExecutorService implements NonBlockingExecutorService {
private final int queueSize = 10;
private static final Logger logger = LogManager.getLogger(RemoteExecutionTracker.class);
Socket con;
String secret;
ObjectOutputStream out;
ObjectInputStream in;
SendingThread sendThread;
ReceivingThread recvThread;
GridExecutorService ges;
volatile Boolean shuttingDown = false;
volatile Boolean terminated = false;
Boolean remoteShutdown = false;
final BlockingQueue<Runnable> sendQueue;
final Map<UUID, Runnable> outstandingRunnables;
/**
 * Sets up tracking of remote execution over the given socket: performs the handshake via
 * initializeConnection(), then starts the sender/receiver threads and registers with the
 * GridExecutorService. On SecurityException or IOException the connection is shut down
 * instead; a ClassNotFoundException is treated as fatal.
 */
public RemoteExecutionTracker(GridExecutorService ges, Socket con, String secret) {
    this.con = con;
    this.secret = secret;
    this.ges = ges;
    // Bounded queue (queueSize) provides backpressure on blocking submits.
    sendQueue = new ArrayBlockingQueue<>(queueSize);
    outstandingRunnables = new HashMap<>();
    try {
        boolean initialized = initializeConnection();
        if (initialized) {
            sendThread = new SendingThread();
            recvThread = new ReceivingThread();
            // NOTE(review): receiver is started before sender -- presumably so no early
            // reply is missed; confirm ordering is intentional.
            recvThread.start();
            sendThread.start();
            ges.addRemote(this);
        }
    } catch (SecurityException e) {
        logger.error("SecurityException trying to initialize remote at " + con.getRemoteSocketAddress()
                + "; shutting down connection", e);
        shutdown();
    } catch (ClassNotFoundException e) {
        logger.fatal("Could not find String.class?!", e);
        throw new RuntimeException(e);
    } catch (IOException e) {
        logger.error("IOException trying to initialize remote at " + con.getRemoteSocketAddress()
                + "; shutting down connection", e);
        shutdown();
    }
}
/**
 * Non-blocking submit: attempts to enqueue the task for remote execution.
 *
 * @return true if the task was accepted; false if the send queue was full
 */
@Override
public boolean offer(Runnable r) {
    return execute(r, false);
}
/**
 * Blocking submit: waits for space in the send queue if necessary.
 *
 * @throws RejectedExecutionException if this tracker is shutting down
 */
@Override
public void execute(Runnable command) {
    execute(command, true);
}
/**
 * Queues a task for remote execution, wrapping it in a RemoteRunnable unless it already
 * produces a remote result.
 *
 * @param command the task to run remotely; must not be null
 * @param block whether to wait for queue space (true) or fail fast (false)
 * @return true if the task was enqueued; false when rejected by a full queue, interrupted,
 *     or not serializable
 * @throws RejectedExecutionException if this tracker is shutting down
 */
private boolean execute(Runnable command, boolean block) {
    // NOTE(review): synchronizing on 'shuttingDown' is fragile -- the field is a mutable
    // Boolean that shutdown() reassigns, so threads may end up locking different objects.
    // A dedicated final lock object (or AtomicBoolean) would be safer; confirm intent.
    synchronized (shuttingDown) {
        if (shuttingDown)
            throw new RejectedExecutionException();
    }
    if (command == null)
        throw new NullPointerException();
    boolean toReturn = false;
    try {
        if (command instanceof ProducesRemoteResult) {
            // Already remote-aware; send as-is.
            toReturn = execute2(command, block);
        } else {
            RemoteRunnable rr = new RemoteRunnable(command);
            toReturn = execute2(rr, block);
        }
    } catch (InterruptedException e) {
        logger.info("Interrupted while (waiting to?) put " + command + " in queue ", e);
    } catch (NotSerializableException e) {
        logger.error("Tried to execute a non-serializable Runnable: " + command);
    }
    return toReturn;
}
/**
 * Places the task on the outgoing queue, either waiting for space or returning immediately.
 *
 * @param command the (remote-ready) task to enqueue
 * @param block true to wait for capacity, false to fail fast
 * @return true if the task was enqueued
 * @throws InterruptedException if interrupted while waiting for capacity
 */
private boolean execute2(Runnable command, boolean block) throws InterruptedException {
    if (!block) {
        // Non-blocking path: report whether the bounded queue had room.
        return sendQueue.offer(command);
    }
    sendQueue.put(command);
    return true;
}
/**
 * Graceful shutdown: stop accepting new tasks, wait (sharing a single 60s budget) for queued
 * and outstanding tasks to drain, ask the remote end to shut down, wait for its confirmation,
 * then clean up via shutdownNow(). Any timeout or interrupt short-circuits to shutdownNow().
 */
@Override
public void shutdown() {
    final long maxTimeToWait = 60_000; // 60s
    long timeWaited = 0;
    // Set state to make sure we don't accept any additional tasks
    // NOTE(review): 'shuttingDown' is a mutable Boolean used as its own lock; the assignment
    // below replaces the lock object. Confirm a dedicated final lock wasn't intended.
    synchronized (shuttingDown) {
        shuttingDown = true;
    }
    // Wait for outstanding tasks to finish
    // Phase 1: poll every 100ms until the local send queue drains.
    while (!sendQueue.isEmpty()) {
        try {
            Thread.sleep(100);
            timeWaited += 100;
            if (timeWaited > maxTimeToWait) {
                logger.error("Did not shut down within " + (maxTimeToWait / 1000) + " second limit");
                shutdownNow();
                return;
            }
        } catch (InterruptedException e) {
            logger.error("Interrupted while waiting to shutdown, calling shutdownNow");
            shutdownNow();
            return;
        }
    }
    // Phase 2: poll until tasks already sent to the remote have all reported back.
    while (!outstandingRunnables.isEmpty()) {
        try {
            Thread.sleep(100);
            timeWaited += 100;
            if (timeWaited > maxTimeToWait) {
                logger.error("Did not shut down within " + (maxTimeToWait / 1000) + " second limit");
                shutdownNow();
                return;
            }
        } catch (InterruptedException e) {
            logger.error("Interrupted while waiting to shutdown, calling shutdownNow");
            shutdownNow();
            return;
        }
    }
    // Tell remote to shut down too
    if (!terminated) {
        try {
            // The callback flips remoteShutdown once the remote confirms.
            ShutdownCommand sc = new ShutdownCommand((b) -> {
                logger.info("Received shutdown confirmation = " + b + " from remote");
                synchronized (remoteShutdown) {
                    remoteShutdown = true;
                }
            });
            sendQueue.put(sc);
        } catch (InterruptedException e) {
            logger.error("Interrupted while waiting to shutdown, calling shutdownNow");
            shutdownNow();
            return;
        }
    }
    // Wait for remote shutdown task to return
    while (true) {
        try {
            synchronized (remoteShutdown) {
                if (remoteShutdown)
                    break;
            }
            Thread.sleep(100);
            timeWaited += 100;
            if (timeWaited > maxTimeToWait) {
                logger.error("Did not shut down within " + (maxTimeToWait / 1000) + " second limit");
                shutdownNow();
                return;
            }
        } catch (InterruptedException e) {
            logger.error("Interrupted while waiting to shutdown, calling shutdownNow");
            shutdownNow(); // Make sure that shutdownnow forcibly marks
            // remote as shut down
            return;
        }
    }
    // clean up
    shutdownNow();
}
/**
 * Immediately tears down the connection: signals both worker threads to
 * stop, closes the output stream, input stream and socket (each failure is
 * logged but does not prevent the remaining closes), and returns every task
 * that was queued or outstanding but never completed.
 *
 * @return the tasks that were still awaiting execution or a remote response
 */
@Override
public List<Runnable> shutdownNow() {
    // Stop the worker threads' loops at their next shutdown check.
    if (sendThread != null)
        sendThread.shutdown = true;
    if (recvThread != null)
        recvThread.shutdown = true;
    // Close each resource independently so one failure doesn't skip the rest.
    try {
        if (out != null)
            out.close();
    } catch (IOException e) {
        logger.error("IOException trying to close output stream to " + con.getRemoteSocketAddress(), e);
    }
    try {
        if (in != null)
            in.close();
    } catch (IOException e) {
        logger.error("IOException trying to close input stream from " + con.getRemoteSocketAddress(), e);
    }
    try {
        if (con != null)
            con.close();
    } catch (IOException e) {
        logger.error("IOException trying to close socket with " + con.getRemoteSocketAddress(), e);
    }
    List<Runnable> uncomplete = new ArrayList<>();
    // Empty the send queue; drainTo removes the elements it transfers.
    sendQueue.drainTo(uncomplete);
    sendQueue.clear();
    // Empty the outstanding map. Copy the values first instead of removing
    // entries while iterating keySet(), which throws
    // ConcurrentModificationException on a non-concurrent Map.
    uncomplete.addAll(outstandingRunnables.values());
    outstandingRunnables.clear();
    terminated = true;
    return uncomplete;
}
/**
 * Returns the shutting-down flag, i.e. whether {@link #shutdown()} has
 * begun. (Unboxes the Boolean field.)
 */
@Override
public boolean isShutdown() {
return shuttingDown;
}
/**
 * Returns whether the connection has been fully torn down; set by
 * {@link #shutdownNow()} once streams and socket are closed.
 */
@Override
public boolean isTerminated() {
return terminated;
}
/**
 * Blocks until the receiving thread has exited or the timeout elapses.
 *
 * @param timeout maximum time to wait
 * @param unit    unit of {@code timeout}
 * @return whether the executor had terminated when the wait ended
 * @throws InterruptedException if interrupted while waiting
 */
@Override
public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException {
    // Convert the caller's timeout INTO milliseconds for Thread.join().
    // (The previous unit.convert(timeout, MILLISECONDS) converted the wrong
    // way: e.g. 5 SECONDS became 0, and join(0) waits forever.)
    long ms = unit.toMillis(timeout);
    if (ms == 0 && timeout > 0) {
        // Sub-millisecond timeouts would also become join(0) == wait forever.
        ms = 1;
    }
    if (recvThread != null) {
        recvThread.join(ms);
    }
    // join() gives no timed-out indication; report the actual terminated state
    // instead of unconditionally claiming success.
    return terminated;
}
/**
 * Performs the object-stream setup and challenge/response authentication
 * handshake with the remote endpoint.
 *
 * <p>Sends an {@link AuthenticationRequest}, expects one back, verifies the
 * echoed salt matches ours (replay protection), then checks the response
 * against the shared secret. On any failure the connection is torn down via
 * {@link #shutdownNow()}.
 *
 * @return true if the remote authenticated successfully
 * @throws IOException            if stream setup or read/write fails
 * @throws ClassNotFoundException if the remote sends an unknown class
 */
private boolean initializeConnection() throws IOException, SecurityException, ClassNotFoundException {
    boolean authenticated = false;
    // Use the class logger (was System.out.println) so stream setup appears
    // alongside the rest of the connection lifecycle messages.
    logger.info("Setting up object stream");
    out = new ObjectOutputStream(con.getOutputStream());
    in = new ObjectInputStream(con.getInputStream());
    AuthenticationRequest request = new AuthenticationRequest();
    out.writeObject(request);
    Object o = in.readObject();
    if (!(o instanceof AuthenticationRequest)) {
        // Message fixed: we check for AuthenticationRequest (the old text
        // said "exepected AuthenticationCommand").
        logger.error("Did not receive expected AuthenticationRequest from remote");
    } else {
        AuthenticationRequest response = (AuthenticationRequest) o;
        // The remote must echo back the salt we sent; a mismatch suggests a
        // replayed handshake.
        boolean uuidMatches = request.salt.equals(response.salt);
        if (!uuidMatches) {
            logger.error("Possible replay attack from " + con.getRemoteSocketAddress());
        } else {
            authenticated = response.checkResult(secret);
        }
    }
    if (!authenticated) {
        logger.error("Remote at " + con.getRemoteSocketAddress() + " failed authentication");
        shutdownNow();
    } else {
        // Only report success when the handshake actually succeeded
        // (previously logged unconditionally, even after auth failure).
        logger.info("Remote at " + con.getRemoteSocketAddress() + " initialized");
    }
    return authenticated;
}
/**
 * Routes a result received from the remote back to the originating task
 * (looked up by the result's ID) and logs any exception the task threw on
 * the remote side. For a {@link RemoteCallable} the result payload is also
 * delivered via {@code processResponse}.
 */
// Raw types shouldn't matter here. Since we're passing the results back
// to the original RemoteCallable, the types should be the same.
@SuppressWarnings({ "rawtypes" })
private void processRemoteResult(RemoteResult result) {
    // remove() looks up and clears the entry in one step; the previous
    // get-then-remove pair was two separate map operations.
    Runnable r = outstandingRunnables.remove(result.getID());
    if (r == null) {
        // Unknown or already-processed ID. Log and keep going instead of
        // throwing: previously a null fell through to the "unknown response"
        // branch and the RuntimeException killed the receive loop.
        logger.error("Received result for unknown or already-completed task: " + result);
        return;
    }
    if (r instanceof RemoteRunnable) {
        Exception e = ((RemoteRunnable) r).getException();
        if (e != null) {
            logger.error(r + " threw Exception: ", e);
        }
    } else if (r instanceof RemoteCallable) {
        Exception oe = ((RemoteCallable) r).getException();
        if (oe != null) {
            logger.error(r + " threw Exception: ", oe);
        }
        ((RemoteCallable) r).processResponse(result);
    } else {
        throw new RuntimeException("Received unknown response " + result);
    }
}
/**
 * Worker thread that drains the send queue and writes each task to the
 * remote over the object output stream. Sent {@code RemoteCallable}s are
 * recorded in {@code outstandingRunnables} (keyed by ID) so their results
 * can be routed back by the receiving side.
 */
class SendingThread extends Thread {
    /** Set to true to make the loop exit at its next shutdown check. */
    volatile boolean shutdown = false;
    static final int pullFromQueueTimeout = 10_000; // 10s
    static final int pullFromQueueWarningInterval = 60_000; // 60s (1m)
    static final int sendToRemoteTimeout = 20; // 0.02s
    static final int sendToRemoteWarningInterval = 10000; // 10s

    @Override
    public void run() {
        // Declared OUTSIDE the loop so idle time accumulates across
        // iterations. Previously this was re-declared (reset to 0) on every
        // pass, so the "waited N seconds" warning below could never fire.
        long timeWaitingForNewTask = 0;
        while (true) {
            try {
                // if we have too many tasks outstanding, wait for there to
                // be room
                long timeWaitingOnRemote = 0;
                while (outstandingRunnables.size() > queueSize) {
                    try {
                        Thread.sleep(sendToRemoteTimeout);
                        timeWaitingOnRemote += sendToRemoteTimeout;
                    } catch (InterruptedException ie) {
                        // Do nothing, shutdown check is in the finally
                        // block
                    } finally {
                        // Watch for shutdown, so we don't wait forever
                        if (shutdown == true)
                            return;
                        if ((timeWaitingOnRemote % sendToRemoteWarningInterval) == 0) {
                            logger.warn(this + " waited " + (timeWaitingOnRemote / 1000)
                                    + " seconds for a response from remote");
                        }
                    }
                }
                Runnable r = sendQueue.poll(pullFromQueueTimeout, TimeUnit.MILLISECONDS);
                if (r != null) {
                    out.writeObject(r);
                    if (r instanceof RemoteCallable) {
                        @SuppressWarnings("rawtypes")
                        RemoteCallable rc = (RemoteCallable) r;
                        outstandingRunnables.put(rc.getID(), rc);
                    }
                    timeWaitingForNewTask = 0;
                } else {
                    timeWaitingForNewTask += pullFromQueueTimeout;
                    if ((timeWaitingForNewTask % pullFromQueueWarningInterval) == 0) {
                        // Report the actual accumulated idle time rather
                        // than the fixed interval constant.
                        logger.warn(this + " waited " + (timeWaitingForNewTask / 1000)
                                + " seconds for a new task request");
                    }
                }
            } catch (InterruptedException e) {
                // Do nothing, shutdown check is in finally block
            } catch (IOException e) {
                logger.error("IOException trying to send to remote at " + con.getRemoteSocketAddress()
                        + "; shutting down connection", e);
                shutdown();
            } finally {
                if (shutdown == true)
                    return;
            }
        }
    }
}
/**
 * Worker thread that blocks on the object input stream, reading
 * {@link RemoteResult}s from the remote and handing each one to
 * {@code processRemoteResult}. EOF (remote disconnect) and I/O errors tear
 * the connection down via {@code shutdownNow()}.
 */
class ReceivingThread extends Thread {
// Set to true to make the loop exit before its next read.
volatile boolean shutdown = false;
static final int timeout = 2;
// Accumulated idle time; incremented when nothing was received.
// NOTE(review): readObject() blocks until data arrives, so recv is only
// null here in unusual cases -- confirm this counter is still meaningful.
int timeWaiting = 0;
@Override
public void run() {
while (true) {
try {
// Checked once per iteration; a blocked readObject() is only
// broken out of by closing the stream (see shutdownNow()).
if (shutdown == true)
return;
Object recv = null;
if (in != null) {
// Blocks until the remote writes an object.
recv = in.readObject();
if (recv instanceof RemoteResult) {
RemoteResult rr = (RemoteResult) recv;
processRemoteResult(rr);
} else {
logger.error("Received object other than RemoteResult, ignoring. Was: " + recv);
}
} else {
// socket was closed
shutdown();
}
if (recv != null) {
timeWaiting = 0;
} else {
timeWaiting += timeout;
}
} catch (IOException e) {
// EOF means the remote closed the connection normally; anything
// else is only worth logging if we didn't initiate shutdown.
if (e instanceof EOFException) {
logger.info("Disconnecting from " + con.getRemoteSocketAddress());
} else {
if (!isShutdown())
logger.error("IOException waiting to receive from remote at " + con.getRemoteSocketAddress()
+ "; shutting down connection, Cause was: ", e);
}
shutdownNow();
} catch (ClassNotFoundException e) {
logger.error("ClassNotFoundException receiving from remote at " + con.getRemoteSocketAddress()
+ "; shutting down connection, Cause was: ", e);
shutdownNow();
}
}
}
}
}
|
|
/* The contents of this file are subject to the license and copyright terms
* detailed in the license directory at the root of the source tree (also
* available online at http://fedora-commons.org/license/).
*/
package org.fcrepo.server.utilities;
import java.io.IOException;
import java.io.InputStream;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.fcrepo.server.config.DatastoreConfiguration;
import org.fcrepo.server.config.Parameter;
import org.fcrepo.server.errors.InconsistentTableSpecException;
import org.fcrepo.server.storage.ConnectionPool;
/**
* This package-level class contains the methods that do much of the work for
* {@link SQLUtility}, which acts as a public facade around an instance of this
* class. This two-level structure allows the unit tests to mock out the
* functionality of {@link SQLUtility}. The worker methods are declared
* "protected" so they can be overridden by a mock class.
*/
class SQLUtilityImpl
extends SQLUtility {
private static final Logger logger =
LoggerFactory.getLogger(SQLUtilityImpl.class);
@Override
protected ConnectionPool i_getConnectionPool(DatastoreConfiguration cpDC)
throws SQLException {
String cpUsername = cpDC.getParameter("dbUsername",Parameter.class).getValue();
String cpPassword = cpDC.getParameter("dbPassword",Parameter.class).getValue();
String cpURL = cpDC.getParameter("jdbcURL",Parameter.class).getValue();
String cpDriver = cpDC.getParameter("jdbcDriverClass",Parameter.class).getValue();
String cpDDLConverter = cpDC.getParameter("ddlConverter",Parameter.class).getValue();
int cpMaxActive =
Integer.parseInt(cpDC.getParameter("maxActive",Parameter.class).getValue());
int cpMaxIdle =
Integer.parseInt(cpDC.getParameter("maxIdle",Parameter.class).getValue());
long cpMaxWait =
Long.parseLong(cpDC.getParameter("maxWait",Parameter.class).getValue());
int cpMinIdle =
Integer.parseInt(cpDC.getParameter("minIdle",Parameter.class).getValue());
long cpMinEvictableIdleTimeMillis =
Long.parseLong(cpDC.getParameter("minEvictableIdleTimeMillis",Parameter.class)
.getValue());
int cpNumTestsPerEvictionRun =
Integer.parseInt(cpDC.getParameter("numTestsPerEvictionRun",Parameter.class)
.getValue());
long cpTimeBetweenEvictionRunsMillis =
Long.parseLong(cpDC
.getParameter("timeBetweenEvictionRunsMillis",Parameter.class)
.getValue());
String cpValidationQuery = null;
if (cpDC.getParameter("validationQuery") != null) {
cpValidationQuery = cpDC.getParameter("validationQuery",Parameter.class).getValue();
}
boolean cpTestOnBorrow =
Boolean.parseBoolean(cpDC.getParameter("testOnBorrow",Parameter.class)
.getValue());
boolean cpTestOnReturn =
Boolean.parseBoolean(cpDC.getParameter("testOnReturn",Parameter.class)
.getValue());
boolean cpTestWhileIdle =
Boolean.parseBoolean(cpDC.getParameter("testWhileIdle",Parameter.class)
.getValue());
byte cpWhenExhaustedAction =
Byte.parseByte(cpDC.getParameter("whenExhaustedAction",Parameter.class)
.getValue());
DDLConverter ddlConverter = null;
if (cpDDLConverter != null) {
try {
ddlConverter =
(DDLConverter) Class.forName(cpDDLConverter)
.newInstance();
} catch (InstantiationException e) {
e.printStackTrace();
} catch (IllegalAccessException e) {
e.printStackTrace();
} catch (ClassNotFoundException e) {
e.printStackTrace();
}
}
return new ConnectionPool(cpDriver,
cpURL,
cpUsername,
cpPassword,
ddlConverter,
cpMaxActive,
cpMaxIdle,
cpMaxWait,
cpMinIdle,
cpMinEvictableIdleTimeMillis,
cpNumTestsPerEvictionRun,
cpTimeBetweenEvictionRunsMillis,
cpValidationQuery,
cpTestOnBorrow,
cpTestOnReturn,
cpTestWhileIdle,
cpWhenExhaustedAction);
}
/**
* Adds or replaces a row in the given table.
*
* @param conn
* the connection to use
* @param table
* the name of the table
* @param columns
* the names of the columns whose values we're setting.
* @param values
* associated values
* @param uniqueColumn
* which column name is unique? The value of this column will be used
* in the where clause. It must be a column which is not numeric.
* @param numeric
* for each associated column, is it numeric? if null, all columns
* are assumed to be strings.
*/
@Override
protected void i_replaceInto(Connection conn,
String table,
String[] columns,
String[] values,
String uniqueColumn,
boolean[] numeric) throws SQLException {
if (!i_updateRow(conn, table, columns, values, uniqueColumn, numeric)) {
i_addRow(conn, table, columns, values, numeric);
}
}
/**
* Updates an existing row.
*
* @return false if the row did not previously exist and therefore was not
* updated.
*/
@Override
protected boolean i_updateRow(Connection conn,
String table,
String[] columns,
String[] values,
String uniqueColumn,
boolean[] numeric) throws SQLException {
// prepare update statement
StringBuffer sql = new StringBuffer();
sql.append("UPDATE " + table + " SET ");
boolean needComma = false;
for (int i = 0; i < columns.length; i++) {
if (!columns[i].equals(uniqueColumn)) {
if (needComma) {
sql.append(", ");
} else {
needComma = true;
}
sql.append(columns[i] + " = ");
if (values[i] == null) {
sql.append("NULL");
} else {
sql.append("?");
}
}
}
sql.append(" WHERE " + uniqueColumn + " = ?");
logger.debug("About to execute: " + sql.toString());
PreparedStatement stmt = conn.prepareStatement(sql.toString());
try {
// populate values
int varIndex = 0;
for (int i = 0; i < columns.length; i++) {
if (!columns[i].equals(uniqueColumn) && values[i] != null) {
varIndex++;
if (numeric != null && numeric[i]) {
setNumeric(stmt, varIndex, columns[i], values[i]);
} else {
stmt.setString(varIndex, values[i]);
}
}
}
varIndex++;
stmt
.setString(varIndex, getSelector(columns,
values,
uniqueColumn));
// execute and return true if existing row was updated
return stmt.executeUpdate() > 0;
} finally {
closeStatement(stmt);
}
}
/**
* Adds a new row.
*
* @throws SQLException
* if the row could not be added.
*/
@Override
protected void i_addRow(Connection conn,
String table,
String[] columns,
String[] values,
boolean[] numeric) throws SQLException {
// prepare insert statement
StringBuffer sql = new StringBuffer();
sql.append("INSERT INTO " + table + " (");
for (int i = 0; i < columns.length; i++) {
if (i > 0) {
sql.append(", ");
}
sql.append(columns[i]);
}
sql.append(") VALUES (");
for (int i = 0; i < columns.length; i++) {
if (i > 0) {
sql.append(", ");
}
if (values[i] == null) {
sql.append("NULL");
} else {
sql.append("?");
}
}
sql.append(")");
logger.debug("About to execute: " + sql.toString());
PreparedStatement stmt = conn.prepareStatement(sql.toString());
try {
// populate values
int varIndex = 0;
for (int i = 0; i < values.length; i++) {
if (values[i] != null) {
varIndex++;
if (numeric != null && numeric[i]) {
setNumeric(stmt, varIndex, columns[i], values[i]);
} else {
stmt.setString(varIndex, values[i]);
}
}
}
// execute
stmt.executeUpdate();
} finally {
closeStatement(stmt);
}
}
@Override
protected void i_createNonExistingTables(ConnectionPool cPool,
InputStream dbSpec)
throws IOException, InconsistentTableSpecException, SQLException {
List<TableSpec> nonExisting = null;
Connection conn = null;
try {
conn = cPool.getReadOnlyConnection();
nonExisting =
i_getNonExistingTables(conn, TableSpec
.getTableSpecs(dbSpec));
} finally {
if (conn != null) {
cPool.free(conn);
}
}
if (nonExisting.size() > 0) {
TableCreatingConnection tcConn = null;
try {
tcConn = cPool.getTableCreatingConnection();
if (tcConn == null) {
throw new SQLException("Unable to construct CREATE TABLE "
+ "statement(s) because there is no DDLConverter "
+ "registered for this connection type.");
}
i_createTables(tcConn, nonExisting);
} finally {
if (tcConn != null) {
cPool.free(tcConn);
}
}
}
}
@Override
protected List<TableSpec> i_getNonExistingTables(Connection conn,
List<TableSpec> tSpecs)
throws SQLException {
ArrayList<TableSpec> nonExisting = new ArrayList<TableSpec>();
DatabaseMetaData dbMeta = conn.getMetaData();
Iterator<TableSpec> tSpecIter = tSpecs.iterator();
ResultSet r = null;
// Get a list of tables that don't exist, if any
try {
r = dbMeta.getTables(null, null, "%", null);
HashSet<String> existingTableSet = new HashSet<String>();
while (r.next()) {
existingTableSet.add(r.getString("TABLE_NAME").toLowerCase());
}
r.close();
r = null;
while (tSpecIter.hasNext()) {
TableSpec spec = tSpecIter.next();
if (!existingTableSet.contains(spec.getName().toLowerCase())) {
nonExisting.add(spec);
}
}
} catch (SQLException sqle) {
throw new SQLException(sqle.getMessage());
} finally {
try {
if (r != null) {
r.close();
}
} catch (SQLException sqle2) {
throw sqle2;
} finally {
r = null;
}
}
return nonExisting;
}
@Override
protected void i_createTables(TableCreatingConnection tcConn,
List<TableSpec> tSpecs) throws SQLException {
Iterator<TableSpec> nii = tSpecs.iterator();
while (nii.hasNext()) {
TableSpec spec = nii.next();
if (logger.isInfoEnabled()) {
StringBuffer sqlCmds = new StringBuffer();
Iterator<String> iter =
tcConn.getDDLConverter().getDDL(spec).iterator();
while (iter.hasNext()) {
sqlCmds.append("\n");
sqlCmds.append(iter.next());
sqlCmds.append(";");
}
logger.info("Creating new " + "table '" + spec.getName()
+ "' with command(s): " + sqlCmds.toString());
}
tcConn.createTable(spec);
}
}
void closeStatement(Statement stmt) {
if (stmt != null) {
try {
stmt.close();
} catch (SQLException e) {
logger.warn("Unable to close statement", e);
}
}
}
/**
* Get a long string, which could be a TEXT or CLOB type. (CLOBs require
* special handling -- this method normalizes the reading of them)
*/
@Override
protected String i_getLongString(ResultSet rs, int pos) throws SQLException {
String s = rs.getString(pos);
if (s != null) {
// It's a String-based datatype, so just return it.
return s;
} else {
// It may be a CLOB. If so, return the contents as a String.
try {
Clob c = rs.getClob(pos);
return c.getSubString(1, (int) c.length());
} catch (Throwable th) {
th.printStackTrace();
return null;
}
}
}
/**
* Sets a numeric value in the prepared statement. Parsing the string is
* attempted as an int, then a long, and if that fails, a SQLException is
* thrown.
*/
private void setNumeric(PreparedStatement stmt,
int varIndex,
String columnName,
String value) throws SQLException {
try {
stmt.setInt(varIndex, Integer.parseInt(value));
} catch (NumberFormatException e) {
try {
stmt.setLong(varIndex, Long.parseLong(value));
} catch (NumberFormatException e2) {
throw new SQLException("Value specified for " + columnName
+ ", '" + value + "' was"
+ " specified as numeric, but is not");
}
}
}
/**
* Gets the value in the given array whose associated column name matches
* the given uniqueColumn name.
*
* @throws SQLException
* if the uniqueColumn doesn't exist in the given column array.
*/
private String getSelector(String[] columns,
String[] values,
String uniqueColumn) throws SQLException {
String selector = null;
for (int i = 0; i < columns.length; i++) {
if (columns[i].equals(uniqueColumn)) {
selector = values[i];
}
}
if (selector != null) {
return selector;
} else {
throw new SQLException("Unique column does not exist in given "
+ "column array");
}
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.server.namenodeFBT.msg.MessageException;
import org.apache.hadoop.hdfs.server.namenodeFBT.service.ServiceException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
/**
* A JUnit test to test Mini Map-Reduce Cluster with Mini-DFS.
*/
/**
 * JUnit 3 test driving a word-count and a pi-estimation job through a
 * MiniMRCluster backed by a MiniDFSCluster, and verifying both job output
 * and the task-tracker working directories left on disk.
 */
public class TestMiniMRWithDFS extends TestCase {
private static final Log LOG =
LogFactory.getLog(TestMiniMRWithDFS.class.getName());
// Map count / sample count for the pi-estimation job.
static final int NUM_MAPS = 10;
static final int NUM_SAMPLES = 100000;
/** Bundles a finished job handle with its concatenated text output. */
public static class TestResult {
public String output;
public RunningJob job;
TestResult(RunningJob job, String output) {
this.job = job;
this.output = output;
}
}
/**
 * Runs the WordCount example over the given single-file input and returns
 * the job plus its output read back as one string. Any prior output dir is
 * deleted first.
 */
public static TestResult launchWordCount(JobConf conf,
Path inDir,
Path outDir,
String input,
int numMaps,
int numReduces) throws IOException, MessageException, ServiceException {
FileSystem inFs = inDir.getFileSystem(conf);
FileSystem outFs = outDir.getFileSystem(conf);
outFs.delete(outDir, true);
if (!inFs.mkdirs(inDir)) {
throw new IOException("Mkdirs failed to create " + inDir.toString());
}
{
// Write the entire input as one part file.
// NOTE(review): file.close() is not in a finally block, so a failed
// writeBytes leaks the stream -- acceptable in a test, but confirm.
DataOutputStream file = inFs.create(new Path(inDir, "part-0"));
file.writeBytes(input);
file.close();
}
conf.setJobName("wordcount");
conf.setInputFormat(TextInputFormat.class);
// the keys are words (strings)
conf.setOutputKeyClass(Text.class);
// the values are counts (ints)
conf.setOutputValueClass(IntWritable.class);
conf.setMapperClass(WordCount.MapClass.class);
conf.setCombinerClass(WordCount.Reduce.class);
conf.setReducerClass(WordCount.Reduce.class);
FileInputFormat.setInputPaths(conf, inDir);
FileOutputFormat.setOutputPath(conf, outDir);
conf.setNumMapTasks(numMaps);
conf.setNumReduceTasks(numReduces);
RunningJob job = JobClient.runJob(conf);
return new TestResult(job, readOutput(outDir, conf));
}
/**
 * Concatenates every output part file in outDir (newline-terminated lines)
 * into a single string.
 */
public static String readOutput(Path outDir,
JobConf conf) throws IOException, MessageException {
FileSystem fs = outDir.getFileSystem(conf);
StringBuffer result = new StringBuffer();
{
// Only real output files pass the OutputLogFilter (skips logs etc.).
Path[] fileList = FileUtil.stat2Paths(fs.listStatus(outDir,
new OutputLogFilter()));
for(int i=0; i < fileList.length; ++i) {
LOG.info("File list[" + i + "]" + ": "+ fileList[i]);
BufferedReader file =
new BufferedReader(new InputStreamReader(fs.open(fileList[i])));
String line = file.readLine();
while (line != null) {
result.append(line);
result.append("\n");
line = file.readLine();
}
file.close();
}
}
return result.toString();
}
/**
 * Make sure that there are exactly the directories that we expect to find.
 * @param mr the map-reduce cluster
 * @param taskDirs the task ids that should be present
 * @throws MessageException
 */
// NOTE(review): jobIds and taskDirs are indexed in parallel below, so they
// are expected to have equal length -- confirm with callers.
static void checkTaskDirectories(MiniMRCluster mr,
String[] jobIds,
String[] taskDirs) throws MessageException {
mr.waitUntilIdle();
int trackers = mr.getNumTaskTrackers();
List<String> neededDirs = new ArrayList<String>(Arrays.asList(taskDirs));
// found[i] becomes true once taskDirs[i] is seen on any tracker.
boolean[] found = new boolean[taskDirs.length];
for(int i=0; i < trackers; ++i) {
// NOTE(review): numNotDel is incremented but never read.
int numNotDel = 0;
File localDir = new File(mr.getTaskTrackerLocalDir(i));
LOG.debug("Tracker directory: " + localDir);
File trackerDir = new File(localDir, "taskTracker");
assertTrue("local dir " + localDir + " does not exist.",
localDir.isDirectory());
assertTrue("task tracker dir " + trackerDir + " does not exist.",
trackerDir.isDirectory());
String contents[] = localDir.list();
String trackerContents[] = trackerDir.list();
for(int j=0; j < contents.length; ++j) {
System.out.println("Local " + localDir + ": " + contents[j]);
}
for(int j=0; j < trackerContents.length; ++j) {
System.out.println("Local jobcache " + trackerDir + ": " + trackerContents[j]);
}
// The tracker's local dir must contain nothing but "taskTracker".
for(int fileIdx = 0; fileIdx < contents.length; ++fileIdx) {
String name = contents[fileIdx];
if (!("taskTracker".equals(contents[fileIdx]))) {
LOG.debug("Looking at " + name);
assertTrue("Spurious directory " + name + " found in " +
localDir, false);
}
}
// Check which of the expected task dirs survive under
// taskTracker/jobcache/<jobId>/<taskDir>.
for (int idx = 0; idx < neededDirs.size(); ++idx) {
String name = neededDirs.get(idx);
if (new File(new File(new File(trackerDir, "jobcache"),
jobIds[idx]), name).isDirectory()) {
found[idx] = true;
numNotDel++;
}
}
}
for(int i=0; i< found.length; i++) {
assertTrue("Directory " + taskDirs[i] + " not found", found[i]);
}
}
/**
 * Runs the PiEstimator example and asserts the estimate is within 0.01 of
 * pi; also asserts no task directories were left behind.
 */
public static void runPI(MiniMRCluster mr, JobConf jobconf) throws IOException, MessageException, ServiceException {
LOG.info("runPI");
double estimate = org.apache.hadoop.examples.PiEstimator.estimate(
NUM_MAPS, NUM_SAMPLES, jobconf).doubleValue();
double error = Math.abs(Math.PI - estimate);
assertTrue("Error in PI estimation "+error+" exceeds 0.01", (error < 0.01));
checkTaskDirectories(mr, new String[]{}, new String[]{});
}
/**
 * Runs word count three ways: with kept task files (checked on disk), with
 * zero maps (checking HDFS byte counters), and with local-fs input/output
 * while the default fs is HDFS.
 */
public static void runWordCount(MiniMRCluster mr, JobConf jobConf)
throws IOException, MessageException, ServiceException {
LOG.info("runWordCount");
// Run a word count example
// Keeping tasks that match this pattern
String pattern =
TaskAttemptID.getTaskAttemptIDsPattern(null, null, true, 1, null);
jobConf.setKeepTaskFilesPattern(pattern);
TestResult result;
final Path inDir = new Path("./wc/input");
final Path outDir = new Path("./wc/output");
String input = "The quick brown fox\nhas many silly\nred fox sox\n";
result = launchWordCount(jobConf, inDir, outDir, input, 3, 1);
assertEquals("The\t1\nbrown\t1\nfox\t2\nhas\t1\nmany\t1\n" +
"quick\t1\nred\t1\nsilly\t1\nsox\t1\n", result.output);
JobID jobid = result.job.getID();
TaskAttemptID taskid = new TaskAttemptID(new TaskID(jobid, true, 1),0);
// The kept task's directory must still exist under the job cache.
checkTaskDirectories(mr, new String[]{jobid.toString()},
new String[]{taskid.toString()});
// test with maps=0
jobConf = mr.createJobConf();
input = "owen is oom";
result = launchWordCount(jobConf, inDir, outDir, input, 0, 1);
assertEquals("is\t1\noom\t1\nowen\t1\n", result.output);
// With no maps, HDFS bytes read/written should equal the raw input and
// output sizes exactly.
Counters counters = result.job.getCounters();
long hdfsRead =
counters.findCounter(Task.FILESYSTEM_COUNTER_GROUP,
Task.getFileSystemCounterNames("hdfs")[0]).getCounter();
long hdfsWrite =
counters.findCounter(Task.FILESYSTEM_COUNTER_GROUP,
Task.getFileSystemCounterNames("hdfs")[1]).getCounter();
assertEquals(result.output.length(), hdfsWrite);
assertEquals(input.length(), hdfsRead);
// Run a job with input and output going to localfs even though the
// default fs is hdfs.
{
FileSystem localfs = FileSystem.getLocal(jobConf);
String TEST_ROOT_DIR =
new File(System.getProperty("test.build.data","/tmp"))
.toString().replace(' ', '+');
Path localIn = localfs.makeQualified
(new Path(TEST_ROOT_DIR + "/local/in"));
Path localOut = localfs.makeQualified
(new Path(TEST_ROOT_DIR + "/local/out"));
result = launchWordCount(jobConf, localIn, localOut,
"all your base belong to us", 1, 1);
assertEquals("all\t1\nbase\t1\nbelong\t1\nto\t1\nus\t1\nyour\t1\n",
result.output);
assertTrue("outputs on localfs", localfs.exists(localOut));
}
}
/** End-to-end: start DFS + MR clusters, run pi and word count, tear down. */
public void testWithDFS() throws Exception {
MiniDFSCluster dfs = null;
MiniMRCluster mr = null;
FileSystem fileSys = null;
try {
final int taskTrackers = 4;
Configuration conf = new Configuration();
dfs = new MiniDFSCluster(conf, 4, true, null);
fileSys = dfs.getFileSystem();
mr = new MiniMRCluster(taskTrackers, fileSys.getUri().toString(), 1);
runPI(mr, mr.createJobConf());
runWordCount(mr, mr.createJobConf());
} finally {
// Always tear clusters down, even if an assertion failed.
if (dfs != null) { dfs.shutdown(); }
if (mr != null) { mr.shutdown();
}
}
}
/**
 * Same as above but with the namenode on its default port, exercising
 * output paths with an explicit and an implicit authority. Skipped (not
 * failed) if the default port is already taken.
 */
public void testWithDFSWithDefaultPort() throws Exception {
MiniDFSCluster dfs = null;
MiniMRCluster mr = null;
FileSystem fileSys = null;
try {
final int taskTrackers = 4;
Configuration conf = new Configuration();
// start a dfs with the default port number
dfs = new MiniDFSCluster(
NameNode.DEFAULT_PORT, conf, 4, true, true, null, null);
fileSys = dfs.getFileSystem();
mr = new MiniMRCluster(taskTrackers, fileSys.getUri().toString(), 1);
JobConf jobConf = mr.createJobConf();
TestResult result;
final Path inDir = new Path("./wc/input");
// Fully-qualified hdfs:// output path with explicit host:port.
final Path outDir = new Path("hdfs://" +
dfs.getNameNode().getNameNodeAddress().getHostName() +
":" + NameNode.DEFAULT_PORT +"/./wc/output");
String input = "The quick brown fox\nhas many silly\nred fox sox\n";
result = launchWordCount(jobConf, inDir, outDir, input, 3, 1);
assertEquals("The\t1\nbrown\t1\nfox\t2\nhas\t1\nmany\t1\n" +
"quick\t1\nred\t1\nsilly\t1\nsox\t1\n", result.output);
// hdfs:/ path without authority, resolved via fs.default.name.
final Path outDir2 = new Path("hdfs:/test/wc/output2");
jobConf.set("fs.default.name", "hdfs://localhost:" + NameNode.DEFAULT_PORT);
result = launchWordCount(jobConf, inDir, outDir2, input, 3, 1);
assertEquals("The\t1\nbrown\t1\nfox\t2\nhas\t1\nmany\t1\n" +
"quick\t1\nred\t1\nsilly\t1\nsox\t1\n", result.output);
} catch (java.net.BindException be) {
LOG.info("Skip the test this time because can not start namenode on port "
+ NameNode.DEFAULT_PORT, be);
} finally {
if (dfs != null) { dfs.shutdown(); }
if (mr != null) { mr.shutdown();
}
}
}
}
|
|
/*
* Copyright (C) 2016 JDBX
*
* https://github.com/jdlib/JDBX
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jdbx;
import java.sql.Connection;
import java.sql.SQLWarning;
import java.sql.Statement;
import javax.sql.DataSource;
import org.jdbx.function.CheckedConsumer;
import org.jdbx.function.CheckedFunction;
import org.jdbx.function.CheckedSupplier;
import org.jdbx.function.Unchecked;
/**
* Common base class of {@link StaticStmt}, {@link PrepStmt} and {@link CallStmt}.
*/
public abstract class Stmt implements AutoCloseable
{
private enum CloseAction
{
NOTHING,
STATEMENT,
CONNECTION,
}
protected Stmt(Connection con, boolean closeCon)
{
con_ = Check.notNull(con, "connection");
closeAction_ = closeCon ? CloseAction.CONNECTION : CloseAction.STATEMENT;
}
protected Stmt(DataSource dataSource) throws JdbxException
{
this(Check.notNull(dataSource, "dataSource")::getConnection, true);
}
protected Stmt(CheckedSupplier<Connection> supplier, boolean closeCon) throws JdbxException
{
this(Unchecked.get(Check.notNull(supplier, "supplier")), closeCon);
}
//------------------------------
// connection
//------------------------------
/**
* Returns the connection used by the statement.
* @return the connection
* @throws JdbxException if the statement is already closed.
*/
public Connection getConnection() throws JdbxException
{
checkOpen();
return con_;
}
void clearCon()
{
con_ = null;
}
//------------------------------
// accessors
//------------------------------
/**
* Returns the internal JDBC statement used by the JDBX statement.
* @return the JDBC statement
* @throws JdbxException if this statement was already closed or not yet initialized.
* @see #isClosed()
* @see #isInitialized()
*/
public abstract Statement getJdbcStmt() throws JdbxException;
//------------------------------
// state
//------------------------------
/**
* Returns if the statement is initialized.
* When initialized a statement can be used to execute sql commands.
* In implementation terms initialized means that the JDBC statement on which this statement operates was created.
* @return is the statement initialized
*/
public abstract boolean isInitialized();
/**
* Tests if the statement is initialized, i.e. {@link #getJdbcStmt()} may be called.
* @throws JdbxException thrown when the statement is not initialized
*/
protected void checkInitialized() throws JdbxException
{
checkOpen();
if (jdbcStmt_ == null)
throw JdbxException.illegalState("statement not initialized");
}
/**
* Returns if the statement is closed.
* @return is the statement closed
*/
public final boolean isClosed()
{
return con_ == null;
}
/**
* Tests if the statement is open.
* @throws JdbxException thrown when the statement is already closed
*/
protected void checkOpen() throws JdbxException
{
if (isClosed())
throw JdbxException.closed();
}
protected void closeJdbcStmt() throws JdbxException
{
if (jdbcStmt_ != null)
{
try
{
call(Statement::close);
}
finally
{
jdbcStmt_ = null;
}
}
}
/**
* Closes the statement. This operation has no effect if already closed.
*/
@Override public void close() throws JdbxException
{
if (!isClosed())
{
try
{
if (closeAction_ == CloseAction.CONNECTION)
con_.close();
else if ((jdbcStmt_ != null) && (closeAction_ == CloseAction.STATEMENT))
jdbcStmt_.close();
}
catch (Exception e)
{
throw JdbxException.of(e);
}
finally
{
con_ = null;
jdbcStmt_ = null;
}
}
}
//------------------------------
// options
//------------------------------
/**
 * Returns the statement options.
 * @return the options object, created lazily on first access
 * @throws JdbxException if the statement was already closed
 */
public final StmtOptions options() throws JdbxException
{
    checkOpen();
    StmtOptions options = options_;
    if (options == null) {
        options = new StmtOptions(this);
        options_ = options;
    }
    return options;
}
//------------------------------
// cancel
//------------------------------
/**
 * Cancels execution of the current command.
 * This method can be used by one thread to cancel a statement that
 * is being executed by another thread.
 * @throws JdbxException if the statement is closed/uninitialized or cancellation fails
 * @see Statement#cancel()
 */
public void cancel() throws JdbxException
{
    call(Statement::cancel);
}
//------------------------------
// warnings
//------------------------------
/**
 * Returns the warnings collected by the statement.
 * @return the first <code>SQLWarning</code> object or <code>null</code>
 * if there are no warnings or no JDBC statement was created yet
 * @see Statement#getWarnings()
 */
public SQLWarning getWarnings() throws JdbxException
{
    checkOpen();
    if (jdbcStmt_ == null) {
        return null;
    }
    return get(Statement::getWarnings);
}
/**
 * Clears the warnings, if a JDBC statement was created.
 * @see Statement#clearWarnings()
 */
public void clearWarnings() throws JdbxException
{
    if (jdbcStmt_ == null) {
        return;
    }
    call(Statement::clearWarnings);
}
//------------------------------
// helpers
//------------------------------
/**
 * Runs the given operation on the internal JDBC statement, wrapping any
 * checked exception into a JdbxException.
 * Note: the unused type parameter {@code T} was removed; it served no purpose
 * on a void-returning method (compare {@code get}, where it is the result type).
 * @param fn the operation to run
 * @throws JdbxException if the statement is closed/uninitialized or the operation fails
 */
@SuppressWarnings("unchecked")
protected <STMT extends Statement> void call(CheckedConsumer<STMT> fn) throws JdbxException
{
    Unchecked.accept(fn, (STMT)getJdbcStmt());
}
/**
 * Applies the given function to the internal JDBC statement and returns its
 * result, wrapping any checked exception into a JdbxException.
 */
@SuppressWarnings("unchecked")
protected <STMT extends Statement, T> T get(CheckedFunction<STMT,T> fn) throws JdbxException
{
    return Unchecked.apply(fn, (STMT)getJdbcStmt());
}
protected Connection con_;
protected Statement jdbcStmt_;
protected StmtOptions options_;
protected final CloseAction closeAction_;
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.web.api;
import com.sun.jersey.api.core.HttpContext;
import com.sun.jersey.api.representation.Form;
import com.sun.jersey.core.util.MultivaluedMapImpl;
import com.sun.jersey.server.impl.model.method.dispatch.FormDispatchProvider;
import java.io.Serializable;
import java.net.URI;
import java.net.URISyntaxException;
import java.security.cert.X509Certificate;
import java.util.Collection;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.core.CacheControl;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.UriBuilder;
import javax.ws.rs.core.UriBuilderException;
import javax.ws.rs.core.UriInfo;
import org.apache.nifi.action.Action;
import org.apache.nifi.action.Operation;
import org.apache.nifi.cluster.context.ClusterContext;
import org.apache.nifi.cluster.context.ClusterContextThreadLocal;
import org.apache.nifi.cluster.manager.impl.WebClusterManager;
import org.apache.nifi.web.security.DnUtils;
import org.apache.nifi.web.security.user.NiFiUserDetails;
import org.apache.nifi.web.security.x509.X509CertificateExtractor;
import org.apache.nifi.util.NiFiProperties;
import org.apache.nifi.web.api.entity.Entity;
import org.apache.nifi.web.api.request.ClientIdParameter;
import org.apache.nifi.web.util.WebUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.builder.ReflectionToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
/**
* Base class for controllers.
*/
public abstract class ApplicationResource {
public static final String VERSION = "version";
public static final String CLIENT_ID = "clientId";
public static final String CLUSTER_CONTEXT_HTTP_HEADER = "X-ClusterContext";
public static final String PROXY_SCHEME_HTTP_HEADER = "X-ProxyScheme";
public static final String PROXY_HOST_HTTP_HEADER = "X-ProxyHost";
public static final String PROXY_PORT_HTTP_HEADER = "X-ProxyPort";
public static final String PROXY_CONTEXT_PATH_HTTP_HEADER = "X-ProxyContextPath";
public static final String PROXIED_ENTITIES_CHAIN_HTTP_HEADER = "X-ProxiedEntitiesChain";
public static final String PROXIED_ENTITY_USER_DETAILS_HTTP_HEADER = "X-ProxiedEntityUserDetails";
private static final int HEADER_BUFFER_SIZE = 16 * 1024; // 16kb
private static final int CLUSTER_CONTEXT_HEADER_VALUE_MAX_BYTES = (int) (0.75 * HEADER_BUFFER_SIZE);
private static final Logger logger = LoggerFactory.getLogger(ApplicationResource.class);
@Context
private HttpServletRequest httpServletRequest;
@Context
private UriInfo uriInfo;
@Context
private HttpContext httpContext;
/**
 * Generate a resource uri based off of the specified parameters.
 *
 * @param path the path segments of the resource
 * @return the absolute uri of the resource, honoring any proxy headers
 */
protected String generateResourceUri(String... path) {
    UriBuilder uriBuilder = uriInfo.getBaseUriBuilder();
    uriBuilder.segment(path);
    URI uri = uriBuilder.build();
    try {
        // check for proxy settings
        String scheme = httpServletRequest.getHeader(PROXY_SCHEME_HTTP_HEADER);
        String host = httpServletRequest.getHeader(PROXY_HOST_HTTP_HEADER);
        String port = httpServletRequest.getHeader(PROXY_PORT_HTTP_HEADER);
        String baseContextPath = httpServletRequest.getHeader(PROXY_CONTEXT_PATH_HTTP_HEADER);
        // if necessary, prepend the context path
        String resourcePath = uri.getPath();
        if (baseContextPath != null) {
            // normalize context path to start with a slash
            if (!baseContextPath.startsWith("/")) {
                baseContextPath = "/" + baseContextPath;
            }
            // determine the complete resource path
            resourcePath = baseContextPath + resourcePath;
        }
        // determine the port uri
        int uriPort = uri.getPort();
        if (port != null) {
            // a blank port header means "no explicit port" in the rebuilt uri
            if (StringUtils.isWhitespace(port)) {
                uriPort = -1;
            } else {
                try {
                    uriPort = Integer.parseInt(port);
                } catch (NumberFormatException nfe) {
                    // fall back to the port from the request uri
                    logger.warn(String.format("Unable to parse proxy port HTTP header '%s'. Using port from request URI '%s'.", port, uriPort));
                }
            }
        }
        // construct the URI, preferring proxy-supplied scheme/host when present
        uri = new URI(
                (StringUtils.isBlank(scheme)) ? uri.getScheme() : scheme,
                uri.getUserInfo(),
                (StringUtils.isBlank(host)) ? uri.getHost() : host,
                uriPort,
                resourcePath,
                uri.getQuery(),
                uri.getFragment());
    } catch (final URISyntaxException use) {
        throw new UriBuilderException(use);
    }
    return uri.toString();
}
/**
 * Disables client-side caching of the given response.
 *
 * @param response the response builder to modify
 * @return the same builder with a private/no-cache/no-store Cache-Control
 */
protected ResponseBuilder noCache(ResponseBuilder response) {
    final CacheControl cacheControl = new CacheControl();
    cacheControl.setNoStore(true);
    cacheControl.setNoCache(true);
    cacheControl.setPrivate(true);
    return response.cacheControl(cacheControl);
}
/**
 * If the application is operating as a node, then this method adds the
 * cluster context information to the response using the cluster context
 * response header ({@code WebClusterManager.CLUSTER_CONTEXT_HTTP_HEADER}).
 * If the serialized context is too large for a header, its actions are
 * collapsed into a single "batch" action before serializing again.
 *
 * @param response the response builder to decorate
 * @return the response builder with the serialized cluster context header set
 */
protected ResponseBuilder clusterContext(ResponseBuilder response) {
    NiFiProperties properties = NiFiProperties.getInstance();
    if (!properties.isNode()) {
        return response;
    }
    // get cluster context from threadlocal
    ClusterContext clusterCtx = ClusterContextThreadLocal.getContext();
    // serialize cluster context
    String serializedClusterContext = WebUtils.serializeObjectToHex(clusterCtx);
    if (serializedClusterContext.length() > CLUSTER_CONTEXT_HEADER_VALUE_MAX_BYTES) {
        /*
         * Actions is the only field that can vary in size. If we have no
         * actions and we exceeded the header size, then basic assumptions
         * about the cluster context have been violated.
         */
        if (clusterCtx.getActions().isEmpty()) {
            throw new IllegalStateException(
                    String.format("Serialized Cluster context size '%d' is too big for response header", serializedClusterContext.length()));
        }
        // use the first action as the prototype for creating the "batch" action
        Action prototypeAction = clusterCtx.getActions().get(0);
        // log the batched actions so the individual entries are not lost
        StringBuilder loggedActions = new StringBuilder();
        createBatchedActionLogStatement(loggedActions, clusterCtx.getActions());
        logger.info(loggedActions.toString());
        // remove current actions and replace with batch action
        clusterCtx.getActions().clear();
        // create the batch action
        Action batchAction = new Action();
        batchAction.setOperation(Operation.Batch);
        // copy values from prototype action
        batchAction.setTimestamp(prototypeAction.getTimestamp());
        batchAction.setUserDn(prototypeAction.getUserDn());
        batchAction.setUserName(prototypeAction.getUserName());
        batchAction.setSourceId(prototypeAction.getSourceId());
        batchAction.setSourceName(prototypeAction.getSourceName());
        batchAction.setSourceType(prototypeAction.getSourceType());
        // add batch action
        clusterCtx.getActions().add(batchAction);
        // create the final serialized copy of the cluster context
        serializedClusterContext = WebUtils.serializeObjectToHex(clusterCtx);
    }
    // put serialized cluster context in response header
    response.header(WebClusterManager.CLUSTER_CONTEXT_HTTP_HEADER, serializedClusterContext);
    return response;
}
/**
 * @return the cluster context if one was sent in the cluster context request
 * header, otherwise {@code null}
 */
protected ClusterContext getClusterContextFromRequest() {
    final String headerValue = httpServletRequest.getHeader(WebClusterManager.CLUSTER_CONTEXT_HTTP_HEADER);
    if (StringUtils.isBlank(headerValue)) {
        return null;
    }
    try {
        // deserialize object
        final Serializable deserialized = WebUtils.deserializeHexToObject(headerValue);
        if (deserialized instanceof ClusterContext) {
            return (ClusterContext) deserialized;
        }
    } catch (ClassNotFoundException cnfe) {
        logger.warn("Classpath issue detected because failed to deserialize cluster context from request due to: " + cnfe, cnfe);
    }
    return null;
}
/**
 * Generates an Ok response with no content.
 *
 * @return an OK response builder with caching disabled
 */
protected ResponseBuilder generateOkResponse() {
    final ResponseBuilder ok = Response.ok();
    return noCache(ok);
}
/**
 * Generates an Ok response with the specified content.
 *
 * @param entity The entity to return in the body
 * @return The response to be built, with caching disabled
 */
protected ResponseBuilder generateOkResponse(Object entity) {
    return noCache(Response.ok(entity));
}
/**
 * Generates a 201 Created response with the specified content.
 *
 * @param uri The URI of the newly created resource
 * @param entity The content to return in the body
 * @return The response to be built
 */
protected ResponseBuilder generateCreatedResponse(URI uri, Object entity) {
    // generate the response builder
    return Response.created(uri).entity(entity);
}
/**
 * Generates a 150 Node Continue response to be used within the cluster
 * request handshake.
 *
 * @return a response builder carrying the node-continue status code
 */
protected ResponseBuilder generateContinueResponse() {
    return Response.status(WebClusterManager.NODE_CONTINUE_STATUS_CODE);
}
/**
 * @return the absolute path of the current request uri
 */
protected URI getAbsolutePath() {
    return uriInfo.getAbsolutePath();
}
/**
 * Collects the request parameters, preferring the form that jersey already
 * parsed (present only for requests with an application form urlencoded body),
 * and falling back to the servlet request parameters otherwise.
 *
 * @return the request parameters as a multivalued map
 */
protected MultivaluedMap<String, String> getRequestParameters() {
    final MultivaluedMap<String, String> entity = new MultivaluedMapImpl();
    // get the form that jersey processed and use it if it exists
    final Form form = (Form) httpContext.getProperties().get(FormDispatchProvider.FORM_PROPERTY);
    if (form != null) {
        entity.putAll(form);
        return entity;
    }
    for (final Map.Entry<String, String[]> entry : (Set<Map.Entry<String, String[]>>) httpServletRequest.getParameterMap().entrySet()) {
        final String[] values = entry.getValue();
        if (values == null) {
            entity.add(entry.getKey(), null);
        } else {
            for (final String value : values) {
                entity.add(entry.getKey(), value);
            }
        }
    }
    return entity;
}
/**
 * Collects the request parameters, optionally guaranteeing that a client id
 * is present.
 *
 * @param forceClientId when {@code true}, a generated client id is inserted
 *        if the request did not supply one
 * @return the request parameters
 */
protected MultivaluedMap<String, String> getRequestParameters(final boolean forceClientId) {
    final MultivaluedMap<String, String> params = getRequestParameters();
    if (forceClientId && StringUtils.isBlank(params.getFirst(CLIENT_ID))) {
        params.putSingle(CLIENT_ID, new ClientIdParameter().getClientId());
    }
    return params;
}
/**
 * Fills in a generated client id on the entity's revision when none was supplied.
 *
 * @param entity the entity to update, may be {@code null}
 * @return the same entity, for chaining
 */
protected Entity updateClientId(final Entity entity) {
    final boolean missingClientId = entity != null
            && entity.getRevision() != null
            && StringUtils.isBlank(entity.getRevision().getClientId());
    if (missingClientId) {
        entity.getRevision().setClientId(new ClientIdParameter().getClientId());
    }
    return entity;
}
/**
 * @return the headers of the incoming request, with no overrides applied
 */
protected Map<String, String> getHeaders() {
    final Map<String, String> noOverrides = new HashMap<String, String>();
    return getHeaders(noOverrides);
}
/**
 * Builds the map of headers to forward with a proxied/replicated request.
 * Headers of the incoming request are copied, selected headers may be
 * overridden, and proxy/identity headers are added for secure requests.
 *
 * @param overriddenHeaders header values replacing those of the incoming
 *        request (matched case-insensitively); when non-empty, content-length
 *        is dropped since the forwarded body may differ in size
 * @return the resulting header map
 */
protected Map<String, String> getHeaders(final Map<String, String> overriddenHeaders) {
    final Map<String, String> result = new HashMap<String, String>();
    // case-insensitive view of the overrides, since HTTP header names are case-insensitive
    final Map<String, String> overriddenHeadersIgnoreCaseMap = new TreeMap<String, String>(String.CASE_INSENSITIVE_ORDER);
    overriddenHeadersIgnoreCaseMap.putAll(overriddenHeaders);
    final Enumeration<String> headerNames = httpServletRequest.getHeaderNames();
    while (headerNames.hasMoreElements()) {
        final String headerName = headerNames.nextElement();
        // skip content-length when any override is present (body may change size)
        if (!overriddenHeadersIgnoreCaseMap.isEmpty() && headerName.equalsIgnoreCase("content-length")) {
            continue;
        }
        if (overriddenHeadersIgnoreCaseMap.containsKey(headerName)) {
            result.put(headerName, overriddenHeadersIgnoreCaseMap.get(headerName));
        } else {
            result.put(headerName, httpServletRequest.getHeader(headerName));
        }
    }
    // set the proxy scheme to request scheme if not already set client
    String proxyScheme = httpServletRequest.getHeader(PROXY_SCHEME_HTTP_HEADER);
    if (proxyScheme == null) {
        result.put(PROXY_SCHEME_HTTP_HEADER, httpServletRequest.getScheme());
    }
    // if this is a secure request, add the custom headers for proxying user requests
    final X509Certificate cert = new X509CertificateExtractor().extractClientCertificate(httpServletRequest);
    if (cert != null) {
        // add the certificate DN to the proxy chain
        final String xProxiedEntitiesChain = DnUtils.getXProxiedEntitiesChain(httpServletRequest);
        if (StringUtils.isNotBlank(xProxiedEntitiesChain)) {
            result.put(PROXIED_ENTITIES_CHAIN_HTTP_HEADER, xProxiedEntitiesChain);
        }
        // add the user's authorities (if any) to the headers
        final Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
        if (authentication != null) {
            final Object userDetailsObj = authentication.getPrincipal();
            if (userDetailsObj instanceof NiFiUserDetails) {
                // serialize user details object
                final String hexEncodedUserDetails = WebUtils.serializeObjectToHex((Serializable) userDetailsObj);
                // put serialized user details in header
                result.put(PROXIED_ENTITY_USER_DETAILS_HTTP_HEADER, hexEncodedUserDetails);
            }
        }
    }
    return result;
}
/**
 * Appends a log statement listing every action about to be replaced by the
 * single "batch" action.
 *
 * @param strb buffer receiving the log text
 * @param actions the actions being batched
 */
private void createBatchedActionLogStatement(StringBuilder strb, Collection<Action> actions) {
    strb.append("Cluster context too big for response header. Replacing below actions with 'batch' action...\n");
    for (final Action action : actions) {
        final String actionDump = ReflectionToStringBuilder.toString(action, ToStringStyle.MULTI_LINE_STYLE);
        strb.append(actionDump).append("\n");
    }
}
}
|
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.sql.jdbc;
import com.sun.net.httpserver.Headers;
import com.sun.net.httpserver.HttpExchange;
import com.sun.net.httpserver.HttpServer;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.common.Strings;
import org.elasticsearch.core.SuppressForbidden;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.mocksocket.MockHttpServer;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.sql.client.ConnectionConfiguration;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import java.io.Closeable;
import java.io.IOException;
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.nio.charset.StandardCharsets;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Queue;
import java.util.concurrent.ExecutorService;
public class JdbcHttpClientRequestTests extends ESTestCase {
private static RawRequestMockWebServer webServer = new RawRequestMockWebServer();
private static final Logger logger = LogManager.getLogger(JdbcHttpClientRequestTests.class);
@BeforeClass
public static void init() throws Exception {
    // start the shared mock web server once for all tests in this class
    webServer.start();
}
@AfterClass
public static void cleanup() {
    try {
        webServer.close();
    } finally {
        // drop the static reference so the server can be garbage collected
        webServer = null;
    }
}
public void testBinaryRequestEnabled() throws Exception {
    // with binary communication enabled the request body must be CBOR
    assertBinaryRequest(true, XContentType.CBOR);
}
public void testBinaryRequestDisabled() throws Exception {
    // with binary communication disabled the request body must be JSON
    assertBinaryRequest(false, XContentType.JSON);
}
/**
 * Issues a query and a next-page request against the mock server with the
 * given binary setting, and asserts that both requests were encoded with the
 * expected content type.
 */
private void assertBinaryRequest(boolean isBinary, XContentType xContentType) throws Exception {
    // point the JDBC client at the mock server
    String url = JdbcConfiguration.URL_PREFIX + webServer.getHostName() + ":" + webServer.getPort();
    Properties props = new Properties();
    props.setProperty(ConnectionConfiguration.BINARY_COMMUNICATION, Boolean.toString(isBinary));
    JdbcHttpClient httpClient = new JdbcHttpClient(JdbcConfiguration.create(url, props, 0), false);
    prepareMockResponse();
    try {
        httpClient.query(randomAlphaOfLength(256), null,
            new RequestMeta(randomIntBetween(1, 100), randomNonNegativeLong(), randomNonNegativeLong()));
    } catch (SQLException e) {
        // only the request encoding matters here, not a successful roundtrip
        logger.info("Ignored SQLException", e);
    }
    assertValues(isBinary, xContentType);
    prepareMockResponse();
    try {
        httpClient.nextPage("", new RequestMeta(randomIntBetween(1, 100), randomNonNegativeLong(), randomNonNegativeLong()));
    } catch (SQLException e) {
        logger.info("Ignored SQLException", e);
    }
    assertValues(isBinary, xContentType);
}
/**
 * Asserts that exactly one request was recorded and that it was a POST with
 * the expected content type and binary_format flag.
 */
private void assertValues(boolean isBinary, XContentType xContentType) {
    assertEquals(1, webServer.requests().size());
    final RawRequest recorded = webServer.takeRequest();
    assertEquals(xContentType.mediaTypeWithoutParameters(), recorded.getHeader("Content-Type"));
    assertEquals("POST", recorded.getMethod());
    final BytesReference body = recorded.getBodyAsBytes();
    final Map<String, Object> requestContent = XContentHelper.convertToMap(body, false, xContentType).v2();
    final String mode = (String) requestContent.get("mode");
    assertTrue(mode.equalsIgnoreCase("jdbc"));
    assertEquals(isBinary, requestContent.get("binary_format"));
}
/**
 * Enqueues a minimal successful JSON reply for the next request.
 */
private void prepareMockResponse() {
    final Response response = new Response();
    response.setResponseCode(200);
    response.addHeader("Content-Type", "application/json");
    response.setBody("{\"rows\":[],\"columns\":[]}");
    webServer.enqueue(response);
}
@SuppressForbidden(reason = "use http server")
private static class RawRequestMockWebServer implements Closeable {
    private HttpServer server;
    // FIFO of canned responses; one is consumed per incoming request
    private final Queue<Response> responses = ConcurrentCollections.newQueue();
    // every received request is recorded here for later assertions
    private final Queue<RawRequest> requests = ConcurrentCollections.newQueue();
    private String hostname;
    private int port;

    RawRequestMockWebServer() {
    }

    /**
     * Starts the server on an ephemeral loopback port. Every incoming request
     * is recorded and answered with the next enqueued response.
     */
    void start() throws IOException {
        InetSocketAddress address = new InetSocketAddress(InetAddress.getLoopbackAddress().getHostAddress(), 0);
        server = MockHttpServer.createHttp(address, 0);
        server.start();
        this.hostname = server.getAddress().getHostString();
        this.port = server.getAddress().getPort();
        server.createContext("/", s -> {
            try {
                // NOTE(review): poll() returns null when no response was enqueued;
                // the resulting NPE is caught and logged by the catch below
                Response response = responses.poll();
                RawRequest request = createRequest(s);
                requests.add(request);
                s.getResponseHeaders().putAll(response.getHeaders());
                if (Strings.isEmpty(response.getBody())) {
                    s.sendResponseHeaders(response.getStatusCode(), 0);
                } else {
                    byte[] responseAsBytes = response.getBody().getBytes(StandardCharsets.UTF_8);
                    s.sendResponseHeaders(response.getStatusCode(), responseAsBytes.length);
                    // HEAD responses must not carry a body
                    if ("HEAD".equals(request.getMethod()) == false) {
                        try (OutputStream responseBody = s.getResponseBody()) {
                            responseBody.write(responseAsBytes);
                        }
                    }
                }
            } catch (Exception e) {
                logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to respond to request [{} {}]",
                        s.getRequestMethod(), s.getRequestURI()), e);
            } finally {
                s.close();
            }
        });
    }

    // copies method, headers and (when present) body out of the exchange
    private RawRequest createRequest(HttpExchange exchange) throws IOException {
        RawRequest request = new RawRequest(exchange.getRequestMethod(), exchange.getRequestHeaders());
        if (exchange.getRequestBody() != null) {
            BytesReference bytesRef = Streams.readFully(exchange.getRequestBody());
            request.setBodyAsBytes(bytesRef);
        }
        return request;
    }

    String getHostName() {
        return hostname;
    }

    int getPort() {
        return port;
    }

    // queues a canned response for the next request
    void enqueue(Response response) {
        responses.add(response);
    }

    // snapshot of all recorded requests
    List<RawRequest> requests() {
        return new ArrayList<>(requests);
    }

    // removes and returns the oldest recorded request, or null if none
    RawRequest takeRequest() {
        return requests.poll();
    }

    @Override
    public void close() {
        if (server.getExecutor() instanceof ExecutorService) {
            terminate((ExecutorService) server.getExecutor());
        }
        server.stop(0);
    }
}
@SuppressForbidden(reason = "use http server header class")
private static class RawRequest {
    private final String method;
    private final Headers headers;
    // set after construction when the exchange carried a request body
    private BytesReference bodyAsBytes = null;

    RawRequest(String method, Headers headers) {
        this.method = method;
        this.headers = headers;
    }

    public String getMethod() {
        return method;
    }

    // first value of the named header, or null if absent
    public String getHeader(String name) {
        return headers.getFirst(name);
    }

    public BytesReference getBodyAsBytes() {
        return bodyAsBytes;
    }

    public void setBodyAsBytes(BytesReference bodyAsBytes) {
        this.bodyAsBytes = bodyAsBytes;
    }
}
@SuppressForbidden(reason = "use http server header class")
private class Response {
private String body = null;
private int statusCode = 200;
private Headers headers = new Headers();
public Response setBody(String body) {
this.body = body;
return this;
}
public Response setResponseCode(int statusCode) {
this.statusCode = statusCode;
return this;
}
public Response addHeader(String name, String value) {
headers.add(name, value);
return this;
}
String getBody() {
return body;
}
int getStatusCode() {
return statusCode;
}
Headers getHeaders() {
return headers;
}
}
}
|
|
/*
* Copyright 2016 Sumo Logic
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ws.epigraph.ideaplugin.schema.brains.hierarchy;
import com.intellij.psi.PsiElement;
import com.intellij.psi.util.PsiTreeUtil;
import ws.epigraph.ideaplugin.schema.psi.SchemaPsiUtil;
import ws.epigraph.schema.parser.psi.*;
import org.jetbrains.annotations.NotNull;
import java.util.Arrays;
import java.util.List;
import java.util.function.Predicate;
import static ws.epigraph.schema.lexer.SchemaElementTypes.S_WITH;
/**
* @author <a href="mailto:konstantin.sobolev@gmail.com">Konstantin Sobolev</a>
*/
public abstract class CompletionTypeFilters {
// TODO check for correct collection types, e.g. List[Foo] can't extend List[Bar] unless Foo extends Bar
private static final List<CompletionTypeFilter> FILTERS = Arrays.asList(
new SameTypeExtendsFilter(),
new SameKindExtendsFilter(),
new TypeAlreadyExtendedFilter(),
new WrongPrimitiveKindExtendsFilter(),
new SameTypeSupplementsFilter(),
new SameKindSupplementsFilter(),
new TypeAlreadySupplementedFilter(),
new WrongPrimitiveKindSupplementsFilter(),
new SameTypeSupplementTargetFilter(),
new SameKindSupplementTargetFilter(),
new TypeAlreadySupplementedTargetFilter(),
new WrongPrimitiveKindSupplementTargetFilter(),
new SameTypeSupplementSourceFilter(),
new SameKindSupplementSourceFilter(),
new TypeAlreadySupplementedSourceFilter(),
new WrongPrimitiveKindSupplementSourceFilter()
);
@NotNull
public static Predicate<SchemaTypeDef> combined(@NotNull PsiElement element) {
    // a type def survives only if every registered filter accepts it
    return typeDef -> FILTERS.stream().allMatch(filter -> filter.include(typeDef, element));
}
interface CompletionTypeFilter {
    /**
     * Checks if particular {@code typeDef} should stay in the completion variants
     *
     * @param typeDef type def to check
     * @param element element completion was invoked on
     * @return {@code true} iff {@code typeDef} should stay included
     */
    boolean include(@NotNull SchemaTypeDef typeDef, @NotNull PsiElement element);
}

/**
 * Filter applied when completion happens inside an {@code extends} declaration;
 * elements outside an extends declaration are always included.
 */
interface ExtendsCompletionFilter extends CompletionTypeFilter {
    @Override
    default boolean include(@NotNull SchemaTypeDef typeDef, @NotNull PsiElement element) {
        SchemaTypeDef host = PsiTreeUtil.getParentOfType(element, SchemaTypeDef.class);
        if (host == null) return true;
        SchemaExtendsDecl extendsDecl = PsiTreeUtil.getParentOfType(element, SchemaExtendsDecl.class);
        if (extendsDecl == null) return true;
        return include(typeDef, host, extendsDecl);
    }

    /**
     * @param typeDef candidate type
     * @param host type def whose extends declaration is being completed
     * @param extendsDecl the enclosing extends declaration
     * @return {@code true} iff {@code typeDef} should stay included
     */
    boolean include(@NotNull SchemaTypeDef typeDef, @NotNull SchemaTypeDef host, @NotNull SchemaExtendsDecl extendsDecl);
}

/**
 * Filter applied when completion happens inside a {@code supplements} declaration;
 * elements outside a supplements declaration are always included.
 */
interface SupplementsCompletionFilter extends CompletionTypeFilter {
    @Override
    default boolean include(@NotNull SchemaTypeDef typeDef, @NotNull PsiElement element) {
        SchemaTypeDef host = PsiTreeUtil.getParentOfType(element, SchemaTypeDef.class);
        if (host == null) return true;
        SchemaSupplementsDecl supplementsDecl = PsiTreeUtil.getParentOfType(element, SchemaSupplementsDecl.class);
        if (supplementsDecl == null) return true;
        return include(typeDef, host, supplementsDecl);
    }

    boolean include(@NotNull SchemaTypeDef typeDef, @NotNull SchemaTypeDef host, @NotNull SchemaSupplementsDecl supplementsDecl);
}

/**
 * Filter for the target position of a {@code supplement} definition
 * (the part before the {@code with} keyword).
 */
interface SupplementTargetCompletionFilter extends CompletionTypeFilter {
    @Override
    default boolean include(@NotNull SchemaTypeDef typeDef, @NotNull PsiElement element) {
        SchemaSupplementDef host = PsiTreeUtil.getParentOfType(element, SchemaSupplementDef.class);
        if (host == null) return true;
        if (SchemaPsiUtil.hasPrevSibling(element.getParent().getParent(), S_WITH)) return true; // we're completing source
        return includeInTarget(typeDef, host);
    }

    boolean includeInTarget(@NotNull SchemaTypeDef typeDef, @NotNull SchemaSupplementDef host);
}

/**
 * Filter for the source position of a {@code supplement} definition
 * (the part after the {@code with} keyword).
 */
interface SupplementSourceCompletionFilter extends CompletionTypeFilter {
    @Override
    default boolean include(@NotNull SchemaTypeDef typeDef, @NotNull PsiElement element) {
        SchemaSupplementDef host = PsiTreeUtil.getParentOfType(element, SchemaSupplementDef.class);
        if (host == null) return true;
        if (!SchemaPsiUtil.hasPrevSibling(element.getParent().getParent(), S_WITH))
            return true; // we're completing target
        return includeInSource(typeDef, host);
    }

    boolean includeInSource(@NotNull SchemaTypeDef typeDef, @NotNull SchemaSupplementDef host);
}
// ---------------------- common
/**
 * Shared logic excluding a type from completion wherever listing the type
 * itself would be meaningless (a type cannot extend/supplement itself).
 */
private static abstract class SameTypeFilterBase {
    private boolean notSameType(@NotNull SchemaTypeDef typeDef, @NotNull SchemaTypeDef host) {
        return !host.equals(typeDef);
    }

    public boolean include(@NotNull SchemaTypeDef typeDef, @NotNull SchemaTypeDef host, @NotNull SchemaExtendsDecl extendsDecl) {
        return notSameType(typeDef, host);
    }

    public boolean include(@NotNull SchemaTypeDef typeDef, @NotNull SchemaTypeDef host, @NotNull SchemaSupplementsDecl supplementsDecl) {
        return notSameType(typeDef, host);
    }

    // excludes the supplement's source (optionally) and any already-listed target
    private boolean includeInSupplement(@NotNull SchemaTypeDef typeDef, @NotNull SchemaSupplementDef host, boolean checkSource) {
        if (checkSource && typeDef.equals(host.source())) return false;
        for (SchemaQnTypeRef targetRef : host.supplementedRefs()) {
            SchemaTypeDef target = targetRef.resolve();
            if (target != null && typeDef.equals(target)) return false;
        }
        return true;
    }

    public boolean includeInTarget(@NotNull SchemaTypeDef typeDef, @NotNull SchemaSupplementDef host) {
        return includeInSupplement(typeDef, host, true);
    }

    public boolean includeInSource(@NotNull SchemaTypeDef typeDef, @NotNull SchemaSupplementDef host) {
        return includeInSupplement(typeDef, host, false);
    }
}
/**
 * Shared logic excluding candidates whose kind differs from the kind of the
 * host type (or, for supplements, from the source and targets).
 */
private static abstract class SameKindFilterBase {
    public boolean include(@NotNull SchemaTypeDef typeDef, @NotNull SchemaTypeDef host, @NotNull SchemaExtendsDecl extendsDecl) {
        return isSameKind(typeDef, host);
    }

    public boolean include(@NotNull SchemaTypeDef typeDef, @NotNull SchemaTypeDef host, @NotNull SchemaSupplementsDecl supplementsDecl) {
        return isSameKind(typeDef, host);
    }

    private boolean isSameKind(@NotNull SchemaTypeDef typeDef, @NotNull SchemaTypeDef host) {
        return typeDef.getKind() == host.getKind();
    }

    // candidate must match the kind of the (optional) source and of every resolved target
    private boolean includeInSupplement(@NotNull SchemaTypeDef typeDef, @NotNull SchemaSupplementDef host, boolean checkSource) {
        if (checkSource) {
            SchemaTypeDef source = host.source();
            if (source != null && typeDef.getKind() != source.getKind()) return false;
        }
        for (SchemaQnTypeRef targetRef : host.supplementedRefs()) {
            SchemaTypeDef target = targetRef.resolve();
            if (target != null && typeDef.getKind() != target.getKind()) return false;
        }
        return true;
    }

    public boolean includeInTarget(@NotNull SchemaTypeDef typeDef, @NotNull SchemaSupplementDef host) {
        return includeInSupplement(typeDef, host, true);
    }

    public boolean includeInSource(@NotNull SchemaTypeDef typeDef, @NotNull SchemaSupplementDef host) {
        return includeInSupplement(typeDef, host, false);
    }
}
/**
 * Shared logic excluding primitive candidates whose primitive kind (string,
 * integer, ...) does not match the primitive kind of the host/source/targets.
 * Non-primitive hosts are unaffected.
 */
private abstract static class WrongPrimitiveKindFilterBase {
    public boolean include(@NotNull SchemaTypeDef typeDef, @NotNull SchemaTypeDef host, @NotNull SchemaExtendsDecl extendsDecl) {
        return isSamePrimitiveKind(typeDef, host);
    }

    public boolean include(@NotNull SchemaTypeDef typeDef, @NotNull SchemaTypeDef host, @NotNull SchemaSupplementsDecl supplementsDecl) {
        return isSamePrimitiveKind(typeDef, host);
    }

    private boolean isSamePrimitiveKind(@NotNull SchemaTypeDef typeDef, @NotNull SchemaTypeDef host) {
        // only primitive hosts constrain the candidate
        if (host.getKind() != TypeKind.PRIMITIVE) return true;
        if (typeDef.getKind() != TypeKind.PRIMITIVE) return false;
        return ((SchemaPrimitiveTypeDef) host).getPrimitiveTypeKind() ==
                ((SchemaPrimitiveTypeDef) typeDef).getPrimitiveTypeKind();
    }

    private boolean includeInSupplement(@NotNull SchemaTypeDef typeDef, @NotNull SchemaSupplementDef host, boolean checkSource) {
        // non-primitive candidates are not constrained here
        if (typeDef.getKind() != TypeKind.PRIMITIVE) return true;
        PrimitiveTypeKind primitiveTypeKind = ((SchemaPrimitiveTypeDef) typeDef).getPrimitiveTypeKind();
        if (checkSource) {
            SchemaTypeDef source = host.source();
            if (source != null) {
                if (source.getKind() != TypeKind.PRIMITIVE) return false;
                if (((SchemaPrimitiveTypeDef) source).getPrimitiveTypeKind() != primitiveTypeKind) return false;
            }
        }
        for (SchemaQnTypeRef targetRef : host.supplementedRefs()) {
            SchemaTypeDef target = targetRef.resolve();
            if (target instanceof SchemaPrimitiveTypeDef) {
                SchemaPrimitiveTypeDef primitiveTarget = (SchemaPrimitiveTypeDef) target;
                if (primitiveTarget.getPrimitiveTypeKind() != primitiveTypeKind) return false;
            }
        }
        return true;
    }

    public boolean includeInTarget(@NotNull SchemaTypeDef typeDef, @NotNull SchemaSupplementDef host) {
        return includeInSupplement(typeDef, host, true);
    }

    public boolean includeInSource(@NotNull SchemaTypeDef typeDef, @NotNull SchemaSupplementDef host) {
        return includeInSupplement(typeDef, host, false);
    }
}
// ---------------------- extends clause
/** Excludes the host type itself from extends-clause completion. */
private static class SameTypeExtendsFilter extends SameTypeFilterBase implements ExtendsCompletionFilter {}
/** Only candidates of the same kind as the host may be extended. */
private static class SameKindExtendsFilter extends SameKindFilterBase implements ExtendsCompletionFilter {}
/** Primitive hosts may only extend primitives of the same primitive kind. */
private static class WrongPrimitiveKindExtendsFilter extends WrongPrimitiveKindFilterBase implements ExtendsCompletionFilter {}
private static class TypeAlreadyExtendedFilter implements ExtendsCompletionFilter {
/**
 * Rejects candidates that are already reachable from the extends clause: a candidate is
 * excluded when it is listed directly, or when it is a (transitive) parent of a listed
 * type (which would make the new entry redundant).
 */
@Override
public boolean include(@NotNull SchemaTypeDef typeDef, @NotNull SchemaTypeDef host, @NotNull SchemaExtendsDecl extendsDecl) {
HierarchyCache hierarchyCache = HierarchyCache.getHierarchyCache(host.getProject());
for (SchemaQnTypeRef qnTypeRef : extendsDecl.getQnTypeRefList()) {
SchemaTypeDef parent = qnTypeRef.resolve();
if (parent != null) {
// unresolvable references are ignored rather than treated as conflicts
if (parent.equals(typeDef) || hierarchyCache.getTypeParents(parent).contains(typeDef)) return false;
}
}
return true;
}
}
// ---------------------- supplements clause
// Marker subclasses binding the shared filter bases to the supplements-clause completion interface.
private static class SameKindSupplementsFilter extends SameKindFilterBase implements SupplementsCompletionFilter {}
private static class SameTypeSupplementsFilter extends SameTypeFilterBase implements SupplementsCompletionFilter {}
private static class WrongPrimitiveKindSupplementsFilter extends WrongPrimitiveKindFilterBase implements SupplementsCompletionFilter {}
private static class TypeAlreadySupplementedFilter implements SupplementsCompletionFilter {
    /**
     * Excludes candidates already covered by the supplements clause: a candidate is rejected
     * when it is listed directly, or when a listed type is one of the candidate's
     * (transitive) parents — supplementing it again would be redundant.
     */
    @Override
    public boolean include(@NotNull SchemaTypeDef typeDef, @NotNull SchemaTypeDef host, @NotNull SchemaSupplementsDecl supplementsDecl) {
        List<SchemaQnTypeRef> supplementsList = supplementsDecl.getQnTypeRefList();
        if (supplementsList.isEmpty()) return true;
        HierarchyCache hierarchyCache = HierarchyCache.getHierarchyCache(host.getProject());
        List<SchemaTypeDef> typeParents = hierarchyCache.getTypeParents(typeDef);
        for (SchemaQnTypeRef qnTypeRef : supplementsList) {
            SchemaTypeDef child = qnTypeRef.resolve();
            // FIX: parenthesize the disjunction. `&&` binds tighter than `||`, so the original
            // `child != null && child.equals(typeDef) || typeParents.contains(child)` evaluated
            // `typeParents.contains(null)` for unresolved refs instead of skipping them
            // (compare the correctly null-guarded loop in TypeAlreadyExtendedFilter).
            if (child != null && (child.equals(typeDef) || typeParents.contains(child))) return false;
        }
        return true;
    }
}
// ---------------------- supplement target
// Marker subclasses binding the shared filter bases to the supplement-target completion interface.
private static class SameTypeSupplementTargetFilter extends SameTypeFilterBase implements SupplementTargetCompletionFilter {}
private static class SameKindSupplementTargetFilter extends SameKindFilterBase implements SupplementTargetCompletionFilter {}
private static class WrongPrimitiveKindSupplementTargetFilter extends WrongPrimitiveKindFilterBase implements SupplementTargetCompletionFilter {}
private static class TypeAlreadySupplementedTargetFilter implements SupplementTargetCompletionFilter {
    /**
     * Filters supplement-target candidates: rejects circular inheritance with the supplement's
     * source, useless supplements (candidate already extends the source), and targets that
     * are already listed or already covered by a listed target.
     */
    @Override
    public boolean includeInTarget(@NotNull SchemaTypeDef typeDef, @NotNull SchemaSupplementDef host) {
        HierarchyCache hierarchyCache = HierarchyCache.getHierarchyCache(host.getProject());
        SchemaTypeDef source = host.source();
        // if candidate is a parent of source then we have a circular inheritance
        if (source != null && hierarchyCache.getTypeParents(source).contains(typeDef)) return false;
        List<SchemaTypeDef> typeParents = hierarchyCache.getTypeParents(typeDef);
        // if candidate is a child of source then it's a useless supplement
        if (source != null && typeParents.contains(source)) return false;
        for (SchemaQnTypeRef supplementedTypeRef : host.supplementedRefs()) {
            SchemaTypeDef supplemented = supplementedTypeRef.resolve();
            // FIX: parenthesize the disjunction. `&&` binds tighter than `||`, so the original
            // null guard only covered equals(), and `typeParents.contains(null)` was evaluated
            // for unresolved refs instead of skipping them.
            if (supplemented != null && (supplemented.equals(typeDef) || typeParents.contains(supplemented))) return false;
        }
        return true;
    }
}
// ---------------------- supplement source
// Marker subclasses binding the shared filter bases to the supplement-source completion interface.
private static class SameTypeSupplementSourceFilter extends SameTypeFilterBase implements SupplementSourceCompletionFilter {}
private static class SameKindSupplementSourceFilter extends SameKindFilterBase implements SupplementSourceCompletionFilter {}
private static class WrongPrimitiveKindSupplementSourceFilter extends WrongPrimitiveKindFilterBase implements SupplementSourceCompletionFilter {}
private static class TypeAlreadySupplementedSourceFilter implements SupplementSourceCompletionFilter {
/**
 * Filters source-type candidates of a {@code supplement} definition: rejects
 * self-supplements and circular inheritance, and rejects a candidate when every
 * supplemented target already extends it (the supplement would then be redundant).
 */
@Override
public boolean includeInSource(@NotNull SchemaTypeDef typeDef, @NotNull SchemaSupplementDef host) {
List<SchemaTypeDef> supplementedList = host.supplemented();
if (supplementedList.isEmpty()) return true;
HierarchyCache hierarchyCache = HierarchyCache.getHierarchyCache(host.getProject());
List<SchemaTypeDef> typeParents = hierarchyCache.getTypeParents(typeDef);
// assume redundancy until a target is found that does not already extend the candidate
boolean allTargetsExtendSource = true;
for (SchemaTypeDef supplemented : supplementedList) {
if (supplemented != null) {
if (supplemented.equals(typeDef)) return false; // don't supplement self
if (typeParents.contains(supplemented)) return false; // circular inheritance
if (!hierarchyCache.getTypeParents(supplemented).contains(typeDef)) allTargetsExtendSource = false;
}
}
return !allTargetsExtendSource;
}
}
}
|
|
/*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.common.block;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static org.spongepowered.common.data.util.DataUtil.checkDataExists;
import com.flowpowered.math.vector.Vector3i;
import com.google.common.collect.Lists;
import net.minecraft.block.ITileEntityProvider;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntity;
import org.spongepowered.api.block.BlockSnapshot;
import org.spongepowered.api.block.BlockState;
import org.spongepowered.api.block.BlockTypes;
import org.spongepowered.api.data.DataView;
import org.spongepowered.api.data.Queries;
import org.spongepowered.api.data.key.Key;
import org.spongepowered.api.data.manipulator.DataManipulator;
import org.spongepowered.api.data.manipulator.ImmutableDataManipulator;
import org.spongepowered.api.data.persistence.AbstractDataBuilder;
import org.spongepowered.api.data.persistence.InvalidDataException;
import org.spongepowered.api.data.value.BaseValue;
import org.spongepowered.api.world.Location;
import org.spongepowered.api.world.World;
import org.spongepowered.api.world.storage.WorldProperties;
import org.spongepowered.common.data.persistence.NbtTranslator;
import org.spongepowered.common.data.util.DataQueries;
import org.spongepowered.common.data.util.DataUtil;
import org.spongepowered.common.data.util.NbtDataUtil;
import org.spongepowered.common.interfaces.data.IMixinCustomDataHolder;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
/**
 * Sponge implementation of {@link BlockSnapshot.Builder}: accumulates block state, world id,
 * position, creator/notifier UUIDs, immutable data manipulators and an optional raw
 * tile-entity NBT compound, then produces a {@link SpongeBlockSnapshot}.
 */
public class SpongeBlockSnapshotBuilder extends AbstractDataBuilder<BlockSnapshot> implements BlockSnapshot.Builder {
// Package-private fields: read directly by SpongeBlockSnapshot(this) in build().
BlockState blockState;
BlockState extendedState;
UUID worldUuid;
UUID creatorUuid;
UUID notifierUuid;
Vector3i coords;
@Nullable List<ImmutableDataManipulator<?, ?>> manipulators;
// Raw tile-entity NBT, present only when the snapshotted block had a tile entity;
// position() keeps its coordinate tags in sync with coords.
@Nullable NBTTagCompound compound;
public SpongeBlockSnapshotBuilder() {
super(BlockSnapshot.class, 1); // supported content version: 1
}
/** Sets the target world via its properties' unique id. */
@Override
public SpongeBlockSnapshotBuilder world(WorldProperties worldProperties) {
this.worldUuid = checkNotNull(worldProperties).getUniqueId();
return this;
}
/** Sets the target world id directly (non-API convenience). */
public SpongeBlockSnapshotBuilder worldId(UUID worldUuid) {
this.worldUuid = checkNotNull(worldUuid);
return this;
}
@Override
public SpongeBlockSnapshotBuilder blockState(BlockState blockState) {
this.blockState = checkNotNull(blockState);
return this;
}
/** Sets the extended block state; build() defaults it to {@code blockState} when unset. */
public SpongeBlockSnapshotBuilder extendedState(BlockState extendedState) {
this.extendedState = checkNotNull(extendedState);
return this;
}
/** Sets the position and keeps any stored tile-entity NBT coordinates in sync with it. */
@Override
public SpongeBlockSnapshotBuilder position(Vector3i position) {
this.coords = checkNotNull(position);
if (this.compound != null) {
this.compound.setInteger(NbtDataUtil.TILE_ENTITY_POSITION_X, position.getX());
this.compound.setInteger(NbtDataUtil.TILE_ENTITY_POSITION_Y, position.getY());
this.compound.setInteger(NbtDataUtil.TILE_ENTITY_POSITION_Z, position.getZ());
}
return this;
}
/**
 * Initializes state, world and position from a live location; when the block type provides
 * a tile entity and one is present, also captures its NBT and custom manipulators.
 */
@Override
public SpongeBlockSnapshotBuilder from(Location<World> location) {
this.blockState = location.getBlock();
this.worldUuid = location.getExtent().getUniqueId();
this.coords = location.getBlockPosition();
if (this.blockState.getType() instanceof ITileEntityProvider) {
if (location.hasTileEntity()) {
this.compound = new NBTTagCompound();
org.spongepowered.api.block.tileentity.TileEntity te = location.getTileEntity().get();
((TileEntity) te).writeToNBT(this.compound);
this.manipulators = ((IMixinCustomDataHolder) te).getCustomManipulators().stream()
.map(DataManipulator::asImmutable)
.collect(Collectors.toList());
}
}
return this;
}
@Override
public BlockSnapshot.Builder creator(UUID uuid) {
this.creatorUuid = checkNotNull(uuid);
return this;
}
@Override
public BlockSnapshot.Builder notifier(UUID uuid) {
this.notifierUuid = checkNotNull(uuid);
return this;
}
/** Stores a defensive copy of the given raw tile-entity NBT. */
public SpongeBlockSnapshotBuilder unsafeNbt(NBTTagCompound compound) {
this.compound = compound.copy();
return this;
}
@Override
public SpongeBlockSnapshotBuilder add(DataManipulator<?, ?> manipulator) {
return add(checkNotNull(manipulator, "manipulator").asImmutable());
}
/** Adds a manipulator, first removing any previously added manipulator of the same class. */
@Override
public SpongeBlockSnapshotBuilder add(ImmutableDataManipulator<?, ?> manipulator) {
checkNotNull(manipulator, "manipulator");
if (this.manipulators == null) {
this.manipulators = Lists.newArrayList();
}
// replace-on-add semantics: drop existing entries of the same manipulator type
for (Iterator<ImmutableDataManipulator<?, ?>> iterator = this.manipulators.iterator(); iterator.hasNext();) {
final ImmutableDataManipulator<?, ?> existing = iterator.next();
if (manipulator.getClass().isInstance(existing)) {
iterator.remove();
}
}
this.manipulators.add(manipulator);
return this;
}
/** Applies a keyed value to the (required) block state and, if set, the extended state. */
@Override
public <V> BlockSnapshot.Builder add(Key<? extends BaseValue<V>> key, V value) {
checkNotNull(key, "key");
checkState(this.blockState != null);
// with() yields empty when the key does not apply; the previous state is kept in that case
this.blockState = this.blockState.with(key, value).orElse(this.blockState);
if(this.extendedState != null) {
this.extendedState = this.extendedState.with(key, value).orElse(this.extendedState);
}
return this;
}
/** Copies all data from an existing snapshot, including raw NBT for Sponge snapshots. */
@Override
public SpongeBlockSnapshotBuilder from(BlockSnapshot holder) {
this.blockState = holder.getState();
this.worldUuid = holder.getWorldUniqueId();
if (holder.getCreator().isPresent()) {
this.creatorUuid = holder.getCreator().get();
}
if (holder.getNotifier().isPresent()) {
this.notifierUuid = holder.getNotifier().get();
}
this.coords = holder.getPosition();
this.manipulators = Lists.newArrayList(holder.getManipulators());
if (holder instanceof SpongeBlockSnapshot) {
if (((SpongeBlockSnapshot) holder).compound != null) {
this.compound = ((SpongeBlockSnapshot) holder).compound.copy();
}
}
return this;
}
/** Resets to a default AIR state with every other field cleared. */
@Override
public SpongeBlockSnapshotBuilder reset() {
this.blockState = BlockTypes.AIR.getDefaultState();
this.worldUuid = null;
this.creatorUuid = null;
this.notifierUuid = null;
this.coords = null;
this.manipulators = null;
this.compound = null;
return this;
}
/** Builds the snapshot; a block state is required, extendedState defaults to it. */
@Override
public BlockSnapshot build() {
checkState(this.blockState != null);
if (this.extendedState == null) {
this.extendedState = this.blockState;
}
return new SpongeBlockSnapshot(this);
}
/**
 * Deserializes a snapshot from a DataView. Block state, world id and position are required;
 * creator/notifier ids, extended state, unsafe NBT and serialized tile data are optional.
 *
 * @throws InvalidDataException if required data is present but malformed
 */
@Override
protected Optional<BlockSnapshot> buildContent(DataView container) throws InvalidDataException {
if (!container.contains(DataQueries.BLOCK_STATE, Queries.WORLD_ID, DataQueries.SNAPSHOT_WORLD_POSITION)) {
return Optional.empty();
}
checkDataExists(container, DataQueries.BLOCK_STATE);
checkDataExists(container, Queries.WORLD_ID);
final SpongeBlockSnapshotBuilder builder = new SpongeBlockSnapshotBuilder();
final UUID worldUuid = UUID.fromString(container.getString(Queries.WORLD_ID).get());
final Vector3i coordinate = DataUtil.getPosition3i(container);
Optional<String> creatorUuid = container.getString(Queries.CREATOR_ID);
Optional<String> notifierUuid = container.getString(Queries.NOTIFIER_ID);
// We now reconstruct the custom data and all extra data.
final BlockState blockState = container.getSerializable(DataQueries.BLOCK_STATE, BlockState.class).get();
BlockState extendedState = null;
if (container.contains(DataQueries.BLOCK_EXTENDED_STATE)) {
extendedState = container.getSerializable(DataQueries.BLOCK_EXTENDED_STATE, BlockState.class).get();
} else {
extendedState = blockState;
}
builder.blockState(blockState)
.extendedState(extendedState)
.position(coordinate)
.worldId(worldUuid);
if (creatorUuid.isPresent()) {
builder.creator(UUID.fromString(creatorUuid.get()));
}
if (notifierUuid.isPresent()) {
builder.notifier(UUID.fromString(notifierUuid.get()));
}
Optional<DataView> unsafeCompound = container.getView(DataQueries.UNSAFE_NBT);
final NBTTagCompound compound = unsafeCompound.isPresent() ? NbtTranslator.getInstance().translateData(unsafeCompound.get()) : null;
if (compound != null) {
builder.unsafeNbt(compound);
}
if (container.contains(DataQueries.SNAPSHOT_TILE_DATA)) {
final List<DataView> dataViews = container.getViewList(DataQueries.SNAPSHOT_TILE_DATA).get();
DataUtil.deserializeImmutableManipulatorList(dataViews).stream().forEach(builder::add);
}
return Optional.of(new SpongeBlockSnapshot(builder));
}
}
|
|
package cgeo.geocaching.connector.gc;
import cgeo.geocaching.CgeoApplication;
import cgeo.geocaching.DataStore;
import cgeo.geocaching.Geocache;
import cgeo.geocaching.LogCacheActivity;
import cgeo.geocaching.R;
import cgeo.geocaching.SearchResult;
import cgeo.geocaching.activity.ActivityMixin;
import cgeo.geocaching.connector.AbstractConnector;
import cgeo.geocaching.connector.ILoggingManager;
import cgeo.geocaching.connector.UserAction;
import cgeo.geocaching.connector.capability.FieldNotesCapability;
import cgeo.geocaching.connector.capability.ICredentials;
import cgeo.geocaching.connector.capability.ILogin;
import cgeo.geocaching.connector.capability.ISearchByCenter;
import cgeo.geocaching.connector.capability.ISearchByFinder;
import cgeo.geocaching.connector.capability.ISearchByGeocode;
import cgeo.geocaching.connector.capability.ISearchByKeyword;
import cgeo.geocaching.connector.capability.ISearchByNextPage;
import cgeo.geocaching.connector.capability.ISearchByOwner;
import cgeo.geocaching.connector.capability.ISearchByViewPort;
import cgeo.geocaching.connector.capability.IgnoreCapability;
import cgeo.geocaching.enumerations.StatusCode;
import cgeo.geocaching.loaders.RecaptchaReceiver;
import cgeo.geocaching.location.Geopoint;
import cgeo.geocaching.location.Viewport;
import cgeo.geocaching.network.Network;
import cgeo.geocaching.network.Parameters;
import cgeo.geocaching.settings.Settings;
import cgeo.geocaching.settings.SettingsActivity;
import cgeo.geocaching.utils.CancellableHandler;
import cgeo.geocaching.utils.Log;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.eclipse.jdt.annotation.NonNull;
import org.eclipse.jdt.annotation.Nullable;
import rx.functions.Action1;
import android.content.ActivityNotFoundException;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.Handler;
import java.io.File;
import java.util.List;
import java.util.regex.Pattern;
/**
 * Connector for geocaching.com. Implements search, login/logout, watchlist, favorites,
 * modified-coordinates, personal-note and field-note capabilities, delegating the actual
 * network work to {@link GCParser}, {@link GCMap} and {@link GCLogin}.
 */
public class GCConnector extends AbstractConnector implements ISearchByGeocode, ISearchByCenter, ISearchByNextPage, ISearchByViewPort, ISearchByKeyword, ILogin, ICredentials, ISearchByOwner, ISearchByFinder, FieldNotesCapability, IgnoreCapability {

    @NonNull
    private static final String CACHE_URL_SHORT = "http://coord.info/";

    // Double slash is used to force open in browser
    @NonNull
    private static final String CACHE_URL_LONG = "http://www.geocaching.com/seek/cache_details.aspx?wp=";

    /**
     * Pocket queries downloaded from the website use a numeric prefix. The pocket query creator Android app adds a
     * verbatim "pocketquery" prefix.
     */
    @NonNull
    private static final Pattern GPX_ZIP_FILE_PATTERN = Pattern.compile("((\\d{7,})|(pocketquery))" + "(_.+)?" + "\\.zip", Pattern.CASE_INSENSITIVE);

    /**
     * Pattern for GC codes
     */
    @NonNull
    private final static Pattern PATTERN_GC_CODE = Pattern.compile("GC[0-9A-Z]+", Pattern.CASE_INSENSITIVE);

    private GCConnector() {
        // singleton
    }

    /**
     * initialization on demand holder pattern
     */
    private static class Holder {
        private static final @NonNull GCConnector INSTANCE = new GCConnector();
    }

    @NonNull
    public static GCConnector getInstance() {
        return Holder.INSTANCE;
    }

    @Override
    public boolean canHandle(@NonNull final String geocode) {
        return PATTERN_GC_CODE.matcher(geocode).matches();
    }

    @Override
    @NonNull
    public String getLongCacheUrl(@NonNull final Geocache cache) {
        return CACHE_URL_LONG + cache.getGeocode();
    }

    @Override
    @NonNull
    public String getCacheUrl(@NonNull final Geocache cache) {
        return CACHE_URL_SHORT + cache.getGeocode();
    }

    @Override
    public boolean supportsPersonalNote() {
        // personal notes are a premium-member feature on geocaching.com
        return Settings.isGCPremiumMember();
    }

    @Override
    public boolean supportsOwnCoordinates() {
        return true;
    }

    @Override
    public boolean supportsWatchList() {
        return true;
    }

    @Override
    public boolean supportsLogging() {
        return true;
    }

    @Override
    public boolean supportsLogImages() {
        return true;
    }

    @Override
    @NonNull
    public ILoggingManager getLoggingManager(@NonNull final LogCacheActivity activity, @NonNull final Geocache cache) {
        return new GCLoggingManager(activity, cache);
    }

    @Override
    public boolean canLog(@NonNull final Geocache cache) {
        return StringUtils.isNotBlank(cache.getCacheId());
    }

    @Override
    @NonNull
    public String getName() {
        return "geocaching.com";
    }

    @Override
    @NonNull
    public String getHost() {
        return "www.geocaching.com";
    }

    /**
     * Loads a cache's detail page and parses it. Falls back to the locally stored cache when the
     * server returns no data; reports a communication error otherwise.
     */
    @Override
    public SearchResult searchByGeocode(final @Nullable String geocode, final @Nullable String guid, final CancellableHandler handler) {
        CancellableHandler.sendLoadProgressDetail(handler, R.string.cache_dialog_loading_details_status_loadpage);
        final String page = GCParser.requestHtmlPage(geocode, guid, "y");
        if (StringUtils.isEmpty(page)) {
            final SearchResult search = new SearchResult();
            if (DataStore.isThere(geocode, guid, false)) {
                if (StringUtils.isBlank(geocode) && StringUtils.isNotBlank(guid)) {
                    Log.i("Loading old cache from cache.");
                    search.addGeocode(DataStore.getGeocodeForGuid(guid));
                } else {
                    search.addGeocode(geocode);
                }
                search.setError(StatusCode.NO_ERROR);
                return search;
            }
            Log.e("GCConnector.searchByGeocode: No data from server");
            search.setError(StatusCode.COMMUNICATION_ERROR);
            return search;
        }
        assert page != null;
        final SearchResult searchResult = GCParser.parseCache(page, handler);
        if (searchResult == null || CollectionUtils.isEmpty(searchResult.getGeocodes())) {
            Log.w("GCConnector.searchByGeocode: No cache parsed");
            return searchResult;
        }
        // do not filter when searching for one specific cache
        return searchResult;
    }

    @Override
    public SearchResult searchByNextPage(final SearchResult search, final boolean showCaptcha, final RecaptchaReceiver recaptchaReceiver) {
        return GCParser.searchByNextPage(search, showCaptcha, recaptchaReceiver);
    }

    @Override
    @NonNull
    public SearchResult searchByViewport(@NonNull final Viewport viewport, @NonNull final MapTokens tokens) {
        return GCMap.searchByViewport(viewport, tokens);
    }

    @Override
    public boolean isZippedGPXFile(@NonNull final String fileName) {
        return GPX_ZIP_FILE_PATTERN.matcher(fileName).matches();
    }

    @Override
    public boolean isReliableLatLon(final boolean cacheHasReliableLatLon) {
        return cacheHasReliableLatLon;
    }

    @Override
    public boolean isOwner(@NonNull final Geocache cache) {
        final String user = Settings.getUsername();
        return StringUtils.isNotEmpty(user) && StringUtils.equalsIgnoreCase(cache.getOwnerUserId(), user);
    }

    @Override
    public boolean addToWatchlist(@NonNull final Geocache cache) {
        final boolean added = GCParser.addToWatchlist(cache);
        if (added) {
            DataStore.saveChangedCache(cache);
        }
        return added;
    }

    @Override
    public boolean removeFromWatchlist(@NonNull final Geocache cache) {
        final boolean removed = GCParser.removeFromWatchlist(cache);
        if (removed) {
            DataStore.saveChangedCache(cache);
        }
        return removed;
    }

    /**
     * Add a cache to the favorites list.
     *
     * This must not be called from the UI thread.
     *
     * @param cache
     *            the cache to add
     * @return <code>true</code> if the cache was successfully added, <code>false</code> otherwise
     */
    public static boolean addToFavorites(final Geocache cache) {
        final boolean added = GCParser.addToFavorites(cache);
        if (added) {
            DataStore.saveChangedCache(cache);
        }
        return added;
    }

    /**
     * Remove a cache from the favorites list.
     *
     * This must not be called from the UI thread.
     *
     * @param cache
     *            the cache to remove
     * @return <code>true</code> if the cache was successfully removed, <code>false</code> otherwise
     */
    public static boolean removeFromFavorites(final Geocache cache) {
        final boolean removed = GCParser.removeFromFavorites(cache);
        if (removed) {
            DataStore.saveChangedCache(cache);
        }
        return removed;
    }

    @Override
    public boolean uploadModifiedCoordinates(@NonNull final Geocache cache, @NonNull final Geopoint wpt) {
        final boolean uploaded = GCParser.uploadModifiedCoordinates(cache, wpt);
        if (uploaded) {
            DataStore.saveChangedCache(cache);
        }
        return uploaded;
    }

    @Override
    public boolean deleteModifiedCoordinates(@NonNull final Geocache cache) {
        final boolean deleted = GCParser.deleteModifiedCoordinates(cache);
        if (deleted) {
            DataStore.saveChangedCache(cache);
        }
        return deleted;
    }

    @Override
    public boolean uploadPersonalNote(@NonNull final Geocache cache) {
        final boolean uploaded = GCParser.uploadPersonalNote(cache);
        if (uploaded) {
            DataStore.saveChangedCache(cache);
        }
        return uploaded;
    }

    @Override
    public SearchResult searchByCenter(@NonNull final Geopoint center, final @NonNull RecaptchaReceiver recaptchaReceiver) {
        return GCParser.searchByCoords(center, Settings.getCacheType(), Settings.isShowCaptcha(), recaptchaReceiver);
    }

    @Override
    public boolean supportsFavoritePoints(@NonNull final Geocache cache) {
        // event caches cannot receive favorite points
        return !cache.getType().isEvent();
    }

    @Override
    @NonNull
    protected String getCacheUrlPrefix() {
        return StringUtils.EMPTY; // UNUSED
    }

    /**
     * Extracts a GC code from coord.info short URLs or expanded geocaching.com cache URLs,
     * or returns {@code null} when the URL does not belong to this connector.
     */
    @Override
    @Nullable
    public String getGeocodeFromUrl(@NonNull final String url) {
        final String noQueryString = StringUtils.substringBefore(url, "?");
        // coord.info URLs
        final String afterCoord = StringUtils.substringAfterLast(noQueryString, "coord.info/");
        if (canHandle(afterCoord)) {
            return afterCoord;
        }
        // expanded geocaching.com URLs
        final String afterGeocache = StringUtils.substringBetween(noQueryString, "/geocache/", "_");
        if (afterGeocache != null && canHandle(afterGeocache)) {
            return afterGeocache;
        }
        return null;
    }

    @Override
    public boolean isActive() {
        return Settings.isGCConnectorActive();
    }

    @Override
    public int getCacheMapMarkerId(final boolean disabled) {
        if (disabled) {
            return R.drawable.marker_disabled;
        }
        return R.drawable.marker;
    }

    @Override
    public boolean login(final Handler handler, final Context fromActivity) {
        // login
        final StatusCode status = GCLogin.getInstance().login();
        if (CgeoApplication.getInstance().showLoginToast && handler != null) {
            handler.sendMessage(handler.obtainMessage(0, status));
            CgeoApplication.getInstance().showLoginToast = false;
            // invoke settings activity to insert login details
            if (status == StatusCode.NO_LOGIN_INFO_STORED && fromActivity != null) {
                SettingsActivity.openForScreen(R.string.preference_screen_gc, fromActivity);
            }
        }
        return status == StatusCode.NO_ERROR;
    }

    @Override
    public void logout() {
        GCLogin.getInstance().logout();
    }

    @Override
    public String getUserName() {
        return GCLogin.getInstance().getActualUserName();
    }

    @Override
    public int getCachesFound() {
        return GCLogin.getInstance().getActualCachesFound();
    }

    @Override
    public String getLoginStatusString() {
        return GCLogin.getInstance().getActualStatus();
    }

    @Override
    public boolean isLoggedIn() {
        return GCLogin.getInstance().isActualLoginStatus();
    }

    @Override
    public String getWaypointGpxId(final String prefix, @NonNull final String geocode) {
        String gpxId = prefix;
        if (StringUtils.isNotBlank(geocode) && geocode.length() > 2) {
            // append the geocode without its "GC" prefix
            gpxId += geocode.substring(2);
        }
        return gpxId;
    }

    @Override
    @NonNull
    public String getWaypointPrefix(final String name) {
        String prefix = name;
        if (StringUtils.isNotBlank(prefix) && prefix.length() >= 2) {
            prefix = name.substring(0, 2);
        }
        return prefix;
    }

    @Override
    public SearchResult searchByKeyword(@NonNull final String keyword, final @NonNull RecaptchaReceiver recaptchaReceiver) {
        return GCParser.searchByKeyword(keyword, Settings.getCacheType(), Settings.isShowCaptcha(), recaptchaReceiver);
    }

    @Override
    public int getUsernamePreferenceKey() {
        return R.string.pref_username;
    }

    @Override
    public int getPasswordPreferenceKey() {
        return R.string.pref_password;
    }

    @Override
    public @NonNull
    List<UserAction> getUserActions() {
        final List<UserAction> actions = super.getUserActions();
        actions.add(new UserAction(R.string.user_menu_open_browser, new Action1<UserAction.Context>() {

            @Override
            public void call(final cgeo.geocaching.connector.UserAction.Context context) {
                context.activity.startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse("http://www.geocaching.com/profile/?u=" + Network.encode(context.userName))));
            }
        }));
        actions.add(new UserAction(R.string.user_menu_send_message, new Action1<UserAction.Context>() {

            @Override
            public void call(final cgeo.geocaching.connector.UserAction.Context context) {
                try {
                    context.activity.startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse("http://www.geocaching.com/email/?u=" + Network.encode(context.userName))));
                } catch (final ActivityNotFoundException e) {
                    Log.e("Cannot find suitable activity", e);
                    ActivityMixin.showToast(context.activity, R.string.err_application_no);
                }
            }
        }));
        return actions;
    }

    @Override
    public SearchResult searchByOwner(final @NonNull String username, final @NonNull RecaptchaReceiver recaptchaReceiver) {
        return GCParser.searchByOwner(username, Settings.getCacheType(), Settings.isShowCaptcha(), recaptchaReceiver);
    }

    @Override
    public SearchResult searchByFinder(final @NonNull String username, final @NonNull RecaptchaReceiver recaptchaReceiver) {
        return GCParser.searchByUsername(username, Settings.getCacheType(), Settings.isShowCaptcha(), recaptchaReceiver);
    }

    /**
     * Uploads a field-notes file to geocaching.com.
     *
     * @return {@code true} if the server answered the upload request with data,
     *         {@code false} when the form page or the upload response was empty
     */
    @Override
    public boolean uploadFieldNotes(@NonNull final File exportFile) {
        if (!GCLogin.getInstance().isActualLoginStatus()) {
            // no need to upload (possibly large file) if we're not logged in
            final StatusCode loginState = GCLogin.getInstance().login();
            if (loginState != StatusCode.NO_ERROR) {
                Log.e("FieldnoteExport.ExportTask upload: Login failed");
            }
        }
        final String uri = "http://www.geocaching.com/my/uploadfieldnotes.aspx";
        final String page = GCLogin.getInstance().getRequestLogged(uri, null);
        if (StringUtils.isBlank(page)) {
            Log.e("FieldnoteExport.ExportTask get page: No data from server");
            return false;
        }
        final String[] viewstates = GCLogin.getViewstates(page);
        final Parameters uploadParams = new Parameters(
                "__EVENTTARGET", "",
                "__EVENTARGUMENT", "",
                "ctl00$ContentBody$btnUpload", "Upload Field Note");
        GCLogin.putViewstates(uploadParams, viewstates);
        // FIX: capture and check the upload response. The original code discarded the POST
        // result and re-tested the (already validated) form page, so failed uploads were
        // silently reported as successful.
        final String response = Network.getResponseData(Network.postRequest(uri, uploadParams, "ctl00$ContentBody$FieldNoteLoader", "text/plain", exportFile));
        if (StringUtils.isBlank(response)) {
            Log.e("FieldnoteExport.ExportTask upload: No data from server");
            return false;
        }
        return true;
    }

    @Override
    public boolean canIgnoreCache(@NonNull final Geocache cache) {
        // ignoring requires a waypoint type id and premium membership
        return StringUtils.isNotEmpty(cache.getType().wptTypeId) && Settings.isGCPremiumMember();
    }

    @Override
    public void ignoreCache(@NonNull final Geocache cache) {
        GCParser.ignoreCache(cache);
    }
}
|
|
package com.jme3.scene.plugins.fbx.objects;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import com.jme3.animation.Bone;
import com.jme3.animation.Skeleton;
import com.jme3.material.Material;
import com.jme3.material.RenderState.FaceCullMode;
import com.jme3.math.Matrix4f;
import com.jme3.math.Transform;
import com.jme3.math.Vector3f;
import com.jme3.scene.Geometry;
import com.jme3.scene.Node;
import com.jme3.scene.plugins.fbx.InheritType;
import com.jme3.scene.plugins.fbx.RotationOrder;
import com.jme3.scene.plugins.fbx.SceneLoader;
import com.jme3.scene.plugins.fbx.file.FbxElement;
public class FbxNode extends FbxObject {
public Map<String, Object> userData = new HashMap<String, Object>();
public FaceCullMode cullMode = FaceCullMode.Back;
public Transform localTransform;
public Node node;
public FbxNode parentFbxNode;
public boolean rotationActive = false;
public RotationOrder rotationOrder = RotationOrder.EULER_XYZ;
public InheritType inheritType = InheritType.RrSs;
// For bones and animation, in world space
public Matrix4f bindTransform = null;
public int boneIndex;
public Map<Long,FbxAnimNode> animTranslations = new HashMap<Long,FbxAnimNode>();
public Map<Long,FbxAnimNode> animRotations = new HashMap<Long,FbxAnimNode>();
public Map<Long,FbxAnimNode> animScales = new HashMap<Long,FbxAnimNode>();
public Bone bone;
private FbxAnimNode lastAnimTranslation;
private FbxAnimNode lastAnimRotation;
private FbxAnimNode lastAnimScale;
private FbxMesh mesh;
public Map<Long,FbxCluster> skinToCluster = new HashMap<Long,FbxCluster>();
public List<FbxNode> children = new ArrayList<FbxNode>();
/**
* Cache to store materials if linking order is wrong
*/
private List<Material> wrongOrderMaterial = new ArrayList<Material>();
public Vector3f translationLocalRaw = new Vector3f();
public Vector3f rotationOffsetRaw = new Vector3f();
public Vector3f rotationPivotRaw = new Vector3f();
public Vector3f rotationPreRaw = new Vector3f();
public Vector3f rotationLocalRaw = new Vector3f();
public Vector3f rotationPostRaw = new Vector3f();
public Vector3f scaleOffsetRaw = new Vector3f();
public Vector3f scalePivotRaw = new Vector3f();
public Vector3f scaleLocalRaw = new Vector3f(1, 1, 1);
public Matrix4f transformMatrix;
/**
 * Builds a scene-graph node from an FBX "Model" element: reads the raw
 * transform properties, user data and culling flag, then computes the local
 * transform and creates the backing jME {@link Node}.
 *
 * @param scene   the loader owning this node
 * @param element the FBX element this node is built from
 */
public FbxNode(SceneLoader scene, FbxElement element) {
super(scene, element);
for(FbxElement prop : element.getFbxProperties()) {
double x, y, z;
// Property name is always the first entry; the value(s) start at index 4.
String propName = (String) prop.properties.get(0);
switch(propName) {
case "RotationOrder":
rotationOrder = RotationOrder.values[(Integer) prop.properties.get(4)];
break;
case "Lcl Translation":
readVectorFromProp(translationLocalRaw, prop);
break;
case "Lcl Rotation":
readVectorFromProp(rotationLocalRaw, prop);
break;
case "Lcl Scaling":
readVectorFromProp(scaleLocalRaw, prop);
break;
case "PreRotation":
readVectorFromProp(rotationPreRaw, prop);
break;
case "RotationActive":
rotationActive = ((Number) prop.properties.get(4)).intValue() == 1;
break;
case "RotationPivot":
readVectorFromProp(rotationPivotRaw, prop);
break;
case "PostRotation":
readVectorFromProp(rotationPostRaw, prop);
break;
case "ScaleOffset":
readVectorFromProp(scaleOffsetRaw, prop);
break;
case "ScalePivot":
readVectorFromProp(scalePivotRaw, prop);
break;
case "InheritType":
inheritType = InheritType.values[(Integer) prop.properties.get(4)];
break;
case "U":
// User-defined property: entries 0/1 are key and type, the value starts at index 4.
// NOTE(review): the key is read from index 0, which is also propName ("U" matched
// above) — confirm whether the real key lives at another index in the FBX dump.
String userDataKey = (String) prop.properties.get(0);
String userDataType = (String) prop.properties.get(1);
Object userDataValue;
if(userDataType.equals("KString")) {
userDataValue = (String) prop.properties.get(4);
} else if(userDataType.equals("int")) {
userDataValue = (Integer) prop.properties.get(4);
} else if(userDataType.equals("double")) {
// NOTE: jME3 does not support doubles in UserData.
// Need to convert to float.
userDataValue = ((Double) prop.properties.get(4)).floatValue();
} else if(userDataType.equals("Vector")) {
x = (Double) prop.properties.get(4);
y = (Double) prop.properties.get(5);
z = (Double) prop.properties.get(6);
userDataValue = new Vector3f((float) x, (float) y, (float) z);
} else {
scene.warning("Unsupported user data type: " + userDataType + ". Ignoring.");
continue;
}
userData.put(userDataKey, userDataValue);
break;
}
}
FbxElement cullingElement = element.getChildById("Culling");
if(cullingElement != null && cullingElement.properties.get(0).equals("CullingOff"))
cullMode = FaceCullMode.Off; // TODO Add other variants
/*From http://area.autodesk.com/forum/autodesk-fbx/fbx-sdk/the-makeup-of-the-local-matrix-of-an-kfbxnode/
Local Matrix = LclTranslation * RotationOffset * RotationPivot *
PreRotation * LclRotation * PostRotation * RotationPivotInverse *
ScalingOffset * ScalingPivot * LclScaling * ScalingPivotInverse
LocalTranslation : translate (xform -query -translation)
RotationOffset: translation compensates for the change in the rotate pivot point (xform -q -rotateTranslation)
RotationPivot: current rotate pivot position (xform -q -rotatePivot)
PreRotation : joint orientation(pre rotation)
LocalRotation: rotate transform (xform -q -rotation & xform -q -rotateOrder)
PostRotation : rotate axis (xform -q -rotateAxis)
RotationPivotInverse: inverse of RotationPivot
ScalingOffset: translation compensates for the change in the scale pivot point (xform -q -scaleTranslation)
ScalingPivot: current scale pivot position (xform -q -scalePivot)
LocalScaling: scale transform (xform -q -scale)
ScalingPivotInverse: inverse of ScalingPivot
*/
transformMatrix = computeTransformationMatrix(translationLocalRaw, rotationLocalRaw, scaleLocalRaw, rotationOrder);
localTransform = new Transform(transformMatrix.toTranslationVector(), transformMatrix.toRotationQuat(), transformMatrix.toScaleVector());
node = new Node(name);
// Copy any parsed user-defined properties onto the jME node.
if(userData.size() > 0) {
Iterator<Entry<String,Object>> iterator = userData.entrySet().iterator();
while(iterator.hasNext()) {
Entry<String,Object> e = iterator.next();
node.setUserData(e.getKey(), e.getValue());
}
}
node.setLocalTransform(localTransform);
}
/**
 * Composes the local transform matrix from the raw FBX components, following
 * the KFbxNode local-matrix recipe quoted in the constructor:
 * T(lcl+rotOffset+rotPivot) * preRot * lclRot * postRot *
 * T(scaleOffset+scalePivot-rotPivot) * S(lclScale) * T(-scalePivot).
 *
 * @param rawTranslation the "Lcl Translation" value
 * @param rawRotation    the "Lcl Rotation" value (Euler angles)
 * @param rawScale       the "Lcl Scaling" value
 * @param rotOrder       Euler order applied to the local rotation only
 * @return the composed local transform matrix
 */
public Matrix4f computeTransformationMatrix(Vector3f rawTranslation, Vector3f rawRotation, Vector3f rawScale, RotationOrder rotOrder) {
Matrix4f transformMatrix = new Matrix4f();
Matrix4f mat = new Matrix4f();
// Translation, rotation offset and rotation pivot are folded into one translation.
mat.setTranslation(rawTranslation.x + rotationOffsetRaw.x + rotationPivotRaw.x, rawTranslation.y + rotationOffsetRaw.y + rotationPivotRaw.y, rawTranslation.z + rotationOffsetRaw.z + rotationPivotRaw.z);
transformMatrix.multLocal(mat);
if(rotationActive) {
// FBX applies the configurable rotation order only to "Lcl Rotation"; PreRotation
// (joint orient) always uses XYZ. PostRotation is assumed XYZ here as well —
// TODO confirm against the FBX SDK documentation.
Matrix4f preRotation = RotationOrder.EULER_XYZ.rotateToMatrix(rotationPreRaw.x, rotationPreRaw.y, rotationPreRaw.z);
Matrix4f localRotation = rotOrder.rotateToMatrix(rawRotation.x, rawRotation.y, rawRotation.z);
Matrix4f postRotation = RotationOrder.EULER_XYZ.rotateToMatrix(rotationPostRaw.x, rotationPostRaw.y, rotationPostRaw.z);
transformMatrix.multLocal(preRotation);
transformMatrix.multLocal(localRotation);
transformMatrix.multLocal(postRotation);
} else {
// NOTE(review): this branch calls rotate(...) while the branch above uses
// rotateToMatrix(...) — verify both produce an equivalent rotation matrix.
transformMatrix.multLocal(RotationOrder.EULER_XYZ.rotate(rawRotation.x, rawRotation.y, rawRotation.z));
}
// Scaling pivot/offset, with the rotation pivot translated back out.
mat.setTranslation(scaleOffsetRaw.x + scalePivotRaw.x - rotationPivotRaw.x, scaleOffsetRaw.y + scalePivotRaw.y - rotationPivotRaw.y, scaleOffsetRaw.z + scalePivotRaw.z - rotationPivotRaw.z);
transformMatrix.multLocal(mat);
transformMatrix.scale(rawScale);
// Undo the scaling pivot translation.
mat.setTranslation(scalePivotRaw.negate());
transformMatrix.multLocal(mat);
return transformMatrix;
}
/**
 * Attaches this node directly under the scene root and registers it as a
 * root-level FBX node.
 */
@Override
public void linkToZero() {
    scene.rootNodes.add(this);
    scene.sceneNode.attachChild(node);
}
/**
 * Resolves this node's bone index within the given skeleton. Does nothing for
 * nodes that have no bone (non-limb nodes).
 *
 * @param skeleton the skeleton to look the bone up in
 */
public void setSkeleton(Skeleton skeleton) {
    if (bone == null) {
        return;
    }
    boneIndex = skeleton.getBoneIndex(bone);
}
/**
 * Installs this node's bind pose onto its bone (no-op when this node has no
 * bone). Uses the stored FBX bind transform relative to the parent's bind
 * transform when available; otherwise falls back to the node's current local
 * transform.
 */
public void buildBindPoseBoneTransform() {
    if (bone == null) {
        return;
    }
    Matrix4f bind = bindTransform;
    if (bind == null) {
        // No explicit bind pose stored; use the node's current local transform.
        bone.setBindTransforms(node.getLocalTranslation(), node.getLocalRotation(), node.getLocalScale());
        return;
    }
    Matrix4f parentMatrix = parentFbxNode != null ? parentFbxNode.bindTransform : Matrix4f.IDENTITY;
    if (parentMatrix == null) {
        // Parent exists but has no bind transform; derive it from the scene graph.
        parentMatrix = node.getLocalToWorldMatrix(null);
    }
    Matrix4f local = parentMatrix.invert().multLocal(bind);
    bone.setBindTransforms(local.toTranslationVector(), local.toRotationQuat(), local.toScaleVector());
}
/**
 * Binds an animation curve node to one of this node's transform channels,
 * keyed by the animation layer id. Non-animation children are ignored.
 *
 * @param child        the linked object; only {@link FbxAnimNode} is handled
 * @param propertyName the FBX property the curve animates
 */
@Override
public void link(FbxObject child, String propertyName) {
    if (!(child instanceof FbxAnimNode)) {
        return;
    }
    FbxAnimNode anim = (FbxAnimNode) child;
    switch (propertyName) {
        case "Lcl Translation":
            lastAnimTranslation = anim;
            animTranslations.put(anim.layerId, anim);
            break;
        case "Lcl Rotation":
            lastAnimRotation = anim;
            animRotations.put(anim.layerId, anim);
            break;
        case "Lcl Scaling":
            lastAnimScale = anim;
            animScales.put(anim.layerId, anim);
            break;
    }
}
/**
 * Returns the translation animation for the given layer, or the most recently
 * linked one when {@code layerId} is 0.
 */
public FbxAnimNode animTranslation(long layerId) {
    return layerId == 0 ? lastAnimTranslation : animTranslations.get(layerId);
}
/**
 * Returns the rotation animation for the given layer, or the most recently
 * linked one when {@code layerId} is 0.
 */
public FbxAnimNode animRotation(long layerId) {
    return layerId == 0 ? lastAnimRotation : animRotations.get(layerId);
}
/**
 * Returns the scale animation for the given layer, or the most recently
 * linked one when {@code layerId} is 0.
 */
public FbxAnimNode animScale(long layerId) {
    return layerId == 0 ? lastAnimScale : animScales.get(layerId);
}
/**
 * Links a loaded FBX object to this node: a material (applied to this node's
 * mesh geometries), a child node (attached to the scene graph and, for limbs,
 * to the bone hierarchy), or a mesh (which also drains materials that were
 * linked before the mesh arrived).
 *
 * @param otherObject the object to link; unrecognized types are ignored
 */
@Override
public void link(FbxObject otherObject) {
    if (otherObject instanceof FbxMaterial) {
        FbxMaterial m = (FbxMaterial) otherObject;
        Material mat = m.material;
        if (mesh == null) {
            // Material arrived before the mesh: defer it until the mesh is linked.
            wrongOrderMaterial.add(mat);
            return;
        }
        if (cullMode != FaceCullMode.Back)
            mat.getAdditionalRenderState().setFaceCullMode(cullMode); // TODO Add other variants
        applyMaterial(mat);
    } else if (otherObject instanceof FbxNode) {
        FbxNode n = (FbxNode) otherObject;
        node.attachChild(n.node);
        children.add(n);
        if (n.inheritType == InheritType.Rrs) {
            // "Rrs" children do not inherit the parent's scale; cancel it out.
            Vector3f scale = node.getWorldScale();
            n.node.scale(1f / scale.x, 1f / scale.y, 1f / scale.z);
        }
        n.parentFbxNode = this;
        if (isLimb() && n.isLimb()) {
            // Lazily create bones and mirror the node hierarchy in the skeleton.
            if (bone == null)
                bone = new Bone(name);
            if (n.bone == null)
                n.bone = new Bone(n.name);
            bone.addChild(n.bone);
        }
    } else if (otherObject instanceof FbxMesh) {
        FbxMesh m = (FbxMesh) otherObject;
        m.setParent(node);
        m.parent = this;
        mesh = m;
        // Apply materials that were linked before the mesh existed, in arrival
        // order. NOTE: as in the original code, cull mode is NOT applied to
        // deferred materials — only to materials linked after the mesh.
        while (!wrongOrderMaterial.isEmpty()) {
            applyMaterial(wrongOrderMaterial.remove(0));
        }
    }
}

/**
 * Assigns {@code mat} to each geometry of the current mesh that is either
 * untagged or tagged (via the "FBXMaterial" user data) with the current
 * material id, then advances {@code mesh.lastMaterialId}. Requires
 * {@code mesh} to be non-null.
 */
private void applyMaterial(Material mat) {
    for (Geometry g : mesh.geometries) {
        if (g.getUserData("FBXMaterial") != null) {
            if ((Integer) g.getUserData("FBXMaterial") == mesh.lastMaterialId)
                g.setMaterial(mat);
        } else {
            g.setMaterial(mat);
        }
    }
    mesh.lastMaterialId++;
}
/**
 * Returns true when this FBX node is a skeleton limb (its FBX type is
 * "LimbNode").
 */
public boolean isLimb() {
return type.equals("LimbNode");
}
}
|
|
/**
* Copyright (c) 2012, Ben Fortuna
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* o Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* o Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* o Neither the name of Ben Fortuna nor the names of any other contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.fortuna.ical4j.vcard;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.io.StringWriter;
import net.fortuna.ical4j.data.ParserException;
import net.fortuna.ical4j.model.ValidationException;
import net.fortuna.ical4j.util.CompatibilityHints;
import net.fortuna.ical4j.vcard.Property.Id;
import net.fortuna.ical4j.vcard.parameter.Encoding;
import net.fortuna.ical4j.vcard.parameter.Type;
import net.fortuna.ical4j.vcard.property.BDay;
import net.fortuna.ical4j.vcard.property.Org;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.net.QuotedPrintableCodec;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
* Created on: 2009-02-26
*
* @author antheque
*
*/
public class Outlook2003Test {

    /** Enables relaxed parsing so the malformed Outlook file can be read at all. */
    @Before
    public void setUp() {
        CompatibilityHints.setHintEnabled(CompatibilityHints.KEY_RELAXED_PARSING, true);
    }

    /** Restores the default (strict) parsing mode. */
    @After
    public void tearDown() {
        CompatibilityHints.setHintEnabled(CompatibilityHints.KEY_RELAXED_PARSING, false);
    }

    /**
     * This example has been prepared with Outlook 2003, it is full of errors,
     * but still the library should be able to parse it as well as possible.
     *
     * This test also makes use of a custom ParameterRegistry, that allows me
     * to work around the Outlook quirk, that places the TYPE parameter values
     * without the TYPE string, i.e. instead of TYPE=HOME,WORK, we have only
     * HOME,WORK.
     *
     * @throws ParserException
     * @throws IOException
     * @throws ValidationException
     * @throws DecoderException
     */
    @Test
    public void testOutlookExample() throws IOException, ParserException,
            ValidationException, DecoderException {
        File file = new File(
                "src/test/resources/samples/vcard-antoni-outlook2003.vcf");
        Reader reader = new FileReader(file);
        GroupRegistry groupRegistry = new GroupRegistry();
        PropertyFactoryRegistry propReg = new PropertyFactoryRegistry();
        ParameterFactoryRegistry parReg = new ParameterFactoryRegistry();
        addTypeParamsToRegistry(parReg);

        /*
         * The custom registry allows the file to be parsed correctly. It's the
         * first workaround for the Outlook problem.
         */
        VCardBuilder builder =
                new VCardBuilder(reader, groupRegistry, propReg, parReg);
        VCard card = builder.build();
        assertEquals("Antoni Jozef Mylka jun.",
                card.getProperty(Id.FN).getValue());

        /*
         * To test whether the file has really been parsed correctly, we
         * generate a string out of it. Before writing this test, the builder
         * contained a bug. The file contains non-standard folding. The LABEL
         * property has two lines, but the second line is not indented properly.
         * The builder used to interpret it as a separate property. Since it
         * didn't know it, it used to insert NULL into the property list. This
         * NULL yielded a NullPointerException when trying to serialize the file
         * back.
         *
         * If we can't preserve all data we should still have "something"
         *
         * note: we use non-validating outputter, since the ENCODING parameter
         * has been deprecated in the newest versions
         */
        VCardOutputter outputter = new VCardOutputter(false);
        StringWriter writer = new StringWriter();
        outputter.output(card, writer);

        /*
         * We don't support quoted printable, and we don't try to support
         * the crappy Outlook 2003 folding, but we would still like to
         * get something.
         */
        Property labelProperty = card.getProperty(Id.LABEL);
        String labelvalue = labelProperty.getValue();
        assertEquals(
                "3.10=0D=0ATrippstadter Str. 122=0D=0AKaiserslautern, " +
                "Rheinland-Pfalz 67663=",
                labelvalue);

        /*
         * A workaround for the limitation above, a utility method, that
         * checks the encoding of a property, and returns an un-encoded
         * value.
         */
        assertEquals(
                "3.10\r\nTrippstadter Str. 122\r\nKaiserslautern, " +
                "Rheinland-Pfalz 67663",
                getDecodedPropertyValue(labelProperty));

        /*
         * Another issue found, the BDAY property is parsed, but the
         * value is not converted to a date, and the BDay.getDate() method
         * returns null.
         */
        BDay bday = (BDay)card.getProperty(Id.BDAY);
        assertNotNull(bday.getDate());
        assertEquals("19800118",bday.getValue());

        /*
         * Yet another issue. The entry in PropertyFactoryRegistry for the ORG
         * property was invalid. There should be TWO values for this file
         * and the org property.
         */
        String [] vals = ((Org)card.getProperty(Id.ORG)).getValues();
        assertEquals(2,vals.length);
        assertEquals("DFKI",vals[0]);
        assertEquals("Knowledge-Management",vals[1]);
    }

    /**
     * Decodes a property's value according to its ENCODING parameter.
     *
     * @param prop the property whose value should be decoded
     * @return the quoted-printable-decoded value, or the raw value when no
     *         supported encoding is declared
     * @throws DecoderException if quoted-printable decoding fails
     */
    private String getDecodedPropertyValue(Property prop) throws DecoderException {
        Encoding enc = (Encoding)prop.getParameter(Parameter.Id.ENCODING);
        String val = prop.getValue();
        if (enc != null && enc.getValue().equalsIgnoreCase("QUOTED-PRINTABLE")) {
            /*
             * A special Outlook2003 hack: a trailing soft line break marker is
             * stripped before decoding.
             */
            if (val.endsWith("=")) {
                val = val.substring(0,val.length() - 1);
            }
            QuotedPrintableCodec codec = new QuotedPrintableCodec();
            return codec.decode(val);
        } else {
            return val;
        }
    }

    /**
     * Registers each bare Outlook TYPE token (upper- and lower-case) as a
     * parameter factory producing the corresponding {@link Type} parameter.
     *
     * @param parReg the registry to populate
     */
    private void addTypeParamsToRegistry(ParameterFactoryRegistry parReg) {
        for (final String name : new String[] { "HOME", "WORK", "MSG", "PREF",
                "VOICE", "FAX", "CELL", "VIDEO", "PAGER", "BBS", "MODEM",
                "CAR", "ISDN", "PCS", "INTERNET", "X400", "DOM", "INTL",
                "POSTAL", "PARCEL" }) {
            parReg.register(name, new ParameterFactory<Parameter>() {
                public Parameter createParameter(String value) {
                    return new Type(name);
                }
            });
            String lc = name.toLowerCase();
            parReg.register(lc, new ParameterFactory<Parameter>() {
                public Parameter createParameter(String value) {
                    return new Type(name);
                }
            });
        }
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.server;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.fasterxml.jackson.datatype.joda.ser.DateTimeSerializer;
import com.fasterxml.jackson.jaxrs.smile.SmileMediaTypes;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.io.CountingOutputStream;
import com.google.inject.Inject;
import org.apache.druid.client.DirectDruidClient;
import org.apache.druid.guice.LazySingleton;
import org.apache.druid.guice.annotations.Json;
import org.apache.druid.guice.annotations.Self;
import org.apache.druid.guice.annotations.Smile;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Yielder;
import org.apache.druid.java.util.common.guava.Yielders;
import org.apache.druid.java.util.emitter.EmittingLogger;
import org.apache.druid.query.BadJsonQueryException;
import org.apache.druid.query.BadQueryException;
import org.apache.druid.query.Query;
import org.apache.druid.query.QueryCapacityExceededException;
import org.apache.druid.query.QueryContexts;
import org.apache.druid.query.QueryException;
import org.apache.druid.query.QueryInterruptedException;
import org.apache.druid.query.QueryTimeoutException;
import org.apache.druid.query.QueryToolChest;
import org.apache.druid.query.QueryUnsupportedException;
import org.apache.druid.query.ResourceLimitExceededException;
import org.apache.druid.query.TruncatedResponseContextException;
import org.apache.druid.query.context.ResponseContext;
import org.apache.druid.server.metrics.QueryCountStatsProvider;
import org.apache.druid.server.security.Access;
import org.apache.druid.server.security.AuthConfig;
import org.apache.druid.server.security.AuthorizationUtils;
import org.apache.druid.server.security.AuthorizerMapper;
import org.apache.druid.server.security.ForbiddenException;
import org.joda.time.DateTime;
import javax.annotation.Nullable;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.StreamingOutput;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.atomic.AtomicLong;
@LazySingleton
@Path("/druid/v2/")
public class QueryResource implements QueryCountStatsProvider
{
  protected static final EmittingLogger log = new EmittingLogger(QueryResource.class);
  @Deprecated // use SmileMediaTypes.APPLICATION_JACKSON_SMILE
  protected static final String APPLICATION_SMILE = "application/smile";

  /**
   * HTTP response header name containing {@link ResponseContext} serialized string
   */
  public static final String HEADER_RESPONSE_CONTEXT = "X-Druid-Response-Context";
  // Request/response headers used for ETag-based response caching.
  public static final String HEADER_IF_NONE_MATCH = "If-None-Match";
  public static final String HEADER_ETAG = "ETag";

  protected final QueryLifecycleFactory queryLifecycleFactory;
  protected final ObjectMapper jsonMapper;
  protected final ObjectMapper smileMapper;
  // Copies of the mappers above, configured to serialize DateTime values as longs.
  protected final ObjectMapper serializeDateTimeAsLongJsonMapper;
  protected final ObjectMapper serializeDateTimeAsLongSmileMapper;
  protected final QueryScheduler queryScheduler;
  protected final AuthConfig authConfig;
  protected final AuthorizerMapper authorizerMapper;
  private final ResponseContextConfig responseContextConfig;
  private final DruidNode selfNode;

  // Query outcome counters, exposed via the QueryCountStatsProvider getters below.
  private final AtomicLong successfulQueryCount = new AtomicLong();
  private final AtomicLong failedQueryCount = new AtomicLong();
  private final AtomicLong interruptedQueryCount = new AtomicLong();
  private final AtomicLong timedOutQueryCount = new AtomicLong();

  @Inject
  public QueryResource(
      QueryLifecycleFactory queryLifecycleFactory,
      @Json ObjectMapper jsonMapper,
      @Smile ObjectMapper smileMapper,
      QueryScheduler queryScheduler,
      AuthConfig authConfig,
      AuthorizerMapper authorizerMapper,
      ResponseContextConfig responseContextConfig,
      @Self DruidNode selfNode
  )
  {
    this.queryLifecycleFactory = queryLifecycleFactory;
    this.jsonMapper = jsonMapper;
    this.smileMapper = smileMapper;
    this.serializeDateTimeAsLongJsonMapper = serializeDataTimeAsLong(jsonMapper);
    this.serializeDateTimeAsLongSmileMapper = serializeDataTimeAsLong(smileMapper);
    this.queryScheduler = queryScheduler;
    this.authConfig = authConfig;
    this.authorizerMapper = authorizerMapper;
    this.responseContextConfig = responseContextConfig;
    this.selfNode = selfNode;
  }

  /**
   * Cancels a running query by id. The caller must hold write access to every
   * datasource the query reads; when the id is unknown to the scheduler the
   * authorization check runs against an empty datasource set before the cancel
   * is attempted anyway.
   */
  @DELETE
  @Path("{id}")
  @Produces(MediaType.APPLICATION_JSON)
  public Response cancelQuery(@PathParam("id") String queryId, @Context final HttpServletRequest req)
  {
    if (log.isDebugEnabled()) {
      log.debug("Received cancel request for query [%s]", queryId);
    }
    Set<String> datasources = queryScheduler.getQueryDatasources(queryId);
    if (datasources == null) {
      log.warn("QueryId [%s] not registered with QueryScheduler, cannot cancel", queryId);
      datasources = new TreeSet<>();
    }
    Access authResult = AuthorizationUtils.authorizeAllResourceActions(
        req,
        Iterables.transform(datasources, AuthorizationUtils.DATASOURCE_WRITE_RA_GENERATOR),
        authorizerMapper
    );
    if (!authResult.isAllowed()) {
      throw new ForbiddenException(authResult.toString());
    }
    queryScheduler.cancelQuery(queryId);
    return Response.status(Response.Status.ACCEPTED).build();
  }

  /**
   * Main native-query endpoint: reads a query from the request body, authorizes
   * and executes it, and streams the results back to the client. Returns 304
   * (Not Modified) when the client's If-None-Match header matches the computed
   * ETag. The catch blocks below map each error category to its own HTTP status
   * and update the corresponding outcome counter.
   */
  @POST
  @Produces({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE})
  @Consumes({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE, APPLICATION_SMILE})
  public Response doPost(
      final InputStream in,
      @QueryParam("pretty") final String pretty,
      // used to get request content-type,Accept header, remote address and auth-related headers
      @Context final HttpServletRequest req
  ) throws IOException
  {
    final QueryLifecycle queryLifecycle = queryLifecycleFactory.factorize();
    Query<?> query = null;

    String acceptHeader = req.getHeader("Accept");
    if (Strings.isNullOrEmpty(acceptHeader)) {
      //default to content-type
      acceptHeader = req.getContentType();
    }
    final ResourceIOReaderWriter ioReaderWriter = createResourceIOReaderWriter(acceptHeader, pretty != null);

    final String currThreadName = Thread.currentThread().getName();
    try {
      queryLifecycle.initialize(readQuery(req, in, ioReaderWriter));
      query = queryLifecycle.getQuery();
      final String queryId = query.getId();
      // Tag the handling thread with query details for easier thread-dump debugging;
      // restored to currThreadName in the finally blocks below.
      final String queryThreadName = StringUtils.format(
          "%s[%s_%s_%s]",
          currThreadName,
          query.getType(),
          query.getDataSource().getTableNames(),
          queryId
      );
      Thread.currentThread().setName(queryThreadName);
      if (log.isDebugEnabled()) {
        log.debug("Got query [%s]", query);
      }
      final Access authResult = queryLifecycle.authorize(req);
      if (!authResult.isAllowed()) {
        throw new ForbiddenException(authResult.toString());
      }
      final QueryLifecycle.QueryResponse queryResponse = queryLifecycle.execute();
      final Sequence<?> results = queryResponse.getResults();
      final ResponseContext responseContext = queryResponse.getResponseContext();
      final String prevEtag = getPreviousEtag(req);
      if (prevEtag != null && prevEtag.equals(responseContext.get(ResponseContext.Key.ETAG))) {
        // The client already has this result; count it as a success without re-sending.
        queryLifecycle.emitLogsAndMetrics(null, req.getRemoteAddr(), -1);
        successfulQueryCount.incrementAndGet();
        return Response.notModified().build();
      }
      final Yielder<?> yielder = Yielders.each(results);
      try {
        boolean shouldFinalize = QueryContexts.isFinalize(query, true);
        boolean serializeDateTimeAsLong =
            QueryContexts.isSerializeDateTimeAsLong(query, false)
            || (!shouldFinalize && QueryContexts.isSerializeDateTimeAsLongInner(query, false));
        final ObjectWriter jsonWriter = ioReaderWriter.newOutputWriter(
            queryLifecycle.getToolChest(),
            queryLifecycle.getQuery(),
            serializeDateTimeAsLong
        );
        Response.ResponseBuilder responseBuilder = Response
            .ok(
                new StreamingOutput()
                {
                  @Override
                  public void write(OutputStream outputStream) throws WebApplicationException
                  {
                    Exception e = null;
                    CountingOutputStream os = new CountingOutputStream(outputStream);
                    try {
                      // json serializer will always close the yielder
                      jsonWriter.writeValue(os, yielder);
                      os.flush(); // Some types of OutputStream suppress flush errors in the .close() method.
                      os.close();
                    }
                    catch (Exception ex) {
                      e = ex;
                      log.noStackTrace().error(ex, "Unable to send query response.");
                      throw new RuntimeException(ex);
                    }
                    finally {
                      Thread.currentThread().setName(currThreadName);
                      queryLifecycle.emitLogsAndMetrics(e, req.getRemoteAddr(), os.getCount());
                      if (e == null) {
                        successfulQueryCount.incrementAndGet();
                      } else {
                        failedQueryCount.incrementAndGet();
                      }
                    }
                  }
                },
                ioReaderWriter.getContentType()
            )
            .header("X-Druid-Query-Id", queryId);
        Object entityTag = responseContext.remove(ResponseContext.Key.ETAG);
        if (entityTag != null) {
          responseBuilder.header(HEADER_ETAG, entityTag);
        }
        DirectDruidClient.removeMagicResponseContextFields(responseContext);
        //Limit the response-context header, see https://github.com/apache/druid/issues/2331
        //Note that Response.ResponseBuilder.header(String key,Object value).build() calls value.toString()
        //and encodes the string using ASCII, so 1 char is = 1 byte
        final ResponseContext.SerializationResult serializationResult = responseContext.serializeWith(
            jsonMapper,
            responseContextConfig.getMaxResponseContextHeaderSize()
        );
        if (serializationResult.isTruncated()) {
          final String logToPrint = StringUtils.format(
              "Response Context truncated for id [%s]. Full context is [%s].",
              queryId,
              serializationResult.getFullResult()
          );
          if (responseContextConfig.shouldFailOnTruncatedResponseContext()) {
            log.error(logToPrint);
            throw new QueryInterruptedException(
                new TruncatedResponseContextException(
                    "Serialized response context exceeds the max size[%s]",
                    responseContextConfig.getMaxResponseContextHeaderSize()
                ),
                selfNode.getHostAndPortToUse()
            );
          } else {
            log.warn(logToPrint);
          }
        }
        return responseBuilder
            .header(HEADER_RESPONSE_CONTEXT, serializationResult.getResult())
            .build();
      }
      catch (QueryException e) {
        // make sure to close yielder if anything happened before starting to serialize the response.
        yielder.close();
        throw e;
      }
      catch (Exception e) {
        // make sure to close yielder if anything happened before starting to serialize the response.
        yielder.close();
        throw new RuntimeException(e);
      }
      finally {
        // do not close yielder here, since we do not want to close the yielder prior to
        // StreamingOutput having iterated over all the results
      }
    }
    catch (QueryInterruptedException e) {
      interruptedQueryCount.incrementAndGet();
      queryLifecycle.emitLogsAndMetrics(e, req.getRemoteAddr(), -1);
      return ioReaderWriter.gotError(e);
    }
    catch (QueryTimeoutException timeout) {
      timedOutQueryCount.incrementAndGet();
      queryLifecycle.emitLogsAndMetrics(timeout, req.getRemoteAddr(), -1);
      return ioReaderWriter.gotTimeout(timeout);
    }
    catch (QueryCapacityExceededException cap) {
      failedQueryCount.incrementAndGet();
      queryLifecycle.emitLogsAndMetrics(cap, req.getRemoteAddr(), -1);
      return ioReaderWriter.gotLimited(cap);
    }
    catch (QueryUnsupportedException unsupported) {
      failedQueryCount.incrementAndGet();
      queryLifecycle.emitLogsAndMetrics(unsupported, req.getRemoteAddr(), -1);
      return ioReaderWriter.gotUnsupported(unsupported);
    }
    catch (BadJsonQueryException | ResourceLimitExceededException e) {
      interruptedQueryCount.incrementAndGet();
      queryLifecycle.emitLogsAndMetrics(e, req.getRemoteAddr(), -1);
      return ioReaderWriter.gotBadQuery(e);
    }
    catch (ForbiddenException e) {
      // don't do anything for an authorization failure, ForbiddenExceptionMapper will catch this later and
      // send an error response if this is thrown.
      throw e;
    }
    catch (Exception e) {
      failedQueryCount.incrementAndGet();
      queryLifecycle.emitLogsAndMetrics(e, req.getRemoteAddr(), -1);
      log.noStackTrace()
         .makeAlert(e, "Exception handling request")
         .addData("query", query != null ? jsonMapper.writeValueAsString(query) : "unparseable query")
         .addData("peer", req.getRemoteAddr())
         .emit();
      return ioReaderWriter.gotError(e);
    }
    finally {
      Thread.currentThread().setName(currThreadName);
    }
  }

  /**
   * Deserializes the request body into a {@link Query}, propagating the
   * client's If-None-Match header into the query context so the ETag check can
   * happen downstream.
   *
   * @throws BadJsonQueryException when the request body is not parseable
   */
  private Query<?> readQuery(
      final HttpServletRequest req,
      final InputStream in,
      final ResourceIOReaderWriter ioReaderWriter
  ) throws IOException
  {
    Query baseQuery;
    try {
      baseQuery = ioReaderWriter.getInputMapper().readValue(in, Query.class);
    }
    catch (JsonParseException e) {
      throw new BadJsonQueryException(e);
    }
    String prevEtag = getPreviousEtag(req);
    if (prevEtag != null) {
      baseQuery = baseQuery.withOverriddenContext(
          ImmutableMap.of(HEADER_IF_NONE_MATCH, prevEtag)
      );
    }
    return baseQuery;
  }

  /** Returns the client-supplied If-None-Match header value, or null when absent. */
  private static String getPreviousEtag(final HttpServletRequest req)
  {
    return req.getHeader(HEADER_IF_NONE_MATCH);
  }

  /** Returns a copy of {@code mapper} configured to write {@link DateTime} values as longs. */
  protected ObjectMapper serializeDataTimeAsLong(ObjectMapper mapper)
  {
    return mapper.copy().registerModule(new SimpleModule().addSerializer(DateTime.class, new DateTimeSerializer()));
  }

  /**
   * Picks JSON or Smile mappers based on the request media type and wraps them
   * for request reading and response writing.
   */
  protected ResourceIOReaderWriter createResourceIOReaderWriter(String requestType, boolean pretty)
  {
    boolean isSmile = SmileMediaTypes.APPLICATION_JACKSON_SMILE.equals(requestType) ||
        APPLICATION_SMILE.equals(requestType);
    String contentType = isSmile ? SmileMediaTypes.APPLICATION_JACKSON_SMILE : MediaType.APPLICATION_JSON;
    return new ResourceIOReaderWriter(
        contentType,
        isSmile ? smileMapper : jsonMapper,
        isSmile ? serializeDateTimeAsLongSmileMapper : serializeDateTimeAsLongJsonMapper,
        pretty
    );
  }

  /**
   * Bundles the negotiated content type with the mappers used to read the
   * request body and to write both success and error responses.
   */
  protected static class ResourceIOReaderWriter
  {
    private final String contentType;
    private final ObjectMapper inputMapper;
    private final ObjectMapper serializeDateTimeAsLongInputMapper;
    private final boolean isPretty;

    ResourceIOReaderWriter(
        String contentType,
        ObjectMapper inputMapper,
        ObjectMapper serializeDateTimeAsLongInputMapper,
        boolean isPretty
    )
    {
      this.contentType = contentType;
      this.inputMapper = inputMapper;
      this.serializeDateTimeAsLongInputMapper = serializeDateTimeAsLongInputMapper;
      this.isPretty = isPretty;
    }

    String getContentType()
    {
      return contentType;
    }

    ObjectMapper getInputMapper()
    {
      return inputMapper;
    }

    /**
     * Builds the writer used to serialize results, optionally decorated by the
     * query toolchest and configured for long-DateTime serialization and/or
     * pretty-printing. {@code query} must be non-null when {@code toolChest} is.
     */
    ObjectWriter newOutputWriter(
        @Nullable QueryToolChest toolChest,
        @Nullable Query query,
        boolean serializeDateTimeAsLong
    )
    {
      final ObjectMapper mapper = serializeDateTimeAsLong ? serializeDateTimeAsLongInputMapper : inputMapper;
      final ObjectMapper decoratedMapper;
      if (toolChest != null) {
        decoratedMapper = toolChest.decorateObjectMapper(mapper, Preconditions.checkNotNull(query, "query"));
      } else {
        decoratedMapper = mapper;
      }
      return isPretty ? decoratedMapper.writerWithDefaultPrettyPrinter() : decoratedMapper.writer();
    }

    Response ok(Object object) throws IOException
    {
      return Response.ok(newOutputWriter(null, null, false).writeValueAsString(object), contentType).build();
    }

    Response gotError(Exception e) throws IOException
    {
      return buildNonOkResponse(
          Status.INTERNAL_SERVER_ERROR.getStatusCode(),
          QueryInterruptedException.wrapIfNeeded(e)
      );
    }

    Response gotTimeout(QueryTimeoutException e) throws IOException
    {
      return buildNonOkResponse(QueryTimeoutException.STATUS_CODE, e);
    }

    Response gotLimited(QueryCapacityExceededException e) throws IOException
    {
      return buildNonOkResponse(QueryCapacityExceededException.STATUS_CODE, e);
    }

    Response gotUnsupported(QueryUnsupportedException e) throws IOException
    {
      return buildNonOkResponse(QueryUnsupportedException.STATUS_CODE, e);
    }

    Response gotBadQuery(BadQueryException e) throws IOException
    {
      return buildNonOkResponse(BadQueryException.STATUS_CODE, e);
    }

    /** Serializes the exception as the response entity under the given HTTP status. */
    Response buildNonOkResponse(int status, Exception e) throws JsonProcessingException
    {
      return Response.status(status)
                     .type(contentType)
                     .entity(newOutputWriter(null, null, false).writeValueAsBytes(e))
                     .build();
    }
  }

  @Override
  public long getSuccessfulQueryCount()
  {
    return successfulQueryCount.get();
  }

  @Override
  public long getFailedQueryCount()
  {
    return failedQueryCount.get();
  }

  @Override
  public long getInterruptedQueryCount()
  {
    return interruptedQueryCount.get();
  }

  @Override
  public long getTimedOutQueryCount()
  {
    return timedOutQueryCount.get();
  }
}
|
|
/*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.util;
import com.hazelcast.internal.eviction.Expirable;
import com.hazelcast.internal.util.ThreadLocalRandomProvider;
import com.hazelcast.nio.serialization.SerializableByConvention;
import java.util.AbstractMap;
import java.util.Collections;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
/**
* ConcurrentHashMap to extend iterator capability.
*
* @param <K> Type of the key
* @param <V> Type of the value
*/
@SerializableByConvention
public class SampleableConcurrentHashMap<K, V> extends ConcurrentReferenceHashMap<K, V> {

    private static final float LOAD_FACTOR = 0.91f;

    public SampleableConcurrentHashMap(int initialCapacity) {
        // Concurrency level 1 is important for fetch-method to function properly.
        // Moreover partitions are single threaded and higher concurrency has not much gain
        this(initialCapacity, LOAD_FACTOR, 1, ReferenceType.STRONG, ReferenceType.STRONG, null);
    }

    public SampleableConcurrentHashMap(int initialCapacity, float loadFactor, int concurrencyLevel,
                                       ReferenceType keyType, ReferenceType valueType, EnumSet<Option> options) {
        super(initialCapacity, loadFactor, concurrencyLevel, keyType, valueType, options);
    }

    /**
     * Fetches keys from given <code>tableIndex</code> as <code>size</code>
     * and puts them into <code>keys</code> list.
     *
     * @param tableIndex Index (checkpoint) for starting point of fetch operation
     * @param size       Count of how many keys will be fetched
     * @param keys       List that fetched keys will be put into
     * @return the next index (checkpoint) for later fetches
     */
    public int fetchKeys(int tableIndex, int size, List<K> keys) {
        final long now = Clock.currentTimeMillis();
        // Fetching scans segment 0 only; see the concurrency-level note in the constructor.
        final Segment<K, V> segment = segments[0];
        final HashEntry<K, V>[] currentTable = segment.table;
        int nextTableIndex;
        if (tableIndex >= 0 && tableIndex < segment.table.length) {
            nextTableIndex = tableIndex;
        } else {
            // Out-of-range checkpoint: restart the scan from the last bucket.
            nextTableIndex = currentTable.length - 1;
        }
        int counter = 0;
        while (nextTableIndex >= 0 && counter < size) {
            HashEntry<K, V> nextEntry = currentTable[nextTableIndex--];
            while (nextEntry != null) {
                if (nextEntry.key() != null) {
                    final V value = nextEntry.value();
                    // Expired values (see isValidForFetching) are skipped, not removed.
                    if (isValidForFetching(value, now)) {
                        keys.add(nextEntry.key());
                        counter++;
                    }
                }
                nextEntry = nextEntry.next;
            }
        }
        return nextTableIndex;
    }

    /**
     * Fetches entries from given <code>tableIndex</code> as <code>size</code>
     * and puts them into <code>entries</code> list.
     *
     * @param tableIndex Index (checkpoint) for starting point of fetch operation
     * @param size       Count of how many entries will be fetched
     * @param entries    List that fetched entries will be put into
     * @return the next index (checkpoint) for later fetches
     */
    public int fetchEntries(int tableIndex, int size, List<Map.Entry<K, V>> entries) {
        final long now = Clock.currentTimeMillis();
        // Fetching scans segment 0 only; see the concurrency-level note in the constructor.
        final Segment<K, V> segment = segments[0];
        final HashEntry<K, V>[] currentTable = segment.table;
        int nextTableIndex;
        if (tableIndex >= 0 && tableIndex < segment.table.length) {
            nextTableIndex = tableIndex;
        } else {
            // Out-of-range checkpoint: restart the scan from the last bucket.
            nextTableIndex = currentTable.length - 1;
        }
        int counter = 0;
        while (nextTableIndex >= 0 && counter < size) {
            HashEntry<K, V> nextEntry = currentTable[nextTableIndex--];
            while (nextEntry != null) {
                if (nextEntry.key() != null) {
                    final V value = nextEntry.value();
                    if (isValidForFetching(value, now)) {
                        K key = nextEntry.key();
                        // Snapshot the pair; callers get an entry detached from the map.
                        entries.add(new AbstractMap.SimpleEntry<K, V>(key, value));
                        counter++;
                    }
                }
                nextEntry = nextEntry.next;
            }
        }
        return nextTableIndex;
    }

    /**
     * Returns {@code true} when the value may be handed out by a fetch; values implementing
     * {@link Expirable} are rejected once expired at {@code now}, everything else is accepted.
     */
    protected boolean isValidForFetching(V value, long now) {
        if (value instanceof Expirable) {
            return !((Expirable) value).isExpiredAt(now);
        }
        return true;
    }

    /**
     * Entry to define keys and values for sampling.
     */
    public static class SamplingEntry<K, V> {

        protected final K key;
        protected final V value;

        public SamplingEntry(K key, V value) {
            this.key = key;
            this.value = value;
        }

        public K getEntryKey() {
            return key;
        }

        public V getEntryValue() {
            return value;
        }

        @Override
        public boolean equals(Object o) {
            if (!(o instanceof SamplingEntry)) {
                return false;
            }
            @SuppressWarnings("unchecked")
            SamplingEntry e = (SamplingEntry) o;
            return eq(key, e.key) && eq(value, e.value);
        }

        // Null-safe equality helper used by equals().
        private static boolean eq(Object o1, Object o2) {
            return o1 == null ? o2 == null : o1.equals(o2);
        }

        @Override
        public int hashCode() {
            return (key == null ? 0 : key.hashCode())
                    ^ (value == null ? 0 : value.hashCode());
        }

        @Override
        public String toString() {
            return key + "=" + value;
        }
    }

    /** Factory hook so subclasses can return richer sampling-entry subtypes. */
    @SuppressWarnings("unchecked")
    protected <E extends SamplingEntry> E createSamplingEntry(K key, V value) {
        return (E) new SamplingEntry(key, value);
    }

    /**
     * Gets and returns samples as <code>sampleCount</code>.
     *
     * @param sampleCount Count of samples
     * @return the sampled {@link SamplingEntry} list
     */
    public <E extends SamplingEntry> Iterable<E> getRandomSamples(int sampleCount) {
        if (sampleCount < 0) {
            throw new IllegalArgumentException("Sample count cannot be a negative value.");
        }
        if (sampleCount == 0 || size() == 0) {
            // Typed empty list instead of the raw EMPTY_LIST constant.
            return Collections.emptyList();
        }
        return new LazySamplingEntryIterableIterator<E>(sampleCount);
    }

    /**
     * This class implements both the "Iterable" and "Iterator" interfaces.
     * So we can use only one object (instead of two) both for "Iterable" and "Iterator" interfaces.
     *
     * NOTE: Assumed that it is not accessed by multiple threads. So there is no synchronization.
     */
    private final class LazySamplingEntryIterableIterator<E extends SamplingEntry> implements Iterable<E>, Iterator<E> {

        private final int maxEntryCount;
        private final int randomNumber;
        private final int firstSegmentIndex;
        private int currentSegmentIndex;
        private int currentBucketIndex;
        private HashEntry<K, V> currentEntry;
        private int returnedEntryCount;
        private boolean reachedToEnd;
        private E currentSample;

        private LazySamplingEntryIterableIterator(int maxEntryCount) {
            this.maxEntryCount = maxEntryCount;
            this.randomNumber = ThreadLocalRandomProvider.get().nextInt(Integer.MAX_VALUE);
            this.firstSegmentIndex = randomNumber % segments.length;
            this.currentSegmentIndex = firstSegmentIndex;
            this.currentBucketIndex = -1;
        }

        @Override
        public Iterator<E> iterator() {
            return this;
        }

        /**
         * Originally taken by Jaromir Hamala's implementation and changed as incremental implementation.
         * So kudos to Jaromir :)
         *
         * Advances to the next sample and stores it in {@code currentSample}, or sets it to
         * {@code null} when exhausted. Idempotent while an unconsumed sample is pending, so
         * repeated {@code hasNext()} calls no longer skip elements.
         */
        //CHECKSTYLE:OFF
        private void iterate() {
            // A previously computed sample has not been consumed by next() yet; keep it.
            if (currentSample != null) {
                return;
            }
            if (returnedEntryCount >= maxEntryCount || reachedToEnd) {
                currentSample = null;
                return;
            }
            do {
                Segment<K, V> segment = segments[currentSegmentIndex];
                if (segment != null) {
                    HashEntry<K, V>[] table = segment.table;
                    // Pick up a starting point
                    int firstBucketIndex = randomNumber % table.length;
                    // If current bucket index is not initialized yet, initialize it with starting point
                    if (currentBucketIndex == -1) {
                        currentBucketIndex = firstBucketIndex;
                    }
                    do {
                        // If current entry is not initialized yet, initialize it
                        if (currentEntry == null) {
                            currentEntry = table[currentBucketIndex];
                        }
                        while (currentEntry != null) {
                            V value = currentEntry.value();
                            K key = currentEntry.key();
                            // Advance to next entry
                            currentEntry = currentEntry.next;
                            if (isValidForSampling(value)) {
                                currentSample = createSamplingEntry(key, value);
                                // If we reached end of entries, advance current bucket index
                                if (currentEntry == null) {
                                    currentBucketIndex = ++currentBucketIndex < table.length ? currentBucketIndex : 0;
                                }
                                returnedEntryCount++;
                                return;
                            }
                        }
                        // Advance current bucket index
                        currentBucketIndex = ++currentBucketIndex < table.length ? currentBucketIndex : 0;
                        // Clear current entry index to initialize at next bucket
                        currentEntry = null;
                    } while (currentBucketIndex != firstBucketIndex);
                }
                // Advance current segment index
                currentSegmentIndex = ++currentSegmentIndex < segments.length ? currentSegmentIndex : 0;
                // Clear current bucket index to initialize at next segment
                currentBucketIndex = -1;
                // Clear current entry index to initialize at next segment
                currentEntry = null;
            } while (currentSegmentIndex != firstSegmentIndex);
            reachedToEnd = true;
            currentSample = null;
        }
        //CHECKSTYLE:ON

        @Override
        public boolean hasNext() {
            iterate();
            return currentSample != null;
        }

        @Override
        public E next() {
            // Compute a sample here too, so next() works even when the caller skips hasNext().
            iterate();
            E sample = currentSample;
            if (sample == null) {
                throw new NoSuchElementException();
            }
            // Consume the sample so each element is returned exactly once (Iterator contract);
            // previously next() never advanced and could return the same sample repeatedly.
            currentSample = null;
            return sample;
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException("Removing is not supported");
        }
    }

    /** Returns {@code true} for values that may appear in a random sample (non-null only). */
    protected boolean isValidForSampling(V value) {
        return value != null;
    }
}
|
|
/**
* Copyright (C) 2015 BCC Risk Advisory (info@bccriskadvisory.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.bccriskadvisory.link.processor;
import static com.bccriskadvisory.link.utility.Utilities.now;
import static com.google.common.base.Preconditions.checkNotNull;
import java.util.List;
import java.util.Map.Entry;
import org.joda.time.DateTime;
import com.atlassian.jira.bc.ServiceResult;
import com.atlassian.jira.bc.project.ProjectService.GetProjectResult;
import com.atlassian.jira.project.Project;
import com.atlassian.jira.user.ApplicationUser;
import com.atlassian.jira.user.util.UserManager;
import com.bccriskadvisory.jira.ao.connection.Connection;
import com.bccriskadvisory.jira.ao.projectlink.ProjectLink;
import com.bccriskadvisory.link.JiraPluginContext;
import com.bccriskadvisory.link.connector.EdgescanConnectionException;
import com.bccriskadvisory.link.connector.EdgescanV1Connector;
import com.bccriskadvisory.link.connector.EdgescanV1Connector.RequestBuilder;
import com.bccriskadvisory.link.rest.PluginError;
import com.bccriskadvisory.link.rest.edgescan.Risk;
import com.bccriskadvisory.link.rest.edgescan.Vulnerability;
import com.bccriskadvisory.link.utility.AbstractLogSupported;
import com.bccriskadvisory.link.utility.TimedTask;
import com.google.common.base.Joiner;
public abstract class AbstractProjectImportProcessor extends AbstractLogSupported implements ProjectImportProcessor {
protected final JiraPluginContext pluginContext;
private final ImportMode importMode;
protected final boolean testMode;
protected final ImportResults importResults;
protected ProjectLink link;
protected ApplicationUser user;
private Project project;
private EdgescanV1Connector connector;
private VulnerabilityDetailGenerator vulnerabilityDetailGenerator;
private DateTime importStartedAt;
public AbstractProjectImportProcessor(final JiraPluginContext pluginContext, final ImportMode importMode, final boolean testMode) {
this.pluginContext = checkNotNull(pluginContext, "Plugin context");
this.importMode = importMode;
this.testMode = testMode;
this.importResults = new ImportResults(importMode, testMode);
}
@Override
public ProjectImportProcessor initWithLink(final ProjectLink link) {
this.link = link;
Connection connection = pluginContext.getConnectionService().find(link.getConnectionId());
this.connector = new EdgescanV1Connector(pluginContext.getRequestFactory(), connection);
this.vulnerabilityDetailGenerator = new VulnerabilityDetailGenerator(connector);
final UserManager jiraUserManager = pluginContext.getUserManager().getJiraUserManager();
this.user = checkNotNull(jiraUserManager.getUserByKey(link.getUserKey()), "Could not find user with key " + link.getUserKey());
GetProjectResult result = pluginContext.getProjectService().getProjectByKey(user, link.getProjectKey());
this.project = checkNotNull(result.getProject(), "Could not find project with key " + link.getProjectKey());
return this;
}
@Override
public ImportResults processImport() {
if (importLockAcquired()) {
try {
noteStartTime();
final List<Vulnerability> vulnerabilities = getVulnerabilitiesToImport();
if (!vulnerabilities.isEmpty()) {
preProcess();
try (TimedTask linkUpdateTask = new TimedTask("Import vulnerability data from edgescan for project: " + link)) {
for (final Vulnerability vulnerability : vulnerabilities) {
processVulnerability(vulnerability);
}
}
postProcess();
}
setProjectLinkLastUpdated();
} catch (final EdgescanConnectionException e) {
getLog().error("Unable to get vulnerabilities for import", e);
importResults.addError(new PluginError(e));
} finally {
releaseImportLock();
}
}
return importResults;
}
protected abstract boolean importLockAcquired();
protected abstract void releaseImportLock();
private void noteStartTime() {
//Should the task take a significant amount of time, there's a risk that a vulnerability will be updated in edgescan while it's running.
//We avoid missing such vulnerabilities by taking our update time to be *before* retrieving them.
importStartedAt = now();
}
private void setProjectLinkLastUpdated() {
if (!testMode) {
link.setLastUpdated(importStartedAt);
pluginContext.getProjectLinkService().update(link);
}
}
private List<Vulnerability> getVulnerabilitiesToImport() throws EdgescanConnectionException {
final RequestBuilder vulnerabilityRequest = connector.vulnerabilities();
vulnerabilityRequest.stringQuery("asset_id_in", Joiner.on(",").join(link.getAssets()));
if (importMode.isUpdate() && link.getLastUpdated() != null) {
vulnerabilityRequest.dateQuery("updated_at_after", link.getLastUpdated());
}
return vulnerabilityRequest.execute().getVulnerabilities();
}
protected abstract void preProcess();
protected abstract void postProcess();
protected void processVulnerability(Vulnerability vulnerability) {
VulnerabilityImportProcessor processor = createProcessor(vulnerability);
Risk edgescanRisk = Risk.fromInt(vulnerability.getRisk());
if (processor.isLinked()) {
if (link.riskIsMapped(edgescanRisk) && vulnerability.isOpen()) {
update(processor, edgescanRisk);
} else {
close(processor, edgescanRisk);
}
} else {
if (link.riskIsMapped(edgescanRisk) && vulnerability.isOpen()) {
open(processor, edgescanRisk);
} else {
importResults.forRisk(edgescanRisk).ignored();
}
}
}
private VulnerabilityImportProcessor createProcessor(Vulnerability vulnerability) {
final VulnerabilityImportProcessor processor = new VulnerabilityImportProcessor(pluginContext, vulnerabilityDetailGenerator, link, user, project, vulnerability);
processor.init(testMode);
return processor;
}
private void open(VulnerabilityImportProcessor processor, Risk edgescanRisk) {
final ServiceResult openResult = recordValidationErrors(processor.open());
if (openResult != null && openResult.isValid()) {
processor.link();
importResults.forRisk(edgescanRisk).opened();
} else {
importResults.forRisk(edgescanRisk).failed();
}
}
private void close(VulnerabilityImportProcessor processor, Risk edgescanRisk) {
final ServiceResult closeResult = recordValidationErrors(processor.close());
if (closeResult != null && closeResult.isValid()) {
processor.unlink();
importResults.forRisk(edgescanRisk).closed();
} else {
importResults.forRisk(edgescanRisk).failed();
}
}
private void update(VulnerabilityImportProcessor processor, Risk edgescanRisk) {
final ServiceResult updateResult = recordValidationErrors(processor.update());
if (updateResult != null && updateResult.isValid()) {
importResults.forRisk(edgescanRisk).updated();
} else {
importResults.forRisk(edgescanRisk).failed();
}
}
protected ServiceResult recordValidationErrors(ServiceResult result) {
if (result != null && !result.isValid()) {
for (Entry<String, String> error : result.getErrorCollection().getErrors().entrySet()) {
final String errorString = String.format("Import failed on [%s], because [%s]", error.getKey(), error.getValue());
importResults.addError(new PluginError("Import failed", errorString));
getLog().error(errorString);
}
}
return result;
}
}
|
|
package jackfrancisdalton.com.LiteratureAnalyser.service;
import android.app.Activity;
import android.app.NotificationManager;
import android.arch.persistence.db.SupportSQLiteOpenHelper;
import android.arch.persistence.room.Room;
import android.content.Context;
import android.content.Intent;
import android.content.res.Resources;
import android.os.AsyncTask;
import android.support.v4.app.NotificationCompat;
import android.util.Log;
import android.widget.ProgressBar;
import android.widget.TextView;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import jackfrancisdalton.com.LiteratureAnalyser.MainActivity;
import jackfrancisdalton.com.LiteratureAnalyser.R;
import jackfrancisdalton.com.LiteratureAnalyser.data.AppDatabase;
import jackfrancisdalton.com.LiteratureAnalyser.model.Book;
import jackfrancisdalton.com.LiteratureAnalyser.model.Chapter;
import jackfrancisdalton.com.LiteratureAnalyser.model.Section;
import jackfrancisdalton.com.LiteratureAnalyser.model.SuperText;
import jackfrancisdalton.com.LiteratureAnalyser.model.resultModels.ChapterViewModel;
import jackfrancisdalton.com.LiteratureAnalyser.util.SupportedSuperTexts;
public class DatabaseService {
public static AsyncTask<Void, Void, Boolean> checkIfDataExists(final Activity activity) {
return new AsyncTask<Void, Void, Boolean>() {
@Override
protected Boolean doInBackground(Void... params) {
AppDatabase db = Room.databaseBuilder(activity, AppDatabase.class, "main-database").build();
SuperText quran = db.superTextDao().findById(SupportedSuperTexts.quran.getUid());
SuperText bible = db.superTextDao().findById(SupportedSuperTexts.KJBible.getUid());
if(quran != null && bible != null)
return true;
else
return false;
}
};
}
public static List<Chapter> getChapterByBook(final Context context, final int bookUid) throws ExecutionException, InterruptedException {
return new AsyncTask<Void, Void, List<Chapter>>() {
@Override
protected List<Chapter> doInBackground(Void... params) {
AppDatabase db = Room.databaseBuilder(context, AppDatabase.class, "main-database").build();
return db.chapterDao().findByBook(bookUid);
}
}.execute().get();
}
public static Section getFirstSection(final Activity activity) throws ExecutionException, InterruptedException {
return new AsyncTask<Void, Void, Section>() {
@Override
protected Section doInBackground(Void... params) {
AppDatabase db = Room.databaseBuilder(activity, AppDatabase.class, "main-database").build();
return db.sectionDao().getFirstSection();
}
}.execute().get();
}
public static int getHighestSectionNumberBySuperText(final Activity activity, final int superTextUid) {
int highestValue = 0;
try {
highestValue = new AsyncTask<Void, Void, Integer>() {
@Override
protected Integer doInBackground(Void... params) {
AppDatabase db = Room.databaseBuilder(activity, AppDatabase.class, "main-database").build();
return db.sectionDao().getHighestSectionNumberBySuperText(superTextUid);
}
}.execute().get();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (ExecutionException e) {
e.printStackTrace();
}
return highestValue;
}
public static int getHighestChapterNumberBySuperText(final Activity activity, final int superTextUid) {
int highestValue = 0;
try {
highestValue = new AsyncTask<Void, Void, Integer>() {
@Override
protected Integer doInBackground(Void... params) {
AppDatabase db = Room.databaseBuilder(activity, AppDatabase.class, "main-database").build();
return db.chapterDao().getHighestChapterNumberBySuperText(superTextUid);
}
}.execute().get();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (ExecutionException e) {
e.printStackTrace();
}
return highestValue;
}
public static int getHighestSectionNumberOfBook(final Activity activity, final int bookUid) {
int highestValue = 0;
try {
highestValue = new AsyncTask<Void, Void, Integer>() {
@Override
protected Integer doInBackground(Void... params) {
AppDatabase db = Room.databaseBuilder(activity, AppDatabase.class, "main-database").build();
return db.sectionDao().getHighestSectionNumberByBook(bookUid);
}
}.execute().get();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (ExecutionException e) {
e.printStackTrace();
}
return highestValue;
}
public static int getHighestChapterNumberOfBook(final Activity activity, final int bookUid) {
int highestValue = 0;
try {
highestValue = new AsyncTask<Void, Void, Integer>() {
@Override
protected Integer doInBackground(Void... params) {
AppDatabase db = Room.databaseBuilder(activity, AppDatabase.class, "main-database").build();
return db.chapterDao().getHighestChapterNumberByBook(bookUid);
}
}.execute().get();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (ExecutionException e) {
e.printStackTrace();
}
return highestValue;
}
public static List<Section> getFavouriteSections(final Activity activity) {
List<Section> sections = new ArrayList<>();
try {
sections = new AsyncTask<Void, Void, List<Section>>() {
@Override
protected List<Section> doInBackground(Void... params) {
AppDatabase db = Room.databaseBuilder(activity, AppDatabase.class, "main-database").build();
List<Section> favouriteSections = db.sectionDao().getFavourites();
return favouriteSections;
}
}.execute().get();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (ExecutionException e) {
e.printStackTrace();
}
return sections;
}
public static void setSectionFavouriteValue(final Activity activity, final int bookUid, final boolean isFavourite) {
try {
new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
AppDatabase db = Room.databaseBuilder(activity, AppDatabase.class, "main-database").build();
db.sectionDao().setAsFavourite(bookUid, isFavourite);
return null;
}
}.execute().get();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (ExecutionException e) {
e.printStackTrace();
}
}
public static AsyncTask<Void, Integer, Void> loadQuranIfDoesntExist(
final Activity activity,
final TextView displayText,
final ProgressBar progressBar
) {
new AsyncTask<Void, Integer, Void>() {
@Override
protected Void doInBackground(Void... params) {
AppDatabase db = Room.databaseBuilder(activity, AppDatabase.class, "main-database").build();
db.sectionDao().deleteAll();
db.chapterDao().deleteAll();
db.bookDao().deleteAll();
db.superTextDao().deleteAll();
// db.sectionDao().deleteAllForSuperText(quran_SuperTextUID);
// db.chapterDao().deleteAllForSuperText(quran_SuperTextUID);
// db.bookDao().deleteAllForSuperText(quran_SuperTextUID);
// db.superTextDao().delete(new SuperText(quran_SuperTextUID, "Quran"));
if(null == db.superTextDao().findById(SupportedSuperTexts.quran.getUid())) {
Resources resources = activity.getResources();
InputStream is = resources.openRawResource(R.raw.quran);
BufferedReader reader = new BufferedReader(new InputStreamReader(is, Charset.forName("UTF-8")));
SuperText quran = new SuperText(SupportedSuperTexts.quran.getUid(), "Quran");
db.sectionDao().deleteAllForSuperText(quran.getUid());
db.chapterDao().deleteAllForSuperText(quran.getUid());
db.bookDao().deleteAllForSuperText(quran.getUid());
db.superTextDao().delete(quran);
int quranUid = (int) db.superTextDao().insert(quran);
Book mainBook = new Book("Main Book", 1, quranUid);
int bookUid = (int) db.bookDao().insert(mainBook);
try {
String line;
List<Section> chapterSectionBatch = new ArrayList<>();
boolean batchReady = false;
boolean isFirstChapter = true;
Map<String, Chapter> chapters = new HashMap<>();
String emptyFirstLine = reader.readLine();
BufferedReader readerForGettingLineCount = new BufferedReader(
new InputStreamReader(
resources.openRawResource(R.raw.quran),
Charset.forName("UTF-8")
)
);
int count = 0;
while((readerForGettingLineCount.readLine()) != null) { count++; }
int progressMax = count;
int currentProgress = 0;
while ((line = reader.readLine()) != null) {
String[] rowData = line.split("\\|");
String chapterNumber = rowData[0];
String sectionNumber = rowData[1];
String content = rowData[2];
//if a book already exists AND the chapter doesn't yet
if(!chapters.containsKey(chapterNumber)) {
Chapter chapter = new Chapter(
Integer.parseInt(chapterNumber),
"Chapter " + chapterNumber,
bookUid,
quran.getUid()
);
int chapterUid = (int) db.chapterDao().insert(chapter);
chapter.setUid(chapterUid);
chapters.put(chapterNumber, chapter);
if(isFirstChapter)
isFirstChapter = false;
else
batchReady = true;
}
//if the chapter exists create sections and insert them
if(chapters.containsKey(chapterNumber)) {
Section section = new Section(
Integer.parseInt(sectionNumber),
Integer.parseInt(chapterNumber),
mainBook.getTitle(),
quran.getTitle(),
"Section " + sectionNumber,
content,
false,
chapters.get(chapterNumber).getUid(),
bookUid,
quran.getUid()
);
chapterSectionBatch.add(section);
currentProgress++;
}
if(batchReady) {
db.sectionDao().insertAll(chapterSectionBatch);
chapterSectionBatch.clear();
batchReady = false;
publishProgress(currentProgress, progressMax);
}
}
} catch (IOException ex) {
} finally {
try { is.close(); }
catch (IOException e) {
try {
throw e;
} catch (IOException e1) {
e1.printStackTrace();
}
}
}
}
return null;
}
@Override
protected void onProgressUpdate(final Integer... values) {
activity.runOnUiThread(new Runnable() {
public void run() {
double progressAmount = ((double)values[0] / (double)values[1]) * 100;
displayText.setText("Loading Quran");
if(progressAmount > 97) {
progressAmount = 100;
displayText.setText("Quran Loaded");
}
progressBar.setProgress((int) progressAmount);
}
});
}
@Override
protected void onPostExecute(Void aVoid) {
String temp = "";
loadKingJamesBibleIfDoesntExist(activity, displayText, progressBar);
}
}.execute();
return null;
}
public static void loadKingJamesBibleIfDoesntExist(
final Activity activity,
final TextView displayText,
final ProgressBar progressBar
) {
new AsyncTask<Void, Integer, Void>() {
@Override
protected Void doInBackground(Void... params) {
AppDatabase db = Room.databaseBuilder(activity, AppDatabase.class, "main-database").build();
if(null == db.superTextDao().findById(SupportedSuperTexts.KJBible.getUid())) {
Resources resources = activity.getResources();
InputStream is = resources.openRawResource(R.raw.bible_kj);
BufferedReader reader = new BufferedReader(new InputStreamReader(is, Charset.forName("UTF-8")));
SuperText bible = new SuperText(SupportedSuperTexts.KJBible.getUid(), "King James Bible");
db.sectionDao().deleteAllForSuperText(bible.getUid());
db.chapterDao().deleteAllForSuperText(bible.getUid());
db.bookDao().deleteAllForSuperText(bible.getUid());
db.superTextDao().delete(bible);
db.superTextDao().insert(bible);
try {
String line;
List<Section> chapterSectionBatch = new ArrayList<>();
boolean batchReady = false;
boolean isFirstChapter = true;
Map<String, Book> books = new HashMap<>();
Map<String, Chapter> chapters = new HashMap<>();
BufferedReader readerForGettingLineCount = new BufferedReader(
new InputStreamReader(
resources.openRawResource(R.raw.bible_kj),
Charset.forName("UTF-8")
)
);
int count = 0;
while((readerForGettingLineCount.readLine()) != null) { count++; }
int progressMax = count;
int currentProgress = 0;
int bookNumberToAssign = 1;
while ((line = reader.readLine()) != null) {
String[] rowData = line.split("\\|");
String bookName = rowData[0];
String chapterNumber = rowData[1];
String sectionNumber = rowData[2];
String content = rowData[3];
//chapters have the same number across books so add the name to make map key unique
String uniqueMapChapterKey = bookName + chapterNumber;
//if book isn't in the list, add to db and add to list with uids
if(!books.containsKey(bookName)) {
Book book = new Book(bookName, bookNumberToAssign, bible.getUid());
long bookUid = db.bookDao().insert(book);
book.setUid((int) bookUid);
books.put(bookName, book);
bookNumberToAssign++;
}
//if a book already exists AND the chapter doesn't yet
if(books.containsKey(bookName) && !chapters.containsKey(uniqueMapChapterKey)) {
Chapter chapter = new Chapter(
Integer.parseInt(chapterNumber),
"Chapter " + chapterNumber,
books.get(bookName).getUid(),
bible.getUid()
);
int chapterUid = (int) db.chapterDao().insert(chapter);
chapter.setUid(chapterUid);
chapters.put(uniqueMapChapterKey, chapter);
if(isFirstChapter) {
isFirstChapter = false;
} else {
batchReady = true;
}
}
//TODO: convert this part to batch per chapter
//if the chapter exists create sections and insert them
if(chapters.containsKey(uniqueMapChapterKey)) {
Section section = new Section(
Integer.parseInt(sectionNumber),
chapters.get(uniqueMapChapterKey).getChapterNumber(),
books.get(bookName).getTitle(),
bible.getTitle(),
"Section " + sectionNumber,
content,
false,
chapters.get(uniqueMapChapterKey).getUid(),
books.get(bookName).getUid(),
bible.getUid()
);
chapterSectionBatch.add(section);
currentProgress++;
}
if(batchReady) {
db.sectionDao().insertAll(chapterSectionBatch);
chapterSectionBatch.clear();
batchReady = false;
publishProgress(currentProgress, progressMax);
}
}
} catch (IOException ex) {
} finally {
try { is.close(); }
catch (IOException e) {
try {
throw e;
} catch (IOException e1) {
e1.printStackTrace();
}
}
}
}
return null;
}
@Override
protected void onProgressUpdate(final Integer... values) {
activity.runOnUiThread(new Runnable() {
public void run() {
double progressAmount = ((double)values[0] / (double)values[1]) * 100;
displayText.setText("Loading Bible");
if(progressAmount > 97) {
progressAmount = 100;
displayText.setText("Bible loaded");
}
progressBar.setProgress((int) progressAmount);
}
});
}
@Override
protected void onPostExecute(Void aVoid) {
super.onPostExecute(aVoid);
Intent intent = new Intent(activity, MainActivity.class);
intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
activity.startActivity(intent);
}
}.execute();
}
public static ChapterViewModel getChapter(final Context context, final int chapterId) throws ExecutionException, InterruptedException {
ChapterViewModel chapter;
chapter = new AsyncTask<Void, Void, ChapterViewModel>() {
@Override
protected ChapterViewModel doInBackground(Void... params) {
AppDatabase db = Room.databaseBuilder(context, AppDatabase.class, "main-database").build();
List<Section> sections = db.sectionDao().getSectionsByChapter(chapterId);
Chapter chapter = db.chapterDao().findById(chapterId);
return new ChapterViewModel(chapter.getTitle(), chapter.getChapterNumber(), sections);
}
}.execute().get();
return chapter;
}
public static List<Section> searchQuran(final Context applicationContext, final String searchTerm) throws ExecutionException, InterruptedException {
List<Section> results = new AsyncTask<Void, Void, List<Section>>() {
@Override
protected List<Section> doInBackground(Void... params) {
AppDatabase db = Room.databaseBuilder(applicationContext, AppDatabase.class, "main-database").build();
List<Section> sections = db.sectionDao().containsWord('%' + searchTerm + '%');
return sections;
}
}.execute().get();
return results;
}
public static List<Book> getBooks(final Context context) {
List<Book> results = null;
try {
results = new AsyncTask<Void, Void, List<Book>>() {
@Override
protected List<Book> doInBackground(Void... params) {
AppDatabase db = Room.databaseBuilder(context, AppDatabase.class, "main-database").build();
return db.bookDao().getAll();
}
}.execute().get();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (ExecutionException e) {
e.printStackTrace();
}
return results;
}
public static List<Book> getBooksBySuperText(final Context context, final int superTextUid) {
List<Book> results = null;
try {
results = new AsyncTask<Void, Void, List<Book>>() {
@Override
protected List<Book> doInBackground(Void... params) {
AppDatabase db = Room.databaseBuilder(context, AppDatabase.class, "main-database").build();
return db.bookDao().findBySuperText(superTextUid);
}
}.execute().get();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (ExecutionException e) {
e.printStackTrace();
}
return results;
}
/**
 * Loads the chapters of the given book, blocking the calling thread until done.
 *
 * @param context context used to open the Room database
 * @param bookUid uid of the book whose chapters are requested
 * @return the book's chapters, or {@code null} if the background query failed
 */
public static List<Chapter> getBookChapters(final Context context, final int bookUid) {
    List<Chapter> results = null;
    try {
        results = new AsyncTask<Void, Void, List<Chapter>>() {
            @Override
            protected List<Chapter> doInBackground(Void... params) {
                AppDatabase db = Room.databaseBuilder(context, AppDatabase.class, "main-database").build();
                // The previous lookup of the Book itself was never used; query only the chapters.
                return db.chapterDao().findByBook(bookUid);
            }
        }.execute().get();
    } catch (InterruptedException e) {
        // Restore the interrupt status instead of silently swallowing it.
        Thread.currentThread().interrupt();
        e.printStackTrace();
    } catch (ExecutionException e) {
        e.printStackTrace();
    }
    return results;
}
/**
 * Loads every super-text from the local Room database, blocking the calling thread until done.
 *
 * @param context context used to open the Room database
 * @return all super-texts, or {@code null} if the background query failed
 */
public static List<SuperText> getAllSuperTexts(final Context context) {
    List<SuperText> results = null;
    try {
        results = new AsyncTask<Void, Void, List<SuperText>>() {
            @Override
            protected List<SuperText> doInBackground(Void... params) {
                AppDatabase db = Room.databaseBuilder(context, AppDatabase.class, "main-database").build();
                return db.superTextDao().findAll();
            }
        }.execute().get();
    } catch (InterruptedException e) {
        // Restore the interrupt status instead of silently swallowing it.
        Thread.currentThread().interrupt();
        e.printStackTrace();
    } catch (ExecutionException e) {
        e.printStackTrace();
    }
    return results;
}
}
|
|
package org.knowm.xchange.bitmex;
import java.io.IOException;
import java.math.BigDecimal;
import java.util.Date;
import javax.annotation.Nullable;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import org.knowm.xchange.bitmex.dto.account.BitmexAccount;
import org.knowm.xchange.bitmex.dto.account.BitmexMarginAccount;
import org.knowm.xchange.bitmex.dto.account.BitmexMarginAccountList;
import org.knowm.xchange.bitmex.dto.account.BitmexWallet;
import org.knowm.xchange.bitmex.dto.account.BitmexWalletTransaction;
import org.knowm.xchange.bitmex.dto.account.BitmexWalletTransactionList;
import org.knowm.xchange.bitmex.dto.marketdata.BitmexPrivateOrder;
import org.knowm.xchange.bitmex.dto.marketdata.BitmexPrivateOrderList;
import org.knowm.xchange.bitmex.dto.trade.BitmexCancelAll;
import org.knowm.xchange.bitmex.dto.trade.BitmexPosition;
import org.knowm.xchange.bitmex.dto.trade.BitmexPositionList;
import org.knowm.xchange.bitmex.dto.trade.BitmexPrivateExecution;
import org.knowm.xchange.bitmex.dto.trade.PlaceOrderCommand;
import org.knowm.xchange.bitmex.dto.trade.ReplaceOrderCommand;
import si.mazi.rescu.ParamsDigest;
import si.mazi.rescu.SynchronizedValueFactory;
@Path("api/v1")
@Produces(MediaType.APPLICATION_JSON)
public interface BitmexAuthenticated extends Bitmex {

  /**
   * Get all raw executions for your account. This returns all raw transactions, which includes
   * order opening and cancellation, and order status changes. It can be quite noisy. More focused
   * information is available at /execution/tradeHistory.
   */
  @GET
  @Path("execution")
  HttpResponseAwareList<BitmexPrivateExecution> getExecutions(
      @HeaderParam("api-key") String apiKey,
      @HeaderParam("api-expires") SynchronizedValueFactory<Long> nonce,
      @HeaderParam("api-signature") ParamsDigest paramsDigest,
      @Nullable @QueryParam("symbol") String symbol,
      @Nullable @QueryParam("filter") String filter,
      @Nullable @QueryParam("columns") String columns,
      @Nullable @QueryParam("count") Integer count,
      @Nullable @QueryParam("start") Long start,
      @Nullable @QueryParam("reverse") Boolean reverse,
      @Nullable @QueryParam("startTime") Date startTime,
      @Nullable @QueryParam("endTime") Date endTime)
      throws IOException, BitmexException;

  /**
   * Get all balance-affecting executions. This includes each trade, insurance charge, and
   * settlement.
   */
  @GET
  @Path("execution/tradeHistory")
  HttpResponseAwareList<BitmexPrivateExecution> getTradeHistory(
      @HeaderParam("api-key") String apiKey,
      @HeaderParam("api-expires") SynchronizedValueFactory<Long> nonce,
      @HeaderParam("api-signature") ParamsDigest paramsDigest,
      @Nullable @QueryParam("symbol") String symbol,
      @Nullable @QueryParam("filter") String filter,
      @Nullable @QueryParam("columns") String columns,
      @Nullable @QueryParam("count") Integer count,
      @Nullable @QueryParam("start") Long start,
      @Nullable @QueryParam("reverse") Boolean reverse,
      @Nullable @QueryParam("startTime") Date startTime,
      @Nullable @QueryParam("endTime") Date endTime)
      throws IOException, BitmexException;

  /**
   * @param apiKey
   * @param nonce
   * @param paramsDigest
   * @param symbol Instrument symbol. Send a bare series (e.g. XBU) to get data for the nearest
   *     expiring contract in that series. You can also send a timeframe, e.g. XBU:monthly.
   *     Timeframes are daily, weekly, monthly, quarterly, and biquarterly.
   * @param filter Generic table filter. Send JSON key/value pairs, such as {"key": "value"}. You
   *     can key on individual fields, and do more advanced querying on timestamps. See the
   *     Timestamp Docs for more details.
   * @param columns Generic table filter. Send JSON key/value pairs, such as {"key": "value"}. You
   *     can key on individual fields, and do more advanced querying on timestamps. See the
   *     Timestamp Docs for more details. (NOTE(review): this text duplicates {@code filter};
   *     presumably {@code columns} selects which columns to return — verify against the BitMEX
   *     API docs.)
   * @param count Number of results to fetch.
   * @param start Starting point for results.
   * @param reverse If true, will sort results newest first.
   * @param startTime Starting date filter for results.
   * @param endTime Ending date filter for results.
   * @return {@link BitmexPrivateOrderList} containing the requested order(s).
   * @throws IOException
   * @throws BitmexException
   */
  @GET
  @Path("order")
  BitmexPrivateOrderList getOrders(
      @HeaderParam("api-key") String apiKey,
      @HeaderParam("api-expires") SynchronizedValueFactory<Long> nonce,
      @HeaderParam("api-signature") ParamsDigest paramsDigest,
      @Nullable @QueryParam("symbol") String symbol,
      @Nullable @QueryParam("filter") String filter,
      @Nullable @QueryParam("columns") String columns,
      @Nullable @QueryParam("count") Integer count,
      @Nullable @QueryParam("start") Long start,
      @Nullable @QueryParam("reverse") Boolean reverse,
      @Nullable @QueryParam("startTime") Date startTime,
      @Nullable @QueryParam("endTime") Date endTime)
      throws IOException, BitmexException;

  /**
   * @param apiKey
   * @param nonce
   * @param paramsDigest
   * @param symbol Instrument symbol. e.g. {@code XBTUSD}.
   * @param side Optional Order side. Valid options: {@code Buy}, {@code Sell}. Defaults to {@code
   *     Buy} unless {@code orderQty} or {@code simpleOrderQty} is negative.
   * @param orderQuantity Optional Order quantity in units of the instrument (i.e. contracts).
   * @param simpleOrderQuantity Optional Order quantity in units of the underlying instrument (i.e.
   *     Bitcoin).
   * @param displayQuantity Optional quantity to display in the book. Use {@code 0} for a fully
   *     hidden order.
   * @param price Optional limit price for {@code Limit}, {@code StopLimit}, and {@code
   *     LimitIfTouched} orders.
   * @param stopPrice Optional trigger price for {@code Stop}, {@code StopLimit}, {@code
   *     MarketIfTouched}, and {@code LimitIfTouched} orders. Use a price below the current price
   *     for stop-sell orders and buy-if-touched orders. Use {@code execInst} of {@code MarkPrice}
   *     or {@code LastPrice} to define the current price used for triggering.
   * @param orderType Optional Order type. Valid options: {@code Market}, {@code Limit}, {@code
   *     Stop}, {@code StopLimit}, {@code MarketIfTouched}, {@code LimitIfTouched}, {@code
   *     MarketWithLeftOverAsLimit}, {@code Pegged}. Defaults to {@code Limit} when {@code price} is
   *     specified. Defaults to {@code Stop} when {@code stopPx} is specified. Defaults to {@code
   *     StopLimit} when {@code price} and {@code stopPx} are specified.
   * @param clOrdID Optional Client Order ID. This {@code clOrdID} will come back on the order and
   *     any related executions.
   * @param executionInstructions Optional execution instructions. Valid options: {@code
   *     ParticipateDoNotInitiate}, {@code AllOrNone}, {@code MarkPrice}, {@code IndexPrice}, {@code
   *     LastPrice}, {@code Close}, {@code ReduceOnly}, {@code Fixed}. {@code AllOrNone} instruction
   *     requires {@code displayQty} to be {@code 0}. {@code MarkPrice}, {@code IndexPrice} or
   *     {@code LastPrice} instruction valid for {@code Stop}, {@code StopLimit}, {@code
   *     MarketIfTouched}, and {@code LimitIfTouched} orders.
   * @param clOrdLinkID Optional Client Order Link ID for contingent orders.
   * @param contingencyType Optional contingency type for use with clOrdLinkID. Valid options:
   *     {@code OneCancelsTheOther}, {@code OneTriggersTheOther}, {@code
   *     OneUpdatesTheOtherAbsolute}, {@code OneUpdatesTheOtherProportional}.
   * @param pegPriceType Optional peg price type. Valid options: {@code LastPeg}, {@code
   *     MidPricePeg}, {@code MarketPeg}, {@code PrimaryPeg}, {@code TrailingStopPeg}.
   * @param pegOffsetValue Optional trailing offset from the current price for {@code Stop}, {@code
   *     StopLimit}, {@code MarketIfTouched}, and {@code LimitIfTouched} orders; use a negative
   *     offset for stop-sell orders and buy-if-touched orders. Optional offset from the peg price
   *     for {@code Pegged} orders.
   * @param timeInForce Optional Time in force. Valid options: {@code Day}, {@code GoodTillCancel},
   *     {@code ImmediateOrCancel}, {@code FillOrKill}. Defaults to {@code GoodTillCancel} for
   *     {@code Limit}, {@code StopLimit}, {@code LimitIfTouched}, and {@code
   *     MarketWithLeftOverAsLimit} orders.
   * @param text Optional order annotation. e.g. {@code Take profit}.
   * @return {@link BitmexPrivateOrder} contains the result of the call.
   * @throws IOException
   * @throws BitmexException
   */
  @POST
  @Path("order")
  BitmexPrivateOrder placeOrder(
      @HeaderParam("api-key") String apiKey,
      @HeaderParam("api-expires") SynchronizedValueFactory<Long> nonce,
      @HeaderParam("api-signature") ParamsDigest paramsDigest,
      @FormParam("symbol") String symbol,
      @Nullable @FormParam("side") String side,
      @Nullable @FormParam("orderQty") BigDecimal orderQuantity,
      @Nullable @FormParam("simpleOrderQty") BigDecimal simpleOrderQuantity,
      @Nullable @FormParam("displayQty") BigDecimal displayQuantity,
      @Nullable @FormParam("price") BigDecimal price,
      @Nullable @FormParam("stopPx") BigDecimal stopPrice,
      @Nullable @FormParam("ordType") String orderType,
      @Nullable @FormParam("clOrdID") String clOrdID,
      @Nullable @FormParam("execInst") String executionInstructions,
      @Nullable @FormParam("clOrdLinkID") String clOrdLinkID,
      @Nullable @FormParam("contingencyType") String contingencyType,
      @Nullable @FormParam("pegOffsetValue") BigDecimal pegOffsetValue,
      @Nullable @FormParam("pegPriceType") String pegPriceType,
      @Nullable @FormParam("timeInForce") String timeInForce,
      @Nullable @FormParam("text") String text)
      throws IOException, BitmexException;

  /**
   * @param apiKey
   * @param nonce
   * @param paramsDigest
   * @param orderId Order ID
   * @param origClOrdID Client Order ID. See {@link Bitmex#placeOrder}.
   * @param clOrdID Optional new Client Order ID, requires {@code origClOrdID}.
   * @param simpleOrderQty Optional order quantity in units of the underlying instrument (i.e.
   *     Bitcoin).
   * @param orderQuantity Optional order quantity in units of the instrument (i.e. contracts).
   * @param simpleLeavesQty Optional leaves quantity in units of the underlying instrument (i.e.
   *     Bitcoin). Useful for amending partially filled orders.
   * @param leavesQty Optional leaves quantity in units of the instrument (i.e. contracts). Useful
   *     for amending partially filled orders.
   * @param price Optional limit price for {@code Limit}, {@code StopLimit}, and {@code
   *     LimitIfTouched} orders.
   * @param stopPrice Optional trigger price for {@code Stop}, {@code StopLimit}, {@code
   *     MarketIfTouched}, and {@code LimitIfTouched} orders. Use a price below the current price
   *     for stop-sell orders and buy-if-touched orders.
   * @param pegOffsetValue Optional trailing offset from the current price for {@code Stop}, {@code
   *     StopLimit}, {@code MarketIfTouched}, and {@code LimitIfTouched} orders; use a negative
   *     offset for stop-sell orders and buy-if-touched orders. Optional offset from the peg price
   *     for {@code Pegged} orders.
   * @param text Optional amend annotation. e.g. {@code Adjust skew}.
   * @return {@link BitmexPrivateOrder} contains the result of the call.
   * @throws IOException
   * @throws BitmexException
   */
  @PUT
  @Path("order")
  // for some reason the underlying library doesn't add a content type for PUT requests
  // automatically
  @Consumes("application/x-www-form-urlencoded")
  BitmexPrivateOrder replaceOrder(
      @HeaderParam("api-key") String apiKey,
      @HeaderParam("api-expires") SynchronizedValueFactory<Long> nonce,
      @HeaderParam("api-signature") ParamsDigest paramsDigest,
      @Nullable @FormParam("orderID") String orderId,
      @Nullable @FormParam("origClOrdID") String origClOrdID,
      @Nullable @FormParam("clOrdID") String clOrdID,
      @Nullable @FormParam("simpleOrderQty") BigDecimal simpleOrderQty,
      @Nullable @FormParam("orderQty") BigDecimal orderQuantity,
      @Nullable @FormParam("simpleLeavesQty") BigDecimal simpleLeavesQty,
      @Nullable @FormParam("leavesQty") BigDecimal leavesQty,
      @Nullable @FormParam("price") BigDecimal price,
      @Nullable @FormParam("stopPx") BigDecimal stopPrice,
      @Nullable @FormParam("pegOffsetValue") BigDecimal pegOffsetValue,
      @Nullable @FormParam("text") String text)
      throws IOException, BitmexException;

  /**
   * @param apiKey
   * @param nonce
   * @param paramsDigest
   * @param orderCommands JSON Array of order(s). Use {@link PlaceOrderCommand} to generate JSON.
   * @return {@link BitmexPrivateOrderList} contains the results of the call.
   * @throws IOException
   * @throws BitmexException
   */
  @POST
  @Path("order/bulk")
  BitmexPrivateOrderList placeOrderBulk(
      @HeaderParam("api-key") String apiKey,
      @HeaderParam("api-expires") SynchronizedValueFactory<Long> nonce,
      @HeaderParam("api-signature") ParamsDigest paramsDigest,
      @FormParam("orders") String orderCommands)
      throws IOException, BitmexException;

  /**
   * @param apiKey
   * @param nonce
   * @param paramsDigest
   * @param orderCommands JSON Array of order(s). Use {@link ReplaceOrderCommand} to generate JSON.
   * @return {@link BitmexPrivateOrderList} contains the results of the call.
   * @throws IOException
   * @throws BitmexException
   */
  @PUT
  @Path("order/bulk")
  // for some reason the underlying library doesn't add a content type for PUT requests
  // automatically
  @Consumes("application/x-www-form-urlencoded")
  BitmexPrivateOrderList replaceOrderBulk(
      @HeaderParam("api-key") String apiKey,
      @HeaderParam("api-expires") SynchronizedValueFactory<Long> nonce,
      @HeaderParam("api-signature") ParamsDigest paramsDigest,
      @FormParam("orders") String orderCommands)
      throws IOException, BitmexException;

  /**
   * Either an orderID or a clOrdID must be provided.
   *
   * @param apiKey
   * @param nonce
   * @param paramsDigest
   * @param orderID Order ID(s).
   * @param clOrdID Client Order ID(s). See {@link Bitmex#placeOrder}.
   * @return {@link BitmexPrivateOrderList} contains the results of the call.
   * @throws IOException
   * @throws BitmexException
   */
  @DELETE
  @Path("order")
  BitmexPrivateOrderList cancelOrder(
      @HeaderParam("api-key") String apiKey,
      @HeaderParam("api-expires") SynchronizedValueFactory<Long> nonce,
      @HeaderParam("api-signature") ParamsDigest paramsDigest,
      @Nullable @FormParam("orderID") String orderID,
      @Nullable @FormParam("clOrdID") String clOrdID)
      throws IOException, BitmexException;

  /**
   * @param apiKey
   * @param nonce
   * @param paramsDigest
   * @param symbol Optional symbol. If provided, only cancels orders for that symbol.
   * @param filter Optional filter for cancellation. Use to only cancel some orders, e.g. {"side":
   *     "Buy"}.
   * @param text Optional cancellation annotation. e.g. 'Spread Exceeded'
   * @return {@link BitmexPrivateOrderList} contains the results of the call.
   * @throws IOException
   * @throws BitmexException
   */
  @DELETE
  @Path("order/all")
  BitmexPrivateOrderList cancelAllOrders(
      @HeaderParam("api-key") String apiKey,
      @HeaderParam("api-expires") SynchronizedValueFactory<Long> nonce,
      @HeaderParam("api-signature") ParamsDigest paramsDigest,
      @Nullable @FormParam("symbol") String symbol,
      @Nullable @FormParam("filter") String filter,
      @Nullable @FormParam("text") String text)
      throws IOException, BitmexException;

  /**
   * Useful as a dead-man's switch to ensure your orders are canceled in case of an outage. If
   * called repeatedly, the existing offset will be canceled and a new one will be inserted in its
   * place. Example usage: call this route at 15s intervals with an offset of 60000 (60s). If this
   * route is not called within 60 seconds, all your orders will be automatically canceled.
   *
   * @param timeout Timeout in ms. Set to 0 to cancel this timer.
   * @return {@link BitmexPrivateOrderList} contains the results of the call.
   * @throws IOException
   * @throws BitmexException
   */
  @POST
  @Path("order/cancelAllAfter")
  BitmexCancelAll cancelAllAfter(
      @HeaderParam("api-key") String apiKey,
      @HeaderParam("api-expires") SynchronizedValueFactory<Long> nonce,
      @HeaderParam("api-signature") ParamsDigest paramsDigest,
      @FormParam("timeout") long timeout)
      throws IOException, BitmexException;

  /** Changes the leverage setting of the position for the given {@code symbol}. */
  @POST
  @Path("position/leverage")
  BitmexPosition updateLeveragePosition(
      @HeaderParam("api-key") String apiKey,
      @HeaderParam("api-expires") SynchronizedValueFactory<Long> nonce,
      @HeaderParam("api-signature") ParamsDigest paramsDigest,
      @FormParam("symbol") String symbol,
      @FormParam("leverage") BigDecimal leverage)
      throws IOException, BitmexException;

  /** Gets all of the account's positions. */
  @GET
  @Path("position")
  BitmexPositionList getPositions(
      @HeaderParam("api-key") String apiKey,
      @HeaderParam("api-expires") SynchronizedValueFactory<Long> nonce,
      @HeaderParam("api-signature") ParamsDigest paramsDigest)
      throws IOException, BitmexException;

  /** Gets the account's positions, optionally narrowed by a JSON {@code filter}. */
  @GET
  @Path("position")
  BitmexPositionList getPositions(
      @HeaderParam("api-key") String apiKey,
      @HeaderParam("api-expires") SynchronizedValueFactory<Long> nonce,
      @HeaderParam("api-signature") ParamsDigest paramsDigest,
      @Nullable @QueryParam("filter") String filter)
      throws IOException, BitmexException;

  /** Gets the authenticated user's account details. */
  @GET
  @Path("user")
  BitmexAccount getAccount(
      @HeaderParam("api-key") String apiKey,
      @HeaderParam("api-expires") SynchronizedValueFactory<Long> nonce,
      @HeaderParam("api-signature") ParamsDigest paramsDigest)
      throws IOException, BitmexException;

  /** Gets the user's wallet. */
  @GET
  @Path("user/wallet")
  BitmexWallet getWallet(
      @HeaderParam("api-key") String apiKey,
      @HeaderParam("api-expires") SynchronizedValueFactory<Long> nonce,
      @HeaderParam("api-signature") ParamsDigest paramsDigest /*,
      @Nullable @QueryParam("currency") String currency*/)
      throws IOException, BitmexException;

  /** Get a history of all of your wallet transactions (deposits, withdrawals, PNL) */
  @GET
  @Path("user/walletHistory")
  BitmexWalletTransactionList getWalletHistory(
      @HeaderParam("api-key") String apiKey,
      @HeaderParam("api-expires") SynchronizedValueFactory<Long> nonce,
      @HeaderParam("api-signature") ParamsDigest paramsDigest,
      @Nullable @QueryParam("currency") String currency)
      throws IOException, BitmexException;

  /** Get a summary of all of your wallet transactions (deposits, withdrawals, PNL) */
  @GET
  @Path("user/walletSummary")
  BitmexWalletTransactionList getWalletSummary(
      @HeaderParam("api-key") String apiKey,
      @HeaderParam("api-expires") SynchronizedValueFactory<Long> nonce,
      @HeaderParam("api-signature") ParamsDigest paramsDigest,
      @Nullable @QueryParam("currency") String currency)
      throws IOException, BitmexException;

  /** Gets the margin account status for a single (optional) currency. */
  @GET
  @Path("user/margin")
  BitmexMarginAccount getMarginAccountStatus(
      @HeaderParam("api-key") String apiKey,
      @HeaderParam("api-expires") SynchronizedValueFactory<Long> nonce,
      @HeaderParam("api-signature") ParamsDigest paramsDigest,
      @Nullable @QueryParam("currency") String currency)
      throws IOException, BitmexException;

  /** Gets the margin account status for all currencies. */
  // NOTE(review): the query string is embedded in @Path rather than passed via @QueryParam;
  // verify the rescu client encodes this as intended before changing it.
  @GET
  @Path("user/margin?currency=all")
  BitmexMarginAccountList getMarginAccountsStatus(
      @HeaderParam("api-key") String apiKey,
      @HeaderParam("api-expires") SynchronizedValueFactory<Long> nonce,
      @HeaderParam("api-signature") ParamsDigest paramsDigest)
      throws IOException, BitmexException;

  /** Gets the deposit address for the given {@code currency}. */
  @GET
  @Path("user/depositAddress")
  String getDepositAddress(
      @HeaderParam("api-key") String apiKey,
      @HeaderParam("api-expires") SynchronizedValueFactory<Long> nonce,
      @HeaderParam("api-signature") ParamsDigest paramsDigest,
      @QueryParam("currency") String currency)
      throws IOException, BitmexException;

  /** Requests a withdrawal of {@code amount} of {@code currency} to {@code address}. */
  @POST
  @Path("user/requestWithdrawal")
  BitmexWalletTransaction withdrawFunds(
      @HeaderParam("api-key") String apiKey,
      @HeaderParam("api-expires") SynchronizedValueFactory<Long> nonce,
      @HeaderParam("api-signature") ParamsDigest paramsDigest,
      @FormParam("currency") String currency,
      @FormParam("amount") BigDecimal amount,
      @FormParam("address") String address)
      throws IOException, BitmexException;
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.protocol.http;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Locale;
import javax.servlet.http.HttpServletRequest;
import org.apache.wicket.RestartResponseException;
import org.apache.wicket.core.request.handler.IPageRequestHandler;
import org.apache.wicket.core.request.handler.RenderPageRequestHandler;
import org.apache.wicket.request.IRequestHandler;
import org.apache.wicket.request.IRequestHandlerDelegate;
import org.apache.wicket.request.component.IRequestablePage;
import org.apache.wicket.request.cycle.AbstractRequestCycleListener;
import org.apache.wicket.request.cycle.IRequestCycleListener;
import org.apache.wicket.request.cycle.RequestCycle;
import org.apache.wicket.request.http.WebRequest;
import org.apache.wicket.request.http.flow.AbortWithHttpErrorCodeException;
import org.apache.wicket.util.lang.Checks;
import org.apache.wicket.util.string.Strings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Prevents CSRF attacks on Wicket components by checking the {@code Origin} and {@code Referer}
* HTTP headers for cross domain requests. By default only checks requests that try to perform an
* action on a component, such as a form submit, or link click.
* <p>
* <h3>Installation</h3>
* <p>
* You can enable this CSRF prevention filter by adding it to the request cycle listeners in your
* {@link WebApplication#init() application's init method}:
*
* <pre>
* @Override
* protected void init()
* {
* // ...
* getRequestCycleListeners().add(new CsrfPreventionRequestCycleListener());
* // ...
* }
* </pre>
* <p>
* <h3>Configuration</h3>
* <p>
* When the {@code Origin} or {@code Referer} HTTP header is present but doesn't match the requested
* URL this listener will by default throw a HTTP error ( {@code 400 BAD REQUEST}) and abort the
* request. You can {@link #setConflictingOriginAction(CsrfAction) configure} this specific action.
* <p>
* A missing {@code Origin} and {@code Referer} HTTP header is handled as if it were a bad request
* and rejected. You can {@link #setNoOriginAction(CsrfAction) configure the specific action} to a
* different value, suppressing or allowing the request when the HTTP headers are missing.
* <p>
* When the {@code Origin} HTTP header is present and has the value {@code null} it is considered to
* be from a "privacy-sensitive" context and will trigger the no origin action. You can customize
* what happens in those actions by overriding the respective {@code onXXXX} methods.
* <p>
* When you want to accept certain cross domain request from a range of hosts, you can
* {@link #addAcceptedOrigin(String) whitelist those domains}.
* <p>
* You can {@link #isEnabled() enable or disable} this listener by overriding {@link #isEnabled()}.
* <p>
* You can {@link #isChecked(IRequestablePage) customize} whether a particular page should be
* checked for CSRF requests. For example you can skip checking pages that have a
* {@code @NoCsrfCheck} annotation, or only those pages that extend your base secure page class. For
* example:
*
* <pre>
* @Override
* protected boolean isChecked(IRequestablePage requestedPage)
* {
* return requestedPage instanceof SecurePage;
* }
* </pre>
* <p>
* You can also tweak the request handlers that are checked. The CSRF prevention request cycle
* listener checks only action handlers, not render handlers. Override
* {@link #isChecked(IRequestHandler)} to customize this behavior.
* </p>
* <p>
* You can customize the default actions that are performed by overriding the event handlers for
* them:
* <ul>
* <li>{@link #onWhitelisted(HttpServletRequest, String, IRequestablePage)} when an origin was
* whitelisted</li>
* <li>{@link #onMatchingOrigin(HttpServletRequest, String, IRequestablePage)} when an origin was
* matching</li>
* <li>{@link #onAborted(HttpServletRequest, String, IRequestablePage)} when an origin was in
* conflict and the request should be aborted</li>
* <li>{@link #onAllowed(HttpServletRequest, String, IRequestablePage)} when an origin was in
* conflict and the request should be allowed</li>
* <li>{@link #onSuppressed(HttpServletRequest, String, IRequestablePage)} when an origin was in
* conflict and the request should be suppressed</li>
* </ul>
*/
public class CsrfPreventionRequestCycleListener implements IRequestCycleListener
{
private static final Logger log = LoggerFactory
.getLogger(CsrfPreventionRequestCycleListener.class);
/**
 * The action to perform when a missing or conflicting source URI is detected.
 */
public enum CsrfAction {
	/** Aborts the request and throws an exception when a CSRF request is detected. */
	ABORT {
		@Override
		public String toString()
		{
			// Lower-case past tense used in log messages, e.g. "request was aborted".
			return "aborted";
		}
	},
	/**
	 * Ignores the action of a CSRF request, and just renders the page it was targeted against.
	 */
	SUPPRESS {
		@Override
		public String toString()
		{
			return "suppressed";
		}
	},
	/** Detects a CSRF request, logs it and allows the request to continue. */
	ALLOW {
		@Override
		public String toString()
		{
			return "allowed";
		}
	},
}
/**
 * Action to perform when no Origin header is present in the request. Default {@code ABORT}.
 */
private CsrfAction noOriginAction = CsrfAction.ABORT;

/**
 * Action to perform when a conflicting Origin header is found. Default {@code ABORT}.
 */
private CsrfAction conflictingOriginAction = CsrfAction.ABORT;

/**
 * The error code to report when the action to take for a CSRF request is
 * {@link CsrfAction#ABORT}. Default {@code 400 BAD REQUEST}.
 */
private int errorCode = javax.servlet.http.HttpServletResponse.SC_BAD_REQUEST;

/**
 * The error message to report when the action to take for a CSRF request is {@code ERROR}.
 * Default {@code "Origin does not correspond to request"}.
 */
private String errorMessage = "Origin does not correspond to request";

/**
 * A white list of accepted origins (host names/domain names) presented as
 * &lt;domainname&gt;.&lt;TLD&gt;. The domain part can contain subdomains.
 */
private Collection<String> acceptedOrigins = new ArrayList<>();
/**
 * Sets the action when no Origin header is present in the request. Default {@code ABORT}
 * (matches the field initializer; the previous javadoc incorrectly said {@code ALLOW}).
 *
 * @param action
 *            the alternate action
 *
 * @return this (for chaining)
 */
public CsrfPreventionRequestCycleListener setNoOriginAction(CsrfAction action)
{
	this.noOriginAction = action;
	return this;
}
/**
 * Sets the action when a conflicting Origin header is detected. Default is {@code ABORT}
 * (matches the field initializer; the previous javadoc incorrectly said {@code ERROR}).
 *
 * @param action
 *            the alternate action
 *
 * @return this
 */
public CsrfPreventionRequestCycleListener setConflictingOriginAction(CsrfAction action)
{
	this.conflictingOriginAction = action;
	return this;
}
/**
 * Modifies the HTTP error code in the exception when a conflicting Origin header is detected
 * and the configured action is {@link CsrfAction#ABORT}.
 *
 * @param errorCode
 *            the alternate HTTP error code, default {@code 400 BAD REQUEST}
 *
 * @return this
 */
public CsrfPreventionRequestCycleListener setErrorCode(int errorCode)
{
	this.errorCode = errorCode;
	return this;
}
/**
 * Modifies the HTTP message in the exception when a conflicting Origin header is detected and
 * the configured action is {@link CsrfAction#ABORT}.
 *
 * @param errorMessage
 *            the alternate message
 *
 * @return this
 */
public CsrfPreventionRequestCycleListener setErrorMessage(String errorMessage)
{
	this.errorMessage = errorMessage;
	return this;
}
/**
 * Adds an origin (host name/domain name) to the white list. An origin is in the form of
 * &lt;domainname&gt;.&lt;TLD&gt;, and can contain a subdomain. Every Origin header that matches
 * a domain from the whitelist is accepted and not checked any further for CSRF issues.
 *
 * E.g. when {@code example.com} is in the white list, this allows requests from (i.e. with an
 * {@code Origin:} header containing) {@code example.com} and {@code blabla.example.com} but
 * rejects requests from {@code blablaexample.com} and {@code example2.com}.
 *
 * @param acceptedOrigin
 *            the acceptable origin
 * @return this
 */
public CsrfPreventionRequestCycleListener addAcceptedOrigin(String acceptedOrigin)
{
	Checks.notNull("acceptedOrigin", acceptedOrigin);

	// Strip any run of leading dot characters before storing the origin.
	acceptedOrigins.add(acceptedOrigin.replaceFirst("^\\.+", ""));
	return this;
}
@Override
public void onBeginRequest(RequestCycle cycle)
{
	// Only pay for the header lookup/normalization when debug logging is on.
	if (log.isDebugEnabled())
	{
		HttpServletRequest containerRequest = (HttpServletRequest)cycle.getRequest()
			.getContainerRequest();
		log.debug("Request Source URI: {}", getSourceUri(containerRequest));
	}
}
/**
 * Dynamic override for enabling/disabling the CSRF detection. Might be handy for specific
 * tenants in a multi-tenant application. When false, the CSRF detection is not performed for
 * the running request. Default {@code true}.
 *
 * @return {@code true} when the CSRF checks need to be performed.
 */
protected boolean isEnabled()
{
	return true;
}
/**
 * Override to limit whether the request to the specific page should be checked for a possible
 * CSRF attack. By default every targeted page is checked.
 *
 * @param targetedPage
 *            the page that is the target for the action
 * @return {@code true} when the request to the page should be checked for CSRF issues.
 */
protected boolean isChecked(IRequestablePage targetedPage)
{
	return true;
}
/**
 * Override to change the request handler types that are checked. Currently only action handlers
 * (form submits, link clicks, AJAX events) are checked for a matching Origin HTTP header.
 *
 * @param handler
 *            the handler that is currently processing
 * @return true when the Origin HTTP header should be checked for this {@code handler}
 */
protected boolean isChecked(IRequestHandler handler)
{
	// Only page-targeting handlers are candidates at all.
	if (!(handler instanceof IPageRequestHandler))
	{
		return false;
	}
	// Pure render requests are excluded: only actions are CSRF-checked.
	return !(handler instanceof RenderPageRequestHandler);
}
/**
 * Unwraps the handler if it is a {@code IRequestHandlerDelegate} down to the deepest nested
 * handler.
 *
 * @param handler
 *            The handler to unwrap
 * @return the deepest handler that does not implement {@code IRequestHandlerDelegate}
 */
protected IRequestHandler unwrap(IRequestHandler handler)
{
	IRequestHandler current = handler;
	while (current instanceof IRequestHandlerDelegate)
	{
		current = ((IRequestHandlerDelegate)current).getDelegateHandler();
	}
	return current;
}
@Override
public void onRequestHandlerResolved(RequestCycle cycle, IRequestHandler handler)
{
	// Global kill switch: subclasses can disable the whole check per request/tenant.
	if (!isEnabled())
	{
		log.trace("CSRF listener is disabled, no checks performed");
		return;
	}

	// Delegating handlers wrap the real target; check the innermost one.
	handler = unwrap(handler);

	// check if the request is targeted at a page
	if (isChecked(handler))
	{
		// Safe cast: isChecked(handler) only returns true for IPageRequestHandler.
		IPageRequestHandler prh = (IPageRequestHandler)handler;
		IRequestablePage targetedPage = prh.getPage();
		HttpServletRequest containerRequest = (HttpServletRequest)cycle.getRequest()
			.getContainerRequest();
		String sourceUri = getSourceUri(containerRequest);

		// Check if the page should be CSRF protected
		if (isChecked(targetedPage))
		{
			// if so check the Origin HTTP header
			checkRequest(containerRequest, sourceUri, targetedPage);
		}
		else
		{
			if (log.isDebugEnabled())
			{
				log.debug("Targeted page {} was opted out of the CSRF origin checks, allowed",
					targetedPage.getClass().getName());
			}
			allowHandler(containerRequest, sourceUri, targetedPage);
		}
	}
	else
	{
		if (log.isTraceEnabled())
			log.trace(
				"Resolved handler {} doesn't target an action on a page, no CSRF check performed",
				handler.getClass().getName());
	}
}
/**
 * Resolves the source URI from the request headers ({@code Origin} or {@code Referer}).
 *
 * @param containerRequest
 *            the current container request
 * @return the normalized source URI.
 */
protected String getSourceUri(HttpServletRequest containerRequest)
{
	// Prefer the Origin header; fall back to Referer when Origin is absent/empty.
	final String origin = containerRequest.getHeader(WebRequest.HEADER_ORIGIN);
	final String source = Strings.isEmpty(origin)
		? containerRequest.getHeader(WebRequest.HEADER_REFERER)
		: origin;
	return normalizeUri(source);
}
/**
 * Performs the check of the {@code Origin} or {@code Referer} header that is targeted at the
 * {@code page}: a missing source URI triggers {@code noOriginAction}, a whitelisted origin
 * is always allowed, and a mismatch with the request origin triggers
 * {@code conflictingOriginAction}.
 *
 * @param request
 *            the current container request
 * @param sourceUri
 *            the source URI
 * @param page
 *            the page that is the target of the request
 */
protected void checkRequest(HttpServletRequest request, String sourceUri, IRequestablePage page)
{
	if (sourceUri == null || sourceUri.isEmpty())
	{
		log.debug("Source URI not present in request, {}", noOriginAction);
		switch (noOriginAction)
		{
			case ALLOW :
				allowHandler(request, sourceUri, page);
				break;
			case SUPPRESS :
				suppressHandler(request, sourceUri, page);
				break;
			case ABORT :
				abortHandler(request, sourceUri, page);
				break;
		}
		return;
	}
	// use a fixed locale so the case mapping is not affected by the default locale
	// (e.g. the Turkish dotless-i rules); keeps this consistent with normalizeUri()
	sourceUri = sourceUri.toLowerCase(Locale.ENGLISH);
	// if the origin is a known and trusted origin, don't check any further but allow the request
	if (isWhitelistedHost(sourceUri))
	{
		whitelistedHandler(request, sourceUri, page);
		return;
	}
	// check if the origin HTTP header matches the request URI
	if (!isLocalOrigin(request, sourceUri))
	{
		log.debug("Source URI conflicts with request origin, {}", conflictingOriginAction);
		switch (conflictingOriginAction)
		{
			case ALLOW :
				allowHandler(request, sourceUri, page);
				break;
			case SUPPRESS :
				suppressHandler(request, sourceUri, page);
				break;
			case ABORT :
				abortHandler(request, sourceUri, page);
				break;
		}
	}
	else
	{
		matchingOrigin(request, sourceUri, page);
	}
}
/**
 * Checks whether the domain part of the {@code sourceUri} ({@code Origin} or {@code Referer}
 * header) is whitelisted, either as an exact (case insensitive) host match or as a
 * subdomain of a whitelisted origin.
 *
 * @param sourceUri
 *            the contents of the {@code Origin} or {@code Referer} HTTP header
 * @return {@code true} when the source domain was whitelisted
 */
protected boolean isWhitelistedHost(final String sourceUri)
{
	try
	{
		final String host = new URI(sourceUri).getHost();
		if (Strings.isEmpty(host))
		{
			return false;
		}
		for (String accepted : acceptedOrigins)
		{
			boolean exactMatch = host.equalsIgnoreCase(accepted);
			boolean subdomainMatch = host.endsWith("." + accepted);
			if (exactMatch || subdomainMatch)
			{
				log.trace("Origin {} matched whitelisted origin {}, request accepted",
					sourceUri, accepted);
				return true;
			}
		}
	}
	catch (URISyntaxException e)
	{
		// an unparseable origin cannot be whitelisted; later checks decide its fate
		log.debug("Origin: {} not parseable as an URI. Whitelisted-origin check skipped.",
			sourceUri);
	}
	return false;
}
/**
 * Checks whether the {@code Origin} HTTP header of the request matches where the request came
 * from, by comparing the RFC-6454 normalized forms of both sides.
 *
 * @param containerRequest
 *            the current container request
 * @param originHeader
 *            the contents of the {@code Origin} HTTP header
 * @return {@code true} when the origin of the request matches the {@code Origin} HTTP header
 */
protected boolean isLocalOrigin(HttpServletRequest containerRequest, String originHeader)
{
	// Make comparable strings from Origin and Location
	String normalizedOrigin = normalizeUri(originHeader);
	if (normalizedOrigin == null)
	{
		return false;
	}
	String normalizedTarget = getTargetUriFromRequest(containerRequest);
	return normalizedTarget != null && normalizedOrigin.equalsIgnoreCase(normalizedTarget);
}
/**
 * Creates a RFC-6454 comparable URI from the {@code uri} string.
 *
 * @param uri
 *            the contents of the Origin or Referer HTTP header
 * @return only the scheme://host[:port] part, or {@code null} when the URI string is not
 *         compliant
 */
protected final String normalizeUri(String uri)
{
	// the request comes from a privacy sensitive context, flag as non-local origin. If
	// alternative action is required, an implementor can override any of the onAborted,
	// onSuppressed or onAllowed and implement such needed action.
	// (browsers send the literal string "null" for such contexts)
	if (Strings.isEmpty(uri) || "null".equals(uri))
		return null;
	StringBuilder target = new StringBuilder();
	try
	{
		URI originUri = new URI(uri);
		String scheme = originUri.getScheme();
		if (scheme == null)
		{
			return null;
		}
		else
		{
			// locale-independent lowercasing: avoids default-locale case-mapping surprises
			scheme = scheme.toLowerCase(Locale.ENGLISH);
		}
		target.append(scheme);
		target.append("://");
		String host = originUri.getHost();
		if (host == null)
		{
			return null;
		}
		target.append(host);
		// -1 means no port was present in the URI; default ports (http:80, https:443)
		// are omitted from the normalized form, per RFC 6454 origin serialization
		int port = originUri.getPort();
		boolean portIsSpecified = port != -1;
		boolean isAlternateHttpPort = "http".equals(scheme) && port != 80;
		boolean isAlternateHttpsPort = "https".equals(scheme) && port != 443;
		if (portIsSpecified && (isAlternateHttpPort || isAlternateHttpsPort))
		{
			target.append(':');
			target.append(port);
		}
		return target.toString();
	}
	catch (URISyntaxException e)
	{
		// unparseable URIs are treated as non-local (conflicting) origins
		log.debug("Invalid URI provided: {}, marked conflicting", uri);
		return null;
	}
}
/**
 * Creates a RFC-6454 comparable URI from the {@code request} requested resource.
 *
 * @param request
 *            the incoming request
 * @return only the scheme://host[:port] part, or {@code null} when the origin string is not
 *         compliant
 */
protected final String getTargetUriFromRequest(HttpServletRequest request)
{
	// Build scheme://host:port from request
	String scheme = request.getScheme();
	if (scheme == null)
	{
		return null;
	}
	scheme = scheme.toLowerCase(Locale.ENGLISH);
	String host = request.getServerName();
	if (host == null)
	{
		return null;
	}
	StringBuilder target = new StringBuilder()
		.append(scheme)
		.append("://")
		.append(host);
	// default ports are left out of the normalized form
	int port = request.getServerPort();
	boolean nonDefaultHttp = "http".equals(scheme) && port != 80;
	boolean nonDefaultHttps = "https".equals(scheme) && port != 443;
	if (nonDefaultHttp || nonDefaultHttps)
	{
		target.append(':').append(port);
	}
	return target.toString();
}
/**
 * Handles the case where an origin is in the whitelist. Default action is to allow the
 * whitelisted origin, notifying {@link #onWhitelisted} first.
 *
 * @param request
 *            the request
 * @param origin
 *            the contents of the {@code Origin} HTTP header
 * @param page
 *            the page that is targeted with this request
 */
protected void whitelistedHandler(HttpServletRequest request, String origin,
	IRequestablePage page)
{
	onWhitelisted(request, origin, page);
	if (!log.isDebugEnabled())
	{
		return;
	}
	log.debug("CSRF Origin {} was whitelisted, allowed for page {}", origin,
		page.getClass().getName());
}
/**
 * Called when the origin was available in the whitelist. Override this method to implement your
 * own custom action. The default implementation does nothing.
 *
 * @param request
 *            the request
 * @param origin
 *            the contents of the {@code Origin} HTTP header
 * @param page
 *            the page that is targeted with this request
 */
protected void onWhitelisted(HttpServletRequest request, String origin, IRequestablePage page)
{
}
/**
 * Handles the case where an origin was checked and matched the request origin. Default action
 * is to allow the request, notifying {@link #onMatchingOrigin} first.
 *
 * @param request
 *            the request
 * @param origin
 *            the contents of the {@code Origin} HTTP header
 * @param page
 *            the page that is targeted with this request
 */
protected void matchingOrigin(HttpServletRequest request, String origin,
	IRequestablePage page)
{
	onMatchingOrigin(request, origin, page);
	if (!log.isDebugEnabled())
	{
		return;
	}
	log.debug("CSRF Origin {} matched requested resource, allowed for page {}", origin,
		page.getClass().getName());
}
/**
 * Called when the origin HTTP header matched the request. Override this method to implement
 * your own custom action. The default implementation does nothing.
 *
 * @param request
 *            the request
 * @param origin
 *            the contents of the {@code Origin} HTTP header
 * @param page
 *            the page that is targeted with this request
 */
protected void onMatchingOrigin(HttpServletRequest request, String origin,
	IRequestablePage page)
{
}
/**
 * Handles the case where an Origin HTTP header was not present or did not match the request
 * origin, and the corresponding action ({@link #noOriginAction} or
 * {@link #conflictingOriginAction}) is set to {@code ALLOW}.
 *
 * @param request
 *            the request
 * @param origin
 *            the contents of the {@code Origin} HTTP header, may be {@code null} or empty
 * @param page
 *            the page that is targeted with this request
 */
protected void allowHandler(HttpServletRequest request, String origin,
	IRequestablePage page)
{
	onAllowed(request, origin, page);
	// the request proceeds unchanged; the possible attack is only logged
	log.info("Possible CSRF attack, request URL: {}, Origin: {}, action: allowed",
		request.getRequestURL(), origin);
}
/**
 * Override this method to customize the case where an Origin HTTP header was not present or did
 * not match the request origin, and the corresponding action ({@link #noOriginAction} or
 * {@link #conflictingOriginAction}) is set to {@code ALLOW}. The default implementation does
 * nothing.
 *
 * @param request
 *            the request
 * @param origin
 *            the contents of the {@code Origin} HTTP header, may be {@code null} or empty
 * @param page
 *            the page that is targeted with this request
 */
protected void onAllowed(HttpServletRequest request, String origin, IRequestablePage page)
{
}
/**
 * Handles the case where an Origin HTTP header was not present or did not match the request
 * origin, and the corresponding action ({@link #noOriginAction} or
 * {@link #conflictingOriginAction}) is set to {@code SUPPRESS}.
 *
 * @param request
 *            the request
 * @param origin
 *            the contents of the {@code Origin} HTTP header, may be {@code null} or empty
 * @param page
 *            the page that is targeted with this request
 */
protected void suppressHandler(HttpServletRequest request, String origin,
	IRequestablePage page)
{
	onSuppressed(request, origin, page);
	log.info("Possible CSRF attack, request URL: {}, Origin: {}, action: suppressed",
		request.getRequestURL(), origin);
	// ends this request cycle and responds with the targeted page instead of
	// performing the requested action
	throw new RestartResponseException(page);
}
/**
 * Override this method to customize the case where an Origin HTTP header was not present or did
 * not match the request origin, and the corresponding action ({@link #noOriginAction} or
 * {@link #conflictingOriginAction}) is set to {@code SUPPRESS}. The default implementation
 * does nothing.
 *
 * @param request
 *            the request
 * @param origin
 *            the contents of the {@code Origin} HTTP header, may be {@code null} or empty
 * @param page
 *            the page that is targeted with this request
 */
protected void onSuppressed(HttpServletRequest request, String origin, IRequestablePage page)
{
}
/**
 * Handles the case where an Origin HTTP header was not present or did not match the request
 * origin, and the corresponding action ({@link #noOriginAction} or
 * {@link #conflictingOriginAction}) is set to {@code ABORT}.
 *
 * @param request
 *            the request
 * @param origin
 *            the contents of the {@code Origin} HTTP header, may be {@code null} or empty
 * @param page
 *            the page that is targeted with this request
 */
protected void abortHandler(HttpServletRequest request, String origin,
	IRequestablePage page)
{
	onAborted(request, origin, page);
	log.info(
		"Possible CSRF attack, request URL: {}, Origin: {}, action: aborted with error {} {}",
		request.getRequestURL(), origin, errorCode, errorMessage);
	// terminates request processing with the configured HTTP error code and message
	throw new AbortWithHttpErrorCodeException(errorCode, errorMessage);
}
/**
 * Override this method to customize the case where an Origin HTTP header was not present or did
 * not match the request origin, and the corresponding action ({@link #noOriginAction} or
 * {@link #conflictingOriginAction}) is set to {@code ABORT}. The default implementation does
 * nothing.
 *
 * @param request
 *            the request
 * @param origin
 *            the contents of the {@code Origin} HTTP header, may be {@code null} or empty
 * @param page
 *            the page that is targeted with this request
 */
protected void onAborted(HttpServletRequest request, String origin, IRequestablePage page)
{
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.state.internals;
import org.apache.kafka.common.header.internals.RecordHeaders;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.kstream.Windowed;
import org.apache.kafka.streams.kstream.internals.SessionWindow;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.processor.StateStoreContext;
import org.apache.kafka.streams.processor.TaskId;
import org.apache.kafka.streams.processor.api.RecordMetadata;
import org.apache.kafka.streams.processor.internals.ProcessorContextImpl;
import org.apache.kafka.streams.processor.internals.ProcessorRecordContext;
import org.apache.kafka.streams.query.Position;
import org.apache.kafka.streams.state.SessionStore;
import org.apache.kafka.test.MockRecordCollector;
import org.easymock.EasyMock;
import org.easymock.EasyMockRunner;
import org.easymock.Mock;
import org.easymock.MockType;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.Optional;
import static org.apache.kafka.common.utils.Utils.mkEntry;
import static org.apache.kafka.common.utils.Utils.mkMap;
/**
 * Unit tests for {@code ChangeLoggingSessionBytesStore}: verifies that writes (put/remove)
 * are forwarded to the inner {@link SessionStore} and logged to the changelog via
 * {@code context.logChange}, while reads are pure delegation to the inner store.
 */
@RunWith(EasyMockRunner.class)
public class ChangeLoggingSessionBytesStoreTest {

    private final TaskId taskId = new TaskId(0, 0);
    private final MockRecordCollector collector = new MockRecordCollector();

    // NICE mocks return defaults for unexpected calls instead of failing
    @Mock(type = MockType.NICE)
    private SessionStore<Bytes, byte[]> inner;
    @Mock(type = MockType.NICE)
    private ProcessorContextImpl context;

    private ChangeLoggingSessionBytesStore store;

    private final byte[] value1 = {0};
    private final Bytes bytesKey = Bytes.wrap(value1);
    private final Windowed<Bytes> key1 = new Windowed<>(bytesKey, new SessionWindow(0, 0));
    private final static Position POSITION = Position.fromMap(mkMap(mkEntry("", mkMap(mkEntry(0, 1L)))));

    @Before
    public void setUp() {
        store = new ChangeLoggingSessionBytesStore(inner);
    }

    /**
     * Wires the mocked context (task id, record collector, empty record metadata), replays
     * the mocks, and initializes the store under test. Tests set their own inner-store
     * expectations BEFORE calling this, since replay happens here.
     */
    private void init() {
        EasyMock.expect(context.taskId()).andReturn(taskId).anyTimes();
        EasyMock.expect(context.recordCollector()).andReturn(collector).anyTimes();
        EasyMock.expect(context.recordMetadata()).andReturn(Optional.empty()).anyTimes();
        inner.init((StateStoreContext) context, store);
        EasyMock.expectLastCall();
        EasyMock.replay(inner, context);
        store.init((StateStoreContext) context, store);
    }

    @SuppressWarnings("deprecation")
    @Test
    public void shouldDelegateDeprecatedInit() {
        // the old ProcessorContext-based init must still forward to the inner store
        inner.init((ProcessorContext) context, store);
        EasyMock.expectLastCall();
        EasyMock.replay(inner);
        store.init((ProcessorContext) context, store);
        EasyMock.verify(inner);
    }

    @Test
    public void shouldDelegateInit() {
        inner.init((StateStoreContext) context, store);
        EasyMock.expectLastCall();
        EasyMock.replay(inner);
        store.init((StateStoreContext) context, store);
        EasyMock.verify(inner);
    }

    @Test
    public void shouldLogPuts() {
        EasyMock.expect(inner.getPosition()).andReturn(Position.emptyPosition()).anyTimes();
        inner.put(key1, value1);
        EasyMock.expectLastCall();

        init();

        // the changelog key is the binary session-key encoding of the windowed key
        final Bytes binaryKey = SessionKeySchema.toBinary(key1);

        EasyMock.reset(context);
        EasyMock.expect(context.recordMetadata()).andStubReturn(Optional.empty());
        context.logChange(store.name(), binaryKey, value1, 0L, Position.emptyPosition());

        EasyMock.replay(context);
        store.put(key1, value1);

        EasyMock.verify(inner, context);
    }

    @Test
    public void shouldLogPutsWithPosition() {
        EasyMock.expect(inner.getPosition()).andReturn(POSITION).anyTimes();
        inner.put(key1, value1);
        EasyMock.expectLastCall();

        init();

        final Bytes binaryKey = SessionKeySchema.toBinary(key1);

        EasyMock.reset(context);
        // with record metadata present, the inner store's position is logged
        final RecordMetadata recordContext = new ProcessorRecordContext(0L, 1L, 0, "", new RecordHeaders());
        EasyMock.expect(context.recordMetadata()).andStubReturn(Optional.of(recordContext));
        EasyMock.expect(context.timestamp()).andStubReturn(0L);
        context.logChange(store.name(), binaryKey, value1, 0L, POSITION);

        EasyMock.replay(context);
        store.put(key1, value1);

        EasyMock.verify(inner, context);
    }

    @Test
    public void shouldLogRemoves() {
        EasyMock.expect(inner.getPosition()).andReturn(Position.emptyPosition()).anyTimes();
        inner.remove(key1);
        EasyMock.expectLastCall();

        init();
        // NOTE(review): remove() is invoked here and again after the expectations are
        // reset below — confirm the first invocation is intentional and not a leftover.
        store.remove(key1);

        final Bytes binaryKey = SessionKeySchema.toBinary(key1);

        EasyMock.reset(context);
        EasyMock.expect(context.recordMetadata()).andStubReturn(Optional.empty());
        // a removal is logged as a tombstone (null value)
        context.logChange(store.name(), binaryKey, null, 0L, Position.emptyPosition());

        EasyMock.replay(context);
        store.remove(key1);

        EasyMock.verify(inner, context);
    }

    @Test
    public void shouldDelegateToUnderlyingStoreWhenFetching() {
        EasyMock.expect(inner.fetch(bytesKey)).andReturn(KeyValueIterators.emptyIterator());

        init();

        store.fetch(bytesKey);
        EasyMock.verify(inner);
    }

    @Test
    public void shouldDelegateToUnderlyingStoreWhenBackwardFetching() {
        EasyMock.expect(inner.backwardFetch(bytesKey)).andReturn(KeyValueIterators.emptyIterator());

        init();

        store.backwardFetch(bytesKey);
        EasyMock.verify(inner);
    }

    @Test
    public void shouldDelegateToUnderlyingStoreWhenFetchingRange() {
        EasyMock.expect(inner.fetch(bytesKey, bytesKey)).andReturn(KeyValueIterators.emptyIterator());

        init();

        store.fetch(bytesKey, bytesKey);
        EasyMock.verify(inner);
    }

    @Test
    public void shouldDelegateToUnderlyingStoreWhenBackwardFetchingRange() {
        EasyMock.expect(inner.backwardFetch(bytesKey, bytesKey)).andReturn(KeyValueIterators.emptyIterator());

        init();

        store.backwardFetch(bytesKey, bytesKey);
        EasyMock.verify(inner);
    }

    @Test
    public void shouldDelegateToUnderlyingStoreWhenFindingSessions() {
        EasyMock.expect(inner.findSessions(bytesKey, 0, 1)).andReturn(KeyValueIterators.emptyIterator());

        init();

        store.findSessions(bytesKey, 0, 1);
        EasyMock.verify(inner);
    }

    @Test
    public void shouldDelegateToUnderlyingStoreWhenBackwardFindingSessions() {
        EasyMock.expect(inner.backwardFindSessions(bytesKey, 0, 1)).andReturn(KeyValueIterators.emptyIterator());

        init();

        store.backwardFindSessions(bytesKey, 0, 1);
        EasyMock.verify(inner);
    }

    @Test
    public void shouldDelegateToUnderlyingStoreWhenFindingSessionRange() {
        EasyMock.expect(inner.findSessions(bytesKey, bytesKey, 0, 1)).andReturn(KeyValueIterators.emptyIterator());

        init();

        store.findSessions(bytesKey, bytesKey, 0, 1);
        EasyMock.verify(inner);
    }

    @Test
    public void shouldDelegateToUnderlyingStoreWhenBackwardFindingSessionRange() {
        EasyMock.expect(inner.backwardFindSessions(bytesKey, bytesKey, 0, 1)).andReturn(KeyValueIterators.emptyIterator());

        init();

        store.backwardFindSessions(bytesKey, bytesKey, 0, 1);
        EasyMock.verify(inner);
    }

    @Test
    public void shouldFlushUnderlyingStore() {
        inner.flush();
        EasyMock.expectLastCall();

        init();

        store.flush();
        EasyMock.verify(inner);
    }

    @Test
    public void shouldCloseUnderlyingStore() {
        inner.close();
        EasyMock.expectLastCall();

        init();

        store.close();
        EasyMock.verify(inner);
    }
}
|
|
package org.houxg.pixiurss;
import java.util.List;
import java.util.ArrayList;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteStatement;
import de.greenrobot.dao.AbstractDao;
import de.greenrobot.dao.Property;
import de.greenrobot.dao.internal.SqlUtils;
import de.greenrobot.dao.internal.DaoConfig;
import de.greenrobot.dao.query.Query;
import de.greenrobot.dao.query.QueryBuilder;
import org.houxg.pixiurss.Article;
// THIS CODE IS GENERATED BY greenDAO, DO NOT EDIT.
/**
* DAO for table "ARTICLE".
*/
public class ArticleDao extends AbstractDao<Article, Long> {

    public static final String TABLENAME = "ARTICLE";

    /**
     * Properties of entity Article.<br/>
     * Can be used for QueryBuilder and for referencing column names.
     */
    public static class Properties {
        public final static Property Id = new Property(0, Long.class, "id", true, "_id");
        public final static Property Title = new Property(1, String.class, "title", false, "TITLE");
        public final static Property Link = new Property(2, String.class, "link", false, "LINK");
        public final static Property PubTime = new Property(3, Long.class, "pubTime", false, "PUB_TIME");
        public final static Property Desc = new Property(4, String.class, "desc", false, "DESC");
        public final static Property SourceId = new Property(5, Long.class, "SourceId", false, "SOURCE_ID");
    };

    private DaoSession daoSession;

    // cached, lazily-built query for Source.articleList; parameter is bound per call
    private Query<Article> source_ArticleListQuery;

    public ArticleDao(DaoConfig config) {
        super(config);
    }

    public ArticleDao(DaoConfig config, DaoSession daoSession) {
        super(config, daoSession);
        this.daoSession = daoSession;
    }

    /** Creates the underlying database table. */
    public static void createTable(SQLiteDatabase db, boolean ifNotExists) {
        String constraint = ifNotExists? "IF NOT EXISTS ": "";
        db.execSQL("CREATE TABLE " + constraint + "\"ARTICLE\" (" + //
                "\"_id\" INTEGER PRIMARY KEY AUTOINCREMENT ," + // 0: id
                "\"TITLE\" TEXT," + // 1: title
                "\"LINK\" TEXT UNIQUE ," + // 2: link
                "\"PUB_TIME\" INTEGER," + // 3: pubTime
                "\"DESC\" TEXT," + // 4: desc
                "\"SOURCE_ID\" INTEGER);"); // 5: SourceId
    }

    /** Drops the underlying database table. */
    public static void dropTable(SQLiteDatabase db, boolean ifExists) {
        String sql = "DROP TABLE " + (ifExists ? "IF EXISTS " : "") + "\"ARTICLE\"";
        db.execSQL(sql);
    }

    /** @inheritdoc */
    @Override
    protected void bindValues(SQLiteStatement stmt, Article entity) {
        // bind indices are 1-based; null fields are simply left unbound
        stmt.clearBindings();

        Long id = entity.getId();
        if (id != null) {
            stmt.bindLong(1, id);
        }

        String title = entity.getTitle();
        if (title != null) {
            stmt.bindString(2, title);
        }

        String link = entity.getLink();
        if (link != null) {
            stmt.bindString(3, link);
        }

        Long pubTime = entity.getPubTime();
        if (pubTime != null) {
            stmt.bindLong(4, pubTime);
        }

        String desc = entity.getDesc();
        if (desc != null) {
            stmt.bindString(5, desc);
        }

        Long SourceId = entity.getSourceId();
        if (SourceId != null) {
            stmt.bindLong(6, SourceId);
        }
    }

    @Override
    protected void attachEntity(Article entity) {
        super.attachEntity(entity);
        // gives the entity access to the session for lazy relation loading
        entity.__setDaoSession(daoSession);
    }

    /** @inheritdoc */
    @Override
    public Long readKey(Cursor cursor, int offset) {
        return cursor.isNull(offset + 0) ? null : cursor.getLong(offset + 0);
    }

    /** @inheritdoc */
    @Override
    public Article readEntity(Cursor cursor, int offset) {
        Article entity = new Article( //
            cursor.isNull(offset + 0) ? null : cursor.getLong(offset + 0), // id
            cursor.isNull(offset + 1) ? null : cursor.getString(offset + 1), // title
            cursor.isNull(offset + 2) ? null : cursor.getString(offset + 2), // link
            cursor.isNull(offset + 3) ? null : cursor.getLong(offset + 3), // pubTime
            cursor.isNull(offset + 4) ? null : cursor.getString(offset + 4), // desc
            cursor.isNull(offset + 5) ? null : cursor.getLong(offset + 5) // SourceId
        );
        return entity;
    }

    /** @inheritdoc */
    @Override
    public void readEntity(Cursor cursor, Article entity, int offset) {
        entity.setId(cursor.isNull(offset + 0) ? null : cursor.getLong(offset + 0));
        entity.setTitle(cursor.isNull(offset + 1) ? null : cursor.getString(offset + 1));
        entity.setLink(cursor.isNull(offset + 2) ? null : cursor.getString(offset + 2));
        entity.setPubTime(cursor.isNull(offset + 3) ? null : cursor.getLong(offset + 3));
        entity.setDesc(cursor.isNull(offset + 4) ? null : cursor.getString(offset + 4));
        entity.setSourceId(cursor.isNull(offset + 5) ? null : cursor.getLong(offset + 5));
    }

    /** @inheritdoc */
    @Override
    protected Long updateKeyAfterInsert(Article entity, long rowId) {
        // SQLite assigns the AUTOINCREMENT row id on insert; propagate it to the entity
        entity.setId(rowId);
        return rowId;
    }

    /** @inheritdoc */
    @Override
    public Long getKey(Article entity) {
        if(entity != null) {
            return entity.getId();
        } else {
            return null;
        }
    }

    /** @inheritdoc */
    @Override
    protected boolean isEntityUpdateable() {
        return true;
    }

    /** Internal query to resolve the "articleList" to-many relationship of Source. */
    public List<Article> _querySource_ArticleList(Long SourceId) {
        synchronized (this) {
            if (source_ArticleListQuery == null) {
                QueryBuilder<Article> queryBuilder = queryBuilder();
                // null here is only a placeholder; the real value is bound below
                // via setParameter(0, SourceId) on the thread-local copy
                queryBuilder.where(Properties.SourceId.eq(null));
                queryBuilder.orderRaw("T.'PUB_TIME' DESC");
                source_ArticleListQuery = queryBuilder.build();
            }
        }
        Query<Article> query = source_ArticleListQuery.forCurrentThread();
        query.setParameter(0, SourceId);
        return query.list();
    }

    private String selectDeep;

    /** Builds (once) the SELECT joining ARTICLE with its SOURCE for deep loading. */
    protected String getSelectDeep() {
        if (selectDeep == null) {
            StringBuilder builder = new StringBuilder("SELECT ");
            SqlUtils.appendColumns(builder, "T", getAllColumns());
            builder.append(',');
            SqlUtils.appendColumns(builder, "T0", daoSession.getSourceDao().getAllColumns());
            builder.append(" FROM ARTICLE T");
            builder.append(" LEFT JOIN SOURCE T0 ON T.\"SOURCE_ID\"=T0.\"_id\"");
            builder.append(' ');
            selectDeep = builder.toString();
        }
        return selectDeep;
    }

    /** Reads the Article at the cursor's current row together with its joined Source. */
    protected Article loadCurrentDeep(Cursor cursor, boolean lock) {
        Article entity = loadCurrent(cursor, 0, lock);
        int offset = getAllColumns().length;

        Source source = loadCurrentOther(daoSession.getSourceDao(), cursor, offset);
        entity.setSource(source);

        return entity;
    }

    /** Loads a single Article (with its Source) by primary key, or null if absent. */
    public Article loadDeep(Long key) {
        assertSinglePk();
        if (key == null) {
            return null;
        }

        StringBuilder builder = new StringBuilder(getSelectDeep());
        builder.append("WHERE ");
        SqlUtils.appendColumnsEqValue(builder, "T", getPkColumns());
        String sql = builder.toString();

        String[] keyArray = new String[] { key.toString() };
        Cursor cursor = db.rawQuery(sql, keyArray);

        try {
            boolean available = cursor.moveToFirst();
            if (!available) {
                return null;
            } else if (!cursor.isLast()) {
                // a PK lookup must yield at most one row
                throw new IllegalStateException("Expected unique result, but count was " + cursor.getCount());
            }
            return loadCurrentDeep(cursor, true);
        } finally {
            cursor.close();
        }
    }

    /** Reads all available rows from the given cursor and returns a list of new ImageTO objects. */
    public List<Article> loadAllDeepFromCursor(Cursor cursor) {
        int count = cursor.getCount();
        List<Article> list = new ArrayList<Article>(count);

        if (cursor.moveToFirst()) {
            if (identityScope != null) {
                identityScope.lock();
                identityScope.reserveRoom(count);
            }
            try {
                do {
                    list.add(loadCurrentDeep(cursor, false));
                } while (cursor.moveToNext());
            } finally {
                if (identityScope != null) {
                    identityScope.unlock();
                }
            }
        }
        return list;
    }

    protected List<Article> loadDeepAllAndCloseCursor(Cursor cursor) {
        try {
            return loadAllDeepFromCursor(cursor);
        } finally {
            cursor.close();
        }
    }

    /**
     * A raw-style query where you can pass any WHERE clause and arguments.
     * NOTE(review): {@code where} is concatenated into raw SQL — callers must not pass
     * untrusted input there; use {@code selectionArg} placeholders for values.
     */
    public List<Article> queryDeep(String where, String... selectionArg) {
        Cursor cursor = db.rawQuery(getSelectDeep() + where, selectionArg);
        return loadDeepAllAndCloseCursor(cursor);
    }

}
|
|
/*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.ddmlib;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
/**
* Log class that mirrors the API in main Android sources.
* <p/>Default behavior outputs the log to {@link System#out}. Use
* {@link #setLogOutput(com.android.ddmlib.Log.ILogOutput)} to redirect the log somewhere else.
*/
public final class Log {
/**
* Log Level enum.
*/
/**
 * Log Level enum.
 * <p/>Each level carries its numeric priority, a non-translated name and the single
 * letter used to identify it in log output.
 */
public enum LogLevel {
    VERBOSE(2, "verbose", 'V'), //$NON-NLS-1$
    DEBUG(3, "debug", 'D'), //$NON-NLS-1$
    INFO(4, "info", 'I'), //$NON-NLS-1$
    WARN(5, "warn", 'W'), //$NON-NLS-1$
    ERROR(6, "error", 'E'), //$NON-NLS-1$
    ASSERT(7, "assert", 'A'); //$NON-NLS-1$

    // immutable per-constant attributes (final: they are assigned once in the ctor)
    private final int mPriorityLevel;
    private final String mStringValue;
    private final char mPriorityLetter;

    LogLevel(int intPriority, String stringValue, char priorityChar) {
        mPriorityLevel = intPriority;
        mStringValue = stringValue;
        mPriorityLetter = priorityChar;
    }

    /**
     * Returns the {@link LogLevel} enum matching the specified name.
     * @param value the string name of a <code>LogLevel</code> (e.g. "debug")
     * @return a <code>LogLevel</code> object or <code>null</code> if no match were found.
     */
    public static LogLevel getByString(String value) {
        for (LogLevel mode : values()) {
            if (mode.mStringValue.equals(value)) {
                return mode;
            }
        }

        return null;
    }

    /**
     * Returns the {@link LogLevel} enum matching the specified letter.
     * @param letter the letter matching a <code>LogLevel</code> enum
     * @return a <code>LogLevel</code> object or <code>null</code> if no match were found.
     */
    public static LogLevel getByLetter(char letter) {
        for (LogLevel mode : values()) {
            if (mode.mPriorityLetter == letter) {
                return mode;
            }
        }

        return null;
    }

    /**
     * Returns the {@link LogLevel} enum matching the specified letter.
     * <p/>
     * The letter is passed as a {@link String} argument, but only the first character
     * is used.
     * @param letter the letter matching a <code>LogLevel</code> enum
     * @return a <code>LogLevel</code> object or <code>null</code> if no match were found.
     */
    public static LogLevel getByLetterString(String letter) {
        if (!letter.isEmpty()) {
            return getByLetter(letter.charAt(0));
        }

        return null;
    }

    /**
     * Returns the letter identifying the priority of the {@link LogLevel}.
     */
    public char getPriorityLetter() {
        return mPriorityLetter;
    }

    /**
     * Returns the numerical value of the priority.
     */
    public int getPriority() {
        return mPriorityLevel;
    }

    /**
     * Returns a non translated string representing the LogLevel.
     */
    public String getStringValue() {
        return mStringValue;
    }
}
/**
 * Classes which implement this interface provide methods that deal with outputting log
 * messages.
 */
public interface ILogOutput {
    /**
     * Sent when a log message needs to be printed.
     * @param logLevel The {@link LogLevel} enum representing the priority of the message.
     * @param tag The tag associated with the message.
     * @param message The message to display.
     */
    void printLog(LogLevel logLevel, String tag, String message);

    /**
     * Sent when a log message needs to be printed, and, if possible, displayed to the user
     * in a dialog box.
     * @param logLevel The {@link LogLevel} enum representing the priority of the message.
     * @param tag The tag associated with the message.
     * @param message The message to display.
     */
    void printAndPromptLog(LogLevel logLevel, String tag, String message);
}
// current log level, seeded from the preferences
private static LogLevel sLevel = DdmPreferences.getLogLevel();

// destination for log output; when null, output goes to System.out (see class javadoc)
private static ILogOutput sLogOutput;

// template line for hex dumps: "0000-" followed by spaces (filled by the static block)
private static final char[] mSpaceLine = new char[72];
private static final char[] mHexDigit = new char[]
    { '0','1','2','3','4','5','6','7','8','9','a','b','c','d','e','f' };

static {
    /* prep for hex dump */
    int i = mSpaceLine.length-1;
    while (i >= 0)
        mSpaceLine[i--] = ' ';
    mSpaceLine[0] = mSpaceLine[1] = mSpaceLine[2] = mSpaceLine[3] = '0';
    mSpaceLine[4] = '-';
}

// compile-time flags; presumably guard verbose/debug call sites elsewhere — TODO confirm
static final class Config {
    static final boolean LOGV = true;
    static final boolean LOGD = true;
}

// utility class: not instantiable
private Log() {}
/**
 * Outputs a {@link LogLevel#VERBOSE} level message. Delegates to {@code println}.
 * @param tag The tag associated with the message.
 * @param message The message to output.
 */
public static void v(String tag, String message) {
    println(LogLevel.VERBOSE, tag, message);
}

/**
 * Outputs a {@link LogLevel#DEBUG} level message. Delegates to {@code println}.
 * @param tag The tag associated with the message.
 * @param message The message to output.
 */
public static void d(String tag, String message) {
    println(LogLevel.DEBUG, tag, message);
}

/**
 * Outputs a {@link LogLevel#INFO} level message. Delegates to {@code println}.
 * @param tag The tag associated with the message.
 * @param message The message to output.
 */
public static void i(String tag, String message) {
    println(LogLevel.INFO, tag, message);
}

/**
 * Outputs a {@link LogLevel#WARN} level message. Delegates to {@code println}.
 * @param tag The tag associated with the message.
 * @param message The message to output.
 */
public static void w(String tag, String message) {
    println(LogLevel.WARN, tag, message);
}

/**
 * Outputs a {@link LogLevel#ERROR} level message. Delegates to {@code println}.
 * @param tag The tag associated with the message.
 * @param message The message to output.
 */
public static void e(String tag, String message) {
    println(LogLevel.ERROR, tag, message);
}
/**
 * Outputs a log message and attempts to display it in a dialog (via the registered
 * {@link ILogOutput}); falls back to a plain {@code println} when no output is set.
 * @param logLevel The {@link LogLevel} enum representing the priority of the message.
 * @param tag The tag associated with the message.
 * @param message The message to output.
 */
public static void logAndDisplay(LogLevel logLevel, String tag, String message) {
    if (sLogOutput != null) {
        sLogOutput.printAndPromptLog(logLevel, tag, message);
    } else {
        println(logLevel, tag, message);
    }
}
/**
 * Outputs a {@link LogLevel#ERROR} level {@link Throwable}'s message together with
 * its full stack trace.
 *
 * @param tag The tag associated with the message.
 * @param throwable The {@link Throwable} to output; ignored when {@code null}.
 */
public static void e(String tag, Throwable throwable) {
    if (throwable == null) {
        return;
    }
    StringWriter stackTrace = new StringWriter();
    throwable.printStackTrace(new PrintWriter(stackTrace));
    println(LogLevel.ERROR, tag, throwable.getMessage() + '\n' + stackTrace.toString());
}
/**
 * Sets the minimum {@link LogLevel} a message must have to be printed.
 *
 * @param logLevel the new log level threshold.
 */
static void setLevel(LogLevel logLevel) {
    sLevel = logLevel;
}

/**
 * Sets the {@link ILogOutput} to use to print the logs. If not set, {@link System#out}
 * will be used.
 *
 * @param logOutput The {@link ILogOutput} to use to print the log.
 */
public static void setLogOutput(ILogOutput logOutput) {
    sLogOutput = logOutput;
}
/**
 * Dumps {@code length} bytes of {@code data}, starting at {@code offset}, as hex.
 * <p/>
 * Local addition. Output looks like:
 * 1230- 00 11 22 33 44 55 66 77 88 99 aa bb cc dd ee ff 0123456789abcdef
 * <p/>
 * Uses no string concatenation; creates one String object per line.
 *
 * @param tag the tag associated with each emitted line
 * @param level the log level each line is printed at
 * @param data the byte array to dump
 * @param offset index of the first byte to dump
 * @param length number of bytes to dump
 */
static void hexDump(String tag, LogLevel level, byte[] data, int offset, int length) {
// column where the hex bytes start / column where the ASCII rendering starts
int kHexOffset = 6;
int kAscOffset = 55;
// working copy of the 72-char blank template; reused for every output line
char[] line = new char[mSpaceLine.length];
int addr, baseAddr, count;
int i, ch;
// true when the line buffer must be re-initialized from the template before
// writing: always for the first line, and again for a trailing partial line so
// that leftovers from the previous full line are cleared
boolean needErase = true;
//Log.w(tag, "HEX DUMP: off=" + offset + ", length=" + length);
baseAddr = 0;
while (length != 0) {
if (length > 16) {
// full line
count = 16;
} else {
// partial line; re-copy blanks to clear end
count = length;
needErase = true;
}
if (needErase) {
System.arraycopy(mSpaceLine, 0, line, 0, mSpaceLine.length);
needErase = false;
}
// output the address (currently limited to 4 hex digits)
// the template pre-fills "0000-", so only the non-zero digits are written here,
// from the least significant digit (column 3) leftwards
addr = baseAddr;
addr &= 0xffff;
ch = 3;
while (addr != 0) {
line[ch] = mHexDigit[addr & 0x0f];
ch--;
addr >>>= 4;
}
// output hex digits and ASCII chars
// each byte takes two hex chars plus one trailing space; printable bytes
// (0x20..0x7e) are echoed in the ASCII column, everything else shows as '.'
ch = kHexOffset;
for (i = 0; i < count; i++) {
byte val = data[offset + i];
line[ch++] = mHexDigit[(val >>> 4) & 0x0f];
line[ch++] = mHexDigit[val & 0x0f];
ch++;
if (val >= 0x20 && val < 0x7f)
line[kAscOffset + i] = (char) val;
else
line[kAscOffset + i] = '.';
}
println(level, tag, new String(line));
// advance to next chunk of data
length -= count;
offset += count;
baseAddr += count;
}
}
/**
 * Dumps the whole of {@code data} with DEBUG priority under the "ddms" tag.
 *
 * @param data the byte array to dump in full
 */
static void hexDump(byte[] data) {
    hexDump("ddms", LogLevel.DEBUG, data, 0, data.length);
}
/*
 * Routes a message to the registered ILogOutput, or to stdout when none is set.
 * Currently prints to stdout; could write to a log window instead.
 */
private static void println(LogLevel logLevel, String tag, String message) {
    if (logLevel.getPriority() < sLevel.getPriority()) {
        return; // below the configured threshold; drop silently
    }
    if (sLogOutput != null) {
        sLogOutput.printLog(logLevel, tag, message);
    } else {
        printLog(logLevel, tag, message);
    }
}
/**
 * Prints a formatted log message to {@link System#out}.
 *
 * @param logLevel the level of the message
 * @param tag the tag associated with the message
 * @param message the message to print
 */
public static void printLog(LogLevel logLevel, String tag, String message) {
    String formatted = getLogFormatString(logLevel, tag, message);
    System.out.print(formatted);
}

/**
 * Formats a log message as {@code "hh:mm:ss P/tag: message\n"}, where {@code P}
 * is the single-character priority letter of the level.
 *
 * @param logLevel the level whose priority letter prefixes the message
 * @param tag the tag associated with the message
 * @param message the message body
 * @return the fully formatted, newline-terminated log line
 */
public static String getLogFormatString(LogLevel logLevel, String tag, String message) {
    SimpleDateFormat timeFormat = new SimpleDateFormat("hh:mm:ss", Locale.getDefault());
    String timestamp = timeFormat.format(new Date());
    return String.format("%s %c/%s: %s\n", timestamp, logLevel.getPriorityLetter(), tag, message);
}
}
|
|
/**
*
* Copyright the original author or authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smackx.bytestreams.socks5;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketException;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.jivesoftware.smack.SmackException;
/**
 * The Socks5Proxy class represents a local SOCKS5 proxy server. It can be enabled/disabled by
 * invoking {@link #setLocalSocks5ProxyEnabled(boolean)}. The proxy is enabled by default.
 * <p>
 * The port of the local SOCKS5 proxy can be configured by invoking
 * {@link #setLocalSocks5ProxyPort(int)}. Default port is 7777. If you set the port to a negative
 * value Smack tries the absolute value and all following until it finds an open port.
 * <p>
 * If your application is running on a machine with multiple network interfaces or if you want to
 * provide your public address in case you are behind a NAT router, invoke
 * {@link #addLocalAddress(String)} or {@link #replaceLocalAddresses(Collection)} to modify the list of
 * local network addresses used for outgoing SOCKS5 Bytestream requests.
 * <p>
 * The local SOCKS5 proxy server refuses all connections except the ones that are explicitly allowed
 * in the process of establishing a SOCKS5 Bytestream (
 * {@link Socks5BytestreamManager#establishSession(String)}).
 * <p>
 * This Implementation has the following limitations:
 * <ul>
 * <li>only supports the no-authentication authentication method</li>
 * <li>only supports the <code>connect</code> command and will not answer correctly to other
 * commands</li>
 * <li>only supports requests with the domain address type and will not correctly answer to requests
 * with other address types</li>
 * </ul>
 * (see <a href="http://tools.ietf.org/html/rfc1928">RFC 1928</a>)
 *
 * @author Henning Staib
 */
public class Socks5Proxy {
    private static final Logger LOGGER = Logger.getLogger(Socks5Proxy.class.getName());

    /* SOCKS5 proxy singleton */
    private static Socks5Proxy socks5Server;

    /* whether getSocks5Proxy() should also start the proxy */
    private static boolean localSocks5ProxyEnabled = true;

    /**
     * The port of the local Socks5 Proxy. If this value is negative, the next ports will be tried
     * until an unused one is found.
     */
    private static int localSocks5ProxyPort = -7777;

    /* reusable implementation of a SOCKS5 proxy server process */
    private Socks5ServerProcess serverProcess;

    /* thread running the SOCKS5 server process */
    private Thread serverThread;

    /* server socket to accept SOCKS5 connections; null while the proxy is stopped */
    private ServerSocket serverSocket;

    /* assigns a connection to a digest */
    private final Map<String, Socket> connectionMap = new ConcurrentHashMap<String, Socket>();

    /* list of digests whose connections should be stored */
    private final List<String> allowedConnections = Collections.synchronizedList(new LinkedList<String>());

    /* ordered set of local addresses advertised in outgoing SOCKS5 Bytestream requests */
    private final Set<String> localAddresses = new LinkedHashSet<String>(4);

    /**
     * Private constructor. Collects the host addresses of all local network interfaces as the
     * initial set of local addresses.
     *
     * @throws IllegalStateException if the network interfaces cannot be enumerated or no local
     *         host address can be determined
     */
    private Socks5Proxy() {
        this.serverProcess = new Socks5ServerProcess();

        Enumeration<NetworkInterface> networkInterfaces;
        try {
            networkInterfaces = NetworkInterface.getNetworkInterfaces();
        } catch (SocketException e) {
            throw new IllegalStateException(e);
        }
        Set<String> localHostAddresses = new HashSet<String>();
        for (NetworkInterface networkInterface : Collections.list(networkInterfaces)) {
            // We can't use NetworkInterface.getInterfaceAddresses here, which
            // would return a List instead the deprecated Enumeration, because
            // it's Android API 9 and Smack currently uses 8. Change that when
            // we raise Smack's minimum Android API.
            Enumeration<InetAddress> inetAddresses = networkInterface.getInetAddresses();
            for (InetAddress address : Collections.list(inetAddresses)) {
                localHostAddresses.add(address.getHostAddress());
            }
        }
        if (localHostAddresses.isEmpty()) {
            throw new IllegalStateException("Could not determine any local host address");
        }
        replaceLocalAddresses(localHostAddresses);
    }

    /**
     * Returns true if the local Socks5 proxy should be started. Default is true.
     *
     * @return if the local Socks5 proxy should be started
     */
    public static boolean isLocalSocks5ProxyEnabled() {
        return localSocks5ProxyEnabled;
    }

    /**
     * Sets if the local Socks5 proxy should be started. Default is true.
     *
     * @param localSocks5ProxyEnabled if the local Socks5 proxy should be started
     */
    public static void setLocalSocks5ProxyEnabled(boolean localSocks5ProxyEnabled) {
        Socks5Proxy.localSocks5ProxyEnabled = localSocks5ProxyEnabled;
    }

    /**
     * Return the port of the local Socks5 proxy. Default is 7777.
     *
     * @return the port of the local Socks5 proxy
     */
    public static int getLocalSocks5ProxyPort() {
        return localSocks5ProxyPort;
    }

    /**
     * Sets the port of the local Socks5 proxy. Default is 7777. If you set the port to a negative
     * value Smack tries the absolute value and all following until it finds an open port.
     *
     * @param localSocks5ProxyPort the port of the local Socks5 proxy to set
     * @throws IllegalArgumentException if the absolute port value exceeds 65535
     */
    public static void setLocalSocks5ProxyPort(int localSocks5ProxyPort) {
        if (Math.abs(localSocks5ProxyPort) > 65535) {
            throw new IllegalArgumentException("localSocks5ProxyPort must be within (-65535,65535)");
        }
        Socks5Proxy.localSocks5ProxyPort = localSocks5ProxyPort;
    }

    /**
     * Returns the local SOCKS5 proxy server, creating (and, when enabled, starting) it on demand.
     *
     * @return the local SOCKS5 proxy server
     */
    public static synchronized Socks5Proxy getSocks5Proxy() {
        if (socks5Server == null) {
            socks5Server = new Socks5Proxy();
        }
        if (isLocalSocks5ProxyEnabled()) {
            socks5Server.start();
        }
        return socks5Server;
    }

    /**
     * Starts the local SOCKS5 proxy server. If it is already running, this method does nothing.
     */
    public synchronized void start() {
        if (isRunning()) {
            return;
        }
        try {
            if (getLocalSocks5ProxyPort() < 0) {
                // negative configured port means: probe the absolute value and all
                // following ports until one can be bound
                int port = Math.abs(getLocalSocks5ProxyPort());
                for (int i = 0; i < 65535 - port; i++) {
                    try {
                        this.serverSocket = new ServerSocket(port + i);
                        break;
                    }
                    catch (IOException e) {
                        // port is used, try next one
                    }
                }
            }
            else {
                this.serverSocket = new ServerSocket(getLocalSocks5ProxyPort());
            }

            if (this.serverSocket != null) {
                this.serverThread = new Thread(this.serverProcess);
                this.serverThread.start();
            }
        }
        catch (IOException e) {
            // couldn't setup server
            LOGGER.log(Level.SEVERE, "couldn't setup local SOCKS5 proxy on port " + getLocalSocks5ProxyPort(), e);
        }
    }

    /**
     * Stops the local SOCKS5 proxy server. If it is not running this method does nothing.
     */
    public synchronized void stop() {
        if (!isRunning()) {
            return;
        }

        try {
            this.serverSocket.close();
        }
        catch (IOException e) {
            // do nothing
        }

        if (this.serverThread != null && this.serverThread.isAlive()) {
            try {
                this.serverThread.interrupt();
                this.serverThread.join();
            }
            catch (InterruptedException e) {
                // do nothing
            }
        }
        this.serverThread = null;
        this.serverSocket = null;
    }

    /**
     * Adds the given address to the list of local network addresses.
     * <p>
     * Use this method if you want to provide multiple addresses in a SOCKS5 Bytestream request.
     * This may be necessary if your application is running on a machine with multiple network
     * interfaces or if you want to provide your public address in case you are behind a NAT router.
     * <p>
     * The order of the addresses used is determined by the order you add addresses.
     * <p>
     * Note that the list of addresses initially contains the address returned by
     * <code>InetAddress.getLocalHost().getHostAddress()</code>. You can replace the list of
     * addresses by invoking {@link #replaceLocalAddresses(Collection)}.
     *
     * @param address the local network address to add; silently ignored when <code>null</code>
     */
    public void addLocalAddress(String address) {
        if (address == null) {
            return;
        }
        synchronized (localAddresses) {
            this.localAddresses.add(address);
        }
    }

    /**
     * Removes the given address from the list of local network addresses. This address will then no
     * longer be used of outgoing SOCKS5 Bytestream requests.
     *
     * @param address the local network address to remove
     * @return true if the address was removed.
     */
    public boolean removeLocalAddress(String address) {
        synchronized (localAddresses) {
            return localAddresses.remove(address);
        }
    }

    /**
     * Returns a copy of the ordered local network addresses that will be used for streamhost
     * candidates of outgoing SOCKS5 Bytestream requests.
     *
     * @return list of the local network addresses
     */
    public List<String> getLocalAddresses() {
        synchronized (localAddresses) {
            return new LinkedList<String>(localAddresses);
        }
    }

    /**
     * Replaces the list of local network addresses.
     * <p>
     * Use this method if you want to provide multiple addresses in a SOCKS5 Bytestream request and
     * want to define their order. This may be necessary if your application is running on a machine
     * with multiple network interfaces or if you want to provide your public address in case you
     * are behind a NAT router.
     *
     * @param addresses the new list of local network addresses
     * @throws IllegalArgumentException if <code>addresses</code> is <code>null</code>
     */
    public void replaceLocalAddresses(Collection<String> addresses) {
        if (addresses == null) {
            throw new IllegalArgumentException("list must not be null");
        }
        synchronized (localAddresses) {
            localAddresses.clear();
            localAddresses.addAll(addresses);
        }
    }

    /**
     * Returns the port of the local SOCKS5 proxy server. If it is not running -1 will be returned.
     *
     * @return the port of the local SOCKS5 proxy server or -1 if proxy is not running
     */
    public int getPort() {
        if (!isRunning()) {
            return -1;
        }
        return this.serverSocket.getLocalPort();
    }

    /**
     * Returns the socket for the given digest. A socket will be returned if the given digest has
     * been in the list of allowed transfers (see {@link #addTransfer(String)}) while the peer
     * connected to the SOCKS5 proxy.
     *
     * @param digest identifying the connection
     * @return socket or null if there is no socket for the given digest
     */
    protected Socket getSocket(String digest) {
        return this.connectionMap.get(digest);
    }

    /**
     * Add the given digest to the list of allowed transfers. Only connections for allowed transfers
     * are stored and can be retrieved by invoking {@link #getSocket(String)}. All connections to
     * the local SOCKS5 proxy that don't contain an allowed digest are discarded.
     *
     * @param digest to be added to the list of allowed transfers
     */
    protected void addTransfer(String digest) {
        this.allowedConnections.add(digest);
    }

    /**
     * Removes the given digest from the list of allowed transfers. After invoking this method
     * already stored connections with the given digest will be removed.
     * <p>
     * The digest should be removed after establishing the SOCKS5 Bytestream is finished, an error
     * occurred while establishing the connection or if the connection is not allowed anymore.
     *
     * @param digest to be removed from the list of allowed transfers
     */
    protected void removeTransfer(String digest) {
        this.allowedConnections.remove(digest);
        this.connectionMap.remove(digest);
    }

    /**
     * Returns <code>true</code> if the local SOCKS5 proxy server is running, otherwise
     * <code>false</code>.
     *
     * @return <code>true</code> if the local SOCKS5 proxy server is running, otherwise
     *         <code>false</code>
     */
    public boolean isRunning() {
        return this.serverSocket != null;
    }

    /**
     * Implementation of a simplified SOCKS5 proxy server.
     */
    private class Socks5ServerProcess implements Runnable {

        public void run() {
            while (true) {
                Socket socket = null;
                try {
                    // snapshot the field: stop() nulls it concurrently, which previously
                    // caused a NullPointerException here; treat null like a closed socket
                    ServerSocket currentServerSocket = Socks5Proxy.this.serverSocket;
                    if (currentServerSocket == null || currentServerSocket.isClosed()
                                    || Thread.currentThread().isInterrupted()) {
                        return;
                    }

                    // accept connection
                    socket = currentServerSocket.accept();

                    // initialize connection
                    establishConnection(socket);
                }
                catch (SocketException e) {
                    /*
                     * do nothing, if caused by closing the server socket, thread will terminate in
                     * next loop
                     */
                }
                catch (Exception e) {
                    try {
                        if (socket != null) {
                            socket.close();
                        }
                    }
                    catch (IOException e1) {
                        /* do nothing */
                    }
                }
            }
        }

        /**
         * Negotiates a SOCKS5 connection and stores it on success.
         *
         * @param socket connection to the client
         * @throws SmackException if client requests a connection in an unsupported way
         * @throws IOException if a network error occurred
         */
        private void establishConnection(Socket socket) throws SmackException, IOException {
            DataOutputStream out = new DataOutputStream(socket.getOutputStream());
            DataInputStream in = new DataInputStream(socket.getInputStream());

            // first byte is the protocol version and must be 5
            int b = in.read();
            if (b != 5) {
                throw new SmackException("Only SOCKS5 supported");
            }

            // second byte: number of authentication methods supported; InputStream.read()
            // returns -1 at end of stream, which previously caused a
            // NegativeArraySizeException below instead of a clean protocol error
            b = in.read();
            if (b < 0) {
                throw new SmackException("Unexpected end of SOCKS5 stream");
            }

            // read list of supported authentication methods
            byte[] auth = new byte[b];
            in.readFully(auth);

            byte[] authMethodSelectionResponse = new byte[2];
            authMethodSelectionResponse[0] = (byte) 0x05; // protocol version

            // only authentication method 0, no authentication, supported
            boolean noAuthMethodFound = false;
            for (int i = 0; i < auth.length; i++) {
                if (auth[i] == (byte) 0x00) {
                    noAuthMethodFound = true;
                    break;
                }
            }

            if (!noAuthMethodFound) {
                authMethodSelectionResponse[1] = (byte) 0xFF; // no acceptable methods
                out.write(authMethodSelectionResponse);
                out.flush();
                throw new SmackException("Authentication method not supported");
            }

            authMethodSelectionResponse[1] = (byte) 0x00; // no-authentication method
            out.write(authMethodSelectionResponse);
            out.flush();

            // receive connection request
            byte[] connectionRequest = Socks5Utils.receiveSocks5Message(in);

            // extract digest; the length octet is unsigned, so mask it to avoid a
            // negative length for digests longer than 127 bytes
            String responseDigest = new String(connectionRequest, 5, connectionRequest[4] & 0xff);

            // return error if digest is not allowed
            if (!Socks5Proxy.this.allowedConnections.contains(responseDigest)) {
                connectionRequest[1] = (byte) 0x05; // set return status to 5 (connection refused)
                out.write(connectionRequest);
                out.flush();

                throw new SmackException("Connection is not allowed");
            }

            connectionRequest[1] = (byte) 0x00; // set return status to 0 (success)
            out.write(connectionRequest);
            out.flush();

            // store connection
            Socks5Proxy.this.connectionMap.put(responseDigest, socket);
        }
    }
}
|
|
/*
*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.wso2.am.integration.ui.tests;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.Select;
import org.openqa.selenium.support.ui.WebDriverWait;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.wso2.am.integration.ui.tests.util.APIMTestConstants;
import org.wso2.am.integration.ui.tests.util.TestUtil;
import org.wso2.carbon.automation.extensions.selenium.BrowserManager;
/**
 * UI regression test for APIMANAGER-3344: publishes an API with scopes defined and verifies
 * that a generated access token only carries the scopes the requesting user's roles allow.
 */
public class APIMANAGER3344ScopeSpecificTokenTestCase extends APIMIntegrationUiTestBase {

    private WebDriver driver;
    private String publisherURL;
    private String storeURL;
    WebDriverWait wait;
    private static final String SUPER_TENANT_DOMAIN_NAME = "carbon.super";
    private static final String API_URL = "http://gdata.youtube.com/feeds/api/standardfeeds";
    private static final Log log =
            LogFactory.getLog(APIMANAGER3344ScopeSpecificTokenTestCase.class);

    @BeforeClass(alwaysRun = true)
    public void setUp() throws Exception {
        super.init();
        driver = BrowserManager.getWebDriver();
        publisherURL = getPublisherURL();
        storeURL = getStoreURL();
        wait = new WebDriverWait(driver, 60);
    }

    @Test(groups = "wso2.am", description = "publish api with scopes defined and generate a token")
    public void testPublishApiWithScopesDefined()
            throws Exception {
        // logging into publisher
        driver.get(publisherURL + "/site/pages/login.jag");
        WebElement userNameField = driver.findElement(By.id("username"));
        WebElement passwordField = driver.findElement(By.id("pass"));
        userNameField.sendKeys(gatewayContextMgt.getContextTenant().getContextUser().getUserName());
        passwordField.sendKeys(gatewayContextMgt.getContextTenant().getContextUser().getPassword());
        driver.findElement(By.id("loginButton")).click();

        // add api details
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.linkText("Add")));
        driver.findElement(By.linkText("Add")).click();
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.id("create-new-api")));
        driver.findElement(By.id("create-new-api")).click();
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.id("designNewAPI")));
        driver.findElement(By.id("designNewAPI")).click();
        fillField("name", "TwitterAPI");
        fillField("context", "twitter");
        fillField("version", "1.0");

        // add the four resources the scopes will later be attached to
        addResource("tweet");
        addResource("retweet");
        addResource("view");
        addResource("delete");

        // go to implement and select specify inline
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.id("go_to_implement")));
        driver.findElement(By.id("go_to_implement")).click();
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//div[@value='#managed-api']")));
        driver.findElement(By.xpath("//div[@value='#managed-api']")).click();

        // set the production endpoint, then go to manage
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.id("go_to_manage")));
        driver.findElement(By.id("jsonform-0-elt-production_endpoints")).clear();
        driver.findElement(By.id("jsonform-0-elt-production_endpoints")).sendKeys(API_URL);
        driver.findElement(By.id("go_to_manage")).click();
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.id("publish_api")));
        driver.findElement(By.xpath("//button[@type='button']")).click();
        driver.findElement(By.xpath("//input[@value='Unlimited']")).click();
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("#layout-base > div.row-fluid")));
        driver.findElement(By.cssSelector("#layout-base > div.row-fluid")).click();

        // define scopes: admin may use Tweet/Retweet, only "reader" may use Delete
        defineScope("tweetScope", "Tweet", "admin");
        defineScope("retweetScope", "Retweet", "admin");
        defineScope("deleteScope", "Delete", "reader");

        // assign defined scopes; the third "+ Scope" link needs an indexed xpath
        assignScope(By.linkText("+ Scope"), "Tweet");
        assignScope(By.linkText("+ Scope"), "Retweet");
        assignScope(By.xpath("(//a[contains(text(),'+ Scope')])[2]"), "Delete");

        wait.until(ExpectedConditions.visibilityOfElementLocated(By.id("publish_api")));
        // publish api and wait for the lifecycle tab to confirm it went through
        driver.findElement(By.id("publish_api")).click();
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//a[@href='#lifecycles']")));

        // Go to the Tenant store and click Login
        driver.get(getStoreURL() + "?tenant=" + SUPER_TENANT_DOMAIN_NAME);
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.linkText("Login")));
        driver.findElement(By.linkText("Login")).click();
        // Find and fill Username
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.id("username")));
        WebElement usernameEle = driver.findElement(By.id("username"));
        usernameEle.sendKeys(gatewayContextMgt.getContextTenant().getContextUser().getUserName());
        // Find and fill Password
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.id("password")));
        WebElement passwordEle = driver.findElement(By.id("password"));
        passwordEle.sendKeys(gatewayContextMgt.getContextTenant().getContextUser().getPassword());
        // find Login button and click on it.
        driver.findElement(By.id("loginBtn")).click();

        // go to my applications and add an application
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//a[@class='link-myapplications']")));
        driver.findElement(By.xpath("//a[@class='link-myapplications']")).click();
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.id("application-add-button")));
        driver.findElement(By.id("application-name")).clear();
        driver.findElement(By.id("application-name")).sendKeys("app01");
        driver.findElement(By.id("application-add-button")).click();
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//a[@class='link-home']")));
        driver.findElement(By.xpath("//a[@class='link-home']")).click();

        // poll the store until the freshly published API becomes visible (or we time out)
        long loopMaxTime = APIMTestConstants.MAX_LOOP_WAIT_TIME_MILLISECONDS;
        long startTime = System.currentTimeMillis();
        long nowTime = startTime;
        while ((!driver.getPageSource().contains("TwitterAPI")) && (nowTime - startTime) < loopMaxTime) {
            driver.findElement(By.linkText("APIs")).click();
            threadWait(500);
            nowTime = System.currentTimeMillis();
        }
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//a[contains(.,'TwitterAPI-1.0')]")));
        driver.findElement(By.xpath("//a[contains(.,'TwitterAPI-1.0')]")).click();
        new Select(driver.findElement(By.id("application-list"))).selectByVisibleText("app01");
        driver.findElement(By.id("subscribe-button")).click();
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.linkText("Go to My Subscriptions")));

        // go to my subscriptions and generate key using defined scopes
        driver.findElement(By.linkText("Go to My Subscriptions")).click();
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.linkText("Select Scopes")));
        driver.findElement(By.linkText("Select Scopes")).click();
        clickWhenVisible(By.id("retweetScope"));
        clickWhenVisible(By.id("tweetScope"));
        clickWhenVisible(By.id("deleteScope"));
        clickWhenVisible(By.id("scopeSelectButtonPop"));
        clickWhenVisible(By.xpath("//button[@class='app-key-generate-button btn btn-primary btn-generatekeys']"));
        // wait for the token to get generated
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.id("prodAccessScope")));
        // get the generated scope
        WebElement scope = driver.findElement(By.id("prodAccessScope"));
        String finalScope = scope.getText();
        /*
         out of the three scopes defined, admin only has privileges to tweetScope and
         retweetScope only. The test is success if the generated key's scope is only within
         those two scopes.
        */
        if (!(finalScope.equals("Retweet, Tweet") || finalScope.equals("Tweet, Retweet"))) {
            throw new Exception("Generated scope doesn't match; got: " + finalScope);
        }
    }

    /** Waits for the input with the given id, clears it, and types {@code value}. */
    private void fillField(String id, String value) {
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.id(id)));
        driver.findElement(By.id(id)).clear();
        driver.findElement(By.id(id)).sendKeys(value);
    }

    /** Waits until the element located by {@code locator} is visible, then clicks it. */
    private void clickWhenVisible(By locator) {
        wait.until(ExpectedConditions.visibilityOfElementLocated(locator));
        driver.findElement(locator).click();
    }

    /**
     * Adds one resource row on the API design page: fills the URL pattern, ticks the
     * HTTP verb checkbox and clicks "add resource". Replaces four copy-pasted blocks
     * whose waits were accidentally inconsistent (one waited on "context" instead of
     * the URL-pattern field).
     *
     * @param urlPattern the resource URL pattern (e.g. "tweet")
     */
    private void addResource(String urlPattern) {
        fillField("resource_url_pattern", urlPattern);
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("input.http_verb_select")));
        driver.findElement(By.cssSelector("input.http_verb_select")).click();
        wait.until(ExpectedConditions.visibilityOfElementLocated(By.id("add_resource")));
        driver.findElement(By.id("add_resource")).click();
    }

    /**
     * Assigns an already-defined scope to a resource via the "+ Scope" link.
     *
     * @param scopeLink locator of the "+ Scope" link to open the scope selector with
     * @param scopeVisibleName visible name of the scope to select
     */
    private void assignScope(By scopeLink, String scopeVisibleName) {
        clickWhenVisible(scopeLink);
        new Select(driver.findElement(By.cssSelector("select.input-medium")))
                .selectByVisibleText(scopeVisibleName);
        driver.findElement(By.xpath("//button[@type='submit']")).click();
    }

    /** Defines one scope (key, display name, allowed roles) on the manage page. */
    private void defineScope(String scopeKey, String scopeName, String roles) {
        driver.findElement(By.id("define_scopes")).click();
        driver.findElement(By.id("scopeKey")).clear();
        driver.findElement(By.id("scopeKey")).sendKeys(scopeKey);
        driver.findElement(By.id("scopeName")).clear();
        driver.findElement(By.id("scopeName")).sendKeys(scopeName);
        driver.findElement(By.id("scopeRoles")).clear();
        driver.findElement(By.id("scopeRoles")).sendKeys(roles);
        driver.findElement(By.id("scope_submit")).click();
        threadWait(1000);
    }

    /** Sleeps for the given number of milliseconds, preserving the interrupt status. */
    private void threadWait(int milliseconds) {
        try {
            Thread.sleep(milliseconds);
        } catch (InterruptedException e) {
            // restore the flag so callers can still observe the interruption,
            // and keep the stack trace in the log instead of just e.toString()
            Thread.currentThread().interrupt();
            log.warn("Interrupted Exception while scope specific token test ", e);
        }
    }

    @AfterClass(alwaysRun = true)
    public void tearDown() throws Exception {
        TestUtil.cleanUp(gatewayContextMgt.getContextTenant().getContextUser().getUserName(),
                gatewayContextMgt.getContextTenant().getContextUser().getPassword(),
                storeUrls.getWebAppURLHttp(), publisherUrls.getWebAppURLHttp());
        driver.quit();
    }
}
|
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package dk.statsbiblioteket.summa.common.lucene.search;
import dk.statsbiblioteket.summa.common.index.IndexAlias;
import dk.statsbiblioteket.summa.common.index.IndexDescriptor;
import dk.statsbiblioteket.summa.common.index.IndexGroup;
import dk.statsbiblioteket.summa.common.lucene.LuceneIndexDescriptor;
import dk.statsbiblioteket.summa.common.lucene.LuceneIndexField;
import dk.statsbiblioteket.util.qa.QAInfo;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.lucene.queryparser.classic.ParseException;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.search.*;
import org.apache.lucene.util.Version;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Vector;
/**
 * This query parser handles expansion of non-qualified query terms and group
 * expansion. Members of groups are OR'ed with the highest boost being used,
 * instead of a combination of the boosts.
 */
@QAInfo(level = QAInfo.Level.NORMAL,
        state = QAInfo.State.IN_DEVELOPMENT,
        author = "te, hal")
public class DisjunctionQueryParser extends QueryParser {
    private static final Log log = LogFactory.getLog(DisjunctionQueryParser.class);

    // 0.0 means a DisjunctionMaxQuery scores with the single best sub-query
    // only; other sub-queries do not contribute.
    @SuppressWarnings({"FieldCanBeLocal"})
    private static float tieBreakerMultiplier = 0.0f;
    private final LuceneIndexDescriptor descriptor;

    // TODO: Make this a property
    @SuppressWarnings({"FieldCanBeLocal"})
    private boolean disjunctGroups = true;
    @SuppressWarnings({"FieldCanBeLocal"})
    private boolean disjunctDefaults = false;

    /**
     * Constructs a parser whose default fields, groups, aliases, analyzer and
     * default operator are taken from the given descriptor.
     *
     * @param descriptor source of fields, groups and the query analyzer.
     */
    public DisjunctionQueryParser(LuceneIndexDescriptor descriptor) {
        // TODO: Handle update of analyzer
        super(Version.LUCENE_30, null, descriptor.getQueryAnalyzer());
        setDefaultOperator(descriptor.getDefaultOperator() == IndexDescriptor.OPERATOR.and ?
                           Operator.AND : Operator.OR);
        this.descriptor = descriptor;
    }

    @Override
    protected Query getFieldQuery(String field, final String queryText, final int slop) throws ParseException {
        return getExpanded(field, new InnerQueryMaker() {
            @Override
            public Query getRecursiveQuery(String fieldOrGroup) throws ParseException {
                return getFieldQuery(fieldOrGroup, queryText, slop);
            }

            @Override
            public Query getFinalQuery(String field) throws ParseException {
                return getFinalFieldQuery(field, queryText, slop);
            }
        });
    }

    /**
     * Handles expansion of groups independent of query-type.<br />
     * If the field is empty, the default fields are used recursively.<br />
     * If the field is a group, the fields in the group are used.<br />
     * If the field is defined and not a group, it is used directly.
     * @param field the field to expand.
     * @param inner how to create the inner Queries.
     * @return a group- and default-field expanded Query.
     * @throws ParseException if the query could not be parsed.
     */
    private Query getExpanded(String field, InnerQueryMaker inner) throws ParseException {
        if (log.isTraceEnabled()) {
            //noinspection DuplicateStringLiteralInspection
            log.trace("getExpanded(" + field + ", " + inner + ") called");
        }
        if (field == null) {
            // No field qualifier: recurse over the descriptor's default fields
            // (each of which may itself be a group).
            Collection<Query> subQueries = new ArrayList<>(10);
            for (String defaultField : descriptor.getDefaultFields()) {
                Query q = inner.getRecursiveQuery(defaultField);
                if (q != null) {
                    subQueries.add(q);
                }
            }
            if (subQueries.isEmpty()) { // happens for stopwords
                return null;
            }
            return makeMulti(subQueries, disjunctDefaults);
        }
        IndexGroup<LuceneIndexField> group = descriptor.getGroup(field);
        if (group != null) {
            if (log.isTraceEnabled()) {
                log.trace("Expanding group '" + group.getName() + "'");
            }
            List<Query> queries = new ArrayList<>(group.getFields().size());
            for (LuceneIndexField groupField: group.getFields()) {
                Query q = inner.getFinalQuery(groupField.getName());
                if (q != null) {
                    queries.add(q);
                }
            }
            return makeMulti(queries, disjunctGroups);
        }
        LuceneIndexField resolvedField = descriptor.getField(field);
        if (resolvedField == null) {
            // TODO: The field is unknown in the descriptor but might be indexed
            return inner.getFinalQuery(field);
        }
        // Note: This was introduced to support dynamic fields. It _should_ not collide with existing usage
        for (IndexAlias alias: resolvedField.getAliases()) {
            if (alias.getName().equals(field)) {
                // It's an alias so we return the real name
                return inner.getFinalQuery(resolvedField.getName());
            }
        }
        // Not an alias: keep the user-supplied field name rather than the
        // resolved descriptor name.
        return inner.getFinalQuery(field);
    }

    /**
     * Strategy used by {@link #getExpanded} to build the concrete per-field
     * queries for a given query type (term, fuzzy, prefix, ...).
     */
    private interface InnerQueryMaker {
        /**
         * If the original field was a null, this method is called for every default field. Implementations should
         * perform a recursive expansion based on the received field as it is possible that it is a group.
         * @param fieldOrGroup designation for a field or group.
         * @return a Query expanded with the given field or group.
         * @throws ParseException if the query could not be processed.
         */
        Query getRecursiveQuery(String fieldOrGroup) throws ParseException;

        /**
         * This method is called only with fully resolved field names and should produce a query based on that.
         * @param field a fully resolved field name, expected to be present in the index.
         * @return a query based on the field.
         * @throws ParseException if the query could not be processed.
         */
        Query getFinalQuery(String field) throws ParseException;
    }

    /**
     * Calls super.getFieldQuery and ensures that slop is set if relevant.
     * A NullPointerException from the super implementation is converted to a
     * ParseException with the offending field and query text in the message.
     */
    private Query getFinalFieldQuery(String field, String queryText, int slop) throws ParseException {
        Query query;
        try {
            query = super.getFieldQuery(field, queryText, slop);
        } catch(NullPointerException e) {
            ParseException pe = new ParseException(
                "Got NullPointerException while calling getFieldQuery('" + field + "', '" + queryText + "')");
            pe.initCause(e);
            throw pe;
        }
        if (query != null) {
            if (query instanceof PhraseQuery) {
                ((PhraseQuery) query).setSlop(slop);
            }
            if (query instanceof MultiPhraseQuery) {
                ((MultiPhraseQuery) query).setSlop(slop);
            }
        }
        return query;
    }

    /**
     * Create a Query based on the given queries. Depending on the value for
     * disjunct, this will either be a BooleanQuery or a DisjunctionMaxQuery.
     * @param queries the queries to wrap.
     * @param disjunct if true, a DisjunctionMaxQuery is generated.
     *                 If false, a BooleanQuery is generated.
     * @return a Query wrapping queries, or null if queries is empty.
     * @throws ParseException in case of underlying parse errors.
     */
    private Query makeMulti(Collection<Query> queries, boolean disjunct) throws
                                                                         ParseException {
        if (queries.isEmpty()) {
            return null; // Stopwords?
        }
        if (disjunct) {
            return new DisjunctionMaxQuery(queries, tieBreakerMultiplier);
        }
        //noinspection UseOfObsoleteCollectionType
        Vector<BooleanClause> clauses = new Vector<>(queries.size());
        for (Query query: queries) {
            clauses.add(new BooleanClause(query, BooleanClause.Occur.SHOULD));
        }
        return getBooleanQuery(clauses, true);
    }

    @Override
    protected Query getFuzzyQuery(String field, final String termStr, final float minSimilarity) throws ParseException {
        return getExpanded(field, new InnerQueryMaker() {
            @Override
            public Query getRecursiveQuery(String fieldOrGroup) throws ParseException {
                return getFuzzyQuery(fieldOrGroup, termStr, minSimilarity);
            }

            @Override
            public Query getFinalQuery(String field) throws ParseException {
                return getSuperFuzzyQuery(field, termStr, minSimilarity);
            }
        });
    }

    // Bridge so that anonymous InnerQueryMakers can reach the super implementation.
    private Query getSuperFuzzyQuery(
        String field, final String termStr, final float minSimilarity) throws ParseException {
        return super.getFuzzyQuery(field, termStr, minSimilarity);
    }

    @Override
    protected Query getFieldQuery(
        final String field, final String queryText, final boolean quoted) throws ParseException {
        return getExpanded(field, new InnerQueryMaker() {
            @Override
            public Query getRecursiveQuery(String fieldOrGroup) throws ParseException {
                return getFieldQuery(fieldOrGroup, queryText, quoted);
            }

            @Override
            public Query getFinalQuery(String field) throws ParseException {
                return getSuperFieldQuery(field, queryText, quoted);
            }
        });
    }

    // Bridge so that anonymous InnerQueryMakers can reach the super implementation.
    private Query getSuperFieldQuery(String field, String queryText, boolean quoted) throws ParseException {
        return super.getFieldQuery(field, queryText, quoted);
    }

    @Override
    protected Query getPrefixQuery(String field, final String termStr) throws ParseException {
        return getExpanded(field, new InnerQueryMaker() {
            @Override
            public Query getRecursiveQuery(String fieldOrGroup) throws ParseException {
                return getPrefixQuery(fieldOrGroup, termStr);
            }

            @Override
            public Query getFinalQuery(String field) throws ParseException {
                return getSuperPrefixQuery(field, termStr);
            }
        });
    }

    // Bridge so that anonymous InnerQueryMakers can reach the super implementation.
    protected Query getSuperPrefixQuery(String field, final String termStr) throws ParseException {
        return super.getPrefixQuery(field, termStr);
    }

    @Override
    protected Query getWildcardQuery(String field, final String termStr) throws ParseException {
        return getExpanded(field, new InnerQueryMaker() {
            @Override
            public Query getRecursiveQuery(String fieldOrGroup) throws ParseException {
                return getWildcardQuery(fieldOrGroup, termStr);
            }

            @Override
            public Query getFinalQuery(String field) throws ParseException {
                return getSuperWildcardQuery(field, termStr);
            }
        });
    }

    // Bridge so that anonymous InnerQueryMakers can reach the super implementation.
    protected Query getSuperWildcardQuery(String field, String termStr) throws ParseException {
        return super.getWildcardQuery(field, termStr);
    }

    // Note: the two booleans are the start- and end-inclusive flags of the
    // range (they were previously misleadingly named "inclusive"/"exclusive").
    // The super signature does not declare ParseException, so it is wrapped.
    @Override
    protected Query getRangeQuery(
        String field, final String part1, final String part2,
        final boolean startInclusive, final boolean endInclusive) {
        try {
            return getExpanded(field, new InnerQueryMaker() {
                @Override
                public Query getRecursiveQuery(String fieldOrGroup) {
                    return getRangeQuery(fieldOrGroup, part1, part2, startInclusive, endInclusive);
                }

                @Override
                public Query getFinalQuery(String field) throws ParseException {
                    return getSuperRangeQuery(field, part1, part2, startInclusive, endInclusive);
                }
            });
        } catch (ParseException e) {
            throw new RuntimeException("ParseException", e);
        }
    }

    // Bridge so that anonymous InnerQueryMakers can reach the super implementation.
    protected Query getSuperRangeQuery(String field, final String part1, final String part2,
                                       final boolean startInclusive, final boolean endInclusive) throws ParseException {
        return super.getRangeQuery(field, part1, part2, startInclusive, endInclusive);
    }

    @Override
    protected Query getRegexpQuery(final String field, final String termStr) throws ParseException {
        try {
            return getExpanded(field, new InnerQueryMaker() {
                @Override
                public Query getRecursiveQuery(String fieldOrGroup) throws ParseException {
                    return getRegexpQuery(fieldOrGroup, termStr);
                }

                @Override
                public Query getFinalQuery(String field) throws ParseException {
                    return getSuperRegexpQuery(field, termStr);
                }
            });
        } catch (ParseException e) {
            throw new RuntimeException("ParseException", e);
        }
    }

    // Bridge so that anonymous InnerQueryMakers can reach the super implementation.
    protected Query getSuperRegexpQuery(String field, String termStr) throws ParseException {
        return super.getRegexpQuery(field, termStr);
    }
}
|
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.desktopvirtualization.models;
import com.azure.core.management.SystemData;
import com.azure.core.util.Context;
import com.azure.resourcemanager.desktopvirtualization.fluent.models.ApplicationInner;
import java.util.Map;
/**
 * An immutable client-side representation of Application.
 *
 * <p>NOTE(review): this type is generated by AutoRest (see the file header);
 * prefer regenerating from the service specification over hand-editing.</p>
 */
public interface Application {
    /**
     * Gets the id property: Fully qualified resource Id for the resource.
     *
     * @return the id value.
     */
    String id();

    /**
     * Gets the name property: The name of the resource.
     *
     * @return the name value.
     */
    String name();

    /**
     * Gets the type property: The type of the resource.
     *
     * @return the type value.
     */
    String type();

    /**
     * Gets the systemData property: Metadata pertaining to creation and last modification of the resource.
     *
     * @return the systemData value.
     */
    SystemData systemData();

    /**
     * Gets the objectId property: ObjectId of Application. (internal use).
     *
     * @return the objectId value.
     */
    String objectId();

    /**
     * Gets the description property: Description of Application.
     *
     * @return the description value.
     */
    String description();

    /**
     * Gets the friendlyName property: Friendly name of Application.
     *
     * @return the friendlyName value.
     */
    String friendlyName();

    /**
     * Gets the filePath property: Specifies a path for the executable file for the application.
     *
     * @return the filePath value.
     */
    String filePath();

    /**
     * Gets the msixPackageFamilyName property: Specifies the package family name for MSIX applications.
     *
     * @return the msixPackageFamilyName value.
     */
    String msixPackageFamilyName();

    /**
     * Gets the msixPackageApplicationId property: Specifies the package application Id for MSIX applications.
     *
     * @return the msixPackageApplicationId value.
     */
    String msixPackageApplicationId();

    /**
     * Gets the applicationType property: Resource Type of Application.
     *
     * @return the applicationType value.
     */
    RemoteApplicationType applicationType();

    /**
     * Gets the commandLineSetting property: Specifies whether this published application can be launched with command
     * line arguments provided by the client, command line arguments specified at publish time, or no command line
     * arguments at all.
     *
     * @return the commandLineSetting value.
     */
    CommandLineSetting commandLineSetting();

    /**
     * Gets the commandLineArguments property: Command Line Arguments for Application.
     *
     * @return the commandLineArguments value.
     */
    String commandLineArguments();

    /**
     * Gets the showInPortal property: Specifies whether to show the RemoteApp program in the RD Web Access server.
     *
     * @return the showInPortal value.
     */
    Boolean showInPortal();

    /**
     * Gets the iconPath property: Path to icon.
     *
     * @return the iconPath value.
     */
    String iconPath();

    /**
     * Gets the iconIndex property: Index of the icon.
     *
     * @return the iconIndex value.
     */
    Integer iconIndex();

    /**
     * Gets the iconHash property: Hash of the icon.
     *
     * @return the iconHash value.
     */
    String iconHash();

    /**
     * Gets the iconContent property: the icon a 64 bit string as a byte array.
     *
     * @return the iconContent value.
     */
    byte[] iconContent();

    /**
     * Gets the inner com.azure.resourcemanager.desktopvirtualization.fluent.models.ApplicationInner object.
     *
     * @return the inner object.
     */
    ApplicationInner innerModel();

    /** The entirety of the Application definition. */
    interface Definition
        extends DefinitionStages.Blank,
            DefinitionStages.WithParentResource,
            DefinitionStages.WithCommandLineSetting,
            DefinitionStages.WithCreate {
    }

    /** The Application definition stages. */
    interface DefinitionStages {
        /** The first stage of the Application definition. */
        interface Blank extends WithParentResource {
        }

        /** The stage of the Application definition allowing to specify parent resource. */
        interface WithParentResource {
            /**
             * Specifies resourceGroupName, applicationGroupName.
             *
             * @param resourceGroupName The name of the resource group. The name is case insensitive.
             * @param applicationGroupName The name of the application group.
             * @return the next definition stage.
             */
            WithCommandLineSetting withExistingApplicationGroup(String resourceGroupName, String applicationGroupName);
        }

        /** The stage of the Application definition allowing to specify commandLineSetting. */
        interface WithCommandLineSetting {
            /**
             * Specifies the commandLineSetting property: Specifies whether this published application can be launched
             * with command line arguments provided by the client, command line arguments specified at publish time, or
             * no command line arguments at all..
             *
             * @param commandLineSetting Specifies whether this published application can be launched with command line
             *     arguments provided by the client, command line arguments specified at publish time, or no command
             *     line arguments at all.
             * @return the next definition stage.
             */
            WithCreate withCommandLineSetting(CommandLineSetting commandLineSetting);
        }

        /**
         * The stage of the Application definition which contains all the minimum required properties for the resource
         * to be created, but also allows for any other optional properties to be specified.
         */
        interface WithCreate
            extends DefinitionStages.WithDescription,
                DefinitionStages.WithFriendlyName,
                DefinitionStages.WithFilePath,
                DefinitionStages.WithMsixPackageFamilyName,
                DefinitionStages.WithMsixPackageApplicationId,
                DefinitionStages.WithApplicationType,
                DefinitionStages.WithCommandLineArguments,
                DefinitionStages.WithShowInPortal,
                DefinitionStages.WithIconPath,
                DefinitionStages.WithIconIndex {
            /**
             * Executes the create request.
             *
             * @return the created resource.
             */
            Application create();

            /**
             * Executes the create request.
             *
             * @param context The context to associate with this operation.
             * @return the created resource.
             */
            Application create(Context context);
        }

        /** The stage of the Application definition allowing to specify description. */
        interface WithDescription {
            /**
             * Specifies the description property: Description of Application..
             *
             * @param description Description of Application.
             * @return the next definition stage.
             */
            WithCreate withDescription(String description);
        }

        /** The stage of the Application definition allowing to specify friendlyName. */
        interface WithFriendlyName {
            /**
             * Specifies the friendlyName property: Friendly name of Application..
             *
             * @param friendlyName Friendly name of Application.
             * @return the next definition stage.
             */
            WithCreate withFriendlyName(String friendlyName);
        }

        /** The stage of the Application definition allowing to specify filePath. */
        interface WithFilePath {
            /**
             * Specifies the filePath property: Specifies a path for the executable file for the application..
             *
             * @param filePath Specifies a path for the executable file for the application.
             * @return the next definition stage.
             */
            WithCreate withFilePath(String filePath);
        }

        /** The stage of the Application definition allowing to specify msixPackageFamilyName. */
        interface WithMsixPackageFamilyName {
            /**
             * Specifies the msixPackageFamilyName property: Specifies the package family name for MSIX applications.
             *
             * @param msixPackageFamilyName Specifies the package family name for MSIX applications.
             * @return the next definition stage.
             */
            WithCreate withMsixPackageFamilyName(String msixPackageFamilyName);
        }

        /** The stage of the Application definition allowing to specify msixPackageApplicationId. */
        interface WithMsixPackageApplicationId {
            /**
             * Specifies the msixPackageApplicationId property: Specifies the package application Id for MSIX
             * applications.
             *
             * @param msixPackageApplicationId Specifies the package application Id for MSIX applications.
             * @return the next definition stage.
             */
            WithCreate withMsixPackageApplicationId(String msixPackageApplicationId);
        }

        /** The stage of the Application definition allowing to specify applicationType. */
        interface WithApplicationType {
            /**
             * Specifies the applicationType property: Resource Type of Application..
             *
             * @param applicationType Resource Type of Application.
             * @return the next definition stage.
             */
            WithCreate withApplicationType(RemoteApplicationType applicationType);
        }

        /** The stage of the Application definition allowing to specify commandLineArguments. */
        interface WithCommandLineArguments {
            /**
             * Specifies the commandLineArguments property: Command Line Arguments for Application..
             *
             * @param commandLineArguments Command Line Arguments for Application.
             * @return the next definition stage.
             */
            WithCreate withCommandLineArguments(String commandLineArguments);
        }

        /** The stage of the Application definition allowing to specify showInPortal. */
        interface WithShowInPortal {
            /**
             * Specifies the showInPortal property: Specifies whether to show the RemoteApp program in the RD Web Access
             * server..
             *
             * @param showInPortal Specifies whether to show the RemoteApp program in the RD Web Access server.
             * @return the next definition stage.
             */
            WithCreate withShowInPortal(Boolean showInPortal);
        }

        /** The stage of the Application definition allowing to specify iconPath. */
        interface WithIconPath {
            /**
             * Specifies the iconPath property: Path to icon..
             *
             * @param iconPath Path to icon.
             * @return the next definition stage.
             */
            WithCreate withIconPath(String iconPath);
        }

        /** The stage of the Application definition allowing to specify iconIndex. */
        interface WithIconIndex {
            /**
             * Specifies the iconIndex property: Index of the icon..
             *
             * @param iconIndex Index of the icon.
             * @return the next definition stage.
             */
            WithCreate withIconIndex(Integer iconIndex);
        }
    }

    /**
     * Begins update for the Application resource.
     *
     * @return the stage of resource update.
     */
    Application.Update update();

    /** The template for Application update. */
    interface Update
        extends UpdateStages.WithTags,
            UpdateStages.WithDescription,
            UpdateStages.WithFriendlyName,
            UpdateStages.WithFilePath,
            UpdateStages.WithCommandLineSetting,
            UpdateStages.WithCommandLineArguments,
            UpdateStages.WithShowInPortal,
            UpdateStages.WithIconPath,
            UpdateStages.WithIconIndex,
            UpdateStages.WithMsixPackageFamilyName,
            UpdateStages.WithMsixPackageApplicationId,
            UpdateStages.WithApplicationType {
        /**
         * Executes the update request.
         *
         * @return the updated resource.
         */
        Application apply();

        /**
         * Executes the update request.
         *
         * @param context The context to associate with this operation.
         * @return the updated resource.
         */
        Application apply(Context context);
    }

    /** The Application update stages. */
    interface UpdateStages {
        /** The stage of the Application update allowing to specify tags. */
        interface WithTags {
            /**
             * Specifies the tags property: tags to be updated.
             *
             * @param tags tags to be updated.
             * @return the next definition stage.
             */
            Update withTags(Map<String, String> tags);
        }

        /** The stage of the Application update allowing to specify description. */
        interface WithDescription {
            /**
             * Specifies the description property: Description of Application..
             *
             * @param description Description of Application.
             * @return the next definition stage.
             */
            Update withDescription(String description);
        }

        /** The stage of the Application update allowing to specify friendlyName. */
        interface WithFriendlyName {
            /**
             * Specifies the friendlyName property: Friendly name of Application..
             *
             * @param friendlyName Friendly name of Application.
             * @return the next definition stage.
             */
            Update withFriendlyName(String friendlyName);
        }

        /** The stage of the Application update allowing to specify filePath. */
        interface WithFilePath {
            /**
             * Specifies the filePath property: Specifies a path for the executable file for the application..
             *
             * @param filePath Specifies a path for the executable file for the application.
             * @return the next definition stage.
             */
            Update withFilePath(String filePath);
        }

        /** The stage of the Application update allowing to specify commandLineSetting. */
        interface WithCommandLineSetting {
            /**
             * Specifies the commandLineSetting property: Specifies whether this published application can be launched
             * with command line arguments provided by the client, command line arguments specified at publish time, or
             * no command line arguments at all..
             *
             * @param commandLineSetting Specifies whether this published application can be launched with command line
             *     arguments provided by the client, command line arguments specified at publish time, or no command
             *     line arguments at all.
             * @return the next definition stage.
             */
            Update withCommandLineSetting(CommandLineSetting commandLineSetting);
        }

        /** The stage of the Application update allowing to specify commandLineArguments. */
        interface WithCommandLineArguments {
            /**
             * Specifies the commandLineArguments property: Command Line Arguments for Application..
             *
             * @param commandLineArguments Command Line Arguments for Application.
             * @return the next definition stage.
             */
            Update withCommandLineArguments(String commandLineArguments);
        }

        /** The stage of the Application update allowing to specify showInPortal. */
        interface WithShowInPortal {
            /**
             * Specifies the showInPortal property: Specifies whether to show the RemoteApp program in the RD Web Access
             * server..
             *
             * @param showInPortal Specifies whether to show the RemoteApp program in the RD Web Access server.
             * @return the next definition stage.
             */
            Update withShowInPortal(Boolean showInPortal);
        }

        /** The stage of the Application update allowing to specify iconPath. */
        interface WithIconPath {
            /**
             * Specifies the iconPath property: Path to icon..
             *
             * @param iconPath Path to icon.
             * @return the next definition stage.
             */
            Update withIconPath(String iconPath);
        }

        /** The stage of the Application update allowing to specify iconIndex. */
        interface WithIconIndex {
            /**
             * Specifies the iconIndex property: Index of the icon..
             *
             * @param iconIndex Index of the icon.
             * @return the next definition stage.
             */
            Update withIconIndex(Integer iconIndex);
        }

        /** The stage of the Application update allowing to specify msixPackageFamilyName. */
        interface WithMsixPackageFamilyName {
            /**
             * Specifies the msixPackageFamilyName property: Specifies the package family name for MSIX applications.
             *
             * @param msixPackageFamilyName Specifies the package family name for MSIX applications.
             * @return the next definition stage.
             */
            Update withMsixPackageFamilyName(String msixPackageFamilyName);
        }

        /** The stage of the Application update allowing to specify msixPackageApplicationId. */
        interface WithMsixPackageApplicationId {
            /**
             * Specifies the msixPackageApplicationId property: Specifies the package application Id for MSIX
             * applications.
             *
             * @param msixPackageApplicationId Specifies the package application Id for MSIX applications.
             * @return the next definition stage.
             */
            Update withMsixPackageApplicationId(String msixPackageApplicationId);
        }

        /** The stage of the Application update allowing to specify applicationType. */
        interface WithApplicationType {
            /**
             * Specifies the applicationType property: Resource Type of Application..
             *
             * @param applicationType Resource Type of Application.
             * @return the next definition stage.
             */
            Update withApplicationType(RemoteApplicationType applicationType);
        }
    }

    /**
     * Refreshes the resource to sync with Azure.
     *
     * @return the refreshed resource.
     */
    Application refresh();

    /**
     * Refreshes the resource to sync with Azure.
     *
     * @param context The context to associate with this operation.
     * @return the refreshed resource.
     */
    Application refresh(Context context);
}
|
|
/*
* Licensed to Crate.io GmbH ("Crate") under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership. Crate licenses
* this file to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial agreement.
*/
package io.crate.execution.engine.collect.collectors;
import io.crate.data.BatchIterator;
import io.crate.data.CollectingBatchIterator;
import io.crate.data.Row;
import io.crate.exceptions.SQLExceptions;
import io.crate.execution.dsl.phases.RoutedCollectPhase;
import io.crate.execution.engine.collect.RowsTransformer;
import io.crate.execution.engine.collect.stats.NodeStatsRequest;
import io.crate.execution.engine.collect.stats.NodeStatsResponse;
import io.crate.execution.engine.collect.stats.TransportNodeStatsAction;
import io.crate.expression.InputFactory;
import io.crate.expression.reference.StaticTableReferenceResolver;
import io.crate.expression.reference.sys.node.NodeStatsContext;
import io.crate.expression.symbol.RefVisitor;
import io.crate.expression.symbol.Symbol;
import io.crate.metadata.ColumnIdent;
import io.crate.metadata.TransactionContext;
import io.crate.metadata.expressions.RowCollectExpressionFactory;
import io.crate.metadata.Reference;
import io.crate.metadata.sys.SysNodesTableInfo;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.node.DiscoveryNode;
import io.crate.common.unit.TimeValue;
import org.elasticsearch.transport.ConnectTransportException;
import org.elasticsearch.transport.ReceiveTimeoutTransportException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
import java.util.function.Supplier;
/**
* Collects information for sys.nodes locally or remote based on the provided {@link RoutedCollectPhase}
*/
public final class NodeStats {
/**
 * Creates a BatchIterator over sys.nodes rows for the given nodes, collecting
 * the data lazily when the iterator is consumed.
 */
public static BatchIterator<Row> newInstance(TransportNodeStatsAction transportStatTablesAction,
                                             RoutedCollectPhase collectPhase,
                                             Collection<DiscoveryNode> nodes,
                                             TransactionContext txnCtx,
                                             InputFactory inputFactory) {
    LoadNodeStats loader = new LoadNodeStats(
        transportStatTablesAction,
        collectPhase,
        nodes,
        txnCtx,
        inputFactory
    );
    // No-op open/close hooks; the final flag is passed through unchanged.
    return CollectingBatchIterator.newInstance(() -> {}, t -> {}, loader, true);
}
/**
 * Supplies the NodeStatsContext rows, either straight from the local cluster
 * state (when only node id/name are needed) or by fanning out one stats
 * request per node.
 */
private static final class LoadNodeStats implements Supplier<CompletableFuture<? extends Iterable<? extends Row>>> {

    // Per-node timeout for remote stats requests; nodes that do not answer in
    // time still yield a row with id/name only (see onFailure below).
    private static final TimeValue REQUEST_TIMEOUT = TimeValue.timeValueMillis(3000L);

    private final TransportNodeStatsAction nodeStatsAction;
    private final RoutedCollectPhase collectPhase;
    private final Collection<DiscoveryNode> nodes;
    private final TransactionContext txnCtx;
    private final InputFactory inputFactory;
    // Factories for the sys.nodes row expressions, keyed by column.
    private final Map<ColumnIdent, RowCollectExpressionFactory<NodeStatsContext>> expressions;

    LoadNodeStats(TransportNodeStatsAction nodeStatsAction,
                  RoutedCollectPhase collectPhase,
                  Collection<DiscoveryNode> nodes,
                  TransactionContext txnCtx,
                  InputFactory inputFactory) {
        this.nodeStatsAction = nodeStatsAction;
        this.collectPhase = collectPhase;
        this.nodes = nodes;
        this.txnCtx = txnCtx;
        this.inputFactory = inputFactory;
        this.expressions = SysNodesTableInfo.create().expressions();
    }

    @Override
    public CompletableFuture<Iterable<Row>> get() {
        // Turn the collected NodeStatsContext instances into Rows once the
        // contexts are available.
        StaticTableReferenceResolver<NodeStatsContext> referenceResolver =
            new StaticTableReferenceResolver<>(expressions);
        return getNodeStatsContexts()
            .thenApply(result -> RowsTransformer.toRowsIterable(txnCtx, inputFactory, referenceResolver, collectPhase, result));
    }

    private CompletableFuture<List<NodeStatsContext>> getNodeStatsContexts() {
        // Columns required by both the select list and the WHERE clause.
        Set<ColumnIdent> toCollect = getRootColumns(collectPhase.toCollect());
        toCollect.addAll(getRootColumns(List.of(collectPhase.where())));
        // Skip the remote round-trips when the cluster state already has
        // everything that is asked for.
        return dataAvailableInClusterState(toCollect)
            ? getStatsFromLocalState()
            : getStatsFromRemote(toCollect);
    }

    // Builds one id/name-only context per node from the local cluster state.
    private CompletableFuture<List<NodeStatsContext>> getStatsFromLocalState() {
        List<NodeStatsContext> rows = new ArrayList<>(nodes.size());
        for (DiscoveryNode node : nodes) {
            rows.add(new NodeStatsContext(node.getId(), node.getName()));
        }
        return CompletableFuture.completedFuture(rows);
    }

    // Fans out one stats request per node and completes the returned future
    // once every node has either responded or failed.
    private CompletableFuture<List<NodeStatsContext>> getStatsFromRemote(Set<ColumnIdent> toCollect) {
        final CompletableFuture<List<NodeStatsContext>> nodeStatsContextsFuture = new CompletableFuture<>();
        // `rows` is appended to from the listener callbacks, hence the
        // synchronized blocks; `remainingNodesToCollect` counts down to zero
        // to detect when the last callback has fired.
        final List<NodeStatsContext> rows = new ArrayList<>(nodes.size());
        final AtomicInteger remainingNodesToCollect = new AtomicInteger(nodes.size());
        for (final DiscoveryNode node : nodes) {
            final String nodeId = node.getId();
            NodeStatsRequest request = new NodeStatsRequest(toCollect);
            nodeStatsAction.execute(nodeId, request, new ActionListener<NodeStatsResponse>() {
                @Override
                public void onResponse(NodeStatsResponse response) {
                    synchronized (rows) {
                        rows.add(response.nodeStatsContext());
                    }
                    if (remainingNodesToCollect.decrementAndGet() == 0) {
                        nodeStatsContextsFuture.complete(rows);
                    }
                }

                @Override
                public void onFailure(Exception e) {
                    Throwable t = SQLExceptions.unwrap(e);
                    // An unreachable or slow node still contributes a row with
                    // id/name only instead of failing the whole query; any
                    // other error fails the future.
                    if (isTimeoutOrNodeNotReachable(t)) {
                        NodeStatsContext statsContext = new NodeStatsContext(nodeId, node.getName());
                        synchronized (rows) {
                            rows.add(statsContext);
                        }
                        if (remainingNodesToCollect.decrementAndGet() == 0) {
                            nodeStatsContextsFuture.complete(rows);
                        }
                    } else {
                        nodeStatsContextsFuture.completeExceptionally(t);
                    }
                }
            }, REQUEST_TIMEOUT);
        }
        return nodeStatsContextsFuture;
    }
}
private static boolean isTimeoutOrNodeNotReachable(Throwable t) {
return t instanceof ReceiveTimeoutTransportException
|| t instanceof ConnectTransportException;
}
/**
* @return true if all required column can be provided from the local state.
*/
private static boolean dataAvailableInClusterState(Set<ColumnIdent> toCollect) {
switch (toCollect.size()) {
case 1:
return toCollect.contains(SysNodesTableInfo.Columns.ID) ||
toCollect.contains(SysNodesTableInfo.Columns.NAME);
case 2:
return toCollect.contains(SysNodesTableInfo.Columns.ID) &&
toCollect.contains(SysNodesTableInfo.Columns.NAME);
default:
return false;
}
}
private static Set<ColumnIdent> getRootColumns(Iterable<? extends Symbol> symbols) {
HashSet<ColumnIdent> columns = new HashSet<>();
Consumer<Reference> addRootColumn = ref -> columns.add(ref.column().getRoot());
for (Symbol symbol: symbols) {
RefVisitor.visitRefs(symbol, addRootColumn);
}
return columns;
}
}
|
|
package edu.upenn.seas.pennapps.dumbledore.pollio;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.StatusLine;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.params.BasicHttpParams;
import org.json.JSONException;
import org.json.JSONObject;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GooglePlayServicesUtil;
import com.google.android.gms.gcm.GoogleCloudMessaging;
import android.accounts.Account;
import android.accounts.AccountManager;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager.NameNotFoundException;
import android.database.sqlite.SQLiteDatabase;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.TextView;
/**
 * Main UI for the demo app; also hosts the static helpers used for Google
 * Cloud Messaging (GCM) registration and for initializing the user with the
 * backend server.
 *
 * NOTE(review): most state here is static and shared between the two entry
 * points ({@link #initGCM} and {@link #onCreate} duplicate the same bootstrap
 * flow); consolidating them would be a follow-up refactoring.
 */
public class GCMUtils extends Activity {
    public static final String EXTRA_MESSAGE = "message";
    public static final String PROPERTY_REG_ID = "registration_id";
    private static final String PROPERTY_APP_VERSION = "appVersion";
    private final static int PLAY_SERVICES_RESOLUTION_REQUEST = 9000;
    /**
     * Substitute you own sender ID here. This is the project number you got
     * from the API Console, as described in "Getting Started."
     */
    static String SENDER_ID = "1033493645859";
    /**
     * Tag used on log messages.
     */
    static final String TAG = "PollioGCM";
    TextView mDisplay;
    static GoogleCloudMessaging gcm;
    static AtomicInteger msgId = new AtomicInteger();
    static SharedPreferences prefs;
    static Context context;
    static String regid, userid;
    static DatabaseWrangler dbh;
    static SQLiteDatabase db;

    /**
     * Returns the name of the first Google account configured on the device,
     * or {@code null} if the device has no Google account.
     *
     * @param that any Context used to obtain the AccountManager.
     */
    public static String getUsername(Context that) {
        // get user name magically from AccountManager
        AccountManager am = AccountManager.get(that); // "this" references the current Context
        Account[] accounts = am.getAccountsByType("com.google");
        if (accounts.length == 0) {
            // FIX: the previous code indexed accounts[0] unconditionally and
            // crashed with ArrayIndexOutOfBoundsException on devices that have
            // no Google account configured.
            Log.w(TAG, "No Google account found on this device.");
            return null;
        }
        String name = accounts[0].name;
        Log.i(TAG, "Nice to meet you, " + name);
        return name;
    }

    /**
     * Bootstraps GCM from an arbitrary Activity: opens the local database,
     * ensures a GCM registration id exists (registering in the background if
     * needed) and initializes the user id with the server on first run.
     */
    public static void initGCM(final Activity that) {
        context = that.getApplicationContext();
        dbh = new DatabaseWrangler(context);
        db = dbh.getWritableDatabase();
        dbh.create_tables();
        // Check device for Play Services APK. If check succeeds, proceed with
        // GCM registration.
        if (checkPlayServices(that)) {
            gcm = GoogleCloudMessaging.getInstance(that);
            regid = getRegistrationId(context);
            userid = Utils.getUserId(context);
            if (regid.equals("")) {
                registerInBackground(that.getApplicationContext());
            } else {
                Log.i(TAG, "saved gcm id: " + regid);
            }
            if (userid.equals("")) {
                // First run: ask the server for a user id, off the main thread.
                new AsyncTask<Void, Void, String>() {
                    @Override
                    protected String doInBackground(Void... params) {
                        JSONObject json = InternetUtils.json_request("http://" + context.getResources().getString(R.string.server) + "/polls/initialize/",
                                "name", getUsername(that),
                                "reg_id", regid);
                        try {
                            return json.getString("user_id");
                        } catch (JSONException e) {
                            return "The server is broke yo :(";
                        }
                    }
                    @Override
                    protected void onPostExecute(String result) {
                        userid = result;
                        Utils.setUserId(context, userid);
                        Log.i(TAG, "new user id: " + result);
                    }
                }.execute(null, null, null);
            } else {
                Log.i(TAG, "saved user id: " + userid);
            }
        } else {
            Log.e(TAG, "No valid Google Play Services APK found.");
        }
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);
        mDisplay = (TextView) findViewById(R.id.display);
        context = getApplicationContext();
        dbh = new DatabaseWrangler(context);
        db = dbh.getWritableDatabase();
        dbh.create_tables();
        // Check device for Play Services APK. If check succeeds, proceed with
        // GCM registration.
        if (checkPlayServices(this)) {
            gcm = GoogleCloudMessaging.getInstance(this);
            regid = getRegistrationId(context);
            userid = Utils.getUserId(context);
            if (regid.equals("")) {
                registerInBackground(getApplicationContext());
            } else {
                mDisplay.append("saved gcm id: " + regid + "\n");
            }
            if (userid.equals("")) {
                new AsyncTask<Void, Void, String>() {
                    @Override
                    protected String doInBackground(Void... params) {
                        JSONObject json = InternetUtils.json_request("http://" + getResources().getString(R.string.server) + "/polls/initialize/",
                                "name", "Reuven Rand",
                                "reg_id", regid);
                        try {
                            return json.getString("user_id");
                        } catch (JSONException e) {
                            return "The server is broke yo :(";
                        }
                    }
                    @Override
                    protected void onPostExecute(String result) {
                        userid = result;
                        Utils.setUserId(context, userid);
                        mDisplay.append("new user id: " + result + "\n");
                    }
                }.execute(null, null, null);
            } else {
                mDisplay.append("saved user id: " + userid + "\n");
            }
        } else {
            Log.i(TAG, "No valid Google Play Services APK found.");
        }
        Intent intent = getIntent();
        final Bundle extras = intent.getExtras();
        if (extras != null && !extras.isEmpty()) {
            final Bundle stuff = extras.getBundle("data");
            // FIX: `stuff` (and its "command" string) can be absent when this
            // Activity is launched without a GCM payload; the old code threw a
            // NullPointerException here. Yoda-equals tolerates a null command.
            if (stuff != null && "results".equals(stuff.getString("command"))) {
                new AsyncTask<Void, Void, String>() {
                    @Override
                    protected String doInBackground(Void... params) {
                        JSONObject json = InternetUtils.json_request("http://" + getResources().getString(R.string.server) + "/polls/request_results/",
                                "user_id", userid,
                                "poll_id", stuff.getString("poll_id"));
                        return json.toString();
                    }
                    @Override
                    protected void onPostExecute(String msg) {
                        mDisplay.append(msg + "\n");
                    }
                }.execute(null, null, null);
            }
        }
    }

    /** Dispatches button clicks from the debug UI (create / vote / clear / app). */
    public void onClick(final View view) {
        if (view == findViewById(R.id.create)) {
            new AsyncTask<Void, Void, String>() {
                @Override
                protected String doInBackground(Void... params) {
                    JSONObject json = InternetUtils.json_request("http://" + getResources().getString(R.string.server) + "/polls/new_poll/",
                            "user_id", userid,
                            "question", "What color is your parachute?",
                            "choices", "red|green|blue",
                            "pollees", "1|2");
                    return json.toString();
                }
                @Override
                protected void onPostExecute(String msg) {
                    mDisplay.append(msg + "\n");
                }
            }.execute(null, null, null);
        } else if (view == findViewById(R.id.vote)) {
            new AsyncTask<Void, Void, String>() {
                @Override
                protected String doInBackground(Void... params) {
                    JSONObject json = InternetUtils.json_request("http://" + getResources().getString(R.string.server) + "/polls/submit_vote/",
                            "user_id", userid,
                            "poll_id", "1",
                            "choice_id", "2");
                    return json.toString();
                }
                @Override
                protected void onPostExecute(String msg) {
                    mDisplay.append(msg + "\n");
                }
            }.execute(null, null, null);
        } else if (view == findViewById(R.id.clear)) {
            mDisplay.setText("");
            Utils.setUserId(context, "");
        } else if (view == findViewById(R.id.app)) {
            startActivity(new Intent(context, NewPollActivity.class));
        }
    }

    /**
     * Check the device to make sure it has the Google Play Services APK. If
     * it doesn't, display a dialog that allows users to download the APK from
     * the Google Play Store or enable it in the device's system settings.
     */
    public static boolean checkPlayServices(Activity that) {
        int resultCode = GooglePlayServicesUtil.isGooglePlayServicesAvailable(that);
        if (resultCode != ConnectionResult.SUCCESS) {
            if (GooglePlayServicesUtil.isUserRecoverableError(resultCode)) {
                // Recoverable: show the Play Services resolution dialog, but
                // still report false so the caller skips GCM for this launch.
                GooglePlayServicesUtil.getErrorDialog(resultCode, that,
                        PLAY_SERVICES_RESOLUTION_REQUEST).show();
            } else {
                Log.i(TAG, "This device is not supported.");
                return false;
            }
            return false;
        }
        return true;
    }

    /**
     * Gets the current registration ID for application on GCM service.
     * <p>
     * If result is empty, the app needs to register.
     *
     * @return registration ID, or empty string if there is no existing
     *         registration ID.
     */
    public static String getRegistrationId(Context context) {
        final SharedPreferences prefs = getGCMPreferences(context);
        String registrationId = prefs.getString(PROPERTY_REG_ID, "");
        if (registrationId.equals("")) {
            Log.i(TAG, "Registration not found.");
            return "";
        }
        // Check if app was updated; if so, it must clear the registration ID
        // since the existing regID is not guaranteed to work with the new
        // app version.
        int registeredVersion = prefs.getInt(PROPERTY_APP_VERSION, Integer.MIN_VALUE);
        int currentVersion = getAppVersion(context);
        if (registeredVersion != currentVersion) {
            Log.i(TAG, "App version changed.");
            return "";
        }
        return registrationId;
    }

    /**
     * @return Application's {@code SharedPreferences}.
     */
    public static SharedPreferences getGCMPreferences(Context context) {
        // This sample app persists the registration ID in shared preferences, but
        // how you store the regID in your app is up to you.
        return context.getSharedPreferences(GCMUtils.class.getSimpleName(),
                Context.MODE_PRIVATE);
    }

    /**
     * @return Application's version code from the {@code PackageManager}.
     */
    private static int getAppVersion(Context context) {
        try {
            PackageInfo packageInfo = context.getPackageManager()
                    .getPackageInfo(context.getPackageName(), 0);
            return packageInfo.versionCode;
        } catch (NameNotFoundException e) {
            // should never happen: we are asking about our own package
            throw new RuntimeException("Could not get package name: " + e);
        }
    }

    /**
     * Registers the application with GCM servers asynchronously.
     * <p>
     * Stores the registration ID and app versionCode in the application's
     * shared preferences.
     */
    public static void registerInBackground(final Context context) {
        new AsyncTask<Void, Void, String>() {
            @Override
            protected String doInBackground(Void... params) {
                String msg = "";
                try {
                    if (gcm == null) {
                        gcm = GoogleCloudMessaging.getInstance(context);
                    }
                    regid = gcm.register(SENDER_ID);
                    msg = "Device registered, registration ID=" + regid;
                    // Send the registration ID to the server so it can address
                    // this device via GCM.
                    // NOTE(review): this URL has no trailing slash while
                    // initGCM/onCreate use "/polls/initialize/"; confirm which
                    // form the server expects (a redirect may drop the POST).
                    JSONObject json = InternetUtils.json_request("http://" + context.getResources().getString(R.string.server) + "/polls/initialize",
                            "name", getUsername(context),
                            "reg_id", regid);
                    // Persist the regID - no need to register again.
                    storeRegistrationId(context, regid);
                } catch (IOException ex) {
                    msg = "Error :" + ex.getMessage();
                    // If there is an error, don't just keep trying to register.
                    // Require the user to click a button again, or perform
                    // exponential back-off.
                }
                return msg;
            }
            @Override
            protected void onPostExecute(String msg) {
                Log.i(TAG, msg);
            }
        }.execute(null, null, null);
    }

    /**
     * Stores the registration ID and app versionCode in the application's
     * {@code SharedPreferences}.
     *
     * @param context application's context.
     * @param regId registration ID
     */
    public static void storeRegistrationId(Context context, String regId) {
        final SharedPreferences prefs = getGCMPreferences(context);
        int appVersion = getAppVersion(context);
        Log.i(TAG, "Saving regId on app version " + appVersion);
        SharedPreferences.Editor editor = prefs.edit();
        editor.putString(PROPERTY_REG_ID, regId);
        editor.putInt(PROPERTY_APP_VERSION, appVersion);
        // FIX: apply() persists asynchronously instead of blocking the calling
        // thread; commit()'s boolean result was ignored anyway.
        editor.apply();
    }
}
|
|
package org.docksidestage.dockside.dbflute.whitebox.cbean.query;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.util.Calendar;
import java.util.Date;
import org.dbflute.cbean.coption.FromToOption;
import org.dbflute.cbean.result.ListResultBean;
import org.dbflute.cbean.scoping.AndQuery;
import org.dbflute.cbean.scoping.OrQuery;
import org.dbflute.exception.InvalidQueryRegisteredException;
import org.dbflute.helper.HandyDate;
import org.dbflute.util.Srl;
import org.docksidestage.dockside.dbflute.cbean.MemberCB;
import org.docksidestage.dockside.dbflute.exbhv.MemberBhv;
import org.docksidestage.dockside.dbflute.exentity.Member;
import org.docksidestage.dockside.unit.UnitContainerTestCase;
/**
* @author jflute
* @since 0.9.7.9 (2011/01/21 Friday)
*/
public class WxCBFromToTest extends UnitContainerTestCase {

    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    // NOTE(review): presumably injected by the unit container (field injection) -- confirm.
    private MemberBhv memberBhv;

    // ===================================================================================
    //                                                                               Plain
    //                                                                               =====
    // FromTo with an empty option callback: plain ">= from" and "<= to" comparison.
    public void test_FromTo_plain_basic() throws Exception {
        // ## Arrange ##
        Member updated = updateFormalizedDatetime("2011/11/18 12:34:56.789");
        Member member = memberBhv.selectEntityWithDeletedCheck(cb -> {
            /* ## Act ## */
            cb.query().setMemberId_Equal(updated.getMemberId());
            LocalDateTime fromDate = toLocalDateTime("2011/11/17 12:34:56.789");
            LocalDateTime toDate = toLocalDateTime("2011/11/19 02:04:06.009");
            cb.query().setFormalizedDatetime_FromTo(fromDate, toDate, op -> {});
            pushCB(cb);
        });
        // ## Assert ##
        String sql = popCB().toDisplaySql();
        log(ln() + sql);
        assertTrue(Srl.contains(sql, " >= '2011-11-17 12:34:56.789'"));
        assertTrue(Srl.contains(sql, " <= '2011-11-19 02:04:06.009'"));
        log(member.getFormalizedDatetime());
        assertEquals(updated.getFormalizedDatetime(), member.getFormalizedDatetime());
    }

    // ===================================================================================
    //                                                                                Year
    //                                                                                ====
    // compareAsYear(): both bounds are truncated to year begin; "to" becomes "< next year".
    public void test_FromTo_compareAsYear_basic() throws Exception {
        // ## Arrange ##
        Member updated = updateFormalizedDatetime("2011/12/30 12:34:56.789");
        Member member = memberBhv.selectEntityWithDeletedCheck(cb -> {
            /* ## Act ## */
            LocalDateTime fromDate = toLocalDateTime("2011/11/17 12:34:56.789");
            LocalDateTime toDate = toLocalDateTime("2011/11/19 02:04:06.009");
            cb.query().setFormalizedDatetime_FromTo(fromDate, toDate, op -> op.compareAsYear());
            pushCB(cb);
        });
        // ## Assert ##
        String sql = popCB().toDisplaySql();
        log(ln() + sql);
        assertTrue(Srl.contains(sql, " >= '2011-01-01 00:00:00.000'"));
        assertTrue(Srl.contains(sql, " < '2012-01-01 00:00:00.000'"));
        log(member.getFormalizedDatetime());
        assertEquals(updated.getFormalizedDatetime(), member.getFormalizedDatetime());
    }

    // ===================================================================================
    //                                                                               Month
    //                                                                               =====
    // compareAsMonth(): bounds truncated to month begin; "to" becomes "< next month".
    public void test_FromTo_compareAsMonth_basic() throws Exception {
        // ## Arrange ##
        Member updated = updateFormalizedDatetime("2011/11/29 23:34:56.789");
        Member member = memberBhv.selectEntityWithDeletedCheck(cb -> {
            /* ## Act ## */
            LocalDateTime fromDate = toLocalDateTime("2011/11/17 12:34:56.789");
            LocalDateTime toDate = toLocalDateTime("2011/11/19 02:04:06.009");
            cb.query().setFormalizedDatetime_FromTo(fromDate, toDate, op -> op.compareAsMonth());
            pushCB(cb);
        });
        // ## Assert ##
        String sql = popCB().toDisplaySql();
        log(ln() + sql);
        assertTrue(Srl.contains(sql, " >= '2011-11-01 00:00:00.000'"));
        assertTrue(Srl.contains(sql, " < '2011-12-01 00:00:00.000'"));
        log(member.getFormalizedDatetime());
        assertEquals(updated.getFormalizedDatetime(), member.getFormalizedDatetime());
    }

    // ===================================================================================
    //                                                                                Date
    //                                                                                ====
    // compareAsDate(): bounds truncated to day begin; "to" becomes "< next day".
    public void test_FromTo_compareAsDate_basic() throws Exception {
        // ## Arrange ##
        Member updated = updateFormalizedDatetime("2011/11/19 23:34:56.789");
        Member member = memberBhv.selectEntityWithDeletedCheck(cb -> {
            /* ## Act ## */
            LocalDateTime fromDate = toLocalDateTime("2011/11/17 12:34:56.789");
            LocalDateTime toDate = toLocalDateTime("2011/11/19 02:04:06.009");
            cb.query().setFormalizedDatetime_FromTo(fromDate, toDate, op -> op.compareAsDate());
            pushCB(cb);
        });
        // ## Assert ##
        String sql = popCB().toDisplaySql();
        log(ln() + sql);
        assertTrue(Srl.contains(sql, " >= '2011-11-17 00:00:00.000'"));
        assertTrue(Srl.contains(sql, " < '2011-11-20 00:00:00.000'"));
        log(member.getFormalizedDatetime());
        assertEquals(updated.getFormalizedDatetime(), member.getFormalizedDatetime());
    }

    // ===================================================================================
    //                                                                                Hour
    //                                                                                ====
    // compareAsHour(): bounds truncated to hour begin; "to" becomes "< next hour".
    public void test_FromTo_compareAsHour_basic() throws Exception {
        // ## Arrange ##
        Member updated = updateFormalizedDatetime("2011/11/17 18:34:56.789");
        Member member = memberBhv.selectEntityWithDeletedCheck(cb -> {
            /* ## Act ## */
            LocalDateTime fromDate = toLocalDateTime("2011/11/17 12:34:56.789");
            LocalDateTime toDate = toLocalDateTime("2011/11/17 18:04:06.009");
            cb.query().setFormalizedDatetime_FromTo(fromDate, toDate, op -> op.compareAsHour());
            pushCB(cb);
        });
        // ## Assert ##
        String sql = popCB().toDisplaySql();
        log(ln() + sql);
        assertTrue(Srl.contains(sql, " >= '2011-11-17 12:00:00.000'"));
        assertTrue(Srl.contains(sql, " < '2011-11-17 19:00:00.000'"));
        log(member.getFormalizedDatetime());
        assertEquals(updated.getFormalizedDatetime(), member.getFormalizedDatetime());
    }

    // ===================================================================================
    //                                                                                Week
    //                                                                                ====
    // compareAsWeek() with the week starting on the target's own day-of-week:
    // scope is [target 00:00, target+7d 00:00).
    public void test_FromTo_compareAsWeek_basic() throws Exception {
        // ## Arrange ##
        MemberCB cb = new MemberCB();
        LocalDateTime targetDate = toLocalDateTime("2011/11/17");
        cb.query().setFormalizedDatetime_FromTo(targetDate, targetDate, op -> op.compareAsWeek().beginWeek_DayOfWeek(targetDate));
        // ## Assert ##
        String sql = cb.toDisplaySql();
        log(ln() + sql);
        assertTrue(Srl.contains(sql, " >= '2011-11-17 00:00:00.000'"));
        assertTrue(Srl.contains(sql, " < '2011-11-24 00:00:00.000'"));
    }

    // Exercises every day of a multi-week range to cover all week-boundary cases.
    public void test_FromTo_compareAsWeek_beginWeek_moveToScope() throws Exception {
        doTest_FromTo_compareAsWeek_beginWeek_moveToScope(toUtilDate("2011/10/31"));
        doTest_FromTo_compareAsWeek_beginWeek_moveToScope(toUtilDate("2011/11/01"));
        doTest_FromTo_compareAsWeek_beginWeek_moveToScope(toUtilDate("2011/11/02"));
        doTest_FromTo_compareAsWeek_beginWeek_moveToScope(toUtilDate("2011/11/03"));
        doTest_FromTo_compareAsWeek_beginWeek_moveToScope(toUtilDate("2011/11/04"));
        doTest_FromTo_compareAsWeek_beginWeek_moveToScope(toUtilDate("2011/11/05"));
        doTest_FromTo_compareAsWeek_beginWeek_moveToScope(toUtilDate("2011/11/06"));
        doTest_FromTo_compareAsWeek_beginWeek_moveToScope(toUtilDate("2011/11/07"));
        doTest_FromTo_compareAsWeek_beginWeek_moveToScope(toUtilDate("2011/11/08"));
        doTest_FromTo_compareAsWeek_beginWeek_moveToScope(toUtilDate("2011/11/09"));
        doTest_FromTo_compareAsWeek_beginWeek_moveToScope(toUtilDate("2011/11/10"));
        doTest_FromTo_compareAsWeek_beginWeek_moveToScope(toUtilDate("2011/11/11"));
        doTest_FromTo_compareAsWeek_beginWeek_moveToScope(toUtilDate("2011/11/12"));
        doTest_FromTo_compareAsWeek_beginWeek_moveToScope(toUtilDate("2011/11/13"));
        doTest_FromTo_compareAsWeek_beginWeek_moveToScope(toUtilDate("2011/11/14"));
        doTest_FromTo_compareAsWeek_beginWeek_moveToScope(toUtilDate("2011/11/15"));
        doTest_FromTo_compareAsWeek_beginWeek_moveToScope(toUtilDate("2011/11/16"));
        doTest_FromTo_compareAsWeek_beginWeek_moveToScope(toUtilDate("2011/11/17"));
    }

    /**
     * Verifies compareAsWeek() + beginWeek_DayOfWeek2nd_Monday() + moveToScope(-1)
     * against an expectation computed independently with java.util.Calendar:
     * the previous Monday-based week containing the target date.
     */
    protected void doTest_FromTo_compareAsWeek_beginWeek_moveToScope(Date targetDate) {
        // ## Act ##
        FromToOption option = new FromToOption();
        option.compareAsWeek();
        option.beginWeek_DayOfWeek2nd_Monday();
        option.moveToScope(-1);
        Date fromDate = option.filterFromDate(targetDate);
        Date toDate = option.filterToDate(targetDate);
        // ## Assert ##
        String fmt = "yyyy/MM/dd HH:mm:ss.SSS";
        Calendar cal = Calendar.getInstance();
        cal.setTime(targetDate);
        // Calendar weeks start on SUNDAY: a Sunday already belongs to the next
        // Monday-based week, so step back one extra week for it first.
        if (cal.get(Calendar.DAY_OF_WEEK) == Calendar.SUNDAY) {
            cal.add(Calendar.WEEK_OF_MONTH, -1);
        }
        // Mirror moveToScope(-1): shift to the previous week, then pin Monday.
        cal.add(Calendar.WEEK_OF_MONTH, -1);
        cal.set(Calendar.DAY_OF_WEEK, Calendar.MONDAY);
        Date expectedFromDate = cal.getTime();
        // End of scope = the day after that week's Saturday+1 (exclusive bound).
        cal.set(Calendar.DAY_OF_WEEK, Calendar.SATURDAY);
        cal.add(Calendar.DATE, 1);
        Date expectedEndDate = new HandyDate(cal.getTime()).addDay(1).getDate();
        log("[" + toString(targetDate, fmt) + "]");
        log(toString(expectedFromDate, fmt) + " = " + toString(fromDate, fmt));
        log(toString(expectedEndDate, fmt) + " = " + toString(toDate, fmt));
        log("");
        assertEquals(expectedFromDate, fromDate);
        assertEquals(expectedEndDate, toDate);
    }

    // ===================================================================================
    //                                                                           Either-Or
    //                                                                           =========
    // allowOneSide() permits a one-sided range: from-only keeps only the ">=" clause.
    public void test_DateFromTo_eitherOr_from() {
        // ## Arrange ##
        MemberCB cb = new MemberCB();
        cb.query().setBirthdate_FromTo(toLocalDate("2011-01-21"), null, op -> op.compareAsDate().allowOneSide());
        // ## Assert ##
        assertTrue(cb.hasWhereClauseOnBaseQuery());
        String sql = cb.toDisplaySql();
        log(ln() + sql);
        assertTrue(Srl.contains(sql, " >= '2011-01-21'"));
    }

    // to-only keeps only the exclusive "<" clause (next day because of compareAsDate).
    public void test_DateFromTo_eitherOr_to() {
        // ## Arrange ##
        MemberCB cb = new MemberCB();
        cb.query().setBirthdate_FromTo(null, toLocalDate("2011-01-21"), op -> op.compareAsDate().allowOneSide());
        // ## Assert ##
        assertTrue(cb.hasWhereClauseOnBaseQuery());
        String sql = cb.toDisplaySql();
        log(ln() + sql);
        assertTrue(Srl.contains(sql, " < '2011-01-22'")); // added
    }

    // ===================================================================================
    //                                                                            No Query
    //                                                                            ========
    // NOTE(review): this "basic" case is identical to test_DateFromTo_noQuery_allowOneSide
    // below; presumably it was meant to call compareAsDate() WITHOUT allowOneSide() --
    // confirm the intended difference.
    public void test_DateFromTo_noQuery_basic() {
        // ## Arrange ##
        MemberCB cb = new MemberCB();
        // ## Act ##
        try {
            cb.query().setBirthdate_FromTo(null, null, op -> op.compareAsDate().allowOneSide());
            fail();
        } catch (InvalidQueryRegisteredException e) {
            log(e.getMessage());
        }
        // ## Assert ##
        assertFalse(cb.hasWhereClauseOnBaseQuery());
    }

    // Even with allowOneSide(), both bounds null is an invalid (empty) query.
    public void test_DateFromTo_noQuery_allowOneSide() {
        // ## Arrange ##
        MemberCB cb = new MemberCB();
        // ## Act ##
        try {
            cb.query().setBirthdate_FromTo(null, null, op -> op.compareAsDate().allowOneSide());
            fail();
        } catch (InvalidQueryRegisteredException e) {
            log(e.getMessage());
        }
        // ## Assert ##
        assertFalse(cb.hasWhereClauseOnBaseQuery());
    }

    // ignoreNullOrEmptyQuery() downgrades the invalid query to a silent no-op.
    public void test_DateFromTo_noQuery_ignore() {
        // ## Arrange ##
        MemberCB cb = new MemberCB();
        cb.ignoreNullOrEmptyQuery();
        // ## Act ##
        cb.query().setBirthdate_FromTo(null, null, op -> op.compareAsDate()); // expects no exception
        // ## Assert ##
        assertFalse(cb.hasWhereClauseOnBaseQuery());
    }

    // ===================================================================================
    //                                                                            OrIsNull
    //                                                                            ========
    // orIsNull(): each range condition is OR-ed with "column is null".
    public void test_FromTo_orIsNull_greaterEqual() throws Exception {
        // ## Arrange ##
        HandyDate date = new HandyDate(toLocalDate("1970/01/01"));
        int countAll = memberBhv.selectCount(cb -> {});
        // ## Act ##
        ListResultBean<Member> memberList = memberBhv.selectList(cb -> {
            cb.query().setBirthdate_FromTo(date.getLocalDate(), null, op -> op.orIsNull().allowOneSide());
            pushCB(cb);
        });
        // ## Assert ##
        assertHasAnyElement(memberList);
        boolean existsGreaterEqual = false;
        boolean existsIsNull = false;
        for (Member member : memberList) {
            LocalDate birthdate = member.getBirthdate();
            if (birthdate == null) {
                existsIsNull = true;
            } else if (date.isLessEqual(birthdate)) {
                existsGreaterEqual = true;
            } else {
                fail(toString(birthdate, "yyyy/MM/dd"));
            }
        }
        assertTrue(existsGreaterEqual);
        assertTrue(existsIsNull);
        assertTrue(memberList.size() < countAll);
        String sql = popCB().toDisplaySql();
        log(ln() + sql);
        assertTrue(Srl.contains(sql, "where (dfloc.BIRTHDATE >= '1970-01-01' or dfloc.BIRTHDATE is null)"));
    }

    public void test_FromTo_orIsNull_lessEqual() throws Exception {
        // ## Arrange ##
        HandyDate date = new HandyDate(toLocalDate("1970/01/01"));
        int countAll = memberBhv.selectCount(cb -> {});
        // ## Act ##
        ListResultBean<Member> memberList = memberBhv.selectList(cb -> {
            cb.query().setBirthdate_FromTo(null, date.getLocalDate(), op -> op.orIsNull().allowOneSide());
            pushCB(cb);
        });
        // ## Assert ##
        assertHasAnyElement(memberList);
        boolean existsLessEqual = false;
        boolean existsIsNull = false;
        for (Member member : memberList) {
            LocalDate birthdate = member.getBirthdate();
            if (birthdate == null) {
                existsIsNull = true;
            } else if (date.isGreaterEqual(birthdate)) {
                existsLessEqual = true;
            } else {
                fail(toString(birthdate, "yyyy/MM/dd"));
            }
        }
        assertTrue(existsLessEqual);
        assertTrue(existsIsNull);
        assertTrue(memberList.size() < countAll);
        String sql = popCB().toDisplaySql();
        log(ln() + sql);
        assertTrue(Srl.contains(sql, "where (dfloc.BIRTHDATE <= '1970-01-01' or dfloc.BIRTHDATE is null)"));
    }

    // orIsNull() combined with strict bounds on both sides.
    public void test_FromTo_orIsNull_bothThan() throws Exception {
        // ## Arrange ##
        HandyDate date = new HandyDate(toLocalDate("1970/01/01"));
        MemberCB cb = new MemberCB();
        // ## Act ##
        cb.query().setBirthdate_FromTo(date.getLocalDate(), date.getLocalDate(), op -> op.orIsNull().greaterThan().lessThan());
        // ## Assert ##
        String sql = cb.toDisplaySql();
        log(ln() + sql);
        assertTrue(Srl.contains(sql, "where (dfloc.BIRTHDATE > '1970-01-01' or dfloc.BIRTHDATE is null)"));
        assertTrue(Srl.contains(sql, " and (dfloc.BIRTHDATE < '1970-01-01' or dfloc.BIRTHDATE is null)"));
    }

    public void test_DateFromTo_orIsNull() throws Exception {
        // ## Arrange ##
        HandyDate date = new HandyDate(toLocalDate("1970/01/01"));
        MemberCB cb = new MemberCB();
        // ## Act ##
        cb.query().setBirthdate_FromTo(date.getLocalDate(), date.getLocalDate(), op -> op.compareAsDate().orIsNull());
        // ## Assert ##
        String sql = cb.toDisplaySql();
        log(ln() + sql);
        assertTrue(Srl.contains(sql, "where (dfloc.BIRTHDATE >= '1970-01-01' or dfloc.BIRTHDATE is null)"));
        assertTrue(Srl.contains(sql, " and (dfloc.BIRTHDATE < '1970-01-02' or dfloc.BIRTHDATE is null)"));
    }

    // orIsNull() inside orScopeQuery/andPart keeps the grouping parentheses intact.
    public void test_FromTo_orIsNull_orScopeQuery() throws Exception {
        // ## Arrange ##
        final HandyDate date = new HandyDate(toLocalDate("1970/01/01"));
        MemberCB cb = new MemberCB();
        cb.orScopeQuery(new OrQuery<MemberCB>() {
            public void query(MemberCB orCB) {
                orCB.query().setBirthdate_FromTo(null, date.getLocalDate(), op -> op.orIsNull().greaterThan().lessThan().allowOneSide());
                orCB.orScopeQueryAndPart(new AndQuery<MemberCB>() {
                    public void query(MemberCB andCB) {
                        andCB.query().setMemberId_Equal(3);
                        andCB.query().setFormalizedDatetime_FromTo(date.getLocalDateTime(), null,
                                op -> op.orIsNull().greaterThan().lessThan().allowOneSide());
                    }
                });
            }
        });
        String sql = cb.toDisplaySql();
        // ## Assert ##
        log(ln() + sql);
        assertTrue(Srl.contains(sql, "where ((dfloc.BIRTHDATE < '1970-01-01' or dfloc.BIRTHDATE is null)"));
        assertTrue(Srl.contains(sql, "or (dfloc.MEMBER_ID = 3 and (dfloc.FORMALIZED_DATETIME > '"));
        assertTrue(Srl.contains(sql, "FORMALIZED_DATETIME > '1970-01-01 00:00:00.000' or dfloc.FORMALIZED_DATETIME is null))"));
    }

    // ===================================================================================
    //                                                                       Assist Helper
    //                                                                       =============
    /**
     * Sets the formalized datetime of one formalized member to the given value
     * and returns the updated entity (test fixture preparation).
     *
     * @param exp datetime expression in "yyyy/MM/dd HH:mm:ss.SSS" form.
     * @return the updated member entity.
     */
    protected Member updateFormalizedDatetime(String exp) {
        Member member = memberBhv.selectEntityWithDeletedCheck(cb -> {
            cb.query().setMemberStatusCode_Equal_Formalized();
            cb.fetchFirst(1);
        });
        member.setFormalizedDatetime(toLocalDateTime(exp));
        memberBhv.updateNonstrict(member);
        return member;
    }
}
|
|
/* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package java.util;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
/**
* PriorityQueue holds elements on a priority heap, which orders elements
* according to the comparator specified at construction or their natural order.
* If the queue uses natural order, any element that is not comparable is not
* permitted to insert to the queue.
*
* The least element of the specified ordering is stored at the head of the
* queue and the greatest element is stored at the tail of the queue.
*
* PriorityQueue is not synchronized. If multiple threads will access it
* concurrently, use the PriorityBlockingQueue.
*/
public class PriorityQueue<E> extends AbstractQueue<E> implements Serializable {
private static final long serialVersionUID = -7720805057305804111L;
private static final int DEFAULT_CAPACITY = 11;
private static final double DEFAULT_INIT_CAPACITY_RATIO = 1.1;
private static final int DEFAULT_CAPACITY_RATIO = 2;
private int size;
private Comparator<? super E> comparator;
private transient E[] elements;
/**
* Constructs a priority queue with the capacity of 11 and natural ordering.
*/
public PriorityQueue() {
this(DEFAULT_CAPACITY);
}
    /**
     * Constructs a priority queue with the specified capacity and natural ordering.
     *
     * @param initialCapacity the specified capacity.
     * @throws IllegalArgumentException if the initialCapacity is less than 1
     */
    public PriorityQueue(int initialCapacity) {
        // Capacity validation happens in the delegated two-argument constructor.
        this(initialCapacity, null);
    }
/**
* Constructs a priority queue with specified capacity and comparator.
*
* @param initialCapacity the specified capacity.
* @param comparator the specified comparator. If it is null, the natural
* ordering will be used.
* @throws IllegalArgumentException if the initialCapacity is less than 1
*/
public PriorityQueue(int initialCapacity, Comparator<? super E> comparator) {
if (initialCapacity < 1) {
throw new IllegalArgumentException();
}
elements = newElementArray(initialCapacity);
this.comparator = comparator;
}
/**
* Constructs a priority queue that contains the elements of a collection.
* The constructed priority queue has the initial capacity of 110% the
* collection. And the priority queue uses natural ordering to order its
* elements.
*
* @param c the collection whose elements will be added to the priority
* queue to be constructed.
* @throws ClassCastException if any of the elements in the collection is
* not comparable.
* @throws NullPointerExcepiton if any of the elements in the collection is
* null.
*/
public PriorityQueue(Collection<? extends E> c) {
if (c instanceof PriorityQueue) {
getFromPriorityQueue((PriorityQueue<? extends E>) c);
} else if (c instanceof SortedSet) {
getFromSortedSet((SortedSet<? extends E>) c);
} else {
initSize(c);
addAll(c);
}
}
/**
* Constructs a priority queue that contains the elements of another
* priority queue. The constructed priority queue has the initial capacity
* of 110% the latter one. And the two priority queue has the same
* comparator.
*
* @param c the priority queue whose elements will be added to the priority
* queue to be constructed.
*/
public PriorityQueue(PriorityQueue<? extends E> c) {
getFromPriorityQueue(c);
}
/**
* Constructs a priority queue that contains the elements of a sorted set.
* The constructed priority queue has the initial capacity of 110% the
* sorted set. And the priority queue has the same comparator of the sorted
* set.
*
* @param c the sorted set whose elements will be added to the priority
* queue to be constructed.
*/
public PriorityQueue(SortedSet<? extends E> c) {
getFromSortedSet(c);
}
/**
* Gets the iterator of the priority queue, which will not return elements
* in any specified ordering.
*
* @return the iterator of the priority queue.
*/
@Override
public Iterator<E> iterator() {
return new PriorityIterator();
}
/**
* Gets the size of the priority queue. If the size of the queue is greater
* than the Integer.MAX, then it returns Integer.MAX.
*
* @return the size of the priority queue.
*/
@Override
public int size() {
return size;
}
/**
* Removes all the elements of the priority queue.
*/
@Override
public void clear() {
Arrays.fill(elements, null);
size = 0;
}
/**
* Inserts the element to the priority queue.
*
* @return true
* @throws ClassCastException if the element cannot be compared with the
* elements in the priority queue using the ordering of the priority
* queue.
* @throws NullPointerExcepiton if the element is null.
*/
public boolean offer(E o) {
if (null == o) {
throw new NullPointerException();
}
growToSize(size + 1);
elements[size] = o;
siftUp(size++);
return true;
}
/**
* Gets and removes the head of the queue.
*
* @return the head of the queue. Null if the queue is empty.
*/
public E poll() {
if (isEmpty()) {
return null;
}
E result = elements[0];
removeAt(0);
return result;
}
/**
* Gets but not removes the head of the queue.
*
* @return the head of the queue. Null if the queue is empty.
*/
public E peek() {
if (isEmpty()) {
return null;
}
return elements[0];
}
/**
* Gets the comparator of the priority queue.
*
* @return the comparator of the priority queue. Null if the natural
* ordering is used.
*/
public Comparator<? super E> comparator() {
return comparator;
}
/**
* Removes the specified object of the priority queue.
*
* @param o the object to be removed.
* @return true if the object is in the priority queue, false if the object
* is not in the priority queue.
*/
@Override
@SuppressWarnings("unchecked")
public boolean remove(Object o) {
if (o == null) {
return false;
}
int targetIndex;
for (targetIndex = 0; targetIndex < size; targetIndex++) {
if (0 == this.compare((E) o, elements[targetIndex])) {
break;
}
}
if (size == 0 || size == targetIndex) {
return false;
}
removeAt(targetIndex);
return true;
}
/**
* Adds the specified object to the priority queue.
*
* @param o the object to be added.
* @return true.
* @throws ClassCastException if the element cannot be compared with the
* elements in the priority queue using the ordering of the priority
* queue.
* @throws NullPointerExcepiton if the element is null.
*/
@Override
public boolean add(E o) {
return offer(o);
}
private class PriorityIterator implements Iterator<E> {
private int currentIndex = -1;
private boolean allowRemove = false;
public boolean hasNext() {
return currentIndex < size - 1;
}
public E next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
allowRemove = true;
return elements[++currentIndex];
}
public void remove() {
if (!allowRemove) {
throw new IllegalStateException();
}
allowRemove = false;
removeAt(currentIndex--);
}
}
@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream in) throws IOException,
ClassNotFoundException {
in.defaultReadObject();
int capacity = in.readInt();
elements = newElementArray(capacity);
for (int i = 0; i < size; i++) {
elements[i] = (E) in.readObject();
}
}
@SuppressWarnings("unchecked")
private E[] newElementArray(int capacity) {
return (E[]) new Object[capacity];
}
private void writeObject(ObjectOutputStream out) throws IOException {
out.defaultWriteObject();
out.writeInt(elements.length);
for (int i = 0; i < size; i++) {
out.writeObject(elements[i]);
}
}
@SuppressWarnings("unchecked")
private void getFromPriorityQueue(PriorityQueue<? extends E> c) {
initSize(c);
comparator = (Comparator<? super E>) c.comparator();
System.arraycopy(c.elements, 0, elements, 0, c.size());
size = c.size();
}
@SuppressWarnings("unchecked")
private void getFromSortedSet(SortedSet<? extends E> c) {
initSize(c);
comparator = (Comparator<? super E>) c.comparator();
Iterator<? extends E> iter = c.iterator();
while (iter.hasNext()) {
elements[size++] = iter.next();
}
}
private void removeAt(int index) {
size--;
elements[index] = elements[size];
siftDown(index);
elements[size] = null;
}
private int compare(E o1, E o2) {
if (null != comparator) {
return comparator.compare(o1, o2);
}
return ((Comparable<? super E>) o1).compareTo(o2);
}
private void siftUp(int childIndex) {
E target = elements[childIndex];
int parentIndex;
while (childIndex > 0) {
parentIndex = (childIndex - 1) / 2;
E parent = elements[parentIndex];
if (compare(parent, target) <= 0) {
break;
}
elements[childIndex] = parent;
childIndex = parentIndex;
}
elements[childIndex] = target;
}
private void siftDown(int rootIndex) {
E target = elements[rootIndex];
int childIndex;
while ((childIndex = rootIndex * 2 + 1) < size) {
if (childIndex + 1 < size
&& compare(elements[childIndex + 1], elements[childIndex]) < 0) {
childIndex++;
}
if (compare(target, elements[childIndex]) <= 0) {
break;
}
elements[rootIndex] = elements[childIndex];
rootIndex = childIndex;
}
elements[rootIndex] = target;
}
private void initSize(Collection<? extends E> c) {
if (null == c) {
throw new NullPointerException();
}
if (c.isEmpty()) {
elements = newElementArray(1);
} else {
int capacity = (int) Math.ceil(c.size()
* DEFAULT_INIT_CAPACITY_RATIO);
elements = newElementArray(capacity);
}
}
private void growToSize(int size) {
if (size > elements.length) {
E[] newElements = newElementArray(size * DEFAULT_CAPACITY_RATIO);
System.arraycopy(elements, 0, newElements, 0, elements.length);
elements = newElements;
}
}
}
|
|
package yahoofinance;
import java.io.UnsupportedEncodingException;
import java.math.BigDecimal;
import java.net.URLEncoder;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TimeZone;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author Stijn Strickx
*/
public class Utils {
private static final Logger log = LoggerFactory.getLogger(Utils.class);
public static final BigDecimal HUNDRED = new BigDecimal(100);
public static final BigDecimal THOUSAND = new BigDecimal(1000);
public static final BigDecimal MILLION = new BigDecimal(1000000);
public static final BigDecimal BILLION = new BigDecimal(1000000000);
public static String join(String[] data, String d) {
if (data.length == 0) {
return "";
}
StringBuilder sb = new StringBuilder();
int i;
for (i = 0; i < (data.length - 1); i++) {
sb.append(data[i]).append(d);
}
return sb.append(data[i]).toString();
}
private static String cleanNumberString(String data) {
return Utils.join(data.trim().split(","), "");
}
private static boolean isParseable(String data) {
return !(data == null || data.equals("N/A") || data.equals("-")
|| data.equals("") || data.equals("nan"));
}
public static String getString(String data) {
if(!Utils.isParseable(data)) {
return null;
}
return data;
}
public static BigDecimal getBigDecimal(String data) {
BigDecimal result = null;
if (!Utils.isParseable(data)) {
return result;
}
try {
data = Utils.cleanNumberString(data);
char lastChar = data.charAt(data.length() - 1);
BigDecimal multiplier = BigDecimal.ONE;
switch (lastChar) {
case 'B':
data = data.substring(0, data.length() - 1);
multiplier = BILLION;
break;
case 'M':
data = data.substring(0, data.length() - 1);
multiplier = MILLION;
break;
case 'K':
data = data.substring(0, data.length() - 1);
multiplier = THOUSAND;
break;
}
result = new BigDecimal(data).multiply(multiplier);
} catch (NumberFormatException e) {
log.warn("Failed to parse: " + data);
log.debug("Failed to parse: " + data, e);
}
return result;
}
public static BigDecimal getBigDecimal(String dataMain, String dataSub) {
BigDecimal main = getBigDecimal(dataMain);
BigDecimal sub = getBigDecimal(dataSub);
if(main == null || main.compareTo(BigDecimal.ZERO) == 0) {
return sub;
}
return main;
}
public static double getDouble(String data) {
double result = Double.NaN;
if (!Utils.isParseable(data)) {
return result;
}
try {
data = Utils.cleanNumberString(data);
char lastChar = data.charAt(data.length() - 1);
int multiplier = 1;
switch (lastChar) {
case 'B':
data = data.substring(0, data.length() - 1);
multiplier = 1000000000;
break;
case 'M':
data = data.substring(0, data.length() - 1);
multiplier = 1000000;
break;
case 'K':
data = data.substring(0, data.length() - 1);
multiplier = 1000;
break;
}
result = Double.parseDouble(data) * multiplier;
} catch (NumberFormatException e) {
log.warn("Failed to parse: " + data);
log.debug("Failed to parse: " + data, e);
}
return result;
}
public static Integer getInt(String data) {
Integer result = null;
if (!Utils.isParseable(data)) {
return result;
}
try {
data = Utils.cleanNumberString(data);
result = Integer.parseInt(data);
} catch (NumberFormatException e) {
log.warn("Failed to parse: " + data);
log.debug("Failed to parse: " + data, e);
}
return result;
}
public static Long getLong(String data) {
Long result = null;
if (!Utils.isParseable(data)) {
return result;
}
try {
data = Utils.cleanNumberString(data);
result = Long.parseLong(data);
} catch (NumberFormatException e) {
log.warn("Failed to parse: " + data);
log.debug("Failed to parse: " + data, e);
}
return result;
}
public static BigDecimal getPercent(BigDecimal numerator, BigDecimal denominator) {
if (denominator == null || numerator == null || denominator.compareTo(BigDecimal.ZERO) == 0) {
return BigDecimal.ZERO;
}
return numerator.divide(denominator, 4, BigDecimal.ROUND_HALF_EVEN)
.multiply(HUNDRED).setScale(2, BigDecimal.ROUND_HALF_EVEN);
}
public static double getPercent(double numerator, double denominator) {
if (denominator == 0) {
return 0;
}
return (numerator / denominator) * 100;
}
private static String getDividendDateFormat(String date) {
if (date.matches("[0-9][0-9]-...-[0-9][0-9]")) {
return "dd-MMM-yy";
} else if (date.matches("[0-9]-...-[0-9][0-9]")) {
return "d-MMM-yy";
} else if (date.matches("...[ ]+[0-9]+")) {
return "MMM d";
} else {
return "M/d/yy";
}
}
/**
* Used to parse the dividend dates. Returns null if the date cannot be
* parsed.
*
* @param date String received that represents the date
* @return Calendar object representing the parsed date
*/
public static Calendar parseDividendDate(String date) {
if (!Utils.isParseable(date)) {
return null;
}
date = date.trim();
SimpleDateFormat format = new SimpleDateFormat(Utils.getDividendDateFormat(date), Locale.US);
format.setTimeZone(TimeZone.getTimeZone(YahooFinance.TIMEZONE));
try {
Calendar today = Calendar.getInstance(TimeZone.getTimeZone(YahooFinance.TIMEZONE));
Calendar parsedDate = Calendar.getInstance(TimeZone.getTimeZone(YahooFinance.TIMEZONE));
parsedDate.setTime(format.parse(date));
if (parsedDate.get(Calendar.YEAR) == 1970) {
// Not really clear which year the dividend date is... making a reasonable guess.
int monthDiff = parsedDate.get(Calendar.MONTH) - today.get(Calendar.MONTH);
int year = today.get(Calendar.YEAR);
if (monthDiff > 6) {
year -= 1;
} else if (monthDiff < -6) {
year += 1;
}
parsedDate.set(Calendar.YEAR, year);
}
return parsedDate;
} catch (ParseException ex) {
log.warn("Failed to parse dividend date: " + date);
log.debug("Failed to parse dividend date: " + date, ex);
return null;
}
}
/**
* Used to parse the last trade date / time. Returns null if the date / time
* cannot be parsed.
*
* @param date String received that represents the date
* @param time String received that represents the time
* @param timeZone time zone to use for parsing the date time
* @return Calendar object with the parsed datetime
*/
public static Calendar parseDateTime(String date, String time, TimeZone timeZone) {
String datetime = date + " " + time;
SimpleDateFormat format = new SimpleDateFormat("M/d/yyyy h:mma", Locale.US);
format.setTimeZone(timeZone);
try {
if (Utils.isParseable(date) && Utils.isParseable(time)) {
Calendar c = Calendar.getInstance();
c.setTime(format.parse(datetime));
return c;
}
} catch (ParseException ex) {
log.warn("Failed to parse datetime: " + datetime);
log.debug("Failed to parse datetime: " + datetime, ex);
}
return null;
}
public static Calendar parseHistDate(String date) {
SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd", Locale.US);
try {
if (Utils.isParseable(date)) {
Calendar c = Calendar.getInstance();
c.setTime(format.parse(date));
return c;
}
} catch (ParseException ex) {
log.warn("Failed to parse hist date: " + date);
log.debug("Failed to parse hist date: " + date, ex);
}
return null;
}
public static Calendar unixToCalendar(long timestamp) {
log.debug("unixToCalendar " + timestamp);
Calendar calendar = Calendar.getInstance();
calendar.setTimeInMillis(timestamp * 1000);
return calendar;
}
public static String getURLParameters(Map<String, String> params) {
StringBuilder sb = new StringBuilder();
for (Entry<String, String> entry : params.entrySet()) {
if (sb.length() > 0) {
sb.append("&");
}
String key = entry.getKey();
String value = entry.getValue();
try {
key = URLEncoder.encode(key, "UTF-8");
value = URLEncoder.encode(value, "UTF-8");
} catch (UnsupportedEncodingException ex) {
log.error(ex.getMessage(), ex);
// Still try to continue with unencoded values
}
sb.append(String.format("%s=%s", key, value));
}
return sb.toString();
}
/**
* Strips the unwanted chars from a line returned in the CSV
* Used for parsing the FX CSV lines
*
* @param line the original CSV line
* @return the stripped line
*/
public static String stripOverhead(String line) {
return line.replaceAll("\"", "");
}
public static String unescape(String data) {
StringBuilder buffer = new StringBuilder(data.length());
for (int i = 0; i < data.length(); i++) {
if ((int) data.charAt(i) > 256) {
buffer.append("\\u").append(Integer.toHexString(data.charAt(i)));
} else {
if (data.charAt(i) == '\n') {
buffer.append("\\n");
} else if (data.charAt(i) == '\t') {
buffer.append("\\t");
} else if (data.charAt(i) == '\r') {
buffer.append("\\r");
} else if (data.charAt(i) == '\b') {
buffer.append("\\b");
} else if (data.charAt(i) == '\f') {
buffer.append("\\f");
} else if (data.charAt(i) == '\'') {
buffer.append("\\'");
} else if (data.charAt(i) == '\"') {
buffer.append("\\\"");
} else if (data.charAt(i) == '\\') {
buffer.append("\\\\");
} else {
buffer.append(data.charAt(i));
}
}
}
return buffer.toString();
}
}
|
|
package org.grobid.core.annotations;
import java.io.ByteArrayInputStream;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.util.Iterator;
import java.util.Set;
import java.util.TreeSet;
import java.util.Vector;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLEventFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.events.Attribute;
import javax.xml.stream.events.Characters;
import javax.xml.stream.events.EndElement;
import javax.xml.stream.events.StartElement;
import javax.xml.stream.events.XMLEvent;
import org.apache.commons.io.output.ByteArrayOutputStream;
import org.custommonkey.xmlunit.XMLTestCase;
import org.grobid.core.engines.patent.ReferenceExtractor;
import org.grobid.core.factory.AbstractEngineFactory;
import org.grobid.core.mock.MockContext;
import org.grobid.core.utilities.XMLWriter;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.xml.sax.SAXException;
public class TeiStAXParserTest extends XMLTestCase {
public static void setInitialContext() throws Exception {
MockContext.setInitialContext();
AbstractEngineFactory.init();
}
public static void destroyInitialContext() throws Exception {
MockContext.destroyInitialContext();
}
@Before
public void setUp() throws Exception {
setInitialContext();
}
@After
public void tearDown() throws Exception {
destroyInitialContext();
}
@Test
public void testTeiStAXParser3Args() throws IOException {
final TeiStAXParser parser = new TeiStAXParser(createInputStream("input"), createOutputStream("output"), false);
assertEquals("The inputStream value of the parser should be 'input'", "input", fromInputStreamToString(parser.inputStream));
assertEquals("The outputStream value of the parser should be 'output'", "output", fromOutputStreamToString(parser.outputStream));
}
@Test
public void testTeiStAXParser4Args() throws IOException {
final TeiStAXParser parser = new TeiStAXParser(createInputStream("input"), createOutputStream("output"), true, false);
assertEquals("The inputStream value of the parser should be 'input'", "input", fromInputStreamToString(parser.inputStream));
assertEquals("The outputStream value of the parser should be 'output'", "output", fromOutputStreamToString(parser.outputStream));
assertEquals("The isIndented value of the parser should be true", true, parser.isIndented);
assertEquals("The isSelfInstanceRefExtractor value of the parser should be true", true, parser.isSelfInstanceRefExtractor);
}
@Test
public void testTeiStAXParser5Args() throws IOException {
final ReferenceExtractor refExtr = new ReferenceExtractor();
refExtr.currentPatentNumber = "patNb";
final String input = "<tag id=\"tId\">input</tag>";
TeiStAXParser parser = new TeiStAXParser(createInputStream(input), createOutputStream("output"), false, refExtr, false);
assertEquals("The inputStream value of the parser should be '" + input + "'", input, fromInputStreamToString(parser.inputStream));
assertEquals("The outputStream value of the parser should be 'output'", "output", fromOutputStreamToString(parser.outputStream));
assertEquals("The isIndented value of the parser should be false", false, parser.isIndented);
assertEquals("The isSelfInstanceRefExtractor value of the parser should be false", false, parser.isSelfInstanceRefExtractor);
assertEquals("The extractor.currentPatentNumber value of the parser should be 'patNb'", "patNb",
parser.extractor.currentPatentNumber);
assertNotNull("currTEIParsedInfo should not be null", parser.currTEIParsedInfo);
assertNotNull("teiBuffer should not be null", parser.teiBuffer);
assertNotNull("headerAnnotation should n ot be null", parser.headerAnnotation);
// escape the START_DOCUMENT event
parser.reader.next();
final XMLEvent event = (XMLEvent) parser.reader.next();
assertEquals("the tag value should be 'tag'", "tag", event.asStartElement().getName().getLocalPart());
final Attribute attr = event.asStartElement().getAttributeByName(getQName("", "id", ""));
assertEquals("the id should be 'tId'", "tId", attr.getValue());
}
@Test
public void testParse() throws IOException, XMLStreamException, SAXException {
final String input = "<tag id=\"tId\"> <div type=\"abstract\" xml:id=\"_cc53f64\" xml:lang=\"en\" subtype=\"docdba\">some text</div> </tag>";
final TeiStAXParser parser = new TeiStAXParser(createInputStream(input), createOutputStream(""), false);
parser.parse();
assertXMLEqual("The 2 xml should be identical", input, fromOutputStreamToString(parser.outputStream));
}
@Test
public void testWriteInTeiBufferStart() throws IOException, XMLStreamException, SAXException {
final TeiStAXParser parser = new TeiStAXParser(createInputStream(""), createOutputStream(""), false);
final QName qName = getQName("", "tag", "");
final Iterator<Attribute> attributes = getAttributes(createAttribute("attr", "val"));
final StartElement start = createStartElement(qName, attributes, null);
parser.writeInTeiBufferStart(start);
assertEquals("The teiBuffer value should be <tag attr=\"val\">", "<tag attr=\"val\">", parser.teiBuffer.toString());
}
@Test
public void testWriteInTeiBufferCharacters() throws IOException, XMLStreamException, SAXException {
final TeiStAXParser parser = new TeiStAXParser(createInputStream(""), createOutputStream(""), false);
final String content = "some content";
parser.writeInTeiBufferCharacters(createCharacters(content));
assertEquals("The teiBuffer value should be " + content, content, parser.teiBuffer.toString());
}
@Test
public void testWriteInTeiBufferEnd() throws IOException, XMLStreamException, SAXException {
final TeiStAXParser parser = new TeiStAXParser(createInputStream(""), createOutputStream(""), false);
final QName qName = getQName("", "endTag", "");
parser.writeInTeiBufferEnd(createEndElement(qName, null));
assertEquals("The teiBuffer value should be </endTag>", "</endTag>", parser.teiBuffer.toString());
}
@Test
public void testWriteInTeiBufferRaw() throws IOException, XMLStreamException, SAXException {
final TeiStAXParser parser = new TeiStAXParser(createInputStream(""), createOutputStream(""), false);
final String content = "some content";
parser.writeInTeiBufferRaw(content);
assertEquals("The teiBuffer value should be " + content, content, parser.teiBuffer.toString());
}
@Test
public void testParseNotSelfRefExtractor() throws UnsupportedEncodingException, IOException, XMLStreamException, SAXException {
final String input = "<teiCorpus id=\"tId\"> <div type=\"claims\"><p>some paragraph</p></div> </teiCorpus>";
final TeiStAXParser parser = new TeiStAXParser(createInputStream(input), createOutputStream(""), false);
parser.isSelfInstanceRefExtractor = false;
parser.parse();
assertXMLEqual("The 2 xml should be identical", input, fromOutputStreamToString(parser.outputStream));
}
@Test
public void testParseDescription() throws UnsupportedEncodingException, IOException, XMLStreamException, SAXException {
final String input = "<teiCorpus id=\"tId\"> <div type=\"description\"><p>some paragraph</p></div> </teiCorpus>";
final TeiStAXParser parser = new TeiStAXParser(createInputStream(input), createOutputStream(""), false, false);
parser.parse();
assertXMLEqual("The 2 xml should be identical", input, fromOutputStreamToString(parser.outputStream));
}
@Test
public void testParseDescription2() throws UnsupportedEncodingException, IOException, XMLStreamException, SAXException {
final String input = "<teiCorpus id=\"tId\"> <div type=\"description\"><p>some paragraph</p><p>some text <sep> " <br clear=\"none\" /> </p></div> </teiCorpus>";
final TeiStAXParser parser = new TeiStAXParser(createInputStream(input), createOutputStream(""), false, false);
parser.parse();
assertXMLEqual("The 2 xml should be identical", XMLWriter.formatXML(input),
XMLWriter.formatXML(fromOutputStreamToString(parser.outputStream)));
}
@Test
public void testParseDescriptionTagInsideP() throws UnsupportedEncodingException, IOException, XMLStreamException, SAXException {
final String input = "<teiCorpus id=\"tId\"> <div type=\"description\"><p>some paragraph</p><p>some text <sep> " <someTag>text inside tag</someTag> </p></div> </teiCorpus>";
final TeiStAXParser parser = new TeiStAXParser(createInputStream(input), createOutputStream(""), false, false);
parser.parse();
assertXMLEqual("The 2 xml should be identical", XMLWriter.formatXML(input),
XMLWriter.formatXML(fromOutputStreamToString(parser.outputStream)));
}
@Test
public void testParseNoDescription() throws UnsupportedEncodingException, IOException, XMLStreamException, SAXException {
final String input = "<teiCorpus id=\"tId\"> <div type=\"noDescription\"><p>some paragraph</p></div> </teiCorpus>";
final TeiStAXParser parser = new TeiStAXParser(createInputStream(input), createOutputStream(""), false, false);
parser.parse();
assertXMLEqual("The 2 xml should be identical", input, fromOutputStreamToString(parser.outputStream));
}
@Test
public void testParseStartTEI() throws UnsupportedEncodingException, IOException, XMLStreamException, SAXException {
final String input = "<teiCorpus id=\"tId\"> <TEI><teiHeader><notesStmt><notes>some element</notes></notesStmt></teiHeader><div type=\"description\"><p>some paragraph</p></div></TEI> </teiCorpus>";
final TeiStAXParser parser = new TeiStAXParser(createInputStream(input), createOutputStream(""), false, false);
parser.parse();
assertXMLEqual("The 2 xml should be identical", input, fromOutputStreamToString(parser.outputStream));
}
@Test
public void testParseNotesStmt() throws UnsupportedEncodingException, IOException, XMLStreamException, SAXException {
final String input = "<teiCorpus id=\"tId\"><TEI><teiHeader><notesStmt><notes>some element</notes></notesStmt></teiHeader></TEI></teiCorpus>";
final TeiStAXParser parser = new TeiStAXParser(createInputStream(input), createOutputStream(""), false, false);
parser.parse();
assertXMLEqual("The 2 xml should be identical", input, fromOutputStreamToString(parser.outputStream));
}
@Test
public void testParserOnFullTEI() throws XMLStreamException, IOException {
ReferenceExtractor extractor = new ReferenceExtractor();
OutputStream out;
TeiStAXParser stax;
out = getOutputStreamFromFile("src/test/resources/org/grobid/core/annotations/resTeiStAXParser/out.tei.xml");
out = System.out;
// ByteArrayOutputStream baos = new ByteArrayOutputStream();
stax = new TeiStAXParser(
getInputStreamFromFile("src/test/resources/org/grobid/core/annotations/resTeiStAXParser/sample-4.tei.xml"),
out, true,
extractor, false);
stax.parse();
}
private static FileInputStream getInputStreamFromFile(final String pFileName) throws FileNotFoundException {
return new FileInputStream(pFileName);
}
private static FileOutputStream getOutputStreamFromFile(final String pFileName) throws FileNotFoundException {
return new FileOutputStream(pFileName);
}
private InputStream createInputStream(final String str) throws java.io.UnsupportedEncodingException {
return new ByteArrayInputStream(str.getBytes("UTF-8"));
}
private OutputStream createOutputStream(final String str) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
out.write(str.getBytes());
return out;
}
private String fromInputStreamToString(final InputStream input) throws IOException {
input.reset();
byte[] bytes = new byte[input.available()];
input.read(bytes);
return new String(bytes);
}
private String fromOutputStreamToString(final OutputStream output) throws IOException {
output.toString();
return output.toString();
}
private static QName getQName(final String namespaceURI, final String localPart, final String prefix) {
return new QName(namespaceURI, localPart, prefix);
}
private static StartElement createStartElement(final QName name, final Iterator<?> attributes, final Iterator<?> namespaces) {
final XMLEventFactory eventFactory = XMLEventFactory.newInstance();
return eventFactory.createStartElement(name, attributes, namespaces);
}
private static Attribute createAttribute(final String localName, final String value) {
final XMLEventFactory eventFactory = XMLEventFactory.newInstance();
return eventFactory.createAttribute(localName, value);
}
private static Iterator<Attribute> getAttributes(final Attribute... pAttr) {
Vector<Attribute> attributes = new Vector<Attribute>();
for (final Attribute attr : pAttr) {
attributes.add(attr);
}
return attributes.iterator();
}
private static Characters createCharacters(final String content) {
final XMLEventFactory eventFactory = XMLEventFactory.newInstance();
return eventFactory.createCharacters(content);
}
private static EndElement createEndElement(final QName name, final Iterator<?> namespaces) {
final XMLEventFactory eventFactory = XMLEventFactory.newInstance();
return eventFactory.createEndElement(name, namespaces);
}
}
|
|
/**
* Copyright 2017 The GreyCat Authors. All rights reserved.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package greycat.internal.heap;
import greycat.Constants;
import greycat.internal.CoreConstants;
import greycat.struct.Buffer;
import greycat.struct.StringArray;
import greycat.utility.Base64;
final class HeapStringArray implements StringArray {
private String[] _backend = null;
private final HeapContainer _parent;
/**
 * Creates an empty, unallocated array owned by {@code parent}. Call
 * init/initWith before set (set throws until a backing array exists).
 */
HeapStringArray(final HeapContainer parent) {
    this._parent = parent;
}
/**
 * Returns the value at {@code index}, or null when no backing array has
 * been allocated yet.
 *
 * @throws RuntimeException when index is past the end of the array
 */
@Override
public final synchronized String get(int index) {
    if (_backend == null) {
        return null;
    }
    if (index >= _backend.length) {
        throw new RuntimeException("Array Out of Bounds");
    }
    return _backend[index];
}
/**
 * Stores {@code value} at {@code index} and marks the owning container
 * dirty.
 *
 * @throws RuntimeException when the array was never allocated or the
 *         index is out of range
 */
@Override
public final synchronized void set(int index, String value) {
    final boolean allocated = _backend != null && index < _backend.length;
    if (!allocated) {
        throw new RuntimeException("allocate first!");
    }
    _backend[index] = value;
    _parent.declareDirty();
}
/** Returns the allocated length, or 0 when no backing array exists. */
@Override
public final synchronized int size() {
    return (_backend == null) ? 0 : _backend.length;
}
/**
 * Replaces any existing content with a fresh array of {@code size} null
 * slots and marks the owning container dirty.
 */
@Override
public final synchronized void init(int size) {
    _backend = new String[size];
    _parent.declareDirty();
}
/**
 * Drops the backing array entirely (size() becomes 0) and marks the
 * owning container dirty.
 */
@Override
public final synchronized void clear() {
    _backend = null;
    _parent.declareDirty();
}
/**
 * Replaces the content with a defensive copy of {@code values}, so later
 * mutation of the caller's array cannot affect this one, and marks the
 * owning container dirty.
 */
@Override
public synchronized final void initWith(final String[] values) {
    _backend = values.clone();
    _parent.declareDirty();
}
/**
 * Returns a copy of the current content; an empty array when nothing has
 * been allocated. The caller may freely mutate the result.
 */
@Override
public final synchronized String[] extract() {
    if (_backend == null) {
        return new String[0];
    }
    return _backend.clone();
}
/**
 * Removes the first occurrence of {@code value} and marks the container
 * dirty.
 *
 * @param value the value to remove (may be null, matching a null slot)
 * @return true when a matching element was found and removed
 */
@Override
public final synchronized boolean removeElement(String value) {
    if (_backend == null) {
        return false;
    }
    for (int i = 0; i < _backend.length; i++) {
        final String current = _backend[i];
        // Null-safe comparison: set(...) allows storing null, and the
        // previous revision threw NullPointerException on the first
        // null slot it scanned.
        if (current == value || (current != null && current.equals(value))) {
            removeElementByIndexInternal(i);
            return true;
        }
    }
    return false;
}
/**
 * Removes the element at {@code index}.
 *
 * @return true on success; false when the array is unallocated or the
 *         index is out of range
 */
@Override
public final synchronized boolean removeElementbyIndex(int index) {
    final boolean inRange = _backend != null && index >= 0 && index < _backend.length;
    if (inRange) {
        removeElementByIndexInternal(index);
    }
    return inRange;
}
// Rebuilds the backing array without slot `index` (callers have already
// range-checked) and marks the owning container dirty.
private void removeElementByIndexInternal(int index) {
    final int newLength = _backend.length - 1;
    final String[] shrunk = new String[newLength];
    System.arraycopy(_backend, 0, shrunk, 0, index);
    System.arraycopy(_backend, index + 1, shrunk, index, newLength - index);
    _backend = shrunk;
    _parent.declareDirty();
}
@Override
public final synchronized StringArray addElement(String value) {
    if (_backend == null) {
        // First element ever: allocate a one-slot array directly.
        _backend = new String[]{value};
    } else {
        // Grow by one slot and append at the end.
        final int oldLength = _backend.length;
        String[] grown = new String[oldLength + 1];
        System.arraycopy(_backend, 0, grown, 0, oldLength);
        grown[oldLength] = value;
        _backend = grown;
    }
    _parent.declareDirty();
    return this;
}
@Override
public final synchronized boolean insertElementAt(int position, String value) {
    // Insertion is only allowed into an existing slot range; appending past
    // the end is addElement's job, and an unallocated array accepts nothing.
    if (_backend == null || position < 0 || position >= _backend.length) {
        return false;
    }
    String[] widened = new String[_backend.length + 1];
    // Prefix, the new element, then everything from 'position' shifted right.
    System.arraycopy(_backend, 0, widened, 0, position);
    widened[position] = value;
    System.arraycopy(_backend, position, widened, position + 1, _backend.length - position);
    _backend = widened;
    _parent.declareDirty();
    return true;
}
/**
 * Replaces the first occurrence of {@code element} with {@code value}.
 * Null-safe: a stored null slot no longer triggers a NullPointerException
 * during the scan, and {@code element == null} matches the first null slot.
 *
 * @param element element to search for (may be null)
 * @param value   replacement value
 * @return true if a match was found and replaced, false otherwise
 */
@Override
public final synchronized boolean replaceElementby(String element, String value) {
    if (_backend == null) {
        return false;
    }
    for (int i = 0; i < _backend.length; i++) {
        String candidate = _backend[i];
        // Null-safe equality: the old candidate.equals(element) NPE'd on null slots.
        if (candidate == null ? element == null : candidate.equals(element)) {
            _backend[i] = value;
            _parent.declareDirty();
            return true;
        }
    }
    return false;
}
@Override
public final synchronized void addAll(String[] values) {
    if (_backend == null) {
        // Nothing allocated yet: behaves exactly like a fresh initWith
        // (which also marks the parent dirty).
        initWith(values);
        return;
    }
    final int oldLength = _backend.length;
    String[] merged = new String[oldLength + values.length];
    System.arraycopy(_backend, 0, merged, 0, oldLength);
    System.arraycopy(values, 0, merged, oldLength, values.length);
    _backend = merged;
    _parent.declareDirty();
}
/**
 * Serializes this array into the buffer: the element count first, then each
 * element prefixed by a value separator, all Base64-encoded. An unallocated
 * array is written as a bare count of 0.
 * NOTE(review): save() writes CoreConstants.CHUNK_VAL_SEP while load() scans
 * for Constants.CHUNK_VAL_SEP — presumably the two constants share the same
 * byte value; confirm, otherwise round-tripping breaks.
 */
public final void save(final Buffer buffer) {
    if (_backend != null) {
        Base64.encodeIntToBuffer(_backend.length, buffer);
        for (int j = 0; j < _backend.length; j++) {
            // Each element is preceded by its separator byte.
            buffer.write(CoreConstants.CHUNK_VAL_SEP);
            Base64.encodeStringToBuffer(_backend[j], buffer);
        }
    } else {
        Base64.encodeIntToBuffer(0, buffer);
    }
}
/**
 * Deserializes this array from {@code buffer} starting at {@code offset}.
 * Wire format: a Base64 element count, then each element preceded by a
 * CHUNK_VAL_SEP byte; the payload ends at a CHUNK_SEP, a BLOCK_CLOSE, or
 * the {@code max} bound.
 *
 * @param buffer source buffer
 * @param offset position of the first payload byte
 * @param max    exclusive upper bound for reading
 * @return the cursor position where scanning stopped
 */
public final long load(final Buffer buffer, final long offset, final long max) {
    long cursor = offset;
    byte current = buffer.read(cursor);
    boolean isFirst = true;   // true until the leading element count is parsed
    long previous = offset;   // start of the token currently being scanned
    int elemIndex = 0;        // next slot to fill in _backend
    while (cursor < max && current != Constants.CHUNK_SEP && current != Constants.BLOCK_CLOSE) {
        if (current == Constants.CHUNK_VAL_SEP) {
            if (isFirst) {
                // The first token is the element count: allocate the backing array.
                _backend = new String[Base64.decodeToIntWithBounds(buffer, previous, cursor)];
                isFirst = false;
            } else {
                _backend[elemIndex] = Base64.decodeToStringWithBounds(buffer, previous, cursor);
                elemIndex++;
            }
            previous = cursor + 1;
        }
        cursor++;
        if (cursor < max) {
            current = buffer.read(cursor);
        }
    }
    // Flush the trailing token: either the bare count (empty array) or the
    // final element, which has no separator after it.
    if (isFirst) {
        _backend = new String[Base64.decodeToIntWithBounds(buffer, previous, cursor)];
    } else {
        _backend[elemIndex] = Base64.decodeToStringWithBounds(buffer, previous, cursor);
    }
    return cursor;
}
// Builds a deep copy of this array bound to a different parent container.
final HeapStringArray cloneFor(HeapContainer target) {
    final HeapStringArray copy = new HeapStringArray(target);
    if (_backend != null) {
        // initWith performs its own defensive copy of the backing array.
        copy.initWith(_backend);
    }
    return copy;
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tajo.master.querymaster;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.event.Event;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.state.*;
import org.apache.hadoop.yarn.util.Records;
import org.apache.tajo.ExecutionBlockId;
import org.apache.tajo.QueryIdFactory;
import org.apache.tajo.QueryUnitId;
import org.apache.tajo.catalog.*;
import org.apache.tajo.catalog.proto.CatalogProtos;
import org.apache.tajo.catalog.statistics.ColumnStats;
import org.apache.tajo.catalog.statistics.StatisticsUtil;
import org.apache.tajo.catalog.statistics.TableStats;
import org.apache.tajo.conf.TajoConf;
import org.apache.tajo.engine.planner.PlannerUtil;
import org.apache.tajo.engine.planner.global.DataChannel;
import org.apache.tajo.engine.planner.global.ExecutionBlock;
import org.apache.tajo.engine.planner.global.MasterPlan;
import org.apache.tajo.engine.planner.logical.GroupbyNode;
import org.apache.tajo.engine.planner.logical.NodeType;
import org.apache.tajo.engine.planner.logical.ScanNode;
import org.apache.tajo.engine.planner.logical.StoreTableNode;
import org.apache.tajo.ipc.TajoMasterProtocol;
import org.apache.tajo.master.*;
import org.apache.tajo.master.TaskRunnerGroupEvent.EventType;
import org.apache.tajo.master.event.*;
import org.apache.tajo.master.event.QueryUnitAttemptScheduleEvent.QueryUnitAttemptScheduleContext;
import org.apache.tajo.storage.AbstractStorageManager;
import org.apache.tajo.storage.fragment.FileFragment;
import java.io.IOException;
import java.net.URI;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import static org.apache.tajo.conf.TajoConf.ConfVars;
import static org.apache.tajo.ipc.TajoWorkerProtocol.ShuffleType;
/**
* SubQuery plays a role in controlling an ExecutionBlock and is a finite state machine.
*/
public class SubQuery implements EventHandler<SubQueryEvent> {
private static final Log LOG = LogFactory.getLog(SubQuery.class);
// Plan context: the global plan, the execution block this SubQuery drives,
// and its YARN scheduling priority.
private MasterPlan masterPlan;
private ExecutionBlock block;
private int priority;
// Output description, filled in by finalizeStats() when the block finishes.
private Schema schema;
private TableMeta meta;
private TableStats resultStatistics;
private TableStats inputStatistics;
private EventHandler<Event> eventHandler;
private final AbstractStorageManager sm;
private AbstractTaskScheduler taskScheduler;
private QueryMasterTask.QueryMasterTaskContext context;
// Human-readable diagnostics; guarded by readLock/writeLock (see below).
private final List<String> diagnostics = new ArrayList<String>();
private long startTime;
private long finishTime;
// Live task and container registries; concurrent maps because they are
// touched from scheduler/event threads as well as under the state lock.
volatile Map<QueryUnitId, QueryUnit> tasks = new ConcurrentHashMap<QueryUnitId, QueryUnit>();
volatile Map<ContainerId, Container> containers = new ConcurrentHashMap<ContainerId, Container>();
// Stateless transition singletons shared by all SubQuery state machines.
private static final DiagnosticsUpdateTransition DIAGNOSTIC_UPDATE_TRANSITION = new DiagnosticsUpdateTransition();
private static final InternalErrorTransition INTERNAL_ERROR_TRANSITION = new InternalErrorTransition();
private static final ContainerLaunchTransition CONTAINER_LAUNCH_TRANSITION = new ContainerLaunchTransition();
private static final TaskCompletedTransition TASK_COMPLETED_TRANSITION = new TaskCompletedTransition();
private static final AllocatedContainersCancelTransition CONTAINERS_CANCEL_TRANSITION =
new AllocatedContainersCancelTransition();
private static final SubQueryCompleteTransition SUBQUERY_COMPLETED_TRANSITION =
new SubQueryCompleteTransition();
// Per-instance state machine built from the shared factory below.
private StateMachine<SubQueryState, SubQueryEventType, SubQueryEvent> stateMachine;
// Declarative lifecycle of a SubQuery:
//   NEW -> INITED -> RUNNING -> {SUCCEEDED | FAILED | KILLED}
// with KILL_WAIT as the graceful-kill staging state and ERROR reachable
// from anywhere via SQ_INTERNAL_ERROR. Shared by all instances; frozen by
// installTopology().
protected static final StateMachineFactory<SubQuery, SubQueryState,
SubQueryEventType, SubQueryEvent> stateMachineFactory =
new StateMachineFactory <SubQuery, SubQueryState,
SubQueryEventType, SubQueryEvent> (SubQueryState.NEW)
// Transitions from NEW state
.addTransition(SubQueryState.NEW,
EnumSet.of(SubQueryState.INITED, SubQueryState.ERROR, SubQueryState.SUCCEEDED),
SubQueryEventType.SQ_INIT,
new InitAndRequestContainer())
.addTransition(SubQueryState.NEW, SubQueryState.NEW,
SubQueryEventType.SQ_DIAGNOSTIC_UPDATE,
DIAGNOSTIC_UPDATE_TRANSITION)
.addTransition(SubQueryState.NEW, SubQueryState.KILLED,
SubQueryEventType.SQ_KILL)
.addTransition(SubQueryState.NEW, SubQueryState.ERROR,
SubQueryEventType.SQ_INTERNAL_ERROR,
INTERNAL_ERROR_TRANSITION)
// Transitions from INITED state
.addTransition(SubQueryState.INITED, SubQueryState.RUNNING,
SubQueryEventType.SQ_CONTAINER_ALLOCATED,
CONTAINER_LAUNCH_TRANSITION)
.addTransition(SubQueryState.INITED, SubQueryState.INITED,
SubQueryEventType.SQ_DIAGNOSTIC_UPDATE,
DIAGNOSTIC_UPDATE_TRANSITION)
.addTransition(SubQueryState.INITED, SubQueryState.KILL_WAIT,
SubQueryEventType.SQ_KILL)
.addTransition(SubQueryState.INITED, SubQueryState.ERROR,
SubQueryEventType.SQ_INTERNAL_ERROR,
INTERNAL_ERROR_TRANSITION)
// Transitions from RUNNING state
.addTransition(SubQueryState.RUNNING, SubQueryState.RUNNING,
SubQueryEventType.SQ_CONTAINER_ALLOCATED,
CONTAINER_LAUNCH_TRANSITION)
.addTransition(SubQueryState.RUNNING, SubQueryState.RUNNING,
SubQueryEventType.SQ_TASK_COMPLETED,
TASK_COMPLETED_TRANSITION)
.addTransition(SubQueryState.RUNNING,
EnumSet.of(SubQueryState.SUCCEEDED, SubQueryState.FAILED),
SubQueryEventType.SQ_SUBQUERY_COMPLETED,
SUBQUERY_COMPLETED_TRANSITION)
.addTransition(SubQueryState.RUNNING, SubQueryState.RUNNING,
SubQueryEventType.SQ_FAILED,
TASK_COMPLETED_TRANSITION)
.addTransition(SubQueryState.RUNNING, SubQueryState.RUNNING,
SubQueryEventType.SQ_DIAGNOSTIC_UPDATE,
DIAGNOSTIC_UPDATE_TRANSITION)
.addTransition(SubQueryState.RUNNING, SubQueryState.KILL_WAIT,
SubQueryEventType.SQ_KILL,
new KillTasksTransition())
.addTransition(SubQueryState.RUNNING, SubQueryState.ERROR,
SubQueryEventType.SQ_INTERNAL_ERROR,
INTERNAL_ERROR_TRANSITION)
// Ignore-able Transition
.addTransition(SubQueryState.RUNNING, SubQueryState.RUNNING,
SubQueryEventType.SQ_START)
// Transitions from KILL_WAIT state
.addTransition(SubQueryState.KILL_WAIT, SubQueryState.KILL_WAIT,
SubQueryEventType.SQ_CONTAINER_ALLOCATED,
CONTAINERS_CANCEL_TRANSITION)
.addTransition(SubQueryState.KILL_WAIT, SubQueryState.KILL_WAIT,
EnumSet.of(SubQueryEventType.SQ_KILL))
.addTransition(SubQueryState.KILL_WAIT, SubQueryState.KILL_WAIT,
SubQueryEventType.SQ_TASK_COMPLETED,
TASK_COMPLETED_TRANSITION)
.addTransition(SubQueryState.KILL_WAIT,
EnumSet.of(SubQueryState.SUCCEEDED, SubQueryState.FAILED, SubQueryState.KILLED),
SubQueryEventType.SQ_SUBQUERY_COMPLETED,
SUBQUERY_COMPLETED_TRANSITION)
.addTransition(SubQueryState.KILL_WAIT, SubQueryState.KILL_WAIT,
SubQueryEventType.SQ_DIAGNOSTIC_UPDATE,
DIAGNOSTIC_UPDATE_TRANSITION)
.addTransition(SubQueryState.KILL_WAIT, SubQueryState.KILL_WAIT,
SubQueryEventType.SQ_FAILED,
TASK_COMPLETED_TRANSITION)
.addTransition(SubQueryState.KILL_WAIT, SubQueryState.ERROR,
SubQueryEventType.SQ_INTERNAL_ERROR,
INTERNAL_ERROR_TRANSITION)
// Transitions from SUCCEEDED state
.addTransition(SubQueryState.SUCCEEDED, SubQueryState.SUCCEEDED,
SubQueryEventType.SQ_CONTAINER_ALLOCATED,
CONTAINERS_CANCEL_TRANSITION)
.addTransition(SubQueryState.SUCCEEDED, SubQueryState.SUCCEEDED,
SubQueryEventType.SQ_DIAGNOSTIC_UPDATE,
DIAGNOSTIC_UPDATE_TRANSITION)
.addTransition(SubQueryState.SUCCEEDED, SubQueryState.ERROR,
SubQueryEventType.SQ_INTERNAL_ERROR,
INTERNAL_ERROR_TRANSITION)
// Ignore-able events
// NOTE(review): SQ_CONTAINER_ALLOCATED from SUCCEEDED is registered twice
// (with the cancel transition above and as ignorable here) — confirm which
// one StateMachineFactory keeps; duplicates usually silently override.
.addTransition(SubQueryState.SUCCEEDED, SubQueryState.SUCCEEDED,
EnumSet.of(
SubQueryEventType.SQ_START,
SubQueryEventType.SQ_KILL,
SubQueryEventType.SQ_CONTAINER_ALLOCATED))
// Transitions from FAILED state
.addTransition(SubQueryState.FAILED, SubQueryState.FAILED,
SubQueryEventType.SQ_CONTAINER_ALLOCATED,
CONTAINERS_CANCEL_TRANSITION)
.addTransition(SubQueryState.FAILED, SubQueryState.FAILED,
SubQueryEventType.SQ_DIAGNOSTIC_UPDATE,
DIAGNOSTIC_UPDATE_TRANSITION)
.addTransition(SubQueryState.FAILED, SubQueryState.ERROR,
SubQueryEventType.SQ_INTERNAL_ERROR,
INTERNAL_ERROR_TRANSITION)
// Ignore-able transitions
.addTransition(SubQueryState.FAILED, SubQueryState.FAILED,
EnumSet.of(
SubQueryEventType.SQ_START,
SubQueryEventType.SQ_KILL,
SubQueryEventType.SQ_CONTAINER_ALLOCATED,
SubQueryEventType.SQ_FAILED))
// Transitions from ERROR state
.addTransition(SubQueryState.ERROR, SubQueryState.ERROR,
SubQueryEventType.SQ_CONTAINER_ALLOCATED,
CONTAINERS_CANCEL_TRANSITION)
.addTransition(SubQueryState.ERROR, SubQueryState.ERROR,
SubQueryEventType.SQ_DIAGNOSTIC_UPDATE,
DIAGNOSTIC_UPDATE_TRANSITION)
// Ignore-able transitions
.addTransition(SubQueryState.ERROR, SubQueryState.ERROR,
EnumSet.of(
SubQueryEventType.SQ_START,
SubQueryEventType.SQ_KILL,
SubQueryEventType.SQ_FAILED,
SubQueryEventType.SQ_INTERNAL_ERROR))
.installTopology();
// Guard reads/writes of the state machine and the progress counters below
// (both halves of one ReentrantReadWriteLock created in the constructor).
private final Lock readLock;
private final Lock writeLock;
// Total number of schedulable objects (tasks/fragments) in this block,
// fixed at init time; the counters below track completion progress.
private int totalScheduledObjectsCount;
private int succeededObjectCount = 0;
private int completedTaskCount = 0;
private int succeededTaskCount = 0;
private int killedObjectCount = 0;
private int failedObjectCount = 0;
private TaskSchedulerContext schedulerContext;
/**
 * Creates a sub query controlling one execution block.
 *
 * @param context    owning query master task context (event handler, clock, conf)
 * @param masterPlan the global distributed execution plan
 * @param block      the execution block this instance drives
 * @param sm         storage manager used to compute input splits
 */
public SubQuery(QueryMasterTask.QueryMasterTaskContext context, MasterPlan masterPlan, ExecutionBlock block, AbstractStorageManager sm) {
    this.context = context;
    this.masterPlan = masterPlan;
    this.block = block;
    this.sm = sm;
    this.eventHandler = context.getEventHandler();
    // One read-write lock pair protects both the state machine and counters.
    ReadWriteLock readWriteLock = new ReentrantReadWriteLock();
    this.readLock = readWriteLock.readLock();
    this.writeLock = readWriteLock.writeLock();
    stateMachine = stateMachineFactory.make(this);
}
/**
 * A sub query counts as "running" while it is still progressing: freshly
 * created (NEW), initialized (INITED), or actively RUNNING.
 */
public static boolean isRunningState(SubQueryState state) {
    switch (state) {
    case NEW:
    case INITED:
    case RUNNING:
        return true;
    default:
        return false;
    }
}
/** Returns the owning query master task context. */
public QueryMasterTask.QueryMasterTaskContext getContext() {
    return context;
}
/** Returns the global distributed execution plan. */
public MasterPlan getMasterPlan() {
    return masterPlan;
}
/** Returns the first outgoing data channel of this execution block. */
public DataChannel getDataChannel() {
    return masterPlan.getOutgoingChannels(getId()).iterator().next();
}
/** Returns the query master's event handler. */
public EventHandler<Event> getEventHandler() {
    return eventHandler;
}
/** Returns the task scheduler; null until the SQ_INIT transition creates it. */
public AbstractTaskScheduler getTaskScheduler() {
    return taskScheduler;
}
/** Stamps the start time from the context clock (called on SQ_INIT). */
public void setStartTime() {
    startTime = context.getClock().getTime();
}
@SuppressWarnings("UnusedDeclaration")
public long getStartTime() {
    return this.startTime;
}
/** Stamps the finish time from the context clock (called on completion/abort). */
public void setFinishTime() {
    finishTime = context.getClock().getTime();
}
@SuppressWarnings("UnusedDeclaration")
public long getFinishTime() {
    return this.finishTime;
}
/**
 * Returns the fraction of scheduled objects that have succeeded, in [0, 1].
 * Returns 0 while the sub query is still NEW, and also when nothing has
 * been scheduled yet — previously a zero-task block (which completes with
 * totalScheduledObjectsCount == 0) made this divide 0/0 and return NaN.
 */
public float getTaskProgress() {
    readLock.lock();
    try {
        if (getState() == SubQueryState.NEW || totalScheduledObjectsCount == 0) {
            return 0;
        }
        return (float) (succeededObjectCount) / (float) totalScheduledObjectsCount;
    } finally {
        readLock.unlock();
    }
}
/**
 * Returns the average progress over all tasks, in [0, 1]. The task list is
 * snapshotted under the read lock; the per-task progress is then summed
 * lock-free. Returns 0 while NEW, and also when there are no tasks at all —
 * previously an empty task map made this divide 0/0 and return NaN.
 */
public float getProgress() {
    List<QueryUnit> tempTasks;
    readLock.lock();
    try {
        if (getState() == SubQueryState.NEW) {
            return 0;
        }
        tempTasks = new ArrayList<QueryUnit>(tasks.values());
    } finally {
        readLock.unlock();
    }
    if (tempTasks.isEmpty()) {
        // No tasks (e.g. a union-only block): avoid dividing by zero below.
        return 0;
    }
    float totalProgress = 0.0f;
    for (QueryUnit eachQueryUnit : tempTasks) {
        // A task with no attempt yet contributes zero progress.
        if (eachQueryUnit.getLastAttempt() != null) {
            totalProgress += eachQueryUnit.getLastAttempt().getProgress();
        }
    }
    return totalProgress / (float) tempTasks.size();
}
/** Number of scheduled objects that finished successfully so far. */
public int getSucceededObjectCount() {
    return succeededObjectCount;
}
/** Total number of objects scheduled for this block (fixed at init). */
public int getTotalScheduledObjectsCount() {
    return totalScheduledObjectsCount;
}
/** Returns the execution block this sub query drives. */
public ExecutionBlock getBlock() {
    return block;
}
/** Registers a task in the live task map, keyed by its id. */
public void addTask(QueryUnit task) {
    tasks.put(task.getId(), task);
}
/**
 * It finalizes this subquery. It is only invoked when the subquery is succeeded.
 * Cleans up scheduler/containers, aggregates final statistics, stamps the
 * finish time, and notifies the query of successful completion.
 */
public void complete() {
    cleanup();
    finalizeStats();
    setFinishTime();
    eventHandler.handle(new SubQueryCompletedEvent(getId(), SubQueryState.SUCCEEDED));
}
/**
 * It finalizes this subquery. Unlike {@link SubQuery#complete()},
 * it is invoked when a subquery is abnormally finished.
 * No statistics are aggregated on the abnormal path.
 *
 * @param finalState The final subquery state
 */
public void abort(SubQueryState finalState) {
    // TODO -
    // - committer.abortSubQuery(...)
    // - record SubQuery Finish Time
    // - CleanUp Tasks
    // - Record History
    cleanup();
    setFinishTime();
    eventHandler.handle(new SubQueryCompletedEvent(getId(), finalState));
}
/** Returns the lifecycle state machine of this sub query. */
public StateMachine<SubQueryState, SubQueryEventType, SubQueryEvent> getStateMachine() {
    return this.stateMachine;
}
/** Sets the YARN container-request priority for this block. */
public void setPriority(int priority) {
    this.priority = priority;
}
public int getPriority() {
    return this.priority;
}
/** Returns the storage manager used to compute input splits. */
public AbstractStorageManager getStorageManager() {
    return sm;
}
/** The id of this sub query is the id of its execution block. */
public ExecutionBlockId getId() {
    return block.getId();
}
/** Snapshot of all tasks as an array. */
public QueryUnit[] getQueryUnits() {
    return tasks.values().toArray(new QueryUnit[tasks.size()]);
}
/** Looks up a single task by id, or null if unknown. */
public QueryUnit getQueryUnit(QueryUnitId qid) {
    return tasks.get(qid);
}
// The following are null until finalizeStats() runs at block completion.
public Schema getSchema() {
    return schema;
}
public TableMeta getTableMeta() {
    return meta;
}
public TableStats getResultStats() {
    return resultStatistics;
}
public TableStats getInputStats() {
    return inputStatistics;
}
/**
 * Returns the diagnostics list under the read lock.
 * NOTE(review): this hands out the internal mutable list — callers
 * presumably only read it; verify before relying on isolation.
 */
public List<String> getDiagnostics() {
    readLock.lock();
    try {
        return diagnostics;
    } finally {
        readLock.unlock();
    }
}
/** Appends a diagnostic message (called under the write lock via handle()). */
protected void addDiagnostic(String diag) {
    diagnostics.add(diag);
}
/** The string form of a sub query is simply its execution block id. */
public String toString() {
    return String.valueOf(this.getId());
}
/** Two sub queries are equal iff they control the same execution block. */
@Override
public boolean equals(Object o) {
    if (!(o instanceof SubQuery)) {
        return false;
    }
    return getId().equals(((SubQuery) o).getId());
}
/** Consistent with equals: the hash of the execution block id. */
@Override
public int hashCode() {
    return getId().hashCode();
}
/** Orders sub queries by their execution block id. */
public int compareTo(SubQuery other) {
    return getId().compareTo(other.getId());
}
/** Returns the current lifecycle state, read under the read lock. */
public SubQueryState getState() {
    readLock.lock();
    try {
        return stateMachine.getCurrentState();
    } finally {
        readLock.unlock();
    }
}
/**
 * Aggregates input (index 0) and result (index 1) statistics across all
 * child blocks of a union. Column stats are merged from each child's
 * result statistics.
 *
 * @param subQuery the union sub query whose children are aggregated
 * @return a two-element array: {input stats, result stats}
 */
public static TableStats[] computeStatFromUnionBlock(SubQuery subQuery) {
    TableStats[] stat = new TableStats[]{new TableStats(), new TableStats()};
    long[] avgRows = new long[]{0, 0};
    long[] numBytes = new long[]{0, 0};
    long[] readBytes = new long[]{0, 0};
    long[] numRows = new long[]{0, 0};
    int[] numBlocks = new int[]{0, 0};
    int[] numOutputs = new int[]{0, 0};
    List<ColumnStats> columnStatses = Lists.newArrayList();
    MasterPlan masterPlan = subQuery.getMasterPlan();
    Iterator<ExecutionBlock> it = masterPlan.getChilds(subQuery.getBlock()).iterator();
    while (it.hasNext()) {
        ExecutionBlock block = it.next();
        SubQuery childSubQuery = subQuery.context.getSubQuery(block.getId());
        TableStats[] childStatArray = new TableStats[]{
            childSubQuery.getInputStats(), childSubQuery.getResultStats()
        };
        for (int i = 0; i < 2; i++) {
            if (childStatArray[i] == null) {
                // A child without stats (e.g. not finished) contributes nothing.
                continue;
            }
            avgRows[i] += childStatArray[i].getAvgRows();
            numBlocks[i] += childStatArray[i].getNumBlocks();
            numBytes[i] += childStatArray[i].getNumBytes();
            readBytes[i] += childStatArray[i].getReadBytes();
            numOutputs[i] += childStatArray[i].getNumShuffleOutputs();
            numRows[i] += childStatArray[i].getNumRows();
        }
        // BUG FIX: guard the result stats before merging column stats; the
        // unguarded addAll threw a NullPointerException for a child whose
        // getResultStats() was null even though the loop above skipped it.
        if (childStatArray[1] != null) {
            columnStatses.addAll(childStatArray[1].getColumnStats());
        }
    }
    for (int i = 0; i < 2; i++) {
        stat[i].setNumBlocks(numBlocks[i]);
        stat[i].setNumBytes(numBytes[i]);
        stat[i].setReadBytes(readBytes[i]);
        stat[i].setNumShuffleOutputs(numOutputs[i]);
        stat[i].setNumRows(numRows[i]);
        stat[i].setAvgRows(avgRows[i]);
    }
    stat[1].setColumnStats(columnStatses);
    return stat;
}
/**
 * Aggregates statistics over all tasks of this block.
 *
 * @return a two-element array: {aggregated input stats, aggregated result stats}
 */
private TableStats[] computeStatFromTasks() {
    List<TableStats> inputStatsList = Lists.newArrayList();
    List<TableStats> resultStatsList = Lists.newArrayList();
    for (QueryUnit unit : getQueryUnits()) {
        resultStatsList.add(unit.getStats());
        // BUG FIX: a unit may have no attempt yet (getProgress() guards the
        // same case); the unguarded call NPE'd on getLastAttempt() == null.
        if (unit.getLastAttempt() != null && unit.getLastAttempt().getInputStats() != null) {
            inputStatsList.add(unit.getLastAttempt().getInputStats());
        }
    }
    TableStats inputStats = StatisticsUtil.aggregateTableStat(inputStatsList);
    TableStats resultStats = StatisticsUtil.aggregateTableStat(resultStatsList);
    return new TableStats[]{inputStats, resultStats};
}
private void stopScheduler() {
    // If there are launched TaskRunners, send the 'shouldDie' message to all r
    // via received task requests. The scheduler is null until SQ_INIT runs.
    if (taskScheduler != null) {
        taskScheduler.stop();
    }
}
private void releaseContainers() {
    // If there are still live TaskRunners, try to kill the containers.
    eventHandler.handle(new TaskRunnerGroupEvent(EventType.CONTAINER_REMOTE_CLEANUP, getId(), containers.values()));
}
/**
 * Asks the task runner group to clean up a single container.
 *
 * @param containerId id of the container to release; unknown ids are ignored
 */
public void releaseContainer(ContainerId containerId) {
    // try to kill the container.
    Container container = containers.get(containerId);
    if (container == null) {
        // BUG FIX: an unknown/already-released id used to put a null entry
        // into the cleanup event's container list.
        return;
    }
    ArrayList<Container> list = new ArrayList<Container>();
    list.add(container);
    eventHandler.handle(new TaskRunnerGroupEvent(EventType.CONTAINER_REMOTE_CLEANUP, getId(), list));
}
/**
 * It computes all stats and sets the intermediate result.
 * Union blocks aggregate their children's stats; other blocks aggregate
 * over their own tasks. Also derives the output schema and table meta
 * from the first outgoing channel.
 */
private void finalizeStats() {
    TableStats[] statsArray;
    if (block.hasUnion()) {
        statsArray = computeStatFromUnionBlock(this);
    } else {
        statsArray = computeStatFromTasks();
    }
    DataChannel channel = masterPlan.getOutgoingChannels(getId()).get(0);
    // get default or store type
    CatalogProtos.StoreType storeType = CatalogProtos.StoreType.CSV; // default setting
    // if store plan (i.e., CREATE or INSERT OVERWRITE)
    StoreTableNode storeTableNode = PlannerUtil.findTopNode(getBlock().getPlan(), NodeType.STORE);
    if (storeTableNode != null) {
        storeType = storeTableNode.getStorageType();
    }
    schema = channel.getSchema();
    meta = CatalogUtil.newTableMeta(storeType, new Options());
    // statsArray[0] = aggregated input stats, statsArray[1] = result stats.
    inputStatistics = statsArray[0];
    resultStatistics = statsArray[1];
}
/**
 * Dispatches a sub query event into the state machine under the write lock.
 * An invalid transition is converted into an SQ_INTERNAL_ERROR event rather
 * than propagated.
 */
@Override
public void handle(SubQueryEvent event) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Processing " + event.getSubQueryId() + " of type " + event.getType() + ", preState=" + getState());
    }
    // FIX: acquire the lock *before* entering the try block. If lock() were
    // inside the try and failed, the finally clause would call unlock() on a
    // lock this thread never held (standard java.util.concurrent.locks idiom).
    writeLock.lock();
    try {
        SubQueryState oldState = getState();
        try {
            getStateMachine().doTransition(event.getType(), event);
        } catch (InvalidStateTransitonException e) {
            LOG.error("Can't handle this event at current state", e);
            eventHandler.handle(new SubQueryEvent(getId(),
                SubQueryEventType.SQ_INTERNAL_ERROR));
        }
        // notify the eventhandler of state change
        if (LOG.isDebugEnabled()) {
            if (oldState != getState()) {
                LOG.debug(getId() + " SubQuery Transitioned from " + oldState + " to "
                    + getState());
            }
        }
    } finally {
        writeLock.unlock();
    }
}
/** Forwards a worker's task request to this block's task scheduler. */
public void handleTaskRequestEvent(TaskRequestEvent event) {
    taskScheduler.handleTaskRequestEvent(event);
}
/**
 * Handles SQ_INIT from the NEW state: sets up the task scheduler, schedules
 * all fragments/tasks, and requests containers. Returns INITED on the normal
 * path, SUCCEEDED when the block needs no actual work (union, or zero
 * scheduled objects), and ERROR on any failure.
 */
private static class InitAndRequestContainer implements MultipleArcTransition<SubQuery,
    SubQueryEvent, SubQueryState> {
    @Override
    public SubQueryState transition(SubQuery subQuery, SubQueryEvent subQueryEvent) {
        subQuery.setStartTime();
        ExecutionBlock execBlock = subQuery.getBlock();
        SubQueryState state;
        try {
            // Union operator does not require actual query processing. It is performed logically.
            if (execBlock.hasUnion()) {
                subQuery.finalizeStats();
                state = SubQueryState.SUCCEEDED;
            } else {
                // Configure the outgoing shuffle, then build and fill the scheduler.
                ExecutionBlock parent = subQuery.getMasterPlan().getParent(subQuery.getBlock());
                DataChannel channel = subQuery.getMasterPlan().getChannel(subQuery.getId(), parent.getId());
                setShuffleIfNecessary(subQuery, channel);
                initTaskScheduler(subQuery);
                schedule(subQuery);
                subQuery.totalScheduledObjectsCount = subQuery.getTaskScheduler().remainingScheduledObjectNum();
                LOG.info(subQuery.totalScheduledObjectsCount + " objects are scheduled");
                if (subQuery.getTaskScheduler().remainingScheduledObjectNum() == 0) { // if there is no tasks
                    // Nothing to run: short-circuit straight to SUCCEEDED.
                    subQuery.stopScheduler();
                    subQuery.finalizeStats();
                    subQuery.eventHandler.handle(new SubQueryCompletedEvent(subQuery.getId(), SubQueryState.SUCCEEDED));
                    return SubQueryState.SUCCEEDED;
                } else {
                    subQuery.taskScheduler.start();
                    allocateContainers(subQuery);
                    return SubQueryState.INITED;
                }
            }
        } catch (Exception e) {
            // Any init failure: record diagnostics and drive the query to ERROR.
            LOG.error("SubQuery (" + subQuery.getId() + ") ERROR: ", e);
            subQuery.setFinishTime();
            subQuery.eventHandler.handle(new SubQueryDiagnosticsUpdateEvent(subQuery.getId(), e.getMessage()));
            subQuery.eventHandler.handle(new SubQueryCompletedEvent(subQuery.getId(), SubQueryState.ERROR));
            return SubQueryState.ERROR;
        }
        // Only reachable via the union branch above.
        return state;
    }
    // Creates and initializes the task scheduler chosen by configuration.
    private void initTaskScheduler(SubQuery subQuery) throws IOException {
        TajoConf conf = subQuery.context.getConf();
        subQuery.schedulerContext = new TaskSchedulerContext(subQuery.context,
            subQuery.getMasterPlan().isLeaf(subQuery.getId()), subQuery.getId());
        subQuery.taskScheduler = TaskSchedulerFactory.get(conf, subQuery.schedulerContext, subQuery);
        subQuery.taskScheduler.init(conf);
        LOG.info(subQuery.taskScheduler.getName() + " is chosen for the task scheduling");
    }
    /**
     * If a parent block requires a repartition operation, the method sets proper repartition
     * methods and the number of partitions to a given subquery.
     */
    private static void setShuffleIfNecessary(SubQuery subQuery, DataChannel channel) {
        if (channel.getShuffleType() != ShuffleType.NONE_SHUFFLE) {
            int numTasks = calculateShuffleOutputNum(subQuery, channel);
            Repartitioner.setShuffleOutputNumForTwoPhase(subQuery, numTasks, channel);
        }
    }
    /**
     * Getting the total memory of cluster
     *
     * @param subQuery
     * @return mega bytes
     */
    private static int getClusterTotalMemory(SubQuery subQuery) {
        List<TajoMasterProtocol.WorkerResourceProto> workers =
            subQuery.context.getQueryMasterContext().getQueryMaster().getAllWorker();
        int totalMem = 0;
        for (TajoMasterProtocol.WorkerResourceProto worker : workers) {
            totalMem += worker.getMemoryMB();
        }
        return totalMem;
    }
    /**
     * Getting the desire number of partitions according to the volume of input data.
     * This method is only used to determine the partition key number of hash join or aggregation.
     *
     * @param subQuery
     * @return the desired number of shuffle output partitions
     */
    public static int calculateShuffleOutputNum(SubQuery subQuery, DataChannel channel) {
        TajoConf conf = subQuery.context.getConf();
        MasterPlan masterPlan = subQuery.getMasterPlan();
        ExecutionBlock parent = masterPlan.getParent(subQuery.getBlock());
        GroupbyNode grpNode = null;
        if (parent != null) {
            grpNode = PlannerUtil.findMostBottomNode(parent.getPlan(), NodeType.GROUP_BY);
        }
        // Is this subquery the first step of join?
        if (parent != null && parent.getScanNodes().length == 2) {
            List<ExecutionBlock> childs = masterPlan.getChilds(parent);
            // for outer (left child)
            ExecutionBlock outer = childs.get(0);
            long outerVolume = getInputVolume(subQuery.masterPlan, subQuery.context, outer);
            // for inner (right child)
            ExecutionBlock inner = childs.get(1);
            long innerVolume = getInputVolume(subQuery.masterPlan, subQuery.context, inner);
            LOG.info("Outer volume: " + Math.ceil((double) outerVolume / 1048576) + "MB, "
                + "Inner volume: " + Math.ceil((double) innerVolume / 1048576) + "MB");
            // Partition count is driven by the bigger side of the join,
            // capped by the cluster's memory slots.
            long bigger = Math.max(outerVolume, innerVolume);
            int mb = (int) Math.ceil((double) bigger / 1048576);
            LOG.info("Bigger Table's volume is approximately " + mb + " MB");
            int taskNum = (int) Math.ceil((double) mb /
                conf.getIntVar(ConfVars.DIST_QUERY_JOIN_PARTITION_VOLUME));
            int totalMem = getClusterTotalMemory(subQuery);
            LOG.info("Total memory of cluster is " + totalMem + " MB");
            int slots = Math.max(totalMem / conf.getIntVar(ConfVars.TASK_DEFAULT_MEMORY), 1);
            // determine the number of task
            taskNum = Math.min(taskNum, slots);
            LOG.info("The determined number of join partitions is " + taskNum);
            return taskNum;
            // Is this subquery the first step of group-by?
        } else if (grpNode != null) {
            if (grpNode.getGroupingColumns().length == 0) {
                // Global aggregation without keys needs exactly one partition.
                return 1;
            } else {
                long volume = getInputVolume(subQuery.masterPlan, subQuery.context, subQuery.block);
                int mb = (int) Math.ceil((double) volume / 1048576);
                LOG.info("Table's volume is approximately " + mb + " MB");
                // determine the number of task
                int taskNumBySize = (int) Math.ceil((double) mb /
                    conf.getIntVar(ConfVars.DIST_QUERY_GROUPBY_PARTITION_VOLUME));
                int totalMem = getClusterTotalMemory(subQuery);
                LOG.info("Total memory of cluster is " + totalMem + " MB");
                int slots = Math.max(totalMem / conf.getIntVar(ConfVars.TASK_DEFAULT_MEMORY), 1);
                int taskNum = Math.min(taskNumBySize, slots); //Maximum partitions
                LOG.info("The determined number of aggregation partitions is " + taskNum);
                return taskNum;
            }
        } else {
            // Fallback: neither join nor group-by — size by volume alone.
            LOG.info("============>>>>> Unexpected Case! <<<<<================");
            long volume = getInputVolume(subQuery.masterPlan, subQuery.context, subQuery.block);
            int mb = (int) Math.ceil((double)volume / 1048576);
            LOG.info("Table's volume is approximately " + mb + " MB");
            // determine the number of task per 128MB
            int taskNum = (int) Math.ceil((double)mb / 128);
            LOG.info("The determined number of partitions is " + taskNum);
            return taskNum;
        }
    }
    // Dispatches scheduling by block shape: plain scan, join, or other.
    private static void schedule(SubQuery subQuery) throws IOException {
        MasterPlan masterPlan = subQuery.getMasterPlan();
        ExecutionBlock execBlock = subQuery.getBlock();
        if (subQuery.getMasterPlan().isLeaf(execBlock.getId()) && execBlock.getScanNodes().length == 1) { // Case 1: Just Scan
            scheduleFragmentsForLeafQuery(subQuery);
        } else if (execBlock.getScanNodes().length > 1) { // Case 2: Join
            Repartitioner.scheduleFragmentsForJoinQuery(subQuery.schedulerContext, subQuery);
        } else { // Case 3: Others (Sort or Aggregation)
            int numTasks = getNonLeafTaskNum(subQuery);
            Repartitioner.scheduleFragmentsForNonLeafTasks(subQuery.schedulerContext, masterPlan, subQuery, numTasks);
        }
    }
    /**
     * Getting the desire number of tasks according to the volume of input data
     *
     * @param subQuery
     * @return at least 1 task; one task per 64MB of intermediate data
     */
    public static int getNonLeafTaskNum(SubQuery subQuery) {
        // Getting intermediate data size
        long volume = getInputVolume(subQuery.getMasterPlan(), subQuery.context, subQuery.getBlock());
        int mb = (int) Math.ceil((double)volume / 1048576);
        LOG.info("Table's volume is approximately " + mb + " MB");
        // determine the number of task per 64MB
        int maxTaskNum = Math.max(1, (int) Math.ceil((double)mb / 64));
        LOG.info("The determined number of non-leaf tasks is " + maxTaskNum);
        return maxTaskNum;
    }
    /**
     * Computes the input volume of a block: leaf blocks report their table's
     * byte size; non-leaf blocks sum their children — using a finished
     * child's actual result size, or recursing when the child hasn't
     * succeeded yet.
     */
    public static long getInputVolume(MasterPlan masterPlan, QueryMasterTask.QueryMasterTaskContext context,
        ExecutionBlock execBlock) {
        Map<String, TableDesc> tableMap = context.getTableDescMap();
        if (masterPlan.isLeaf(execBlock)) {
            ScanNode outerScan = execBlock.getScanNodes()[0];
            TableStats stat = tableMap.get(outerScan.getCanonicalName()).getStats();
            return stat.getNumBytes();
        } else {
            long aggregatedVolume = 0;
            for (ExecutionBlock childBlock : masterPlan.getChilds(execBlock)) {
                SubQuery subquery = context.getSubQuery(childBlock.getId());
                if (subquery == null || subquery.getState() != SubQueryState.SUCCEEDED) {
                    aggregatedVolume += getInputVolume(masterPlan, context, childBlock);
                } else {
                    aggregatedVolume += subquery.getResultStats().getNumBytes();
                }
            }
            return aggregatedVolume;
        }
    }
    // Sends a container allocation request sized by the estimated task count.
    public static void allocateContainers(SubQuery subQuery) {
        ExecutionBlock execBlock = subQuery.getBlock();
        //TODO consider disk slot
        int requiredMemoryMBPerTask = 512;
        int numRequest = subQuery.getContext().getResourceAllocator().calculateNumRequestContainers(
            subQuery.getContext().getQueryMasterContext().getWorkerContext(),
            subQuery.schedulerContext.getEstimatedTaskNum(),
            requiredMemoryMBPerTask
        );
        final Resource resource = Records.newRecord(Resource.class);
        resource.setMemory(requiredMemoryMBPerTask);
        LOG.info("Request Container for " + subQuery.getId() + " containers=" + numRequest);
        Priority priority = Records.newRecord(Priority.class);
        priority.setPriority(subQuery.getPriority());
        ContainerAllocationEvent event =
            new ContainerAllocationEvent(ContainerAllocatorEventType.CONTAINER_REQ,
                subQuery.getId(), priority, resource, numRequest,
                subQuery.masterPlan.isLeaf(execBlock), 0.0f);
        subQuery.eventHandler.handle(event);
    }
    // Splits the scanned table into fragments and feeds them to the scheduler.
    private static void scheduleFragmentsForLeafQuery(SubQuery subQuery) throws IOException {
        ExecutionBlock execBlock = subQuery.getBlock();
        ScanNode[] scans = execBlock.getScanNodes();
        Preconditions.checkArgument(scans.length == 1, "Must be Scan Query");
        ScanNode scan = scans[0];
        TableDesc table = subQuery.context.getTableDescMap().get(scan.getCanonicalName());
        Collection<FileFragment> fragments;
        TableMeta meta = table.getMeta();
        // Depending on the scan node's type, it creates fragments. If the scan
        // is for a partitioned table, it creates many fragments for all
        // partitions. Otherwise, it creates at least one fragment for the
        // table, which may span a number of blocks or consist of several files.
        if (scan.getType() == NodeType.PARTITIONS_SCAN) {
            fragments = Repartitioner.getFragmentsFromPartitionedTable(subQuery.getStorageManager(), scan, table);
        } else {
            Path inputPath = table.getPath();
            fragments = subQuery.getStorageManager().getSplits(scan.getCanonicalName(), meta, table.getSchema(), inputPath);
        }
        SubQuery.scheduleFragments(subQuery, fragments);
        if (subQuery.getTaskScheduler() instanceof DefaultTaskScheduler) {
            //Leaf task of DefaultTaskScheduler should be fragment size
            // EstimatedTaskNum determined number of initial container
            subQuery.schedulerContext.setTaskSize(fragments.size());
            subQuery.schedulerContext.setEstimatedTaskNum(fragments.size());
        } else {
            // Other schedulers size tasks in bytes and estimate count from volume.
            TajoConf conf = subQuery.context.getConf();
            subQuery.schedulerContext.setTaskSize(conf.getIntVar(ConfVars.TASK_DEFAULT_SIZE) * 1024 * 1024);
            int estimatedTaskNum = (int) Math.ceil((double) table.getStats().getNumBytes() /
                (double) subQuery.schedulerContext.getTaskSize());
            subQuery.schedulerContext.setEstimatedTaskNum(estimatedTaskNum);
        }
    }
}
public static void scheduleFragments(SubQuery subQuery, Collection<FileFragment> fragments) {
for (FileFragment eachFragment : fragments) {
scheduleFragment(subQuery, eachFragment);
}
}
public static void scheduleFragment(SubQuery subQuery, FileFragment fragment) {
subQuery.taskScheduler.handle(new FragmentScheduleEvent(TaskSchedulerEvent.EventType.T_SCHEDULE,
subQuery.getId(), fragment));
}
public static void scheduleFragments(SubQuery subQuery, Collection<FileFragment> leftFragments,
FileFragment broadcastFragment) {
for (FileFragment eachLeafFragment : leftFragments) {
scheduleFragment(subQuery, eachLeafFragment, broadcastFragment);
}
}
public static void scheduleFragment(SubQuery subQuery,
FileFragment leftFragment, FileFragment rightFragment) {
subQuery.taskScheduler.handle(new FragmentScheduleEvent(TaskSchedulerEvent.EventType.T_SCHEDULE,
subQuery.getId(), leftFragment, rightFragment));
}
public static void scheduleFetches(SubQuery subQuery, Map<String, List<URI>> fetches) {
subQuery.taskScheduler.handle(new FetchScheduleEvent(TaskSchedulerEvent.EventType.T_SCHEDULE,
subQuery.getId(), fetches));
}
public static QueryUnit newEmptyQueryUnit(TaskSchedulerContext schedulerContext,
QueryUnitAttemptScheduleContext queryUnitContext,
SubQuery subQuery, int taskId) {
ExecutionBlock execBlock = subQuery.getBlock();
QueryUnit unit = new QueryUnit(schedulerContext.getMasterContext().getConf(),
queryUnitContext,
QueryIdFactory.newQueryUnitId(schedulerContext.getBlockId(), taskId),
schedulerContext.isLeafQuery(), subQuery.eventHandler);
unit.setLogicalPlan(execBlock.getPlan());
subQuery.addTask(unit);
return unit;
}
  /**
   * Registers newly allocated containers with the sub query, then asks the
   * task runners to launch on them and fires SQ_START.
   */
  private static class ContainerLaunchTransition
      implements SingleArcTransition<SubQuery, SubQueryEvent> {
    @Override
    public void transition(SubQuery subQuery, SubQueryEvent event) {
      try {
        SubQueryContainerAllocationEvent allocationEvent =
            (SubQueryContainerAllocationEvent) event;
        for (Container container : allocationEvent.getAllocatedContainer()) {
          ContainerId cId = container.getId();
          if (subQuery.containers.containsKey(cId)) {
            // A duplicate allocation is reported as an internal error, but the
            // container is still (re)registered below and the launch sequence
            // continues for the remaining containers.
            subQuery.eventHandler.handle(new SubQueryDiagnosticsUpdateEvent(subQuery.getId(),
                "Duplicated containers are allocated: " + cId.toString()));
            subQuery.eventHandler.handle(new SubQueryEvent(subQuery.getId(), SubQueryEventType.SQ_INTERNAL_ERROR));
          }
          subQuery.containers.put(cId, container);
        }
        LOG.info("SubQuery (" + subQuery.getId() + ") has " + subQuery.containers.size() + " containers!");
        // Launch task runners on the allocated containers, then advance the
        // sub query state machine with SQ_START.
        subQuery.eventHandler.handle(
            new TaskRunnerGroupEvent(EventType.CONTAINER_REMOTE_LAUNCH,
                subQuery.getId(), allocationEvent.getAllocatedContainer()));
        subQuery.eventHandler.handle(new SubQueryEvent(subQuery.getId(), SubQueryEventType.SQ_START));
      } catch (Throwable t) {
        // Surface any unexpected failure as a diagnostic plus an internal error.
        subQuery.eventHandler.handle(new SubQueryDiagnosticsUpdateEvent(subQuery.getId(),
            ExceptionUtils.getStackTrace(t)));
        subQuery.eventHandler.handle(new SubQueryEvent(subQuery.getId(), SubQueryEventType.SQ_INTERNAL_ERROR));
      }
    }
  }
  /**
   * It is used in the KILL_WAIT state against the Container Allocation event.
   * It just returns the allocated containers to the resource manager.
   */
private static class AllocatedContainersCancelTransition implements SingleArcTransition<SubQuery, SubQueryEvent> {
@Override
public void transition(SubQuery subQuery, SubQueryEvent event) {
try {
SubQueryContainerAllocationEvent allocationEvent =
(SubQueryContainerAllocationEvent) event;
subQuery.eventHandler.handle(
new TaskRunnerGroupEvent(EventType.CONTAINER_REMOTE_CLEANUP,
subQuery.getId(), allocationEvent.getAllocatedContainer()));
LOG.info(String.format("[%s] %d allocated containers are canceled",
subQuery.getId().toString(),
allocationEvent.getAllocatedContainer().size()));
} catch (Throwable t) {
subQuery.eventHandler.handle(new SubQueryDiagnosticsUpdateEvent(subQuery.getId(),
ExceptionUtils.getStackTrace(t)));
subQuery.eventHandler.handle(new SubQueryEvent(subQuery.getId(), SubQueryEventType.SQ_INTERNAL_ERROR));
}
}
}
private static class TaskCompletedTransition implements SingleArcTransition<SubQuery, SubQueryEvent> {
@Override
public void transition(SubQuery subQuery,
SubQueryEvent event) {
SubQueryTaskEvent taskEvent = (SubQueryTaskEvent) event;
QueryUnit task = subQuery.getQueryUnit(taskEvent.getTaskId());
if (task == null) { // task failed
LOG.error(String.format("Task %s is absent", taskEvent.getTaskId()));
subQuery.eventHandler.handle(new SubQueryEvent(subQuery.getId(), SubQueryEventType.SQ_FAILED));
} else {
subQuery.completedTaskCount++;
if (taskEvent.getState() == TaskState.SUCCEEDED) {
if (task.isLeafTask()) {
subQuery.succeededObjectCount += task.getTotalFragmentNum();
} else {
subQuery.succeededObjectCount++;
}
} else if (task.getState() == TaskState.KILLED) {
if (task.isLeafTask()) {
subQuery.killedObjectCount += task.getTotalFragmentNum();
} else {
subQuery.killedObjectCount++;
}
} else if (task.getState() == TaskState.FAILED) {
if (task.isLeafTask()) {
subQuery.failedObjectCount+= task.getTotalFragmentNum();
} else {
subQuery.failedObjectCount++;
}
// if at least one task is failed, try to kill all tasks.
subQuery.eventHandler.handle(new SubQueryEvent(subQuery.getId(), SubQueryEventType.SQ_KILL));
}
LOG.info(String.format("[%s] Task Completion Event (Total: %d, Success: %d, Killed: %d, Failed: %d",
subQuery.getId(),
subQuery.getTotalScheduledObjectsCount(),
subQuery.succeededObjectCount,
subQuery.killedObjectCount,
subQuery.failedObjectCount));
if (subQuery.totalScheduledObjectsCount ==
subQuery.succeededObjectCount + subQuery.killedObjectCount + subQuery.failedObjectCount) {
subQuery.eventHandler.handle(new SubQueryEvent(subQuery.getId(), SubQueryEventType.SQ_SUBQUERY_COMPLETED));
}
}
}
}
private static class KillTasksTransition implements SingleArcTransition<SubQuery, SubQueryEvent> {
@Override
public void transition(SubQuery subQuery, SubQueryEvent subQueryEvent) {
subQuery.getTaskScheduler().stop();
for (QueryUnit queryUnit : subQuery.getQueryUnits()) {
subQuery.eventHandler.handle(new TaskEvent(queryUnit.getId(), TaskEventType.T_KILL));
}
}
}
  // Releases scheduling resources held by this subquery: stops the task
  // scheduler first, then returns any remaining containers.
  private void cleanup() {
    stopScheduler();
    releaseContainers();
  }
  /**
   * Decides the terminal state of the subquery from its kill/failure counters:
   * FAILED takes precedence over KILLED, and a run with neither is SUCCEEDED.
   */
  private static class SubQueryCompleteTransition
      implements MultipleArcTransition<SubQuery, SubQueryEvent, SubQueryState> {
    @Override
    public SubQueryState transition(SubQuery subQuery, SubQueryEvent subQueryEvent) {
      // TODO - Commit subQuery & do cleanup
      // TODO - records succeeded, failed, killed completed task
      // TODO - records metrics
      try {
        LOG.info(String.format("subQuery completed - %s (total=%d, success=%d, killed=%d)",
            subQuery.getId().toString(),
            subQuery.getTotalScheduledObjectsCount(),
            subQuery.getSucceededObjectCount(),
            subQuery.killedObjectCount));
        if (subQuery.killedObjectCount > 0 || subQuery.failedObjectCount > 0) {
          if (subQuery.failedObjectCount > 0) {
            subQuery.abort(SubQueryState.FAILED);
            return SubQueryState.FAILED;
          } else if (subQuery.killedObjectCount > 0) {
            subQuery.abort(SubQueryState.KILLED);
            return SubQueryState.KILLED;
          } else {
            // NOTE(review): unreachable — the enclosing condition guarantees
            // at least one of the two counters is positive.
            LOG.error("Invalid State " + subQuery.getState() + " State");
            subQuery.abort(SubQueryState.ERROR);
            return SubQueryState.ERROR;
          }
        } else {
          // No kills or failures: mark the subquery complete.
          subQuery.complete();
          return SubQueryState.SUCCEEDED;
        }
      } catch (Throwable t) {
        // Any failure while completing/aborting degrades the subquery to ERROR.
        LOG.error(t);
        subQuery.abort(SubQueryState.ERROR);
        return SubQueryState.ERROR;
      }
    }
  }
private static class DiagnosticsUpdateTransition implements SingleArcTransition<SubQuery, SubQueryEvent> {
@Override
public void transition(SubQuery subQuery, SubQueryEvent event) {
subQuery.addDiagnostic(((SubQueryDiagnosticsUpdateEvent) event).getDiagnosticUpdate());
}
}
  // Moves the subquery into the ERROR state when an internal error event is
  // delivered, whatever state it was in.
  private static class InternalErrorTransition implements SingleArcTransition<SubQuery, SubQueryEvent> {
    @Override
    public void transition(SubQuery subQuery, SubQueryEvent subQueryEvent) {
      subQuery.abort(SubQueryState.ERROR);
    }
  }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.direct;
import static com.google.common.base.Preconditions.checkState;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.isA;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.theInstance;
import static org.junit.Assert.assertThat;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.util.UserCodeException;
import org.hamcrest.Matchers;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
 * Tests for {@link DoFnLifecycleManager}.
 *
 * <p>Exercises the lifecycle contract observable here: {@code get()} hands out
 * instances distinct from the template fn with setup already performed, one
 * instance per thread, and {@code remove()}/{@code removeAll()} invoke
 * teardown.
 */
@RunWith(JUnit4.class)
public class DoFnLifecycleManagerTest {
  @Rule public ExpectedException thrown = ExpectedException.none();
  // Template fn handed to the manager; get() returns other instances, never
  // this one (asserted in setupOnGet).
  private TestFn fn = new TestFn();
  private DoFnLifecycleManager mgr = DoFnLifecycleManager.of(fn);
  // get() must return a distinct, already-set-up, not-torn-down instance.
  @Test
  public void setupOnGet() throws Exception {
    TestFn obtained = (TestFn) mgr.get();
    assertThat(obtained, not(theInstance(fn)));
    assertThat(obtained.setupCalled, is(true));
    assertThat(obtained.teardownCalled, is(false));
  }
  // Repeated get() calls on the same thread return the same instance; setup
  // ran only once (TestFn.setup throws if invoked twice).
  @Test
  public void getMultipleCallsSingleSetupCall() throws Exception {
    TestFn obtained = (TestFn) mgr.get();
    TestFn secondObtained = (TestFn) mgr.get();
    assertThat(obtained, theInstance(secondObtained));
    assertThat(obtained.setupCalled, is(true));
    assertThat(obtained.teardownCalled, is(false));
  }
  // Each of the 10 worker threads obtains its own instance: every collected fn
  // appears exactly once in the list.
  @Test
  public void getMultipleThreadsDifferentInstances() throws Exception {
    CountDownLatch startSignal = new CountDownLatch(1);
    ExecutorService executor = Executors.newCachedThreadPool();
    List<Future<TestFn>> futures = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
      futures.add(executor.submit(new GetFnCallable(mgr, startSignal)));
    }
    startSignal.countDown();
    List<TestFn> fns = new ArrayList<>();
    for (Future<TestFn> future : futures) {
      fns.add(future.get(1L, TimeUnit.SECONDS));
    }
    // NOTE: the loop variable intentionally shadows the 'fn' field here.
    for (TestFn fn : fns) {
      assertThat(fn.setupCalled, is(true));
      int sameInstances = 0;
      for (TestFn otherFn : fns) {
        if (otherFn == fn) {
          sameInstances++;
        }
      }
      assertThat(sameInstances, equalTo(1));
    }
  }
  // remove() tears down the current thread's instance; a subsequent get()
  // returns a different one.
  @Test
  public void teardownOnRemove() throws Exception {
    TestFn obtained = (TestFn) mgr.get();
    mgr.remove();
    assertThat(obtained, not(theInstance(fn)));
    assertThat(obtained.setupCalled, is(true));
    assertThat(obtained.teardownCalled, is(true));
    assertThat(mgr.get(), not(Matchers.<DoFn<?, ?>>theInstance(obtained)));
  }
  // If teardown throws (here forced by calling it manually first), remove()
  // surfaces the failure wrapped in a UserCodeException.
  @Test
  public void teardownThrowsRemoveThrows() throws Exception {
    TestFn obtained = (TestFn) mgr.get();
    obtained.teardown();
    thrown.expect(UserCodeException.class);
    thrown.expectCause(isA(IllegalStateException.class));
    thrown.expectMessage("Cannot call teardown: already torn down");
    mgr.remove();
  }
  // removeAll() tears down the instances created by every thread.
  @Test
  public void teardownAllOnRemoveAll() throws Exception {
    CountDownLatch startSignal = new CountDownLatch(1);
    ExecutorService executor = Executors.newCachedThreadPool();
    List<Future<TestFn>> futures = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
      futures.add(executor.submit(new GetFnCallable(mgr, startSignal)));
    }
    startSignal.countDown();
    List<TestFn> fns = new ArrayList<>();
    for (Future<TestFn> future : futures) {
      fns.add(future.get(1L, TimeUnit.SECONDS));
    }
    mgr.removeAll();
    for (TestFn fn : fns) {
      assertThat(fn.setupCalled, is(true));
      assertThat(fn.teardownCalled, is(true));
    }
  }
  // Per-thread remove() calls followed by removeAll() must tear everything
  // down exactly once: removeAll() returns an empty iterable and none of the
  // remove futures threw.
  @Test
  public void removeAndRemoveAllConcurrent() throws Exception {
    CountDownLatch startSignal = new CountDownLatch(1);
    ExecutorService executor = Executors.newCachedThreadPool();
    List<Future<TestFn>> futures = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
      futures.add(executor.submit(new GetFnCallable(mgr, startSignal)));
    }
    startSignal.countDown();
    List<TestFn> fns = new ArrayList<>();
    for (Future<TestFn> future : futures) {
      fns.add(future.get(1L, TimeUnit.SECONDS));
    }
    CountDownLatch removeSignal = new CountDownLatch(1);
    List<Future<Void>> removeFutures = new ArrayList<>();
    for (int i = 0; i < 5; i++) {
      // These will reuse the threads used in the GetFns
      removeFutures.add(executor.submit(new TeardownFnCallable(mgr, removeSignal)));
    }
    removeSignal.countDown();
    assertThat(mgr.removeAll(), Matchers.emptyIterable());
    for (Future<Void> removed : removeFutures) {
      // Should not have thrown an exception.
      removed.get();
    }
    for (TestFn fn : fns) {
      assertThat(fn.setupCalled, is(true));
      assertThat(fn.teardownCalled, is(true));
    }
  }
  // Waits for the shared start signal, then obtains this thread's fn instance.
  private static class GetFnCallable implements Callable<TestFn> {
    private final DoFnLifecycleManager mgr;
    private final CountDownLatch startSignal;
    private GetFnCallable(DoFnLifecycleManager mgr, CountDownLatch startSignal) {
      this.mgr = mgr;
      this.startSignal = startSignal;
    }
    @Override
    public TestFn call() throws Exception {
      startSignal.await();
      return (TestFn) mgr.get();
    }
  }
  // Waits for the shared start signal, then removes (tears down) this
  // thread's fn instance.
  private static class TeardownFnCallable implements Callable<Void> {
    private final DoFnLifecycleManager mgr;
    private final CountDownLatch startSignal;
    private TeardownFnCallable(DoFnLifecycleManager mgr, CountDownLatch startSignal) {
      this.mgr = mgr;
      this.startSignal = startSignal;
    }
    @Override
    public Void call() throws Exception {
      startSignal.await();
      // Will throw an exception if the TestFn has already been removed from this thread
      mgr.remove();
      return null;
    }
  }
  // Minimal DoFn that records lifecycle calls and enforces the legal ordering
  // (setup at most once; teardown only after setup and at most once).
  private static class TestFn extends DoFn<Object, Object> {
    boolean setupCalled = false;
    boolean teardownCalled = false;
    @Setup
    public void setup() {
      checkState(!setupCalled, "Cannot call setup: already set up");
      checkState(!teardownCalled, "Cannot call setup: already torn down");
      setupCalled = true;
    }
    @ProcessElement
    public void processElement(ProcessContext c) throws Exception {
    }
    @Teardown
    public void teardown() {
      checkState(setupCalled, "Cannot call teardown: not set up");
      checkState(!teardownCalled, "Cannot call teardown: already torn down");
      teardownCalled = true;
    }
  }
}
|
|
/*
* Copyright 2009-2017. DigitalGlobe, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*/
package org.mrgeo.resources.raster.mrspyramid;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import org.mrgeo.colorscale.ColorScale;
import org.mrgeo.data.raster.MrGeoRaster;
import org.mrgeo.image.MrsPyramid;
import org.mrgeo.mapalgebra.MapAlgebraJob;
import org.mrgeo.services.SecurityUtils;
import org.mrgeo.services.mrspyramid.MrsPyramidService;
import org.mrgeo.services.mrspyramid.MrsPyramidServiceException;
import org.mrgeo.services.mrspyramid.rendering.ImageRenderer;
import org.mrgeo.services.mrspyramid.rendering.ImageRendererException;
import org.mrgeo.services.mrspyramid.rendering.TiffImageRenderer;
import org.mrgeo.utils.tms.Bounds;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.*;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.UriInfo;
import javax.ws.rs.ext.ContextResolver;
import javax.ws.rs.ext.Providers;
import java.io.FileNotFoundException;
import java.io.IOException;
//import org.mrgeo.services.mrspyramid.MrsPyramidService;
@Path("/raster")
public class RasterResource
{
private static final String TIFF_MIME_TYPE = "image/tiff";
private static final String KML_INPUT_FORMAT = "kml";
private static final Logger log = LoggerFactory.getLogger(RasterResource.class);
@Context
UriInfo uriInfo;
@Context
Providers providers;
@Context
MrsPyramidService service;
@Context
HttpServletRequest request;
/*
 * Accepts a MapAlgebra expression and runs a job that will create a raster as
 * a result of running the expression.
 *
 * @param output - unique id, this will be the name of the output raster
 *
 * @param expression - mapalgebra expression
 *
 * @param basepath [optional] - this is added for testing purposes. This will
 * be the path where the output raster will be created.
 */
@SuppressFBWarnings(value = "JAXRS_ENDPOINT", justification = "verified")
@PUT
@Path("/{output}/mapalgebra/")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.TEXT_PLAIN)
public Response createMapAlgebraJob(@PathParam("output") String outputId,
    @QueryParam("basePath") String basePath,
    @QueryParam("protectionLevel") @DefaultValue("") String protectionLevel,
    String expression)
{
getService();
// TODO: After MrsPyramid 2.0 is complete, we will no longer specify a
// full path but instead just the resource name. This is because there is no
// concept of paths in Accumulo.
// TODO: Need to construct provider properties from the WebRequest using
// a new security layer and pass those properties to MapAlgebraJob.
MapAlgebraJob job = new MapAlgebraJob(expression, outputId,
    protectionLevel, SecurityUtils.getProviderProperties());
service.getJobManager().submitJob("MapAlgebra job " + outputId, job);
// TODO: Revisit the response whenever we re-think how job status reporting
// will work within Spark.
return Response.status(Status.ACCEPTED).build();
}
/*
 * Renders a bounding-box region of a stored raster to an image.
 *
 * @param imgName - name of the stored pyramid to render
 *
 * @param format - output image format (default png); "kml" is rejected as no
 * longer supported
 *
 * @param bbox - four comma delimited values: minX, minY, maxX, maxY
 *
 * @param min/max - optional color-scale extrema overrides (ignored for tiff)
 */
@SuppressFBWarnings(value = "JAXRS_ENDPOINT", justification = "verified")
@GET
@Produces("image/*")
@Path("/{output: .*+}")
public Response getImage(@PathParam("output") String imgName,
    @QueryParam("format") @DefaultValue("png") String format,
    @QueryParam("bbox") @DefaultValue("-180, -90, 180, 90") String bbox,
    @QueryParam("width") @DefaultValue("600") int width,
    @QueryParam("height") @DefaultValue("400") int height,
    @QueryParam("color-scale-name") String colorScaleName,
    @QueryParam("color-scale") String colorScale,
    @QueryParam("min") Double min,
    @QueryParam("max") Double max,
    @QueryParam("srs") String srs,
    @QueryParam("zoom-level") @DefaultValue("-1") int zoomLevel)
{
String[] bBoxValues = bbox.split(",");
if (bBoxValues.length != 4)
{
  return Response.status(Status.BAD_REQUEST)
      .entity("Bounding box must have four comma delimited arguments.").build();
}
Bounds bounds;
try
{
  // Double.parseDouble tolerates surrounding whitespace, so values such as
  // " -90" from the default bbox parse cleanly.
  bounds = new Bounds(Double.parseDouble(bBoxValues[0]),
      Double.parseDouble(bBoxValues[1]),
      Double.parseDouble(bBoxValues[2]),
      Double.parseDouble(bBoxValues[3]));
}
catch (NumberFormatException nfe)
{
  // Malformed client input must be a 400, not an unhandled 500.
  return Response.status(Status.BAD_REQUEST)
      .entity("Bounding box arguments must be numeric.").build();
}
ColorScale cs = null;
getService();
try
{
  // A color scale may be given by name or as inline JSON, but not both.
  if (colorScaleName != null)
  {
    if (colorScale != null)
    {
      return Response.status(Status.BAD_REQUEST)
          .entity("Only one of ColorScale or ColorScaleName can be specified.").build();
    }
    cs = service.getColorScaleFromName(colorScaleName);
  }
  else if (colorScale != null)
  {
    cs = service.getColorScaleFromJSON(colorScale);
  }
  if (zoomLevel != -1)
  {
    MrsPyramid pyramid = service.getPyramid(imgName, SecurityUtils.getProviderProperties());
    if (pyramid == null)
    {
      return Response.status(Status.NOT_FOUND).entity(imgName + " not found").build();
    }
    // Throw an error if the requested zoom level does not exist
    if (pyramid.getMetadata().getName(zoomLevel) == null)
    {
      return Response.status(Status.BAD_REQUEST).entity("Invalid zoom level specified.")
          .build();
    }
  }
  //for kml we'll use the original raster resource code path, since no kml generation exists
  //in the original wms code
  if (!format.equals(KML_INPUT_FORMAT))
  {
    MrsPyramid pyramid = service.getPyramid(imgName, SecurityUtils.getProviderProperties());
    if (pyramid == null)
    {
      return Response.status(Status.NOT_FOUND).entity(imgName + " not found").build();
    }
    // A request entirely outside the image yields an empty tile, not an error.
    if (!bounds.toEnvelope().intersects(pyramid.getBounds().toEnvelope()))
    {
      log.debug("request bounds does not intersects image bounds");
      byte[] imageData = service.getEmptyTile(width, height, format);
      String type = service.getContentType(format);
      return Response.ok(imageData)
          .header("Content-Type", type)
          .build();
    }
    ImageRenderer renderer;
    try
    {
      renderer = service.getImageRenderer(format);
    }
    catch (IllegalArgumentException e)
    {
      // Only translate the "invalid format" case into a 400; anything else is
      // unexpected and propagates.
      if (e.getMessage().toUpperCase().contains("INVALID FORMAT"))
      {
        return Response.status(Status.BAD_REQUEST).entity(
            "Unsupported image format - " + format).build();
      }
      throw e;
    }
    // TODO: Need to construct provider properties from the WebRequest using
    // a new security layer and pass those properties.
    MrGeoRaster result = renderer.renderImage(imgName, bounds, width, height,
        SecurityUtils.getProviderProperties(), srs);
    if (!(renderer instanceof TiffImageRenderer))
    {
      log.debug("Applying color scale to image " + imgName + " ...");
      //Add min/max colorscale override
      double[] overrideExtrema = renderer.getExtrema();
      if (min != null)
      {
        overrideExtrema[0] = min;
      }
      if (max != null)
      {
        overrideExtrema[1] = max;
      }
      result = service.applyColorScaleToImage(format, result, cs, renderer, overrideExtrema);
      log.debug("Color scale applied to image " + imgName);
    }
    return service.getImageResponseWriter(format).write(result, imgName, bounds).build();
  }
  else
  {
    // Render KML
    log.error("KML not supported anymore");
    return Response.status(Status.NOT_IMPLEMENTED).entity("KML is no longer supported").build();
  }
}
catch (FileNotFoundException | MrsPyramidServiceException fnfe)
{
  log.error("Exception thrown", fnfe);
  return Response.status(Status.BAD_REQUEST).entity(fnfe.getMessage()).build();
}
catch (ImageRendererException | IOException e)
{
  log.error("Exception thrown", e);
  return Response.serverError().entity(e.getMessage()).build();
}
}
// Lazily resolves the MrsPyramidService from the JAX-RS context when field
// injection did not populate it.
private void getService()
{
if (service == null)
{
  ContextResolver<MrsPyramidService> resolver =
      providers.getContextResolver(MrsPyramidService.class, MediaType.WILDCARD_TYPE);
  if (resolver != null)
  {
    service = resolver.getContext(MrsPyramidService.class);
  }
}
}
}
|
|
/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tests.targets.security.cert;
import dalvik.annotation.AndroidOnly;
import dalvik.annotation.TestLevel;
import dalvik.annotation.TestTargetNew;
import java.io.ByteArrayInputStream;
import java.security.KeyStore;
import java.security.NoSuchAlgorithmException;
import java.security.Provider;
import java.security.PublicKey;
import java.security.Security;
import java.security.cert.CertPath;
import java.security.cert.CertPathValidator;
import java.security.cert.CertPathValidatorResult;
import java.security.cert.Certificate;
import java.security.cert.CertificateFactory;
import java.security.cert.PKIXCertPathValidatorResult;
import java.security.cert.PKIXParameters;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.List;
import junit.framework.TestCase;
public class CertificateTest extends TestCase {
/*
* Following certificate chain was taken from https://www.verisign.com and
* uses MD2withRSA for the root certificate. This chain stops validating
* in Nov 2016.
*/
/**
* A selfsigned certificate using MD2withRSA
*
* <pre>
* Certificate:
* Data:
* Version: 1 (0x0)
* Serial Number:
* 70:ba:e4:1d:10:d9:29:34:b6:38:ca:7b:03:cc:ba:bf
* Signature Algorithm: md2WithRSAEncryption
* Issuer: C=US, O=VeriSign, Inc., OU=Class 3 Public Primary Certification Authority
* Validity
* Not Before: Jan 29 00:00:00 1996 GMT
* Not After : Aug 1 23:59:59 2028 GMT
* Subject: C=US, O=VeriSign, Inc., OU=Class 3 Public Primary Certification Authority
* Subject Public Key Info:
* Public Key Algorithm: rsaEncryption
* RSA Public Key: (1024 bit)
* Modulus (1024 bit):
* 00:c9:5c:59:9e:f2:1b:8a:01:14:b4:10:df:04:40:
* db:e3:57:af:6a:45:40:8f:84:0c:0b:d1:33:d9:d9:
* 11:cf:ee:02:58:1f:25:f7:2a:a8:44:05:aa:ec:03:
* 1f:78:7f:9e:93:b9:9a:00:aa:23:7d:d6:ac:85:a2:
* 63:45:c7:72:27:cc:f4:4c:c6:75:71:d2:39:ef:4f:
* 42:f0:75:df:0a:90:c6:8e:20:6f:98:0f:f8:ac:23:
* 5f:70:29:36:a4:c9:86:e7:b1:9a:20:cb:53:a5:85:
* e7:3d:be:7d:9a:fe:24:45:33:dc:76:15:ed:0f:a2:
* 71:64:4c:65:2e:81:68:45:a7
* Exponent: 65537 (0x10001)
* Signature Algorithm: md2WithRSAEncryption
* bb:4c:12:2b:cf:2c:26:00:4f:14:13:dd:a6:fb:fc:0a:11:84:
* 8c:f3:28:1c:67:92:2f:7c:b6:c5:fa:df:f0:e8:95:bc:1d:8f:
* 6c:2c:a8:51:cc:73:d8:a4:c0:53:f0:4e:d6:26:c0:76:01:57:
* 81:92:5e:21:f1:d1:b1:ff:e7:d0:21:58:cd:69:17:e3:44:1c:
* 9c:19:44:39:89:5c:dc:9c:00:0f:56:8d:02:99:ed:a2:90:45:
* 4c:e4:bb:10:a4:3d:f0:32:03:0e:f1:ce:f8:e8:c9:51:8c:e6:
* 62:9f:e6:9f:c0:7d:b7:72:9c:c9:36:3a:6b:9f:4e:a8:ff:64:
* 0d:64
* </pre>
*/
private static final String selfSignedCertMD2 =
"-----BEGIN CERTIFICATE-----\n"
+ "MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkG\n"
+ "A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz\n"
+ "cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2\n"
+ "MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV\n"
+ "BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt\n"
+ "YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN\n"
+ "ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE\n"
+ "BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is\n"
+ "I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G\n"
+ "CSqGSIb3DQEBAgUAA4GBALtMEivPLCYATxQT3ab7/AoRhIzzKBxnki98tsX63/Do\n"
+ "lbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59AhWM1pF+NEHJwZRDmJXNyc\n"
+ "AA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2OmufTqj/ZA1k\n"
+ "-----END CERTIFICATE-----\n";
/**
* A certificate signed by selfSignedCertMD2
*
* <pre>
* Certificate:
* Data:
* Version: 3 (0x2)
* Serial Number:
* 57:bf:fb:03:fb:2c:46:d4:e1:9e:ce:e0:d7:43:7f:13
* Signature Algorithm: sha1WithRSAEncryption
* Issuer: C=US, O=VeriSign, Inc., OU=Class 3 Public Primary Certification Authority
* Validity
* Not Before: Nov 8 00:00:00 2006 GMT
* Not After : Nov 7 23:59:59 2021 GMT
* Subject: C=US, O=VeriSign, Inc., OU=VeriSign Trust Network, OU=(c) 2006 VeriSign, Inc. - For authorized use only, CN=VeriSign Class 3 Public Primary Certification Authority - G5
* Subject Public Key Info:
* Public Key Algorithm: rsaEncryption
* RSA Public Key: (2048 bit)
* Modulus (2048 bit):
* 00:af:24:08:08:29:7a:35:9e:60:0c:aa:e7:4b:3b:
* 4e:dc:7c:bc:3c:45:1c:bb:2b:e0:fe:29:02:f9:57:
* 08:a3:64:85:15:27:f5:f1:ad:c8:31:89:5d:22:e8:
* 2a:aa:a6:42:b3:8f:f8:b9:55:b7:b1:b7:4b:b3:fe:
* 8f:7e:07:57:ec:ef:43:db:66:62:15:61:cf:60:0d:
* a4:d8:de:f8:e0:c3:62:08:3d:54:13:eb:49:ca:59:
* 54:85:26:e5:2b:8f:1b:9f:eb:f5:a1:91:c2:33:49:
* d8:43:63:6a:52:4b:d2:8f:e8:70:51:4d:d1:89:69:
* 7b:c7:70:f6:b3:dc:12:74:db:7b:5d:4b:56:d3:96:
* bf:15:77:a1:b0:f4:a2:25:f2:af:1c:92:67:18:e5:
* f4:06:04:ef:90:b9:e4:00:e4:dd:3a:b5:19:ff:02:
* ba:f4:3c:ee:e0:8b:eb:37:8b:ec:f4:d7:ac:f2:f6:
* f0:3d:af:dd:75:91:33:19:1d:1c:40:cb:74:24:19:
* 21:93:d9:14:fe:ac:2a:52:c7:8f:d5:04:49:e4:8d:
* 63:47:88:3c:69:83:cb:fe:47:bd:2b:7e:4f:c5:95:
* ae:0e:9d:d4:d1:43:c0:67:73:e3:14:08:7e:e5:3f:
* 9f:73:b8:33:0a:cf:5d:3f:34:87:96:8a:ee:53:e8:
* 25:15
* Exponent: 65537 (0x10001)
* X509v3 extensions:
* X509v3 Basic Constraints: critical
* CA:TRUE
* X509v3 CRL Distribution Points:
* URI:http://crl.verisign.com/pca3.crl
* X509v3 Key Usage: critical
* Certificate Sign, CRL Sign
* 1.3.6.1.5.5.7.1.12:
* 0_.].[0Y0W0U..image/gif0!0.0...+..............k...j.H.,{..0%.#http://logo.verisign.com/vslogo.gif
* X509v3 Certificate Policies:
* Policy: X509v3 Any Policy
* CPS: https://www.verisign.com/cps
* X509v3 Subject Key Identifier:
* 7F:D3:65:A7:C2:DD:EC:BB:F0:30:09:F3:43:39:FA:02:AF:33:31:33
* X509v3 Extended Key Usage:
* Netscape Server Gated Crypto, 2.16.840.1.113733.1.8.1, TLS Web Server Authentication, TLS Web Client Authentication
* X509v3 Authority Key Identifier:
* DirName:/C=US/O=VeriSign, Inc./OU=Class 3 Public Primary Certification Authority
* serial:70:BA:E4:1D:10:D9:29:34:B6:38:CA:7B:03:CC:BA:BF
* Signature Algorithm: sha1WithRSAEncryption
* a9:7b:66:29:30:f7:d5:b4:a6:96:12:d0:ee:72:f0:58:11:69:
* 15:55:5f:41:ff:d2:12:84:13:a4:d9:03:66:ff:a9:e0:4c:c9:
* ed:8c:72:8b:b4:d7:55:3b:29:15:60:c8:3c:21:ef:44:2e:93:
* 3d:c6:0b:0c:8d:24:3f:1e:fb:01:5a:7a:dd:83:66:14:d1:c7:
* fd:30:53:48:51:85:85:13:a8:54:e1:ee:76:a2:89:18:d3:97:
* 89:7a:c6:fd:b3:bd:94:61:5a:3a:08:cf:14:93:bd:93:fd:09:
* a9:7b:56:c8:00:b8:44:58:e9:de:5b:77:bd:07:1c:6c:0b:30:
* 30:c7
* </pre>
*/
// PEM-encoded intermediate CA certificate (VeriSign Class 3 G5, SHA1withRSA);
// full openssl dump in the javadoc above. Used as certs[1] of the MD2-rooted chain.
private static final String signedCert1Chain1 =
"-----BEGIN CERTIFICATE-----\n"
+ "MIIFEzCCBHygAwIBAgIQV7/7A/ssRtThns7g10N/EzANBgkqhkiG9w0BAQUFADBf\n"
+ "MQswCQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xNzA1BgNVBAsT\n"
+ "LkNsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkw\n"
+ "HhcNMDYxMTA4MDAwMDAwWhcNMjExMTA3MjM1OTU5WjCByjELMAkGA1UEBhMCVVMx\n"
+ "FzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJpU2lnbiBUcnVz\n"
+ "dCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2lnbiwgSW5jLiAtIEZv\n"
+ "ciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2lnbiBDbGFzcyAz\n"
+ "IFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzUwggEi\n"
+ "MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1nmAMqudLO07cfLw8\n"
+ "RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbext0uz/o9+B1fs70Pb\n"
+ "ZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIzSdhDY2pSS9KP6HBR\n"
+ "TdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQGBO+QueQA5N06tRn/\n"
+ "Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+rCpSx4/VBEnkjWNH\n"
+ "iDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/NIeWiu5T6CUVAgMB\n"
+ "AAGjggHeMIIB2jAPBgNVHRMBAf8EBTADAQH/MDEGA1UdHwQqMCgwJqAkoCKGIGh0\n"
+ "dHA6Ly9jcmwudmVyaXNpZ24uY29tL3BjYTMuY3JsMA4GA1UdDwEB/wQEAwIBBjBt\n"
+ "BggrBgEFBQcBDARhMF+hXaBbMFkwVzBVFglpbWFnZS9naWYwITAfMAcGBSsOAwIa\n"
+ "BBSP5dMahqyNjmvDz4Bq1EgYLHsZLjAlFiNodHRwOi8vbG9nby52ZXJpc2lnbi5j\n"
+ "b20vdnNsb2dvLmdpZjA9BgNVHSAENjA0MDIGBFUdIAAwKjAoBggrBgEFBQcCARYc\n"
+ "aHR0cHM6Ly93d3cudmVyaXNpZ24uY29tL2NwczAdBgNVHQ4EFgQUf9Nlp8Ld7Lvw\n"
+ "MAnzQzn6Aq8zMTMwNAYDVR0lBC0wKwYJYIZIAYb4QgQBBgpghkgBhvhFAQgBBggr\n"
+ "BgEFBQcDAQYIKwYBBQUHAwIwgYAGA1UdIwR5MHehY6RhMF8xCzAJBgNVBAYTAlVT\n"
+ "MRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE3MDUGA1UECxMuQ2xhc3MgMyBQdWJs\n"
+ "aWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eYIQcLrkHRDZKTS2OMp7\n"
+ "A8y6vzANBgkqhkiG9w0BAQUFAAOBgQCpe2YpMPfVtKaWEtDucvBYEWkVVV9B/9IS\n"
+ "hBOk2QNm/6ngTMntjHKLtNdVOykVYMg8Ie9ELpM9xgsMjSQ/HvsBWnrdg2YU0cf9\n"
+ "MFNIUYWFE6hU4e52ookY05eJesb9s72UYVo6CM8Uk72T/Qmpe1bIALhEWOneW3e9\n"
+ "BxxsCzAwxw==\n"
+ "-----END CERTIFICATE-----";
/**
* A certificate signed by signedCert1Chain1
*
* <pre>
* Certificate:
* Data:
* Version: 3 (0x2)
* Serial Number:
* 11:2a:00:6d:37:e5:10:6f:d6:ca:7c:c3:ef:ba:cc:18
* Signature Algorithm: sha1WithRSAEncryption
* Issuer: C=US, O=VeriSign, Inc., OU=VeriSign Trust Network, OU=(c) 2006 VeriSign, Inc. - For authorized use only, CN=VeriSign Class 3 Public Primary Certification Authority - G5
* Validity
* Not Before: Nov 8 00:00:00 2006 GMT
* Not After : Nov 7 23:59:59 2016 GMT
* Subject: C=US, O=VeriSign, Inc., OU=VeriSign Trust Network, OU=Terms of use at https://www.verisign.com/rpa (c)06, CN=VeriSign Class 3 Extended Validation SSL SGC CA
* Subject Public Key Info:
* Public Key Algorithm: rsaEncryption
* RSA Public Key: (2048 bit)
* Modulus (2048 bit):
* 00:bd:56:88:ba:88:34:64:64:cf:cd:ca:b0:ee:e7:
* 19:73:c5:72:d9:bb:45:bc:b5:a8:ff:83:be:1c:03:
* db:ed:89:b7:2e:10:1a:25:bc:55:ca:41:a1:9f:0b:
* cf:19:5e:70:b9:5e:39:4b:9e:31:1c:5f:87:ae:2a:
* aa:a8:2b:a2:1b:3b:10:23:5f:13:b1:dd:08:8c:4e:
* 14:da:83:81:e3:b5:8c:e3:68:ed:24:67:ce:56:b6:
* ac:9b:73:96:44:db:8a:8c:b3:d6:f0:71:93:8e:db:
* 71:54:4a:eb:73:59:6a:8f:70:51:2c:03:9f:97:d1:
* cc:11:7a:bc:62:0d:95:2a:c9:1c:75:57:e9:f5:c7:
* ea:ba:84:35:cb:c7:85:5a:7e:e4:4d:e1:11:97:7d:
* 0e:20:34:45:db:f1:a2:09:eb:eb:3d:9e:b8:96:43:
* 5e:34:4b:08:25:1e:43:1a:a2:d9:b7:8a:01:34:3d:
* c3:f8:e5:af:4f:8c:ff:cd:65:f0:23:4e:c5:97:b3:
* 5c:da:90:1c:82:85:0d:06:0d:c1:22:b6:7b:28:a4:
* 03:c3:4c:53:d1:58:bc:72:bc:08:39:fc:a0:76:a8:
* a8:e9:4b:6e:88:3d:e3:b3:31:25:8c:73:29:48:0e:
* 32:79:06:ed:3d:43:f4:f6:e4:e9:fc:7d:be:8e:08:
* d5:1f
* Exponent: 65537 (0x10001)
* X509v3 extensions:
* X509v3 Subject Key Identifier:
* 4E:43:C8:1D:76:EF:37:53:7A:4F:F2:58:6F:94:F3:38:E2:D5:BD:DF
* X509v3 Basic Constraints: critical
* CA:TRUE, pathlen:0
* X509v3 Certificate Policies:
* Policy: X509v3 Any Policy
* CPS: https://www.verisign.com/cps
* X509v3 CRL Distribution Points:
* URI:http://EVSecure-crl.verisign.com/pca3-g5.crl
* X509v3 Extended Key Usage:
* Netscape Server Gated Crypto, 2.16.840.1.113733.1.8.1
* X509v3 Key Usage: critical
* Certificate Sign, CRL Sign
* Netscape Cert Type:
* SSL CA, S/MIME CA
* 1.3.6.1.5.5.7.1.12:
* 0_.].[0Y0W0U..image/gif0!0.0...+..............k...j.H.,{..0%.#http://logo.verisign.com/vslogo.gif
* X509v3 Subject Alternative Name:
* DirName:/CN=Class3CA2048-1-48
* Authority Information Access:
* OCSP - URI:http://EVSecure-ocsp.verisign.com
* X509v3 Authority Key Identifier:
* keyid:7F:D3:65:A7:C2:DD:EC:BB:F0:30:09:F3:43:39:FA:02:AF:33:31:33
* Signature Algorithm: sha1WithRSAEncryption
* 5a:a2:b1:bf:eb:8d:d4:38:a8:80:72:c2:dc:38:2e:ac:a7:71:
* f9:2b:a3:bb:47:bb:6d:69:6f:10:36:98:8c:c7:56:2e:bb:bc:
* ab:4a:9b:7a:d6:f2:82:93:e0:14:fe:8a:ce:83:b7:83:db:93:
* 87:ab:ac:65:79:49:fd:57:a9:b1:ce:09:1f:ba:10:15:c4:09:
* 0e:62:e3:f9:0a:25:d5:64:98:f0:f2:a8:0f:76:32:7e:91:e6:
* 18:ee:bc:e7:da:d0:4e:8d:78:bb:e2:9d:c0:59:2b:c0:ce:95:
* 0d:24:0c:72:ca:34:5e:70:22:89:2b:4a:b0:f1:68:87:f3:ee:
* 44:8d:28:40:77:39:6e:48:72:45:31:5d:6b:39:0e:86:02:ea:
* 66:99:93:31:0f:df:67:de:a6:9f:8c:9d:4c:ce:71:6f:3a:21:
* f6:b9:34:3f:f9:6e:d8:9a:f7:3e:da:f3:81:5f:7a:5c:6d:8f:
* 7c:f6:99:74:b7:ff:e4:17:5d:ed:61:5e:ab:48:bb:96:8d:66:
* 45:39:b4:12:0a:f6:70:e9:9c:76:22:4b:60:e9:2a:1b:34:49:
* f7:a2:d4:67:c0:b1:26:ad:13:ba:d9:84:01:c1:ab:e1:8e:6d:
* 70:16:3b:77:ac:91:9a:bb:1a:1f:da:58:a7:e4:4f:c1:61:ae:
* bc:a2:fe:4b
* </pre>
*/
// PEM-encoded leaf-side CA certificate issued by signedCert1Chain1
// (VeriSign Class 3 EV SSL SGC CA); full openssl dump in the javadoc above.
// Used as certs[2] of the MD2-rooted chain.
private static final String signedCert2Chain1 =
"-----BEGIN CERTIFICATE-----\n"
+ "MIIGCjCCBPKgAwIBAgIQESoAbTflEG/WynzD77rMGDANBgkqhkiG9w0BAQUFADCB\n"
+ "yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL\n"
+ "ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp\n"
+ "U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW\n"
+ "ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0\n"
+ "aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMTYxMTA3MjM1OTU5WjCBvjEL\n"
+ "MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW\n"
+ "ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTswOQYDVQQLEzJUZXJtcyBvZiB1c2UgYXQg\n"
+ "aHR0cHM6Ly93d3cudmVyaXNpZ24uY29tL3JwYSAoYykwNjE4MDYGA1UEAxMvVmVy\n"
+ "aVNpZ24gQ2xhc3MgMyBFeHRlbmRlZCBWYWxpZGF0aW9uIFNTTCBTR0MgQ0EwggEi\n"
+ "MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9Voi6iDRkZM/NyrDu5xlzxXLZ\n"
+ "u0W8taj/g74cA9vtibcuEBolvFXKQaGfC88ZXnC5XjlLnjEcX4euKqqoK6IbOxAj\n"
+ "XxOx3QiMThTag4HjtYzjaO0kZ85Wtqybc5ZE24qMs9bwcZOO23FUSutzWWqPcFEs\n"
+ "A5+X0cwRerxiDZUqyRx1V+n1x+q6hDXLx4VafuRN4RGXfQ4gNEXb8aIJ6+s9nriW\n"
+ "Q140SwglHkMaotm3igE0PcP45a9PjP/NZfAjTsWXs1zakByChQ0GDcEitnsopAPD\n"
+ "TFPRWLxyvAg5/KB2qKjpS26IPeOzMSWMcylIDjJ5Bu09Q/T25On8fb6OCNUfAgMB\n"
+ "AAGjggH0MIIB8DAdBgNVHQ4EFgQUTkPIHXbvN1N6T/JYb5TzOOLVvd8wEgYDVR0T\n"
+ "AQH/BAgwBgEB/wIBADA9BgNVHSAENjA0MDIGBFUdIAAwKjAoBggrBgEFBQcCARYc\n"
+ "aHR0cHM6Ly93d3cudmVyaXNpZ24uY29tL2NwczA9BgNVHR8ENjA0MDKgMKAuhixo\n"
+ "dHRwOi8vRVZTZWN1cmUtY3JsLnZlcmlzaWduLmNvbS9wY2EzLWc1LmNybDAgBgNV\n"
+ "HSUEGTAXBglghkgBhvhCBAEGCmCGSAGG+EUBCAEwDgYDVR0PAQH/BAQDAgEGMBEG\n"
+ "CWCGSAGG+EIBAQQEAwIBBjBtBggrBgEFBQcBDARhMF+hXaBbMFkwVzBVFglpbWFn\n"
+ "ZS9naWYwITAfMAcGBSsOAwIaBBSP5dMahqyNjmvDz4Bq1EgYLHsZLjAlFiNodHRw\n"
+ "Oi8vbG9nby52ZXJpc2lnbi5jb20vdnNsb2dvLmdpZjApBgNVHREEIjAgpB4wHDEa\n"
+ "MBgGA1UEAxMRQ2xhc3MzQ0EyMDQ4LTEtNDgwPQYIKwYBBQUHAQEEMTAvMC0GCCsG\n"
+ "AQUFBzABhiFodHRwOi8vRVZTZWN1cmUtb2NzcC52ZXJpc2lnbi5jb20wHwYDVR0j\n"
+ "BBgwFoAUf9Nlp8Ld7LvwMAnzQzn6Aq8zMTMwDQYJKoZIhvcNAQEFBQADggEBAFqi\n"
+ "sb/rjdQ4qIBywtw4Lqyncfkro7tHu21pbxA2mIzHVi67vKtKm3rW8oKT4BT+is6D\n"
+ "t4Pbk4errGV5Sf1XqbHOCR+6EBXECQ5i4/kKJdVkmPDyqA92Mn6R5hjuvOfa0E6N\n"
+ "eLvincBZK8DOlQ0kDHLKNF5wIokrSrDxaIfz7kSNKEB3OW5IckUxXWs5DoYC6maZ\n"
+ "kzEP32fepp+MnUzOcW86Ifa5ND/5btia9z7a84Ffelxtj3z2mXS3/+QXXe1hXqtI\n"
+ "u5aNZkU5tBIK9nDpnHYiS2DpKhs0Sfei1GfAsSatE7rZhAHBq+GObXAWO3eskZq7\n"
+ "Gh/aWKfkT8Fhrryi/ks=\n"
+ "-----END CERTIFICATE-----";
/*
* Following certificate chain was taken from https://www.thawte.com and
* uses MD5withRSA for the root certificate. This chain stops validating
* in Nov 2016.
*/
/**
* A self-signed certificate using MD5withRSA
*
* <pre>
* Certificate:
* Data:
* Version: 3 (0x2)
* Serial Number: 1 (0x1)
* Signature Algorithm: md5WithRSAEncryption
* Issuer: C=ZA, ST=Western Cape, L=Cape Town, O=Thawte Consulting cc, OU=Certification Services Division, CN=Thawte Premium Server CA/emailAddress=premium-server@thawte.com
* Validity
* Not Before: Aug 1 00:00:00 1996 GMT
* Not After : Dec 31 23:59:59 2020 GMT
* Subject: C=ZA, ST=Western Cape, L=Cape Town, O=Thawte Consulting cc, OU=Certification Services Division, CN=Thawte Premium Server CA/emailAddress=premium-server@thawte.com
* Subject Public Key Info:
* Public Key Algorithm: rsaEncryption
* RSA Public Key: (1024 bit)
* Modulus (1024 bit):
* 00:d2:36:36:6a:8b:d7:c2:5b:9e:da:81:41:62:8f:
* 38:ee:49:04:55:d6:d0:ef:1c:1b:95:16:47:ef:18:
* 48:35:3a:52:f4:2b:6a:06:8f:3b:2f:ea:56:e3:af:
* 86:8d:9e:17:f7:9e:b4:65:75:02:4d:ef:cb:09:a2:
* 21:51:d8:9b:d0:67:d0:ba:0d:92:06:14:73:d4:93:
* cb:97:2a:00:9c:5c:4e:0c:bc:fa:15:52:fc:f2:44:
* 6e:da:11:4a:6e:08:9f:2f:2d:e3:f9:aa:3a:86:73:
* b6:46:53:58:c8:89:05:bd:83:11:b8:73:3f:aa:07:
* 8d:f4:42:4d:e7:40:9d:1c:37
* Exponent: 65537 (0x10001)
* X509v3 extensions:
* X509v3 Basic Constraints: critical
* CA:TRUE
* Signature Algorithm: md5WithRSAEncryption
* 26:48:2c:16:c2:58:fa:e8:16:74:0c:aa:aa:5f:54:3f:f2:d7:
* c9:78:60:5e:5e:6e:37:63:22:77:36:7e:b2:17:c4:34:b9:f5:
* 08:85:fc:c9:01:38:ff:4d:be:f2:16:42:43:e7:bb:5a:46:fb:
* c1:c6:11:1f:f1:4a:b0:28:46:c9:c3:c4:42:7d:bc:fa:ab:59:
* 6e:d5:b7:51:88:11:e3:a4:85:19:6b:82:4c:a4:0c:12:ad:e9:
* a4:ae:3f:f1:c3:49:65:9a:8c:c5:c8:3e:25:b7:94:99:bb:92:
* 32:71:07:f0:86:5e:ed:50:27:a6:0d:a6:23:f9:bb:cb:a6:07:
* 14:42
* </pre>
*/
// PEM-encoded self-signed root certificate (Thawte Premium Server CA,
// MD5withRSA); full openssl dump in the javadoc above. Trust anchor of the
// MD5-rooted chain (certs[0] when useMD2root == false).
private static final String selfSignedCertMD5 =
"-----BEGIN CERTIFICATE-----\n"
+ "MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx\n"
+ "FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD\n"
+ "VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv\n"
+ "biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy\n"
+ "dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t\n"
+ "MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB\n"
+ "MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG\n"
+ "A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp\n"
+ "b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl\n"
+ "cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv\n"
+ "bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE\n"
+ "VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ\n"
+ "ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR\n"
+ "uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG\n"
+ "9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI\n"
+ "hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM\n"
+ "pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg==\n"
+ "-----END CERTIFICATE-----";
/**
* A certificate signed by selfSignedCertMD5
*
* <pre>
* Certificate:
* Data:
* Version: 3 (0x2)
* Serial Number:
* 5f:a6:be:80:b6:86:c6:2f:01:ed:0c:ab:b1:96:a1:05
* Signature Algorithm: sha1WithRSAEncryption
* Issuer: C=ZA, ST=Western Cape, L=Cape Town, O=Thawte Consulting cc, OU=Certification Services Division, CN=Thawte Premium Server CA/emailAddress=premium-server@thawte.com
* Validity
* Not Before: Nov 17 00:00:00 2006 GMT
* Not After : Dec 30 23:59:59 2020 GMT
* Subject: C=US, O=thawte, Inc., OU=Certification Services Division, OU=(c) 2006 thawte, Inc. - For authorized use only, CN=thawte Primary Root CA
* Subject Public Key Info:
* Public Key Algorithm: rsaEncryption
* RSA Public Key: (2048 bit)
* Modulus (2048 bit):
* 00:ac:a0:f0:fb:80:59:d4:9c:c7:a4:cf:9d:a1:59:
* 73:09:10:45:0c:0d:2c:6e:68:f1:6c:5b:48:68:49:
* 59:37:fc:0b:33:19:c2:77:7f:cc:10:2d:95:34:1c:
* e6:eb:4d:09:a7:1c:d2:b8:c9:97:36:02:b7:89:d4:
* 24:5f:06:c0:cc:44:94:94:8d:02:62:6f:eb:5a:dd:
* 11:8d:28:9a:5c:84:90:10:7a:0d:bd:74:66:2f:6a:
* 38:a0:e2:d5:54:44:eb:1d:07:9f:07:ba:6f:ee:e9:
* fd:4e:0b:29:f5:3e:84:a0:01:f1:9c:ab:f8:1c:7e:
* 89:a4:e8:a1:d8:71:65:0d:a3:51:7b:ee:bc:d2:22:
* 60:0d:b9:5b:9d:df:ba:fc:51:5b:0b:af:98:b2:e9:
* 2e:e9:04:e8:62:87:de:2b:c8:d7:4e:c1:4c:64:1e:
* dd:cf:87:58:ba:4a:4f:ca:68:07:1d:1c:9d:4a:c6:
* d5:2f:91:cc:7c:71:72:1c:c5:c0:67:eb:32:fd:c9:
* 92:5c:94:da:85:c0:9b:bf:53:7d:2b:09:f4:8c:9d:
* 91:1f:97:6a:52:cb:de:09:36:a4:77:d8:7b:87:50:
* 44:d5:3e:6e:29:69:fb:39:49:26:1e:09:a5:80:7b:
* 40:2d:eb:e8:27:85:c9:fe:61:fd:7e:e6:7c:97:1d:
* d5:9d
* Exponent: 65537 (0x10001)
* X509v3 extensions:
* X509v3 Basic Constraints: critical
* CA:TRUE
* X509v3 Certificate Policies:
* Policy: X509v3 Any Policy
* CPS: https://www.thawte.com/cps
* X509v3 Key Usage: critical
* Certificate Sign, CRL Sign
* X509v3 Subject Key Identifier:
* 7B:5B:45:CF:AF:CE:CB:7A:FD:31:92:1A:6A:B6:F3:46:EB:57:48:50
* X509v3 CRL Distribution Points:
* URI:http://crl.thawte.com/ThawtePremiumServerCA.crl
* X509v3 Extended Key Usage:
* Netscape Server Gated Crypto, 2.16.840.1.113733.1.8.1
* X509v3 Authority Key Identifier:
* DirName:/C=ZA/ST=Western Cape/L=Cape Town/O=Thawte Consulting cc/OU=Certification Services Division/CN=Thawte Premium Server CA/emailAddress=premium-server@thawte.com
* serial:01
* Signature Algorithm: sha1WithRSAEncryption
* 2b:ca:12:c9:dd:d7:cc:63:1c:9b:31:35:4a:dd:e4:b7:f6:9d:
* d1:a4:fb:1e:f8:47:f9:ae:07:8e:0d:58:12:fb:da:ed:b5:cc:
* 33:e5:97:68:47:61:42:d5:66:a9:6e:1e:47:bf:85:db:7d:58:
* d1:77:5a:cc:90:61:98:9a:29:f5:9d:b1:cf:b8:dc:f3:7b:80:
* 47:48:d1:7d:f4:68:8c:c4:41:cb:b4:e9:fd:f0:23:e0:b1:9b:
* 76:2a:6d:28:56:a3:8c:cd:e9:ec:21:00:71:f0:5f:dd:50:a5:
* 69:42:1b:83:11:5d:84:28:d3:27:ae:ec:2a:ab:2f:60:42:c5:
* c4:78
* </pre>
*/
// PEM-encoded intermediate CA certificate signed by selfSignedCertMD5
// (thawte Primary Root CA, SHA1withRSA); full openssl dump in the javadoc
// above. Used as certs[1] of the MD5-rooted chain.
private static final String signedCert1Chain2 =
"-----BEGIN CERTIFICATE-----\n"
+ "MIIFUTCCBLqgAwIBAgIQX6a+gLaGxi8B7QyrsZahBTANBgkqhkiG9w0BAQUFADCB\n"
+ "zjELMAkGA1UEBhMCWkExFTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJ\n"
+ "Q2FwZSBUb3duMR0wGwYDVQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UE\n"
+ "CxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhh\n"
+ "d3RlIFByZW1pdW0gU2VydmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNl\n"
+ "cnZlckB0aGF3dGUuY29tMB4XDTA2MTExNzAwMDAwMFoXDTIwMTIzMDIzNTk1OVow\n"
+ "gakxCzAJBgNVBAYTAlVTMRUwEwYDVQQKEwx0aGF3dGUsIEluYy4xKDAmBgNVBAsT\n"
+ "H0NlcnRpZmljYXRpb24gU2VydmljZXMgRGl2aXNpb24xODA2BgNVBAsTLyhjKSAy\n"
+ "MDA2IHRoYXd0ZSwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MR8wHQYD\n"
+ "VQQDExZ0aGF3dGUgUHJpbWFyeSBSb290IENBMIIBIjANBgkqhkiG9w0BAQEFAAOC\n"
+ "AQ8AMIIBCgKCAQEArKDw+4BZ1JzHpM+doVlzCRBFDA0sbmjxbFtIaElZN/wLMxnC\n"
+ "d3/MEC2VNBzm600JpxzSuMmXNgK3idQkXwbAzESUlI0CYm/rWt0RjSiaXISQEHoN\n"
+ "vXRmL2o4oOLVVETrHQefB7pv7un9Tgsp9T6EoAHxnKv4HH6JpOih2HFlDaNRe+68\n"
+ "0iJgDblbnd+6/FFbC6+Ysuku6QToYofeK8jXTsFMZB7dz4dYukpPymgHHRydSsbV\n"
+ "L5HMfHFyHMXAZ+sy/cmSXJTahcCbv1N9Kwn0jJ2RH5dqUsveCTakd9h7h1BE1T5u\n"
+ "KWn7OUkmHgmlgHtALevoJ4XJ/mH9fuZ8lx3VnQIDAQABo4IBzTCCAckwDwYDVR0T\n"
+ "AQH/BAUwAwEB/zA7BgNVHSAENDAyMDAGBFUdIAAwKDAmBggrBgEFBQcCARYaaHR0\n"
+ "cHM6Ly93d3cudGhhd3RlLmNvbS9jcHMwDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQW\n"
+ "BBR7W0XPr87Lev0xkhpqtvNG61dIUDBABgNVHR8EOTA3MDWgM6Axhi9odHRwOi8v\n"
+ "Y3JsLnRoYXd0ZS5jb20vVGhhd3RlUHJlbWl1bVNlcnZlckNBLmNybDAgBgNVHSUE\n"
+ "GTAXBglghkgBhvhCBAEGCmCGSAGG+EUBCAEwgeUGA1UdIwSB3TCB2qGB1KSB0TCB\n"
+ "zjELMAkGA1UEBhMCWkExFTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJ\n"
+ "Q2FwZSBUb3duMR0wGwYDVQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UE\n"
+ "CxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhh\n"
+ "d3RlIFByZW1pdW0gU2VydmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNl\n"
+ "cnZlckB0aGF3dGUuY29tggEBMA0GCSqGSIb3DQEBBQUAA4GBACvKEsnd18xjHJsx\n"
+ "NUrd5Lf2ndGk+x74R/muB44NWBL72u21zDPll2hHYULVZqluHke/hdt9WNF3WsyQ\n"
+ "YZiaKfWdsc+43PN7gEdI0X30aIzEQcu06f3wI+Cxm3YqbShWo4zN6ewhAHHwX91Q\n"
+ "pWlCG4MRXYQo0yeu7CqrL2BCxcR4\n"
+ "-----END CERTIFICATE-----";
/**
* A certificate signed by signedCert1Chain2
*
* <pre>
* Certificate:
* Data:
* Version: 3 (0x2)
* Serial Number:
* 7b:11:55:eb:78:9a:90:85:b5:8c:92:ff:42:b7:fe:56
* Signature Algorithm: sha1WithRSAEncryption
* Issuer: C=US, O=thawte, Inc., OU=Certification Services Division, OU=(c) 2006 thawte, Inc. - For authorized use only, CN=thawte Primary Root CA
* Validity
* Not Before: Nov 17 00:00:00 2006 GMT
* Not After : Nov 16 23:59:59 2016 GMT
* Subject: C=US, O=thawte, Inc., OU=Terms of use at https://www.thawte.com/cps (c)06, CN=thawte Extended Validation SSL CA
* Subject Public Key Info:
* Public Key Algorithm: rsaEncryption
* RSA Public Key: (2048 bit)
* Modulus (2048 bit):
* 00:b5:8d:47:f7:b0:48:76:9b:bd:fb:a9:cb:bf:04:
* 31:a2:3d:9a:7e:30:29:d3:28:b8:fe:68:ce:cf:e9:
* 30:6a:53:95:0e:50:65:80:26:c9:98:bf:f2:14:ff:
* 06:7c:6a:7b:dc:50:07:e2:98:fa:df:cf:30:5d:ca:
* a8:b9:8a:9b:2d:2d:7e:59:8b:1a:f7:b3:c9:c3:69:
* 80:0f:89:19:08:77:b2:52:55:ad:78:83:9d:6b:b9:
* 87:e4:53:24:37:2c:fc:19:0e:8b:79:14:4d:be:80:
* 9e:b4:9b:73:74:31:f2:38:ec:8a:af:2a:36:8e:64:
* ce:31:26:14:03:54:53:8e:fb:84:08:c1:7e:47:32:
* 3d:71:e0:ba:ba:8c:82:58:96:4d:68:43:56:1a:f3:
* 46:5a:32:99:95:b0:60:6f:e9:41:8a:48:cc:16:0d:
* 44:68:b1:8a:dd:dd:17:3d:a4:9b:78:7f:2e:29:06:
* f0:dc:d5:d2:13:3f:c0:36:05:fd:c7:b5:b9:80:1b:
* 8a:46:74:2f:f1:ab:79:9e:97:6e:f8:a5:13:5a:f3:
* fc:b5:d7:c8:96:19:37:ee:06:bc:c6:27:14:81:05:
* 14:33:38:16:9f:4b:e2:0f:db:38:bb:f3:01:ef:35:
* 2e:de:af:f1:e4:6f:6f:f7:96:00:56:5e:8f:60:94:
* 1d:2f
* Exponent: 65537 (0x10001)
* X509v3 extensions:
* Authority Information Access:
* OCSP - URI:http://EVSecure-ocsp.thawte.com
* X509v3 Basic Constraints: critical
* CA:TRUE, pathlen:0
* X509v3 Certificate Policies:
* Policy: X509v3 Any Policy
* CPS: https://www.thawte.com/cps
* X509v3 CRL Distribution Points:
* URI:http://crl.thawte.com/ThawtePCA.crl
* X509v3 Key Usage: critical
* Certificate Sign, CRL Sign
* X509v3 Subject Alternative Name:
* DirName:/CN=PrivateLabel3-2048-234
* X509v3 Subject Key Identifier:
* CD:32:E2:F2:5D:25:47:02:AA:8F:79:4B:32:EE:03:99:FD:30:49:D1
* X509v3 Authority Key Identifier:
* keyid:7B:5B:45:CF:AF:CE:CB:7A:FD:31:92:1A:6A:B6:F3:46:EB:57:48:50
* Signature Algorithm: sha1WithRSAEncryption
* 0b:b4:96:ce:03:0c:d1:9d:af:cb:e3:39:56:0d:c6:22:a0:c9:
* 71:7d:ea:65:95:31:f1:dc:b6:1e:f2:8d:31:5d:61:b3:54:84:
* 13:cc:2b:3f:02:5c:c7:1f:15:01:82:90:1e:31:25:06:e3:32:
* 0c:87:f0:c3:be:9a:c4:00:41:f6:c6:91:e5:6c:3e:92:5d:a3:
* e4:3d:1f:32:2d:31:1e:50:c1:02:21:b4:23:e3:07:75:9a:52:
* 45:51:fa:d3:1d:fd:01:6f:60:6d:25:d9:bf:43:b1:a7:43:6c:
* ad:8c:bb:bc:f7:99:41:eb:d6:95:cf:20:5c:7e:6f:c4:2a:da:
* 4b:4d:1b:5b:c2:9f:b0:94:d4:bf:47:97:fd:9d:49:79:60:8e:
* ae:96:19:a1:b0:eb:e8:df:42:c7:22:74:61:0c:25:a3:7f:8f:
* 45:d2:7e:e7:4a:6e:1d:4f:48:bb:c2:da:1a:7e:4a:59:81:fa:
* 1c:e3:fb:14:73:41:03:a1:77:fa:9b:06:fc:7c:33:bd:46:3d:
* 0c:06:17:85:7b:2a:7b:e3:36:e8:83:df:fa:aa:cb:32:0c:79:
* aa:86:74:6c:44:54:f6:d8:07:9e:cd:98:f4:23:05:09:2f:a2:
* 53:b5:db:0a:81:cc:5f:23:cb:79:11:c5:11:5b:85:6b:27:01:
* 89:f3:0e:bb
* </pre>
*/
// PEM-encoded leaf-side CA certificate issued by signedCert1Chain2
// (thawte Extended Validation SSL CA); full openssl dump in the javadoc
// above. Used as certs[2] of the MD5-rooted chain.
private static final String signedCert2Chain2 =
"-----BEGIN CERTIFICATE-----\n"
+ "MIIFCjCCA/KgAwIBAgIQexFV63iakIW1jJL/Qrf+VjANBgkqhkiG9w0BAQUFADCB\n"
+ "qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf\n"
+ "Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw\n"
+ "MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV\n"
+ "BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMTYx\n"
+ "MTE2MjM1OTU5WjCBizELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j\n"
+ "LjE5MDcGA1UECxMwVGVybXMgb2YgdXNlIGF0IGh0dHBzOi8vd3d3LnRoYXd0ZS5j\n"
+ "b20vY3BzIChjKTA2MSowKAYDVQQDEyF0aGF3dGUgRXh0ZW5kZWQgVmFsaWRhdGlv\n"
+ "biBTU0wgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC1jUf3sEh2\n"
+ "m737qcu/BDGiPZp+MCnTKLj+aM7P6TBqU5UOUGWAJsmYv/IU/wZ8anvcUAfimPrf\n"
+ "zzBdyqi5ipstLX5Zixr3s8nDaYAPiRkId7JSVa14g51ruYfkUyQ3LPwZDot5FE2+\n"
+ "gJ60m3N0MfI47IqvKjaOZM4xJhQDVFOO+4QIwX5HMj1x4Lq6jIJYlk1oQ1Ya80Za\n"
+ "MpmVsGBv6UGKSMwWDURosYrd3Rc9pJt4fy4pBvDc1dITP8A2Bf3HtbmAG4pGdC/x\n"
+ "q3mel274pRNa8/y118iWGTfuBrzGJxSBBRQzOBafS+IP2zi78wHvNS7er/Hkb2/3\n"
+ "lgBWXo9glB0vAgMBAAGjggFIMIIBRDA7BggrBgEFBQcBAQQvMC0wKwYIKwYBBQUH\n"
+ "MAGGH2h0dHA6Ly9FVlNlY3VyZS1vY3NwLnRoYXd0ZS5jb20wEgYDVR0TAQH/BAgw\n"
+ "BgEB/wIBADA7BgNVHSAENDAyMDAGBFUdIAAwKDAmBggrBgEFBQcCARYaaHR0cHM6\n"
+ "Ly93d3cudGhhd3RlLmNvbS9jcHMwNAYDVR0fBC0wKzApoCegJYYjaHR0cDovL2Ny\n"
+ "bC50aGF3dGUuY29tL1RoYXd0ZVBDQS5jcmwwDgYDVR0PAQH/BAQDAgEGMC4GA1Ud\n"
+ "EQQnMCWkIzAhMR8wHQYDVQQDExZQcml2YXRlTGFiZWwzLTIwNDgtMjM0MB0GA1Ud\n"
+ "DgQWBBTNMuLyXSVHAqqPeUsy7gOZ/TBJ0TAfBgNVHSMEGDAWgBR7W0XPr87Lev0x\n"
+ "khpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAC7SWzgMM0Z2vy+M5Vg3GIqDJ\n"
+ "cX3qZZUx8dy2HvKNMV1hs1SEE8wrPwJcxx8VAYKQHjElBuMyDIfww76axABB9saR\n"
+ "5Ww+kl2j5D0fMi0xHlDBAiG0I+MHdZpSRVH60x39AW9gbSXZv0Oxp0NsrYy7vPeZ\n"
+ "QevWlc8gXH5vxCraS00bW8KfsJTUv0eX/Z1JeWCOrpYZobDr6N9CxyJ0YQwlo3+P\n"
+ "RdJ+50puHU9Iu8LaGn5KWYH6HOP7FHNBA6F3+psG/HwzvUY9DAYXhXsqe+M26IPf\n"
+ "+qrLMgx5qoZ0bERU9tgHns2Y9CMFCS+iU7XbCoHMXyPLeRHFEVuFaycBifMOuw==\n"
+ "-----END CERTIFICATE-----";
@TestTargetNew(
    clazz=Certificate.class,
    level=TestLevel.ADDITIONAL,
    method="verify",
    args={PublicKey.class}
)
/**
 * Checks that every installed X.509 CertificateFactory provider still
 * accepts an MD5withRSA self-signed certificate: verify() must complete
 * without throwing when given the certificate's own public key.
 */
public void testVerifyMD5() throws Exception {
    Provider[] providers = Security.getProviders("CertificateFactory.X509");
    for (Provider provider : providers) {
        CertificateFactory certificateFactory = CertificateFactory
                .getInstance("X509", provider);
        // PEM is plain ASCII; decode with an explicit charset instead of
        // relying on the platform default encoding.
        Certificate certificate = certificateFactory
                .generateCertificate(new ByteArrayInputStream(
                        selfSignedCertMD5.getBytes("US-ASCII")));
        // Throws on signature mismatch; success means MD5 is still accepted.
        certificate.verify(certificate.getPublicKey());
    }
}
@TestTargetNew(
    clazz=Certificate.class,
    level=TestLevel.ADDITIONAL,
    method="verify",
    args={PublicKey.class}
)
@AndroidOnly("MD2 is not supported by Android")
/**
 * Checks that every installed X.509 CertificateFactory provider rejects an
 * MD2withRSA self-signed certificate: verify() must fail with
 * NoSuchAlgorithmException because MD2 is not available on Android.
 */
public void testVerifyMD2() throws Exception {
    Provider[] providers = Security.getProviders("CertificateFactory.X509");
    for (Provider provider : providers) {
        CertificateFactory certificateFactory = CertificateFactory
                .getInstance("X509", provider);
        // PEM is plain ASCII; decode with an explicit charset instead of
        // relying on the platform default encoding.
        Certificate certificate = certificateFactory
                .generateCertificate(new ByteArrayInputStream(
                        selfSignedCertMD2.getBytes("US-ASCII")));
        try {
            certificate.verify(certificate.getPublicKey());
            fail("MD2 should not be allowed");
        } catch (NoSuchAlgorithmException e) {
            // expected
        }
    }
}
@TestTargetNew(
    clazz=CertPathValidator.class,
    level=TestLevel.ADDITIONAL,
    method="verify",
    args={PublicKey.class}
)
/**
 * Validates the certificate chain rooted in the MD2-signed certificate.
 */
public void testVerifyMD2_chain() throws Exception {
    checkChainValidates(true);
}

@TestTargetNew(
    clazz=CertPathValidator.class,
    level=TestLevel.ADDITIONAL,
    method="verify",
    args={PublicKey.class}
)
/**
 * Validates the certificate chain rooted in the MD5-signed certificate.
 */
public void testVerifyMD5_chain() throws Exception {
    checkChainValidates(false);
}

/**
 * Runs PKIX validation of the chain selected by {@code useMD2root} twice:
 * first with the trust anchor excluded from the generated CertPath, then
 * with it included. In both cases validation must succeed and the returned
 * trust anchor must be one of the anchors configured in the parameters.
 *
 * @param useMD2root true to validate the MD2-rooted chain, false for the
 *                   MD5-rooted chain
 */
private void checkChainValidates(boolean useMD2root) throws Exception {
    CertificateFactory certificateFactory = CertificateFactory
            .getInstance("X509");
    for (boolean includeRootInChain : new boolean[] {false, true}) {
        CertPath path = certificateFactory
                .generateCertPath(getCertList(useMD2root, includeRootInChain));
        CertPathValidator certPathValidator = CertPathValidator.getInstance("PKIX");
        PKIXParameters params = createPKIXParams();
        CertPathValidatorResult res = certPathValidator.validate(path, params);
        assertTrue("wrong result type",
                res instanceof PKIXCertPathValidatorResult);
        PKIXCertPathValidatorResult r = (PKIXCertPathValidatorResult) res;
        assertTrue("Wrong trust anchor returned", params.getTrustAnchors()
                .contains(r.getTrustAnchor()));
    }
}
// Parsed certificates of the currently selected chain:
// [0] = self-signed root, [1] = intermediate, [2] = leaf-side CA.
private X509Certificate[] certs = new X509Certificate[3];

/**
 * Parses the PEM constants of the requested chain into {@link #certs} and
 * returns the chain ordered leaf-first, as expected by
 * {@code CertificateFactory.generateCertPath}.
 *
 * @param useMD2root         true selects the MD2-rooted chain, false the
 *                           MD5-rooted chain
 * @param includeRootInChain whether the trust anchor itself is appended to
 *                           the returned list
 */
private List<Certificate> getCertList(boolean useMD2root,
        boolean includeRootInChain) throws Exception {
    CertificateFactory certificateFactory = CertificateFactory
            .getInstance("X509");
    // Select root, intermediate and leaf-side PEM texts for the chain.
    String[] pems = useMD2root
            ? new String[] {selfSignedCertMD2, signedCert1Chain1, signedCert2Chain1}
            : new String[] {selfSignedCertMD5, signedCert1Chain2, signedCert2Chain2};
    for (int i = 0; i < pems.length; i++) {
        certs[i] = (X509Certificate) certificateFactory
                .generateCertificate(new ByteArrayInputStream(
                        pems[i].getBytes()));
    }
    ArrayList<Certificate> result = new ArrayList<Certificate>();
    result.add(certs[2]);
    result.add(certs[1]);
    if (includeRootInChain) {
        result.add(certs[0]);
    }
    return result;
}
/**
 * Builds PKIX validation parameters whose single trust anchor is the
 * self-signed root currently stored in {@code certs[0]}, with revocation
 * checking turned off.
 */
private PKIXParameters createPKIXParams() throws Exception {
    KeyStore anchorStore = KeyStore.getInstance(KeyStore.getDefaultType());
    anchorStore.load(null, null);
    anchorStore.setCertificateEntry("selfSignedCert", certs[0]);
    PKIXParameters pkixParams = new PKIXParameters(anchorStore);
    pkixParams.setRevocationEnabled(false);
    return pkixParams;
}
}
|
|
/*
* Copyright (C) ExBin Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.exbin.bined.swt.basic;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.ParametersAreNonnullByDefault;
import javax.swing.Timer;
import org.exbin.bined.basic.BasicCodeAreaSection;
import org.exbin.bined.CodeAreaCaret;
import org.exbin.bined.DefaultCodeAreaCaretPosition;
import org.exbin.bined.CodeAreaSection;
import org.exbin.bined.CodeAreaUtils;
import org.exbin.bined.capability.CaretCapable;
import org.exbin.bined.CodeAreaCaretPosition;
/**
 * Default implementation of code area caret.
 * <p>
 * Keeps the caret position of the owning {@link CodeArea} and drives cursor
 * blinking through a Swing {@link Timer}.
 *
 * @version 0.2.0 2018/08/11
 * @author ExBin Project (https://exbin.org)
 */
@ParametersAreNonnullByDefault
public class DefaultCodeAreaCaret implements CodeAreaCaret {

    private static final int DOUBLE_CURSOR_WIDTH = 2;
    private static final int DEFAULT_BLINK_RATE = 450;

    @Nonnull
    private final CodeArea codeArea;
    private final DefaultCodeAreaCaretPosition caretPosition = new DefaultCodeAreaCaretPosition();

    // Blink interval in milliseconds; 0 disables blinking entirely.
    private int blinkRate = 0;
    private Timer blinkTimer = null;
    private boolean cursorVisible = true;

    @Nonnull
    private CursorRenderingMode renderingMode = CursorRenderingMode.PAINT; // NEGATIVE;

    /**
     * Creates caret for the given code area and starts blinking with the
     * default rate.
     *
     * @param codeArea owning code area component
     */
    public DefaultCodeAreaCaret(CodeArea codeArea) {
        CodeAreaUtils.requireNonNull(codeArea);
        this.codeArea = codeArea;
        privateSetBlinkRate(DEFAULT_BLINK_RATE);
    }

    /**
     * Returns cursor width in pixels for the given shape.
     *
     * @return thickness in pixels, or -1 for an unrecognized shape
     */
    public static int getCursorThickness(CursorShape cursorShape, int characterWidth, int lineHeight) {
        switch (cursorShape) {
            case INSERT:
                return DOUBLE_CURSOR_WIDTH;
            case OVERWRITE:
            case MIRROR:
                return characterWidth;
        }
        return -1;
    }

    @Nonnull
    @Override
    public CodeAreaCaretPosition getCaretPosition() {
        return caretPosition;
    }

    /**
     * Makes the cursor visible immediately and restarts the blink cycle.
     */
    public void resetBlink() {
        if (blinkTimer != null) {
            cursorVisible = true;
            blinkTimer.restart();
        }
    }

    /**
     * Notifies the owning component that the caret changed.
     * (Renamed from the typo'd "notifyCaredChanged"; private, so no callers
     * outside this class are affected.)
     */
    private void notifyCaretChanged() {
        // TODO limit to cursor repaint
        ((CaretCapable) codeArea).notifyCaretChanged();
    }

    @Override
    public void setCaretPosition(@Nullable CodeAreaCaretPosition caretPosition) {
        if (caretPosition != null) {
            this.caretPosition.setPosition(caretPosition);
        } else {
            // Null means "no position": reset to the default state.
            this.caretPosition.reset();
        }
        resetBlink();
    }

    @Override
    public void setCaretPosition(long dataPosition) {
        caretPosition.setDataPosition(dataPosition);
        caretPosition.setCodeOffset(0);
        resetBlink();
    }

    @Override
    public void setCaretPosition(long dataPosition, int codeOffset) {
        caretPosition.setDataPosition(dataPosition);
        caretPosition.setCodeOffset(codeOffset);
        resetBlink();
    }

    public void setCaretPosition(long dataPosition, int codeOffset, CodeAreaSection section) {
        caretPosition.setDataPosition(dataPosition);
        caretPosition.setCodeOffset(codeOffset);
        caretPosition.setSection(section);
        resetBlink();
    }

    public long getDataPosition() {
        return caretPosition.getDataPosition();
    }

    public void setDataPosition(long dataPosition) {
        caretPosition.setDataPosition(dataPosition);
        resetBlink();
    }

    public int getCodeOffset() {
        return caretPosition.getCodeOffset();
    }

    public void setCodeOffset(int codeOffset) {
        caretPosition.setCodeOffset(codeOffset);
        resetBlink();
    }

    @Nonnull
    @Override
    public CodeAreaSection getSection() {
        // Fall back to the code matrix section when no section was set.
        return caretPosition.getSection().orElse(BasicCodeAreaSection.CODE_MATRIX);
    }

    public void setSection(CodeAreaSection section) {
        caretPosition.setSection(section);
        resetBlink();
    }

    public int getBlinkRate() {
        return blinkRate;
    }

    public void setBlinkRate(int blinkRate) {
        privateSetBlinkRate(blinkRate);
    }

    public boolean isCursorVisible() {
        return cursorVisible;
    }

    @Nonnull
    public CursorRenderingMode getRenderingMode() {
        return renderingMode;
    }

    public void setRenderingMode(CursorRenderingMode renderingMode) {
        CodeAreaUtils.requireNonNull(renderingMode);
        this.renderingMode = renderingMode;
        notifyCaretChanged();
    }

    /**
     * Applies a new blink rate: 0 stops blinking and leaves the cursor
     * visible, a positive value (re)configures or lazily creates the blink
     * timer.
     *
     * @throws IllegalArgumentException if blink rate is negative
     */
    private void privateSetBlinkRate(int blinkRate) {
        if (blinkRate < 0) {
            throw new IllegalArgumentException("Blink rate cannot be negative");
        }

        this.blinkRate = blinkRate;
        if (blinkTimer != null) {
            if (blinkRate == 0) {
                blinkTimer.stop();
                blinkTimer = null;
                cursorVisible = true;
                notifyCaretChanged();
            } else {
                blinkTimer.setDelay(blinkRate);
                blinkTimer.setInitialDelay(blinkRate);
            }
        } else if (blinkRate > 0) {
            // Timer is imported; no need for the fully qualified name.
            blinkTimer = new Timer(blinkRate, new Blink());
            blinkTimer.setRepeats(true);
            blinkTimer.start();
        }
    }

    /**
     * Toggles cursor visibility on each blink timer tick.
     */
    private class Blink implements ActionListener {

        @Override
        public void actionPerformed(ActionEvent e) {
            cursorVisible = !cursorVisible;
            notifyCaretChanged();
        }
    }

    /**
     * Enumeration of supported cursor shapes.
     */
    public enum CursorShape {
        INSERT, OVERWRITE, MIRROR
    }

    /**
     * Method for rendering cursor into CodeArea component.
     */
    public enum CursorRenderingMode {
        /**
         * Cursor is just painted.
         */
        PAINT,
        /**
         * Cursor is painted using pixels inversion.
         */
        XOR,
        /**
         * Underlying character is painted using a color negative to the
         * cursor color.
         */
        NEGATIVE
    }
}
|
|
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.refactoring.rename.naming;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.codeStyle.NameUtil;
import gnu.trove.TIntIntHashMap;
import org.jetbrains.annotations.Nullable;
import java.util.*;
/**
* @author dsl
*/
/**
 * Computes rename suggestions for identifiers derived from a class name. The
 * constructor splits the old and the new class name into words and records which
 * word ranges changed; {@link #suggestName(String)} then replays those word-range
 * changes onto a dependent name (typically a property name) to produce its
 * suggested new name.
 *
 * @author dsl
 */
public class NameSuggester {
    private static final Logger LOG = Logger.getInstance("#com.intellij.refactoring.rename.naming.NameSuggester");
    private final String[] myOldClassName;
    private final String[] myNewClassName;
    private final List<OriginalToNewChange> myChanges; // sorted from right to left
    private final String myOldClassNameAsGiven;
    private final String myNewClassNameAsGiven;

    public NameSuggester(String oldClassName, String newClassName) {
        myOldClassNameAsGiven = oldClassName;
        myNewClassNameAsGiven = newClassName;
        myOldClassName = NameUtil.splitNameIntoWords(oldClassName);
        myNewClassName = NameUtil.splitNameIntoWords(newClassName);
        myChanges = new ArrayList<OriginalToNewChange>();
        int oldIndex = myOldClassName.length - 1;
        int oldLastMatch = myOldClassName.length;
        int newLastMatch = myNewClassName.length;
        // Scan the old name right-to-left. Each word that also occurs in the not yet
        // matched part of the new name anchors a match; the words between two anchors
        // form one changed range.
        while (oldIndex >= 0) {
            final String patternWord = myOldClassName[oldIndex];
            final int matchingWordIndex = findInNewBackwardsFromIndex(patternWord, newLastMatch - 1);
            if (matchingWordIndex < 0) { // no matching word
                oldIndex--;
            }
            else { // matching word found
                // Record a change only when at least one side has unmatched words
                // between this anchor and the previous one.
                if (oldIndex + 1 <= oldLastMatch - 1 || matchingWordIndex + 1 <= newLastMatch - 1) {
                    final OriginalToNewChange change = new OriginalToNewChange(
                        oldIndex + 1, oldLastMatch - 1, matchingWordIndex + 1, newLastMatch - 1);
                    myChanges.add(change);
                }
                oldLastMatch = oldIndex;
                newLastMatch = matchingWordIndex;
                oldIndex--;
            }
        }
        // Words before the first anchor (on either side) form the final change range.
        if (0 <= oldLastMatch - 1 || 0 <= newLastMatch - 1) {
            myChanges.add(new OriginalToNewChange(0, oldLastMatch - 1, 0, newLastMatch - 1));
        }
    }

    /**
     * Looks for {@code patternWord} among the new class name words, scanning backwards
     * from {@code newIndex}.
     *
     * @return the index of the matching word, or -1 when absent
     */
    private int findInNewBackwardsFromIndex(String patternWord, int newIndex) {
        for (int i = newIndex; i >= 0; i--) {
            final String s = myNewClassName[i];
            if (s.equals(patternWord)) return i;
        }
        return -1;
    }

    /**
     * @return the recorded (oldString, newString) pairs in left-to-right order
     */
    List<Pair<String,String>> getChanges() {
        final ArrayList<Pair<String,String>> result = new ArrayList<Pair<String,String>>();
        for (int i = myChanges.size() - 1; i >= 0; i--) {
            final OriginalToNewChange change = myChanges.get(i);
            result.add(Pair.create(change.getOldString(), change.getNewString()));
        }
        return result;
    }

    /**
     * Suggests a new name for {@code propertyName} by replaying the class-name word
     * changes onto it. Returns {@code propertyName} unchanged when nothing matches.
     */
    public String suggestName(final String propertyName) {
        if (myOldClassNameAsGiven.equals(propertyName)) return myNewClassNameAsGiven;
        final String[] propertyWords = NameUtil.splitNameIntoWords(propertyName);
        TIntIntHashMap matches = calculateMatches(propertyWords);
        if (matches.isEmpty()) return propertyName;
        TreeMap<Pair<Integer,Integer>, String> replacements = calculateReplacements(propertyWords, matches);
        if (replacements.isEmpty()) return propertyName;
        return calculateNewName(replacements, propertyWords, propertyName);
    }

    /**
     * Computes, for every word of {@code s}, its start offset and the end offset of
     * the previous word. {@code starts[words.length]} is a sentinel equal to
     * {@code s.length()} so that trailing separators can be copied uniformly.
     */
    private static Pair<int[],int[]> calculateWordPositions(String s, String[] words) {
        int[] starts = new int[words.length + 1];
        int[] prevEnds = new int[words.length + 1];
        prevEnds[0] = -1;
        int pos = 0;
        for (int i = 0; i < words.length; i++) {
            final String word = words[i];
            final int index = s.indexOf(word, pos);
            LOG.assertTrue(index >= 0);
            starts[i] = index;
            pos = index + word.length();
            prevEnds[i + 1] = pos - 1;
        }
        starts[words.length] = s.length();
        return Pair.create(starts, prevEnds);
    }

    /**
     * Builds the new name: copies unchanged words together with the separators
     * between them, substitutes each replacement range, then restores the original
     * capitalization of the first word.
     */
    private static String calculateNewName(TreeMap<Pair<Integer, Integer>, String> replacements,
                                           final String[] propertyWords,
                                           String propertyName) {
        // StringBuilder instead of StringBuffer: purely local buffer, no
        // synchronization needed (and consistent with OriginalToNewChange below).
        StringBuilder resultingWords = new StringBuilder();
        int currentWord = 0;
        final Pair<int[],int[]> wordIndices = calculateWordPositions(propertyName, propertyWords);
        for (final Map.Entry<Pair<Integer, Integer>, String> entry : replacements.entrySet()) {
            final int first = entry.getKey().getFirst().intValue();
            final int last = entry.getKey().getSecond().intValue();
            // Copy words (and their separators) up to the start of this replacement.
            for (int i = currentWord; i < first; i++) {
                resultingWords.append(calculateBetween(wordIndices, i, propertyName));
                final String propertyWord = propertyWords[i];
                appendWord(resultingWords, propertyWord);
            }
            resultingWords.append(calculateBetween(wordIndices, first, propertyName));
            appendWord(resultingWords, entry.getValue());
            currentWord = last + 1;
        }
        // Copy the tail after the last replacement.
        for (; currentWord < propertyWords.length; currentWord++) {
            resultingWords.append(calculateBetween(wordIndices, currentWord, propertyName));
            appendWord(resultingWords, propertyWords[currentWord]);
        }
        resultingWords.append(calculateBetween(wordIndices, propertyWords.length, propertyName));
        if (resultingWords.length() == 0) return propertyName;
        return decapitalizeProbably(resultingWords.toString(), propertyName);
    }

    /**
     * Appends {@code propertyWord}, capitalizing it when it directly follows a letter
     * or digit (camel-hump boundary).
     */
    private static void appendWord(StringBuilder resultingWords, String propertyWord) {
        if (resultingWords.length() > 0) {
            final char lastChar = resultingWords.charAt(resultingWords.length() - 1);
            if (Character.isLetterOrDigit(lastChar)) {
                propertyWord = StringUtil.capitalize(propertyWord);
            }
        }
        resultingWords.append(propertyWord);
    }

    /**
     * @return the separator characters between word {@code i - 1} and word {@code i}
     * of {@code propertyName}
     */
    private static String calculateBetween(final Pair<int[], int[]> wordIndices, int i, String propertyName) {
        final int thisWordStart = wordIndices.getFirst()[i];
        final int prevWordEnd = wordIndices.getSecond()[i];
        return propertyName.substring(prevWordEnd + 1, thisWordStart);
    }

    /**
     * Calculates a map of replacements. Result has a form:<br>
     * {<first,last> -> replacement} <br>
     * where start and end are indices of property words range (inclusive), and replacement is a
     * string that this range must be replaced with.<br>
     * It is valid situation that <code>last == first - 1</code>: in this case replace means insertion
     * before first word. Furthermore, first may be equal to <code>propertyWords.length</code> - in
     * that case replacement transforms to appending.
     * @param propertyWords
     * @param matches
     * @return
     */
    private TreeMap<Pair<Integer, Integer>, String> calculateReplacements(String[] propertyWords, TIntIntHashMap matches) {
        TreeMap<Pair<Integer,Integer>, String> replacements = new TreeMap<Pair<Integer,Integer>, String>(new Comparator<Pair<Integer, Integer>>() {
            @Override
            public int compare(Pair<Integer, Integer> pair, Pair<Integer, Integer> pair1) {
                return pair.getFirst().compareTo(pair1.getFirst());
            }
        });
        for (final OriginalToNewChange change : myChanges) {
            final int first = change.oldFirst;
            final int last = change.oldLast;
            if (change.getOldLength() > 0) {
                if (containsAllBetween(matches, first, last)) {
                    final String newString = change.getNewString();
                    final int propertyWordFirst = matches.get(first);
                    if (first >= myOldClassName.length || last >= myOldClassName.length) {
                        LOG.error("old class name = " + myOldClassNameAsGiven + ", new class name = " + myNewClassNameAsGiven + ", propertyWords = " +
                                  Arrays.asList(propertyWords).toString());
                    }
                    final String replacement = suggestReplacement(propertyWords[propertyWordFirst], newString);
                    replacements.put(Pair.create(propertyWordFirst, matches.get(last)), replacement);
                }
            }
            else {
                // Pure insertion: the old range is empty, so find where to insert.
                final String newString = change.getNewString();
                final int propertyWordToInsertBefore;
                if (matches.containsKey(first)) {
                    propertyWordToInsertBefore = matches.get(first);
                }
                else {
                    // containsKey used consistently here (the original mixed it with
                    // Trove's legacy contains(int), which checks the same key set).
                    if (matches.containsKey(last)) {
                        propertyWordToInsertBefore = matches.get(last) + 1;
                    } else {
                        propertyWordToInsertBefore = propertyWords.length;
                    }
                }
                replacements.put(Pair.create(propertyWordToInsertBefore, propertyWordToInsertBefore - 1), newString);
            }
        }
        return replacements;
    }

    private static String suggestReplacement(String propertyWord, String newClassNameWords) {
        return decapitalizeProbably(newClassNameWords, propertyWord);
    }

    /** Decapitalizes {@code word} iff {@code originalWord} starts with a lower-case letter. */
    private static String decapitalizeProbably(String word, String originalWord) {
        if (originalWord.length() == 0) return word;
        if (Character.isLowerCase(originalWord.charAt(0))) {
            return StringUtil.decapitalize(word);
        }
        return word;
    }

    /** @return true when every index in [first, last] has a recorded match. */
    private static boolean containsAllBetween(TIntIntHashMap matches, int first, int last) {
        for (int i = first; i <= last; i++) {
            if (!matches.containsKey(i)) return false;
        }
        return true;
    }

    /**
     * Matches property words against old-class-name words right-to-left
     * (case-insensitive); maps old-class word index -> property word index.
     */
    private TIntIntHashMap calculateMatches(final String[] propertyWords) {
        int classNameIndex = myOldClassName.length - 1;
        TIntIntHashMap matches = new TIntIntHashMap();
        for (int i = propertyWords.length - 1; i >= 0; i--) {
            final String propertyWord = propertyWords[i];
            Match match = null;
            for (int j = classNameIndex; j >= 0 && match == null; j--) {
                match = checkMatch(j, i, propertyWord);
            }
            if (match != null) {
                matches.put(match.oldClassNameIndex, i);
                classNameIndex = match.oldClassNameIndex - 1;
            }
        }
        return matches;
    }

    /** An inclusive word range of the old name mapped to a word range of the new name. */
    private class OriginalToNewChange {
        final int oldFirst;
        final int oldLast;
        final int newFirst;
        final int newLast;

        public OriginalToNewChange(int firstInOld, int lastInOld, int firstInNew, int lastInNew) {
            oldFirst = firstInOld;
            oldLast = lastInOld;
            newFirst = firstInNew;
            newLast = lastInNew;
        }

        int getOldLength() {
            return oldLast - oldFirst + 1;
        }

        String getOldString() {
            final StringBuilder buffer = new StringBuilder();
            for (int i = oldFirst; i <= oldLast; i++) {
                buffer.append(myOldClassName[i]);
            }
            return buffer.toString();
        }

        String getNewString() {
            final StringBuilder buffer = new StringBuilder();
            for (int i = newFirst; i <= newLast; i++) {
                buffer.append(myNewClassName[i]);
            }
            return buffer.toString();
        }
    }

    /** A single case-insensitive (old-class word, property word) match. */
    private static class Match {
        final int oldClassNameIndex;
        final int propertyNameIndex;
        final String propertyWord;

        public Match(int oldClassNameIndex, int propertyNameIndex, String propertyWord) {
            this.oldClassNameIndex = oldClassNameIndex;
            this.propertyNameIndex = propertyNameIndex;
            this.propertyWord = propertyWord;
        }
    }

    @Nullable
    private Match checkMatch(final int oldClassNameIndex, final int propertyNameIndex, final String propertyWord) {
        if (propertyWord.equalsIgnoreCase(myOldClassName[oldClassNameIndex])) {
            return new Match(oldClassNameIndex, propertyNameIndex, propertyWord);
        }
        else return null;
    }
}
|
|
/*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.container.spring.beans;
import java.util.*;
import org.drools.KnowledgeBase;
import org.drools.command.Command;
import org.drools.container.spring.namespace.EventListenersUtil;
import org.drools.event.process.ProcessEventListener;
import org.drools.event.rule.AgendaEventListener;
import org.drools.event.rule.WorkingMemoryEventListener;
import org.drools.grid.GridNode;
import org.drools.runtime.CommandExecutor;
import org.drools.runtime.KnowledgeSessionConfiguration;
import org.drools.runtime.process.WorkItemHandler;
import org.springframework.beans.factory.BeanNameAware;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.NamedBean;
/**
 * Base Spring {@link FactoryBean} for Drools knowledge-session beans. Holds the
 * shared configuration (knowledge base, session configuration, work-item handlers,
 * batch commands, grid node and event listeners) and leaves creation of the
 * concrete {@link CommandExecutor} to subclasses.
 */
public abstract class AbstractKnowledgeSessionBeanFactory
    implements
    FactoryBean,
    InitializingBean,
    BeanNameAware,
    NamedBean {

    private GridNode node;
    private Map<String, WorkItemHandler> workItems;
    private KnowledgeSessionConfiguration conf;
    private KnowledgeBase kbase;
    private String beanName;
    private String name;
    private List<Command<?>> batch;

    // Additions for JIRA JBRULES-3076
    protected List<AgendaEventListener> agendaEventListeners;
    protected List<ProcessEventListener> processEventListeners;
    protected List<WorkingMemoryEventListener> workingMemoryEventListeners;
    protected List<Object> groupedListeners = new ArrayList<Object>();
    // End of additions for JIRA JBRULES-3076

    public AbstractKnowledgeSessionBeanFactory() {
        super();
        // Additions for JIRA JBRULES-3076
        agendaEventListeners = new ArrayList<AgendaEventListener>();
        processEventListeners = new ArrayList<ProcessEventListener>();
        workingMemoryEventListeners = new ArrayList<WorkingMemoryEventListener>();
        // End of additions for JIRA JBRULES-3076
    }

    /** @return the session object produced by the subclass's command executor */
    public Object getObject() throws Exception {
        return getCommandExecutor();
    }

    public Map<String, WorkItemHandler> getWorkItems() {
        return workItems;
    }

    public void setWorkItems(Map<String, WorkItemHandler> workItems) {
        this.workItems = workItems;
    }

    public KnowledgeSessionConfiguration getConf() {
        return conf;
    }

    public void setConf(KnowledgeSessionConfiguration conf) {
        this.conf = conf;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public KnowledgeBase getKbase() {
        return kbase;
    }

    public void setKbase(KnowledgeBase kbase) {
        this.kbase = kbase;
    }

    /** Sessions built by this factory are shared singletons. */
    public boolean isSingleton() {
        return true;
    }

    public List<Command<?>> getBatch() {
        return batch;
    }

    public void setBatch(List<Command<?>> commands) {
        this.batch = commands;
    }

    /**
     * Validates the mandatory {@code kbase}, defaults {@code name} to the Spring bean
     * name, then delegates to {@link #internalAfterPropertiesSet()}.
     *
     * @throws IllegalArgumentException if no knowledge base was configured
     */
    public final void afterPropertiesSet() throws Exception {
        if (kbase == null) {
            throw new IllegalArgumentException("kbase property is mandatory");
        }
        if (name == null) {
            name = beanName;
        }
        internalAfterPropertiesSet();
    }

    /** @return the executor (session) this factory exposes as its bean */
    protected abstract CommandExecutor getCommandExecutor();

    /** Subclass hook invoked after the common properties have been validated. */
    protected abstract void internalAfterPropertiesSet();

    public GridNode getNode() {
        return node;
    }

    public void setNode(GridNode node) {
        this.node = node;
    }

    public void setBeanName(String name) {
        this.beanName = name;
    }

    public String getBeanName() {
        return beanName;
    }

    // Additions for JIRA JBRULES-3076
    /**
     * Registers a flat group of listeners, dispatching each one to every listener
     * list whose type it implements. The whole group is also remembered in
     * {@link #groupedListeners}.
     */
    public void setEventListenersFromGroup(List<Object> eventListenerList) {
        for (Object eventListener : eventListenerList) {
            if (eventListener instanceof AgendaEventListener) {
                agendaEventListeners.add((AgendaEventListener) eventListener);
            }
            if (eventListener instanceof WorkingMemoryEventListener) {
                workingMemoryEventListeners.add((WorkingMemoryEventListener) eventListener);
            }
            if (eventListener instanceof ProcessEventListener) {
                processEventListeners.add((ProcessEventListener) eventListener);
            }
        }
        groupedListeners.addAll(eventListenerList);
    }

    /**
     * Registers event listeners grouped by type key (see {@link EventListenersUtil}).
     * Entries whose key is unknown, or whose runtime type does not match the key,
     * are silently skipped — matching the original contract.
     */
    public void setEventListeners(Map<String, List> eventListenerMap) {
        // Iterate entries instead of keySet() + get(key): one map lookup per entry.
        for (Map.Entry<String, List> mapEntry : eventListenerMap.entrySet()) {
            String key = mapEntry.getKey();
            List<Object> eventListenerList = mapEntry.getValue();
            if (EventListenersUtil.TYPE_AGENDA_EVENT_LISTENER.equalsIgnoreCase(key)) {
                for (Object eventListener : eventListenerList) {
                    if (eventListener instanceof AgendaEventListener) {
                        agendaEventListeners.add((AgendaEventListener) eventListener);
                    }
                }
            } else if (EventListenersUtil.TYPE_WORKING_MEMORY_EVENT_LISTENER.equalsIgnoreCase(key)) {
                for (Object eventListener : eventListenerList) {
                    if (eventListener instanceof WorkingMemoryEventListener) {
                        workingMemoryEventListeners.add((WorkingMemoryEventListener) eventListener);
                    }
                }
            } else if (EventListenersUtil.TYPE_PROCESS_EVENT_LISTENER.equalsIgnoreCase(key)) {
                for (Object eventListener : eventListenerList) {
                    if (eventListener instanceof ProcessEventListener) {
                        processEventListeners.add((ProcessEventListener) eventListener);
                    }
                }
            }
        }
    }

    public List<AgendaEventListener> getAgendaEventListeners() {
        return agendaEventListeners;
    }

    public void setAgendaEventListeners(List<AgendaEventListener> agendaEventListeners) {
        this.agendaEventListeners = agendaEventListeners;
    }

    public List<ProcessEventListener> getProcessEventListeners() {
        return processEventListeners;
    }

    public void setProcessEventListeners(List<ProcessEventListener> processEventListeners) {
        this.processEventListeners = processEventListeners;
    }

    public List<WorkingMemoryEventListener> getWorkingMemoryEventListeners() {
        return workingMemoryEventListeners;
    }

    public void setWorkingMemoryEventListeners(List<WorkingMemoryEventListener> workingMemoryEventListeners) {
        this.workingMemoryEventListeners = workingMemoryEventListeners;
    }
    // End of Changes for JIRA JBRULES-3076
}
|
|
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.dmn.engine.transform;
import static org.assertj.core.api.Assertions.assertThat;
import java.io.InputStream;
import java.util.List;
import org.camunda.bpm.dmn.engine.DmnClause;
import org.camunda.bpm.dmn.engine.DmnClauseEntry;
import org.camunda.bpm.dmn.engine.DmnDecision;
import org.camunda.bpm.dmn.engine.DmnDecisionModel;
import org.camunda.bpm.dmn.engine.DmnDecisionTable;
import org.camunda.bpm.dmn.engine.DmnEngine;
import org.camunda.bpm.dmn.engine.DmnExpression;
import org.camunda.bpm.dmn.engine.DmnItemDefinition;
import org.camunda.bpm.dmn.engine.DmnRule;
import org.camunda.bpm.dmn.engine.DmnTypeDefinition;
import org.camunda.bpm.dmn.engine.impl.DmnClauseImpl;
import org.camunda.bpm.dmn.engine.impl.DmnDecisionTableImpl;
import org.camunda.bpm.dmn.engine.impl.DmnEngineConfigurationImpl;
import org.camunda.bpm.dmn.engine.impl.context.DmnDecisionContextImpl;
import org.camunda.bpm.model.dmn.HitPolicy;
import org.camunda.commons.utils.IoUtil;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * Transformation tests for the DMN engine: parses DmnTransformTest.dmn once and
 * verifies that definitions, item definitions, type definitions, decisions,
 * clauses, rules and their entries are transformed into the expected model
 * structure. Expected keys/names below mirror the fixture file's contents.
 */
public class DmnTransformTest {

    protected static DmnEngine engine;
    protected static DmnDecisionModel decisionModel;

    /** Parses the shared decision model once for the whole test class. */
    @BeforeClass
    public static void readModelFromFile() {
        InputStream inputStream = IoUtil.fileAsStream("org/camunda/bpm/dmn/engine/transform/DmnTransformTest.dmn");
        DmnEngineConfigurationImpl engineConfiguration = new DmnEngineConfigurationImpl();
        engine = engineConfiguration.buildEngine();
        decisionModel = engine.parseDecisionModel(inputStream);
    }

    @Test
    public void shouldTransformDefinitions() {
        assertThat(decisionModel.getKey()).isEqualTo("definitions");
        assertThat(decisionModel.getName()).isEqualTo("camunda");
        assertThat(decisionModel.getNamespace()).isEqualTo("http://camunda.org/dmn");
        assertThat(decisionModel.getExpressionLanguage()).isEqualTo(DmnDecisionContextImpl.DEFAULT_SCRIPT_LANGUAGE);
    }

    @Test
    public void shouldTransformItemDefinitions() {
        assertThat(decisionModel.getItemDefinitions()).hasSize(2);
        DmnItemDefinition itemDefinition = decisionModel.getItemDefinition("itemDefinition1");
        assertThat(itemDefinition).isNotNull();
        assertThat(itemDefinition.getName()).isEqualTo("camunda");
        itemDefinition = decisionModel.getItemDefinition("itemDefinition2");
        assertThat(itemDefinition).isNotNull();
        assertThat(itemDefinition.getName()).isNull();
    }

    @Test
    public void shouldTransformTypeDefinitions() {
        DmnTypeDefinition typeDefinition = decisionModel.getItemDefinition("itemDefinition1").getTypeDefinition();
        assertThat(typeDefinition).isNull();
        typeDefinition = decisionModel.getItemDefinition("itemDefinition2").getTypeDefinition();
        assertThat(typeDefinition).isNotNull();
        assertThat(typeDefinition.getTypeName()).isEqualTo("string");
    }

    @Test
    public void shouldTransformAllowedValues() {
        List<DmnExpression> allowedValues = decisionModel.getItemDefinition("itemDefinition1").getAllowedValues();
        assertThat(allowedValues).isEmpty();
        allowedValues = decisionModel.getItemDefinition("itemDefinition2").getAllowedValues();
        assertThat(allowedValues).hasSize(2);
        DmnExpression allowedValue = allowedValues.get(0);
        assertThat(allowedValue.getKey()).isEqualTo("allowedValue1");
        assertThat(allowedValue.getName()).isEqualTo("a");
        assertThat(allowedValue.getExpressionLanguage()).isNull();
        assertThat(allowedValue.getExpression()).isEqualTo("camunda");
        allowedValue = allowedValues.get(1);
        assertThat(allowedValue.getKey()).isEqualTo("allowedValue2");
        assertThat(allowedValue.getName()).isEqualTo("b");
        assertThat(allowedValue.getExpressionLanguage()).isNull();
        assertThat(allowedValue.getExpression()).isEqualTo("camunda");
    }

    @Test
    public void shouldTransformDecisions() {
        assertThat(decisionModel.getDecisions()).hasSize(2);
        DmnDecision decision = decisionModel.getDecision("decision1");
        assertThat(decision).isNotNull();
        assertThat(decision.getName()).isEqualTo("camunda");
        // decision2 should be ignored as it isn't supported by the DMN engine
        decision = decisionModel.getDecision("decision2");
        assertThat(decision).isNull();
        decision = decisionModel.getDecision("decision3");
        assertThat(decision).isNotNull();
        assertThat(decision.getName()).isEqualTo("camunda");
    }

    @Test
    public void shouldTransformDecisionTables() {
        DmnDecision decision = decisionModel.getDecision("decision1");
        assertThat(decision).isInstanceOf(DmnDecisionTable.class);
        DmnDecisionTable decisionTable = (DmnDecisionTable) decision;
        // decision1 declares no hit policy, so the engine default is applied.
        assertThat(decisionTable.getHitPolicy()).isEqualTo(DmnDecisionTableImpl.DEFAULT_HIT_POLICY);
        decision = decisionModel.getDecision("decision3");
        assertThat(decision).isInstanceOf(DmnDecisionTable.class);
        decisionTable = (DmnDecisionTable) decision;
        assertThat(decisionTable.getHitPolicy()).isEqualTo(HitPolicy.FIRST);
    }

    @Test
    public void shouldTransformClauses() {
        List<DmnClause> clauses = getClausesForDecision("decision1");
        // clause7 should be ignored as it has no input or output entries
        assertThat(clauses).hasSize(6);
        DmnClause clause = clauses.get(0);
        assertThat(clause.getKey()).isEqualTo("clause1");
        assertThat(clause.getName()).isEqualTo("camunda");
        assertThat(clause.getOutputName()).isEqualTo("camunda");
        assertThat(clause.isOrdered()).isFalse();
        clause = clauses.get(1);
        assertThat(clause.getKey()).isEqualTo("clause2");
        assertThat(clause.getName()).isNull();
        assertThat(clause.getOutputName()).isEqualTo(DmnClauseImpl.DEFAULT_INPUT_VARIABLE_NAME);
        assertThat(clause.isOrdered()).isFalse();
        clause = clauses.get(2);
        assertThat(clause.getKey()).isEqualTo("clause3");
        assertThat(clause.getName()).isNull();
        assertThat(clause.getOutputName()).isEqualTo(DmnClauseImpl.DEFAULT_INPUT_VARIABLE_NAME);
        assertThat(clause.isOrdered()).isFalse();
        clause = clauses.get(3);
        assertThat(clause.getKey()).isEqualTo("clause4");
        assertThat(clause.getName()).isNull();
        assertThat(clause.getOutputName()).isEqualTo(DmnClauseImpl.DEFAULT_INPUT_VARIABLE_NAME);
        assertThat(clause.isOrdered()).isTrue();
        clause = clauses.get(4);
        assertThat(clause.getKey()).isEqualTo("clause5");
        assertThat(clause.getName()).isNull();
        assertThat(clause.getOutputName()).isNull();
        assertThat(clause.isOrdered()).isFalse();
        clause = clauses.get(5);
        assertThat(clause.getKey()).isEqualTo("clause6");
        assertThat(clause.getName()).isEqualTo("camunda");
        assertThat(clause.getOutputName()).isNull();
        assertThat(clause.isOrdered()).isTrue();
    }

    @Test
    public void shouldTransformInputExpressions() {
        List<DmnClause> clauses = getClausesForDecision("decision1");
        DmnExpression inputExpression = clauses.get(0).getInputExpression();
        assertThat(inputExpression).isNotNull();
        assertThat(inputExpression.getKey()).isEqualTo("inputExpression1");
        assertThat(inputExpression.getName()).isNull();
        assertThat(inputExpression.getExpressionLanguage()).isNull();
        assertThat(inputExpression.getExpression()).isEqualTo("${camunda}");
        assertThat(inputExpression.getItemDefinition()).isNotNull();
        assertThat(inputExpression.getItemDefinition().getTypeDefinition()).isNull();
        inputExpression = clauses.get(1).getInputExpression();
        assertThat(inputExpression).isNotNull();
        assertThat(inputExpression.getKey()).isEqualTo("inputExpression2");
        assertThat(inputExpression.getName()).isEqualTo("camunda");
        assertThat(inputExpression.getExpressionLanguage()).isEqualTo("camunda");
        assertThat(inputExpression.getExpression()).isNull();
        assertThat(inputExpression.getItemDefinition()).isNotNull();
        assertThat(inputExpression.getItemDefinition().getTypeDefinition()).isNotNull();
        assertThat(inputExpression.getItemDefinition().getTypeDefinition().getTypeName()).isEqualTo("string");
        inputExpression = clauses.get(2).getInputExpression();
        assertThat(inputExpression).isNotNull();
        assertThat(inputExpression.getKey()).isEqualTo("inputExpression3");
        assertThat(inputExpression.getName()).isNull();
        assertThat(inputExpression.getExpressionLanguage()).isNull();
        assertThat(inputExpression.getExpression()).isNull();
        assertThat(inputExpression.getItemDefinition()).isNull();
        inputExpression = clauses.get(3).getInputExpression();
        assertThat(inputExpression).isNull();
    }

    @Test
    public void shouldTransformInputEntries() {
        List<DmnClause> clauses = getClausesForDecision("decision1");
        assertThat(clauses.get(0).getInputEntries()).isEmpty();
        assertThat(clauses.get(4).getInputEntries()).isEmpty();
        assertThat(clauses.get(5).getInputEntries()).isEmpty();
        List<DmnClauseEntry> inputEntries = clauses.get(3).getInputEntries();
        assertThat(inputEntries).hasSize(3);
        DmnClauseEntry inputEntry = inputEntries.get(0);
        assertThat(inputEntry.getKey()).isEqualTo("inputEntry1");
        assertThat(inputEntry.getName()).isEqualTo("camunda");
        assertThat(inputEntry.getExpressionLanguage()).isNull();
        assertThat(inputEntry.getExpression()).isEqualTo("${cellInput=='camunda'}");
        assertThat(inputEntry.getClause().getKey()).isEqualTo("clause4");
        inputEntry = inputEntries.get(1);
        assertThat(inputEntry.getKey()).isEqualTo("inputEntry2");
        assertThat(inputEntry.getName()).isEqualTo("camunda");
        assertThat(inputEntry.getExpressionLanguage()).isEqualTo("camunda");
        assertThat(inputEntry.getExpression()).isNull();
        assertThat(inputEntry.getClause().getKey()).isEqualTo("clause4");
        inputEntry = inputEntries.get(2);
        assertThat(inputEntry.getKey()).isEqualTo("inputEntry3");
        assertThat(inputEntry.getName()).isNull();
        assertThat(inputEntry.getExpressionLanguage()).isNull();
        assertThat(inputEntry.getExpression()).isNull();
        assertThat(inputEntry.getClause().getKey()).isEqualTo("clause4");
    }

    @Test
    public void shouldTransformOutputDefinition() {
        List<DmnClause> clauses = getClausesForDecision("decision1");
        assertThat(clauses.get(0).getOutputDefinition()).isNull();
        DmnItemDefinition outputDefinition = clauses.get(4).getOutputDefinition();
        assertThat(outputDefinition).isNotNull();
        assertThat(outputDefinition.getTypeDefinition()).isNotNull();
        assertThat(outputDefinition.getTypeDefinition().getTypeName()).isEqualTo("string");
    }

    @Test
    public void shouldTransformOutputEntries() {
        List<DmnClause> clauses = getClausesForDecision("decision1");
        assertThat(clauses.get(0).getOutputEntries()).isEmpty();
        assertThat(clauses.get(3).getOutputEntries()).isEmpty();
        assertThat(clauses.get(4).getOutputEntries()).isEmpty();
        List<DmnExpression> outputEntries = clauses.get(5).getOutputEntries();
        assertThat(outputEntries).hasSize(3);
        DmnExpression outputEntry = outputEntries.get(0);
        assertThat(outputEntry.getKey()).isEqualTo("outputEntry1");
        assertThat(outputEntry.getName()).isEqualTo("camunda");
        assertThat(outputEntry.getExpressionLanguage()).isNull();
        assertThat(outputEntry.getExpression()).isEqualTo("camunda");
        outputEntry = outputEntries.get(1);
        assertThat(outputEntry.getKey()).isEqualTo("outputEntry2");
        assertThat(outputEntry.getName()).isEqualTo("camunda");
        assertThat(outputEntry.getExpressionLanguage()).isEqualTo("camunda");
        assertThat(outputEntry.getExpression()).isNull();
        outputEntry = outputEntries.get(2);
        assertThat(outputEntry.getKey()).isEqualTo("outputEntry3");
        assertThat(outputEntry.getName()).isNull();
        assertThat(outputEntry.getExpressionLanguage()).isNull();
        assertThat(outputEntry.getExpression()).isNull();
    }

    @Test
    public void shouldTransformRules() {
        List<DmnRule> rules = getRulesForDecision("decision1");
        assertThat(rules).hasSize(5);
        DmnRule rule = rules.get(0);
        assertThat(rule.getKey()).isEqualTo("rule1");
        assertThat(rule.getName()).isEqualTo("camunda");
        rule = rules.get(1);
        assertThat(rule.getKey()).isEqualTo("rule2");
        assertThat(rule.getName()).isNull();
        rule = rules.get(2);
        assertThat(rule.getKey()).isEqualTo("rule3");
        assertThat(rule.getName()).isNull();
        rule = rules.get(3);
        assertThat(rule.getKey()).isEqualTo("rule4");
        assertThat(rule.getName()).isNull();
        rule = rules.get(4);
        assertThat(rule.getKey()).isEqualTo("rule5");
        assertThat(rule.getName()).isNull();
    }

    @Test
    public void shouldTransformConditions() {
        List<DmnRule> rules = getRulesForDecision("decision1");
        List<DmnClauseEntry> conditions = rules.get(0).getConditions();
        assertThat(conditions).isEmpty();
        conditions = rules.get(1).getConditions();
        assertThat(conditions).hasSize(1);
        assertThat(conditions.get(0).getKey()).isEqualTo("inputEntry1");
        conditions = rules.get(2).getConditions();
        assertThat(conditions).hasSize(2);
        assertThat(conditions.get(0).getKey()).isEqualTo("inputEntry1");
        assertThat(conditions.get(1).getKey()).isEqualTo("inputEntry2");
        conditions = rules.get(3).getConditions();
        assertThat(conditions).hasSize(2);
        assertThat(conditions.get(0).getKey()).isEqualTo("inputEntry1");
        assertThat(conditions.get(1).getKey()).isEqualTo("inputEntry3");
        conditions = rules.get(4).getConditions();
        assertThat(conditions).isEmpty();
    }

    @Test
    public void shouldTransformConclusions() {
        List<DmnRule> rules = getRulesForDecision("decision1");
        List<DmnClauseEntry> conclusions = rules.get(0).getConclusions();
        assertThat(conclusions).isEmpty();
        conclusions = rules.get(1).getConclusions();
        assertThat(conclusions).isEmpty();
        conclusions = rules.get(2).getConclusions();
        assertThat(conclusions).hasSize(1);
        assertThat(conclusions.get(0).getKey()).isEqualTo("outputEntry1");
        conclusions = rules.get(3).getConclusions();
        assertThat(conclusions).hasSize(2);
        assertThat(conclusions.get(0).getKey()).isEqualTo("outputEntry1");
        assertThat(conclusions.get(1).getKey()).isEqualTo("outputEntry2");
        conclusions = rules.get(4).getConclusions();
        assertThat(conclusions).hasSize(2);
        assertThat(conclusions.get(0).getKey()).isEqualTo("outputEntry1");
        assertThat(conclusions.get(1).getKey()).isEqualTo("outputEntry3");
    }

    // helper ///////////////////////////////////////////////////////////////////////////////////////////////////////

    /** @return the clauses of the decision table registered under {@code decisionKey} */
    public List<DmnClause> getClausesForDecision(String decisionKey) {
        DmnDecisionTable decisionTable = decisionModel.getDecision(decisionKey);
        return decisionTable.getClauses();
    }

    /** @return the rules of the decision table registered under {@code decisionKey} */
    public List<DmnRule> getRulesForDecision(String decisionKey) {
        DmnDecisionTable decisionTable = decisionModel.getDecision(decisionKey);
        return decisionTable.getRules();
    }
}
|
|
package org.aikodi.chameleon.eclipse.editors.preferences;
import org.aikodi.chameleon.core.language.Language;
import org.aikodi.chameleon.eclipse.ChameleonEditorPlugin;
import org.aikodi.chameleon.eclipse.LanguageMgt;
import org.aikodi.chameleon.eclipse.editors.ChameleonEditor;
import org.aikodi.chameleon.eclipse.presentation.PresentationStyle;
import org.aikodi.chameleon.eclipse.presentation.Selector;
import org.eclipse.core.runtime.SafeRunner;
import org.eclipse.jface.preference.BooleanFieldEditor;
import org.eclipse.jface.preference.FieldEditor;
import org.eclipse.jface.preference.FieldEditorPreferencePage;
import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.jface.util.SafeRunnable;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.ScrolledComposite;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.TabFolder;
import org.eclipse.swt.widgets.TabItem;
import org.eclipse.ui.*;
import org.eclipse.ui.internal.WorkbenchMessages;
import java.util.*;
/**
* @author Manuel Van Wesemael
* @author Joeri Hendrickx
*
* A preference page for chameleonDocuments.
* It handles the colors used in the syntax coloring for the various languages.
* The colors can be changed at will.
*/
public class ColoringPreferencePage extends FieldEditorPreferencePage implements IWorkbenchPreferencePage {
//per language, contains a color selector and the field editor that represents it
private HashMap<String, HashMap<Selector, Vector<FieldEditor>>> options;
/**
 * Creates the coloring preference page using the grid field-editor layout and
 * binds it to the Chameleon editor plugin's preference store.
 */
public ColoringPreferencePage() {
    super(FieldEditorPreferencePage.GRID);
    setPreferenceStore(ChameleonEditorPlugin.getDefault().getPreferenceStore());
}
/*
* stores the chosen colors and recolors all the documents
*/
private void performChoices(){
for (String language : options.keySet()) {
HashMap<Selector, Vector<FieldEditor>> fieldsBySelector = options.get(language);
Set<Selector> selectors = fieldsBySelector.keySet();
for (Iterator<Selector> iterator = selectors.iterator(); iterator.hasNext();) {
Selector selector = iterator.next();
OptionalColorFieldEditor fg = (OptionalColorFieldEditor) fieldsBySelector.get(selector).get(0);
OptionalColorFieldEditor bg = (OptionalColorFieldEditor) fieldsBySelector.get(selector).get(1);
BooleanFieldEditor bold = (BooleanFieldEditor) fieldsBySelector.get(selector).get(2);
BooleanFieldEditor italic = (BooleanFieldEditor) fieldsBySelector.get(selector).get(3);
BooleanFieldEditor underline = (BooleanFieldEditor) fieldsBySelector.get(selector).get(4);
BooleanFieldEditor foldable = (BooleanFieldEditor) fieldsBySelector.get(selector).get(5);
BooleanFieldEditor folded = (BooleanFieldEditor) fieldsBySelector.get(selector).get(6);
PresentationStyle style = new PresentationStyle(fg.getOptionalColor(), bg.getOptionalColor(), bold.getBooleanValue(), italic.getBooleanValue(), underline.getBooleanValue(), foldable.getBooleanValue(), folded.getBooleanValue());
// FIXME: this should update the presentation models of all project natures.
LanguageMgt.getInstance().getPresentationModel(language).updateRule(selector, style);
}
}
IPreferenceStore store = ChameleonEditorPlugin.getDefault().getPreferenceStore();
store.setValue("Chameleon_color_prefs_inited", true);
recolorAll();
}
//recolors all open editors.
private void recolorAll() {
SafeRunner.run(new SafeRunnable(WorkbenchMessages.ErrorClosing) {
private IWorkbench workbench = _workbench;
@Override
public void run() {
// Collect dirtyParts
//ArrayList dirtyParts = new ArrayList();
//ArrayList dirtyEditorsInput = new ArrayList();
IWorkbenchWindow windows[] = workbench.getWorkbenchWindows();
for (int i = 0; i < windows.length; i++) {
IWorkbenchPage pages[] = windows[i].getPages();
for (int j = 0; j < pages.length; j++) {
IWorkbenchPage page = pages[j];
IEditorReference[] parts = page.getEditorReferences();
for (int k = 0; k < parts.length; k++) {
IEditorPart editor = parts[k].getEditor(true);
try{
ChameleonEditor chamEditor = (ChameleonEditor) editor;
chamEditor.updateFoldingStructure();
chamEditor.getDocument().doPresentation(chamEditor.getChameleonConfiguration().getChameleonPresentationReconciler().getTextViewer());
}catch(ClassCastException cce){
// the part is not a chameleonEditor. No changes are needed
}
}
}
}
}
});
}
@Override
protected void createFieldEditors() {
options = new HashMap<String, HashMap<Selector, Vector<FieldEditor>>>();
//a vector containing:
// per language a vector of elements
// the first element of each vector is the language name
HashMap<String, HashMap<Selector, PresentationStyle>> possibilities = readConfigurables();
TabFolder languageTabs = new TabFolder(getFieldEditorParent(),SWT.NONE);
languageTabs.setLayoutData(new GridData(SWT.FILL, SWT.FILL,true, true));
languageTabs.setLayout(new GridLayout(1, true));
Set<String> talen = possibilities.keySet();
for (Iterator<String> iter = talen.iterator(); iter.hasNext();) {
String taalS = iter.next();
TabItem currentPage = new TabItem(languageTabs, SWT.NONE);
currentPage.setText(taalS);
ScrolledComposite scroller = new ScrolledComposite(languageTabs, SWT.V_SCROLL);
currentPage.setControl(scroller);
scroller.setLayoutData(new GridData(SWT.FILL, SWT.FILL,true, true));
scroller.setLayout(new GridLayout(1, true));
Composite container = new Composite(scroller, SWT.BORDER);
scroller.setContent(container);
GridLayout grid = new GridLayout(2, true);
container.setLayout(grid);
container.setLayoutData(new GridData(SWT.FILL, SWT.DEFAULT, true, false));
HashMap<Selector, Vector<FieldEditor>> current = new HashMap<Selector, Vector<FieldEditor>>();
HashMap<Selector, PresentationStyle> taal = possibilities.get(taalS);
Set<Selector> selectors = taal.keySet();
for (Iterator<Selector> iterator = selectors.iterator(); iterator.hasNext();) {
Group selectorGroup = new Group(container,SWT.NONE);
GridLayout grid2 = new GridLayout();
selectorGroup.setLayout(grid2);
Composite selectorGroupC = new Composite(selectorGroup, SWT.NONE);
selectorGroupC.setLayout(new GridLayout(2, true));
selectorGroup.setLayoutData(new GridData(SWT.FILL,SWT.DEFAULT, true, false));
selectorGroupC.setLayoutData(new GridData(SWT.FILL,SWT.DEFAULT, true, false));
Composite c = selectorGroupC;
Vector<FieldEditor> current2 = new Vector<FieldEditor>();
Selector selector = iterator.next();
// PresentationStyle presrule = taal.get(selector);
String fieldNaam = "stylerule_"+taalS+"_"+selector.getElementType()+"_"+selector.getPositionType()+"_";
// PresentationStyle presrule = taal.get(selector);
// String fieldNaam = "stylerule_"+taalS+"_"+selector.getElementType()+"_"+selector.getDecoratorType()+"_";
//Label elementLb = new Label(c, SWT.NONE);
selectorGroup.setText(selector.getDescription());
//addField(spacer2);
PresentationStyle style = taal.get(selector);
OptionalColorFieldEditor foregroundEdit = new OptionalColorFieldEditor(fieldNaam+"foreground", "Foreground", c);
foregroundEdit.setOptionalColor(style.getForeground());
//foregroundEdit.fillIntoGrid(c,2);
//addField(spacer3);
OptionalColorFieldEditor backgroundEdit = new OptionalColorFieldEditor(fieldNaam+"background","Background", c);
backgroundEdit.setOptionalColor(style.getBackground());
//addField(spacer1);
BooleanFieldEditor bold = new BooleanFieldEditor(
fieldNaam+"bold",
"Bold",
new Composite(c,SWT.NONE));
BooleanFieldEditor italic= new BooleanFieldEditor(
fieldNaam+"italic",
"Italic",
new Composite(c,SWT.NONE));
BooleanFieldEditor underline = new BooleanFieldEditor(
fieldNaam+"underline",
"Underline",
new Composite(c,SWT.NONE));
BooleanFieldEditor foldable = new BooleanFieldEditor(
fieldNaam+"foldable",
"Foldable",
new Composite(c,SWT.NONE));
BooleanFieldEditor folded = new BooleanFieldEditor(
fieldNaam+"folded",
"Folded",
new Composite(c,SWT.NONE));
addField(foregroundEdit);
addField(backgroundEdit);
addField(bold);
addField(italic);
addField(underline);
addField(foldable);
addField(folded);
current2.add(foregroundEdit);
current2.add(backgroundEdit);
current2.add(bold);
current2.add(italic);
current2.add(underline);
current2.add(foldable);
current2.add(folded);
current.put(selector, current2);
c.pack();
selectorGroup.pack();
}
options.put(taalS, current);
container.pack();
scroller.pack();
}
}
@Override
public void init(IWorkbench workbench) {
_workbench = workbench;
}
private IWorkbench _workbench;
//reads the configuration for a specific language and gets it from the xml file
private HashMap<String, HashMap<Selector, PresentationStyle>> readConfigurables() {
//lees eerst alle talen uit
List<String> languages = new ArrayList<String>();
for(Language lang: LanguageMgt.getInstance().workspace().languageRepository().languages()) {
languages.add(lang.name());
}
//haal van alle talen alle elementen op
HashMap<String, HashMap<Selector, PresentationStyle>> result = obtainLanguageColorElements(languages);
return result;
}
private HashMap<String, HashMap<Selector, PresentationStyle>> obtainLanguageColorElements(List<String> talen) {
HashMap<String, HashMap<Selector, PresentationStyle>> result = new HashMap<String, HashMap<Selector, PresentationStyle>>();
for(String taal: talen){
HashMap<Selector,PresentationStyle> taalResult = LanguageMgt.getInstance().getPresentationModel(taal).getRules();
result.put(taal, taalResult);
}
return result;
}
@Override
public boolean performOk(){
boolean prev= super.performOk();
performChoices();
return prev;
}
@Override
public void performApply(){
super.performApply();
performChoices();
//ChameleonOutlineTree.setAllowedElements(lang, allowed);
}
}
|
|
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.test.api.authorization.history;
import static org.camunda.bpm.engine.authorization.Authorization.ANY;
import static org.camunda.bpm.engine.authorization.Permissions.READ_HISTORY;
import static org.camunda.bpm.engine.authorization.Resources.PROCESS_DEFINITION;
import java.util.List;
import org.camunda.bpm.engine.ProcessEngineConfiguration;
import org.camunda.bpm.engine.history.HistoricProcessInstance;
import org.camunda.bpm.engine.history.HistoricVariableInstanceQuery;
import org.camunda.bpm.engine.impl.AbstractQuery;
import org.camunda.bpm.engine.task.Task;
import org.camunda.bpm.engine.test.RequiredHistoryLevel;
import org.camunda.bpm.engine.test.api.authorization.AuthorizationTest;
/**
* @author Roman Smirnov
*
*/
@RequiredHistoryLevel(ProcessEngineConfiguration.HISTORY_FULL)
public class HistoricVariableInstanceAuthorizationTest extends AuthorizationTest {
protected static final String PROCESS_KEY = "oneTaskProcess";
protected static final String MESSAGE_START_PROCESS_KEY = "messageStartProcess";
protected static final String CASE_KEY = "oneTaskCase";
protected String deploymentId;
@Override
public void setUp() throws Exception {
deploymentId = createDeployment(null,
"org/camunda/bpm/engine/test/api/oneTaskProcess.bpmn20.xml",
"org/camunda/bpm/engine/test/api/authorization/messageStartEventProcess.bpmn20.xml",
"org/camunda/bpm/engine/test/api/authorization/oneTaskCase.cmmn").getId();
super.setUp();
}
@Override
public void tearDown() {
super.tearDown();
deleteDeployment(deploymentId);
}
// historic variable instance query (standalone task) /////////////////////////////////////////////
public void testQueryAfterStandaloneTaskVariables() {
// given
String taskId = "myTask";
createTask(taskId);
disableAuthorization();
taskService.setVariables(taskId, getVariables());
enableAuthorization();
// when
HistoricVariableInstanceQuery query = historyService.createHistoricVariableInstanceQuery();
// then
verifyQueryResults(query, 1);
deleteTask(taskId, true);
}
// historic variable instance query (process variables) ///////////////////////////////////////////
public void testSimpleQueryWithoutAuthorization() {
// given
startProcessInstanceByKey(PROCESS_KEY, getVariables());
// when
HistoricVariableInstanceQuery query = historyService.createHistoricVariableInstanceQuery();
// then
verifyQueryResults(query, 0);
}
public void testSimpleQueryWithReadHistoryPermissionOnProcessDefinition() {
// given
startProcessInstanceByKey(PROCESS_KEY, getVariables());
createGrantAuthorization(PROCESS_DEFINITION, PROCESS_KEY, userId, READ_HISTORY);
// when
HistoricVariableInstanceQuery query = historyService.createHistoricVariableInstanceQuery();
// then
verifyQueryResults(query, 1);
}
public void testSimpleQueryWithReadHistoryPermissionOnAnyProcessDefinition() {
// given
startProcessInstanceByKey(PROCESS_KEY, getVariables());
createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, READ_HISTORY);
// when
HistoricVariableInstanceQuery query = historyService.createHistoricVariableInstanceQuery();
// then
verifyQueryResults(query, 1);
}
public void testSimpleQueryWithMultiple() {
// given
startProcessInstanceByKey(PROCESS_KEY, getVariables());
createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, READ_HISTORY);
createGrantAuthorization(PROCESS_DEFINITION, PROCESS_KEY, userId, READ_HISTORY);
// when
HistoricVariableInstanceQuery query = historyService.createHistoricVariableInstanceQuery();
// then
verifyQueryResults(query, 1);
}
// historic variable instance query (multiple process instances) ////////////////////////
public void testQueryWithoutAuthorization() {
// given
startProcessInstanceByKey(PROCESS_KEY, getVariables());
startProcessInstanceByKey(PROCESS_KEY, getVariables());
startProcessInstanceByKey(PROCESS_KEY, getVariables());
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY, getVariables());
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY, getVariables());
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY, getVariables());
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY, getVariables());
// when
HistoricVariableInstanceQuery query = historyService.createHistoricVariableInstanceQuery();
// then
verifyQueryResults(query, 0);
}
public void testQueryWithReadHistoryPermissionOnProcessDefinition() {
// given
startProcessInstanceByKey(PROCESS_KEY, getVariables());
startProcessInstanceByKey(PROCESS_KEY, getVariables());
startProcessInstanceByKey(PROCESS_KEY, getVariables());
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY, getVariables());
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY, getVariables());
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY, getVariables());
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY, getVariables());
createGrantAuthorization(PROCESS_DEFINITION, PROCESS_KEY, userId, READ_HISTORY);
// when
HistoricVariableInstanceQuery query = historyService.createHistoricVariableInstanceQuery();
// then
verifyQueryResults(query, 3);
}
public void testQueryWithReadHistoryPermissionOnAnyProcessDefinition() {
// given
startProcessInstanceByKey(PROCESS_KEY, getVariables());
startProcessInstanceByKey(PROCESS_KEY, getVariables());
startProcessInstanceByKey(PROCESS_KEY, getVariables());
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY, getVariables());
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY, getVariables());
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY, getVariables());
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY, getVariables());
createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, READ_HISTORY);
// when
HistoricVariableInstanceQuery query = historyService.createHistoricVariableInstanceQuery();
// then
verifyQueryResults(query, 7);
}
// historic variable instance query (case variables) /////////////////////////////////////////////
public void testQueryAfterCaseVariables() {
// given
createCaseInstanceByKey(CASE_KEY, getVariables());
// when
HistoricVariableInstanceQuery query = historyService.createHistoricVariableInstanceQuery();
// then
verifyQueryResults(query, 1);
}
// historic variable instance query (mixed variables) ////////////////////////////////////
public void testMixedQueryWithoutAuthorization() {
// given
startProcessInstanceByKey(PROCESS_KEY, getVariables());
startProcessInstanceByKey(PROCESS_KEY, getVariables());
startProcessInstanceByKey(PROCESS_KEY, getVariables());
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY, getVariables());
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY, getVariables());
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY, getVariables());
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY, getVariables());
createTask("one");
createTask("two");
createTask("three");
createTask("four");
createTask("five");
disableAuthorization();
taskService.setVariables("one", getVariables());
taskService.setVariables("two", getVariables());
taskService.setVariables("three", getVariables());
taskService.setVariables("four", getVariables());
taskService.setVariables("five", getVariables());
enableAuthorization();
createCaseInstanceByKey(CASE_KEY, getVariables());
createCaseInstanceByKey(CASE_KEY, getVariables());
// when
HistoricVariableInstanceQuery query = historyService.createHistoricVariableInstanceQuery();
// then
verifyQueryResults(query, 7);
deleteTask("one", true);
deleteTask("two", true);
deleteTask("three", true);
deleteTask("four", true);
deleteTask("five", true);
}
public void testMixedQueryWithReadHistoryPermissionOnProcessDefinition() {
// given
startProcessInstanceByKey(PROCESS_KEY, getVariables());
startProcessInstanceByKey(PROCESS_KEY, getVariables());
startProcessInstanceByKey(PROCESS_KEY, getVariables());
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY, getVariables());
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY, getVariables());
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY, getVariables());
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY, getVariables());
createTask("one");
createTask("two");
createTask("three");
createTask("four");
createTask("five");
disableAuthorization();
taskService.setVariables("one", getVariables());
taskService.setVariables("two", getVariables());
taskService.setVariables("three", getVariables());
taskService.setVariables("four", getVariables());
taskService.setVariables("five", getVariables());
enableAuthorization();
createCaseInstanceByKey(CASE_KEY, getVariables());
createCaseInstanceByKey(CASE_KEY, getVariables());
createGrantAuthorization(PROCESS_DEFINITION, PROCESS_KEY, userId, READ_HISTORY);
// when
HistoricVariableInstanceQuery query = historyService.createHistoricVariableInstanceQuery();
// then
verifyQueryResults(query, 10);
deleteTask("one", true);
deleteTask("two", true);
deleteTask("three", true);
deleteTask("four", true);
deleteTask("five", true);
}
public void testMixedQueryWithReadHistoryPermissionOnAnyProcessDefinition() {
// given
startProcessInstanceByKey(PROCESS_KEY, getVariables());
startProcessInstanceByKey(PROCESS_KEY, getVariables());
startProcessInstanceByKey(PROCESS_KEY, getVariables());
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY, getVariables());
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY, getVariables());
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY, getVariables());
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY, getVariables());
createTask("one");
createTask("two");
createTask("three");
createTask("four");
createTask("five");
disableAuthorization();
taskService.setVariables("one", getVariables());
taskService.setVariables("two", getVariables());
taskService.setVariables("three", getVariables());
taskService.setVariables("four", getVariables());
taskService.setVariables("five", getVariables());
enableAuthorization();
createCaseInstanceByKey(CASE_KEY, getVariables());
createCaseInstanceByKey(CASE_KEY, getVariables());
createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, READ_HISTORY);
// when
HistoricVariableInstanceQuery query = historyService.createHistoricVariableInstanceQuery();
// then
verifyQueryResults(query, 14);
deleteTask("one", true);
deleteTask("two", true);
deleteTask("three", true);
deleteTask("four", true);
deleteTask("five", true);
}
// delete deployment (cascade = false)
public void testQueryAfterDeletingDeployment() {
// given
startProcessInstanceByKey(PROCESS_KEY, getVariables());
startProcessInstanceByKey(PROCESS_KEY, getVariables());
startProcessInstanceByKey(PROCESS_KEY, getVariables());
createGrantAuthorization(PROCESS_DEFINITION, PROCESS_KEY, userId, READ_HISTORY);
disableAuthorization();
List<Task> tasks = taskService.createTaskQuery().list();
for (Task task : tasks) {
taskService.complete(task.getId());
}
enableAuthorization();
disableAuthorization();
repositoryService.deleteDeployment(deploymentId);
enableAuthorization();
// when
HistoricVariableInstanceQuery query = historyService.createHistoricVariableInstanceQuery();
// then
verifyQueryResults(query, 3);
disableAuthorization();
List<HistoricProcessInstance> instances = historyService.createHistoricProcessInstanceQuery().list();
for (HistoricProcessInstance instance : instances) {
historyService.deleteHistoricProcessInstance(instance.getId());
}
enableAuthorization();
}
// helper ////////////////////////////////////////////////////////
protected void verifyQueryResults(HistoricVariableInstanceQuery query, int countExpected) {
verifyQueryResults((AbstractQuery<?, ?>) query, countExpected);
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.distributed.internal;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import org.apache.geode.SystemFailure;
import org.apache.geode.internal.monitoring.ThreadsMonitoring;
/**
* A ThreadPoolExecutor with stat support.
*
*/
/**
 * A ThreadPoolExecutor with stat support.
 *
 * When constructed with a non-synchronous queue, rejected tasks are parked in
 * that queue ({@link #bufferQueue}) and a dedicated consumer thread feeds them
 * back into the pool's internal SynchronousQueue.
 */
public class PooledExecutorWithDMStats extends ThreadPoolExecutor {
  /** Job start/end statistics; may be null when stats are not collected. */
  protected final PoolStatHelper stats;
  /** Per-task execution monitoring hook; may be null. */
  private final ThreadsMonitoring threadMonitoring;

  /**
   * Create a new pool.
   *
   * @param q the hand-off queue used by the underlying ThreadPoolExecutor
   * @param maxPoolSize maximum number of pool threads
   * @param stats stat helper, or null for no stats
   * @param tf factory used for pool (and buffer-consumer) threads
   * @param msTimeout idle-thread timeout in milliseconds
   * @param reh handler invoked when a task cannot be queued
   * @param tMonitoring monitoring hook, or null
   **/
  public PooledExecutorWithDMStats(SynchronousQueue<Runnable> q, int maxPoolSize,
      PoolStatHelper stats, ThreadFactory tf, int msTimeout, RejectedExecutionHandler reh,
      ThreadsMonitoring tMonitoring) {
    super(getCorePoolSize(maxPoolSize), maxPoolSize, msTimeout, TimeUnit.MILLISECONDS, q, tf, reh);
    // if (getCorePoolSize() != 0 && getCorePoolSize() == getMaximumPoolSize()) {
    // allowCoreThreadTimeOut(true); // deadcoded for 1.5
    // }
    this.stats = stats;
    this.threadMonitoring = tMonitoring;
  }

  /**
   * Used to buffer up tasks that would be have been rejected. Only used (i.e. non-null) if
   * constructor queue is not a SynchronousQueue.
   */
  protected BlockingQueue<Runnable> bufferQueue;
  /**
   * Used to consume items off the bufferQueue and put them into the pools synchronous queue. Only
   * used (i.e. non-null) if constructor queue is not a SynchronousQueue.
   */
  private Thread bufferConsumer;

  /** Returns q itself when it is synchronous, otherwise a fresh SynchronousQueue. */
  private static SynchronousQueue<Runnable> initQ(BlockingQueue<Runnable> q) {
    if (q instanceof SynchronousQueue) {
      return (SynchronousQueue<Runnable>) q;
    } else {
      return new SynchronousQueue/* NoSpin */<Runnable>();
    }
  }

  /** Chooses the rejection policy matching the queue type passed by the caller. */
  private static RejectedExecutionHandler initREH(BlockingQueue<Runnable> q) {
    if (q instanceof SynchronousQueue) {
      return new CallerRunsPolicy();
      // return new BlockHandler();
    } else {
      // create a thread that takes from bufferQueue and puts into result
      return new BufferHandler();
    }
  }

  /**
   * Create a new pool that uses the supplied Channel for queuing, and with all default parameter
   * settings except for pool size.
   **/
  public PooledExecutorWithDMStats(BlockingQueue<Runnable> q, int maxPoolSize, PoolStatHelper stats,
      ThreadFactory tf, int msTimeout, ThreadsMonitoring tMonitoring) {
    this(initQ(q), maxPoolSize, stats, tf, msTimeout, initREH(q), tMonitoring);
    if (!(q instanceof SynchronousQueue)) {
      this.bufferQueue = q;
      // create a thread that takes from bufferQueue and puts into result
      final BlockingQueue<Runnable> takeQueue = q;
      final BlockingQueue<Runnable> putQueue = getQueue();
      Runnable r = new Runnable() {
        @Override
        public void run() {
          try {
            for (;;) {
              SystemFailure.checkFailure();
              Runnable job = takeQueue.take();
              putQueue.put(job);
            }
          } catch (InterruptedException ie) {
            Thread.currentThread().interrupt();
            // this thread is being shutdown so just return;
            return;
          }
        }
      };
      this.bufferConsumer = tf.newThread(r);
      this.bufferConsumer.start();
    }
  }

  @Override
  public void shutdown() {
    try {
      super.shutdown();
    } finally {
      // Stop the buffer-consumer thread even if super.shutdown() throws.
      terminated();
    }
  }

  @Override
  protected void terminated() {
    if (this.bufferConsumer != null) {
      this.bufferConsumer.interrupt();
    }
    super.terminated();
  }

  @Override
  public List<Runnable> shutdownNow() {
    terminated();
    // Parameterized (was a raw List) to match the ThreadPoolExecutor signature.
    List<Runnable> l = super.shutdownNow();
    if (this.bufferQueue != null) {
      // Also report the tasks that were still parked in the buffer queue.
      this.bufferQueue.drainTo(l);
    }
    return l;
  }

  /**
   * Sets timeout to IDLE_THREAD_TIMEOUT
   */
  public PooledExecutorWithDMStats(BlockingQueue<Runnable> q, int poolSize, PoolStatHelper stats,
      ThreadFactory tf, ThreadsMonitoring tMonitoring,
      String systemPropertyPrefix) {
    /**
     * How long an idle thread will wait, in milliseconds, before it is removed from its thread
     * pool. Default is (30000 * 60) ms (30 minutes). It is not static so it can be set at runtime
     * and pick up different values.
     */
    this(q, poolSize, stats, tf,
        Integer.getInteger(systemPropertyPrefix + "IDLE_THREAD_TIMEOUT", 30000 * 60)
            .intValue(),
        tMonitoring);
  }

  /**
   * Default timeout with no stats.
   */
  public PooledExecutorWithDMStats(BlockingQueue<Runnable> q, int poolSize, ThreadFactory tf,
      ThreadsMonitoring tMonitoring, String systemPropertyPrefix) {
    this(q, poolSize, null/* no stats */, tf, tMonitoring, systemPropertyPrefix);
  }

  @Override
  protected void beforeExecute(Thread t, Runnable r) {
    if (this.stats != null) {
      this.stats.startJob();
    }
    if (this.threadMonitoring != null) {
      threadMonitoring.startMonitor(ThreadsMonitoring.Mode.PooledExecutor);
    }
  }

  @Override
  protected void afterExecute(Runnable r, Throwable ex) {
    if (this.stats != null) {
      this.stats.endJob();
    }
    if (this.threadMonitoring != null) {
      threadMonitoring.endMonitor();
    }
  }

  /** Core size: 0 for unbounded pools, otherwise a single always-ready thread. */
  private static int getCorePoolSize(int maxSize) {
    if (maxSize == Integer.MAX_VALUE) {
      return 0;
    } else {
      return 1;
      // int result = Runtime.getRuntime().availableProcessors();
      // if (result < 2) {
      // result = 2;
      // }
      // if (result > maxSize) {
      // result = maxSize;
      // }
      // return result;
    }
  }

  /**
   * This handler does a put which will just wait until the queue has room.
   */
  public static class BlockHandler implements RejectedExecutionHandler {
    @Override
    public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
      if (executor.isShutdown()) {
        throw new RejectedExecutionException(
            "executor has been shutdown");
      } else {
        try {
          executor.getQueue().put(r);
        } catch (InterruptedException ie) {
          Thread.currentThread().interrupt();
          RejectedExecutionException e = new RejectedExecutionException(
              "interrupted");
          e.initCause(ie);
          // BUG FIX: the exception was previously constructed but never thrown,
          // so an interrupted submission silently dropped the task.
          throw e;
        }
      }
    }
  }

  /**
   * This handler fronts a synchronous queue, that is owned by the parent ThreadPoolExecutor, with a
   * the
   * client supplied BlockingQueue that supports storage (the buffer queue). A dedicated thread is
   * used to consume off the buffer queue and put into the synchronous queue.
   */
  public static class BufferHandler implements RejectedExecutionHandler {
    @Override
    public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
      if (executor.isShutdown()) {
        throw new RejectedExecutionException(
            "executor has been shutdown");
      } else {
        try {
          PooledExecutorWithDMStats pool = (PooledExecutorWithDMStats) executor;
          pool.bufferQueue.put(r);
        } catch (InterruptedException ie) {
          Thread.currentThread().interrupt();
          RejectedExecutionException e = new RejectedExecutionException(
              "interrupted");
          e.initCause(ie);
          throw e;
        }
      }
    }
  }
}
|
|
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2/context.proto
package com.google.cloud.dialogflow.v2;
/**
*
*
* <pre>
* The request message for
* [Contexts.DeleteAllContexts][google.cloud.dialogflow.v2.Contexts.DeleteAllContexts].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2.DeleteAllContextsRequest}
*/
public final class DeleteAllContextsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2.DeleteAllContextsRequest)
DeleteAllContextsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeleteAllContextsRequest.newBuilder() to construct.
private DeleteAllContextsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DeleteAllContextsRequest() {
parent_ = "";
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private DeleteAllContextsRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
java.lang.String s = input.readStringRequireUtf8();
parent_ = s;
break;
}
default:
{
if (!parseUnknownFieldProto3(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2.ContextProto
.internal_static_google_cloud_dialogflow_v2_DeleteAllContextsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2.ContextProto
.internal_static_google_cloud_dialogflow_v2_DeleteAllContextsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2.DeleteAllContextsRequest.class,
com.google.cloud.dialogflow.v2.DeleteAllContextsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
private volatile java.lang.Object parent_;
/**
*
*
* <pre>
* Required. The name of the session to delete all contexts from. Format:
* `projects/<Project ID>/agent/sessions/<Session ID>`.
* </pre>
*
* <code>string parent = 1;</code>
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The name of the session to delete all contexts from. Format:
* `projects/<Project ID>/agent/sessions/<Session ID>`.
* </pre>
*
* <code>string parent = 1;</code>
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!getParentBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!getParentBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dialogflow.v2.DeleteAllContextsRequest)) {
return super.equals(obj);
}
com.google.cloud.dialogflow.v2.DeleteAllContextsRequest other =
(com.google.cloud.dialogflow.v2.DeleteAllContextsRequest) obj;
boolean result = true;
result = result && getParent().equals(other.getParent());
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
// memoizedHashCode caches the hash; 0 means "not computed yet".
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
// Standard generated-protobuf hash: seeded with the descriptor, then each
// field mixed in via its field number and value.
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// --- Static parse entry points (generated) ---------------------------------
// Overloads for in-memory data (ByteBuffer / ByteString / byte[]) delegate to
// PARSER directly and throw InvalidProtocolBufferException on malformed input.
public static com.google.cloud.dialogflow.v2.DeleteAllContextsRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2.DeleteAllContextsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.DeleteAllContextsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2.DeleteAllContextsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.DeleteAllContextsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2.DeleteAllContextsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
// Stream-based overloads wrap parse errors so callers see java.io.IOException.
public static com.google.cloud.dialogflow.v2.DeleteAllContextsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2.DeleteAllContextsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a length-prefixed message from the stream.
public static com.google.cloud.dialogflow.v2.DeleteAllContextsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2.DeleteAllContextsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.DeleteAllContextsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2.DeleteAllContextsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// --- Builder factory methods (generated) -----------------------------------
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
// Returns a fresh builder seeded from the default (empty) instance.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Returns a builder pre-populated with the given prototype's field values.
public static Builder newBuilder(
com.google.cloud.dialogflow.v2.DeleteAllContextsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// Avoid the mergeFrom cost when converting the shared default instance.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The request message for
* [Contexts.DeleteAllContexts][google.cloud.dialogflow.v2.Contexts.DeleteAllContexts].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2.DeleteAllContextsRequest}
*
* Generated mutable builder for {@code DeleteAllContextsRequest}. Holds the
* single `parent` field as a String/ByteString hybrid (same lazy-conversion
* scheme as the message class) until {@link #build()} is called.
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2.DeleteAllContextsRequest)
com.google.cloud.dialogflow.v2.DeleteAllContextsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2.ContextProto
.internal_static_google_cloud_dialogflow_v2_DeleteAllContextsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2.ContextProto
.internal_static_google_cloud_dialogflow_v2_DeleteAllContextsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2.DeleteAllContextsRequest.class,
com.google.cloud.dialogflow.v2.DeleteAllContextsRequest.Builder.class);
}
// Construct using com.google.cloud.dialogflow.v2.DeleteAllContextsRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// No nested-message fields, so there are no field builders to force-create.
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
parent_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.v2.ContextProto
.internal_static_google_cloud_dialogflow_v2_DeleteAllContextsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2.DeleteAllContextsRequest getDefaultInstanceForType() {
return com.google.cloud.dialogflow.v2.DeleteAllContextsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dialogflow.v2.DeleteAllContextsRequest build() {
com.google.cloud.dialogflow.v2.DeleteAllContextsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2.DeleteAllContextsRequest buildPartial() {
com.google.cloud.dialogflow.v2.DeleteAllContextsRequest result =
new com.google.cloud.dialogflow.v2.DeleteAllContextsRequest(this);
result.parent_ = parent_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return (Builder) super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return (Builder) super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
// Fast path for merging the same message type; otherwise fall back to the
// reflective field-by-field merge in the superclass.
if (other instanceof com.google.cloud.dialogflow.v2.DeleteAllContextsRequest) {
return mergeFrom((com.google.cloud.dialogflow.v2.DeleteAllContextsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dialogflow.v2.DeleteAllContextsRequest other) {
if (other == com.google.cloud.dialogflow.v2.DeleteAllContextsRequest.getDefaultInstance())
return this;
// Proto3 merge: a set (non-empty) field in `other` overwrites ours.
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.dialogflow.v2.DeleteAllContextsRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Keep whatever was parsed before the failure so it can still be merged
// in the finally block, then rethrow as a plain IOException.
parsedMessage =
(com.google.cloud.dialogflow.v2.DeleteAllContextsRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Backing storage for `parent`: String or ByteString, lazily converted.
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The name of the session to delete all contexts from. Format:
* `projects/&lt;Project ID&gt;/agent/sessions/&lt;Session ID&gt;`.
* </pre>
*
* <code>string parent = 1;</code>
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the session to delete all contexts from. Format:
* `projects/&lt;Project ID&gt;/agent/sessions/&lt;Session ID&gt;`.
* </pre>
*
* <code>string parent = 1;</code>
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the session to delete all contexts from. Format:
* `projects/&lt;Project ID&gt;/agent/sessions/&lt;Session ID&gt;`.
* </pre>
*
* <code>string parent = 1;</code>
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the session to delete all contexts from. Format:
* `projects/&lt;Project ID&gt;/agent/sessions/&lt;Session ID&gt;`.
* </pre>
*
* <code>string parent = 1;</code>
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the session to delete all contexts from. Format:
* `projects/&lt;Project ID&gt;/agent/sessions/&lt;Session ID&gt;`.
* </pre>
*
* <code>string parent = 1;</code>
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Proto3 strings must be valid UTF-8; reject invalid bytes eagerly.
checkByteStringIsUtf8(value);
parent_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFieldsProto3(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2.DeleteAllContextsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2.DeleteAllContextsRequest)
// Shared immutable default (all-fields-empty) instance.
private static final com.google.cloud.dialogflow.v2.DeleteAllContextsRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2.DeleteAllContextsRequest();
}
public static com.google.cloud.dialogflow.v2.DeleteAllContextsRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser singleton; each call constructs a new message from the input stream.
private static final com.google.protobuf.Parser<DeleteAllContextsRequest> PARSER =
new com.google.protobuf.AbstractParser<DeleteAllContextsRequest>() {
@java.lang.Override
public DeleteAllContextsRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new DeleteAllContextsRequest(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<DeleteAllContextsRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<DeleteAllContextsRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2.DeleteAllContextsRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
|
/*
* Copyright 2007 Taro L. Saito
* Copyright 2015 crea-doo.at
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package at.creadoo.util.nativeloader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.log4j.Logger;
import at.creadoo.util.nativeloader.util.Util;
public class NativeLoader {

    private static final Logger log = Logger.getLogger(NativeLoader.class);

    /** Supplies the current operating-system / architecture names. */
    private OSInfo osInfo = new OSInfo();

    /** Set to true once the native library has been loaded successfully. */
    private boolean extracted = false;

    /** Optional resource-path prefix under which the native libraries live. */
    private String libraryPath = "";

    /** Base name (path and extension stripped) of the library to load. */
    private String libraryName = null;

    /** os.arch value -> folder-name mapping used to build the resource path. */
    private final Map<String, String> mappingArch = OSInfo.getArchMapDefault();

    /** os.name value -> folder-name mapping used to build the resource path. */
    private final Map<String, String> mappingOS = OSInfo.getOSMapDefault();

    /** (os, arch) pair -> library-base-name overrides. */
    private final Map<ImmutablePair<String, String>, String> mappingLibraryName = new HashMap<ImmutablePair<String, String>, String>();

    /**
     * Creates a loader for the given library. Any directory component and file
     * extension are stripped; only the base name is kept.
     *
     * @param libraryName name of the native library (may carry an extension)
     */
    public NativeLoader(final String libraryName) {
        if (libraryName != null && !libraryName.isEmpty()) {
            this.libraryName = FilenameUtils.getBaseName(libraryName);
        }
    }

    protected NativeLoader(final String libraryName, final OSInfo osInfo) {
        this(libraryName);
        this.osInfo = osInfo;
    }

    /**
     * Creates a loader for the given library located below the given resource path.
     *
     * @param libraryPath resource-path prefix (ignored when null or empty)
     * @param libraryName name of the native library (may carry an extension)
     */
    public NativeLoader(final String libraryPath, final String libraryName) {
        this(libraryName);
        if (libraryPath != null && !libraryPath.isEmpty()) {
            log.debug("libraryPath: " + libraryPath);
            this.libraryPath = libraryPath;
        }
    }

    protected NativeLoader(final String libraryPath, final String libraryName, final OSInfo osInfo) {
        this(libraryPath, libraryName);
        this.osInfo = osInfo;
    }

    /** Adds/overrides an architecture-name mapping. Returns this loader for chaining. */
    public NativeLoader mapArch(final ARCH arch, final String archValue) {
        return mapArch(arch.getValue(), archValue);
    }

    /** Adds/overrides an architecture-name mapping; blank names are ignored. */
    public NativeLoader mapArch(final String archName, final String archValue) {
        if (archName != null && !archName.trim().isEmpty() && archValue != null) {
            mappingArch.put(archName.trim(), archValue.trim());
        }
        return this;
    }

    /** Adds/overrides an OS-name mapping. Returns this loader for chaining. */
    public NativeLoader mapOS(final OS os, final String osValue) {
        return mapOS(os.getValue(), osValue);
    }

    /** Adds/overrides an OS-name mapping; blank names are ignored. */
    public NativeLoader mapOS(final String osName, final String osValue) {
        if (osName != null && !osName.trim().isEmpty() && osValue != null) {
            mappingOS.put(osName.trim(), osValue.trim());
        }
        return this;
    }

    /** Overrides the library base name for a specific (OS, architecture) pair. */
    public NativeLoader mapLibraryName(final OS os, final ARCH arch, final String libraryName) {
        return mapLibraryName(os.getValue(), arch.getValue(), libraryName);
    }

    /** Overrides the library base name for a specific (OS, architecture) pair; blank keys are ignored. */
    public NativeLoader mapLibraryName(final String osName, final String archName, final String libraryName) {
        if (osName != null && !osName.trim().isEmpty() && archName != null && !archName.trim().isEmpty() && libraryName != null) {
            mappingLibraryName.put(new ImmutablePair<String, String>(osName.trim(), archName.trim()), libraryName.trim());
        }
        return this;
    }

    protected NativeLoader setOSInfo(final OSInfo osInfo) {
        this.osInfo = osInfo;
        return this;
    }

    /**
     * Maps the logical library name to a platform file name (via
     * {@link System#mapLibraryName}), normalizing "dylib" to "jnilib" and
     * ensuring a "lib" prefix.
     */
    private String getLibraryName(final String libraryName) {
        String nativeLibraryName = System.mapLibraryName(libraryName);
        if (nativeLibraryName != null && FilenameUtils.getExtension(nativeLibraryName).equals("dylib")) {
            nativeLibraryName = nativeLibraryName.replace("dylib", "jnilib");
        }
        if (nativeLibraryName != null && !nativeLibraryName.startsWith("lib")) {
            nativeLibraryName = "lib" + nativeLibraryName;
        }
        return nativeLibraryName;
    }

    /**
     * Same as {@link #getLibraryName(String)} but honours a per-(OS, arch)
     * base-name override from {@code mappingLibraryName} when present.
     */
    private String getLibraryName(final String osName, final String archName, final Map<ImmutablePair<String, String>, String> mappingLibraryName, final String libraryName) {
        String nativeLibraryName = System.mapLibraryName(libraryName);
        final ImmutablePair<String, String> key = new ImmutablePair<String, String>(osName, archName);
        if (mappingLibraryName.containsKey(key)) {
            nativeLibraryName = System.mapLibraryName(mappingLibraryName.get(key));
        }
        if (nativeLibraryName != null && FilenameUtils.getExtension(nativeLibraryName).equals("dylib")) {
            nativeLibraryName = nativeLibraryName.replace("dylib", "jnilib");
        }
        if (nativeLibraryName != null && !nativeLibraryName.startsWith("lib")) {
            nativeLibraryName = "lib" + nativeLibraryName;
        }
        return nativeLibraryName;
    }

    /**
     * Builds the OS/arch-specific resource folder (always with '/' separators),
     * optionally prefixed by the configured {@code libraryPath}.
     */
    protected String getResourcePath() {
        log.debug("mappingOS: " + mappingOS);
        log.debug("mappingArch: " + mappingArch);
        log.debug("mappingLibraryName: " + mappingLibraryName);
        String nativeLibraryPath = this.libraryPath;
        log.debug("nativeLibraryPath: " + nativeLibraryPath);
        if (nativeLibraryPath == null || nativeLibraryPath.isEmpty()) {
            nativeLibraryPath = FilenameUtils.normalize(osInfo.getNativeLibFolderPathForCurrentOS(this.mappingOS, this.mappingArch), true).replace(System.getProperty("file.separator"), "/");
        } else {
            nativeLibraryPath = FilenameUtils.normalize(this.libraryPath + "/" + osInfo.getNativeLibFolderPathForCurrentOS(this.mappingOS, this.mappingArch), true).replace(System.getProperty("file.separator"), "/");
        }
        return nativeLibraryPath;
    }

    /** Resolves the platform-specific file name of the library for the current OS/arch. */
    protected String getResourceName() {
        log.debug("mappingOS: " + mappingOS);
        log.debug("mappingArch: " + mappingArch);
        log.debug("mappingLibraryName: " + mappingLibraryName);
        String nativeLibraryName = getLibraryName(osInfo.getOSName(), osInfo.getArchName(), mappingLibraryName, libraryName);
        log.debug("nativeLibraryName: " + nativeLibraryName);
        return nativeLibraryName;
    }

    /**
     * Loads the native library: first tries the configured file-system path
     * directly; otherwise extracts the OS/arch-specific resource from the jar
     * to a temporary file and loads that. No-op if already loaded.
     *
     * @param clazz class whose classloader is used to locate the resource
     * @throws Exception if no native library exists for the current OS/arch,
     *         or it cannot be extracted and loaded
     */
    public void load(final Class<?> clazz) throws Exception {
        if (this.extracted) {
            return;
        }
        String nativeLibraryPath = this.libraryPath;
        String nativeLibraryName = getLibraryName(libraryName);
        log.debug("nativeLibraryPath: " + nativeLibraryPath);
        log.debug("nativeLibraryName: " + nativeLibraryName);
        if (loadNativeLibrary(new File(nativeLibraryPath, nativeLibraryName))) {
            this.extracted = true;
            return;
        }
        // Load the os-dependent library from the jar file
        nativeLibraryPath = getResourcePath();
        nativeLibraryName = getResourceName();
        log.debug("nativeLibraryPath: " + nativeLibraryPath);
        log.debug("nativeLibraryName: " + nativeLibraryName);
        boolean hasNativeLib = Util.hasResource(clazz, nativeLibraryPath + "/" + nativeLibraryName);
        log.debug("hasNativeLib: " + hasNativeLib);
        if (!hasNativeLib) {
            if (osInfo.getOSName().equals("Mac")) {
                // Fix for openjdk7 for Mac
                // NOTE(review): this probes this.libraryPath rather than the
                // OS/arch-specific nativeLibraryPath used above — confirm intended.
                String altName = FilenameUtils.getBaseName(nativeLibraryName) + ".jnilib";
                if (Util.hasResource(clazz, FilenameUtils.concat(this.libraryPath, altName))) {
                    nativeLibraryName = altName;
                    hasNativeLib = true;
                }
            }
        }
        if (!hasNativeLib) {
            this.extracted = false;
            throw new Exception(String.format("No native library is found for os.name=%s and os.arch=%s", osInfo.getOSName(), osInfo.getArchName()));
        }
        // temporary library folder
        final String tempFolder = new File(System.getProperty("java.io.tmpdir")).getAbsolutePath();
        // Try extracting the library from jar
        if (extractAndLoadLibraryFile(clazz, nativeLibraryPath, nativeLibraryName, tempFolder)) {
            this.extracted = true;
            return;
        } else {
            this.extracted = false;
            throw new Exception(String.format("Native library for os.name=%s and os.arch=%s can't be extracted", osInfo.getOSName(), osInfo.getArchName()));
        }
    }

    /**
     * Extracts the specified library resource to the target folder and loads it.
     *
     * @param clazz class whose classloader is used to read the resource
     * @param nativeLibraryPath resource folder holding the library
     * @param libraryFileName file name of the library within that folder
     * @param targetFolder folder the library is extracted into
     * @return true if the library was extracted and loaded successfully
     */
    private boolean extractAndLoadLibraryFile(final Class<?> clazz, final String nativeLibraryPath, final String libraryFileName, final String targetFolder) {
        // Include architecture name in temporary filename in order to avoid
        // conflicts when multiple JVMs with different architectures running at
        // the same time
        final String uuid = UUID.randomUUID().toString();
        final String extractedLibFileName = String.format("%s-%s", FilenameUtils.getBaseName(libraryFileName), uuid);
        final File extractedLibFile = new File(targetFolder, extractedLibFileName + "." + FilenameUtils.getExtension(libraryFileName));
        final String resourcePath = "/" + nativeLibraryPath + "/" + libraryFileName;
        log.debug("resourcePath: " + resourcePath);
        try {
            // Extract a native library file into the target directory
            final InputStream reader = clazz.getResourceAsStream(resourcePath);
            if (reader == null) {
                return false;
            }
            final FileOutputStream writer = new FileOutputStream(extractedLibFile);
            try {
                IOUtils.copy(reader, writer);
            } finally {
                // Delete the extracted lib file on JVM exit.
                extractedLibFile.deleteOnExit();
                IOUtils.closeQuietly(reader);
                IOUtils.closeQuietly(writer);
            }
            // Set executable (x) flag to enable Java to load the native library
            extractedLibFile.setReadable(true);
            extractedLibFile.setWritable(true, true);
            extractedLibFile.setExecutable(true);
            // Check whether the contents are properly copied from the resource
            // folder
            {
                final InputStream nativeIn = clazz.getResourceAsStream(resourcePath);
                final InputStream extractedLibIn = new FileInputStream(extractedLibFile);
                try {
                    if (!IOUtils.contentEquals(nativeIn, extractedLibIn)) {
                        throw new RuntimeException(String.format("Failed to write a native library file at %s", extractedLibFile));
                    }
                } finally {
                    IOUtils.closeQuietly(nativeIn);
                    IOUtils.closeQuietly(extractedLibIn);
                }
            }
            return loadNativeLibrary(extractedLibFile);
        } catch (IOException e) {
            // Log through the class logger (with stack trace) instead of
            // writing only the message to System.err, so failures are visible
            // in the application's normal log output.
            log.error("Failed to extract native library '" + libraryFileName + "'", e);
            return false;
        }
    }

    /**
     * Loads a native library file via {@link System#load(String)}.
     *
     * @param libraryFile absolute or relative path of the native library file
     * @return true if the file exists and was loaded; false otherwise
     */
    private synchronized boolean loadNativeLibrary(final File libraryFile) {
        log.debug("Trying to load native library: " + libraryFile);
        if (libraryFile.exists()) {
            try {
                System.load(libraryFile.getAbsolutePath());
                return true;
            } catch (UnsatisfiedLinkError ex) {
                log.error("Error loading library", ex);
            }
        }
        return false;
    }

    /** Known CPU architectures; the value is the folder-name token. */
    public enum ARCH {
        X86("x86"),
        X86_64("x86_64"),
        IA64_32("ia64_32"),
        IA64("ia64"),
        PPC("ppc"),
        PPC64("ppc64"),
        ARM("arm"),
        ARMHF("armhf");

        private final String value;

        private ARCH(final String value) {
            this.value = value;
        }

        public final String getValue() {
            return this.value;
        }
    }

    /** Known operating systems; the value is the folder-name token. */
    public enum OS {
        AIX("AIX"),
        FREEBSD("FreeBSD"),
        LINUX("Linux"),
        MAC("Mac"),
        WINDOWS("Windows");

        private final String value;

        private OS(final String value) {
            this.value = value;
        }

        public final String getValue() {
            return this.value;
        }
    }
}
|
|
// Copyright 2014 Cloudera Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.cloudera.impala.analysis;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.cloudera.impala.analysis.AnalyticWindow.Boundary;
import com.cloudera.impala.analysis.AnalyticWindow.BoundaryType;
import com.cloudera.impala.catalog.AggregateFunction;
import com.cloudera.impala.catalog.Function;
import com.cloudera.impala.catalog.ScalarType;
import com.cloudera.impala.catalog.Type;
import com.cloudera.impala.common.AnalysisException;
import com.cloudera.impala.common.InternalException;
import com.cloudera.impala.common.TreeNode;
import com.cloudera.impala.service.FeSupport;
import com.cloudera.impala.thrift.TColumnValue;
import com.cloudera.impala.thrift.TExprNode;
import com.cloudera.impala.util.TColumnValueUtil;
import com.google.common.base.Joiner;
import com.google.common.base.Objects;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
/**
* Representation of an analytic function call with OVER clause.
* All "subexpressions" (such as the actual function call parameters as well as the
* partition/ordering exprs, etc.) are embedded as children in order to allow expr
* substitution:
* function call params: child 0 .. #params
* partition exprs: children #params + 1 .. #params + #partition-exprs
* ordering exprs:
* children #params + #partition-exprs + 1 ..
* #params + #partition-exprs + #order-by-elements
* exprs in windowing clause: remaining children
*
* Note that it's wrong to embed the FunctionCallExpr itself as a child,
* because in 'COUNT(..) OVER (..)' the 'COUNT(..)' is not part of a standard aggregate
* computation and must not be substituted as such. However, the parameters of the
* analytic function call might reference the output of an aggregate computation
* and need to be substituted as such; example: COUNT(COUNT(..)) OVER (..)
*/
public class AnalyticExpr extends Expr {
private final static Logger LOG = LoggerFactory.getLogger(AnalyticExpr.class);
// The analytic function call itself (e.g. rank(), count(x)); NOT a child expr,
// see the class comment for why.
private FunctionCallExpr fnCall_;
// PARTITION BY exprs; empty list when no partition clause was given.
private final List<Expr> partitionExprs_;
// These elements are modified to point to the corresponding child exprs to keep them
// in sync through expr substitutions.
private List<OrderByElement> orderByElements_ = Lists.newArrayList();
// Window clause (ROWS/RANGE ...); may be null.
private AnalyticWindow window_;
// If set, requires the window to be set to null in resetAnalysisState(). Required for
// proper substitution/cloning because standardization may set a window that is illegal
// in SQL, and hence, will fail analysis().
private boolean resetWindow_ = false;
// SQL string of this AnalyticExpr before standardization. Returned in toSqlImpl().
private String sqlString_;
// Names of the built-in analytic functions recognized by the helpers below.
// NOTE(review): these are never reassigned in the visible code and could be
// declared final — confirm no reassignment elsewhere in this class.
private static String LEAD = "lead";
private static String LAG = "lag";
private static String FIRSTVALUE = "first_value";
private static String LASTVALUE = "last_value";
private static String RANK = "rank";
private static String DENSERANK = "dense_rank";
private static String ROWNUMBER = "row_number";
private static String MIN = "min";
private static String MAX = "max";
private static String PERCENT_RANK = "percent_rank";
private static String CUME_DIST = "cume_dist";
private static String NTILE = "ntile";
// Internal function used to implement FIRST_VALUE with a window rewrite and
// additional null handling in the backend.
public static String FIRST_VALUE_REWRITE = "first_value_rewrite";
/**
* Creates an analytic expr from its parsed parts. partitionExprs,
* orderByElements and window may each be null (meaning absent); fnCall must
* not be null. Subexpressions are registered as children via setChildren().
*/
public AnalyticExpr(FunctionCallExpr fnCall, List<Expr> partitionExprs,
List<OrderByElement> orderByElements, AnalyticWindow window) {
Preconditions.checkNotNull(fnCall);
fnCall_ = fnCall;
partitionExprs_ = partitionExprs != null ? partitionExprs : new ArrayList<Expr>();
if (orderByElements != null) orderByElements_.addAll(orderByElements);
window_ = window;
setChildren();
}
/**
* clone() c'tor
*/
protected AnalyticExpr(AnalyticExpr other) {
super(other);
// Deep-copy all owned sub-structures so the clone can be mutated
// (substituted/standardized) independently of the original.
fnCall_ = (FunctionCallExpr) other.fnCall_.clone();
for (OrderByElement e: other.orderByElements_) {
orderByElements_.add(e.clone());
}
partitionExprs_ = Expr.cloneList(other.partitionExprs_);
window_ = (other.window_ != null ? other.window_.clone() : null);
resetWindow_ = other.resetWindow_;
sqlString_ = other.sqlString_;
setChildren();
}
// Simple accessors for the parsed parts of the analytic expression.
public FunctionCallExpr getFnCall() { return fnCall_; }
public List<Expr> getPartitionExprs() { return partitionExprs_; }
public List<OrderByElement> getOrderByElements() { return orderByElements_; }
public AnalyticWindow getWindow() { return window_; }
@Override
public boolean equals(Object obj) {
if (!super.equals(obj)) return false;
AnalyticExpr o = (AnalyticExpr)obj;
// Compare the function call, the (possibly null) window, and the order-by
// elements. Partition exprs are compared via the children in super.equals().
if (!fnCall_.equals(o.getFnCall())) return false;
if ((window_ == null) != (o.window_ == null)) return false;
if (window_ != null) {
if (!window_.equals(o.window_)) return false;
}
return orderByElements_.equals(o.orderByElements_);
}
/**
* Analytic exprs cannot be constant.
*/
@Override
public boolean isConstant() { return false; }
// Delegates to the copy constructor for a deep clone.
@Override
public Expr clone() { return new AnalyticExpr(this); }
@Override
public String toSqlImpl() {
// Prefer the pre-standardization SQL captured in sqlString_, so the user
// sees the query as written rather than the internally rewritten form.
if (sqlString_ != null) return sqlString_;
StringBuilder sb = new StringBuilder();
sb.append(fnCall_.toSql()).append(" OVER (");
// needsSpace tracks whether a separator is required before the next clause.
boolean needsSpace = false;
if (!partitionExprs_.isEmpty()) {
sb.append("PARTITION BY ").append(Expr.toSql(partitionExprs_));
needsSpace = true;
}
if (!orderByElements_.isEmpty()) {
List<String> orderByStrings = Lists.newArrayList();
for (OrderByElement e: orderByElements_) {
orderByStrings.add(e.toSql());
}
if (needsSpace) sb.append(" ");
sb.append("ORDER BY ").append(Joiner.on(", ").join(orderByStrings));
needsSpace = true;
}
if (window_ != null) {
if (needsSpace) sb.append(" ");
sb.append(window_.toSql());
}
sb.append(")");
return sb.toString();
}
@Override
public String debugString() {
return Objects.toStringHelper(this)
.add("fn", getFnCall())
.add("window", window_)
.addValue(super.debugString())
.toString();
}
@Override
protected void toThrift(TExprNode msg) {
// Intentionally empty — NOTE(review): presumably analytic exprs are not
// serialized directly to thrift here; confirm against the planner code.
}
// True if fn is an aggregate function usable in an analytic (OVER) context.
private static boolean isAnalyticFn(Function fn) {
return fn instanceof AggregateFunction
&& ((AggregateFunction) fn).isAnalyticFn();
}
// True if fn is an analytic function with the given name.
private static boolean isAnalyticFn(Function fn, String fnName) {
return isAnalyticFn(fn) && fn.functionName().equals(fnName);
}
public static boolean isAggregateFn(Function fn) {
return fn instanceof AggregateFunction
&& ((AggregateFunction) fn).isAggregateFn();
}
public static boolean isPercentRankFn(Function fn) {
return isAnalyticFn(fn, PERCENT_RANK);
}
public static boolean isCumeDistFn(Function fn) {
return isAnalyticFn(fn, CUME_DIST);
}
public static boolean isNtileFn(Function fn) {
return isAnalyticFn(fn, NTILE);
}
// lead()/lag() take an offset relative to the current row.
static private boolean isOffsetFn(Function fn) {
return isAnalyticFn(fn, LEAD) || isAnalyticFn(fn, LAG);
}
static private boolean isMinMax(Function fn) {
return isAnalyticFn(fn, MIN) || isAnalyticFn(fn, MAX);
}
static private boolean isRankingFn(Function fn) {
return isAnalyticFn(fn, RANK) || isAnalyticFn(fn, DENSERANK) ||
isAnalyticFn(fn, ROWNUMBER);
}
/**
* Rewrite the following analytic functions:
* percent_rank(), cume_dist() and ntile()
*
* Returns a new Expr if the analytic expr is rewritten, returns null if it's not one
* that we want to rewrite.
*/
public static Expr rewrite(AnalyticExpr analyticExpr) {
Function fn = analyticExpr.getFnCall().getFn();
if (AnalyticExpr.isPercentRankFn(fn)) {
return createPercentRank(analyticExpr);
} else if (AnalyticExpr.isCumeDistFn(fn)) {
return createCumeDist(analyticExpr);
} else if (AnalyticExpr.isNtileFn(fn)) {
return createNtile(analyticExpr);
}
return null;
}
/**
 * Rewrites percent_rank() as:
 *   (Count == 1) ? 0 : (Rank - 1) / (Count - 1)
 * where
 *   Rank  = rank()  over([partition by clause] order by clause)
 *   Count = count() over([partition by clause])
 */
private static Expr createPercentRank(AnalyticExpr analyticExpr) {
  Preconditions.checkState(
      AnalyticExpr.isPercentRankFn(analyticExpr.getFnCall().getFn()));
  NumericLiteral zero = new NumericLiteral(BigInteger.valueOf(0), ScalarType.BIGINT);
  NumericLiteral one = new NumericLiteral(BigInteger.valueOf(1), ScalarType.BIGINT);
  AnalyticExpr count = create("count", analyticExpr, false, false);
  AnalyticExpr rank = create("rank", analyticExpr, true, false);
  Expr rankMinusOne = new ArithmeticExpr(ArithmeticExpr.Operator.SUBTRACT, rank, one);
  Expr countMinusOne = new ArithmeticExpr(ArithmeticExpr.Operator.SUBTRACT, count, one);
  Expr ratio =
      new ArithmeticExpr(ArithmeticExpr.Operator.DIVIDE, rankMinusOne, countMinusOne);
  // if(Count == 1, 0, ratio): a single row per partition must yield 0, not 0/0.
  List<Expr> ifParams = Lists.newArrayList();
  ifParams.add(new BinaryPredicate(BinaryPredicate.Operator.EQ, one, count));
  ifParams.add(zero);
  ifParams.add(ratio);
  return new FunctionCallExpr("if", ifParams);
}
/**
 * Rewrites cume_dist() as:
 *   ((Count - Rank) + 1) / Count
 * where
 *   Rank  = rank()  over([partition by clause] order by clause DESC)
 *   Count = count() over([partition by clause])
 */
private static Expr createCumeDist(AnalyticExpr analyticExpr) {
  Preconditions.checkState(
      AnalyticExpr.isCumeDistFn(analyticExpr.getFnCall().getFn()));
  // Note the reversed order-by for the rank() term.
  AnalyticExpr rank = create("rank", analyticExpr, true, true);
  AnalyticExpr count = create("count", analyticExpr, false, false);
  NumericLiteral one = new NumericLiteral(BigInteger.valueOf(1), ScalarType.BIGINT);
  Expr countMinusRank =
      new ArithmeticExpr(ArithmeticExpr.Operator.SUBTRACT, count, rank);
  Expr numerator =
      new ArithmeticExpr(ArithmeticExpr.Operator.ADD, countMinusRank, one);
  return new ArithmeticExpr(ArithmeticExpr.Operator.DIVIDE, numerator, count);
}
/**
 * Rewrites ntile(B) as:
 *   floor(min(Count, B) * (RowNumber - 1) / Count) + 1
 * where
 *   RowNumber = row_number() over([partition by clause] order by clause)
 *   Count     = count() over([partition by clause])
 */
private static Expr createNtile(AnalyticExpr analyticExpr) {
  Preconditions.checkState(
      AnalyticExpr.isNtileFn(analyticExpr.getFnCall().getFn()));
  Expr bucketExpr = analyticExpr.getChild(0);
  AnalyticExpr rowNum = create("row_number", analyticExpr, true, false);
  AnalyticExpr count = create("count", analyticExpr, false, false);
  // min(Count, B) expressed as if(B < Count, B, Count).
  List<Expr> ifParams = Lists.newArrayList();
  ifParams.add(new BinaryPredicate(BinaryPredicate.Operator.LT, bucketExpr, count));
  ifParams.add(bucketExpr);
  ifParams.add(count);
  NumericLiteral one = new NumericLiteral(BigInteger.valueOf(1), ScalarType.BIGINT);
  Expr rowNumMinusOne =
      new ArithmeticExpr(ArithmeticExpr.Operator.SUBTRACT, rowNum, one);
  Expr product = new ArithmeticExpr(ArithmeticExpr.Operator.MULTIPLY,
      rowNumMinusOne, new FunctionCallExpr("if", ifParams));
  // INT_DIVIDE supplies the floor(); add 1 so buckets are numbered from 1.
  Expr quotient =
      new ArithmeticExpr(ArithmeticExpr.Operator.INT_DIVIDE, product, count);
  return new ArithmeticExpr(ArithmeticExpr.Operator.ADD, quotient, one);
}
/**
 * Builds a new AnalyticExpr around the function named {@code fnName}, cloning
 * the partition exprs of {@code referenceExpr}. The order-by elements are
 * cloned as well when {@code copyOrderBy} is set, and reversed when
 * {@code reverseOrderBy} is additionally set. No window is attached.
 */
private static AnalyticExpr create(String fnName,
    AnalyticExpr referenceExpr, boolean copyOrderBy, boolean reverseOrderBy) {
  FunctionCallExpr fnExpr = new FunctionCallExpr(fnName, new ArrayList<Expr>());
  fnExpr.setIsAnalyticFnCall(true);
  List<OrderByElement> obElems = null;
  if (copyOrderBy) {
    if (reverseOrderBy) {
      obElems = OrderByElement.reverse(referenceExpr.getOrderByElements());
    } else {
      obElems = Lists.newArrayList();
      for (OrderByElement elem : referenceExpr.getOrderByElements()) {
        obElems.add(elem.clone());
      }
    }
  }
  return new AnalyticExpr(fnExpr,
      Expr.cloneList(referenceExpr.getPartitionExprs()), obElems, null);
}
/**
 * Validates the value expr of an offset boundary of a RANGE window: there must
 * be exactly one ordering expr and the boundary value must be implicitly
 * castable to its type.
 *
 * @throws AnalysisException if either condition is violated.
 */
private void checkRangeOffsetBoundaryExpr(AnalyticWindow.Boundary boundary)
    throws AnalysisException {
  Preconditions.checkState(boundary.getType().isOffset());
  if (orderByElements_.size() > 1) {
    throw new AnalysisException("Only one ORDER BY expression allowed if used with "
        + "a RANGE window with PRECEDING/FOLLOWING: " + toSql());
  }
  Expr rangeExpr = boundary.getExpr();
  Type orderByType = orderByElements_.get(0).getExpr().getType();
  if (!Type.isImplicitlyCastable(rangeExpr.getType(), orderByType, false)) {
    throw new AnalysisException(
        "The value expression of a PRECEDING/FOLLOWING clause of a RANGE window must "
        + "be implicitly convertable to the ORDER BY expression's type: "
        + rangeExpr.toSql() + " cannot be implicitly converted to "
        + orderByType.toSql());
  }
}
/**
 * Validates the offset argument of lead()/lag(): it must be a constant that
 * evaluates to a positive number.
 *
 * @throws AnalysisException if the offset is non-constant, non-positive, or
 *     cannot be evaluated.
 */
void checkOffset(Analyzer analyzer) throws AnalysisException {
  Preconditions.checkState(isOffsetFn(getFnCall().getFn()));
  Preconditions.checkState(getFnCall().getChildren().size() > 1);
  Expr offset = getFnCall().getChild(1);
  Preconditions.checkState(offset.getType().isIntegerType());
  boolean isPositiveConstant = offset.isConstant();
  if (isPositiveConstant) {
    try {
      TColumnValue val = FeSupport.EvalConstExpr(offset, analyzer.getQueryCtx());
      if (TColumnValueUtil.getNumericVal(val) <= 0) isPositiveConstant = false;
    } catch (InternalException exc) {
      throw new AnalysisException(
          "Couldn't evaluate LEAD/LAG offset: " + exc.getMessage());
    }
  }
  if (!isPositiveConstant) {
    throw new AnalysisException(
        "The offset parameter of LEAD/LAG must be a constant positive integer: "
        + getFnCall().toSql());
  }
}
/**
 * Analyzes the function call, partition exprs, order-by elements and the
 * window, enforcing all composition rules for analytic exprs, then rewrites
 * the expr into the backend's canonical form via standardize().
 *
 * @throws AnalysisException on any rule violation (constant/complex partition
 *     or ordering exprs, DISTINCT, non-analytic function, illegal window,
 *     bad lead()/lag()/ntile() arguments, nested analytic exprs).
 */
@Override
public void analyze(Analyzer analyzer) throws AnalysisException {
  if (isAnalyzed_) return;
  fnCall_.analyze(analyzer);
  super.analyze(analyzer);
  type_ = getFnCall().getType();
  // Partition-by exprs must be non-constant and of a non-complex type.
  for (Expr e: partitionExprs_) {
    if (e.isConstant()) {
      throw new AnalysisException(
          "Expressions in the PARTITION BY clause must not be constant: "
          + e.toSql() + " (in " + toSql() + ")");
    } else if (e.getType().isComplexType()) {
      throw new AnalysisException(String.format("PARTITION BY expression '%s' with " +
          "complex type '%s' is not supported.", e.toSql(),
          e.getType().toSql()));
    }
  }
  // Same restrictions for the order-by exprs.
  for (OrderByElement e: orderByElements_) {
    if (e.getExpr().isConstant()) {
      throw new AnalysisException(
          "Expressions in the ORDER BY clause must not be constant: "
          + e.getExpr().toSql() + " (in " + toSql() + ")");
    } else if (e.getExpr().getType().isComplexType()) {
      throw new AnalysisException(String.format("ORDER BY expression '%s' with " +
          "complex type '%s' is not supported.", e.getExpr().toSql(),
          e.getExpr().getType().toSql()));
    }
  }
  if (getFnCall().getParams().isDistinct()) {
    throw new AnalysisException(
        "DISTINCT not allowed in analytic function: " + getFnCall().toSql());
  }
  // check for correct composition of analytic expr
  Function fn = getFnCall().getFn();
  if (!(fn instanceof AggregateFunction)) {
    throw new AnalysisException(
        "OVER clause requires aggregate or analytic function: "
        + getFnCall().toSql());
  }
  // check for non-analytic aggregate functions
  if (!isAnalyticFn(fn)) {
    throw new AnalysisException(
        String.format("Aggregate function '%s' not supported with OVER clause.",
            getFnCall().toSql()));
  }
  // Purely-analytic functions (rank, lead, ntile, ...) have extra argument and
  // clause requirements; aggregate-capable analytics (sum, min, ...) do not.
  if (isAnalyticFn(fn) && !isAggregateFn(fn)) {
    if (orderByElements_.isEmpty()) {
      throw new AnalysisException(
          "'" + getFnCall().toSql() + "' requires an ORDER BY clause");
    }
    if ((isRankingFn(fn) || isOffsetFn(fn)) && window_ != null) {
      throw new AnalysisException(
          "Windowing clause not allowed with '" + getFnCall().toSql() + "'");
    }
    if (isOffsetFn(fn) && getFnCall().getChildren().size() > 1) {
      checkOffset(analyzer);
      // check the default, which needs to be a constant at the moment
      // TODO: remove this check when the backend can handle non-constants
      if (getFnCall().getChildren().size() > 2) {
        if (!getFnCall().getChild(2).isConstant()) {
          throw new AnalysisException(
              "The default parameter (parameter 3) of LEAD/LAG must be a constant: "
              + getFnCall().toSql());
        }
      }
    }
    if (isNtileFn(fn)) {
      // TODO: IMPALA-2171:Remove this when ntile() can handle a non-constant argument.
      if (!getFnCall().getChild(0).isConstant()) {
        throw new AnalysisException("NTILE() requires a constant argument");
      }
      // Check if argument value is zero or negative and throw an exception if found.
      try {
        TColumnValue bucketValue =
            FeSupport.EvalConstExpr(getFnCall().getChild(0), analyzer.getQueryCtx());
        Long arg = bucketValue.getLong_val();
        if (arg <= 0) {
          throw new AnalysisException("NTILE() requires a positive argument: " + arg);
        }
      } catch (InternalException e) {
        throw new AnalysisException(e.toString());
      }
    }
  }
  if (window_ != null) {
    if (orderByElements_.isEmpty()) {
      throw new AnalysisException("Windowing clause requires ORDER BY clause: "
          + toSql());
    }
    window_.analyze(analyzer);
    if (!orderByElements_.isEmpty()
        && window_.getType() == AnalyticWindow.Type.RANGE) {
      // check that preceding/following ranges match ordering
      if (window_.getLeftBoundary().getType().isOffset()) {
        checkRangeOffsetBoundaryExpr(window_.getLeftBoundary());
      }
      if (window_.getRightBoundary() != null
          && window_.getRightBoundary().getType().isOffset()) {
        checkRangeOffsetBoundaryExpr(window_.getRightBoundary());
      }
    }
  }
  // check nesting
  if (TreeNode.contains(getChildren(), AnalyticExpr.class)) {
    throw new AnalysisException(
        "Nesting of analytic expressions is not allowed: " + toSql());
  }
  // Capture the SQL string before standardize() rewrites fn/window in place.
  sqlString_ = toSql();
  standardize(analyzer);
  // min/max is not currently supported on sliding windows (i.e. start bound is not
  // unbounded).
  if (window_ != null && isMinMax(fn) &&
      window_.getLeftBoundary().getType() != BoundaryType.UNBOUNDED_PRECEDING) {
    throw new AnalysisException(
        "'" + getFnCall().toSql() + "' is only supported with an "
        + "UNBOUNDED PRECEDING start bound.");
  }
  setChildren();
}
/**
* If necessary, rewrites the analytic function, window, and/or order-by elements into
* a standard format for the purpose of simpler backend execution, as follows:
* 1. row_number():
* Set a window from UNBOUNDED PRECEDING to CURRENT_ROW.
* 2. lead()/lag():
* Explicitly set the default arguments to for BE simplicity.
* Set a window for lead(): UNBOUNDED PRECEDING to OFFSET FOLLOWING.
* Set a window for lag(): UNBOUNDED PRECEDING to OFFSET PRECEDING.
* 3. UNBOUNDED FOLLOWING windows:
* Reverse the ordering and window if the start bound is not UNBOUNDED PRECEDING.
* Flip first_value() and last_value().
* 4. first_value():
* Set the upper boundary to CURRENT_ROW if the lower boundary is
* UNBOUNDED_PRECEDING.
* 5. Explicitly set the default window if no window was given but there
* are order-by elements.
* 6. FIRST_VALUE without UNBOUNDED PRECEDING gets rewritten to use a different window
* and change the function to return the last value. We either set the fn to be
* 'last_value' or 'first_value_rewrite', which simply wraps the 'last_value'
* implementation but allows us to handle the first rows in a partition in a special
* way in the backend. There are a few cases:
* a) Start bound is X FOLLOWING or CURRENT ROW (X=0):
* Use 'last_value' with a window where both bounds are X FOLLOWING (or
* CURRENT ROW). Setting the start bound to X following is necessary because the
* X rows at the end of a partition have no rows in their window. Note that X
* FOLLOWING could be rewritten as lead(X) but that would not work for CURRENT
* ROW.
* b) Start bound is X PRECEDING and end bound is CURRENT ROW or FOLLOWING:
* Use 'first_value_rewrite' and a window with an end bound X PRECEDING. An
* extra parameter '-1' is added to indicate to the backend that NULLs should
* not be added for the first X rows.
* c) Start bound is X PRECEDING and end bound is Y PRECEDING:
* Use 'first_value_rewrite' and a window with an end bound X PRECEDING. The
* first Y rows in a partition have empty windows and should be NULL. An extra
* parameter with the integer constant Y is added to indicate to the backend
* that NULLs should be added for the first Y rows.
*/
// See the method comment above for the full list of rewrites (cases 1-6).
private void standardize(Analyzer analyzer) {
  FunctionName analyticFnName = getFnCall().getFnName();
  // Set a window from UNBOUNDED PRECEDING to CURRENT_ROW for row_number().
  if (analyticFnName.getFunction().equals(ROWNUMBER)) {
    Preconditions.checkState(window_ == null, "Unexpected window set for row_numer()");
    window_ = new AnalyticWindow(AnalyticWindow.Type.ROWS,
        new Boundary(BoundaryType.UNBOUNDED_PRECEDING, null),
        new Boundary(BoundaryType.CURRENT_ROW, null));
    resetWindow_ = true;
    return;
  }
  // Explicitly set the default arguments to lead()/lag() for BE simplicity.
  // Set a window for lead(): UNBOUNDED PRECEDING to OFFSET FOLLOWING,
  // Set a window for lag(): UNBOUNDED PRECEDING to OFFSET PRECEDING.
  if (isOffsetFn(getFnCall().getFn())) {
    Preconditions.checkState(window_ == null);
    // If necessary, create a new fn call with the default args explicitly set.
    List<Expr> newExprParams = null;
    if (getFnCall().getChildren().size() == 1) {
      newExprParams = Lists.newArrayListWithExpectedSize(3);
      newExprParams.addAll(getFnCall().getChildren());
      // Default offset is 1.
      newExprParams.add(new NumericLiteral(BigDecimal.valueOf(1)));
      // Default default value is NULL.
      newExprParams.add(new NullLiteral());
    } else if (getFnCall().getChildren().size() == 2) {
      newExprParams = Lists.newArrayListWithExpectedSize(3);
      newExprParams.addAll(getFnCall().getChildren());
      // Default default value is NULL.
      newExprParams.add(new NullLiteral());
    } else {
      Preconditions.checkState(getFnCall().getChildren().size() == 3);
    }
    if (newExprParams != null) {
      fnCall_ = new FunctionCallExpr(getFnCall().getFnName(),
          new FunctionParams(newExprParams));
      fnCall_.setIsAnalyticFnCall(true);
      fnCall_.analyzeNoThrow(analyzer);
    }
    // Set the window.
    BoundaryType rightBoundaryType = BoundaryType.FOLLOWING;
    if (analyticFnName.getFunction().equals(LAG)) {
      rightBoundaryType = BoundaryType.PRECEDING;
    }
    window_ = new AnalyticWindow(AnalyticWindow.Type.ROWS,
        new Boundary(BoundaryType.UNBOUNDED_PRECEDING, null),
        new Boundary(rightBoundaryType, getOffsetExpr(getFnCall())));
    try {
      window_.analyze(analyzer);
    } catch (AnalysisException e) {
      // The window was constructed here from already-validated parts, so a
      // failure at this point is a programming error, not a user error.
      throw new IllegalStateException(e);
    }
    resetWindow_ = true;
    return;
  }
  // Case 6 from the method comment: first_value() with a start bound other
  // than UNBOUNDED PRECEDING becomes last_value()/first_value_rewrite().
  if (analyticFnName.getFunction().equals(FIRSTVALUE)
      && window_ != null
      && window_.getLeftBoundary().getType() != BoundaryType.UNBOUNDED_PRECEDING) {
    if (window_.getLeftBoundary().getType() != BoundaryType.PRECEDING) {
      // Case 6a: start bound is X FOLLOWING or CURRENT ROW.
      window_ = new AnalyticWindow(window_.getType(), window_.getLeftBoundary(),
          window_.getLeftBoundary());
      fnCall_ = new FunctionCallExpr(new FunctionName("last_value"),
          getFnCall().getParams());
    } else {
      // Cases 6b/6c: start bound is X PRECEDING.
      List<Expr> paramExprs = Expr.cloneList(getFnCall().getParams().exprs());
      if (window_.getRightBoundary().getType() == BoundaryType.PRECEDING) {
        // The number of rows preceding for the end bound determines the number of
        // rows at the beginning of each partition that should have a NULL value.
        paramExprs.add(new NumericLiteral(window_.getRightBoundary().getOffsetValue(),
            Type.BIGINT));
      } else {
        // -1 indicates that no NULL values are inserted even though we set the end
        // bound to the start bound (which is PRECEDING) below; this is different from
        // the default behavior of windows with an end bound PRECEDING.
        paramExprs.add(new NumericLiteral(BigInteger.valueOf(-1), Type.BIGINT));
      }
      window_ = new AnalyticWindow(window_.getType(),
          new Boundary(BoundaryType.UNBOUNDED_PRECEDING, null),
          window_.getLeftBoundary());
      fnCall_ = new FunctionCallExpr(new FunctionName("first_value_rewrite"),
          new FunctionParams(paramExprs));
      fnCall_.setIsInternalFnCall(true);
    }
    fnCall_.setIsAnalyticFnCall(true);
    fnCall_.analyzeNoThrow(analyzer);
    // Use getType() instead if getReturnType() because wildcard decimals
    // have only been resolved in the former.
    type_ = fnCall_.getType();
    analyticFnName = getFnCall().getFnName();
  }
  // Reverse the ordering and window for windows ending with UNBOUNDED FOLLOWING,
  // and and not starting with UNBOUNDED PRECEDING.
  if (window_ != null
      && window_.getRightBoundary().getType() == BoundaryType.UNBOUNDED_FOLLOWING
      && window_.getLeftBoundary().getType() != BoundaryType.UNBOUNDED_PRECEDING) {
    orderByElements_ = OrderByElement.reverse(orderByElements_);
    window_ = window_.reverse();
    // Also flip first_value()/last_value(). For other analytic functions there is no
    // need to also change the function.
    FunctionName reversedFnName = null;
    if (analyticFnName.getFunction().equals(FIRSTVALUE)) {
      reversedFnName = new FunctionName(LASTVALUE);
    } else if (analyticFnName.getFunction().equals(LASTVALUE)) {
      reversedFnName = new FunctionName(FIRSTVALUE);
    }
    if (reversedFnName != null) {
      fnCall_ = new FunctionCallExpr(reversedFnName, getFnCall().getParams());
      fnCall_.setIsAnalyticFnCall(true);
      fnCall_.analyzeNoThrow(analyzer);
    }
    analyticFnName = getFnCall().getFnName();
  }
  // Set the upper boundary to CURRENT_ROW for first_value() if the lower boundary
  // is UNBOUNDED_PRECEDING.
  if (window_ != null
      && window_.getLeftBoundary().getType() == BoundaryType.UNBOUNDED_PRECEDING
      && window_.getRightBoundary().getType() != BoundaryType.PRECEDING
      && analyticFnName.getFunction().equals(FIRSTVALUE)) {
    window_.setRightBoundary(new Boundary(BoundaryType.CURRENT_ROW, null));
  }
  // Set the default window.
  if (!orderByElements_.isEmpty() && window_ == null) {
    window_ = AnalyticWindow.DEFAULT_WINDOW;
    resetWindow_ = true;
  }
}
/**
 * Returns the offset argument of a lead()/lag() call, or a literal 1 when no
 * explicit offset was given.
 */
private Expr getOffsetExpr(FunctionCallExpr offsetFnCall) {
  Preconditions.checkState(isOffsetFn(getFnCall().getFn()));
  Expr explicitOffset = offsetFnCall.getChild(1);
  if (explicitOffset != null) return explicitOffset;
  // The default offset is 1.
  return new NumericLiteral(BigDecimal.valueOf(1));
}
/**
 * Re-points fnCall_, partitionExprs_ and orderByElements_ at the current
 * children_, which are laid out as [fn args][partition exprs][order-by exprs].
 */
private void syncWithChildren() {
  final int numArgs = fnCall_.getChildren().size();
  final int numPartitionExprs = partitionExprs_.size();
  for (int i = 0; i < numArgs; ++i) {
    fnCall_.setChild(i, getChild(i));
  }
  for (int i = 0; i < numPartitionExprs; ++i) {
    partitionExprs_.set(i, getChild(numArgs + i));
  }
  final int numOrderByElems = orderByElements_.size();
  for (int i = 0; i < numOrderByElems; ++i) {
    orderByElements_.get(i).setExpr(getChild(numArgs + numPartitionExprs + i));
  }
}
/**
 * Rebuilds children_ from fnCall_, partitionExprs_, orderByElements_ and the
 * window boundary exprs, in that order.
 */
private void setChildren() {
  getChildren().clear();
  addChildren(fnCall_.getChildren());
  addChildren(partitionExprs_);
  for (OrderByElement e : orderByElements_) {
    addChild(e.getExpr());
  }
  if (window_ == null) return;
  Expr leftBoundaryExpr = window_.getLeftBoundary().getExpr();
  if (leftBoundaryExpr != null) addChild(leftBoundaryExpr);
  if (window_.getRightBoundary() != null
      && window_.getRightBoundary().getExpr() != null) {
    addChild(window_.getRightBoundary().getExpr());
  }
}
/** Resets analysis state, dropping any window that standardize() installed. */
@Override
protected void resetAnalysisState() {
  super.resetAnalysisState();
  fnCall_.resetAnalysisState();
  if (resetWindow_) {
    window_ = null;
  }
  resetWindow_ = false;
  // sync with children, now that they've been reset
  syncWithChildren();
}
/** Substitutes children, then re-syncs internal state if still an AnalyticExpr. */
@Override
protected Expr substituteImpl(ExprSubstitutionMap smap, Analyzer analyzer)
    throws AnalysisException {
  Expr substituted = super.substituteImpl(smap, analyzer);
  if (substituted instanceof AnalyticExpr) {
    // Re-sync state after possible child substitution.
    ((AnalyticExpr) substituted).syncWithChildren();
  }
  return substituted;
}
}
|
|
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl;
import com.intellij.formatting.FormatterEx;
import com.intellij.formatting.FormatterImpl;
import com.intellij.ide.startup.CacheUpdater;
import com.intellij.ide.startup.FileContent;
import com.intellij.ide.startup.FileSystemSynchronizer;
import com.intellij.ide.startup.StartupManagerEx;
import com.intellij.lang.PsiBuilderFactory;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.ProjectComponent;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileTypes.FileTypeManager;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleUtil;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ex.ProjectRootManagerEx;
import com.intellij.openapi.startup.StartupManager;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.NotNullLazyValue;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileFilter;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import static com.intellij.psi.impl.PsiTreeChangeEventImpl.PsiEventType.*;
import com.intellij.psi.impl.cache.CacheManager;
import com.intellij.psi.impl.cache.impl.CacheUtil;
import com.intellij.psi.impl.cache.impl.CompositeCacheManager;
import com.intellij.psi.impl.cache.impl.IndexCacheManagerImpl;
import com.intellij.psi.impl.file.impl.FileManager;
import com.intellij.psi.impl.file.impl.FileManagerImpl;
import com.intellij.psi.impl.search.PsiSearchHelperImpl;
import com.intellij.psi.impl.source.PostprocessReformattingAspect;
import com.intellij.psi.impl.source.PsiFileImpl;
import com.intellij.psi.impl.source.resolve.ResolveCache;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageManagerImpl;
import com.intellij.psi.search.PsiSearchHelper;
import com.intellij.psi.util.CachedValuesManager;
import com.intellij.psi.util.PsiModificationTracker;
import com.intellij.testFramework.LightVirtualFile;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.ThrowableRunnable;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
public class PsiManagerImpl extends PsiManagerEx implements ProjectComponent {
private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.PsiManagerImpl");
// Core per-project services wired up in the constructor.
private final Project myProject;
private final FileManager myFileManager;
private final PsiSearchHelperImpl mySearchHelper;
private final CacheManager myCacheManager;
private final PsiModificationTrackerImpl myModificationTracker;
private final ResolveCache myResolveCache;
// Lazy because CachedValuesManager itself is looked up from the project.
private final NotNullLazyValue<CachedValuesManager> myCachedValuesManager = new NotNullLazyValue<CachedValuesManager>() {
  @NotNull
  @Override
  protected CachedValuesManager compute() {
    return CachedValuesManager.getManager(myProject);
  }
};
// Copy-on-write lists: iterated on every PSI change, mutated rarely.
private final List<PsiTreeChangePreprocessor> myTreeChangePreprocessors = ContainerUtil.createEmptyCOWList();
private final List<PsiTreeChangeListener> myTreeChangeListeners = ContainerUtil.createEmptyCOWList();
private boolean myTreeChangeEventIsFiring = false;
private final List<Runnable> myRunnablesOnChange = ContainerUtil.createEmptyCOWList();
private final List<WeakReference<Runnable>> myWeakRunnablesOnChange = ContainerUtil.createEmptyCOWList();
private final List<Runnable> myRunnablesOnAnyChange = ContainerUtil.createEmptyCOWList();
private final List<Runnable> myRunnablesAfterAnyChange = ContainerUtil.createEmptyCOWList();
// Set by disposeComponent(); checked by methods that must not run afterwards.
private boolean myIsDisposed;
private VirtualFileFilter myAssertOnFileLoadingFilter = VirtualFileFilter.NONE;
private final AtomicInteger myBatchFilesProcessingModeCount = new AtomicInteger(0);
private static final Key<PsiFile> CACHED_PSI_FILE_COPY_IN_FILECONTENT = Key.create("CACHED_PSI_FILE_COPY_IN_FILECONTENT");
private final List<LanguageInjector> myLanguageInjectors = ContainerUtil.createEmptyCOWList();
private final ProgressManager myProgressManager;
/**
 * Wires up the PSI subsystem for a project: file manager, search helper,
 * composite cache manager, modification tracker and resolve cache. For the
 * default project, lightweight empty implementations are used instead.
 */
public PsiManagerImpl(Project project,
    final ProjectRootManagerEx projectRootManagerEx,
    StartupManager startupManager,
    FileTypeManager fileTypeManager,
    FileDocumentManager fileDocumentManager,
    PsiBuilderFactory psiBuilderFactory) {
  myProject = project;
  //We need to initialize PsiBuilderFactory service so it won't initialize under PsiLock from ChameleonTransform
  @SuppressWarnings({"UnusedDeclaration", "UnnecessaryLocalVariable"}) Object used = psiBuilderFactory;
  boolean isProjectDefault = project.isDefault();
  myFileManager = isProjectDefault ? new EmptyFileManager(this) : new FileManagerImpl(this, fileTypeManager, fileDocumentManager,
      projectRootManagerEx);
  mySearchHelper = new PsiSearchHelperImpl(this);
  // Combine the built-in cache manager with any project-registered ones.
  final CompositeCacheManager cacheManager = new CompositeCacheManager();
  if (isProjectDefault) {
    cacheManager.addCacheManager(new EmptyCacheManager());
  }
  else {
    cacheManager.addCacheManager(new IndexCacheManagerImpl(this));
  }
  final CacheManager[] managers = myProject.getComponents(CacheManager.class);
  for (CacheManager manager : managers) {
    cacheManager.addCacheManager(manager);
  }
  myCacheManager = cacheManager;
  myModificationTracker = new PsiModificationTrackerImpl(myProject);
  myTreeChangePreprocessors.add(myModificationTracker);
  myResolveCache = new ResolveCache(this);
  // startupManager may be null (e.g. for the default project) — see null check.
  if (startupManager != null) {
    ((StartupManagerEx)startupManager).registerPreStartupActivity(
        new Runnable() {
          public void run() {
            runPreStartupActivity();
          }
        }
    );
  }
  myProgressManager = ProgressManager.getInstance();
}
// Intentionally empty: all initialization happens in the constructor and in
// the pre-startup activity registered there.
public void initComponent() {
}
// Tears down the file and cache managers, then marks this manager disposed so
// later calls (e.g. getCacheManager(), invalidateFile()) can log an error.
public void disposeComponent() {
  myFileManager.dispose();
  myCacheManager.dispose();
  myIsDisposed = true;
}
/** @return true once disposeComponent() has run */
public boolean isDisposed() {
  return myIsDisposed;
}
// Clears the resolve cache and notifies both physical and non-physical
// change listeners so dependent caches are invalidated too.
public void dropResolveCaches() {
  myResolveCache.clearCache();
  physicalChange();
  nonPhysicalChange();
}
/**
 * Returns whether the element belongs to the project content.
 *
 * Membership is decided by whether a module can be found for the element's
 * virtual file; directory containers are in the project only if every one of
 * their directories is.
 */
public boolean isInProject(@NotNull PsiElement element) {
  PsiFile file = element.getContainingFile();
  // NOTE(review): physical PsiFileImpl backed by a LightVirtualFile is treated
  // as in-project unconditionally — presumably for in-memory editor copies;
  // confirm against callers before relying on this.
  if (file instanceof PsiFileImpl && file.isPhysical() && file.getViewProvider().getVirtualFile() instanceof LightVirtualFile) return true;
  if (element instanceof PsiDirectoryContainer) {
    PsiDirectory[] dirs = ((PsiDirectoryContainer) element).getDirectories();
    for (PsiDirectory dir : dirs) {
      if (!isInProject(dir)) return false;
    }
    return true;
  }
  // Resolve a virtual file either from the containing file or from the
  // element itself when it is a file-system item (file/directory).
  VirtualFile virtualFile = null;
  if (file != null) {
    virtualFile = file.getViewProvider().getVirtualFile();
  }
  else if (element instanceof PsiFileSystemItem) {
    virtualFile = ((PsiFileSystemItem)element).getVirtualFile();
  }
  if (virtualFile != null) {
    Module module = ModuleUtil.findModuleForFile(virtualFile, element.getProject());
    return module != null;
  }
  return false;
}
/**
 * Runs {@code r} with the core formatter and postprocess reformatting both
 * disabled; the formatter is re-enabled in a finally block.
 */
public void performActionWithFormatterDisabled(final Runnable r) {
  final PostprocessReformattingAspect aspect = getProject().getComponent(PostprocessReformattingAspect.class);
  try {
    ((FormatterImpl)FormatterEx.getInstance()).disableFormatting();
    aspect.disablePostprocessFormattingInside(new Computable<Object>() {
      public Object compute() {
        r.run();
        return null;
      }
    });
  }
  finally {
    ((FormatterImpl)FormatterEx.getInstance()).enableFormatting();
  }
}
/**
 * Runs {@code r} with the formatter disabled, rethrowing any throwable it
 * raised. The throwable is smuggled out of the Computable via a one-element
 * array because the inner compute() cannot throw checked exceptions.
 */
public <T extends Throwable> void performActionWithFormatterDisabled(final ThrowableRunnable<T> r) throws T {
  final Throwable[] throwable = new Throwable[1];
  final PostprocessReformattingAspect component = getProject().getComponent(PostprocessReformattingAspect.class);
  try {
    ((FormatterImpl)FormatterEx.getInstance()).disableFormatting();
    component.disablePostprocessFormattingInside(new Computable<Object>() {
      public Object compute() { try { r.run(); } catch (Throwable t) { throwable[0] = t; } return null; }
    });
  }
  finally {
    ((FormatterImpl)FormatterEx.getInstance()).enableFormatting();
  }
  // Unchecked cast is safe by contract: r can only throw T (or unchecked).
  if (throwable[0] != null) //noinspection unchecked
    throw (T)throwable[0];
}
/**
 * Computes {@code r} with the core formatter and postprocess reformatting
 * disabled and returns its result.
 *
 * <p>Fix: the component lookup is hoisted out of the try block (matching the
 * other two overloads). Previously, if the lookup threw before
 * disableFormatting() ran, the finally clause still called enableFormatting(),
 * unbalancing the formatter's disable/enable pairing.
 */
public <T> T performActionWithFormatterDisabled(Computable<T> r) {
  final PostprocessReformattingAspect component = PostprocessReformattingAspect.getInstance(getProject());
  try {
    ((FormatterImpl)FormatterEx.getInstance()).disableFormatting();
    return component.disablePostprocessFormattingInside(r);
  }
  finally {
    ((FormatterImpl)FormatterEx.getInstance()).enableFormatting();
  }
}
/** @return the live (copy-on-write) list of registered language injectors */
@NotNull
public List<? extends LanguageInjector> getLanguageInjectors() {
  return myLanguageInjectors;
}
/** Registers an injector and notifies the injected-language manager. */
public void registerLanguageInjector(@NotNull LanguageInjector injector) {
  myLanguageInjectors.add(injector);
  InjectedLanguageManagerImpl.getInstanceImpl(myProject).psiManagerInjectorsChanged();
}
/** Registers an injector and auto-unregisters it when the parent is disposed. */
public void registerLanguageInjector(@NotNull final LanguageInjector injector, Disposable parentDisposable) {
  registerLanguageInjector(injector);
  Disposer.register(parentDisposable, new Disposable() {
    public void dispose() {
      unregisterLanguageInjector(injector);
    }
  });
}
/** Removes an injector and notifies the injected-language manager. */
public void unregisterLanguageInjector(@NotNull LanguageInjector injector) {
  myLanguageInjectors.remove(injector);
  InjectedLanguageManagerImpl.getInstanceImpl(myProject).psiManagerInjectorsChanged();
}
/** Delegates to PostprocessReformattingAspect to defer formatting inside {@code runnable}. */
public void postponeAutoFormattingInside(Runnable runnable) {
  PostprocessReformattingAspect.getInstance(getProject()).postponeFormattingInside(runnable);
}
// Intentionally empty: no work is tied to project close.
public void projectClosed() {
}
// Intentionally empty: startup work runs via the pre-startup activity
// registered in the constructor, not on project open.
public void projectOpened() {
}
/**
 * Pre-startup hook: starts the file manager, initializes caches and, when the
 * repository is enabled, registers the cache updaters with the file-system
 * synchronizer.
 */
private void runPreStartupActivity() {
  if (LOG.isDebugEnabled()) {
    LOG.debug("PsiManager.runPreStartupActivity()");
  }
  myFileManager.runStartupActivity();
  myCacheManager.initialize();
  StartupManagerEx startupManager = StartupManagerEx.getInstanceEx(myProject);
  if (startupManager == null) return;
  FileSystemSynchronizer synchronizer = startupManager.getFileSystemSynchronizer();
  if (!PsiManagerConfiguration.getInstance().REPOSITORY_ENABLED) return;
  for (CacheUpdater updater : myCacheManager.getCacheUpdaters()) {
    synchronizer.registerCacheUpdater(updater);
  }
}
// Installs the filter consulted by isAssertOnFileLoading().
public void setAssertOnFileLoadingFilter(VirtualFileFilter filter) {
  // TODO: find something to ensure there are no changed files waiting to be
  // processed in repository indices before the filter takes effect.
  myAssertOnFileLoadingFilter = filter;
}
/** @return true when the installed filter accepts {@code file} */
public boolean isAssertOnFileLoading(VirtualFile file) {
  return myAssertOnFileLoadingFilter.accept(file);
}
/** @return the project this PSI manager serves */
@NotNull
public Project getProject() {
  return myProject;
}
/** @return the file manager created in the constructor */
public FileManager getFileManager() {
  return myFileManager;
}
/**
 * Returns the cache manager; logs an error (but still returns it) when the
 * project has already been disposed.
 */
public CacheManager getCacheManager() {
  if (myIsDisposed) LOG.error("Project is already disposed.");
  return myCacheManager;
}
/** @return the project's CodeStyleManager service */
@NotNull
public CodeStyleManager getCodeStyleManager() {
  return CodeStyleManager.getInstance(myProject);
}
/** @return the resolve cache; also gives cancellation a chance to fire */
public ResolveCache getResolveCache() {
  myProgressManager.checkCanceled(); // We hope this method is being called often enough to cancel daemon processes smoothly
  return myResolveCache;
}
/**
 * Two elements are equivalent when they are identical, equal, or either one
 * declares itself equivalent to the other. Null never matches a non-null.
 */
public boolean areElementsEquivalent(PsiElement element1, PsiElement element2) {
  myProgressManager.checkCanceled(); // We hope this method is being called often enough to cancel daemon processes smoothly
  if (element1 == element2) return true;
  if (element1 == null || element2 == null) return false;
  if (element1.equals(element2)) return true;
  return element1.isEquivalentTo(element2) || element2.isEquivalentTo(element1);
}
/** Delegates to the file manager to locate the PsiFile for {@code file}. */
public PsiFile findFile(@NotNull VirtualFile file) {
  return myFileManager.findFile(file);
}
/** Delegates to the file manager; may return null. */
@Nullable
public FileViewProvider findViewProvider(@NotNull VirtualFile file) {
  return myFileManager.findViewProvider(file);
}
// Test-only hook; currently just asserts we are in unit-test mode.
@TestOnly
public void cleanupForNextTest() {
  // File-manager cleanup is disabled; re-enable if per-test isolation is needed.
  //myFileManager.cleanupForNextTest();
  LOG.assertTrue(ApplicationManager.getApplication().isUnitTestMode());
}
/**
 * Returns a PsiFile for the given FileContent, caching the result in the
 * content's user data. Falls back from the cached PSI to findFile(), making a
 * file copy when only a fresh lookup succeeded. Returns null when no PSI file
 * exists for the underlying virtual file.
 */
@Nullable
public PsiFile getFile(FileContent content) {
  PsiFile result = content.getUserData(CACHED_PSI_FILE_COPY_IN_FILECONTENT);
  if (result == null) {
    final VirtualFile vFile = content.getVirtualFile();
    result = myFileManager.getCachedPsiFile(vFile);
    if (result == null) {
      result = findFile(vFile);
      if (result == null) return null;
      result = CacheUtil.createFileCopy(content, result);
    }
    content.putUserData(CACHED_PSI_FILE_COPY_IN_FILECONTENT, result);
  }
  LOG.assertTrue(result instanceof PsiCompiledElement || result.isValid());
  return result;
}
/** Finds the PSI directory for the given virtual file; also a cancellation point. */
public PsiDirectory findDirectory(@NotNull VirtualFile file) {
  myProgressManager.checkCanceled();
  return myFileManager.findDirectory(file);
}
/** Tells the cache manager that the given file changed and must be re-cached or invalidated. */
public void invalidateFile(PsiFile file) {
  if (myIsDisposed) {
    LOG.error("Disposed PsiManager calls invalidateFile!");
  }
  final VirtualFile virtualFile = file.getVirtualFile();
  // Only physical files participate in the cache; myCacheManager may be absent.
  if (file.getViewProvider().isPhysical() && myCacheManager != null) {
    myCacheManager.addOrInvalidateFile(virtualFile);
  }
}
/** Delegates reloading of the file's PSI from disk to the file manager. */
public void reloadFromDisk(@NotNull PsiFile file) {
  myFileManager.reloadFromDisk(file);
}
/** Registers a listener for PSI tree change events. */
public void addPsiTreeChangeListener(@NotNull PsiTreeChangeListener listener) {
  myTreeChangeListeners.add(listener);
}
/** Registers a listener that is automatically removed when {@code parentDisposable} is disposed. */
public void addPsiTreeChangeListener(@NotNull final PsiTreeChangeListener listener, Disposable parentDisposable) {
  addPsiTreeChangeListener(listener);
  Disposer.register(parentDisposable, new Disposable() {
    public void dispose() {
      removePsiTreeChangeListener(listener);
    }
  });
}
/** Unregisters a previously added PSI tree change listener. */
public void removePsiTreeChangeListener(@NotNull PsiTreeChangeListener listener) {
  myTreeChangeListeners.remove(listener);
}
/** Fires a BEFORE_CHILD_ADDITION event to all listeners. */
public void beforeChildAddition(PsiTreeChangeEventImpl event) {
  event.setCode(BEFORE_CHILD_ADDITION);
  if (LOG.isDebugEnabled()) {
    LOG.debug(
      "beforeChildAddition: parent = " + event.getParent()
    );
  }
  fireEvent(event);
}
/** Fires a BEFORE_CHILD_REMOVAL event to all listeners. */
public void beforeChildRemoval(PsiTreeChangeEventImpl event) {
  event.setCode(BEFORE_CHILD_REMOVAL);
  if (LOG.isDebugEnabled()) {
    LOG.debug(
      "beforeChildRemoval: child = " + event.getChild()
      + ", parent = " + event.getParent()
    );
  }
  fireEvent(event);
}
/** Fires a BEFORE_CHILD_REPLACEMENT event to all listeners. */
public void beforeChildReplacement(PsiTreeChangeEventImpl event) {
  event.setCode(BEFORE_CHILD_REPLACEMENT);
  if (LOG.isDebugEnabled()) {
    LOG.debug(
      "beforeChildReplacement: oldChild = " + event.getOldChild()
      + ", parent = " + event.getParent()
    );
  }
  fireEvent(event);
}
/** Fires a BEFORE_CHILDREN_CHANGE event to all listeners. */
public void beforeChildrenChange(PsiTreeChangeEventImpl event) {
  event.setCode(BEFORE_CHILDREN_CHANGE);
  if (LOG.isDebugEnabled()) {
    LOG.debug("beforeChildrenChange: parent = " + event.getParent());
  }
  fireEvent(event);
}
/** Fires a BEFORE_CHILD_MOVEMENT event to all listeners. */
public void beforeChildMovement(PsiTreeChangeEventImpl event) {
  event.setCode(BEFORE_CHILD_MOVEMENT);
  if (LOG.isDebugEnabled()) {
    LOG.debug(
      "beforeChildMovement: child = " + event.getChild()
      + ", oldParent = " + event.getOldParent()
      + ", newParent = " + event.getNewParent()
    );
  }
  fireEvent(event);
}
/** Fires a BEFORE_PROPERTY_CHANGE event to all listeners. */
public void beforePropertyChange(PsiTreeChangeEventImpl event) {
  event.setCode(BEFORE_PROPERTY_CHANGE);
  if (LOG.isDebugEnabled()) {
    LOG.debug(
      "beforePropertyChange: element = " + event.getElement()
      + ", propertyName = " + event.getPropertyName()
      + ", oldValue = " + event.getOldValue()
    );
  }
  fireEvent(event);
}
/** Fires CHILD_ADDED; on-change runnables run before firing, after-change runnables after. */
public void childAdded(PsiTreeChangeEventImpl event) {
  onChange(true);
  event.setCode(CHILD_ADDED);
  if (LOG.isDebugEnabled()) {
    LOG.debug(
      "childAdded: child = " + event.getChild()
      + ", parent = " + event.getParent()
    );
  }
  fireEvent(event);
  afterAnyChange();
}
/** Fires CHILD_REMOVED; on-change runnables run before firing, after-change runnables after. */
public void childRemoved(PsiTreeChangeEventImpl event) {
  onChange(true);
  event.setCode(CHILD_REMOVED);
  if (LOG.isDebugEnabled()) {
    LOG.debug(
      "childRemoved: child = " + event.getChild() + ", parent = " + event.getParent()
    );
  }
  fireEvent(event);
  afterAnyChange();
}
/** Fires CHILD_REPLACED; on-change runnables run before firing, after-change runnables after. */
public void childReplaced(PsiTreeChangeEventImpl event) {
  onChange(true);
  event.setCode(CHILD_REPLACED);
  if (LOG.isDebugEnabled()) {
    LOG.debug(
      "childReplaced: oldChild = " + event.getOldChild()
      + ", newChild = " + event.getNewChild()
      + ", parent = " + event.getParent()
    );
  }
  fireEvent(event);
  afterAnyChange();
}
/** Fires CHILD_MOVED; on-change runnables run before firing, after-change runnables after. */
public void childMoved(PsiTreeChangeEventImpl event) {
  onChange(true);
  event.setCode(CHILD_MOVED);
  if (LOG.isDebugEnabled()) {
    LOG.debug(
      "childMoved: child = " + event.getChild()
      + ", oldParent = " + event.getOldParent()
      + ", newParent = " + event.getNewParent()
    );
  }
  fireEvent(event);
  afterAnyChange();
}
/** Fires CHILDREN_CHANGED; on-change runnables run before firing, after-change runnables after. */
public void childrenChanged(PsiTreeChangeEventImpl event) {
  onChange(true);
  event.setCode(CHILDREN_CHANGED);
  if (LOG.isDebugEnabled()) {
    LOG.debug(
      "childrenChanged: parent = " + event.getParent()
    );
  }
  fireEvent(event);
  afterAnyChange();
}
/** Fires PROPERTY_CHANGED; on-change runnables run before firing, after-change runnables after. */
public void propertyChanged(PsiTreeChangeEventImpl event) {
  onChange(true);
  event.setCode(PROPERTY_CHANGED);
  if (LOG.isDebugEnabled()) {
    LOG.debug(
      "propertyChanged: element = " + event.getElement()
      + ", propertyName = " + event.getPropertyName()
      + ", oldValue = " + event.getOldValue()
      + ", newValue = " + event.getNewValue()
    );
  }
  fireEvent(event);
  afterAnyChange();
}
/** Registers a preprocessor that sees every tree-change event before the listeners do. */
public void addTreeChangePreprocessor(PsiTreeChangePreprocessor preprocessor) {
  myTreeChangePreprocessors.add(preprocessor);
}
/**
 * Dispatches the event: first to all preprocessors, then to each listener via
 * the callback matching the event code. For real tree changes (everything
 * except property-change events) re-entrant PSI modification is forbidden
 * while the event is being fired.
 */
private void fireEvent(PsiTreeChangeEventImpl event) {
  boolean isRealTreeChange = event.getCode() != PROPERTY_CHANGED && event.getCode() != BEFORE_PROPERTY_CHANGE;
  PsiFile file = event.getFile();
  // Physical changes (or changes with no associated file) require the write lock.
  if (file == null || file.isPhysical()) {
    ApplicationManager.getApplication().assertWriteAccessAllowed();
  }
  if (isRealTreeChange) {
    LOG.assertTrue(!myTreeChangeEventIsFiring, "Changes to PSI are not allowed inside event processing");
    myTreeChangeEventIsFiring = true;
  }
  try {
    // Preprocessors run before the public listeners.
    for(PsiTreeChangePreprocessor preprocessor: myTreeChangePreprocessors) {
      preprocessor.treeChanged(event);
    }
    for (PsiTreeChangeListener listener : myTreeChangeListeners) {
      try {
        switch (event.getCode()) {
          case BEFORE_CHILD_ADDITION:
            listener.beforeChildAddition(event);
            break;
          case BEFORE_CHILD_REMOVAL:
            listener.beforeChildRemoval(event);
            break;
          case BEFORE_CHILD_REPLACEMENT:
            listener.beforeChildReplacement(event);
            break;
          case BEFORE_CHILD_MOVEMENT:
            listener.beforeChildMovement(event);
            break;
          case BEFORE_CHILDREN_CHANGE:
            listener.beforeChildrenChange(event);
            break;
          case BEFORE_PROPERTY_CHANGE:
            listener.beforePropertyChange(event);
            break;
          case CHILD_ADDED:
            listener.childAdded(event);
            break;
          case CHILD_REMOVED:
            listener.childRemoved(event);
            break;
          case CHILD_REPLACED:
            listener.childReplaced(event);
            break;
          case CHILD_MOVED:
            listener.childMoved(event);
            break;
          case CHILDREN_CHANGED:
            listener.childrenChanged(event);
            break;
          case PROPERTY_CHANGED:
            listener.propertyChanged(event);
            break;
        }
      }
      catch (Exception e) {
        // A misbehaving listener must not prevent delivery to the remaining listeners.
        LOG.error(e);
      }
    }
  }
  finally {
    if (isRealTreeChange) {
      myTreeChangeEventIsFiring = false;
    }
  }
}
/** Registers a runnable executed on every physical PSI change. */
public void registerRunnableToRunOnChange(Runnable runnable) {
  myRunnablesOnChange.add(runnable);
}
/** Registers a weakly referenced runnable executed (at most once) on the next physical PSI change. */
public void registerWeakRunnableToRunOnChange(Runnable runnable) {
  myWeakRunnablesOnChange.add(new WeakReference<Runnable>(runnable));
}
/** Registers a runnable executed on every PSI change, physical or not. */
public void registerRunnableToRunOnAnyChange(Runnable runnable) { // includes non-physical changes
  myRunnablesOnAnyChange.add(runnable);
}
/** Registers a runnable executed after every PSI change, physical or not. */
public void registerRunnableToRunAfterAnyChange(Runnable runnable) { // includes non-physical changes
  myRunnablesAfterAnyChange.add(runnable);
}
/** Signals a non-physical PSI change (runs only the any-change runnables). */
public void nonPhysicalChange() {
  onChange(false);
}
/** Signals a physical PSI change (runs change, weak-change and any-change runnables). */
public void physicalChange() {
  onChange(true);
}
/**
 * Runs change-notification runnables. Physical changes additionally drain the
 * weak-runnable list: it is snapshotted and cleared up front, so each weak
 * registration fires at most once and runnables registered during the
 * callbacks are kept for the next change.
 */
private void onChange(boolean isPhysical) {
  if (isPhysical) {
    runRunnables(myRunnablesOnChange);
    WeakReference[] refs = myWeakRunnablesOnChange.toArray(new WeakReference[myWeakRunnablesOnChange.size()]);
    myWeakRunnablesOnChange.clear();
    for (WeakReference ref : refs) {
      // The referent may have been garbage-collected; skip cleared references.
      Runnable runnable = ref != null ? (Runnable)ref.get() : null;
      if (runnable != null) {
        runnable.run();
      }
    }
  }
  runRunnables(myRunnablesOnAnyChange);
}
/** Runs the after-any-change runnables; called at the end of each "after" notification. */
private void afterAnyChange() {
  runRunnables(myRunnablesAfterAnyChange);
}
/** Runs all runnables in order. The indexed loop re-reads size(), so runnables appended during iteration are also run. */
private static void runRunnables(List<Runnable> runnables) {
  if (runnables.isEmpty()) return;
  //noinspection ForLoopReplaceableByForEach
  for (int i = 0; i < runnables.size(); i++) {
    runnables.get(i).run();
  }
}
/** Returns the PSI search helper. */
@NotNull
public PsiSearchHelper getSearchHelper() {
  return mySearchHelper;
}
/** Returns the PSI modification tracker. */
@NotNull
public PsiModificationTracker getModificationTracker() {
  return myModificationTracker;
}
/** Returns the lazily created cached-values manager. */
@NotNull
public CachedValuesManager getCachedValuesManager() {
  return myCachedValuesManager.getValue();
}
/**
 * Moves a directory into a new parent after validating the move.
 *
 * @throws IncorrectOperationException if the move is invalid or the underlying VFS move fails
 */
public void moveDirectory(@NotNull final PsiDirectory dir, @NotNull PsiDirectory newParent) throws IncorrectOperationException {
  checkMove(dir, newParent);
  try {
    dir.getVirtualFile().move(this, newParent.getVirtualFile());
  }
  catch (IOException e) {
    // Surface VFS failures through the PSI exception type callers expect.
    throw new IncorrectOperationException(e.toString(),e);
  }
}
/**
 * Moves a file into a new directory after validating the move.
 *
 * @throws IncorrectOperationException if the move is invalid or the underlying VFS move fails
 */
public void moveFile(@NotNull final PsiFile file, @NotNull PsiDirectory newParent) throws IncorrectOperationException {
  checkMove(file, newParent);
  try {
    final VirtualFile virtualFile = file.getVirtualFile();
    assert virtualFile != null;
    virtualFile.move(this, newParent.getVirtualFile());
  }
  catch (IOException e) {
    // Surface VFS failures through the PSI exception type callers expect.
    throw new IncorrectOperationException(e.toString(),e);
  }
}
/**
 * Validates that {@code element} may be moved into {@code newContainer}.
 * Directory containers are resolved to their single physical directory first;
 * moves of multi-directory packages are rejected.
 *
 * @throws IncorrectOperationException when the move is not allowed
 */
public void checkMove(@NotNull PsiElement element, @NotNull PsiElement newContainer) throws IncorrectOperationException {
  if (element instanceof PsiDirectoryContainer) {
    final PsiDirectory[] directories = ((PsiDirectoryContainer)element).getDirectories();
    if (directories.length == 0) {
      throw new IncorrectOperationException();
    }
    if (directories.length > 1) {
      throw new IncorrectOperationException(
        "Moving of packages represented by more than one physical directory is not supported.");
    }
    // Recurse with the concrete directory standing in for the container.
    checkMove(directories[0], newContainer);
    return;
  }
  // A move is not delete + add, so element.checkDelete() is intentionally not called.
  newContainer.checkAdd(element);
  checkIfMoveIntoSelf(element, newContainer);
}
/**
 * Rejects moves where the destination is the element itself or one of its
 * descendants, walking the parent chain upward from the destination.
 *
 * @throws IncorrectOperationException when such a cycle is detected
 */
private static void checkIfMoveIntoSelf(PsiElement element, PsiElement newContainer) throws IncorrectOperationException {
  for (PsiElement ancestor = newContainer; ancestor != null; ancestor = ancestor.getParent()) {
    if (ancestor != element) {
      continue;
    }
    if (!(element instanceof PsiDirectory)) {
      throw new IncorrectOperationException();
    }
    throw new IncorrectOperationException(element == newContainer
      ? "Cannot move directory into itself."
      : "Cannot move directory into its subdirectory.");
  }
}
/** Enters batch files processing mode; calls may nest (a counter is kept). */
public void startBatchFilesProcessingMode() {
  myBatchFilesProcessingModeCount.incrementAndGet();
}
/** Leaves batch files processing mode; asserts start/finish calls are balanced. */
public void finishBatchFilesProcessingMode() {
  myBatchFilesProcessingModeCount.decrementAndGet();
  LOG.assertTrue(myBatchFilesProcessingModeCount.get() >= 0);
}
/** Returns true while at least one batch files processing session is active. */
public boolean isBatchFilesProcessingMode() {
  return myBatchFilesProcessingModeCount.get() > 0;
}
/** Returns this component's name. */
@NotNull
public String getComponentName() {
  return "PsiManager";
}
}
|
|
/*
* #%L
* Deep Zoom plugin for ImageJ.
* %%
* Copyright (C) 2010 - 2014 Board of Regents of the University of
* Wisconsin-Madison.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package loci.deepzoom;
import ij.IJ;
import ij.ImageJ;
import ij.ImagePlus;
import ij.WindowManager;
import ij.gui.GenericDialog;
import ij.plugin.PlugIn;
import ij.process.ImageProcessor;
import java.util.prefs.Preferences;
/**
* TODO
*
* @author Aivar Grislis
*/
/**
 * ImageJ plugin that exports the current image as a Deep Zoom (Seadragon)
 * tiled-image pyramid together with an HTML viewer page. Last-used dialog
 * settings are persisted via {@link Preferences}.
 *
 * <p>Fixes relative to the previous revision:
 * <ul>
 *   <li>The chosen implementation is now resolved from the visible choice list
 *   ({@code m_choices}) instead of {@code Implementation.values()}. Since the
 *   choice list omits MULTITHREADED, indexing {@code values()} by the dialog's
 *   choice index mapped SINGLEINSTANCE to MULTITHREADED and MULTIINSTANCE to
 *   SINGLEINSTANCE.</li>
 *   <li>{@code main} now uses the persisted FILE preference as the dialog
 *   default (it was read but never used).</li>
 * </ul>
 *
 * @author Aivar Grislis
 */
public class DeepZoom implements PlugIn {

  // Preference keys for persisted settings.
  private static final String FILE = "FILE";
  private static final String OUTPUT = "OUTPUT";
  private static final String NAME = "NAME";
  private static final String DESCRIPTION = "DESCRIPTION";
  private static final String WIDTH = "WIDTH";
  private static final String HEIGHT = "HEIGHT";
  private static final String LAUNCH = "LAUNCH";
  private static final String URL = "URL";

  /** Alternative plugin-chaining frameworks the exporter can run on. */
  private enum Implementation {
    CHAINED, MULTITHREADED, SINGLEINSTANCE, MULTIINSTANCE
  }

  // Choices shown in the dialog. MULTITHREADED is deliberately not offered;
  // in the switch below it falls through to the CHAINED case.
  private static final String[] m_choices = { Implementation.CHAINED.name(),
    // Implementation.MULTITHREADED.name(),
    Implementation.SINGLEINSTANCE.name(), Implementation.MULTIINSTANCE.name() };

  private final Preferences m_prefs = Preferences.userRoot().node(
    this.getClass().getName());

  /**
   * Plugin entry point: prompts for export settings and writes the Deep Zoom
   * output for the current image.
   *
   * @param arg unused
   */
  @Override
  public void run(final String arg) {
    final ImagePlus imp = WindowManager.getCurrentImage();
    if (null == imp) {
      return; // no image is open
    }
    final ImageProcessor ip = imp.getChannelProcessor().convertToRGB();

    // Defaults come from the previous invocation's persisted settings.
    String folder = m_prefs.get(OUTPUT, "");
    String name = m_prefs.get(NAME, "image");
    String description = m_prefs.get(DESCRIPTION, "Zoomable image");
    int width = m_prefs.getInt(WIDTH, 640);
    int height = m_prefs.getInt(HEIGHT, 480);
    boolean launch = m_prefs.getBoolean(LAUNCH, true);
    String url = m_prefs.get(URL, "");

    final GenericDialog dialog = new GenericDialog("Save Image to Deep Zoom");
    dialog.addStringField("Output folder: ", folder);
    dialog.addStringField("HTML file name: ", name);
    dialog.addStringField("HTML title: ", description);
    dialog.addNumericField("Image window width: ", width, 0);
    dialog.addNumericField("Image window height: ", height, 0);
    dialog.addCheckbox("Launch browser: ", launch);
    dialog.addStringField("URL (optional): ", url);
    dialog.addChoice("Implementation: ", m_choices, Implementation.CHAINED
      .toString());
    dialog
      .addMessage("Uses Seadragon Ajax Code, built on Microsoft technology.");
    dialog.showDialog();
    if (dialog.wasCanceled()) {
      return;
    }

    // Read back the fields in the exact order they were added above.
    folder = dialog.getNextString();
    name = dialog.getNextString();
    description = dialog.getNextString();
    width = (int) dialog.getNextNumber();
    height = (int) dialog.getNextNumber();
    launch = dialog.getNextBoolean();
    url = dialog.getNextString();
    final int choiceIndex = dialog.getNextChoiceIndex();
    // Map the index through m_choices, not Implementation.values(): the
    // choice list skips MULTITHREADED, so the ordinals do not line up.
    final Implementation implementation = Implementation.valueOf(m_choices[choiceIndex]);

    // Persist the chosen settings for next time.
    m_prefs.put(OUTPUT, folder);
    m_prefs.put(NAME, name);
    m_prefs.put(DESCRIPTION, description);
    m_prefs.putInt(WIDTH, width);
    m_prefs.putInt(HEIGHT, height);
    m_prefs.putBoolean(LAUNCH, launch);
    m_prefs.put(URL, url);

    // TODO just define an IDeepZoomExporter interface
    switch (implementation) {
      case CHAINED:
      case MULTITHREADED: // TODO
        final loci.chainableplugin.deepzoom.DeepZoomExporter deepZoomExporter1 =
          new loci.chainableplugin.deepzoom.DeepZoomExporter(launch, false,
            folder, url, name, description, width, height);
        final loci.deepzoom.plugin.ImageWrapper imageWrapper1 =
          new loci.deepzoom.plugin.ImageWrapper(ip);
        deepZoomExporter1.process(imageWrapper1);
        break;
      case SINGLEINSTANCE:
        loci.multiinstanceplugin.PluginLauncher.s_singleInstance = true;
        final loci.multiinstanceplugin.deepzoom.DeepZoomExporter deepZoomExporter2 =
          new loci.multiinstanceplugin.deepzoom.DeepZoomExporter(launch, false,
            folder, url, name, description, width, height);
        final loci.deepzoom.plugin.ImageWrapper imageWrapper2 =
          new loci.deepzoom.plugin.ImageWrapper(ip);
        deepZoomExporter2.process(imageWrapper2);
        break;
      case MULTIINSTANCE:
        loci.multiinstanceplugin.PluginLauncher.s_singleInstance = false;
        final loci.multiinstanceplugin.deepzoom.DeepZoomExporter deepZoomExporter3 =
          new loci.multiinstanceplugin.deepzoom.DeepZoomExporter(launch, false,
            folder, url, name, description, width, height);
        final loci.deepzoom.plugin.ImageWrapper imageWrapper3 =
          new loci.deepzoom.plugin.ImageWrapper(ip);
        deepZoomExporter3.process(imageWrapper3);
        break;
    }
  }

  /**
   * Main method used for testing only. This allows the tester to compare
   * different implementations of the plugin that use different frameworks to
   * chain the processor components together.
   *
   * @param args the command line arguments
   */
  public static void main(final String[] args) {
    new ImageJ();

    // ask for file to load
    final Preferences prefs = Preferences.userRoot().node("tmp");
    String file = prefs.get(FILE, "");

    final GenericDialog dialog = new GenericDialog("Choose Image");
    // Use the persisted file name as the default (previously read but unused).
    dialog.addStringField("File: ", file);
    dialog.showDialog();
    if (dialog.wasCanceled()) {
      return;
    }
    file = dialog.getNextString();
    prefs.put(FILE, file);

    // generateDeepZoom(file, "folder", "name", 640, 480);
    // System.exit(0);

    IJ.open(file);
    final ImagePlus imp = WindowManager.getCurrentImage();
    imp.hide();
    System.out.println("imp size is " + imp.getStackSize());
    System.out.println("imp type is " + imp.getType());
    // ij.process.ImageConverter converter = new ij.process.ImageConverter(imp);
    // //FAIL for some reason the image type is IMGLIB and conversion fails
    // converter.convertRGBStackToRGB();
    convertRGBStackToRGB(imp);
    imp.show();

    // run plugin
    final DeepZoom plugin = new DeepZoom();
    plugin.run("");
    System.exit(0); // TODO just for testing.
  }

  /**
   * This is just a hack to make the main method work:
   */
  /** Converts a 2 or 3 slice 8-bit stack to RGB. */
  public static void convertRGBStackToRGB(final ImagePlus imp) {
    final int stackSize = imp.getStackSize();
    final int type = imp.getType();
    // if (stackSize<2 || stackSize>3 || type!=ImagePlus.GRAY8) //FAIL, the
    // ImageConverter version encounters ImagePlus.IMGLIB == 5
    // throw new IllegalArgumentException("2 or 3 slice 8-bit stack required");
    final int width = imp.getWidth();
    final int height = imp.getHeight();
    final ij.ImageStack stack = imp.getStack();
    if (stack.getPixels(1) instanceof byte[]) {
      final byte[] R = (byte[]) stack.getPixels(1);
      final byte[] G = (byte[]) stack.getPixels(2);
      byte[] B;
      // A missing third slice becomes an all-zero blue channel.
      if (stackSize > 2) B = (byte[]) stack.getPixels(3);
      else B = new byte[width * height];
      imp.trimProcessor();
      final ij.process.ColorProcessor cp =
        new ij.process.ColorProcessor(width, height);
      cp.setRGB(R, G, B);
      if (imp.isInvertedLut()) cp.invert();
      imp.setImage(cp.createImage());
      imp.killStack();
    }
  }

  /**
   * Warning: this method is a hack upon a hack (upon a hack). Generate a
   * DeepZoom HTML, XML, folder, and file structure.
   *
   * @param source file name of source
   * @param folder folder name on local file system
   * @param name HTML name
   * @param width starting width
   * @param height starting height
   */
  public static void generateDeepZoom(final String source, final String folder,
    final String name, final int width, final int height)
  {
    IJ.open(source);
    final ImagePlus imp = WindowManager.getCurrentImage();
    imp.hide();
    convertRGBStackToRGB(imp);
    final ImageProcessor ip = imp.getChannelProcessor().convertToRGB();
    // Always uses the CHAINED implementation; never launches a browser.
    final loci.chainableplugin.deepzoom.DeepZoomExporter deepZoomExporter1 =
      new loci.chainableplugin.deepzoom.DeepZoomExporter(false, false, folder,
        null, name, name, width, height);
    final loci.deepzoom.plugin.ImageWrapper imageWrapper1 =
      new loci.deepzoom.plugin.ImageWrapper(ip);
    deepZoomExporter1.process(imageWrapper1);
  }
}
|
|
/*
* Copyright 2019 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.stunner.kogito.client.session;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.stunner.core.client.session.ClientSession;
import org.kie.workbench.common.stunner.core.client.session.command.ManagedClientSessionCommands;
import org.kie.workbench.common.stunner.core.client.session.command.impl.ClearSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.CopySelectionSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.CutSelectionSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.DeleteSelectionSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.ExportToJpgSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.ExportToPdfSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.ExportToPngSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.ExportToRawFormatSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.ExportToSvgSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.PasteSelectionSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.RedoSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.SaveDiagramSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.SwitchGridSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.UndoSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.ValidateSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.VisitGraphSessionCommand;
import org.mockito.InOrder;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
/**
 * Unit tests for EditorSessionCommands: verifies that init() registers the
 * session commands in a fixed order, that bind() delegates to the managed
 * commands, and that each typed getter resolves the matching command class.
 */
@RunWith(MockitoJUnitRunner.class)
public class EditorSessionCommandsTest {

  @Mock
  protected ManagedClientSessionCommands commands;

  @Mock
  protected ClientSession session;

  protected EditorSessionCommands editorSessionCommands;

  @Before
  @SuppressWarnings("unchecked")
  public void setup() {
    this.editorSessionCommands = makeEditorSessionCommands();
    // Make register() chainable, mirroring the fluent API used by init().
    when(commands.register(any(Class.class))).thenReturn(commands);
  }

  // Protected factory; subclasses may override to test a specialized instance.
  protected EditorSessionCommands makeEditorSessionCommands() {
    return new EditorSessionCommands(commands);
  }

  @Test
  public void testInit() {
    editorSessionCommands.init();

    // The commands must be registered in exactly this order.
    final InOrder inOrder = inOrder(commands);
    inOrder.verify(commands).register(VisitGraphSessionCommand.class);
    inOrder.verify(commands).register(SwitchGridSessionCommand.class);
    inOrder.verify(commands).register(ClearSessionCommand.class);
    inOrder.verify(commands).register(DeleteSelectionSessionCommand.class);
    inOrder.verify(commands).register(UndoSessionCommand.class);
    inOrder.verify(commands).register(RedoSessionCommand.class);
    inOrder.verify(commands).register(ValidateSessionCommand.class);
    inOrder.verify(commands).register(ExportToPngSessionCommand.class);
    inOrder.verify(commands).register(ExportToJpgSessionCommand.class);
    inOrder.verify(commands).register(ExportToPdfSessionCommand.class);
    inOrder.verify(commands).register(ExportToSvgSessionCommand.class);
    inOrder.verify(commands).register(ExportToRawFormatSessionCommand.class);
    inOrder.verify(commands).register(CopySelectionSessionCommand.class);
    inOrder.verify(commands).register(PasteSelectionSessionCommand.class);
    inOrder.verify(commands).register(CutSelectionSessionCommand.class);
    inOrder.verify(commands).register(SaveDiagramSessionCommand.class);
  }

  @Test
  public void testBind() {
    editorSessionCommands.bind(session);
    verify(commands).bind(session);
  }

  @Test
  public void testGetCommands() {
    assertEquals(commands, editorSessionCommands.getCommands());
  }

  // Each getter below must resolve its command through the managed commands.

  @Test
  public void testGetVisitGraphSessionCommand() {
    editorSessionCommands.getVisitGraphSessionCommand();
    verify(commands).get(eq(VisitGraphSessionCommand.class));
  }

  @Test
  public void testGetSwitchGridSessionCommand() {
    editorSessionCommands.getSwitchGridSessionCommand();
    verify(commands).get(eq(SwitchGridSessionCommand.class));
  }

  @Test
  public void testGetClearSessionCommand() {
    editorSessionCommands.getClearSessionCommand();
    verify(commands).get(eq(ClearSessionCommand.class));
  }

  @Test
  public void testGetDeleteSelectionSessionCommand() {
    editorSessionCommands.getDeleteSelectionSessionCommand();
    verify(commands).get(eq(DeleteSelectionSessionCommand.class));
  }

  @Test
  public void testGetUndoSessionCommand() {
    editorSessionCommands.getUndoSessionCommand();
    verify(commands).get(eq(UndoSessionCommand.class));
  }

  @Test
  public void testGetRedoSessionCommand() {
    editorSessionCommands.getRedoSessionCommand();
    verify(commands).get(eq(RedoSessionCommand.class));
  }

  @Test
  public void testGetValidateSessionCommand() {
    editorSessionCommands.getValidateSessionCommand();
    verify(commands).get(eq(ValidateSessionCommand.class));
  }

  @Test
  public void testGetExportToPngSessionCommand() {
    editorSessionCommands.getExportToPngSessionCommand();
    verify(commands).get(eq(ExportToPngSessionCommand.class));
  }

  @Test
  public void testGetExportToJpgSessionCommand() {
    editorSessionCommands.getExportToJpgSessionCommand();
    verify(commands).get(eq(ExportToJpgSessionCommand.class));
  }

  @Test
  public void testGetExportToPdfSessionCommand() {
    editorSessionCommands.getExportToPdfSessionCommand();
    verify(commands).get(eq(ExportToPdfSessionCommand.class));
  }

  @Test
  public void testGetExportToSvgSessionCommand() {
    editorSessionCommands.getExportToSvgSessionCommand();
    verify(commands).get(eq(ExportToSvgSessionCommand.class));
  }

  @Test
  public void testGetExportToRawSessionCommand() {
    editorSessionCommands.getExportToRawFormatSessionCommand();
    verify(commands).get(eq(ExportToRawFormatSessionCommand.class));
  }

  @Test
  public void testGetCopySelectionSessionCommand() {
    editorSessionCommands.getCopySelectionSessionCommand();
    verify(commands).get(eq(CopySelectionSessionCommand.class));
  }

  @Test
  public void testGetPasteSelectionSessionCommand() {
    editorSessionCommands.getPasteSelectionSessionCommand();
    verify(commands).get(eq(PasteSelectionSessionCommand.class));
  }

  @Test
  public void testGetCutSelectionSessionCommand() {
    editorSessionCommands.getCutSelectionSessionCommand();
    verify(commands).get(eq(CutSelectionSessionCommand.class));
  }

  @Test
  public void testGetSaveDiagramSessionCommand() {
    editorSessionCommands.getSaveDiagramSessionCommand();
    verify(commands).get(eq(SaveDiagramSessionCommand.class));
  }
}
|
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.macie2.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Specifies a condition for filtering the results of a query for quota and usage data for one or more Amazon Macie
* accounts.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/macie2-2020-01-01/UsageStatisticsFilter" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UsageStatisticsFilter implements Serializable, Cloneable, StructuredPojo {
/**
* <p>
* The operator to use in the condition. If the value for the key property is accountId, this value must be
* CONTAINS. If the value for the key property is any other supported field, this value can be EQ, GT, GTE, LT, LTE,
* or NE.
* </p>
*/
private String comparator;
/**
* <p>
* The field to use in the condition.
* </p>
*/
private String key;
/**
* <p>
* An array that lists values to use in the condition, based on the value for the field specified by the key
* property. If the value for the key property is accountId, this array can specify multiple values. Otherwise, this
* array can specify only one value.
* </p>
* <p>
* Valid values for each supported field are:
* </p>
* <ul>
* <li>
* <p>
* accountId - The unique identifier for an Amazon Web Services account.
* </p>
* </li>
* <li>
* <p>
* freeTrialStartDate - The date and time, in UTC and extended ISO 8601 format, when the free trial started for an
* account.
* </p>
* </li>
* <li>
* <p>
* serviceLimit - A Boolean (true or false) value that indicates whether an account has reached its monthly quota.
* </p>
* </li>
* <li>
* <p>
* total - A string that represents the current estimated cost for an account.
* </p>
* </li>
* </ul>
*/
private java.util.List<String> values;
/**
* <p>
* The operator to use in the condition. If the value for the key property is accountId, this value must be
* CONTAINS. If the value for the key property is any other supported field, this value can be EQ, GT, GTE, LT, LTE,
* or NE.
* </p>
*
* @param comparator
* The operator to use in the condition. If the value for the key property is accountId, this value must be
* CONTAINS. If the value for the key property is any other supported field, this value can be EQ, GT, GTE,
* LT, LTE, or NE.
* @see UsageStatisticsFilterComparator
*/
public void setComparator(String comparator) {
    // Stored verbatim; see UsageStatisticsFilterComparator for the allowed values.
    this.comparator = comparator;
}
/**
* <p>
* The operator to use in the condition. If the value for the key property is accountId, this value must be
* CONTAINS. If the value for the key property is any other supported field, this value can be EQ, GT, GTE, LT, LTE,
* or NE.
* </p>
*
* @return The operator to use in the condition. If the value for the key property is accountId, this value must be
* CONTAINS. If the value for the key property is any other supported field, this value can be EQ, GT, GTE,
* LT, LTE, or NE.
* @see UsageStatisticsFilterComparator
*/
public String getComparator() {
    // May be null if the comparator was never set.
    return this.comparator;
}
/**
* <p>
* The operator to use in the condition. If the value for the key property is accountId, this value must be
* CONTAINS. If the value for the key property is any other supported field, this value can be EQ, GT, GTE, LT, LTE,
* or NE.
* </p>
*
* @param comparator
* The operator to use in the condition. If the value for the key property is accountId, this value must be
* CONTAINS. If the value for the key property is any other supported field, this value can be EQ, GT, GTE,
* LT, LTE, or NE.
* @return Returns a reference to this object so that method calls can be chained together.
* @see UsageStatisticsFilterComparator
*/
public UsageStatisticsFilter withComparator(String comparator) {
    // Fluent variant of setComparator.
    setComparator(comparator);
    return this;
}
/**
* <p>
* The operator to use in the condition. If the value for the key property is accountId, this value must be
* CONTAINS. If the value for the key property is any other supported field, this value can be EQ, GT, GTE, LT, LTE,
* or NE.
* </p>
*
* @param comparator
* The operator to use in the condition. If the value for the key property is accountId, this value must be
* CONTAINS. If the value for the key property is any other supported field, this value can be EQ, GT, GTE,
* LT, LTE, or NE.
* @return Returns a reference to this object so that method calls can be chained together.
* @see UsageStatisticsFilterComparator
*/
public UsageStatisticsFilter withComparator(UsageStatisticsFilterComparator comparator) {
this.comparator = comparator.toString();
return this;
}
/**
* <p>
* The field to use in the condition.
* </p>
*
* @param key
* The field to use in the condition.
* @see UsageStatisticsFilterKey
*/
public void setKey(String key) {
this.key = key;
}
/**
* <p>
* The field to use in the condition.
* </p>
*
* @return The field to use in the condition.
* @see UsageStatisticsFilterKey
*/
public String getKey() {
return this.key;
}
/**
* <p>
* The field to use in the condition.
* </p>
*
* @param key
* The field to use in the condition.
* @return Returns a reference to this object so that method calls can be chained together.
* @see UsageStatisticsFilterKey
*/
public UsageStatisticsFilter withKey(String key) {
setKey(key);
return this;
}
/**
* <p>
* The field to use in the condition.
* </p>
*
* @param key
* The field to use in the condition.
* @return Returns a reference to this object so that method calls can be chained together.
* @see UsageStatisticsFilterKey
*/
public UsageStatisticsFilter withKey(UsageStatisticsFilterKey key) {
this.key = key.toString();
return this;
}
/**
* <p>
* An array that lists values to use in the condition, based on the value for the field specified by the key
* property. If the value for the key property is accountId, this array can specify multiple values. Otherwise, this
* array can specify only one value.
* </p>
* <p>
* Valid values for each supported field are:
* </p>
* <ul>
* <li>
* <p>
* accountId - The unique identifier for an Amazon Web Services account.
* </p>
* </li>
* <li>
* <p>
* freeTrialStartDate - The date and time, in UTC and extended ISO 8601 format, when the free trial started for an
* account.
* </p>
* </li>
* <li>
* <p>
* serviceLimit - A Boolean (true or false) value that indicates whether an account has reached its monthly quota.
* </p>
* </li>
* <li>
* <p>
* total - A string that represents the current estimated cost for an account.
* </p>
* </li>
* </ul>
*
* @return An array that lists values to use in the condition, based on the value for the field specified by the key
* property. If the value for the key property is accountId, this array can specify multiple values.
* Otherwise, this array can specify only one value.</p>
* <p>
* Valid values for each supported field are:
* </p>
* <ul>
* <li>
* <p>
* accountId - The unique identifier for an Amazon Web Services account.
* </p>
* </li>
* <li>
* <p>
* freeTrialStartDate - The date and time, in UTC and extended ISO 8601 format, when the free trial started
* for an account.
* </p>
* </li>
* <li>
* <p>
* serviceLimit - A Boolean (true or false) value that indicates whether an account has reached its monthly
* quota.
* </p>
* </li>
* <li>
* <p>
* total - A string that represents the current estimated cost for an account.
* </p>
* </li>
*/
public java.util.List<String> getValues() {
return values;
}
/**
* <p>
* An array that lists values to use in the condition, based on the value for the field specified by the key
* property. If the value for the key property is accountId, this array can specify multiple values. Otherwise, this
* array can specify only one value.
* </p>
* <p>
* Valid values for each supported field are:
* </p>
* <ul>
* <li>
* <p>
* accountId - The unique identifier for an Amazon Web Services account.
* </p>
* </li>
* <li>
* <p>
* freeTrialStartDate - The date and time, in UTC and extended ISO 8601 format, when the free trial started for an
* account.
* </p>
* </li>
* <li>
* <p>
* serviceLimit - A Boolean (true or false) value that indicates whether an account has reached its monthly quota.
* </p>
* </li>
* <li>
* <p>
* total - A string that represents the current estimated cost for an account.
* </p>
* </li>
* </ul>
*
* @param values
* An array that lists values to use in the condition, based on the value for the field specified by the key
* property. If the value for the key property is accountId, this array can specify multiple values.
* Otherwise, this array can specify only one value.</p>
* <p>
* Valid values for each supported field are:
* </p>
* <ul>
* <li>
* <p>
* accountId - The unique identifier for an Amazon Web Services account.
* </p>
* </li>
* <li>
* <p>
* freeTrialStartDate - The date and time, in UTC and extended ISO 8601 format, when the free trial started
* for an account.
* </p>
* </li>
* <li>
* <p>
* serviceLimit - A Boolean (true or false) value that indicates whether an account has reached its monthly
* quota.
* </p>
* </li>
* <li>
* <p>
* total - A string that represents the current estimated cost for an account.
* </p>
* </li>
*/
public void setValues(java.util.Collection<String> values) {
if (values == null) {
this.values = null;
return;
}
this.values = new java.util.ArrayList<String>(values);
}
/**
* <p>
* An array that lists values to use in the condition, based on the value for the field specified by the key
* property. If the value for the key property is accountId, this array can specify multiple values. Otherwise, this
* array can specify only one value.
* </p>
* <p>
* Valid values for each supported field are:
* </p>
* <ul>
* <li>
* <p>
* accountId - The unique identifier for an Amazon Web Services account.
* </p>
* </li>
* <li>
* <p>
* freeTrialStartDate - The date and time, in UTC and extended ISO 8601 format, when the free trial started for an
* account.
* </p>
* </li>
* <li>
* <p>
* serviceLimit - A Boolean (true or false) value that indicates whether an account has reached its monthly quota.
* </p>
* </li>
* <li>
* <p>
* total - A string that represents the current estimated cost for an account.
* </p>
* </li>
* </ul>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setValues(java.util.Collection)} or {@link #withValues(java.util.Collection)} if you want to override the
* existing values.
* </p>
*
* @param values
* An array that lists values to use in the condition, based on the value for the field specified by the key
* property. If the value for the key property is accountId, this array can specify multiple values.
* Otherwise, this array can specify only one value.</p>
* <p>
* Valid values for each supported field are:
* </p>
* <ul>
* <li>
* <p>
* accountId - The unique identifier for an Amazon Web Services account.
* </p>
* </li>
* <li>
* <p>
* freeTrialStartDate - The date and time, in UTC and extended ISO 8601 format, when the free trial started
* for an account.
* </p>
* </li>
* <li>
* <p>
* serviceLimit - A Boolean (true or false) value that indicates whether an account has reached its monthly
* quota.
* </p>
* </li>
* <li>
* <p>
* total - A string that represents the current estimated cost for an account.
* </p>
* </li>
* @return Returns a reference to this object so that method calls can be chained together.
*/
public UsageStatisticsFilter withValues(String... values) {
if (this.values == null) {
setValues(new java.util.ArrayList<String>(values.length));
}
for (String ele : values) {
this.values.add(ele);
}
return this;
}
/**
* <p>
* An array that lists values to use in the condition, based on the value for the field specified by the key
* property. If the value for the key property is accountId, this array can specify multiple values. Otherwise, this
* array can specify only one value.
* </p>
* <p>
* Valid values for each supported field are:
* </p>
* <ul>
* <li>
* <p>
* accountId - The unique identifier for an Amazon Web Services account.
* </p>
* </li>
* <li>
* <p>
* freeTrialStartDate - The date and time, in UTC and extended ISO 8601 format, when the free trial started for an
* account.
* </p>
* </li>
* <li>
* <p>
* serviceLimit - A Boolean (true or false) value that indicates whether an account has reached its monthly quota.
* </p>
* </li>
* <li>
* <p>
* total - A string that represents the current estimated cost for an account.
* </p>
* </li>
* </ul>
*
* @param values
* An array that lists values to use in the condition, based on the value for the field specified by the key
* property. If the value for the key property is accountId, this array can specify multiple values.
* Otherwise, this array can specify only one value.</p>
* <p>
* Valid values for each supported field are:
* </p>
* <ul>
* <li>
* <p>
* accountId - The unique identifier for an Amazon Web Services account.
* </p>
* </li>
* <li>
* <p>
* freeTrialStartDate - The date and time, in UTC and extended ISO 8601 format, when the free trial started
* for an account.
* </p>
* </li>
* <li>
* <p>
* serviceLimit - A Boolean (true or false) value that indicates whether an account has reached its monthly
* quota.
* </p>
* </li>
* <li>
* <p>
* total - A string that represents the current estimated cost for an account.
* </p>
* </li>
* @return Returns a reference to this object so that method calls can be chained together.
*/
public UsageStatisticsFilter withValues(java.util.Collection<String> values) {
setValues(values);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getComparator() != null)
sb.append("Comparator: ").append(getComparator()).append(",");
if (getKey() != null)
sb.append("Key: ").append(getKey()).append(",");
if (getValues() != null)
sb.append("Values: ").append(getValues());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof UsageStatisticsFilter == false)
return false;
UsageStatisticsFilter other = (UsageStatisticsFilter) obj;
if (other.getComparator() == null ^ this.getComparator() == null)
return false;
if (other.getComparator() != null && other.getComparator().equals(this.getComparator()) == false)
return false;
if (other.getKey() == null ^ this.getKey() == null)
return false;
if (other.getKey() != null && other.getKey().equals(this.getKey()) == false)
return false;
if (other.getValues() == null ^ this.getValues() == null)
return false;
if (other.getValues() != null && other.getValues().equals(this.getValues()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getComparator() == null) ? 0 : getComparator().hashCode());
hashCode = prime * hashCode + ((getKey() == null) ? 0 : getKey().hashCode());
hashCode = prime * hashCode + ((getValues() == null) ? 0 : getValues().hashCode());
return hashCode;
}
@Override
public UsageStatisticsFilter clone() {
try {
return (UsageStatisticsFilter) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
    /**
     * Marshalls this object's state using the SDK-generated marshaller.
     * For internal use by the AWS SDK only.
     *
     * @param protocolMarshaller the protocol marshaller to write to
     */
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.macie2.model.transform.UsageStatisticsFilterMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.s3a.auth;
import java.io.IOException;
import java.net.URI;
import java.util.Date;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.SdkClientException;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.AWSSessionCredentials;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.auth.BasicSessionCredentials;
import com.amazonaws.services.securitytoken.AWSSecurityTokenService;
import com.amazonaws.services.securitytoken.model.Credentials;
import org.apache.hadoop.classification.VisibleForTesting;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.s3a.Invoker;
import org.apache.hadoop.fs.s3a.Retries;
import org.apache.hadoop.fs.s3a.S3AFileSystem;
import org.apache.hadoop.security.ProviderUtils;
import static org.apache.hadoop.fs.s3a.Constants.ACCESS_KEY;
import static org.apache.hadoop.fs.s3a.Constants.SECRET_KEY;
import static org.apache.hadoop.fs.s3a.Constants.SESSION_TOKEN;
import static org.apache.hadoop.fs.s3a.S3AUtils.lookupPassword;
/**
 * Bridge between the serializable/marshalled
 * {@link MarshalledCredentialBinding} world and the AWS SDK credential
 * classes.
 * Concentrating every aws-sdk reference here keeps the marshalled classes
 * loadable without the aws-sdk JARs on the classpath.
 */
public final class MarshalledCredentialBinding {
  private static final Logger LOG =
      LoggerFactory.getLogger(MarshalledCredentialBinding.class);
  /** Utility class; no instances. */
  private MarshalledCredentialBinding() {
  }
  /**
   * Error text on empty credentials: {@value}.
   */
  @VisibleForTesting
  public static final String NO_AWS_CREDENTIALS = "No AWS credentials";
  /**
   * Convert session credentials issued by an STS call into marshalled form.
   * @param credentials AWS-provided session credentials
   * @return a set of marshalled credentials.
   */
  public static MarshalledCredentials fromSTSCredentials(
      final Credentials credentials) {
    final MarshalledCredentials result = new MarshalledCredentials(
        credentials.getAccessKeyId(),
        credentials.getSecretAccessKey(),
        credentials.getSessionToken());
    // STS may omit an expiry; 0 marks "no expiration recorded"
    final Date expiration = credentials.getExpiration();
    result.setExpiration(expiration == null ? 0 : expiration.getTime());
    return result;
  }
  /**
   * Convert an AWS session credential triple into marshalled form.
   * @param credentials source credential.
   * @return a set of marshalled credentials.
   */
  public static MarshalledCredentials fromAWSCredentials(
      final AWSSessionCredentials credentials) {
    return new MarshalledCredentials(
        credentials.getAWSAccessKeyId(),
        credentials.getAWSSecretKey(),
        credentials.getSessionToken());
  }
  /**
   * Build a set of credentials from the environment variables
   * AWS_ACCESS_KEY, AWS_SECRET_KEY and AWS_SESSION_TOKEN; absent entries
   * are mapped to empty strings, so the result may be incomplete/invalid.
   * @param env environment.
   * @return a possibly incomplete/invalid set of credentials.
   */
  public static MarshalledCredentials fromEnvironment(
      final Map<String, String> env) {
    return new MarshalledCredentials(
        nullToEmptyString(env.get("AWS_ACCESS_KEY")),
        nullToEmptyString(env.get("AWS_SECRET_KEY")),
        nullToEmptyString(env.get("AWS_SESSION_TOKEN")));
  }
  /**
   * Map null to the empty string.
   * @param src source string.
   * @return the source string, or "" when it was null
   */
  private static String nullToEmptyString(final String src) {
    return src != null ? src : "";
  }
  /**
   * Load the credentials from the owning S3A FS configuration, including
   * from Hadoop credential providers. No validation is performed.
   * @param uri binding URI
   * @param conf configuration to load from
   * @return the component
   * @throws IOException on any load failure
   */
  public static MarshalledCredentials fromFileSystem(
      final URI uri,
      final Configuration conf) throws IOException {
    // bucket-specific secrets are keyed off the URI host, when present
    final String bucket = uri == null ? "" : uri.getHost();
    final Configuration leanConf =
        ProviderUtils.excludeIncompatibleCredentialProviders(
            conf, S3AFileSystem.class);
    return new MarshalledCredentials(
        lookupPassword(bucket, leanConf, ACCESS_KEY),
        lookupPassword(bucket, leanConf, SECRET_KEY),
        lookupPassword(bucket, leanConf, SESSION_TOKEN));
  }
  /**
   * Create an AWS credential set from a set of marshalled credentials.
   *
   * From a code-hygiene perspective this would fit into
   * {@link MarshalledCredentials}; it lives here instead to keep all AWS
   * SDK references out of that class.
   * @param marshalled marshalled credentials
   * @param typeRequired type of credentials required
   * @param component component name for exception messages.
   * @return a new set of credentials
   * @throws NoAuthWithAWSException validation failure
   * @throws NoAwsCredentialsException the credentials are actually empty.
   */
  public static AWSCredentials toAWSCredentials(
      final MarshalledCredentials marshalled,
      final MarshalledCredentials.CredentialTypeRequired typeRequired,
      final String component)
      throws NoAuthWithAWSException, NoAwsCredentialsException {
    if (marshalled.isEmpty()) {
      throw new NoAwsCredentialsException(component, NO_AWS_CREDENTIALS);
    }
    if (!marshalled.isValid(typeRequired)) {
      throw new NoAuthWithAWSException(component + ":" +
          marshalled.buildInvalidCredentialsError(typeRequired));
    }
    final String accessKey = marshalled.getAccessKey();
    final String secretKey = marshalled.getSecretKey();
    if (marshalled.hasSessionToken()) {
      // a session token was supplied, so return session credentials
      return new BasicSessionCredentials(accessKey, secretKey,
          marshalled.getSessionToken());
    }
    // no session token: these are full credentials
    return new BasicAWSCredentials(accessKey, secretKey);
  }
  /**
   * Request a set of credentials from an STS endpoint.
   * @param parentCredentials the parent credentials needed to talk to STS
   * @param awsConf AWS client configuration
   * @param stsEndpoint an endpoint, use "" for none
   * @param stsRegion region; use if the endpoint isn't the AWS default.
   * @param duration duration of the credentials in seconds. Minimum value: 900.
   * @param invoker invoker to use for retrying the call.
   * @return the credentials
   * @throws IOException on a failure of the request
   */
  @Retries.RetryTranslated
  public static MarshalledCredentials requestSessionCredentials(
      final AWSCredentialsProvider parentCredentials,
      final ClientConfiguration awsConf,
      final String stsEndpoint,
      final String stsRegion,
      final int duration,
      final Invoker invoker) throws IOException {
    try {
      final AWSSecurityTokenService tokenService =
          STSClientFactory.builder(parentCredentials,
              awsConf,
              stsEndpoint.isEmpty() ? null : stsEndpoint,
              stsRegion)
              .build();
      return fromSTSCredentials(
          STSClientFactory.createClientConnection(tokenService, invoker)
              .requestSessionCredentials(duration, TimeUnit.SECONDS));
    } catch (SdkClientException e) {
      // an unset region is the most common cause of client-side failures
      if (stsRegion.isEmpty()) {
        LOG.error("Region must be provided when requesting session credentials.",
            e);
      }
      throw e;
    }
  }
}
|
|
package com.wellness49.desktop;
import java.io.File;
import android.app.Activity;
import android.content.Context;
import android.os.Bundle;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnFocusChangeListener;
import android.widget.ImageView;
import com.wellness49.util.Common;
import com.wellness49.util.Util;
public class MainActivity extends Activity {
private static final String TAG = "MainActivity";
private Context context = MainActivity.this;
private ImageView communication_img, movie_img, tv_img, sports_img,
shape_img, socialize_img, email_img, languages_img, music_img,
kids_img, cartoons_img, internet_img, extras_img, setting_img,
cryo_img, imaqua_img, productandservices_img,
productpresentation_img, testimonial_img, videos_img, estore_img,
ceo_img, toppartners_img, opportunitypresentation_img,
register_img, backoffice_img, support_img, home_img, aboutus_img;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
File readfile = new File(Common.DEFAULT_FILEPATH);
File writefile = new File(Common.FILEPATH);
//Util.defaultToData(readfile, writefile);
init();
doListener();
}
public void init(){
communication_img = (ImageView) findViewById(R.id.communication_img);
communication_img.requestFocus();
movie_img = (ImageView)findViewById(R.id.movie_img);
tv_img = (ImageView)findViewById(R.id.tv_img);
sports_img = (ImageView)findViewById(R.id.sport_img);
shape_img = (ImageView)findViewById(R.id.shape_img);
socialize_img = (ImageView)findViewById(R.id.socialize_img);
email_img = (ImageView)findViewById(R.id.email_img);
languages_img = (ImageView)findViewById(R.id.languages_img);
music_img = (ImageView)findViewById(R.id.music_img);
kids_img = (ImageView)findViewById(R.id.kid_img);
cartoons_img = (ImageView)findViewById(R.id.cartoons_img);
internet_img = (ImageView)findViewById(R.id.internet_img);
extras_img = (ImageView)findViewById(R.id.extras_img);
setting_img = (ImageView)findViewById(R.id.setting_img);
cryo_img = (ImageView)findViewById(R.id.cryo_img);
imaqua_img = (ImageView)findViewById(R.id.imaqua_img);
productandservices_img = (ImageView)findViewById(R.id.productandservices_img);
productpresentation_img = (ImageView)findViewById(R.id.productpresentation_img);
testimonial_img = (ImageView)findViewById(R.id.testimonial_img);
videos_img = (ImageView)findViewById(R.id.videos_img);
estore_img = (ImageView)findViewById(R.id.estore_img);
ceo_img = (ImageView)findViewById(R.id.ceo_img);
toppartners_img = (ImageView)findViewById(R.id.toppartners_img);
opportunitypresentation_img = (ImageView)findViewById(R.id.opportunitypresentation_img);
register_img = (ImageView)findViewById(R.id.register_img);
backoffice_img = (ImageView)findViewById(R.id.backoffice_img);
support_img = (ImageView)findViewById(R.id.support_img);
home_img = (ImageView)findViewById(R.id.home_img);
aboutus_img = (ImageView)findViewById(R.id.aboutus_img);
}
public void doListener(){
communication_img.setOnFocusChangeListener(new MyFocusListener());
movie_img.setOnFocusChangeListener(new MyFocusListener());
tv_img.setOnFocusChangeListener(new MyFocusListener());
sports_img.setOnFocusChangeListener(new MyFocusListener());
shape_img.setOnFocusChangeListener(new MyFocusListener());
socialize_img.setOnFocusChangeListener(new MyFocusListener());
email_img.setOnFocusChangeListener(new MyFocusListener());
languages_img.setOnFocusChangeListener(new MyFocusListener());
music_img.setOnFocusChangeListener(new MyFocusListener());
kids_img.setOnFocusChangeListener(new MyFocusListener());
cartoons_img.setOnFocusChangeListener(new MyFocusListener());
internet_img.setOnFocusChangeListener(new MyFocusListener());
extras_img.setOnFocusChangeListener(new MyFocusListener());
//setting_img.setOnFocusChangeListener(new MyFocusListener());
/*cryo_img.setOnFocusChangeListener(new MyFocusListener());
imaqua_img.setOnFocusChangeListener(new MyFocusListener());
productandservices_img.setOnFocusChangeListener(new MyFocusListener());
productpresentation_img.setOnFocusChangeListener(new MyFocusListener());
testimonial_img.setOnFocusChangeListener(new MyFocusListener());
videos_img.setOnFocusChangeListener(new MyFocusListener());
estore_img.setOnFocusChangeListener(new MyFocusListener());
ceo_img.setOnFocusChangeListener(new MyFocusListener());
toppartners_img.setOnFocusChangeListener(new MyFocusListener());
opportunitypresentation_img.setOnFocusChangeListener(new MyFocusListener());
register_img.setOnFocusChangeListener(new MyFocusListener());
backoffice_img.setOnFocusChangeListener(new MyFocusListener());
support_img.setOnFocusChangeListener(new MyFocusListener());
home_img.setOnFocusChangeListener(new MyFocusListener());
aboutus_img.setOnFocusChangeListener(new MyFocusListener());*/
}
class MyFocusListener implements OnFocusChangeListener{
@Override
public void onFocusChange(View v, boolean hasFocus) {
// TODO Auto-generated method stub
if (hasFocus) {
v.setBackgroundColor(getResources()
.getColor(R.color.transparent_orange));
} else {
v.setBackgroundColor(getResources()
.getColor(R.color.transparent_background));
}
}
}
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
return true;
}
return super.onKeyDown(keyCode, event);
}
public void mainClick(View target) {
switch (target.getId()) {
case R.id.communication_img:
// Common.gotoActivity(context, CommunicationActivity.class, false);
Common.gotoActivity(context, CategoryActivity.class, "category",
Common.COMMUNICATION_HEAD);
break;
case R.id.movie_img:
// Common.gotoActivity(context, MovieActivity.class, false);
Common.gotoActivity(context, CategoryActivity.class, "category",
Common.MOVIE_HEAD);
break;
case R.id.tv_img:
// Common.gotoActivity(context, TvActivity.class, false);
Common.gotoActivity(context, CategoryActivity.class, "category",
Common.TV_HEAD);
break;
case R.id.sport_img:
// Common.gotoActivity(context, SportActivity.class, false);
Common.gotoActivity(context, CategoryActivity.class, "category",
Common.SPORT_HEAD);
break;
case R.id.shape_img:
// Common.gotoActivity(context, ShapeActivity.class, false);
Common.gotoActivity(context, CategoryActivity.class, "category",
Common.SHAPE_HEAD);
break;
case R.id.socialize_img:
// Common.gotoActivity(context, SocializeActivity.class, false);
Common.gotoActivity(context, CategoryActivity.class, "category",
Common.SOCIALIZE_HEAD);
break;
case R.id.email_img:
// Common.gotoActivity(context, EmailActivity.class, false);
Common.gotoActivity(context, CategoryActivity.class, "category",
Common.EMAIL_HEAD);
break;
case R.id.languages_img:
// Common.gotoActivity(context, LanguagesActivity.class, false);
Common.gotoActivity(context, CategoryActivity.class, "category",
Common.LEARNLANGUAGES_HEAD);
break;
case R.id.music_img:
// Common.gotoActivity(context, MusicActivity.class, false);
Common.gotoActivity(context, CategoryActivity.class, "category",
Common.MUSIC_HEAD);
break;
case R.id.kid_img:
// Common.gotoActivity(context, KidActivity.class, false);
Common.gotoActivity(context, CategoryActivity.class, "category",
Common.KID_HEAD);
break;
case R.id.cartoons_img:
// Common.gotoActivity(context, CartoonActivity.class, false);
Common.gotoActivity(context, CategoryActivity.class, "category",
Common.CARTOON_HEAD);
break;
case R.id.internet_img:
// Common.gotoActivity(context, InternetActivity.class, false);
Common.gotoActivity(context, CategoryActivity.class, "category",
Common.INTERNET_HEAD);
break;
case R.id.extras_img:
// Common.gotoActivity(context, ExtraActivity.class, false);
Common.gotoActivity(context, CategoryActivity.class, "category",
Common.EXTRA_HEAD);
break;
case R.id.setting_img:
Util.doStartApplicationWithPackageName(context,
"com.mbx.settingsmbox");
break;
case R.id.cryo_img:
Common.gotoActivity(context, WellnessWebActivity.class, "urlStr",
"http://wellness49.com/site/page/cryo");
break;
case R.id.imaqua_img:
Common.gotoActivity(context, WellnessWebActivity.class, "urlStr",
"http://wellness49.com/site/page/imaqua");
break;
case R.id.productandservices_img:
Common.gotoActivity(context, WellnessWebActivity.class, "urlStr",
"http://wellness49.com/site/page/products-and-services");
break;
case R.id.productpresentation_img:
Common.gotoActivity(context, WellnessWebActivity.class, "urlStr",
"http://wellness49.com/site/page/presentation");
break;
case R.id.testimonial_img:
Common.gotoActivity(context, WellnessWebActivity.class, "urlStr",
"http://wellness49.com/testimonies");
break;
case R.id.videos_img:
Common.gotoActivity(context, WellnessWebActivity.class, "urlStr",
"http://wellness49.com/site/page/video");
break;
case R.id.estore_img:
Common.gotoActivity(context, WellnessWebActivity.class, "urlStr",
"http://wellness49.com/estore");
break;
case R.id.ceo_img:
Common.gotoActivity(context, WellnessWebActivity.class, "urlStr",
"http://wellness49.com/ceopres");
break;
case R.id.toppartners_img:
Common.gotoActivity(context, WellnessWebActivity.class, "urlStr",
"http://wellness49.com/toppartners");
break;
case R.id.opportunitypresentation_img:
Common.gotoActivity(context, WellnessWebActivity.class, "urlStr",
"http://wellness49.com/site/page/opportunity");
break;
case R.id.register_img:
Common.gotoActivity(context, WellnessWebActivity.class, "urlStr",
"http://wellness49.com/register");
break;
case R.id.backoffice_img:
Common.gotoActivity(context, WellnessWebActivity.class, "urlStr",
"http://wellness49.com/backoffice");
break;
case R.id.support_img:
Common.gotoActivity(context, WellnessWebActivity.class, "urlStr",
"http://wellness49.com/customerservice");
break;
case R.id.home_img:
Common.gotoActivity(context, WellnessWebActivity.class, "urlStr",
"http://wellness49.com");
break;
case R.id.aboutus_img:
Common.gotoActivity(context, WellnessWebActivity.class, "urlStr",
"http://wellness49.com/aboutus");
break;
}
}
}
|
|
package com.github.sql.builder.criteria;
import com.github.sql.builder.aggregate.Aggregate;
import com.github.sql.builder.constants.Operator;
import com.github.sql.builder.model.Column;
import com.github.sql.builder.model.Table;
import com.github.sql.builder.model.Value;
/**
* Represents a matching predicate.
*
* @author Medhi
*
*/
public class MatchCriteria implements Criteria {
/**
* The column to apply the predicate to.
*/
private final Column column;
/**
* The aggregate predicate to apply.
*/
private final Aggregate aggregate;
/**
* The predicate operator.
*/
private final Operator operator;
/**
* The predicate value.
*/
private final Value value;
/**
* Constructor of {@link MatchCriteria}.
*
* @param aggregate
* the {@link Aggregate}
* @param operator
* the {@link Operator}
* @param value
* the string value
*/
public MatchCriteria(Aggregate aggregate, Operator operator, String value) {
this(aggregate, null, operator, new Value(value));
}
/**
* Constructor of {@link MatchCriteria}.
*
* @param aggregate
* the {@link Aggregate}
* @param operator
* the {@link Operator}
* @param value
* the numeric value
*/
public MatchCriteria(Aggregate aggregate, Operator operator, Number value) {
this(aggregate, null, operator, new Value(value));
}
/**
* Constructor of {@link MatchCriteria}.
*
* @param aggregate
* the {@link Aggregate}
* @param operator
* the {@link Operator}
* @param value
* the boolean value
*/
public MatchCriteria(Aggregate aggregate, Operator operator, Boolean value) {
this(aggregate, null, operator, new Value(value));
}
/**
* Constructor of {@link MatchCriteria}.
*
* @param aggregate
* the {@link Aggregate}
* @param operator
* the {@link Operator}
* @param value
* the object value
*/
public MatchCriteria(Aggregate aggregate, Operator operator, Value value) {
this(aggregate, null, operator, value);
}
/**
* Constructor of {@link MatchCriteria}.
*
* @param table
* the {@link Table}
* @param columnName
* the column name
* @param operator
* the {@link Operator}
* @param value
* the string value
*/
public MatchCriteria(Table table, String columnName, Operator operator,
String value) {
this(null, table.getColumn(columnName), operator, new Value(value));
}
/**
* Constructor of {@link MatchCriteria}.
*
* @param table
* the {@link Table}
* @param columnName
* the column name
* @param operator
* the {@link Operator}
* @param value
* the numeric value
*/
public MatchCriteria(Table table, String columnName, Operator operator,
Number value) {
this(null, table.getColumn(columnName), operator, new Value(value));
}
/**
* Constructor of {@link MatchCriteria}.
*
* @param table
* the {@link Table}
* @param columnName
* the column name
* @param operator
* the {@link Operator}
* @param value
* the boolean value
*/
public MatchCriteria(Table table, String columnName, Operator operator,
Boolean value) {
this(null, table.getColumn(columnName), operator, new Value(value));
}
/**
 * Creates a criteria comparing a table column (resolved by name) against an
 * already-wrapped value.
 *
 * @param table      the {@link Table} owning the column
 * @param columnName the column name, resolved via {@code table.getColumn}
 * @param operator   the comparison {@link Operator}
 * @param value      the {@link Value} to compare against (passed through unchanged)
 */
public MatchCriteria(Table table, String columnName, Operator operator,
Value value) {
// resolve the column and delegate (no aggregate)
this(null, table.getColumn(columnName), operator, value);
}
/**
 * Creates an equality criteria between a table column (resolved by name)
 * and a string value. The operator is implicitly {@code EQUALS}.
 *
 * @param table      the {@link Table} owning the column
 * @param columnName the column name, resolved via {@code table.getColumn}
 * @param value      the string value to compare for equality
 */
public MatchCriteria(Table table, String columnName, String value) {
// delegates to the (Column, Value) constructor, which applies Operator.EQUALS
this(table.getColumn(columnName), new Value(value));
}
/**
 * Creates an equality criteria between a table column (resolved by name)
 * and a numeric value. The operator is implicitly {@code EQUALS}.
 *
 * @param table      the {@link Table} owning the column
 * @param columnName the column name, resolved via {@code table.getColumn}
 * @param value      the numeric value to compare for equality
 */
public MatchCriteria(Table table, String columnName, Number value) {
// delegates to the (Column, Value) constructor, which applies Operator.EQUALS
this(table.getColumn(columnName), new Value(value));
}
/**
 * Creates an equality criteria between a table column (resolved by name)
 * and a boolean value. The operator is implicitly {@code EQUALS}.
 *
 * @param table      the {@link Table} owning the column
 * @param columnName the column name, resolved via {@code table.getColumn}
 * @param value      the boolean value to compare for equality
 */
public MatchCriteria(Table table, String columnName, Boolean value) {
// delegates to the (Column, Value) constructor, which applies Operator.EQUALS
this(table.getColumn(columnName), new Value(value));
}
/**
 * Creates an equality criteria between a table column (resolved by name)
 * and an already-wrapped value. The operator is implicitly {@code EQUALS}.
 *
 * @param table      the {@link Table} owning the column
 * @param columnName the column name, resolved via {@code table.getColumn}
 * @param value      the {@link Value} to compare for equality
 */
public MatchCriteria(Table table, String columnName, Value value) {
    // Pass the Value through unchanged. The previous code wrapped it again
    // (new Value(value)), double-wrapping it — inconsistent with every other
    // Value-taking constructor in this class, which pass it straight through.
    this(table.getColumn(columnName), value);
}
/**
 * Creates an equality criteria between a column and a string value.
 * The operator is implicitly {@code EQUALS}.
 *
 * @param column the {@link Column} forming the left-hand side
 * @param value  the string value to compare for equality
 */
public MatchCriteria(Column column, String value) {
// wrap the string and delegate with the implicit EQUALS operator
this(null, column, Operator.EQUALS, new Value(value));
}
/**
 * Creates an equality criteria between a column and a numeric value.
 * The operator is implicitly {@code EQUALS}.
 *
 * @param column the {@link Column} forming the left-hand side
 * @param value  the numeric value to compare for equality
 */
public MatchCriteria(Column column, Number value) {
// wrap the number and delegate with the implicit EQUALS operator
this(null, column, Operator.EQUALS, new Value(value));
}
/**
 * Creates an equality criteria between a column and a boolean value.
 * The operator is implicitly {@code EQUALS}.
 *
 * @param column the {@link Column} forming the left-hand side
 * @param value  the boolean value to compare for equality
 */
public MatchCriteria(Column column, Boolean value) {
// wrap the boolean and delegate with the implicit EQUALS operator
this(null, column, Operator.EQUALS, new Value(value));
}
/**
 * Creates an equality criteria between a column and an already-wrapped value.
 * The operator is implicitly {@code EQUALS}.
 *
 * @param column the {@link Column} forming the left-hand side
 * @param value  the {@link Value} to compare for equality (passed through unchanged)
 */
public MatchCriteria(Column column, Value value) {
// delegate with the implicit EQUALS operator
this(null, column, Operator.EQUALS, value);
}
/**
 * Designated constructor; every other constructor of this class delegates here.
 *
 * @param aggregate the {@link Aggregate} for the left-hand side; may be null
 *                  when a column is supplied
 * @param column    the {@link Column} for the left-hand side; when non-null it
 *                  takes precedence over the aggregate in {@code output}
 * @param operator  the comparison {@link Operator}
 * @param value     the right-hand side {@link Value}
 */
public MatchCriteria(Aggregate aggregate, Column column, Operator operator,
Value value) {
this.aggregate = aggregate;
this.column = column;
this.operator = operator;
this.value = value;
}
@Override
public void output(StringBuilder builder) {
    // Left-hand side: the column wins when present, otherwise the aggregate.
    if (column == null) {
        aggregate.output(builder);
    } else {
        column.output(builder);
    }
    // Operator symbol, space-separated from both operands.
    builder.append(' ');
    builder.append(operator.getSymbol());
    builder.append(' ');
    // Right-hand side value.
    value.output(builder);
}
}
|
|
/*
* =============================================================================
*
* Copyright (c) 2010, The JAVATUPLES team (http://www.javatuples.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* =============================================================================
*/
package org.javatuples;
import java.util.Collection;
import java.util.Iterator;
import org.javatuples.valueintf.IValue0;
import org.javatuples.valueintf.IValue1;
import org.javatuples.valueintf.IValue2;
import org.javatuples.valueintf.IValue3;
import org.javatuples.valueintf.IValue4;
import org.javatuples.valueintf.IValue5;
import org.javatuples.valueintf.IValue6;
import org.javatuples.valueintf.IValue7;
/**
* <p>
* A tuple of eight elements.
* </p>
*
* @since 1.0
*
* @author Daniel Fernández
*
*/
public final class Octet<A,B,C,D,E,F,G,H>
        extends Tuple
        implements IValue0<A>,
                   IValue1<B>,
                   IValue2<C>,
                   IValue3<D>,
                   IValue4<E>,
                   IValue5<F>,
                   IValue6<G>,
                   IValue7<H> {

    private static final long serialVersionUID = -1187955276020306879L;

    // An Octet always holds exactly eight values.
    private static final int SIZE = 8;

    private final A val0;
    private final B val1;
    private final C val2;
    private final D val3;
    private final E val4;
    private final F val5;
    private final G val6;
    private final H val7;

    /**
     * <p>
     * Static factory: creates an octet from its eight values.
     * </p>
     *
     * @param value0 first value
     * @param value1 second value
     * @param value2 third value
     * @param value3 fourth value
     * @param value4 fifth value
     * @param value5 sixth value
     * @param value6 seventh value
     * @param value7 eighth value
     * @return the tuple
     */
    public static <A,B,C,D,E,F,G,H> Octet<A,B,C,D,E,F,G,H> with(final A value0, final B value1, final C value2, final D value3, final E value4, final F value5, final G value6, final H value7) {
        return new Octet<A,B,C,D,E,F,G,H>(value0,value1,value2,value3,value4,value5,value6,value7);
    }

    /**
     * <p>
     * Create tuple from array. Array has to have exactly eight elements.
     * </p>
     *
     * @param <X> the array component type
     * @param array the array to be converted to a tuple
     * @return the tuple
     */
    public static <X> Octet<X,X,X,X,X,X,X,X> fromArray(final X[] array) {
        if (array == null) {
            throw new IllegalArgumentException("Array cannot be null");
        }
        if (array.length != 8) {
            throw new IllegalArgumentException("Array must have exactly 8 elements in order to create an Octet. Size is " + array.length);
        }
        return new Octet<X,X,X,X,X,X,X,X>(
                array[0],array[1],array[2],array[3],array[4],
                array[5],array[6],array[7]);
    }

    /**
     * <p>
     * Create tuple from collection. Collection has to have exactly eight elements.
     * </p>
     *
     * @param <X> the collection component type
     * @param collection the collection to be converted to a tuple
     * @return the tuple
     */
    public static <X> Octet<X,X,X,X,X,X,X,X> fromCollection(final Collection<X> collection) {
        return fromIterable(collection);
    }

    /**
     * <p>
     * Create tuple from iterable. Iterable has to have exactly eight elements.
     * </p>
     *
     * @param <X> the iterable component type
     * @param iterable the iterable to be converted to a tuple
     * @return the tuple
     */
    public static <X> Octet<X,X,X,X,X,X,X,X> fromIterable(final Iterable<X> iterable) {
        return fromIterable(iterable, 0, true);
    }

    /**
     * <p>
     * Create tuple from iterable, starting from the specified index. Iterable
     * can have more (or less) elements than the tuple to be created; missing
     * positions are filled with {@code null}.
     * </p>
     *
     * @param <X> the iterable component type
     * @param iterable the iterable to be converted to a tuple
     * @param index the zero-based position in the iterable at which to start
     * @return the tuple
     */
    public static <X> Octet<X,X,X,X,X,X,X,X> fromIterable(final Iterable<X> iterable, int index) {
        return fromIterable(iterable, index, false);
    }

    /**
     * Shared implementation behind the public {@code fromIterable} variants.
     * When {@code exactSize} is true the iterable must yield exactly
     * {@code SIZE} elements after skipping {@code index}; otherwise missing
     * positions stay {@code null} and extra elements are ignored.
     */
    private static <X> Octet<X,X,X,X,X,X,X,X> fromIterable(final Iterable<X> iterable, int index, final boolean exactSize) {
        if (iterable == null) {
            throw new IllegalArgumentException("Iterable cannot be null");
        }
        boolean tooFewElements = false;
        final Iterator<X> iter = iterable.iterator();
        // Skip the first 'index' elements, noting if the iterable ran out.
        int i = 0;
        while (i < index) {
            if (iter.hasNext()) {
                iter.next();
            } else {
                tooFewElements = true;
            }
            i++;
        }
        // Collect up to SIZE elements; positions past the end stay null.
        // (Replaces eight copy-pasted if/else blocks in the original.)
        final Object[] elements = new Object[SIZE];
        for (int j = 0; j < SIZE; j++) {
            if (iter.hasNext()) {
                elements[j] = iter.next();
            } else {
                tooFewElements = true;
            }
        }
        if (tooFewElements && exactSize) {
            throw new IllegalArgumentException("Not enough elements for creating an Octet (8 needed)");
        }
        if (iter.hasNext() && exactSize) {
            throw new IllegalArgumentException("Iterable must have exactly 8 available elements in order to create an Octet.");
        }
        // Safe: every element was produced by an Iterable<X> (or is null).
        @SuppressWarnings("unchecked")
        final Octet<X,X,X,X,X,X,X,X> result = new Octet<X,X,X,X,X,X,X,X>(
                (X) elements[0], (X) elements[1], (X) elements[2], (X) elements[3],
                (X) elements[4], (X) elements[5], (X) elements[6], (X) elements[7]);
        return result;
    }

    public Octet(
            final A value0,
            final B value1,
            final C value2,
            final D value3,
            final E value4,
            final F value5,
            final G value6,
            final H value7) {
        super(value0, value1, value2, value3, value4, value5, value6, value7);
        this.val0 = value0;
        this.val1 = value1;
        this.val2 = value2;
        this.val3 = value3;
        this.val4 = value4;
        this.val5 = value5;
        this.val6 = value6;
        this.val7 = value7;
    }

    public A getValue0() {
        return this.val0;
    }

    public B getValue1() {
        return this.val1;
    }

    public C getValue2() {
        return this.val2;
    }

    public D getValue3() {
        return this.val3;
    }

    public E getValue4() {
        return this.val4;
    }

    public F getValue5() {
        return this.val5;
    }

    public G getValue6() {
        return this.val6;
    }

    public H getValue7() {
        return this.val7;
    }

    @Override
    public int getSize() {
        return SIZE;
    }

    // ----- addAtN(value): grow to an Ennead by inserting one value at N -----

    public <X0> Ennead<X0,A,B,C,D,E,F,G,H> addAt0(final X0 value0) {
        return new Ennead<X0,A,B,C,D,E,F,G,H>(
                value0, this.val0, this.val1, this.val2, this.val3, this.val4, this.val5,
                this.val6, this.val7);
    }

    public <X0> Ennead<A,X0,B,C,D,E,F,G,H> addAt1(final X0 value0) {
        return new Ennead<A,X0,B,C,D,E,F,G,H>(
                this.val0, value0, this.val1, this.val2, this.val3, this.val4, this.val5,
                this.val6, this.val7);
    }

    public <X0> Ennead<A,B,X0,C,D,E,F,G,H> addAt2(final X0 value0) {
        return new Ennead<A,B,X0,C,D,E,F,G,H>(
                this.val0, this.val1, value0, this.val2, this.val3, this.val4, this.val5,
                this.val6, this.val7);
    }

    public <X0> Ennead<A,B,C,X0,D,E,F,G,H> addAt3(final X0 value0) {
        return new Ennead<A,B,C,X0,D,E,F,G,H>(
                this.val0, this.val1, this.val2, value0, this.val3, this.val4, this.val5,
                this.val6, this.val7);
    }

    public <X0> Ennead<A,B,C,D,X0,E,F,G,H> addAt4(final X0 value0) {
        return new Ennead<A,B,C,D,X0,E,F,G,H>(
                this.val0, this.val1, this.val2, this.val3, value0, this.val4, this.val5,
                this.val6, this.val7);
    }

    public <X0> Ennead<A,B,C,D,E,X0,F,G,H> addAt5(final X0 value0) {
        return new Ennead<A,B,C,D,E,X0,F,G,H>(
                this.val0, this.val1, this.val2, this.val3, this.val4, value0, this.val5,
                this.val6, this.val7);
    }

    public <X0> Ennead<A,B,C,D,E,F,X0,G,H> addAt6(final X0 value0) {
        return new Ennead<A,B,C,D,E,F,X0,G,H>(
                this.val0, this.val1, this.val2, this.val3, this.val4, this.val5, value0,
                this.val6, this.val7);
    }

    public <X0> Ennead<A,B,C,D,E,F,G,X0,H> addAt7(final X0 value0) {
        return new Ennead<A,B,C,D,E,F,G,X0,H>(
                this.val0, this.val1, this.val2, this.val3, this.val4, this.val5, this.val6,
                value0, this.val7);
    }

    public <X0> Ennead<A,B,C,D,E,F,G,H,X0> addAt8(final X0 value0) {
        return new Ennead<A,B,C,D,E,F,G,H,X0>(
                this.val0, this.val1, this.val2, this.val3, this.val4, this.val5, this.val6,
                this.val7, value0);
    }

    // ----- addAtN(v0, v1): grow to a Decade by inserting two values at N -----

    public <X0,X1> Decade<X0,X1,A,B,C,D,E,F,G,H> addAt0(final X0 value0, final X1 value1) {
        return new Decade<X0,X1,A,B,C,D,E,F,G,H>(
                value0, value1, this.val0, this.val1, this.val2, this.val3, this.val4, this.val5,
                this.val6, this.val7);
    }

    public <X0,X1> Decade<A,X0,X1,B,C,D,E,F,G,H> addAt1(final X0 value0, final X1 value1) {
        return new Decade<A,X0,X1,B,C,D,E,F,G,H>(
                this.val0, value0, value1, this.val1, this.val2, this.val3, this.val4, this.val5,
                this.val6, this.val7);
    }

    public <X0,X1> Decade<A,B,X0,X1,C,D,E,F,G,H> addAt2(final X0 value0, final X1 value1) {
        return new Decade<A,B,X0,X1,C,D,E,F,G,H>(
                this.val0, this.val1, value0, value1, this.val2, this.val3, this.val4, this.val5,
                this.val6, this.val7);
    }

    public <X0,X1> Decade<A,B,C,X0,X1,D,E,F,G,H> addAt3(final X0 value0, final X1 value1) {
        return new Decade<A,B,C,X0,X1,D,E,F,G,H>(
                this.val0, this.val1, this.val2, value0, value1, this.val3, this.val4, this.val5,
                this.val6, this.val7);
    }

    public <X0,X1> Decade<A,B,C,D,X0,X1,E,F,G,H> addAt4(final X0 value0, final X1 value1) {
        return new Decade<A,B,C,D,X0,X1,E,F,G,H>(
                this.val0, this.val1, this.val2, this.val3, value0, value1, this.val4, this.val5,
                this.val6, this.val7);
    }

    public <X0,X1> Decade<A,B,C,D,E,X0,X1,F,G,H> addAt5(final X0 value0, final X1 value1) {
        return new Decade<A,B,C,D,E,X0,X1,F,G,H>(
                this.val0, this.val1, this.val2, this.val3, this.val4, value0, value1, this.val5,
                this.val6, this.val7);
    }

    public <X0,X1> Decade<A,B,C,D,E,F,X0,X1,G,H> addAt6(final X0 value0, final X1 value1) {
        return new Decade<A,B,C,D,E,F,X0,X1,G,H>(
                this.val0, this.val1, this.val2, this.val3, this.val4, this.val5, value0, value1,
                this.val6, this.val7);
    }

    public <X0,X1> Decade<A,B,C,D,E,F,G,X0,X1,H> addAt7(final X0 value0, final X1 value1) {
        return new Decade<A,B,C,D,E,F,G,X0,X1,H>(
                this.val0, this.val1, this.val2, this.val3, this.val4, this.val5, this.val6,
                value0, value1, this.val7);
    }

    public <X0,X1> Decade<A,B,C,D,E,F,G,H,X0,X1> addAt8(final X0 value0, final X1 value1) {
        return new Decade<A,B,C,D,E,F,G,H,X0,X1>(
                this.val0, this.val1, this.val2, this.val3, this.val4, this.val5, this.val6,
                this.val7, value0, value1);
    }

    // ----- addAtN(tuple): convenience overloads delegating to the above -----

    public <X0> Ennead<X0,A,B,C,D,E,F,G,H> addAt0(final Unit<X0> tuple) {
        return addAt0(tuple.getValue0());
    }

    public <X0> Ennead<A,X0,B,C,D,E,F,G,H> addAt1(final Unit<X0> tuple) {
        return addAt1(tuple.getValue0());
    }

    public <X0> Ennead<A,B,X0,C,D,E,F,G,H> addAt2(final Unit<X0> tuple) {
        return addAt2(tuple.getValue0());
    }

    public <X0> Ennead<A,B,C,X0,D,E,F,G,H> addAt3(final Unit<X0> tuple) {
        return addAt3(tuple.getValue0());
    }

    public <X0> Ennead<A,B,C,D,X0,E,F,G,H> addAt4(final Unit<X0> tuple) {
        return addAt4(tuple.getValue0());
    }

    public <X0> Ennead<A,B,C,D,E,X0,F,G,H> addAt5(final Unit<X0> tuple) {
        return addAt5(tuple.getValue0());
    }

    public <X0> Ennead<A,B,C,D,E,F,X0,G,H> addAt6(final Unit<X0> tuple) {
        return addAt6(tuple.getValue0());
    }

    public <X0> Ennead<A,B,C,D,E,F,G,X0,H> addAt7(final Unit<X0> tuple) {
        return addAt7(tuple.getValue0());
    }

    public <X0> Ennead<A,B,C,D,E,F,G,H,X0> addAt8(final Unit<X0> tuple) {
        return addAt8(tuple.getValue0());
    }

    public <X0,X1> Decade<X0,X1,A,B,C,D,E,F,G,H> addAt0(final Pair<X0,X1> tuple) {
        return addAt0(tuple.getValue0(),tuple.getValue1());
    }

    public <X0,X1> Decade<A,X0,X1,B,C,D,E,F,G,H> addAt1(final Pair<X0,X1> tuple) {
        return addAt1(tuple.getValue0(),tuple.getValue1());
    }

    public <X0,X1> Decade<A,B,X0,X1,C,D,E,F,G,H> addAt2(final Pair<X0,X1> tuple) {
        return addAt2(tuple.getValue0(),tuple.getValue1());
    }

    public <X0,X1> Decade<A,B,C,X0,X1,D,E,F,G,H> addAt3(final Pair<X0,X1> tuple) {
        return addAt3(tuple.getValue0(),tuple.getValue1());
    }

    public <X0,X1> Decade<A,B,C,D,X0,X1,E,F,G,H> addAt4(final Pair<X0,X1> tuple) {
        return addAt4(tuple.getValue0(),tuple.getValue1());
    }

    public <X0,X1> Decade<A,B,C,D,E,X0,X1,F,G,H> addAt5(final Pair<X0,X1> tuple) {
        return addAt5(tuple.getValue0(),tuple.getValue1());
    }

    public <X0,X1> Decade<A,B,C,D,E,F,X0,X1,G,H> addAt6(final Pair<X0,X1> tuple) {
        return addAt6(tuple.getValue0(),tuple.getValue1());
    }

    public <X0,X1> Decade<A,B,C,D,E,F,G,X0,X1,H> addAt7(final Pair<X0,X1> tuple) {
        return addAt7(tuple.getValue0(),tuple.getValue1());
    }

    public <X0,X1> Decade<A,B,C,D,E,F,G,H,X0,X1> addAt8(final Pair<X0,X1> tuple) {
        return addAt8(tuple.getValue0(),tuple.getValue1());
    }

    // ----- add(...): append at the end (position 8) -----

    public <X0> Ennead<A,B,C,D,E,F,G,H,X0> add(final X0 value0) {
        return addAt8(value0);
    }

    public <X0> Ennead<A,B,C,D,E,F,G,H,X0> add(final Unit<X0> tuple) {
        return addAt8(tuple);
    }

    public <X0,X1> Decade<A,B,C,D,E,F,G,H,X0,X1> add(final X0 value0, final X1 value1) {
        return addAt8(value0, value1);
    }

    public <X0,X1> Decade<A,B,C,D,E,F,G,H,X0,X1> add(final Pair<X0,X1> tuple) {
        return addAt8(tuple);
    }

    // ----- setAtN: return a copy with position N replaced -----

    public <X> Octet<X,B,C,D,E,F,G,H> setAt0(final X value) {
        return new Octet<X,B,C,D,E,F,G,H>(
                value, this.val1, this.val2, this.val3, this.val4, this.val5, this.val6, this.val7);
    }

    public <X> Octet<A,X,C,D,E,F,G,H> setAt1(final X value) {
        return new Octet<A,X,C,D,E,F,G,H>(
                this.val0, value, this.val2, this.val3, this.val4, this.val5, this.val6, this.val7);
    }

    public <X> Octet<A,B,X,D,E,F,G,H> setAt2(final X value) {
        return new Octet<A,B,X,D,E,F,G,H>(
                this.val0, this.val1, value, this.val3, this.val4, this.val5, this.val6, this.val7);
    }

    public <X> Octet<A,B,C,X,E,F,G,H> setAt3(final X value) {
        return new Octet<A,B,C,X,E,F,G,H>(
                this.val0, this.val1, this.val2, value, this.val4, this.val5, this.val6, this.val7);
    }

    public <X> Octet<A,B,C,D,X,F,G,H> setAt4(final X value) {
        return new Octet<A,B,C,D,X,F,G,H>(
                this.val0, this.val1, this.val2, this.val3, value, this.val5, this.val6, this.val7);
    }

    public <X> Octet<A,B,C,D,E,X,G,H> setAt5(final X value) {
        return new Octet<A,B,C,D,E,X,G,H>(
                this.val0, this.val1, this.val2, this.val3, this.val4, value, this.val6, this.val7);
    }

    public <X> Octet<A,B,C,D,E,F,X,H> setAt6(final X value) {
        return new Octet<A,B,C,D,E,F,X,H>(
                this.val0, this.val1, this.val2, this.val3, this.val4, this.val5, value, this.val7);
    }

    public <X> Octet<A,B,C,D,E,F,G,X> setAt7(final X value) {
        return new Octet<A,B,C,D,E,F,G,X>(
                this.val0, this.val1, this.val2, this.val3, this.val4, this.val5, this.val6, value);
    }

    // ----- removeFromN: shrink to a Septet by dropping position N -----

    public Septet<B,C,D,E,F,G,H> removeFrom0() {
        return new Septet<B,C,D,E,F,G,H>(
                this.val1, this.val2, this.val3, this.val4, this.val5, this.val6, this.val7);
    }

    public Septet<A,C,D,E,F,G,H> removeFrom1() {
        return new Septet<A,C,D,E,F,G,H>(
                this.val0, this.val2, this.val3, this.val4, this.val5, this.val6, this.val7);
    }

    public Septet<A,B,D,E,F,G,H> removeFrom2() {
        return new Septet<A,B,D,E,F,G,H>(
                this.val0, this.val1, this.val3, this.val4, this.val5, this.val6, this.val7);
    }

    public Septet<A,B,C,E,F,G,H> removeFrom3() {
        return new Septet<A,B,C,E,F,G,H>(
                this.val0, this.val1, this.val2, this.val4, this.val5, this.val6, this.val7);
    }

    public Septet<A,B,C,D,F,G,H> removeFrom4() {
        return new Septet<A,B,C,D,F,G,H>(
                this.val0, this.val1, this.val2, this.val3, this.val5, this.val6, this.val7);
    }

    public Septet<A,B,C,D,E,G,H> removeFrom5() {
        return new Septet<A,B,C,D,E,G,H>(
                this.val0, this.val1, this.val2, this.val3, this.val4, this.val6, this.val7);
    }

    public Septet<A,B,C,D,E,F,H> removeFrom6() {
        return new Septet<A,B,C,D,E,F,H>(
                this.val0, this.val1, this.val2, this.val3, this.val4, this.val5, this.val7);
    }

    public Septet<A,B,C,D,E,F,G> removeFrom7() {
        return new Septet<A,B,C,D,E,F,G>(
                this.val0, this.val1, this.val2, this.val3, this.val4, this.val5, this.val6);
    }
}
|
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.datapipeline.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p>
* Contains the parameters for DescribeObjects.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/datapipeline-2012-10-29/DescribeObjects" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
// NOTE(review): generated model code — behavioral changes belong in the codegen, not here.
public class DescribeObjectsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* The ID of the pipeline that contains the object definitions.
* </p>
*/
private String pipelineId;
/**
* <p>
* The IDs of the pipeline objects that contain the definitions to be described. You can pass as many as 25
* identifiers in a single call to <code>DescribeObjects</code>.
* </p>
*/
private com.amazonaws.internal.SdkInternalList<String> objectIds;
/**
* <p>
* Indicates whether any expressions in the object should be evaluated when the object descriptions are returned.
* </p>
*/
private Boolean evaluateExpressions;
/**
* <p>
* The starting point for the results to be returned. For the first call, this value should be empty. As long as
* there are more results, continue to call <code>DescribeObjects</code> with the marker value from the previous
* call to retrieve the next set of results.
* </p>
*/
private String marker;
/**
* <p>
* The ID of the pipeline that contains the object definitions.
* </p>
*
* @param pipelineId
*        The ID of the pipeline that contains the object definitions.
*/
public void setPipelineId(String pipelineId) {
this.pipelineId = pipelineId;
}
/**
* <p>
* The ID of the pipeline that contains the object definitions.
* </p>
*
* @return The ID of the pipeline that contains the object definitions.
*/
public String getPipelineId() {
return this.pipelineId;
}
/**
* <p>
* The ID of the pipeline that contains the object definitions.
* </p>
*
* @param pipelineId
*        The ID of the pipeline that contains the object definitions.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeObjectsRequest withPipelineId(String pipelineId) {
setPipelineId(pipelineId);
return this;
}
/**
* <p>
* The IDs of the pipeline objects that contain the definitions to be described. You can pass as many as 25
* identifiers in a single call to <code>DescribeObjects</code>.
* </p>
*
* @return The IDs of the pipeline objects that contain the definitions to be described. You can pass as many as 25
*         identifiers in a single call to <code>DescribeObjects</code>.
*/
public java.util.List<String> getObjectIds() {
// Lazily create the internal list so this getter never returns null.
if (objectIds == null) {
objectIds = new com.amazonaws.internal.SdkInternalList<String>();
}
return objectIds;
}
/**
* <p>
* The IDs of the pipeline objects that contain the definitions to be described. You can pass as many as 25
* identifiers in a single call to <code>DescribeObjects</code>.
* </p>
*
* @param objectIds
*        The IDs of the pipeline objects that contain the definitions to be described. You can pass as many as 25
*        identifiers in a single call to <code>DescribeObjects</code>.
*/
public void setObjectIds(java.util.Collection<String> objectIds) {
if (objectIds == null) {
this.objectIds = null;
return;
}
// Defensive copy into the SDK's internal list type.
this.objectIds = new com.amazonaws.internal.SdkInternalList<String>(objectIds);
}
/**
* <p>
* The IDs of the pipeline objects that contain the definitions to be described. You can pass as many as 25
* identifiers in a single call to <code>DescribeObjects</code>.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setObjectIds(java.util.Collection)} or {@link #withObjectIds(java.util.Collection)} if you want to
* override the existing values.
* </p>
*
* @param objectIds
*        The IDs of the pipeline objects that contain the definitions to be described. You can pass as many as 25
*        identifiers in a single call to <code>DescribeObjects</code>.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeObjectsRequest withObjectIds(String... objectIds) {
// Appends to any existing list; initialize with the varargs length as capacity.
if (this.objectIds == null) {
setObjectIds(new com.amazonaws.internal.SdkInternalList<String>(objectIds.length));
}
for (String ele : objectIds) {
this.objectIds.add(ele);
}
return this;
}
/**
* <p>
* The IDs of the pipeline objects that contain the definitions to be described. You can pass as many as 25
* identifiers in a single call to <code>DescribeObjects</code>.
* </p>
*
* @param objectIds
*        The IDs of the pipeline objects that contain the definitions to be described. You can pass as many as 25
*        identifiers in a single call to <code>DescribeObjects</code>.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeObjectsRequest withObjectIds(java.util.Collection<String> objectIds) {
setObjectIds(objectIds);
return this;
}
/**
* <p>
* Indicates whether any expressions in the object should be evaluated when the object descriptions are returned.
* </p>
*
* @param evaluateExpressions
*        Indicates whether any expressions in the object should be evaluated when the object descriptions are
*        returned.
*/
public void setEvaluateExpressions(Boolean evaluateExpressions) {
this.evaluateExpressions = evaluateExpressions;
}
/**
* <p>
* Indicates whether any expressions in the object should be evaluated when the object descriptions are returned.
* </p>
*
* @return Indicates whether any expressions in the object should be evaluated when the object descriptions are
*         returned.
*/
public Boolean getEvaluateExpressions() {
return this.evaluateExpressions;
}
/**
* <p>
* Indicates whether any expressions in the object should be evaluated when the object descriptions are returned.
* </p>
*
* @param evaluateExpressions
*        Indicates whether any expressions in the object should be evaluated when the object descriptions are
*        returned.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeObjectsRequest withEvaluateExpressions(Boolean evaluateExpressions) {
setEvaluateExpressions(evaluateExpressions);
return this;
}
/**
* <p>
* Indicates whether any expressions in the object should be evaluated when the object descriptions are returned.
* </p>
*
* @return Indicates whether any expressions in the object should be evaluated when the object descriptions are
*         returned.
*/
public Boolean isEvaluateExpressions() {
return this.evaluateExpressions;
}
/**
* <p>
* The starting point for the results to be returned. For the first call, this value should be empty. As long as
* there are more results, continue to call <code>DescribeObjects</code> with the marker value from the previous
* call to retrieve the next set of results.
* </p>
*
* @param marker
*        The starting point for the results to be returned. For the first call, this value should be empty. As long
*        as there are more results, continue to call <code>DescribeObjects</code> with the marker value from the
*        previous call to retrieve the next set of results.
*/
public void setMarker(String marker) {
this.marker = marker;
}
/**
* <p>
* The starting point for the results to be returned. For the first call, this value should be empty. As long as
* there are more results, continue to call <code>DescribeObjects</code> with the marker value from the previous
* call to retrieve the next set of results.
* </p>
*
* @return The starting point for the results to be returned. For the first call, this value should be empty. As
*         long as there are more results, continue to call <code>DescribeObjects</code> with the marker value from
*         the previous call to retrieve the next set of results.
*/
public String getMarker() {
return this.marker;
}
/**
* <p>
* The starting point for the results to be returned. For the first call, this value should be empty. As long as
* there are more results, continue to call <code>DescribeObjects</code> with the marker value from the previous
* call to retrieve the next set of results.
* </p>
*
* @param marker
*        The starting point for the results to be returned. For the first call, this value should be empty. As long
*        as there are more results, continue to call <code>DescribeObjects</code> with the marker value from the
*        previous call to retrieve the next set of results.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeObjectsRequest withMarker(String marker) {
setMarker(marker);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getPipelineId() != null)
sb.append("PipelineId: ").append(getPipelineId()).append(",");
if (getObjectIds() != null)
sb.append("ObjectIds: ").append(getObjectIds()).append(",");
if (getEvaluateExpressions() != null)
sb.append("EvaluateExpressions: ").append(getEvaluateExpressions()).append(",");
if (getMarker() != null)
sb.append("Marker: ").append(getMarker());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof DescribeObjectsRequest == false)
return false;
DescribeObjectsRequest other = (DescribeObjectsRequest) obj;
// For each field, "a == null ^ b == null" is true when exactly one side is null.
if (other.getPipelineId() == null ^ this.getPipelineId() == null)
return false;
if (other.getPipelineId() != null && other.getPipelineId().equals(this.getPipelineId()) == false)
return false;
if (other.getObjectIds() == null ^ this.getObjectIds() == null)
return false;
if (other.getObjectIds() != null && other.getObjectIds().equals(this.getObjectIds()) == false)
return false;
if (other.getEvaluateExpressions() == null ^ this.getEvaluateExpressions() == null)
return false;
if (other.getEvaluateExpressions() != null && other.getEvaluateExpressions().equals(this.getEvaluateExpressions()) == false)
return false;
if (other.getMarker() == null ^ this.getMarker() == null)
return false;
if (other.getMarker() != null && other.getMarker().equals(this.getMarker()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getPipelineId() == null) ? 0 : getPipelineId().hashCode());
hashCode = prime * hashCode + ((getObjectIds() == null) ? 0 : getObjectIds().hashCode());
hashCode = prime * hashCode + ((getEvaluateExpressions() == null) ? 0 : getEvaluateExpressions().hashCode());
hashCode = prime * hashCode + ((getMarker() == null) ? 0 : getMarker().hashCode());
return hashCode;
}
@Override
public DescribeObjectsRequest clone() {
// Shallow clone via AmazonWebServiceRequest.clone().
return (DescribeObjectsRequest) super.clone();
}
}
|
|
package net.rodrigoamaral.dspsp.project;
import net.rodrigoamaral.dspsp.adapters.SolutionConverter;
import net.rodrigoamaral.dspsp.exceptions.InvalidSolutionException;
import net.rodrigoamaral.dspsp.objectives.Efficiency;
import net.rodrigoamaral.dspsp.project.events.DynamicEvent;
import net.rodrigoamaral.dspsp.project.events.EventType;
import net.rodrigoamaral.dspsp.project.tasks.DynamicTask;
import net.rodrigoamaral.dspsp.project.tasks.EffortParameters;
import net.rodrigoamaral.dspsp.project.tasks.TaskManager;
import net.rodrigoamaral.dspsp.solution.DedicationMatrix;
import net.rodrigoamaral.logging.SPSPLogger;
import org.uma.jmetal.solution.DoubleSolution;
import java.util.*;
import static java.lang.Double.POSITIVE_INFINITY;
import static java.lang.Double.max;
import static net.rodrigoamaral.util.DoubleUtils.sum;
public class DynamicProject {
// Number of effort scenarios kept in sampleEffortScenarios (used as its initial capacity).
public static final int SCENARIO_SAMPLE_SIZE = 30;
// NOTE(review): presumably weights the cost term in the robustness measure — confirm against usage.
public static final double ROBUSTNESS_COST_WEIGHT = 1;
// NOTE(review): meaning of K and CROB is not visible in this chunk — confirm where they are used.
public static final int K = 1;
private static final int CROB = 100;
// Running totals; both start at 0.0 in the constructor.
double totalDuration;
double totalCost;
// All project tasks / employees, plus the available/active subsets
// (the subsets are NOT initialized by the constructor and may be null until set elsewhere).
private List<DynamicTask> tasks;
private List<DynamicTask> availableTasks;
private List<DynamicTask> activeTasks;
private List<DynamicEmployee> employees;
private List<DynamicEmployee> availableEmployees;
private DynamicTaskPrecedenceGraph taskPrecedenceGraph;
private List<DynamicEvent> events;
// Lookup tables: external task/employee ID -> index into the tasks/employees lists
// (see getTaskById / getEmployeeById).
private Map<Integer, Integer> taskIndices;
private Map<Integer, Integer> employeeIndices;
// NOTE(review): presumably task ID -> per-employee proficiency values — confirm.
private Map<Integer, List<Double>> taskProficiency;
private List<Double> taskArrivalTimes;
// Schedule produced by the previous (re)scheduling step, converted from a jMetal solution.
private DedicationMatrix previousSchedule;
private double lastSchedulingTime;
private List<Map<Integer, Double>> sampleEffortScenarios;
private String instanceDescription;
private List<Integer> lastAvailableEmployees;
public DynamicProject() {
    // Scalar bookkeeping starts zeroed; no schedule has been produced yet.
    previousSchedule = null;
    lastSchedulingTime = 0;
    totalDuration = 0.0;
    totalCost = 0.0;
    // Empty containers for project entities and lookup tables.
    tasks = new ArrayList<>();
    employees = new ArrayList<>();
    taskIndices = new HashMap<>();
    employeeIndices = new HashMap<>();
    taskProficiency = new HashMap<>();
    taskArrivalTimes = new ArrayList<>();
    sampleEffortScenarios = new ArrayList<>(SCENARIO_SAMPLE_SIZE);
    // The precedence graph is sized from the (currently empty) task list.
    taskPrecedenceGraph = new DynamicTaskPrecedenceGraph(tasks.size());
}
/** Returns the internal task list (not a copy). */
public List<DynamicTask> getTasks() {
return tasks;
}
/** Returns the internal employee list (not a copy). */
public List<DynamicEmployee> getEmployees() {
return employees;
}
/** Returns the task precedence graph. */
public DynamicTaskPrecedenceGraph getTaskPrecedenceGraph() {
return taskPrecedenceGraph;
}
public void setTaskPrecedenceGraph(DynamicTaskPrecedenceGraph taskPrecedenceGraph) {
this.taskPrecedenceGraph = taskPrecedenceGraph;
}
public Map<Integer, Integer> getTaskIndices() {
return taskIndices;
}
public Map<Integer, Integer> getEmployeeIndices() {
return employeeIndices;
}
public Map<Integer, List<Double>> getTaskProficiency() {
return taskProficiency;
}
public List<Double> getTaskArrivalTimes() {
return taskArrivalTimes;
}
public void setLastSchedulingTime(double lastSchedulingTime) {
this.lastSchedulingTime = lastSchedulingTime;
}
public DedicationMatrix getPreviousSchedule() {
return previousSchedule;
}
public void setPreviousSchedule(DoubleSolution previousSchedule) {
this.previousSchedule = new SolutionConverter(this).convert(previousSchedule);
}
public List<Map<Integer, Double>> getSampleEffortScenarios() {
return sampleEffortScenarios;
}
public DynamicTask getTaskById(int id) {
return tasks.get(taskIndices.get(id));
}
public DynamicTask getTaskByIndex(int index) {
return getTaskById(index + 1);
}
public DynamicEmployee getEmployeeByIndex(int index) {
return getEmployeeById(index + 1);
}
public DynamicEmployee getEmployeeById(int id) {
try {
return employees.get(employeeIndices.get(id));
} catch (NullPointerException npe) {
System.out.println("Employee ID " + id + " not found!");
npe.printStackTrace();
}
return null;
}
public int size() {
return getEmployees().size() * getTasks().size();
}
public List<DynamicTask> getAvailableTasks() {
return availableTasks;
}
public List<DynamicTask> getActiveTasks() {
return activeTasks;
}
public List<DynamicEmployee> getAvailableEmployees() {
return availableEmployees;
}
public List<DynamicTask> cloneTasks(Collection<DynamicTask> tasks_) {
List<DynamicTask> cloned = new ArrayList<>();
for (DynamicTask task : tasks_) {
cloned.add(new DynamicTask(task));
}
return cloned;
}
/**
* Updates project state based on the dynamic event and current schedule
*
* @param lastSchedule
* @param event
*/
public void update(DynamicEvent event, DoubleSolution lastSchedule) {
// REFACTOR: lastSchedule should be a DedicationMatrix to avoid dependencies with jMetal
setPreviousSchedule(lastSchedule);
setLastAvailableEmployees(availableEmployees);
updateFinishedEffort(availableEmployees, event.getTime());
updateCurrentStatus(event);
setLastSchedulingTime(event.getTime());
}
public void updateCurrentStatus(DynamicEvent event) {
updateEmployeeAvailability(event);
updateTaskAvailability(event);
updateCurrentStatus();
}
public void updateCurrentStatus() {
availableEmployees = filterAvailableEmployees();
availableTasks = filterAvailableTasks();
activeTasks = filterActiveTasks();
sampleEffortScenarios = generateEffortScenarioSample(availableTasks, SCENARIO_SAMPLE_SIZE);
////
SPSPLogger.info("Available employees: " + availableEmployees);
SPSPLogger.info("Available tasks: " + availableTasks);
SPSPLogger.info("Active tasks: " + activeTasks);
////
}
private List<Map<Integer, Double>> generateEffortScenarioSample(List<DynamicTask> tasks, int scenarioCount) {
List<Map<Integer, Double>> effortScenarioSample = new ArrayList<>(scenarioCount);
for (int i = 0; i < scenarioCount; i++) {
Map<Integer, Double> taskEffortSample = new HashMap<>(tasks.size());
for (DynamicTask task : tasks) {
double effortSample = TaskManager.generateEffortSample(task);
taskEffortSample.put(task.index(), effortSample);
}
effortScenarioSample.add(taskEffortSample);
}
return effortScenarioSample;
}
private void updateEmployeeAvailability(DynamicEvent event) {
int id = event.getSubject().getId();
if (event.getType() == EventType.EMPLOYEE_LEAVE) {
getEmployeeById(id).setAvailable(false);
} else if (event.getType() == EventType.EMPLOYEE_RETURN) {
getEmployeeById(id).setAvailable(true);
}
}
private void updateTaskAvailability(DynamicEvent event) {
List<Integer> incomingTasksIDs = getIncomingTasks(event);
makeTasksAvailable(incomingTasksIDs, event);
}
private void makeTasksAvailable(List<Integer> incomingTaskIDs, DynamicEvent event) {
int urgentTaskIndex = -1;
if (event.getType() == EventType.NEW_URGENT_TASK) {
if (!incomingTaskIDs.isEmpty()) {
urgentTaskIndex = incomingTaskIDs.remove(incomingTaskIDs.size() - 1);
}
}
for (int t : incomingTaskIDs) {
List<Integer> predecessors = chooseRandomTasks();
for (int p : predecessors) {
taskPrecedenceGraph.addEdge(p, t);
DynamicTask newTask = getTaskByIndex(t);
boolean available = TaskManager.isAvailable(newTask, availableEmployees, this, this.getTaskPrecedenceGraph());
newTask.setAvailable(available);
////
SPSPLogger.info("Regular T_" + t + " added after T_" + p + " (T_"+p+" -> T_" + t +")");
////
}
}
if (urgentTaskIndex > -1) {
List<Integer> successors = chooseRandomTasks();
for (int s : successors) {
taskPrecedenceGraph.addEdge(urgentTaskIndex, s);
DynamicTask newUrgentTask = getTaskByIndex(urgentTaskIndex);
boolean available = TaskManager.isAvailable(newUrgentTask, availableEmployees, this, this.getTaskPrecedenceGraph());
newUrgentTask.setAvailable(available);
////
SPSPLogger.info("Urgent T_" + urgentTaskIndex + " added before T_" + s + " (T_"+ urgentTaskIndex +" -> T_" + s +")");
////
}
}
}
private List<Integer> chooseRandomTasks() {
List<Integer> tasks = new ArrayList<>();
int numTasks = Math.random() <= 0.5 ? 2 : 1;
int s = availableTasks.size();
while (tasks.size() < numTasks && tasks.size() <= s && s > 0) {
int t = availableTasks.get(new Random().nextInt(s)).index();
if (!tasks.contains(t)) {
tasks.add(t);
}
}
return tasks;
}
private List<Integer> getIncomingTasks(DynamicEvent event) {
List<Integer> incomingTasks = new ArrayList<>();
double currentTime = event.getTime();
for (int i = 0; i < getTaskArrivalTimes().size(); i++) {
double time = getTaskArrivalTimes().get(i);
if (time > lastSchedulingTime && time <= currentTime) {
incomingTasks.add(i);
}
}
return incomingTasks;
}
private void updateFinishedEffort(List<DynamicEmployee> availableEmployees_, double currentTime) {
Map<Integer, EffortParameters> efforts = new HashMap<>();
double duration = 0.0;
double effortDuration = 0.0;
boolean durationBelowCurrentTime = true;
DynamicTaskPrecedenceGraph localTPG = taskPrecedenceGraph.copy();
List<DynamicTask> localAvailableTasks = cloneTasks(availableTasks);
while ((!localTPG.isEmpty() || !localAvailableTasks.isEmpty()) && durationBelowCurrentTime) {
List<DynamicTask> localActiveTasks = filterActiveTasks(localTPG, localAvailableTasks);
if (localActiveTasks.isEmpty()) {
throw new RuntimeException("Problem instance not solvable!");
}
DedicationMatrix normalizedSchedule = normalize(this.previousSchedule, localActiveTasks);
double partialDuration = Double.POSITIVE_INFINITY;
for (DynamicTask localTask : localActiveTasks) {
EffortParameters ep = TaskManager.getEffortProperties(localTask, availableEmployees_, normalizedSchedule);
efforts.put(localTask.index(), ep);
// Checking if active task had anyone really working on it
if (ep.timeSpent > 0) {
partialDuration = Math.min(partialDuration, ep.timeSpent);
}
}
duration += partialDuration;
effortDuration = partialDuration;
if (duration + lastSchedulingTime > currentTime) {
effortDuration -= (duration + lastSchedulingTime - currentTime);
durationBelowCurrentTime = false;
}
SPSPLogger.debug(
"partialDuration = " + partialDuration + "; " +
"effortDuration = " + effortDuration + "; " +
"delta = " + (currentTime - lastSchedulingTime)
);
// Updates finished effort for the local active tasks
for (DynamicTask localTask : localActiveTasks) {
EffortParameters ep = efforts.get(localTask.index());
// Checks again if task was really active
if (ep.timeSpent > 0) {
double finished = ep.finishedEffort(effortDuration);
localTask.addFinishedEffort(finished);
////
SPSPLogger.debug("localTask " + localTask + " finished effort: (" + (localTask.getFinishedEffort() - finished) + " + " + finished + ") = " + localTask.getFinishedEffort());
////
}
}
// Calculates cost
double partialCost = 0;
for (DynamicTask localTask : localActiveTasks) {
for (DynamicEmployee e : availableEmployees_) {
partialCost += taskCostByEmployee(e, localTask, normalizedSchedule, effortDuration);
}
}
totalCost += partialCost;
// Updates local tasks state
if (durationBelowCurrentTime) {
for (DynamicTask localTask : localActiveTasks) {
if (localTask.isFinished()) {
////
SPSPLogger.debug("Local task " + localTask +" COMPLETE");
////
localTPG.remove(localTask.index());
localAvailableTasks.remove(localTask);
}
}
}
} // while
// Update finished effort for the actual project active tasks
for (DynamicTask globalTask : activeTasks) {
EffortParameters ep = efforts.get(globalTask.index());
// Checks once more if task was really active
if (ep.timeSpent > 0) {
globalTask.addFinishedEffort(ep.finishedEffort(effortDuration));
}
if (globalTask.isFinished()) {
globalTask.setAvailable(false);
taskPrecedenceGraph.remove(globalTask.index());
////
SPSPLogger.info(
String.format(
Locale.US,
"%-4s is COMPLETE: %7.4f / %7.4f (%6.2f %%)",
globalTask,
globalTask.getFinishedEffort(),
globalTask.getEffort(),
globalTask.finishedEffortRatio() * 100
)
);
////
} else {
globalTask.setEffort(reestimateEffort(globalTask));
////
SPSPLogger.info(
String.format(
Locale.US,
"%-4s finished effort: %7.4f / %7.4f (%6.2f %%)",
globalTask,
globalTask.getFinishedEffort(),
globalTask.getEffort(),
globalTask.finishedEffortRatio() * 100
)
);
////
}
}
availableTasks = filterAvailableTasks();
totalDuration = currentTime;
}
private double reestimateEffort(DynamicTask task) {
double realEffort = task.getEffort();
double finishedEffort = task.getFinishedEffort();
while (realEffort <= finishedEffort) {
realEffort = TaskManager.sampleEstimatedEffort(task);
}
return realEffort;
}
private DedicationMatrix normalize(DedicationMatrix dm, List<DynamicTask> activeTasks) {
DedicationMatrix normalized = new DedicationMatrix(dm);
for (DynamicTask t : activeTasks) {
for (DynamicEmployee e : availableEmployees) {
double n = dm.getDedication(e.index(), t.index()) / max(1, dedicationSum(dm, activeTasks, e) / e.getMaxDedication());
normalized.setDedication(e.index(), t.index(), n);
}
}
return normalized;
}
private double dedicationSum(DedicationMatrix dm, List<DynamicTask> activeTasks, DynamicEmployee e) {
return sum(activeTasksDedication(dm, activeTasks, e));
}
private List<Double> activeTasksDedication(DedicationMatrix dm, List<DynamicTask> activeTasks, DynamicEmployee employee) {
List<Double> result = new ArrayList<>();
for (DynamicTask task : activeTasks) {
result.add(dm.getDedication(employee.index(), task.index()));
}
return result;
}
private double taskCostByEmployee(DynamicEmployee e, DynamicTask t, DedicationMatrix solution, double duration) {
double employeeDedication = solution.getDedication(e.index(), t.index());
double regularCost = e.getSalary() * employeeDedication * duration;
return regularCost + getOvertimeCost(e, employeeDedication - 1, duration);
}
private double getOvertimeCost(DynamicEmployee e, double overdedication, double duration) {
double overtimeCost = e.getOvertimeSalary() * overdedication * duration;
overtimeCost = overtimeCost > 0 ? overtimeCost : 0;
return overtimeCost;
}
public List<DynamicEvent> getEvents() {
return events;
}
public void setEvents(List<DynamicEvent> events) {
this.events = events;
}
public double calculateRobustness(DedicationMatrix solution, Efficiency efficiency) throws InvalidSolutionException {
List<Double> durationDistances = new ArrayList<>();
List<Double> costDistances = new ArrayList<>();
List<DynamicTask> scenarioAvailableTasks = cloneTasks(availableTasks);
for (Map<Integer, Double> effortScenario : getSampleEffortScenarios()) {
for (DynamicTask t : scenarioAvailableTasks) {
t.setEffort(effortScenario.get(t.index()));
}
Efficiency scenario = evaluateEfficiency(solution, scenarioAvailableTasks);
durationDistances.add(efficiencyDistance(scenario.duration, efficiency.duration));
costDistances.add(efficiencyDistance(scenario.cost, efficiency.cost));
}
return Math.sqrt(avg(durationDistances)) + ROBUSTNESS_COST_WEIGHT * Math.sqrt(avg(costDistances));
}
public double calculateStability(DedicationMatrix solution) {
if (previousSchedule == null) {
return 0;
}
double stabilityValue = 0;
for (DynamicEmployee e : availableEmployees) {
for (DynamicTask t : availableTasks) {
double currentDedication = solution.getDedication(e.index(), t.index());
double previousDedication = previousSchedule.getDedication(e.index(), t.index());
double w = reschedulingPenalty(currentDedication, previousDedication);
stabilityValue = stabilityValue + (w * Math.abs(currentDedication - previousDedication));
}
}
return stabilityValue;
}
public double reschedulingPenalty(double currentDedication, double previousDedication) {
if (currentDedication > 0 && previousDedication == 0) {
return 2;
} else if (currentDedication == 0 && previousDedication > 0) {
return 1.5;
}
return 1;
}
private double efficiencyDistance(double scenarioObjective, double solutionObjective) {
return Math.pow(Math.max(0, (scenarioObjective - solutionObjective) / solutionObjective), 2);
}
private double avg(List<Double> doubleList) {
return sum(doubleList) / doubleList.size();
}
public List<DynamicTask> filterAvailableTasks() {
List<DynamicTask> availableTasks = new ArrayList<>();
for (DynamicTask task : getTasks()) {
if (task.isAvailable()) {
availableTasks.add(task);
}
}
return availableTasks;
}
private List<DynamicEmployee> filterAvailableEmployees() {
List<DynamicEmployee> availableEmployees = new ArrayList<>();
for (DynamicEmployee employee : getEmployees()) {
if (employee.isAvailable()) {
availableEmployees.add(employee);
}
}
return availableEmployees;
}
private List<DynamicTask> filterActiveTasks() {
List<DynamicTask> activeTasks = new ArrayList<>();
for (int t : taskPrecedenceGraph.getIndependentTasks()) {
DynamicTask task = getTaskByIndex(t);
if (task.isAvailable()) {
activeTasks.add(task);
}
}
return activeTasks;
}
private List<DynamicTask> filterActiveTasks(DynamicTaskPrecedenceGraph tpg, final List<DynamicTask> tasks) {
List<DynamicTask> active = new ArrayList<>();
for (DynamicTask task : tasks) {
for (int t : tpg.getIndependentTasks()) {
if (task.index() == t && task.isAvailable()) {
active.add(task);
}
}
}
return active;
}
public boolean isFinished() {
for (DynamicTask task : getTasks()) {
if (!task.isFinished()) {
return false;
}
}
return true;
}
public int missingSkills() {
return TaskManager.totalMissingSkills(getAvailableTasks(), getAvailableEmployees());
}
public int missingSkills(DynamicTask task, List<DynamicEmployee> employees) {
return TaskManager.missingSkills(task, employees);
}
public double getAvailableEmployeeMaxDedication() {
double max = 0;
for (DynamicEmployee employee : this.getAvailableEmployees()) {
if (employee.getMaxDedication() > max) {
max = employee.getMaxDedication();
}
}
return max;
}
public double getAvailableEmployeeMinDedication() {
double min = POSITIVE_INFINITY;
for (DynamicEmployee employee : this.getAvailableEmployees()) {
if (employee.getMaxDedication() < min) {
min = employee.getMaxDedication();
}
}
return min;
}
public double getTotalEstimatedRemainingEffort() {
double remainingEffort = 0;
for (DynamicTask task : getAvailableTasks()) {
remainingEffort += task.getRemainingEffort();
}
return remainingEffort;
}
/**
* Returns the task team among the available employees at the moment
* @param task
* @param solution
* @return
*/
public List<DynamicEmployee> availableTaskTeam(DynamicTask task, DedicationMatrix solution) {
return taskTeam(task, solution, availableEmployees);
}
public List<DynamicEmployee> taskTeam(DynamicTask task, DedicationMatrix solution, List<DynamicEmployee> employees) {
List<DynamicEmployee> team = new ArrayList<>();
for (int e : TaskManager.team(task, solution)) {
DynamicEmployee teamEmployee = getEmployeeByIndex(e);
for (DynamicEmployee filterEmployee: employees) {
if (teamEmployee.index() == filterEmployee.index()) {
team.add(teamEmployee);
}
}
}
return team;
}
@Override
public String toString() {
return "DynamicProject{\n\t" +
"tasks=" + tasks + ",\n\t" +
"employees=" + employees + ",\n\t" +
"taskPrecedenceGraph=" + taskPrecedenceGraph +
"\n}";
}
public Efficiency evaluateEfficiency(DedicationMatrix dm) throws InvalidSolutionException {
return evaluateEfficiency(dm, availableTasks);
}
public Efficiency evaluateEfficiency(DedicationMatrix dm, List<DynamicTask> tasks) throws InvalidSolutionException {
double duration = 0;
double cost = 0;
double partialCost = 0;
DynamicTaskPrecedenceGraph localTPG = taskPrecedenceGraph.copy();
List<DynamicTask> localAvailableTasks = cloneTasks(tasks);
// --------------------------
// Repairing overhead (begin)
// --------------------------
//// First headcount repair heuristic
dm = removeNonProficientEmployees(dm, localAvailableTasks);
//// Second headcount repair heuristic
for (DynamicTask t: localAvailableTasks) {
final List<DynamicEmployee> taskTeam = availableTaskTeam(t, dm);
List<DynamicEmployee> originalTeam = getSortedTeamByProficiencyInTask(taskTeam, t);
List<DynamicEmployee> repairedTeam = new ArrayList<>(originalTeam);
if (originalTeam.size() > t.getMaximumHeadcount()) {
for (DynamicEmployee e: originalTeam) {
DynamicEmployee removed = repairedTeam.remove(0);
if (missingSkills(t, repairedTeam) == 0) {
dm.setDedication(e.index(), t.index(), 0);
} else {
repairedTeam.add(0, removed);
}
}
}
// Penalizing task effort if max headcount constraint violated
if (repairedTeam.size() > t.getMaximumHeadcount()) {
t.setEffort(t.getEffort() + TaskManager.adjustedEffort(dm, t));
}
}
// --------------------------
// Repairing overhead (end)
// --------------------------
int over = 0;
while (!localTPG.isEmpty() || !(over == tasks.size())) {
List<DynamicTask> localActiveTasks = filterActiveTasks(localTPG, localAvailableTasks);
if (localActiveTasks.isEmpty()) {
throw new RuntimeException("Problem instance not solvable!");
}
DedicationMatrix normDM = normalize(dm, localActiveTasks);
// Duration calculation
double partialDuration = Double.POSITIVE_INFINITY;
for (DynamicTask localTask : localActiveTasks) {
EffortParameters ep = TaskManager.getEffortProperties(localTask, availableEmployees, normDM);
partialDuration = Math.min(partialDuration, ep.timeSpent);
double finishedEffort = ep.finishedEffort(partialDuration);
try {
localTask.addFinishedEffort(finishedEffort);
} catch (IllegalArgumentException iae) {
SPSPLogger.trace(iae.getMessage());
throw new InvalidSolutionException();
}
}
duration += partialDuration;
// Cost calculation
for (DynamicEmployee e : availableEmployees) {
for (DynamicTask t : localActiveTasks) {
partialCost += taskCostByEmployee(e, t, normDM, partialDuration);
}
}
cost += partialCost;
// Finished tasks removal
for (DynamicTask localTask : localActiveTasks) {
if (localTask.isFinished()) {
localTask.setAvailable(false);
localTPG.remove(localTask.index());
// localAvailableTasks.remove(localTask);
over += 1;
}
}
}
return new Efficiency(duration, cost);
}
/**
* First headcount repair heuristic
* @param dm
* @param localAvailableTasks
* @return
*/
private DedicationMatrix removeNonProficientEmployees(DedicationMatrix dm, final List<DynamicTask> localAvailableTasks) {
for (DynamicTask t: localAvailableTasks) {
for (DynamicEmployee e : availableEmployees) {
if (e.getProficiencyOnTask().get(t.index()) == 0) {
dm.setDedication(e.index(), t.index(), 0);
}
}
}
return dm;
}
public List<DynamicEmployee> getSortedTeamByProficiencyInTask(final List<DynamicEmployee> team, final DynamicTask task) {
List<DynamicEmployee> sortedTeam = new ArrayList<>(team);
Collections.sort(sortedTeam, new Comparator<DynamicEmployee>() {
@Override
public int compare(DynamicEmployee e1, DynamicEmployee e2) {
double p1 = e1.getProficiencyOnTask().get(task.index());
double p2 = e2.getProficiencyOnTask().get(task.index());
if (p1 == p2) {
return 0;
}
return p1 < p2 ? -1 : 1;
}
});
return sortedTeam;
}
public double penalizeDuration(int missingSkills) {
double minEmployeeDedication = getAvailableEmployeeMinDedication();
double estimatedRemainingEffort = getTotalEstimatedRemainingEffort();
return 14 * K * missingSkills * estimatedRemainingEffort / minEmployeeDedication;
}
public double penalizeCost(int missingSkills) {
double cost = 0;
for (DynamicEmployee employee : getAvailableEmployees()) {
for (DynamicTask task : getAvailableTasks()) {
cost += employee.getOvertimeSalary() * task.getRemainingEffort();
}
}
return 14 * missingSkills * cost;
}
public double penalizeRobustness(int missingSkills) {
return 2 * CROB * missingSkills;
}
public double penalizeStability(int missingSkills) {
int numberOfAvailableEmployees = getAvailableEmployees().size();
int numberOfAvailableTasks = getAvailableTasks().size();
double maxDedication = getAvailableEmployeeMaxDedication();
return 2 * missingSkills * numberOfAvailableEmployees * numberOfAvailableTasks * maxDedication;
}
public double getTotalDuration() {
return totalDuration;
}
public double getTotalCost() {
return totalCost;
}
public void setInstanceDescription(String instanceDescription) {
this.instanceDescription = instanceDescription;
}
public String getInstanceDescription() {
return instanceDescription;
}
public List<Integer> getLastAvailableEmployees() {
return this.lastAvailableEmployees;
}
private void setLastAvailableEmployees(List<DynamicEmployee> lastAvailableEmployees_) {
List<Integer> employees = new ArrayList<>();
for (DynamicEmployee emp: lastAvailableEmployees_) {
employees.add(emp.index());
}
this.lastAvailableEmployees = employees;
}
}
|
|
/*******************************************************************************
* Copyright (c) 2000, 2011 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.jdt.internal.ui.compare;
import java.lang.reflect.InvocationTargetException;
import java.util.Map;
import org.eclipse.team.core.TeamException;
import org.eclipse.team.ui.history.ElementLocalHistoryPageSource;
import org.eclipse.team.ui.history.HistoryPageCompareEditorInput;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.core.runtime.Assert;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.filebuffers.FileBuffers;
import org.eclipse.core.filebuffers.ITextFileBuffer;
import org.eclipse.core.filebuffers.ITextFileBufferManager;
import org.eclipse.core.filebuffers.LocationKind;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.text.IDocument;
import org.eclipse.jface.text.TextUtilities;
import org.eclipse.compare.CompareConfiguration;
import org.eclipse.compare.CompareUI;
import org.eclipse.compare.IStreamContentAccessor;
import org.eclipse.compare.ITypedElement;
import org.eclipse.jdt.core.ICompilationUnit;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.jdt.core.IMember;
import org.eclipse.jdt.core.ISourceRange;
import org.eclipse.jdt.core.JavaModelException;
import org.eclipse.jdt.core.SourceRange;
import org.eclipse.jdt.core.dom.ASTNode;
import org.eclipse.jdt.core.dom.AnnotationTypeDeclaration;
import org.eclipse.jdt.core.dom.BodyDeclaration;
import org.eclipse.jdt.core.dom.CompilationUnit;
import org.eclipse.jdt.core.dom.EnumDeclaration;
import org.eclipse.jdt.core.dom.NodeFinder;
import org.eclipse.jdt.core.dom.rewrite.ASTRewrite;
import org.eclipse.jdt.internal.corext.dom.ASTNodes;
import org.eclipse.jdt.internal.corext.util.Resources;
import org.eclipse.jdt.internal.ui.IJavaHelpContextIds;
import org.eclipse.jdt.internal.ui.JavaPlugin;
import org.eclipse.jdt.internal.ui.javaeditor.JavaEditor;
import org.eclipse.jdt.internal.ui.util.ExceptionHandler;
/**
* Provides "Replace from local history" for Java elements.
*/
class JavaReplaceWithEditionActionImpl extends JavaHistoryActionImpl {
	// When true, replaces directly with the previous edition from local history;
	// when false, opens a compare dialog so the user can choose an edition.
	protected boolean fPrevious= false;

	JavaReplaceWithEditionActionImpl(boolean previous) {
		super(true);
		fPrevious= previous;
	}

	/**
	 * Entry point: resolves the selected Java member and its file, then either
	 * replaces it with the previous local-history edition (fPrevious) or opens
	 * a history compare dialog for the user to pick one.
	 */
	@Override
	public void run(ISelection selection) {

		Shell shell= getShell();

		final IMember input= getEditionElement(selection);
		if (input == null) {
			MessageDialog.openInformation(shell, CompareMessages.ReplaceFromHistory_title, CompareMessages.ReplaceFromHistory_invalidSelectionMessage);
			return;
		}

		final IFile file= getFile(input);
		if (file == null) {
			showError();
			return;
		}

		// Ensure the file is writable (e.g. checked out from version control).
		IStatus status= Resources.makeCommittable(file, shell);
		if (!status.isOK()) {
			return;
		}

		if (fPrevious) {
			String errorTitle= CompareMessages.ReplaceFromHistory_title;
			String errorMessage= CompareMessages.ReplaceFromHistory_internalErrorMessage;
			try {
				ITypedElement ti = ElementLocalHistoryPageSource.getPreviousEdition(file, input);
				if (ti == null) {
					MessageDialog.openInformation(shell, errorTitle, CompareMessages.ReplaceFromHistory_parsingErrorMessage);
					return;
				}
				replace(input, file, ti);
			} catch (TeamException e) {
				ExceptionHandler.handle(e, shell, errorTitle, errorMessage);
			}
		} else {
			// Interactive path: open a read-only compare dialog over the element's
			// local history and perform the replace on the edition the user selects.
			JavaElementHistoryPageSource pageSource = JavaElementHistoryPageSource.getInstance();
			CompareConfiguration cc = new CompareConfiguration();
			cc.setLeftEditable(false);
			cc.setRightEditable(false);
			HistoryPageCompareEditorInput ci = new HistoryPageCompareEditorInput(cc, pageSource, input) {
				@Override
				protected void performReplace(Object selectedElement) {
					if (selectedElement instanceof ITypedElement) {
						JavaReplaceWithEditionActionImpl.this.replace(input, file, (ITypedElement)selectedElement);
					}
				}
			};
			ci.setReplace(true);
			ci.setTitle(CompareMessages.JavaReplaceWithEditionActionImpl_0);
			ci.setHelpContextId(IJavaHelpContextIds.REPLACE_ELEMENT_WITH_HISTORY_DIALOG);
			CompareUI.openCompareDialog(ci);
		}
	}

	/**
	 * Connects a text file buffer for the file, delegates the textual replace
	 * to performReplace, and always disconnects the buffer afterwards.
	 */
	public void replace(IMember input, IFile file, ITypedElement element) {

		Shell shell= getShell();

		String errorTitle= CompareMessages.ReplaceFromHistory_title;
		String errorMessage= CompareMessages.ReplaceFromHistory_internalErrorMessage;

		// get the document where to insert the text
		IPath path= file.getFullPath();
		ITextFileBufferManager bufferManager= FileBuffers.getTextFileBufferManager();
		ITextFileBuffer textFileBuffer= null;
		try {
			bufferManager.connect(path, LocationKind.IFILE, null);
			textFileBuffer= bufferManager.getTextFileBuffer(path, LocationKind.IFILE);
			IDocument document= textFileBuffer.getDocument();
			performReplace(input, file, textFileBuffer, document, element);
	 	} catch(InvocationTargetException ex) {
			ExceptionHandler.handle(ex, shell, errorTitle, errorMessage);

		} catch(InterruptedException ex) {
			// shouldn't be called because is not cancelable
			Assert.isTrue(false);

		} catch(CoreException ex) {
			ExceptionHandler.handle(ex, shell, errorTitle, errorMessage);

		} finally {
			// Balance the connect() above; failures here are only logged.
			try {
				if (textFileBuffer != null)
					bufferManager.disconnect(path, LocationKind.IFILE, null);
			} catch (CoreException e) {
				JavaPlugin.log(e);
			}
		}
	}

	/**
	 * Reads the selected edition's content, locates the corresponding AST body
	 * declaration of {@code input} in the current compilation unit, and applies
	 * an ASTRewrite that substitutes the historical source text.
	 */
	private void performReplace(IMember input, IFile file,
			ITextFileBuffer textFileBuffer, IDocument document, ITypedElement ti)
			throws CoreException, JavaModelException,
			InvocationTargetException, InterruptedException {

		if (ti instanceof IStreamContentAccessor) {

			boolean inEditor= beingEdited(file);

			String content= JavaCompareUtilities.readString((IStreamContentAccessor)ti);
			String newContent= trimTextBlock(content, TextUtilities.getDefaultLineDelimiter(document), input.getJavaProject());
			if (newContent == null) {
				showError();
				return;
			}

			ICompilationUnit compilationUnit= input.getCompilationUnit();
			CompilationUnit root= parsePartialCompilationUnit(compilationUnit);

			ISourceRange nameRange= input.getNameRange();
			if (nameRange == null)
				nameRange= input.getSourceRange();
			// workaround for bug in getNameRange(): for AnnotationMembers length is negative
			int length= nameRange.getLength();
			if (length < 0)
				length= 1;
			ASTNode node2= NodeFinder.perform(root, new SourceRange(nameRange.getOffset(), length));
			ASTNode node;
			// Walk up from the name node to the enclosing declaration to replace.
			if (node2.getNodeType() == ASTNode.INITIALIZER)
				node= node2;
			else
				node= ASTNodes.getParent(node2, BodyDeclaration.class);
			if (node == null)
				node= ASTNodes.getParent(node2, AnnotationTypeDeclaration.class);
			if (node == null)
				node= ASTNodes.getParent(node2, EnumDeclaration.class);

			//ASTNode node= getBodyContainer(root, input);
			if (node == null) {
				showError();
				return;
			}

			ASTRewrite rewriter= ASTRewrite.create(root.getAST());
			rewriter.replace(node, rewriter.createStringPlaceholder(newContent, node.getNodeType()), null);

			if (inEditor) {
				JavaEditor je= getEditor(file);
				if (je != null)
					je.setFocus();
			}

			// Format with the project-specific options when available.
			Map<String, String> options= null;
			IJavaProject javaProject= compilationUnit.getJavaProject();
			if (javaProject != null)
				options= javaProject.getOptions(true);

			applyChanges(rewriter, document, textFileBuffer, getShell(), inEditor, options);
		}
	}

	private void showError() {
		MessageDialog.openError(getShell(), CompareMessages.ReplaceFromHistory_title, CompareMessages.ReplaceFromHistory_internalErrorMessage);
	}
}
|
|
/**
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rx.observables;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicReference;
import rx.*;
import rx.Observable;
import rx.Observer;
import rx.annotations.Experimental;
import rx.exceptions.OnErrorNotImplementedException;
import rx.functions.*;
import rx.internal.operators.*;
import rx.internal.util.BlockingUtils;
import rx.internal.util.UtilityFunctions;
import rx.subscriptions.Subscriptions;
/**
* {@code BlockingObservable} is a variety of {@link Observable} that provides blocking operators. It can be
* useful for testing and demo purposes, but is generally inappropriate for production applications (if you
* think you need to use a {@code BlockingObservable} this is usually a sign that you should rethink your
* design).
* <p>
* You construct a {@code BlockingObservable} from an {@code Observable} with {@link #from(Observable)} or
* {@link Observable#toBlocking()}.
* <p>
* The documentation for this interface makes use of a form of marble diagram that has been modified to
* illustrate blocking operators. The following legend explains these marble diagrams:
* <p>
* <img width="640" height="301" src="https://github.com/ReactiveX/RxJava/wiki/images/rx-operators/B.legend.png" alt="">
*
* @see <a href="https://github.com/ReactiveX/RxJava/wiki/Blocking-Observable-Operators">RxJava wiki: Blocking
* Observable Operators</a>
* @param <T>
* the type of item emitted by the {@code BlockingObservable}
*/
public final class BlockingObservable<T> {
private final Observable<? extends T> o;
// Private: instances are created only through the static factory from(Observable).
private BlockingObservable(Observable<? extends T> o) {
    this.o = o;
}
/**
* Converts an {@link Observable} into a {@code BlockingObservable}.
*
* @param o
* the {@link Observable} you want to convert
* @return a {@code BlockingObservable} version of {@code o}
*/
public static <T> BlockingObservable<T> from(final Observable<? extends T> o) {
return new BlockingObservable<T>(o);
}
/**
* Invokes a method on each item emitted by this {@code BlockingObservable} and blocks until the Observable
* completes.
* <p>
* <em>Note:</em> This will block even if the underlying Observable is asynchronous.
* <p>
* <img width="640" height="330" src="https://github.com/ReactiveX/RxJava/wiki/images/rx-operators/B.forEach.png" alt="">
* <p>
* This is similar to {@link Observable#subscribe(Subscriber)}, but it blocks. Because it blocks it does not
* need the {@link Subscriber#onCompleted()} or {@link Subscriber#onError(Throwable)} methods. If the
* underlying Observable terminates with an error, rather than calling {@code onError}, this method will
* throw an exception.
*
* <p>The difference between this method and {@link #subscribe(Action1)} is that the {@code onNext} action
* is executed on the emission thread instead of the current thread.
*
* @param onNext
* the {@link Action1} to invoke for each item emitted by the {@code BlockingObservable}
* @throws RuntimeException
* if an error occurs
* @see <a href="http://reactivex.io/documentation/operators/subscribe.html">ReactiveX documentation: Subscribe</a>
* @see #subscribe(Action1)
*/
public void forEach(final Action1<? super T> onNext) {
final CountDownLatch latch = new CountDownLatch(1);
final AtomicReference<Throwable> exceptionFromOnError = new AtomicReference<Throwable>();
/*
* Use 'subscribe' instead of 'unsafeSubscribe' for Rx contract behavior
* (see http://reactivex.io/documentation/contract.html) as this is the final subscribe in the chain.
*/
Subscription subscription = o.subscribe(new Subscriber<T>() {
@Override
public void onCompleted() {
latch.countDown();
}
@Override
public void onError(Throwable e) {
/*
* If we receive an onError event we set the reference on the
* outer thread so we can git it and throw after the
* latch.await().
*
* We do this instead of throwing directly since this may be on
* a different thread and the latch is still waiting.
*/
exceptionFromOnError.set(e);
latch.countDown();
}
@Override
public void onNext(T args) {
onNext.call(args);
}
});
BlockingUtils.awaitForComplete(latch, subscription);
if (exceptionFromOnError.get() != null) {
if (exceptionFromOnError.get() instanceof RuntimeException) {
throw (RuntimeException) exceptionFromOnError.get();
} else {
throw new RuntimeException(exceptionFromOnError.get());
}
}
}
/**
* Returns an {@link Iterator} that iterates over all items emitted by this {@code BlockingObservable}.
* <p>
* <img width="640" height="315" src="https://github.com/ReactiveX/RxJava/wiki/images/rx-operators/B.getIterator.png" alt="">
*
* @return an {@link Iterator} that can iterate over the items emitted by this {@code BlockingObservable}
* @see <a href="http://reactivex.io/documentation/operators/to.html">ReactiveX documentation: To</a>
*/
public Iterator<T> getIterator() {
return BlockingOperatorToIterator.toIterator(o);
}
/**
* Returns the first item emitted by this {@code BlockingObservable}, or throws
* {@code NoSuchElementException} if it emits no items.
*
* @return the first item emitted by this {@code BlockingObservable}
* @throws NoSuchElementException
* if this {@code BlockingObservable} emits no items
* @see <a href="http://reactivex.io/documentation/operators/first.html">ReactiveX documentation: First</a>
*/
public T first() {
return blockForSingle(o.first());
}
/**
* Returns the first item emitted by this {@code BlockingObservable} that matches a predicate, or throws
* {@code NoSuchElementException} if it emits no such item.
*
* @param predicate
* a predicate function to evaluate items emitted by this {@code BlockingObservable}
* @return the first item emitted by this {@code BlockingObservable} that matches the predicate
* @throws NoSuchElementException
* if this {@code BlockingObservable} emits no such items
* @see <a href="http://reactivex.io/documentation/operators/first.html">ReactiveX documentation: First</a>
*/
public T first(Func1<? super T, Boolean> predicate) {
return blockForSingle(o.first(predicate));
}
/**
* Returns the first item emitted by this {@code BlockingObservable}, or a default value if it emits no
* items.
*
* @param defaultValue
* a default value to return if this {@code BlockingObservable} emits no items
* @return the first item emitted by this {@code BlockingObservable}, or the default value if it emits no
* items
* @see <a href="http://reactivex.io/documentation/operators/first.html">ReactiveX documentation: First</a>
*/
public T firstOrDefault(T defaultValue) {
return blockForSingle(o.map(UtilityFunctions.<T>identity()).firstOrDefault(defaultValue));
}
/**
* Returns the first item emitted by this {@code BlockingObservable} that matches a predicate, or a default
* value if it emits no such items.
*
* @param defaultValue
* a default value to return if this {@code BlockingObservable} emits no matching items
* @param predicate
* a predicate function to evaluate items emitted by this {@code BlockingObservable}
* @return the first item emitted by this {@code BlockingObservable} that matches the predicate, or the
* default value if this {@code BlockingObservable} emits no matching items
* @see <a href="http://reactivex.io/documentation/operators/first.html">ReactiveX documentation: First</a>
*/
public T firstOrDefault(T defaultValue, Func1<? super T, Boolean> predicate) {
return blockForSingle(o.filter(predicate).map(UtilityFunctions.<T>identity()).firstOrDefault(defaultValue));
}
/**
* Returns the last item emitted by this {@code BlockingObservable}, or throws
* {@code NoSuchElementException} if this {@code BlockingObservable} emits no items.
* <p>
* <img width="640" height="315" src="https://github.com/ReactiveX/RxJava/wiki/images/rx-operators/B.last.png" alt="">
*
* @return the last item emitted by this {@code BlockingObservable}
* @throws NoSuchElementException
* if this {@code BlockingObservable} emits no items
* @see <a href="http://reactivex.io/documentation/operators/last.html">ReactiveX documentation: Last</a>
*/
public T last() {
return blockForSingle(o.last());
}
/**
* Returns the last item emitted by this {@code BlockingObservable} that matches a predicate, or throws
* {@code NoSuchElementException} if it emits no such items.
* <p>
* <img width="640" height="315" src="https://github.com/ReactiveX/RxJava/wiki/images/rx-operators/B.last.p.png" alt="">
*
* @param predicate
* a predicate function to evaluate items emitted by the {@code BlockingObservable}
* @return the last item emitted by the {@code BlockingObservable} that matches the predicate
* @throws NoSuchElementException
* if this {@code BlockingObservable} emits no items
* @see <a href="http://reactivex.io/documentation/operators/last.html">ReactiveX documentation: Last</a>
*/
public T last(final Func1<? super T, Boolean> predicate) {
return blockForSingle(o.last(predicate));
}
/**
* Returns the last item emitted by this {@code BlockingObservable}, or a default value if it emits no
* items.
* <p>
* <img width="640" height="310" src="https://github.com/ReactiveX/RxJava/wiki/images/rx-operators/B.lastOrDefault.png" alt="">
*
* @param defaultValue
* a default value to return if this {@code BlockingObservable} emits no items
* @return the last item emitted by the {@code BlockingObservable}, or the default value if it emits no
* items
* @see <a href="http://reactivex.io/documentation/operators/last.html">ReactiveX documentation: Last</a>
*/
public T lastOrDefault(T defaultValue) {
return blockForSingle(o.map(UtilityFunctions.<T>identity()).lastOrDefault(defaultValue));
}
/**
* Returns the last item emitted by this {@code BlockingObservable} that matches a predicate, or a default
* value if it emits no such items.
* <p>
* <img width="640" height="315" src="https://github.com/ReactiveX/RxJava/wiki/images/rx-operators/B.lastOrDefault.p.png" alt="">
*
* @param defaultValue
* a default value to return if this {@code BlockingObservable} emits no matching items
* @param predicate
* a predicate function to evaluate items emitted by this {@code BlockingObservable}
* @return the last item emitted by this {@code BlockingObservable} that matches the predicate, or the
* default value if it emits no matching items
* @see <a href="http://reactivex.io/documentation/operators/last.html">ReactiveX documentation: Last</a>
*/
public T lastOrDefault(T defaultValue, Func1<? super T, Boolean> predicate) {
return blockForSingle(o.filter(predicate).map(UtilityFunctions.<T>identity()).lastOrDefault(defaultValue));
}
/**
* Returns an {@link Iterable} that always returns the item most recently emitted by this
* {@code BlockingObservable}.
* <p>
* <img width="640" height="490" src="https://github.com/ReactiveX/RxJava/wiki/images/rx-operators/B.mostRecent.png" alt="">
*
* @param initialValue
* the initial value that the {@link Iterable} sequence will yield if this
* {@code BlockingObservable} has not yet emitted an item
* @return an {@link Iterable} that on each iteration returns the item that this {@code BlockingObservable}
* has most recently emitted
* @see <a href="http://reactivex.io/documentation/operators/first.html">ReactiveX documentation: First</a>
*/
public Iterable<T> mostRecent(T initialValue) {
return BlockingOperatorMostRecent.mostRecent(o, initialValue);
}
/**
* Returns an {@link Iterable} that blocks until this {@code BlockingObservable} emits another item, then
* returns that item.
* <p>
* <img width="640" height="490" src="https://github.com/ReactiveX/RxJava/wiki/images/rx-operators/B.next.png" alt="">
*
* @return an {@link Iterable} that blocks upon each iteration until this {@code BlockingObservable} emits
* a new item, whereupon the Iterable returns that item
* @see <a href="http://reactivex.io/documentation/operators/takelast.html">ReactiveX documentation: TakeLast</a>
*/
public Iterable<T> next() {
return BlockingOperatorNext.next(o);
}
/**
* Returns an {@link Iterable} that returns the latest item emitted by this {@code BlockingObservable},
* waiting if necessary for one to become available.
* <p>
* If this {@code BlockingObservable} produces items faster than {@code Iterator.next} takes them,
* {@code onNext} events might be skipped, but {@code onError} or {@code onCompleted} events are not.
* <p>
* Note also that an {@code onNext} directly followed by {@code onCompleted} might hide the {@code onNext}
* event.
*
* @return an Iterable that always returns the latest item emitted by this {@code BlockingObservable}
* @see <a href="http://reactivex.io/documentation/operators/first.html">ReactiveX documentation: First</a>
*/
public Iterable<T> latest() {
return BlockingOperatorLatest.latest(o);
}
/**
* If this {@code BlockingObservable} completes after emitting a single item, return that item, otherwise
* throw a {@code NoSuchElementException}.
* <p>
* <img width="640" height="315" src="https://github.com/ReactiveX/RxJava/wiki/images/rx-operators/B.single.png" alt="">
*
* @return the single item emitted by this {@code BlockingObservable}
* @see <a href="http://reactivex.io/documentation/operators/first.html">ReactiveX documentation: First</a>
*/
public T single() {
return blockForSingle(o.single());
}
/**
* If this {@code BlockingObservable} completes after emitting a single item that matches a given predicate,
* return that item, otherwise throw a {@code NoSuchElementException}.
* <p>
* <img width="640" height="315" src="https://github.com/ReactiveX/RxJava/wiki/images/rx-operators/B.single.p.png" alt="">
*
* @param predicate
* a predicate function to evaluate items emitted by this {@link BlockingObservable}
* @return the single item emitted by this {@code BlockingObservable} that matches the predicate
* @see <a href="http://reactivex.io/documentation/operators/first.html">ReactiveX documentation: First</a>
*/
public T single(Func1<? super T, Boolean> predicate) {
return blockForSingle(o.single(predicate));
}
/**
* If this {@code BlockingObservable} completes after emitting a single item, return that item; if it emits
* more than one item, throw an {@code IllegalArgumentException}; if it emits no items, return a default
* value.
* <p>
* <img width="640" height="315" src="https://github.com/ReactiveX/RxJava/wiki/images/rx-operators/B.singleOrDefault.png" alt="">
*
* @param defaultValue
* a default value to return if this {@code BlockingObservable} emits no items
* @return the single item emitted by this {@code BlockingObservable}, or the default value if it emits no
* items
* @see <a href="http://reactivex.io/documentation/operators/first.html">ReactiveX documentation: First</a>
*/
public T singleOrDefault(T defaultValue) {
return blockForSingle(o.map(UtilityFunctions.<T>identity()).singleOrDefault(defaultValue));
}
/**
* If this {@code BlockingObservable} completes after emitting a single item that matches a predicate,
* return that item; if it emits more than one such item, throw an {@code IllegalArgumentException}; if it
* emits no items, return a default value.
* <p>
* <img width="640" height="315" src="https://github.com/ReactiveX/RxJava/wiki/images/rx-operators/B.singleOrDefault.p.png" alt="">
*
* @param defaultValue
* a default value to return if this {@code BlockingObservable} emits no matching items
* @param predicate
* a predicate function to evaluate items emitted by this {@code BlockingObservable}
* @return the single item emitted by the {@code BlockingObservable} that matches the predicate, or the
* default value if no such items are emitted
* @see <a href="http://reactivex.io/documentation/operators/first.html">ReactiveX documentation: First</a>
*/
public T singleOrDefault(T defaultValue, Func1<? super T, Boolean> predicate) {
return blockForSingle(o.filter(predicate).map(UtilityFunctions.<T>identity()).singleOrDefault(defaultValue));
}
/**
* Returns a {@link Future} representing the single value emitted by this {@code BlockingObservable}.
* <p>
* If {@link BlockingObservable} emits more than one item, {@link java.util.concurrent.Future} will receive an
* {@link java.lang.IllegalArgumentException}. If {@link BlockingObservable} is empty, {@link java.util.concurrent.Future}
* will receive an {@link java.util.NoSuchElementException}.
* <p>
* If the {@code BlockingObservable} may emit more than one item, use {@code Observable.toList().toBlocking().toFuture()}.
* <p>
* <img width="640" height="395" src="https://github.com/ReactiveX/RxJava/wiki/images/rx-operators/B.toFuture.png" alt="">
*
* @return a {@link Future} that expects a single item to be emitted by this {@code BlockingObservable}
* @see <a href="http://reactivex.io/documentation/operators/to.html">ReactiveX documentation: To</a>
*/
public Future<T> toFuture() {
return BlockingOperatorToFuture.toFuture(o);
}
/**
* Converts this {@code BlockingObservable} into an {@link Iterable}.
* <p>
* <img width="640" height="315" src="https://github.com/ReactiveX/RxJava/wiki/images/rx-operators/B.toIterable.png" alt="">
*
* @return an {@link Iterable} version of this {@code BlockingObservable}
* @see <a href="http://reactivex.io/documentation/operators/to.html">ReactiveX documentation: To</a>
*/
public Iterable<T> toIterable() {
return new Iterable<T>() {
@Override
public Iterator<T> iterator() {
return getIterator();
}
};
}
/**
* Helper method which handles the actual blocking for a single response.
* <p>
* If the {@link Observable} errors, it will be thrown right away.
*
* @return the actual item
*/
private T blockForSingle(final Observable<? extends T> observable) {
final AtomicReference<T> returnItem = new AtomicReference<T>();
final AtomicReference<Throwable> returnException = new AtomicReference<Throwable>();
final CountDownLatch latch = new CountDownLatch(1);
Subscription subscription = observable.subscribe(new Subscriber<T>() {
@Override
public void onCompleted() {
latch.countDown();
}
@Override
public void onError(final Throwable e) {
returnException.set(e);
latch.countDown();
}
@Override
public void onNext(final T item) {
returnItem.set(item);
}
});
BlockingUtils.awaitForComplete(latch, subscription);
if (returnException.get() != null) {
if (returnException.get() instanceof RuntimeException) {
throw (RuntimeException) returnException.get();
} else {
throw new RuntimeException(returnException.get());
}
}
return returnItem.get();
}
/**
* Runs the source observable to a terminal event, ignoring any values and rethrowing any exception.
*/
@Experimental
public void subscribe() {
final CountDownLatch cdl = new CountDownLatch(1);
final Throwable[] error = { null };
Subscription s = o.subscribe(new Subscriber<T>() {
@Override
public void onNext(T t) {
}
@Override
public void onError(Throwable e) {
error[0] = e;
cdl.countDown();
}
@Override
public void onCompleted() {
cdl.countDown();
}
});
BlockingUtils.awaitForComplete(cdl, s);
Throwable e = error[0];
if (e != null) {
if (e instanceof RuntimeException) {
throw (RuntimeException)e;
} else {
throw new RuntimeException(e);
}
}
}
/**
* Subscribes to the source and calls back the Observer methods on the current thread.
* @param observer the observer to call event methods on
*/
@Experimental
public void subscribe(Observer<? super T> observer) {
final NotificationLite<T> nl = NotificationLite.instance();
final BlockingQueue<Object> queue = new LinkedBlockingQueue<Object>();
Subscription s = o.subscribe(new Subscriber<T>() {
@Override
public void onNext(T t) {
queue.offer(nl.next(t));
}
@Override
public void onError(Throwable e) {
queue.offer(nl.error(e));
}
@Override
public void onCompleted() {
queue.offer(nl.completed());
}
});
try {
for (;;) {
Object o = queue.poll();
if (o == null) {
o = queue.take();
}
if (nl.accept(observer, o)) {
return;
}
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
observer.onError(e);
} finally {
s.unsubscribe();
}
}
/** Constant to indicate the onStart method should be called. */
static final Object ON_START = new Object();
/** Constant indicating the setProducer method should be called. */
static final Object SET_PRODUCER = new Object();
/** Indicates an unsubscripton happened */
static final Object UNSUBSCRIBE = new Object();
/**
* Subscribes to the source and calls the Subscriber methods on the current thread.
* <p>
* The unsubscription and backpressure is composed through.
* @param subscriber the subscriber to forward events and calls to in the current thread
*/
@Experimental
public void subscribe(Subscriber<? super T> subscriber) {
final NotificationLite<T> nl = NotificationLite.instance();
final BlockingQueue<Object> queue = new LinkedBlockingQueue<Object>();
final Producer[] theProducer = { null };
Subscriber<T> s = new Subscriber<T>() {
@Override
public void onNext(T t) {
queue.offer(nl.next(t));
}
@Override
public void onError(Throwable e) {
queue.offer(nl.error(e));
}
@Override
public void onCompleted() {
queue.offer(nl.completed());
}
@Override
public void setProducer(Producer p) {
theProducer[0] = p;
queue.offer(SET_PRODUCER);
}
@Override
public void onStart() {
queue.offer(ON_START);
}
};
subscriber.add(s);
subscriber.add(Subscriptions.create(new Action0() {
@Override
public void call() {
queue.offer(UNSUBSCRIBE);
}
}));
o.subscribe(s);
try {
for (;;) {
if (subscriber.isUnsubscribed()) {
break;
}
Object o = queue.poll();
if (o == null) {
o = queue.take();
}
if (subscriber.isUnsubscribed() || o == UNSUBSCRIBE) {
break;
}
if (o == ON_START) {
subscriber.onStart();
} else
if (o == SET_PRODUCER) {
subscriber.setProducer(theProducer[0]);
} else
if (nl.accept(subscriber, o)) {
return;
}
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
subscriber.onError(e);
} finally {
s.unsubscribe();
}
}
/**
* Subscribes to the source and calls the given action on the current thread and rethrows any exception wrapped
* into OnErrorNotImplementedException.
*
* <p>The difference between this method and {@link #forEach(Action1)} is that the
* action is always executed on the current thread.
*
* @param onNext the callback action for each source value
* @see #forEach(Action1)
*/
@Experimental
public void subscribe(final Action1<? super T> onNext) {
subscribe(onNext, new Action1<Throwable>() {
@Override
public void call(Throwable t) {
throw new OnErrorNotImplementedException(t);
}
}, Actions.empty());
}
/**
* Subscribes to the source and calls the given actions on the current thread.
* @param onNext the callback action for each source value
* @param onError the callback action for an error event
*/
@Experimental
public void subscribe(final Action1<? super T> onNext, final Action1<? super Throwable> onError) {
subscribe(onNext, onError, Actions.empty());
}
/**
* Subscribes to the source and calls the given actions on the current thread.
* @param onNext the callback action for each source value
* @param onError the callback action for an error event
* @param onCompleted the callback action for the completion event.
*/
@Experimental
public void subscribe(final Action1<? super T> onNext, final Action1<? super Throwable> onError, final Action0 onCompleted) {
subscribe(new Observer<T>() {
@Override
public void onNext(T t) {
onNext.call(t);
}
@Override
public void onError(Throwable e) {
onError.call(e);
}
@Override
public void onCompleted() {
onCompleted.call();
}
});
}
}
|
|
package com.smartdevicelink.test.rpc.requests;
import java.util.Hashtable;
import org.json.JSONException;
import org.json.JSONObject;
import com.smartdevicelink.marshal.JsonRPCMarshaller;
import com.smartdevicelink.protocol.enums.FunctionID;
import com.smartdevicelink.proxy.RPCMessage;
import com.smartdevicelink.proxy.rpc.GetVehicleData;
import com.smartdevicelink.test.BaseRpcTests;
import com.smartdevicelink.test.JsonUtils;
import com.smartdevicelink.test.Test;
import com.smartdevicelink.test.json.rpc.JsonFileReader;
/**
 * This is a unit test class for the SmartDeviceLink library project class :
 * {@link com.smartdevicelink.proxy.rpc.GetVehicleData}
 */
public class GetVehicleDataTests extends BaseRpcTests {

    /**
     * Builds a {@link GetVehicleData} request with every subscription flag set
     * to the general boolean test value.
     */
    @Override
    protected RPCMessage createMessage(){
        GetVehicleData msg = new GetVehicleData();

        msg.setSpeed(Test.GENERAL_BOOLEAN);
        msg.setRpm(Test.GENERAL_BOOLEAN);
        msg.setExternalTemperature(Test.GENERAL_BOOLEAN);
        msg.setFuelLevel(Test.GENERAL_BOOLEAN);
        msg.setPrndl(Test.GENERAL_BOOLEAN);
        msg.setTirePressure(Test.GENERAL_BOOLEAN);
        msg.setEngineTorque(Test.GENERAL_BOOLEAN);
        msg.setOdometer(Test.GENERAL_BOOLEAN);
        msg.setGps(Test.GENERAL_BOOLEAN);
        msg.setFuelLevelState(Test.GENERAL_BOOLEAN);
        msg.setInstantFuelConsumption(Test.GENERAL_BOOLEAN);
        msg.setVin(Test.GENERAL_BOOLEAN);
        msg.setBeltStatus(Test.GENERAL_BOOLEAN);
        msg.setBodyInformation(Test.GENERAL_BOOLEAN);
        msg.setDeviceStatus(Test.GENERAL_BOOLEAN);
        msg.setDriverBraking(Test.GENERAL_BOOLEAN);
        msg.setWiperStatus(Test.GENERAL_BOOLEAN);
        msg.setHeadLampStatus(Test.GENERAL_BOOLEAN);
        msg.setAccPedalPosition(Test.GENERAL_BOOLEAN);
        msg.setSteeringWheelAngle(Test.GENERAL_BOOLEAN);
        msg.setECallInfo(Test.GENERAL_BOOLEAN);
        msg.setAirbagStatus(Test.GENERAL_BOOLEAN);
        msg.setEmergencyEvent(Test.GENERAL_BOOLEAN);
        msg.setClusterModeStatus(Test.GENERAL_BOOLEAN);
        msg.setMyKey(Test.GENERAL_BOOLEAN);

        return msg;
    }

    @Override
    protected String getMessageType(){
        return RPCMessage.KEY_REQUEST;
    }

    @Override
    protected String getCommandType(){
        return FunctionID.GET_VEHICLE_DATA.toString();
    }

    /**
     * Builds the JSON parameter object expected to match the message created by
     * {@link #createMessage()} — one boolean entry per vehicle-data key.
     */
    @Override
    protected JSONObject getExpectedParameters(int sdlVersion){
        JSONObject result = new JSONObject();

        try{
            result.put(GetVehicleData.KEY_SPEED, Test.GENERAL_BOOLEAN);
            result.put(GetVehicleData.KEY_RPM, Test.GENERAL_BOOLEAN);
            result.put(GetVehicleData.KEY_EXTERNAL_TEMPERATURE, Test.GENERAL_BOOLEAN);
            result.put(GetVehicleData.KEY_FUEL_LEVEL, Test.GENERAL_BOOLEAN);
            result.put(GetVehicleData.KEY_PRNDL, Test.GENERAL_BOOLEAN);
            result.put(GetVehicleData.KEY_TIRE_PRESSURE, Test.GENERAL_BOOLEAN);
            result.put(GetVehicleData.KEY_ENGINE_TORQUE, Test.GENERAL_BOOLEAN);
            result.put(GetVehicleData.KEY_ODOMETER, Test.GENERAL_BOOLEAN);
            result.put(GetVehicleData.KEY_GPS, Test.GENERAL_BOOLEAN);
            result.put(GetVehicleData.KEY_FUEL_LEVEL_STATE, Test.GENERAL_BOOLEAN);
            result.put(GetVehicleData.KEY_INSTANT_FUEL_CONSUMPTION, Test.GENERAL_BOOLEAN);
            result.put(GetVehicleData.KEY_VIN, Test.GENERAL_BOOLEAN);
            result.put(GetVehicleData.KEY_BELT_STATUS, Test.GENERAL_BOOLEAN);
            result.put(GetVehicleData.KEY_BODY_INFORMATION, Test.GENERAL_BOOLEAN);
            result.put(GetVehicleData.KEY_DEVICE_STATUS, Test.GENERAL_BOOLEAN);
            result.put(GetVehicleData.KEY_DRIVER_BRAKING, Test.GENERAL_BOOLEAN);
            result.put(GetVehicleData.KEY_WIPER_STATUS, Test.GENERAL_BOOLEAN);
            result.put(GetVehicleData.KEY_HEAD_LAMP_STATUS, Test.GENERAL_BOOLEAN);
            result.put(GetVehicleData.KEY_ACC_PEDAL_POSITION, Test.GENERAL_BOOLEAN);
            result.put(GetVehicleData.KEY_STEERING_WHEEL_ANGLE, Test.GENERAL_BOOLEAN);
            result.put(GetVehicleData.KEY_E_CALL_INFO, Test.GENERAL_BOOLEAN);
            result.put(GetVehicleData.KEY_AIRBAG_STATUS, Test.GENERAL_BOOLEAN);
            result.put(GetVehicleData.KEY_EMERGENCY_EVENT, Test.GENERAL_BOOLEAN);
            result.put(GetVehicleData.KEY_CLUSTER_MODE_STATUS, Test.GENERAL_BOOLEAN);
            result.put(GetVehicleData.KEY_MY_KEY, Test.GENERAL_BOOLEAN);
        }catch(JSONException e){
            fail(Test.JSON_FAIL);
        }

        return result;
    }

    /**
     * Tests the expected values of the RPC message.
     */
    public void testRpcValues () {
        // Valid Tests
        assertTrue(Test.TRUE, ( (GetVehicleData) msg ).getSpeed());
        assertTrue(Test.TRUE, ( (GetVehicleData) msg ).getRpm());
        assertTrue(Test.TRUE, ( (GetVehicleData) msg ).getExternalTemperature());
        assertTrue(Test.TRUE, ( (GetVehicleData) msg ).getFuelLevel());
        assertTrue(Test.TRUE, ( (GetVehicleData) msg ).getPrndl());
        assertTrue(Test.TRUE, ( (GetVehicleData) msg ).getTirePressure());
        assertTrue(Test.TRUE, ( (GetVehicleData) msg ).getEngineTorque());
        assertTrue(Test.TRUE, ( (GetVehicleData) msg ).getOdometer());
        assertTrue(Test.TRUE, ( (GetVehicleData) msg ).getGps());
        assertTrue(Test.TRUE, ( (GetVehicleData) msg ).getFuelLevelState());
        assertTrue(Test.TRUE, ( (GetVehicleData) msg ).getInstantFuelConsumption());
        assertTrue(Test.TRUE, ( (GetVehicleData) msg ).getVin());
        assertTrue(Test.TRUE, ( (GetVehicleData) msg ).getBeltStatus());
        assertTrue(Test.TRUE, ( (GetVehicleData) msg ).getBodyInformation());
        // NOTE(review): a duplicate getVin() assertion was removed here; every
        // property set in createMessage() is asserted exactly once below.
        assertTrue(Test.TRUE, ( (GetVehicleData) msg ).getDeviceStatus());
        assertTrue(Test.TRUE, ( (GetVehicleData) msg ).getDriverBraking());
        assertTrue(Test.TRUE, ( (GetVehicleData) msg ).getWiperStatus());
        assertTrue(Test.TRUE, ( (GetVehicleData) msg ).getHeadLampStatus());
        assertTrue(Test.TRUE, ( (GetVehicleData) msg ).getAccPedalPosition());
        assertTrue(Test.TRUE, ( (GetVehicleData) msg ).getSteeringWheelAngle());
        assertTrue(Test.TRUE, ( (GetVehicleData) msg ).getECallInfo());
        assertTrue(Test.TRUE, ( (GetVehicleData) msg ).getAirbagStatus());
        assertTrue(Test.TRUE, ( (GetVehicleData) msg ).getEmergencyEvent());
        assertTrue(Test.TRUE, ( (GetVehicleData) msg ).getClusterModeStatus());
        assertTrue(Test.TRUE, ( (GetVehicleData) msg ).getMyKey());

        // Invalid/Null Tests: a freshly constructed request has every flag unset.
        GetVehicleData msg = new GetVehicleData();
        assertNotNull(Test.NOT_NULL, msg);
        testNullBase(msg);

        assertNull(Test.NULL, msg.getAccPedalPosition());
        assertNull(Test.NULL, msg.getAirbagStatus());
        assertNull(Test.NULL, msg.getBeltStatus());
        assertNull(Test.NULL, msg.getDriverBraking());
        assertNull(Test.NULL, msg.getFuelLevel());
        assertNull(Test.NULL, msg.getTirePressure());
        assertNull(Test.NULL, msg.getWiperStatus());
        assertNull(Test.NULL, msg.getGps());
        assertNull(Test.NULL, msg.getSpeed());
        assertNull(Test.NULL, msg.getRpm());
        assertNull(Test.NULL, msg.getFuelLevelState());
        assertNull(Test.NULL, msg.getInstantFuelConsumption());
        assertNull(Test.NULL, msg.getExternalTemperature());
        assertNull(Test.NULL, msg.getVin());
        assertNull(Test.NULL, msg.getPrndl());
        assertNull(Test.NULL, msg.getOdometer());
        assertNull(Test.NULL, msg.getBodyInformation());
        assertNull(Test.NULL, msg.getDeviceStatus());
        assertNull(Test.NULL, msg.getHeadLampStatus());
        assertNull(Test.NULL, msg.getEngineTorque());
        assertNull(Test.NULL, msg.getSteeringWheelAngle());
        assertNull(Test.NULL, msg.getECallInfo());
        assertNull(Test.NULL, msg.getEmergencyEvent());
        assertNull(Test.NULL, msg.getClusterModeStatus());
        assertNull(Test.NULL, msg.getMyKey());
    }

    /**
     * Tests a valid JSON construction of this RPC message.
     */
    public void testJsonConstructor () {
        JSONObject commandJson = JsonFileReader.readId(this.mContext, getCommandType(), getMessageType());
        assertNotNull(Test.NOT_NULL, commandJson);

        try {
            // Round-trip the canned JSON through the marshaller and compare field by field.
            Hashtable<String, Object> hash = JsonRPCMarshaller.deserializeJSONObject(commandJson);
            GetVehicleData cmd = new GetVehicleData(hash);
            JSONObject body = JsonUtils.readJsonObjectFromJsonObject(commandJson, getMessageType());
            assertNotNull(Test.NOT_NULL, body);

            // Test everything in the json body.
            assertEquals("Command name doesn't match input name", JsonUtils.readStringFromJsonObject(body, RPCMessage.KEY_FUNCTION_NAME), cmd.getFunctionName());
            assertEquals("Correlation ID doesn't match input ID", JsonUtils.readIntegerFromJsonObject(body, RPCMessage.KEY_CORRELATION_ID), cmd.getCorrelationID());

            JSONObject parameters = JsonUtils.readJsonObjectFromJsonObject(body, RPCMessage.KEY_PARAMETERS);
            assertEquals(Test.MATCH, JsonUtils.readBooleanFromJsonObject(parameters, GetVehicleData.KEY_SPEED), cmd.getSpeed());
            assertEquals(Test.MATCH, JsonUtils.readBooleanFromJsonObject(parameters, GetVehicleData.KEY_RPM), cmd.getRpm());
            assertEquals(Test.MATCH, JsonUtils.readBooleanFromJsonObject(parameters, GetVehicleData.KEY_EXTERNAL_TEMPERATURE), cmd.getExternalTemperature());
            assertEquals(Test.MATCH, JsonUtils.readBooleanFromJsonObject(parameters, GetVehicleData.KEY_FUEL_LEVEL), cmd.getFuelLevel());
            assertEquals(Test.MATCH, JsonUtils.readBooleanFromJsonObject(parameters, GetVehicleData.KEY_VIN), cmd.getVin());
            assertEquals(Test.MATCH, JsonUtils.readBooleanFromJsonObject(parameters, GetVehicleData.KEY_PRNDL), cmd.getPrndl());
            assertEquals(Test.MATCH, JsonUtils.readBooleanFromJsonObject(parameters, GetVehicleData.KEY_TIRE_PRESSURE), cmd.getTirePressure());
            assertEquals(Test.MATCH, JsonUtils.readBooleanFromJsonObject(parameters, GetVehicleData.KEY_ENGINE_TORQUE), cmd.getEngineTorque());
            assertEquals(Test.MATCH, JsonUtils.readBooleanFromJsonObject(parameters, GetVehicleData.KEY_ODOMETER), cmd.getOdometer());
            assertEquals(Test.MATCH, JsonUtils.readBooleanFromJsonObject(parameters, GetVehicleData.KEY_GPS), cmd.getGps());
            assertEquals(Test.MATCH, JsonUtils.readBooleanFromJsonObject(parameters, GetVehicleData.KEY_FUEL_LEVEL_STATE), cmd.getFuelLevelState());
            assertEquals(Test.MATCH, JsonUtils.readBooleanFromJsonObject(parameters, GetVehicleData.KEY_INSTANT_FUEL_CONSUMPTION), cmd.getInstantFuelConsumption());
            assertEquals(Test.MATCH, JsonUtils.readBooleanFromJsonObject(parameters, GetVehicleData.KEY_BELT_STATUS), cmd.getBeltStatus());
            assertEquals(Test.MATCH, JsonUtils.readBooleanFromJsonObject(parameters, GetVehicleData.KEY_BODY_INFORMATION), cmd.getBodyInformation());
            assertEquals(Test.MATCH, JsonUtils.readBooleanFromJsonObject(parameters, GetVehicleData.KEY_DEVICE_STATUS), cmd.getDeviceStatus());
            assertEquals(Test.MATCH, JsonUtils.readBooleanFromJsonObject(parameters, GetVehicleData.KEY_DRIVER_BRAKING), cmd.getDriverBraking());
            assertEquals(Test.MATCH, JsonUtils.readBooleanFromJsonObject(parameters, GetVehicleData.KEY_WIPER_STATUS), cmd.getWiperStatus());
            assertEquals(Test.MATCH, JsonUtils.readBooleanFromJsonObject(parameters, GetVehicleData.KEY_HEAD_LAMP_STATUS), cmd.getHeadLampStatus());
            assertEquals(Test.MATCH, JsonUtils.readBooleanFromJsonObject(parameters, GetVehicleData.KEY_ACC_PEDAL_POSITION), cmd.getAccPedalPosition());
            assertEquals(Test.MATCH, JsonUtils.readBooleanFromJsonObject(parameters, GetVehicleData.KEY_STEERING_WHEEL_ANGLE), cmd.getSteeringWheelAngle());
            assertEquals(Test.MATCH, JsonUtils.readBooleanFromJsonObject(parameters, GetVehicleData.KEY_E_CALL_INFO), cmd.getECallInfo());
            assertEquals(Test.MATCH, JsonUtils.readBooleanFromJsonObject(parameters, GetVehicleData.KEY_AIRBAG_STATUS), cmd.getAirbagStatus());
            assertEquals(Test.MATCH, JsonUtils.readBooleanFromJsonObject(parameters, GetVehicleData.KEY_EMERGENCY_EVENT), cmd.getEmergencyEvent());
            assertEquals(Test.MATCH, JsonUtils.readBooleanFromJsonObject(parameters, GetVehicleData.KEY_CLUSTER_MODE_STATUS), cmd.getClusterModeStatus());
            assertEquals(Test.MATCH, JsonUtils.readBooleanFromJsonObject(parameters, GetVehicleData.KEY_MY_KEY), cmd.getMyKey());
        } catch (JSONException e) {
            fail(Test.JSON_FAIL);
        }
    }
}
|
|
/*
** 2014 July 10
**
** The author disclaims copyright to this source code. In place of
** a legal notice, here is a blessing:
** May you do good and not evil.
** May you find forgiveness for yourself and forgive others.
** May you share freely, never taking more than you give.
*/
package info.ata4.unity.cli.extract.mesh;
import info.ata4.unity.DisUnity;
import info.ata4.unity.engine.Mesh;
import info.ata4.unity.engine.SubMesh;
import info.ata4.unity.engine.struct.Color32;
import info.ata4.unity.engine.struct.Vector2f;
import info.ata4.unity.engine.struct.Vector3f;
import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
*
* @author Nico Bergemann <barracuda415 at yahoo.de>
*/
/**
 * Writes {@link MeshData} as ASCII PLY ("Polygon File Format") files.
 *
 * PLY supports only one mesh per file, so each sub-mesh of the source mesh
 * is exported to its own file (the file name is suffixed with the sub-mesh
 * index when there is more than one sub-mesh).
 */
class PlyWriter extends MeshWriter {

    // Output stream of the PLY file currently being written;
    // (re)assigned once per sub-mesh inside write().
    private PrintStream ps;

    PlyWriter(MeshHandler handler) {
        super(handler);
    }

    /**
     * Exports every sub-mesh of the given mesh data to its own PLY file.
     *
     * @param meshData mesh, per-vertex attributes and per-sub-mesh triangle lists
     * @throws IOException if an output file cannot be created
     */
    @Override
    void write(MeshData meshData) throws IOException {
        Mesh mesh = meshData.getMesh();

        List<Vector3f> vertices = meshData.getVertices();
        List<Vector3f> normals = meshData.getNormals();
        List<Vector2f> uv1 = meshData.getUV1();
        List<Vector2f> uv2 = meshData.getUV2();
        List<Color32> colors = meshData.getColors();

        // PLY can't have more than one mesh per file, so write one file per
        // sub-mesh
        final int subMeshCount = mesh.subMeshes.size();
        final int vertsPerFace = 3; // triangles only

        for (int i = 0; i < subMeshCount; i++) {
            SubMesh subMesh = mesh.subMeshes.get(i);

            // use prefix if there's more than one submesh
            String name = mesh.name;
            if (subMeshCount > 1) {
                name = String.format("%s_%d", name, i);
            }

            try (PrintStream ps_ = handler.getPrintStream(name, "ply")) {
                ps = ps_;

                // write sub-mesh triangles
                List<Integer> subMeshTriangles = meshData.getTriangles().get(i);

                final int firstVertex = subMesh.firstVertex.intValue();
                final int vertexCount = subMesh.vertexCount.intValue();
                final int faceCount = subMeshTriangles.size() / vertsPerFace;

                // write header; optional vertex attributes are declared only
                // when they are actually present for this mesh
                writeHeaderStart();
                writeComment("Created by " + DisUnity.getSignature());
                writeVertexHeader(vertexCount, !normals.isEmpty(), !uv1.isEmpty(),
                        !uv2.isEmpty(), !colors.isEmpty());
                writeFaceHeader(faceCount);
                writeHeaderEnd();

                // write vertices
                for (int j = firstVertex; j < firstVertex + vertexCount; j++) {
                    Vector3f v = vertices.get(j);
                    Vector3f vn = normals.isEmpty() ? null : normals.get(j);
                    Vector2f vt1 = uv1.isEmpty() ? null : uv1.get(j);
                    Vector2f vt2 = uv2.isEmpty() ? null : uv2.get(j);
                    Color32 c = colors.isEmpty() ? null : colors.get(j);
                    writeVertex(v, vn, vt1, vt2, c);
                }

                // write faces; indices are rebased to be relative to the first
                // vertex of this sub-mesh, then flushed in groups of three
                List<Integer> faceTriangles = new ArrayList<>();
                for (int j = 0; j < subMeshTriangles.size(); j++) {
                    faceTriangles.add(subMeshTriangles.get(j) - firstVertex);
                    if (faceTriangles.size() == vertsPerFace) {
                        writeFace(faceTriangles);
                        faceTriangles.clear();
                    }
                }
            }
        }
    }

    // Terminates the current output line.
    private void writeLine() {
        ps.println();
    }

    // Emits the magic line and format declaration that open every PLY header.
    private void writeHeaderStart() {
        ps.println("ply");
        ps.println("format ascii 1.0");
    }

    // Emits a "comment ..." header line.
    private void writeComment(String comment) {
        ps.print("comment ");
        ps.println(comment);
    }

    /**
     * Declares the vertex element and its properties. Position (x/y/z) is
     * always declared; normals, both UV sets and vertex colors are declared
     * only when the corresponding flag is set.
     */
    private void writeVertexHeader(int elements, boolean normals, boolean uv1, boolean uv2, boolean colors) {
        ps.print("element vertex ");
        ps.println(elements);
        ps.println("property float x");
        ps.println("property float y");
        ps.println("property float z");
        if (normals) {
            ps.println("property float nx");
            ps.println("property float ny");
            ps.println("property float nz");
        }
        if (uv1) {
            ps.println("property float s");
            ps.println("property float t");
        }
        if (uv2) {
            ps.println("property float s2");
            ps.println("property float t2");
        }
        if (colors) {
            ps.println("property uchar red");
            ps.println("property uchar green");
            ps.println("property uchar blue");
        }
    }

    // Declares the face element as variable-length integer index lists.
    private void writeFaceHeader(int elements) {
        ps.print("element face ");
        ps.println(elements);
        ps.println("property list uchar int vertex_indices");
    }

    // Closes the PLY header; element data follows immediately after.
    private void writeHeaderEnd() {
        ps.println("end_header");
    }

    // Writes a UV pair; the V coordinate is flipped (1 - y), presumably to
    // convert between top-left and bottom-left texture origins -- NOTE(review):
    // intent not verifiable from this file.
    private void writeVector(Vector2f v) {
        ps.print(v.x);
        ps.print(' ');
        ps.print(1 - v.y);
    }

    // Writes a position/normal as (-x, -z, y): the x axis is negated and the
    // y/z axes are swapped. writeFace() reverses the triangle winding to
    // compensate for the x flip.
    private void writeVector(Vector3f v) {
        ps.print(-v.x);
        ps.print(' ');
        ps.print(-v.z);
        ps.print(' ');
        ps.print(v.y);
    }

    // Writes an RGB triple (alpha is not part of the declared properties).
    private void writeColor(Color32 c) {
        ps.print(c.r);
        ps.print(' ');
        ps.print(c.g);
        ps.print(' ');
        ps.print(c.b);
    }

    /**
     * Writes one vertex line. Optional attributes (normal, UV sets, color)
     * are appended only when non-null, matching the header declared by
     * {@link #writeVertexHeader}.
     */
    private void writeVertex(Vector3f v, Vector3f vn, Vector2f vt1, Vector2f vt2, Color32 c) {
        writeVector(v);

        if (vn != null) {
            ps.print(' ');
            writeVector(vn);
        }

        if (vt1 != null) {
            ps.print(' ');
            writeVector(vt1);
        }

        if (vt2 != null) {
            ps.print(' ');
            writeVector(vt2);
        }

        if (c != null) {
            ps.print(' ');
            writeColor(c);
        }

        writeLine();
    }

    /**
     * Writes one face line: the index count followed by the indices.
     * Note: reverses the given list IN PLACE (callers clear it afterwards),
     * and leaves a trailing space before the newline.
     */
    private void writeFace(List<Integer> indices) {
        // reverse winding to fix normals after x axis has been flipped
        Collections.reverse(indices);

        ps.print(indices.size());
        ps.print(' ');

        for (Integer index : indices) {
            ps.print(index);
            ps.print(' ');
        }

        ps.println();
    }
}
|
|
package com.cjbarker.wb.ws;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpEntityEnclosingRequest;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.HttpVersion;
import org.apache.http.StatusLine;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.conn.params.ConnManagerParams;
import org.apache.http.conn.params.ConnPerRoute;
import org.apache.http.conn.params.ConnPerRouteBean;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicHeader;
import org.apache.http.params.HttpConnectionParams;
import org.apache.http.params.HttpParams;
import org.apache.http.params.HttpProtocolParams;
import com.cjbarker.wb.Util;
import android.util.Log;
/**
* Class provides HTTP request/response handling for weather API clients.
*
* @author CJ Barker
*/
public class ClientRequest {
    // static vars
    private static final String TAG = "HttpClient";
    private static final String DEFAULT_USER_AGENT = "WB/1.0";
    private static final String DEFAULT_ACCEPT_TYPES = "application/json,application/xml";

    /** HTTP methods supported by this client. */
    public static enum Method { GET };

    // predefined headers (immutable, shared across requests)
    private static final BasicHeader HEADER_CONTENT_TYPE =
            new BasicHeader("Content-Type", "application/xml");
    private static final BasicHeader HEADER_ACCEPT =
            new BasicHeader("Accept", "application/xml");
    private static final BasicHeader HEADER_CONNECTION =
            new BasicHeader("Connection", "Keep-Alive");
    private static final BasicHeader HEADER_CACHE_CONTROL =
            new BasicHeader("Cache-Control", "no-cache");
    private static final BasicHeader HEADER_PRAGMA =
            new BasicHeader("Pragma", "no-cache");

    // member vars
    private String baseUri;
    private String userAgent;
    private String acceptTypes;

    // hide the no-arg constructor: a request is meaningless without a URI
    private ClientRequest() {}

    /**
     * Default constructor
     * @param uri URI to perform HTTP request on
     */
    public ClientRequest(String uri) {
        this(uri, DEFAULT_USER_AGENT);
    }

    /**
     * Constructor
     * @param uri URI to perform HTTP request on
     * @param userAgent HTTP user-agent header
     */
    public ClientRequest(String uri, String userAgent) {
        // BUG FIX: previously delegated with DEFAULT_USER_AGENT, silently
        // discarding the caller-supplied userAgent argument.
        this(uri, userAgent, DEFAULT_ACCEPT_TYPES);
    }

    /**
     * Constructor
     * @param uri URI to perform HTTP request on
     * @param userAgent HTTP user-agent header
     * @param acceptTypes HTTP accept-type header
     */
    public ClientRequest(String uri, String userAgent, String acceptTypes) {
        this.baseUri = uri;
        this.userAgent = userAgent;
        this.acceptTypes = acceptTypes;
    }

    /** Convenience overload: no extra resource path, no payload. */
    public ClientResponse sendRequest(Method method) throws IOException {
        return sendRequest(method, null, null);
    }

    /** Convenience overload: resource path but no payload. */
    public ClientResponse sendRequest(Method method, String resource) throws IOException {
        return sendRequest(method, resource, null);
    }

    /**
     * Performs HTTP request and returns the associated response.
     *
     * @param method HTTP method to execute
     * @param resource Resource parameter(s) to append to baseUri; leading and
     *                 trailing slashes are normalized away
     * @param payload Optional request payload to send in HTTP body (only valid
     *                for entity-enclosing methods)
     * @return Parsed client response, or {@code null} when the server answered
     *         with a status other than 200/201/304
     * @throws IOException Occurs if HTTP IOException encountered
     * @throws IllegalArgumentException if a payload is supplied for a method
     *         that cannot carry a request body (e.g. GET)
     */
    public ClientResponse sendRequest(Method method, String resource, byte[] payload) throws IOException {
        ClientResponse clientResp = null;

        // normalize the resource path: strip one leading and one trailing slash
        if (resource == null || resource.length() <= 0) {
            resource = "";
        }
        else {
            if (resource.charAt(0) == '/' && resource.length() >= 2) {
                resource = resource.substring(1);
            }
            resource = (resource.charAt(resource.length()-1) == '/') ? resource.substring(0, resource.length()-1) : resource;
        }

        final String url = (Util.isEmpty(resource)) ? this.baseUri : this.baseUri + "/" + resource;

        long beginMS = System.currentTimeMillis();
        Log.d(TAG, "HTTP ["+ method +"] to "+ url + " at time (ms): " + beginMS);

        URI uri = URI.create(url);
        HttpHost host = new HttpHost(uri.getHost(), uri.getPort(), uri.getScheme());

        HttpClient client = getHttpClient();
        HttpRequestBase request = getRequest(method, url);
        request.setHeaders(getHeaders());

        if (payload != null && payload.length > 0) {
            // BUG FIX: the unconditional cast used to throw a bare
            // ClassCastException for GET requests; fail with a clear message.
            if (!(request instanceof HttpEntityEnclosingRequest)) {
                throw new IllegalArgumentException("HTTP method " + method + " does not support a request body");
            }
            HttpEntity ent = new ByteArrayEntity(payload);
            ((HttpEntityEnclosingRequest) request).setEntity(ent);
        }

        HttpResponse response = null;
        try {
            response = client.execute(host, request);
            if (response == null) {
                Log.e(TAG, "Failed to get HTTP response");
                throw new IOException("Failed to get HTTP response");
            }

            StatusLine status = response.getStatusLine();
            // only 200 (OK), 201 (Created) and 304 (Not Modified) are treated
            // as success; anything else yields a null ClientResponse
            if (status.getStatusCode() == HttpStatus.SC_OK || status.getStatusCode() == HttpStatus.SC_CREATED || status.getStatusCode() == HttpStatus.SC_NOT_MODIFIED)
            {
                Header[] headers = response.getAllHeaders();
                byte[] data = getResponseEntity(response);
                if (data != null && data.length > 0) {
                    Log.d(TAG, "Also read data of length = " + data.length);
                }
                else {
                    Log.d(TAG, "Data is null?");
                }
                clientResp = new ClientResponse(status.getStatusCode(), data, headers);
            }
            else {
                Log.e(TAG, "Error connecting to "+ uri +" with response code: " + status.getStatusCode());
            }
        }
        catch (IOException ioe) {
            Log.e(TAG, "Encountered IOException for HTTP ["+ method +"] to "+ uri);
            throw ioe;
        }
        finally {
            long endMS = System.currentTimeMillis();
            Log.d(TAG, "End request to "+uri+" at time (ms): " + endMS + ". It took about " + (endMS - beginMS) + " ms.");
        }

        return clientResp;
    }

    /**
     * Builds HTTP Client w/ associated parameters
     * @return HttpClient
     */
    private HttpClient getHttpClient() {
        HttpClient httpClient = new DefaultHttpClient();
        HttpParams params = httpClient.getParams();

        // Set timeout (3s connect, 3s socket read)
        HttpConnectionParams.setConnectionTimeout(params, 3000);
        HttpConnectionParams.setSoTimeout(params, 3000);

        // Set basic data
        HttpProtocolParams.setVersion(params, HttpVersion.HTTP_1_1);
        HttpProtocolParams.setContentCharset(params, "UTF-8");
        HttpProtocolParams.setUseExpectContinue(params, true);
        HttpProtocolParams.setUserAgent(params, this.userAgent);

        // Make connection pool
        ConnPerRoute connPerRoute = new ConnPerRouteBean(30);
        ConnManagerParams.setMaxConnectionsPerRoute(params, connPerRoute);
        ConnManagerParams.setMaxTotalConnections(params, 30);

        return httpClient;
    }

    /**
     * Create appropriate request based on the method
     * @param method HTTP Method for request
     * @param uri URI to perform request on
     * @return request object for the given method
     * @throws IllegalArgumentException for unsupported methods
     */
    private HttpRequestBase getRequest(Method method, String uri) {
        HttpRequestBase request = null;
        if (method == Method.GET) {
            request = new HttpGet(uri);
        }
        else {
            // BUG FIX: message was the ungrammatical "Not support such type!"
            throw new IllegalArgumentException("Unsupported HTTP method: " + method);
        }
        return request;
    }

    /**
     * Reads the response payload (entity) and returns it
     *
     * @param response HttpResponse to read message body for payload
     * @return Parsed response's payload, or {@code null} if there is no entity
     *         or reading it failed
     */
    private byte[] getResponseEntity(HttpResponse response) {
        try {
            // Pull content stream from response
            HttpEntity entity = response.getEntity();
            if (entity != null) {
                InputStream inputStream = entity.getContent();
                try {
                    ByteArrayOutputStream content = new ByteArrayOutputStream();
                    // Read response into a buffered stream
                    int readBytes;
                    byte[] buffer = new byte[512];
                    while ((readBytes = inputStream.read(buffer)) != -1) {
                        content.write(buffer, 0, readBytes);
                    }
                    return content.toByteArray();
                } finally {
                    // BUG FIX: the stream used to leak when read() threw
                    inputStream.close();
                }
            }
        } catch (IOException e) {
            Log.e(TAG, "Get response entity failed!");
        }
        return null;
    }

    /**
     * Builds HTTP headers
     * @return Http headers
     */
    private Header[] getHeaders() {
        List<Header> l = new ArrayList<Header>();
        l.add(HEADER_CONTENT_TYPE);
        l.add(HEADER_ACCEPT);
        l.add(HEADER_CONNECTION);
        l.add(HEADER_CACHE_CONTROL);
        l.add(HEADER_PRAGMA);
        return l.toArray(new Header[] {});
    }

    /** Human-readable dump of the request configuration (for debugging). */
    public String toString() {
        StringBuilder buf = new StringBuilder();
        buf.append("BaseURI: ").append(baseUri).append("\n");
        buf.append("UserAgent: ").append(userAgent).append("\n");
        buf.append("AcceptType: ").append(acceptTypes).append("\n");
        return buf.toString();
    }

    /* getters */
    public String getBaseURI() {
        return this.baseUri;
    }

    public String getUserAgent() {
        return this.userAgent;
    }

    /* setters */
    public void setBaseURI(String uri) {
        this.baseUri = uri;
    }

    public void setUserAgent(String agent) {
        this.userAgent = agent;
    }

    public void setAcceptType(String type) {
        this.acceptTypes = type;
    }
}
|
|
package org.pale.stumpy2.model;
import java.awt.Color;
import java.awt.Graphics;
import java.util.List;
import java.awt.Point;
import java.awt.Rectangle;
import java.sql.Connection;
import java.util.LinkedList;
import javax.swing.JComponent;
import javax.swing.JPanel;
import org.pale.stumpy2.componentview.ComponentBoxView;
/**
* There are many possible components in a patch, and they're represented by
* this class. This is the component in the patch, NOT the component instances
* in the patch instance (but then the editor has no concept of instances, so
* disregard that statement!)
*
* @author white
*
*/
public class Component implements Visitable {
    /** counter used to construct unique IDs (monotonically increasing, shared by all constructors) */
    static int idcounter;

    /** unique ID used by server */
    int id;

    /**
     * The type of this component
     */
    private ComponentType type;

    /**
     * Position in canvas
     */
    public Rectangle rect;

    /**
     * page number
     */
    public int page = 0;

    /**
     * Special text which may be sent from the server
     */
    public String extraText;

    /**
     * The connection (component and output) to which each input is connected.
     * May be null.
     */
    private Input[] inputs;

    /**
     * The components parameters. This is created by the type's
     * createParameters() method
     */
    private Parameter[] params = null;

    /**
     * A connection between two components - it's a link from an input of this
     * component to an output of another
     *
     * @author white
     *
     */
    public class Input {
        // source component and the index of its output this input reads from
        Component c;
        int output;

        Input(Component c, int output) {
            this.c = c;
            this.output = output;
        }
    }

    /**
     * Array indicating whether getting a particular output always runs the
     * component, or whether the cached version is used if it has been run
     * before in this frame.
     */
    boolean[] runOutputAlways;

    /** The patch this component belongs to. */
    private Patch patch;

    /** Free-form user comment attached to this component. */
    private String comment;

    /** Set the run-always flag for a single output (see {@link #runOutputAlways}). */
    public void setRunOutputAlways(int index, boolean state) {
        runOutputAlways[index] = state;
    }

    /** Set the comment text and notify the component box view of the change. */
    public void setComment(String c) {
        comment = c;
        ComponentBoxView.getInstance().commentChanged(this);
    }

    /** @return the user comment attached to this component (may be null) */
    public String getComment(){
        return comment;
    }

    /**
     * return the connection to this input.
     *
     * @param i input index; no range check is performed here
     */
    public Input getInput(int i) {
        return inputs[i];
    }

    /**
     * @return component type
     */
    public ComponentType getType() {
        return type;
    }

    /**
     * create and set component type. Constructor is not public; it's only
     * created by a factory method in the ComponentType.
     *
     * @param type
     * @param pos
     *            position in canvas
     */
    Component(Patch patch, ComponentType type, Point pos, int page) {
        this.patch = patch;
        this.page = page;
        this.id = idcounter++;
        this.type = type;
        this.inputs = new Input[type.getInputCount()];
        this.runOutputAlways = new boolean[type.getOutputCount()];
        this.rect = new Rectangle(pos, type.getSize());
        this.params = type.createParameters(this);
        if (this.params == null) {
            // if that returned null, we'd still better create a zero-length
            // array
            this.params = new Parameter[0];
        }
        // run through the array and set the parameter IDs
        for (int i = 0; i < this.params.length; i++) {
            this.params[i].setID(i);
        }
    }

    /**
     * Accept a visit from some visitor object - just passes it down.
     */
    @Override
    public void accept(Visitor v) {
        v.visitComponent(this);
    }

    /**
     * Set an input link to another component. DON'T call directly; use
     * Patch.setComponentInput()
     *
     * @param input
     *            the input to connect in this component
     * @param c
     *            the component to which to connect
     * @param output
     *            the output of that component to which our input is connected
     * @throws ConnectionOutOfRangeException
     * @throws ConnectionTypeMismatchException
     *
     */
    void setInput(int input, Component c, int output)
            throws ConnectionOutOfRangeException,
            ConnectionTypeMismatchException {
        if (input >= type.getInputCount())
            throw new ConnectionOutOfRangeException();
        if (output >= c.type.getOutputCount())
            throw new ConnectionOutOfRangeException();
        // NOTE(review): with ||, the type check still runs when only ONE side
        // is the ANY type (ID 25+), so ANY-to-concrete links are rejected.
        // If ANY is meant to match anything, this should presumably be && --
        // confirm the intended semantics before changing.
        if(getType().getInputType(input).getID()<25 ||
                c.getType().getOutputType(output).getID()<25){ // type ID code 25 not checked (is ANY type).
            if (c.getType().getOutputType(output).getID() != getType()
                    .getInputType(input).getID())
                throw new ConnectionTypeMismatchException();
        }
        inputs[input] = new Input(c, output);
    }

    /**
     * Clear an input link from another component
     *
     * @param input
     *            index
     * @throws ConnectionOutOfRangeException
     */
    void unsetInput(int input) throws ConnectionOutOfRangeException {
        if (input >= type.getInputCount())
            throw new ConnectionOutOfRangeException();
        inputs[input] = null;
    }

    /**
     * Remove any input connections to a given other component, or an input
     * connect from a given output. Iterates through input array finding
     * connections to the given component, setting them to null.
     *
     * @param c
     *            the component to remove
     * @param j
     *            the optional index of a particular output to remove, or -1 for
     *            any output to that component
     */
    public void removeInputConnections(Component c, int j) {
        for (int i = 0; i < this.inputs.length; i++) {
            if (inputs[i] != null && inputs[i].c == c
                    && (inputs[i].output == j || j == -1)) {
                inputs[i] = null;
            }
        }
    }

    /**
     * Delegate drawing to the type object - done as two calls, so that we can
     * override them separately
     *
     * @throws ConnectionOutOfRangeException
     *             this really shouldn't happen.
     */
    public void draw(Graphics g, boolean highlight) {
        g.setColor(Color.BLACK);
        type.draw(g, highlight, this);
    }

    /**
     * Draw connections, dealing with different pages
     * @param g
     * @param highlight
     * @param curpage
     * @throws ConnectionOutOfRangeException
     */
    public void drawConnections(Graphics g, boolean highlight, int curpage)
            throws ConnectionOutOfRangeException {
        type.drawConnections(g, highlight, this, curpage);
    }

    /**
     * Create a list of editor widgets for the parameters of this component. We
     * provide the patch so that the system an inform listeners of parameter
     * changes.
     *
     * @param parameterChangeListener
     * @return a list of new JComponent
     */
    public List<JComponent> createEditors(
            ParameterChangeListener parameterChangeListener) {
        List<JComponent> list = new LinkedList<JComponent>();
        if (params != null) {
            for (Parameter p : params) {
                JComponent e = p.createControl(parameterChangeListener);
                list.add(e);
            }
        }
        return list;
    }

    /**
     * Is a click at the given point in this component, and if so, is it on an
     * input or output?
     *
     * @param p
     *            the point on the canvas
     * @param curpage
     * @return an object describing the component and input or output, or null.
     */
    public ComponentAndConnection getComponentClick(Point p, int curpage) {
        // first we check that it can intersect at all!
        // (bounds grown so near-miss clicks on connection stubs still register)
        Rectangle br = new Rectangle(rect);
        br.grow(ComponentType.CONNECTION_DETECT_WIDTH,
                ComponentType.CONNECTION_DETECT_WIDTH);
        // if that's true, go on to check all the inputs and outputs; a more
        // detailed check.
        if (br.contains(p) && page==curpage) {
            return type.getComponentClick(this, p);
        } else {
            return null;
        }
    }

    /**
     * Component memento class - has to be a Bean, old bean, because we're using
     * XMLEncoder
     *
     * @author white
     *
     */
    static public class Memento {
        /**
         * The type of the component
         */
        public String type;

        /**
         * Its location on screen
         */
        public Point location;

        /**
         * The comment text
         */
        public String comment;

        // page number the component lives on
        int page;

        public String getComment() {
            return comment;
        }

        public void setComment(String comment) {
            this.comment = comment;
        }

        /**
         * Connection data - an array of pairs of ints, with nulls instead of
         * pairs where inputs are not connected. In each pair, 0=the index of
         * the output component within the patch, 1=the index of that
         * component's output
         */
        int[][] inputData;

        boolean[] runOutputAlways;

        public boolean[] getRunOutputAlways() {
            return runOutputAlways;
        }

        public void setRunOutputAlways(boolean[] runOutputAlways) {
            this.runOutputAlways = runOutputAlways;
        }

        public int[][] getInputData() {
            return inputData;
        }

        public void setInputData(int[][] inputData) {
            this.inputData = inputData;
        }

        /**
         * The parameters and their values
         */
        public Parameter.Memento parameters[];

        public Parameter.Memento[] getParameters() {
            return parameters;
        }

        public void setParameters(Parameter.Memento[] parameters) {
            this.parameters = parameters;
        }

        public void setLocation(Point location) {
            this.location = location;
        }

        public Point getLocation() {
            return location;
        }

        public String getType() {
            return type;
        }

        public void setType(String type) {
            this.type = type;
        }

        public void setPage(int page) {
            this.page = page;
        }

        public int getPage() {
            return page;
        }
    }

    /**
     * This creates a memento for all component data except the
     * interconnections, which are managed by the patch memento
     *
     * @return the memento, which needs interconnection data adding to it
     */
    Memento createMemento() {
        Memento m = new Memento();
        m.setType(getType().getName());
        m.setLocation(new Point(rect.x, rect.y));
        m.setRunOutputAlways(runOutputAlways);
        m.setComment(comment);
        m.setPage(page);
        Parameter.Memento p[] = new Parameter.Memento[params.length];
        for (int i = 0; i < params.length; i++) {
            p[i] = params[i].createMemento();
        }
        m.setParameters(p);
        return m;
    }

    /**
     * Create a components (sans connections) from a memento
     *
     * @param cm
     *            the memento
     * @param patch
     *            the patch to which I belong
     * @throws UnknownComponentTypeException
     */
    public Component(Memento cm, Patch patch)
            throws UnknownComponentTypeException {
        this.id = idcounter++;
        this.patch = patch;
        this.type = ComponentTypeRegistry.getInstance().getComponentType(
                cm.getType());
        Point pos = cm.getLocation();
        this.inputs = new Input[type.getInputCount()];
        this.rect = new Rectangle(pos, type.getSize());
        this.runOutputAlways = cm.getRunOutputAlways();
        this.comment = cm.getComment();
        this.page = cm.getPage();
        // create the parameters from the parameter mementoes
        Parameter.Memento[] pms = cm.getParameters();
        params = new Parameter[pms.length];
        for (int i = 0; i < pms.length; i++) {
            params[i] = pms[i].create();
        }
    }

    /**
     * Append to a list a command to create the component
     *
     * @param cmds
     * @param patchid
     */
    public void writeSyncCreateCommands(List<String> cmds, int patchid) {
        cmds.add("nc " + patchid + " " + id + " " + type.getName());
        for (int i = 0; i < type.getOutputCount(); i++) {
            writeSyncRunAlwaysCommands(cmds, patchid, i);
        }
        // and commands to initialise the parameters
        writeSyncParamCommands(cmds, patchid);
    }

    /**
     * Append a command to update the runalways state of an output
     */
    public void writeSyncRunAlwaysCommands(List<String> cmds, int patchid,
            int output) {
        cmds.add("sra " + patchid + " " + id + " " + output + " "
                + (runOutputAlways[output] ? "y" : "n"));
    }

    /**
     * Append commands to link this component's inputs to others. Done as part
     * of a complete sync, will not delete existing links etc.
     *
     * @param cmds
     * @param patchid
     */
    public void writeSyncLinkCommands(List<String> cmds, int patchid) {
        for (int i = 0; i < type.getInputCount(); i++) {
            Input inp = inputs[i];
            if (inp != null)
                cmds.add("lc " + patchid + " " + id + " " + i + " " + inp.c.id
                        + " " + inp.output);
        }
    }

    /**
     * Append commands to set all this component's parameters.
     *
     * @param cmds
     * @param patchid
     */
    public void writeSyncParamCommands(List<String> cmds, int patchid) {
        if (params != null) {
            for (Parameter p : params) {
                p.writeSyncCommand(cmds, patchid, id);
            }
        }
    }

    /**
     * Return the ID of the component within the patch
     *
     * @return
     */
    public int getID() {
        return id;
    }

    /**
     * return the patch I'm in
     *
     * @return
     */
    public Patch getPatch() {
        return patch;
    }

    /** Store extra display text sent from the server. */
    public void setExtraText(String str) {
        extraText = str;
    }

    /**
     * set a new component type from the old one, getting the new one from the
     * current list. If this new type has different params/connections this
     * might be "fun".
     *
     * @throws UnknownComponentTypeException
     */
    public void rehash() throws UnknownComponentTypeException {
        String name = type.getName();
        type = ComponentTypeRegistry.getInstance().getComponentType(name);
    }
}
|
|
package com.jakewharton.utilities;
import java.util.LinkedList;
import java.util.List;
import com.jakewharton.snakewallpaper.R;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Point;
import android.graphics.Rect;
import android.preference.DialogPreference;
import android.util.AttributeSet;
import android.util.Log;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.View;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.LinearLayout.LayoutParams;
/**
* Dialog preference which allows for the selection of the locations of launcher widgets.
*
* @author Jake Wharton
*/
public class WidgetLocationsPreference extends DialogPreference {
/**
* Tag used for logging.
*/
private static final String LOG = "WidgetLocationsPreference";
/**
* Number of numbers stored in a rectangle (L, T, R, B).
*/
private static final int RECTANGLE_LENGTH = 4;
/**
* Offset from the sides of the dialog.
*/
private static final int PADDING = 10;
/**
* Widget location view.
*/
private WidgetLocatorView mView;
/**
* The string representation of the locations.
*/
private String mValue;
/**
* The string representation of the locations used for the save callback.
*/
private String mTempValue;
/**
* Number of icon rows on the launcher.
*/
private int mIconRows;
/**
* Number of icon columns on the launcher.
*/
private int mIconCols;
/**
 * Create a new instance of the WidgetLocationsPreference.
 *
 * @param context Context.
 * @param attrs Attributes.
 */
public WidgetLocationsPreference(final Context context, final AttributeSet attrs) {
    super(context, attrs);
    // persist the location string via the standard preference store
    this.setPersistent(true);
}
@Override
protected View onCreateDialogView() {
    final Context context = this.getContext();

    // Instructional text shown above the locator grid.
    final TextView howTo = new TextView(context);
    howTo.setText(R.string.widgetlocations_howto);

    // Interactive grid seeded with the persisted locations.
    this.mView = new WidgetLocatorView(context, this.mIconRows, this.mIconCols, this.mValue);

    final int pad = WidgetLocationsPreference.PADDING;
    final LinearLayout root = new LinearLayout(context);
    root.setOrientation(LinearLayout.VERTICAL);
    root.setPadding(pad, pad, pad, pad);
    root.addView(howTo, new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.WRAP_CONTENT));
    root.addView(this.mView, new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
    return root;
}
/**
 * Update the number of icon rows and columns on the launcher.
 *
 * @param iconRows Number of rows.
 * @param iconCols Number of columns.
 */
public void setIconCounts(final int iconRows, final int iconCols) {
    // used when the dialog view is built (see onCreateDialogView)
    this.mIconRows = iconRows;
    this.mIconCols = iconCols;
}
@Override
protected Object onGetDefaultValue(final TypedArray a, final int index) {
    // the default is declared as a plain string in the preference XML
    return a.getString(index);
}
@Override
protected void onSetInitialValue(final boolean restore, final Object defaultValue) {
    // Fall back to an empty location string when no default was supplied.
    final String fallback;
    if (defaultValue == null) {
        fallback = "";
    } else {
        fallback = (String) defaultValue;
    }
    this.mValue = this.getPersistedString(fallback);
}
@Override
protected void onDialogClosed(final boolean positiveResult) {
    super.onDialogClosed(positiveResult);
    // Nothing to persist when the dialog was cancelled.
    if (!positiveResult) {
        return;
    }
    // Stash the candidate value so the change listener sees exactly what
    // would be saved, then persist only if the listener approves.
    this.mTempValue = this.mValue;
    if (this.callChangeListener(this.mTempValue)) {
        this.saveValue(this.mTempValue);
    }
}
/**
 * Set and persist the string representation of the widget locations.
 * @param value string encoding of the widget rectangles
 */
private void saveValue(final String value) {
    this.setValue(value);
    this.persistString(value);
}
/**
 * Set the string representation of the widget locations.
 * (In-memory only; see saveValue for the persisting variant.)
 * @param value string encoding of the widget rectangles
 */
private void setValue(final String value) {
    this.mValue = value;
}
/**
 * Convert a persisted string value to the actual widget locations.
 *
 * <p>The persisted format is a flat string of single-digit numbers in
 * groups of four — left, top, right, bottom — one group per widget.</p>
 *
 * @param string Persisted string.
 * @return List of Rects where the widgets are located; groups that fail to
 *         parse are logged and skipped.
 * @throws IllegalArgumentException if the string length is not a multiple of four.
 */
public static List<Rect> convertStringToWidgetList(final String string) {
    final List<Rect> list = new LinkedList<Rect>();
    if ((string.length() % WidgetLocationsPreference.RECTANGLE_LENGTH) != 0) {
        throw new IllegalArgumentException("String length must be a multiple of four.");
    }
    for (int i = 0; i < string.length(); i += WidgetLocationsPreference.RECTANGLE_LENGTH) {
        try {
            final Rect r = new Rect();
            r.left = Integer.parseInt(String.valueOf(string.charAt(i)));
            r.top = Integer.parseInt(String.valueOf(string.charAt(i + 1)));
            r.right = Integer.parseInt(String.valueOf(string.charAt(i + 2)));
            r.bottom = Integer.parseInt(String.valueOf(string.charAt(i + 3)));
            list.add(r);
        } catch (NumberFormatException e) {
            // BUG FIX: was string.substring(i, RECTANGLE_LENGTH), which logs the
            // wrong slice for every group after the first and throws
            // StringIndexOutOfBoundsException once i exceeds RECTANGLE_LENGTH.
            Log.w(WidgetLocationsPreference.LOG, "Invalid rectangle: "
                    + string.substring(i, i + WidgetLocationsPreference.RECTANGLE_LENGTH));
        }
    }
    return list;
}
/**
* View which allows for the selecting of widget locations
*
* @author Jake Wharton
*/
private class WidgetLocatorView extends View {
/**
* Offset from the sides of the view.
*/
private static final float OFFSET = 5;
/**
* Location at which the current widget location begins.
*/
private Point mTouchStart;
/**
* Location at which the current widget location ends.
*/
private Point mTouchEnd;
/**
* Number of icon rows to display.
*/
private final int mRows;
/**
* Number of icon columns to display.
*/
private final int mCols;
/**
* The width of a single icon.
*/
private float mIconWidth;
/**
* The height of a single icon.
*/
private float mIconHeight;
/**
* The width of the virtual screen on the view.
*/
private float mWidth;
/**
* The height of the virtual screen on the view.
*/
private float mHeight;
/**
* Paint used to draw the icon divider lines.
*/
private final Paint mLine;
/**
* Paint used to draw the current widget.
*/
private final Paint mDrawing;
/**
* Paint used to draw the existing widgets.
*/
private final Paint mWidget;
/**
* List of current existing widget locations.
*/
private final List<Rect> mWidgets;
/**
* Detect long-presses on the view.
*/
private final GestureDetector gestureDetector = new GestureDetector(new GestureDetector.OnGestureListener() {
public boolean onSingleTapUp(MotionEvent e) { return false; }
public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) { return false; }
public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) { return false; }
public boolean onDown(MotionEvent e) { return false; }
public void onShowPress(MotionEvent e) {}
public void onLongPress(MotionEvent e) {
WidgetLocatorView.this.delete();
}
});
/**
 * Create a new instance of the WidgetLocatorView.
 *
 * @param context Context.
 * @param rows Number of icon rows.
 * @param cols Number of icon columns.
 * @param value Persisted value of widget location representation.
 */
public WidgetLocatorView(final Context context, final int rows, final int cols, final String value) {
    super(context);
    this.mRows = rows;
    this.mCols = cols;

    // grid divider lines
    this.mLine = new Paint(Paint.ANTI_ALIAS_FLAG);
    this.mLine.setColor(Color.GRAY);
    this.mLine.setStrokeWidth(2);

    // outline of the widget currently being drawn by the user
    this.mDrawing = new Paint(Paint.ANTI_ALIAS_FLAG);
    this.mDrawing.setColor(Color.RED);
    this.mDrawing.setStyle(Paint.Style.STROKE);

    // outline of already-placed widgets
    this.mWidget = new Paint(Paint.ANTI_ALIAS_FLAG);
    this.mWidget.setColor(Color.GREEN);
    this.mWidget.setStyle(Paint.Style.STROKE);

    // decode the persisted string into rectangles to display
    this.mWidgets = WidgetLocationsPreference.convertStringToWidgetList(value);
}
@Override
protected void onDraw(final Canvas c) {
    // Work in a coordinate space inset by OFFSET on all sides.
    c.save();
    c.translate(WidgetLocatorView.OFFSET, WidgetLocatorView.OFFSET);

    //Draw lines
    for (int row = 0; row <= this.mRows; row++) {
        final float rowPosition = row * this.mIconHeight;
        c.drawLine(0, rowPosition, this.mWidth, rowPosition, this.mLine);
    }
    for (int col = 0; col <= this.mCols; col++) {
        final float colPosition = col * this.mIconWidth;
        c.drawLine(colPosition, 0, colPosition, this.mHeight, this.mLine);
    }

    // Widget rectangles are stored in cell units; map them to pixels centered
    // on the cell, expanded by a quarter of the smaller cell dimension.
    final float iconWidthOverTwo = this.mIconWidth / 2.0f;
    final float iconHeightOverTwo = this.mIconHeight / 2.0f;
    final float offset = ((this.mIconHeight < this.mIconWidth) ? this.mIconHeight : this.mIconWidth) / 4.0f;

    //Saved widgets (drawn as a crossed-out green box)
    for (final Rect widget : this.mWidgets) {
        final float left = (widget.left * this.mIconWidth) + iconWidthOverTwo - offset;
        final float right = (widget.right * this.mIconWidth) + iconWidthOverTwo + offset;
        final float top = (widget.top * this.mIconHeight) + iconHeightOverTwo - offset;
        final float bottom = (widget.bottom * this.mIconHeight) + iconHeightOverTwo + offset;
        c.drawRect(left, top, right, bottom, this.mWidget);
        c.drawLine(left, top, right, bottom, this.mWidget);
        c.drawLine(left, bottom, right, top, this.mWidget);
    }

    //Currently drawing widget (red outline while the touch is in progress)
    if (this.mTouchStart != null) {
        final Rect pointRect = this.toRectangle();
        final float left = (pointRect.left * this.mIconWidth) + iconWidthOverTwo - offset;
        final float right = (pointRect.right * this.mIconWidth) + iconWidthOverTwo + offset;
        final float top = (pointRect.top * this.mIconHeight) + iconHeightOverTwo - offset;
        final float bottom = (pointRect.bottom * this.mIconHeight) + iconHeightOverTwo + offset;
        c.drawRect(left, top, right, bottom, this.mDrawing);
    }

    c.restore();
}
@Override
protected void onSizeChanged(final int width, final int height, final int oldWidth, final int oldHeight) {
super.onSizeChanged(width, height, oldWidth, oldHeight);
this.mWidth = width - (2 * WidgetLocatorView.OFFSET);
this.mHeight = height - (2 * WidgetLocatorView.OFFSET);
this.mIconWidth = this.mWidth / (1.0f * this.mCols);
this.mIconHeight = this.mHeight / (1.0f * this.mRows);
}
@Override
public boolean onTouchEvent(final MotionEvent event) {
if (this.gestureDetector.onTouchEvent(event)) {
return true;
}
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
this.mTouchStart = this.mTouchEnd = this.getPoint(event.getX(), event.getY());
this.invalidate();
return true;
case MotionEvent.ACTION_MOVE:
this.mTouchEnd = this.getPoint(event.getX(), event.getY());
this.invalidate();
return true;
case MotionEvent.ACTION_UP:
case MotionEvent.ACTION_CANCEL:
this.mTouchEnd = this.getPoint(event.getX(), event.getY());
this.add();
this.mTouchStart = null;
this.mTouchEnd = null;
this.invalidate();
return true;
default:
return super.onTouchEvent(event);
}
}
/**
* Add a new widget using the two touch point locations as corners.
*/
private void add() {
final Rect newWidget = this.toRectangle();
final Rect insetWidget = new Rect(newWidget);
//This is so that intersect returns true if they are actually adjacent
insetWidget.inset(-1, -1);
for (final Rect widget : this.mWidgets) {
if (Rect.intersects(widget, insetWidget)) {
return;
}
}
if ((newWidget.height() == 0) && (newWidget.width() == 0)) {
return;
}
this.mWidgets.add(newWidget);
this.save();
}
/**
* Delete a widget at the long-pressed poisiton (if it exists).
*/
private void delete() {
for (final Rect widget : this.mWidgets) {
if ((this.mTouchEnd.x >= widget.left) && (this.mTouchEnd.x <= widget.right) && (this.mTouchEnd.y >= widget.top) && (this.mTouchEnd.y <= widget.bottom)) {
this.mWidgets.remove(widget);
break;
}
}
this.save();
this.invalidate();
}
/**
* Save the value to the parent instance.
*/
private void save() {
final StringBuilder builder = new StringBuilder();
for (final Rect widget : this.mWidgets) {
builder.append(Integer.toString(widget.left));
builder.append(Integer.toString(widget.top));
builder.append(Integer.toString(widget.right));
builder.append(Integer.toString(widget.bottom));
}
WidgetLocationsPreference.this.setValue(builder.toString());
}
/**
* Get the icon location Point from the current pixel coordinates.
*
* @param x X coordinate.
* @param y Y coordinate.
* @return Icon location Point.
*/
private Point getPoint(float x, float y) {
x -= WidgetLocatorView.OFFSET;
y -= WidgetLocatorView.OFFSET;
int newX = (int)(x / this.mIconWidth);
int newY = (int)(y / this.mIconHeight);
if (newX < 0) {
newX = 0;
} else if (newX >= this.mCols) {
newX = this.mCols - 1;
}
if (newY < 0) {
newY = 0;
} else if (newY >= this.mRows) {
newY = this.mRows - 1;
}
return new Point(newX, newY);
}
/**
* Convert the two touch Points to a Rect.
*
* @return Rect with corners at the two touch points.
*/
private Rect toRectangle() {
final boolean isStartXSmaller = (this.mTouchStart.x < this.mTouchEnd.x);
final boolean isStartYSmaller = (this.mTouchStart.y < this.mTouchEnd.y);
final Rect r = new Rect();
r.left = isStartXSmaller ? this.mTouchStart.x : this.mTouchEnd.x;
r.right = isStartXSmaller ? this.mTouchEnd.x : this.mTouchStart.x;
r.top = isStartYSmaller ? this.mTouchStart.y : this.mTouchEnd.y;
r.bottom = isStartYSmaller ? this.mTouchEnd.y : this.mTouchStart.y;
return r;
}
}
}
|
|
package main.model;
import java.awt.Point;
import java.util.Collection;
import java.util.EnumMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Observable;
import java.util.Observer;
import java.util.Set;
import main.model.domain.Army;
import main.model.domain.SupplyTrain;
import main.model.domain.TRADEABLE;
import static main.model.domain.TRADEABLE.*;
import main.model.domain.Traversable;
import main.presenter.Presentable;
import main.presenter.HexTile;
import main.presenter.TILE_TYPE;
/**
 * A territory on the hex map. A domain stores tradeable goods, hosts armies,
 * can fortify itself, and exposes six directional army moves to its grid
 * neighbours. It observes the game clock and resets its per-turn allowances
 * on every seasonal update.
 */
public abstract class Domain implements Traversable, SupplyTrain, Presentable, Observer {
	private String name;
	/** Presentation tile bound to this domain. */
	private HexTile hex;
	/** Grid coordinates of this domain on the map. */
	private Point point;
	/** Visible stock level of each tradeable resource. */
	private Map<TRADEABLE, Integer> goodsStore;
	/** Armies currently stationed in this domain (own and foreign). */
	private Set<Army> armies;
	private int prestige;
	private int fortification;
	// Per-turn allowances; reset at the start of every season in update().
	private int actionsLeft;
	private int importsLeft;
	private int exportsLeft;
	// Neighbouring domains on the hex grid; null where no neighbour exists.
	private Domain neighbour_NE;
	private Domain neighbour_E;
	private Domain neighbour_SE;
	private Domain neighbour_SW;
	private Domain neighbour_W;
	private Domain neighbour_NW;
	/**
	 * Create a domain at the given map position.
	 *
	 * @param p - grid location of the domain
	 * @param name - display name of the domain
	 */
	public Domain (Point p, String name) {
		this.point = p;
		this.name = name;
		this.goodsStore = new EnumMap<TRADEABLE, Integer>(TRADEABLE.class);
		this.armies = new HashSet<Army>();
		initialise();
	}
	// Bind the presentation tile, zero all stocks and counters, and grant the
	// first turn's allowances. (The Point parameter the original version took
	// was never used, so it has been removed.)
	private void initialise () {
		hex = new HexTile(this);
		for (TRADEABLE t : TRADEABLE.values()) {
			goodsStore.put(t, 0);
		}
		prestige = 0;
		fortification = 0;
		actionsLeft = 1;
		importsLeft = 1;
		exportsLeft = 1;
	}
	/**
	 * Gets the current stock level for a nominated tradeable resource.
	 * @param goodType - the tradeable resource in question
	 * @return - int - current (visible) stock levels
	 */
	public final int stock(TRADEABLE goodType) {
		return goodsStore.get(goodType);
	}
	/**
	 * Increases the stock of a nominated {@link TRADEABLE} by one.
	 * NOTE(review): importsLeft is tracked but neither checked nor decremented
	 * here — presumably the caller enforces the import allowance; confirm.
	 * @param goodType - the nominated tradeable
	 */
	public final void importGoods (TRADEABLE goodType) {
		goodsStore.put(goodType, goodsStore.get(goodType)+1);
	}
	/**
	 * Reduces the stock of a nominated {@link TRADEABLE} by one.
	 * @param goodType - the nominated tradeable
	 * @return true if total was reduced, otherwise false.
	 */
	public final boolean exportGoods (TRADEABLE goodType) {
		int level = goodsStore.get(goodType);
		if (level > 0) {
			goodsStore.put(goodType, level-1);
			return true;
		}
		return false;
	}
	/** @return the presentation tile bound to this domain. */
	public final HexTile getHex () {
		return hex;
	}
	/** @return current prestige score. */
	public final int prestige() {
		return prestige;
	}
	@Override
	public final Point location() {
		return point;
	}
	/** @return true while this domain still has an unspent action this turn. */
	protected final boolean actionLeft () {
		return actionsLeft > 0;
	}
	/** Consume one of this turn's actions. */
	protected final void useAction() {
		actionsLeft-=1;
	}
	/** @return current fortification level. */
	public final int fortification() {
		return fortification;
	}
	// Fortifying only requires a spare action.
	private boolean canFortify () {
		return actionLeft();
	}
	// Fortifying with trade goods additionally requires stock to spend.
	private boolean canFortifyWithTradeGoods() {
		return canFortify() && (goodsStore.get(TRADE_GOODS) > 0);
	}
	/** Spend one action to raise fortification by one (no-op without an action). */
	public final void fortify() {
		if (canFortify()) {
			useAction();
			fortification+=1;
		}
	}
	/**
	 * Spend one action and one unit of trade goods to raise fortification by
	 * two (no-op unless both are available).
	 */
	public final void fortifyWithTradeGoods() {
		if (canFortifyWithTradeGoods()) {
			useAction();
			fortification+=2;
			int newValue = goodsStore.get(TRADE_GOODS)-1;
			goodsStore.put(TRADE_GOODS, newValue);
		}
	}
	@Override
	public final boolean isSamePlaceAs(Traversable place) {
		return location().equals(place.location());
	}
	@Override
	public final String toHTML () {
		StringBuilder reply = new StringBuilder();
		reply.append("<html>");
		reply.append("<b>" + name + "</b> <i>("+category().toString()+")</i>");
		reply.append("<br>");
		reply.append("Location: (" + point.x + ", " + point.y + ")");
		reply.append("<br>");
		reply.append("Prestige: " + prestige);
		reply.append("<br>");
		reply.append("Fortification: " + fortification);
		reply.append("<br>");
		reply.append("<u>Resources:</u>");
		reply.append("<br>");
		reply.append(goodsStoreToTable());
		reply.append("<u>Left to do this turn:</u>");
		reply.append("<br>");
		reply.append("Actions: " + actionsLeft);
		reply.append("<br>");
		reply.append("Exports: " + exportsLeft);
		reply.append("<br>");
		reply.append("Imports: " + importsLeft);
		reply.append("</html>");
		return reply.toString();
	}
	// Render the goods store as an HTML table, one row per tradeable.
	private String goodsStoreToTable() {
		StringBuilder reply = new StringBuilder();
		reply.append("<table>");
		for (Map.Entry<TRADEABLE, Integer> entry : goodsStore.entrySet()) {
			reply.append("<tr>");
			reply.append("<td>");
			reply.append(entry.getKey().toString());
			reply.append("</td>");
			reply.append("<td>");
			reply.append(entry.getValue());
			reply.append("</td>");
			reply.append("</tr>");
		}
		reply.append("</table>");
		return reply.toString();
	}
	@Override
	public abstract TILE_TYPE category();
	/**
	 * Resolve and cache this domain's six hex-grid neighbours from the full
	 * set of domains. Directions without a matching domain stay null.
	 */
	public final void bindNeighbours(Collection<Domain> domains) {
		int x = location().x;
		int y = location().y;
		neighbour_NE = find(domains, x+1, y-1);
		neighbour_E = find(domains, x+1, y);
		neighbour_SE = find(domains, x+1, y+1);
		neighbour_SW = find(domains, x, y+1);
		neighbour_W = find(domains, x-1, y);
		neighbour_NW = find(domains, x, y-1);
	}
	// Linear search for the domain at (x, y); null if none exists there.
	private Domain find (Collection<Domain> domains, int x, int y) {
		for (Domain d : domains) {
			if (d.location().x == x && d.location().y == y) {
				return d;
			}
		}
		return null;
	}
	/** Station an army in this domain. */
	public final void placeArmy(Army army) {
		armies.add(army);
	}
	/** @return number of armies stationed here, regardless of owner. */
	public final int unitCount() {
		return armies.size();
	}
	/** @return number of stationed armies that originate from this domain. */
	public final int friendlyUnitCount() {
		int reply = 0;
		for (Army a : armies) {
			if (a.isFrom(this)) {
				reply++;
			}
		}
		return reply;
	}
	// Shared implementation of the six directional moves: the destination must
	// exist, the army must be present here, and it must be able to traverse
	// the destination terrain.
	private void moveTo (Domain destination, Army army) {
		if (destination != null && armies.contains(army)) {
			if (army.canTraverse(destination)) {
				armies.remove(army);
				destination.accept(army);
			}
		}
	}
	@Override
	public final void moveNorthEast (Army army) {
		moveTo(neighbour_NE, army);
	}
	@Override
	public final void moveEast (Army army) {
		moveTo(neighbour_E, army);
	}
	@Override
	public final void moveSouthEast (Army army) {
		moveTo(neighbour_SE, army);
	}
	@Override
	public final void moveSouthWest (Army army) {
		moveTo(neighbour_SW, army);
	}
	@Override
	public final void moveWest (Army army) {
		moveTo(neighbour_W, army);
	}
	@Override
	public final void moveNorthWest (Army army) {
		moveTo(neighbour_NW, army);
	}
	// Receive an army arriving from a neighbouring domain.
	private void accept(Army army) {
		army.setLocation(this);
		armies.add(army);
	}
	/**
	 * Seasonal tick: propagate the new season to all stationed armies and
	 * reset the per-turn action/import/export allowances.
	 */
	@Override
	public void update(Observable o, Object arg) {
		SEASON s = (SEASON) arg; // observers are only ever notified with a SEASON
		updateArmies(s);
		this.actionsLeft = 1;
		this.importsLeft = 1;
		this.exportsLeft = 1;
	}
	private void updateArmies(SEASON s) {
		armies.forEach(a -> a.update(s));
	}
	/** @return number of unspent actions this turn. */
	public final int remainingActions() {
		return actionsLeft;
	}
	public abstract void supplyYourArmy (Army army);
	public abstract void disbandAtHome(Army army);
	public abstract void disbandRemotely(Army army);
}
|
|
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.android;
import com.beust.jcommander.IStringConverter;
import com.beust.jcommander.IValueValidator;
import com.beust.jcommander.JCommander;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import com.beust.jcommander.Parameters;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Stopwatch;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.Multimap;
import com.google.devtools.build.singlejar.ZipCombiner;
import com.google.devtools.build.singlejar.ZipCombiner.OutputMode;
import com.google.devtools.build.zip.ZipFileEntry;
import com.google.devtools.build.zip.ZipReader;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;
import java.util.regex.Pattern;
/**
* Action to filter entries out of a Zip file.
*
* <p>The entries to remove are determined from the filterZips and filterTypes. All entries from the
* filter Zip files that have an extension listed in filterTypes will be removed. If no filterZips
* are specified, no entries will be removed. Specifying no filterTypes is treated as if an
* extension of '.*' was specified.
*
* <p>Assuming each Zip as a set of entries, the result is:
*
* <pre> outputZip = inputZip - union[x intersect filterTypes for x in filterZips]</pre>
*
* <p>
*
* <pre>
* Example Usage:
* java/com/google/build/android/ZipFilterAction\
* --inputZip path/to/inputZip
* --outputZip path/to/outputZip
* --filterZips [path/to/filterZip[,path/to/filterZip]...]
* --filterTypes [fileExtension[,fileExtension]...]
* --explicitFilters [fileRegex[,fileRegex]...]
* --outputMode [DONT_CARE|FORCE_DEFLATE|FORCE_STORED]
* --checkHashMismatch [IGNORE|WARN|ERROR]
* </pre>
*/
public class ZipFilterAction {
  private static final Logger logger = Logger.getLogger(ZipFilterAction.class.getName());
  /** Modes of performing content hash checking during zip filtering. */
  public enum HashMismatchCheckMode {
    /** Filter file from input zip iff a file is found with the same filename in filter zips. */
    IGNORE,
    /**
     * Filter file from input zip iff a file is found with the same filename and content hash in
     * filter zips. Print warning if the filename is identical but content hash is not.
     */
    WARN,
    /**
     * Same behavior as WARN, but throw an error if a file is found with the same filename with
     * different content hash.
     */
    ERROR
  }
  /** Command-line options, parsed by JCommander. */
  @Parameters(optionPrefixes = "--")
  static class Options {
    @Parameter(
      names = "--inputZip",
      description = "Path of input zip.",
      converter = PathFlagConverter.class,
      validateValueWith = PathExistsValidator.class
    )
    Path inputZip;
    @Parameter(
      names = "--outputZip",
      description = "Path to write output zip.",
      converter = PathFlagConverter.class
    )
    Path outputZip;
    @Parameter(
      names = "--filterZips",
      description = "Filter zips.",
      converter = PathFlagConverter.class,
      validateValueWith = AllPathsExistValidator.class
    )
    List<Path> filterZips = ImmutableList.of();
    @Parameter(names = "--filterTypes", description = "Filter file types.")
    List<String> filterTypes = ImmutableList.of();
    @Parameter(names = "--explicitFilters", description = "Explicitly specified filters.")
    List<String> explicitFilters = ImmutableList.of();
    @Parameter(names = "--outputMode", description = "Output zip compression mode.")
    OutputMode outputMode = OutputMode.DONT_CARE;
    @Parameter(
      names = "--checkHashMismatch",
      description =
          "Ignore, warn or throw an error if the content hashes of two files with the "
              + "same name are different."
    )
    HashMismatchCheckMode hashMismatchCheckMode = HashMismatchCheckMode.WARN;
    /**
     * @deprecated please use --checkHashMismatch ERROR instead. Other options are IGNORE and WARN.
     */
    @Deprecated
    @Parameter(
      names = "--errorOnHashMismatch",
      description = "Error on entry filter with hash mismatch."
    )
    boolean errorOnHashMismatch = false;
    /**
     * @deprecated please use --checkHashMismatch WARN instead. Other options are IGNORE and WARN.
     *     <p>This is a hack to support existing users of --noerrorOnHashMismatch. JCommander does
     *     not support setting boolean flags with "--no", so instead we set the default to false and
     *     just ignore anyone who passes --noerrorOnHashMismatch.
     */
    @Deprecated
    @Parameter(names = "--noerrorOnHashMismatch")
    boolean ignored = false;
  }
  /** Converts string flags to paths. Public because JCommander invokes this by reflection. */
  public static class PathFlagConverter implements IStringConverter<Path> {
    @Override
    public Path convert(String text) {
      return FileSystems.getDefault().getPath(text);
    }
  }
  /** Validates that a path exists. Public because JCommander invokes this by reflection. */
  public static class PathExistsValidator implements IValueValidator<Path> {
    @Override
    public void validate(String s, Path path) {
      if (!Files.exists(path)) {
        throw new ParameterException(String.format("%s is not a valid path.", path.toString()));
      }
    }
  }
  /** Validates that a set of paths exist. Public because JCommander invokes this by reflection. */
  public static class AllPathsExistValidator implements IValueValidator<List<Path>> {
    @Override
    public void validate(String s, List<Path> paths) {
      for (Path path : paths) {
        if (!Files.exists(path)) {
          throw new ParameterException(String.format("%s is not a valid path.", path.toString()));
        }
      }
    }
  }
  /**
   * Collect the (name, CRC) pairs of every entry in the filter zips whose name ends with one of
   * the filter types. An empty {@code filterTypes} matches every entry.
   *
   * @param filterZips zip files whose entries should be filtered out of the input.
   * @param filterTypes file-name suffixes to match; matched literally, not as regexes.
   * @return multimap from entry name to the CRCs seen for that name across all filter zips.
   * @throws IOException if a filter zip cannot be read.
   */
  @VisibleForTesting
  static Multimap<String, Long> getEntriesToOmit(
      Collection<Path> filterZips, Collection<String> filterTypes) throws IOException {
    // Escape filter types to prevent regex abuse
    Set<String> escapedFilterTypes = new HashSet<>();
    for (String filterType : filterTypes) {
      escapedFilterTypes.add(Pattern.quote(filterType));
    }
    // Match any entry name that ends with one of the filter file types. Compile the pattern
    // once here rather than calling String.matches per entry, which would recompile the
    // regex for every entry in every filter zip.
    Pattern filterPattern =
        Pattern.compile(String.format(".*(%s)$", Joiner.on("|").join(escapedFilterTypes)));
    ImmutableSetMultimap.Builder<String, Long> entriesToOmit = ImmutableSetMultimap.builder();
    for (Path filterZip : filterZips) {
      try (ZipReader zip = new ZipReader(filterZip.toFile())) {
        for (ZipFileEntry entry : zip.entries()) {
          if (filterTypes.isEmpty() || filterPattern.matcher(entry.getName()).matches()) {
            entriesToOmit.put(entry.getName(), entry.getCrc());
          }
        }
      }
    }
    return entriesToOmit.build();
  }
  public static void main(String[] args) throws IOException {
    System.exit(run(args));
  }
  /**
   * Parse flags, build the entry filter, and copy the input zip to the output zip with the
   * filtered entries removed.
   *
   * @return 0 on success, 1 if the filter reported hash-mismatch errors.
   */
  static int run(String[] args) throws IOException {
    Options options = new Options();
    new JCommander(options).parse(args);
    logger.fine(
        String.format(
            "Creating filter from entries of type %s, in zip files %s.",
            options.filterTypes, options.filterZips));
    final Stopwatch timer = Stopwatch.createStarted();
    Multimap<String, Long> entriesToOmit =
        getEntriesToOmit(options.filterZips, options.filterTypes);
    final String explicitFilter =
        options.explicitFilters.isEmpty()
            ? ""
            : String.format(".*(%s).*", Joiner.on("|").join(options.explicitFilters));
    logger.fine(String.format("Filter created in %dms", timer.elapsed(TimeUnit.MILLISECONDS)));
    ImmutableMap.Builder<String, Long> inputEntries = ImmutableMap.builder();
    try (ZipReader input = new ZipReader(options.inputZip.toFile())) {
      for (ZipFileEntry entry : input.entries()) {
        inputEntries.put(entry.getName(), entry.getCrc());
      }
    }
    // TODO(jingwen): Remove --errorOnHashMismatch when Blaze release with --checkHashMismatch
    // is checked in.
    if (options.errorOnHashMismatch) {
      options.hashMismatchCheckMode = HashMismatchCheckMode.ERROR;
    }
    ZipFilterEntryFilter entryFilter =
        new ZipFilterEntryFilter(
            explicitFilter, entriesToOmit, inputEntries.build(), options.hashMismatchCheckMode);
    try (OutputStream out = Files.newOutputStream(options.outputZip);
        ZipCombiner combiner = new ZipCombiner(options.outputMode, entryFilter, out)) {
      combiner.addZip(options.inputZip.toFile());
    }
    logger.fine(String.format("Filtering completed in %dms", timer.elapsed(TimeUnit.MILLISECONDS)));
    return entryFilter.sawErrors() ? 1 : 0;
  }
}
|
|
// Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.query.change;
import static com.google.gerrit.server.ApprovalsUtil.sortApprovals;
import com.google.auto.value.AutoValue;
import com.google.common.base.MoreObjects;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Maps;
import com.google.common.collect.SetMultimap;
import com.google.gerrit.common.data.SubmitRecord;
import com.google.gerrit.common.data.SubmitTypeRecord;
import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.ChangeMessage;
import com.google.gerrit.reviewdb.client.Patch;
import com.google.gerrit.reviewdb.client.PatchLineComment;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.reviewdb.client.PatchSetApproval;
import com.google.gerrit.reviewdb.client.RefNames;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.ApprovalsUtil;
import com.google.gerrit.server.ChangeMessagesUtil;
import com.google.gerrit.server.CurrentUser;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.PatchLineCommentsUtil;
import com.google.gerrit.server.change.MergeabilityCache;
import com.google.gerrit.server.git.GitRepositoryManager;
import com.google.gerrit.server.git.MergeUtil;
import com.google.gerrit.server.notedb.ChangeNotes;
import com.google.gerrit.server.notedb.NotesMigration;
import com.google.gerrit.server.notedb.ReviewerState;
import com.google.gerrit.server.patch.PatchList;
import com.google.gerrit.server.patch.PatchListCache;
import com.google.gerrit.server.patch.PatchListEntry;
import com.google.gerrit.server.patch.PatchListNotAvailableException;
import com.google.gerrit.server.project.ChangeControl;
import com.google.gerrit.server.project.NoSuchChangeException;
import com.google.gerrit.server.project.ProjectCache;
import com.google.gerrit.server.project.SubmitRuleEvaluator;
import com.google.gwtorm.server.OrmException;
import com.google.gwtorm.server.ResultSet;
import com.google.inject.assistedinject.Assisted;
import com.google.inject.assistedinject.AssistedInject;
import org.eclipse.jgit.errors.IncorrectObjectTypeException;
import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.errors.RepositoryNotFoundException;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.FooterLine;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevWalk;
import java.io.IOException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class ChangeData {
private static final int BATCH_SIZE = 50;
public static List<Change> asChanges(List<ChangeData> changeDatas)
throws OrmException {
List<Change> result = new ArrayList<>(changeDatas.size());
for (ChangeData cd : changeDatas) {
result.add(cd.change());
}
return result;
}
public static Map<Change.Id, ChangeData> asMap(List<ChangeData> changes) {
Map<Change.Id, ChangeData> result =
Maps.newHashMapWithExpectedSize(changes.size());
for (ChangeData cd : changes) {
result.put(cd.getId(), cd);
}
return result;
}
public static void ensureChangeLoaded(Iterable<ChangeData> changes)
throws OrmException {
ChangeData first = Iterables.getFirst(changes, null);
if (first == null) {
return;
} else if (first.notesMigration.readChanges()) {
for (ChangeData cd : changes) {
cd.change();
}
}
Map<Change.Id, ChangeData> missing = Maps.newHashMap();
for (ChangeData cd : changes) {
if (cd.change == null) {
missing.put(cd.getId(), cd);
}
}
if (missing.isEmpty()) {
return;
}
for (Change change : first.db.changes().get(missing.keySet())) {
missing.get(change.getId()).change = change;
}
}
public static void ensureAllPatchSetsLoaded(Iterable<ChangeData> changes)
throws OrmException {
ChangeData first = Iterables.getFirst(changes, null);
if (first == null) {
return;
} else if (first.notesMigration.readChanges()) {
for (ChangeData cd : changes) {
cd.patchSets();
}
}
List<ResultSet<PatchSet>> results = new ArrayList<>(BATCH_SIZE);
for (List<ChangeData> batch : Iterables.partition(changes, BATCH_SIZE)) {
results.clear();
for (ChangeData cd : batch) {
if (cd.patchSets == null) {
results.add(cd.db.patchSets().byChange(cd.getId()));
} else {
results.add(null);
}
}
for (int i = 0; i < batch.size(); i++) {
ResultSet<PatchSet> result = results.get(i);
if (result != null) {
batch.get(i).patchSets = result.toList();
}
}
}
}
public static void ensureCurrentPatchSetLoaded(Iterable<ChangeData> changes)
throws OrmException {
ChangeData first = Iterables.getFirst(changes, null);
if (first == null) {
return;
} else if (first.notesMigration.readChanges()) {
for (ChangeData cd : changes) {
cd.currentPatchSet();
}
}
Map<PatchSet.Id, ChangeData> missing = Maps.newHashMap();
for (ChangeData cd : changes) {
if (cd.currentPatchSet == null && cd.patchSets == null) {
missing.put(cd.change().currentPatchSetId(), cd);
}
}
if (missing.isEmpty()) {
return;
}
for (PatchSet ps : first.db.patchSets().get(missing.keySet())) {
missing.get(ps.getId()).currentPatchSet = ps;
}
}
public static void ensureCurrentApprovalsLoaded(Iterable<ChangeData> changes)
throws OrmException {
ChangeData first = Iterables.getFirst(changes, null);
if (first == null) {
return;
} else if (first.notesMigration.readChanges()) {
for (ChangeData cd : changes) {
cd.currentApprovals();
}
}
List<ResultSet<PatchSetApproval>> results = new ArrayList<>(BATCH_SIZE);
for (List<ChangeData> batch : Iterables.partition(changes, BATCH_SIZE)) {
results.clear();
for (ChangeData cd : batch) {
if (cd.currentApprovals == null) {
PatchSet.Id psId = cd.change().currentPatchSetId();
results.add(cd.db.patchSetApprovals().byPatchSet(psId));
} else {
results.add(null);
}
}
for (int i = 0; i < batch.size(); i++) {
ResultSet<PatchSetApproval> result = results.get(i);
if (result != null) {
batch.get(i).currentApprovals = sortApprovals(result);
}
}
}
}
  /**
   * Ensure change messages for the current patch set are loaded for every given
   * ChangeData, batching the ReviewDb queries {@link #BATCH_SIZE} at a time.
   *
   * @throws OrmException on database errors.
   */
  public static void ensureMessagesLoaded(Iterable<ChangeData> changes)
      throws OrmException {
    ChangeData first = Iterables.getFirst(changes, null);
    if (first == null) {
      return;
    } else if (first.notesMigration.readChanges()) {
      // Notes path loads messages directly; no ReviewDb batching needed.
      for (ChangeData cd : changes) {
        cd.messages();
      }
      return;
    }
    List<ResultSet<ChangeMessage>> results = new ArrayList<>(BATCH_SIZE);
    for (List<ChangeData> batch : Iterables.partition(changes, BATCH_SIZE)) {
      results.clear();
      // Issue one query per change missing messages; a null placeholder keeps
      // results.get(i) aligned with batch.get(i).
      for (ChangeData cd : batch) {
        if (cd.messages == null) {
          PatchSet.Id psId = cd.change().currentPatchSetId();
          results.add(cd.db.changeMessages().byPatchSet(psId));
        } else {
          results.add(null);
        }
      }
      for (int i = 0; i < batch.size(); i++) {
        ResultSet<ChangeMessage> result = results.get(i);
        if (result != null) {
          batch.get(i).messages = result.toList();
        }
      }
    }
  }
public static void ensureReviewedByLoadedForOpenChanges(
Iterable<ChangeData> changes) throws OrmException {
List<ChangeData> pending = new ArrayList<>();
for (ChangeData cd : changes) {
if (cd.reviewedBy == null && cd.change().getStatus().isOpen()) {
pending.add(cd);
}
}
if (!pending.isEmpty()) {
ensureAllPatchSetsLoaded(pending);
ensureMessagesLoaded(pending);
for (ChangeData cd : pending) {
cd.reviewedBy();
}
}
}
  /** Assisted-injection factory for creating ChangeData instances. */
  public interface Factory {
    ChangeData create(ReviewDb db, Change.Id id);
    ChangeData create(ReviewDb db, Change c);
    ChangeData create(ReviewDb db, ChangeControl c);
  }
  /**
   * Create an instance for testing only.
   * <p>
   * Attempting to lazy load data will fail with NPEs. Callers may consider
   * manually setting fields that can be set.
   *
   * @param id change ID
   * @param currentPatchSetId patch set number to pre-populate as current.
   * @return instance for testing.
   */
  public static ChangeData createForTest(Change.Id id, int currentPatchSetId) {
    // All injected dependencies are null; only the ID and current patch set
    // are usable without further manual setup.
    ChangeData cd = new ChangeData(null, null, null, null, null, null, null,
        null, null, null, null, null, null, id);
    cd.currentPatchSet = new PatchSet(new PatchSet.Id(id, currentPatchSetId));
    return cd;
  }
  // Injected dependencies; assigned once in the constructors.
  private final ReviewDb db;
  private final GitRepositoryManager repoManager;
  private final ChangeControl.GenericFactory changeControlFactory;
  private final IdentifiedUser.GenericFactory userFactory;
  private final ProjectCache projectCache;
  private final MergeUtil.Factory mergeUtilFactory;
  private final ChangeNotes.Factory notesFactory;
  private final ApprovalsUtil approvalsUtil;
  private final ChangeMessagesUtil cmUtil;
  private final PatchLineCommentsUtil plcUtil;
  private final PatchListCache patchListCache;
  private final NotesMigration notesMigration;
  private final MergeabilityCache mergeabilityCache;
  private final Change.Id legacyId;
  // Lazily populated state; null until first loaded (directly or via the
  // static ensure*Loaded bulk helpers, which write these fields).
  private ChangeDataSource returnedBySource;
  private Change change;
  private ChangeNotes notes;
  private String commitMessage;
  private List<FooterLine> commitFooters;
  private PatchSet currentPatchSet;
  private Collection<PatchSet> patchSets;
  private ListMultimap<PatchSet.Id, PatchSetApproval> allApprovals;
  private List<PatchSetApproval> currentApprovals;
  // Cached file paths keyed by patch set number.
  private Map<Integer, List<String>> files = new HashMap<>();
  private Collection<PatchLineComment> publishedComments;
  private CurrentUser visibleTo;
  private ChangeControl changeControl;
  private List<ChangeMessage> messages;
  private List<SubmitRecord> submitRecords;
  private ChangedLines changedLines;
  private Boolean mergeable;
  private Set<Account.Id> editsByUser;
  private Set<Account.Id> reviewedBy;
  /** Create a ChangeData identified only by change ID; everything is lazy-loaded. */
  @AssistedInject
  private ChangeData(
      GitRepositoryManager repoManager,
      ChangeControl.GenericFactory changeControlFactory,
      IdentifiedUser.GenericFactory userFactory,
      ProjectCache projectCache,
      MergeUtil.Factory mergeUtilFactory,
      ChangeNotes.Factory notesFactory,
      ApprovalsUtil approvalsUtil,
      ChangeMessagesUtil cmUtil,
      PatchLineCommentsUtil plcUtil,
      PatchListCache patchListCache,
      NotesMigration notesMigration,
      MergeabilityCache mergeabilityCache,
      @Assisted ReviewDb db,
      @Assisted Change.Id id) {
    this.db = db;
    this.repoManager = repoManager;
    this.changeControlFactory = changeControlFactory;
    this.userFactory = userFactory;
    this.projectCache = projectCache;
    this.mergeUtilFactory = mergeUtilFactory;
    this.notesFactory = notesFactory;
    this.approvalsUtil = approvalsUtil;
    this.cmUtil = cmUtil;
    this.plcUtil = plcUtil;
    this.patchListCache = patchListCache;
    this.notesMigration = notesMigration;
    this.mergeabilityCache = mergeabilityCache;
    legacyId = id;
  }
// Constructs a ChangeData from an already-loaded Change row, avoiding a
// ReviewDb round trip for the change itself.
@AssistedInject
private ChangeData(
    GitRepositoryManager repoManager,
    ChangeControl.GenericFactory changeControlFactory,
    IdentifiedUser.GenericFactory userFactory,
    ProjectCache projectCache,
    MergeUtil.Factory mergeUtilFactory,
    ChangeNotes.Factory notesFactory,
    ApprovalsUtil approvalsUtil,
    ChangeMessagesUtil cmUtil,
    PatchLineCommentsUtil plcUtil,
    PatchListCache patchListCache,
    NotesMigration notesMigration,
    MergeabilityCache mergeabilityCache,
    @Assisted ReviewDb db,
    @Assisted Change c) {
  this.db = db;
  this.repoManager = repoManager;
  this.changeControlFactory = changeControlFactory;
  this.userFactory = userFactory;
  this.projectCache = projectCache;
  this.mergeUtilFactory = mergeUtilFactory;
  this.notesFactory = notesFactory;
  this.approvalsUtil = approvalsUtil;
  this.cmUtil = cmUtil;
  this.plcUtil = plcUtil;
  this.patchListCache = patchListCache;
  this.notesMigration = notesMigration;
  this.mergeabilityCache = mergeabilityCache;
  legacyId = c.getId();
  change = c;
}
// Constructs a ChangeData from an existing ChangeControl, pre-populating the
// change, control, and notes caches from the control object.
@AssistedInject
private ChangeData(
    GitRepositoryManager repoManager,
    ChangeControl.GenericFactory changeControlFactory,
    IdentifiedUser.GenericFactory userFactory,
    ProjectCache projectCache,
    MergeUtil.Factory mergeUtilFactory,
    ChangeNotes.Factory notesFactory,
    ApprovalsUtil approvalsUtil,
    ChangeMessagesUtil cmUtil,
    PatchLineCommentsUtil plcUtil,
    PatchListCache patchListCache,
    NotesMigration notesMigration,
    MergeabilityCache mergeabilityCache,
    @Assisted ReviewDb db,
    @Assisted ChangeControl c) {
  this.db = db;
  this.repoManager = repoManager;
  this.changeControlFactory = changeControlFactory;
  this.userFactory = userFactory;
  this.projectCache = projectCache;
  this.mergeUtilFactory = mergeUtilFactory;
  this.notesFactory = notesFactory;
  this.approvalsUtil = approvalsUtil;
  this.cmUtil = cmUtil;
  this.plcUtil = plcUtil;
  this.patchListCache = patchListCache;
  this.notesMigration = notesMigration;
  this.mergeabilityCache = mergeabilityCache;
  legacyId = c.getChange().getId();
  change = c.getChange();
  changeControl = c;
  notes = c.getNotes();
}
/** @return the ReviewDb handle this ChangeData reads from. */
public ReviewDb db() {
  return db;
}
/** @return true if this instance was produced by the given source (reference equality). */
public boolean isFromSource(ChangeDataSource s) {
  return s == returnedBySource;
}
/** Records the source that produced this instance, for later isFromSource checks. */
public void cacheFromSource(ChangeDataSource s) {
  returnedBySource = s;
}
/**
 * Pre-populates the file-path cache for the current patch set with an
 * immutable copy of the given paths. No-op if the current patch set
 * cannot be resolved.
 */
public void setCurrentFilePaths(List<String> filePaths) throws OrmException {
  PatchSet ps = currentPatchSet();
  if (ps != null) {
    files.put(ps.getPatchSetId(), ImmutableList.copyOf(filePaths));
  }
}
/**
 * Returns the file paths touched by the current patch set, or null if the
 * change (and therefore its current patch set) cannot be loaded.
 *
 * @throws OrmException an error occurred reading the database.
 */
public List<String> currentFilePaths() throws OrmException {
  PatchSet ps = currentPatchSet();
  if (ps == null) {
    return null;
  }
  // Use the local result of currentPatchSet() rather than re-reading the
  // currentPatchSet field; same object, but avoids the hidden coupling.
  return filePaths(ps);
}
/**
 * Returns the sorted, unmodifiable list of file paths touched by the given
 * patch set, computed from the patch list cache and memoized per patch set ID.
 * The commit-message pseudo-file is excluded; renames contribute both the old
 * and new name. Returns null if the change cannot be loaded; returns an empty
 * list (and caches it) if the patch list is unavailable.
 */
public List<String> filePaths(PatchSet ps) throws OrmException {
  if (!files.containsKey(ps.getPatchSetId())) {
    Change c = change();
    if (c == null) {
      return null;
    }
    PatchList p;
    try {
      p = patchListCache.get(c, ps);
    } catch (PatchListNotAvailableException e) {
      // Cache the empty result so we do not retry a known-unavailable list.
      List<String> emptyFileList = Collections.emptyList();
      files.put(ps.getPatchSetId(), emptyFileList);
      return emptyFileList;
    }
    List<String> r = new ArrayList<>(p.getPatches().size());
    for (PatchListEntry e : p.getPatches()) {
      if (Patch.COMMIT_MSG.equals(e.getNewName())) {
        continue;
      }
      switch (e.getChangeType()) {
        case ADDED:
        case MODIFIED:
        case DELETED:
        case COPIED:
        case REWRITE:
          r.add(e.getNewName());
          break;
        case RENAMED:
          // A rename touches both sides.
          r.add(e.getOldName());
          r.add(e.getNewName());
          break;
      }
    }
    Collections.sort(r);
    files.put(ps.getPatchSetId(), Collections.unmodifiableList(r));
  }
  return files.get(ps.getPatchSetId());
}
/**
 * Returns the insertion/deletion counts for the current patch set, lazily
 * computed from the patch list cache. Returns null if the change, current
 * patch set, or patch list cannot be loaded.
 */
public ChangedLines changedLines() throws OrmException {
  if (changedLines == null) {
    Change c = change();
    if (c == null) {
      return null;
    }
    PatchSet ps = currentPatchSet();
    if (ps == null) {
      return null;
    }
    PatchList p;
    try {
      p = patchListCache.get(c, ps);
    } catch (PatchListNotAvailableException e) {
      // Unlike filePaths(), the miss is not cached; a later call may retry.
      return null;
    }
    changedLines = new ChangedLines(p.getInsertions(), p.getDeletions());
  }
  return changedLines;
}
/** Pre-populates the changed-lines cache, bypassing the patch list lookup. */
public void setChangedLines(int insertions, int deletions) {
  changedLines = new ChangedLines(insertions, deletions);
}
/** @return the legacy numeric change ID. */
public Change.Id getId() {
  return legacyId;
}
// Fast-path visibility check: true only if visibility was previously cached
// for this exact user instance via cacheVisibleTo().
boolean fastIsVisibleTo(CurrentUser user) {
  return visibleTo == user;
}
/** @return true if a ChangeControl has already been loaded or cached. */
public boolean hasChangeControl() {
  return changeControl != null;
}
/**
 * Returns the ChangeControl for this change, lazily created for the change
 * owner's identity. NoSuchChangeException is wrapped as OrmException.
 */
public ChangeControl changeControl() throws OrmException {
  if (changeControl == null) {
    Change c = change();
    try {
      changeControl =
          changeControlFactory.controlFor(c, userFactory.create(c.getOwner()));
    } catch (NoSuchChangeException e) {
      throw new OrmException(e);
    }
  }
  return changeControl;
}
// Caches a control known to grant visibility, recording both the control and
// the user it was evaluated for (consumed by fastIsVisibleTo()).
void cacheVisibleTo(ChangeControl ctl) {
  visibleTo = ctl.getCurrentUser();
  changeControl = ctl;
}
/** Returns the Change row, loading it from ReviewDb on first access. */
public Change change() throws OrmException {
  if (change == null) {
    reloadChange();
  }
  return change;
}
/** Pre-populates the cached Change row. */
public void setChange(Change c) {
  change = c;
}
/** Forces a fresh read of the Change row from ReviewDb; may cache null if absent. */
public Change reloadChange() throws OrmException {
  change = db.changes().get(legacyId);
  return change;
}
/** Returns the ChangeNotes for this change, lazily created from the Change row. */
public ChangeNotes notes() throws OrmException {
  if (notes == null) {
    notes = notesFactory.create(change());
  }
  return notes;
}
/**
 * Returns the change's current patch set, resolved by scanning patchSets()
 * for the ID recorded on the Change row. Returns null if the change cannot
 * be loaded or no patch set matches; a miss is not cached.
 */
public PatchSet currentPatchSet() throws OrmException {
  if (currentPatchSet == null) {
    Change c = change();
    if (c == null) {
      return null;
    }
    for (PatchSet p : patchSets()) {
      if (p.getId().equals(c.currentPatchSetId())) {
        currentPatchSet = p;
        return p;
      }
    }
  }
  return currentPatchSet;
}
/**
 * Returns the approvals on the current patch set as an immutable list,
 * lazily loaded via ApprovalsUtil. An unloadable change caches an empty list.
 */
public List<PatchSetApproval> currentApprovals()
    throws OrmException {
  if (currentApprovals == null) {
    Change c = change();
    if (c == null) {
      currentApprovals = Collections.emptyList();
    } else {
      currentApprovals = ImmutableList.copyOf(approvalsUtil.byPatchSet(
          db, changeControl(), c.currentPatchSetId()));
    }
  }
  return currentApprovals;
}
/** Pre-populates the current-approvals cache. */
public void setCurrentApprovals(List<PatchSetApproval> approvals) {
  currentApprovals = approvals;
}
/**
 * Returns the full commit message of the current patch set's commit, lazily
 * loaded from the repository; null if the commit data cannot be loaded.
 */
public String commitMessage() throws IOException, OrmException {
  if (commitMessage == null) {
    if (!loadCommitData()) {
      return null;
    }
  }
  return commitMessage;
}
/**
 * Returns the footer lines of the current patch set's commit, lazily loaded
 * from the repository; null if the commit data cannot be loaded.
 */
public List<FooterLine> commitFooters() throws IOException, OrmException {
  if (commitFooters == null) {
    if (!loadCommitData()) {
      return null;
    }
  }
  return commitFooters;
}
// Parses the current patch set's commit out of the project repository and
// fills the commitMessage and commitFooters caches in one pass. Returns
// false when there is no current patch set to load from.
private boolean loadCommitData() throws OrmException,
    RepositoryNotFoundException, IOException, MissingObjectException,
    IncorrectObjectTypeException {
  PatchSet ps = currentPatchSet();
  if (ps == null) {
    return false;
  }
  String sha1 = ps.getRevision().get();
  // try-with-resources closes both the repository and the walk.
  try (Repository repo = repoManager.openRepository(change().getProject());
      RevWalk walk = new RevWalk(repo)) {
    RevCommit c = walk.parseCommit(ObjectId.fromString(sha1));
    commitMessage = c.getFullMessage();
    commitFooters = c.getFooterLines();
  }
  return true;
}
/**
 * @return patches for the change, lazily loaded from ReviewDb and cached.
 * @throws OrmException an error occurred reading the database.
 */
public Collection<PatchSet> patchSets()
    throws OrmException {
  if (patchSets == null) {
    patchSets = db.patchSets().byChange(legacyId).toList();
  }
  return patchSets;
}
/**
 * Pre-populates the patch-set cache, invalidating the cached current patch
 * set so it is re-resolved against the new collection.
 */
public void setPatchSets(Collection<PatchSet> patchSets) {
  this.currentPatchSet = null;
  this.patchSets = patchSets;
}
/**
 * @return patch set with the given ID, or null if it does not exist.
 *     The cached current patch set is checked first as a fast path.
 * @throws OrmException an error occurred reading the database.
 */
public PatchSet patchSet(PatchSet.Id psId) throws OrmException {
  if (currentPatchSet != null && currentPatchSet.getId().equals(psId)) {
    return currentPatchSet;
  }
  for (PatchSet ps : patchSets()) {
    if (ps.getId().equals(psId)) {
      return ps;
    }
  }
  return null;
}
/**
 * @return all patch set approvals for the change, keyed by ID, ordered by
 *     timestamp within each patch set. Lazily loaded via ApprovalsUtil.
 * @throws OrmException an error occurred reading the database.
 */
public ListMultimap<PatchSet.Id, PatchSetApproval> approvals()
    throws OrmException {
  if (allApprovals == null) {
    allApprovals = approvalsUtil.byChange(db, notes());
  }
  return allApprovals;
}
/** Returns reviewers grouped by state, derived from notes and all approvals. */
public SetMultimap<ReviewerState, Account.Id> reviewers()
    throws OrmException {
  return approvalsUtil.getReviewers(notes(), approvals().values());
}
/** Returns all published inline comments on the change, lazily loaded. */
public Collection<PatchLineComment> publishedComments()
    throws OrmException {
  if (publishedComments == null) {
    publishedComments = plcUtil.publishedByChange(db, notes());
  }
  return publishedComments;
}
/** Returns all change messages, lazily loaded via ChangeMessagesUtil. */
public List<ChangeMessage> messages()
    throws OrmException {
  if (messages == null) {
    messages = cmUtil.byChange(db, notes());
  }
  return messages;
}
/** Caches externally computed submit records. */
public void setSubmitRecords(List<SubmitRecord> records) {
  submitRecords = records;
}
/** @return cached submit records, or null if none were set. */
public List<SubmitRecord> getSubmitRecords() {
  return submitRecords;
}
/** Pre-populates the mergeability cache (nullable to force recomputation). */
public void setMergeable(Boolean mergeable) {
  this.mergeable = mergeable;
}
/**
 * Returns whether the current patch set is mergeable into its destination
 * branch, lazily computed and cached. Merged changes are trivially mergeable.
 * Returns null if the change cannot be loaded or the patch set is not
 * visible to the current user; I/O failures are wrapped as OrmException.
 */
public Boolean isMergeable() throws OrmException {
  if (mergeable == null) {
    Change c = change();
    if (c == null) {
      return null;
    }
    if (c.getStatus() == Change.Status.MERGED) {
      mergeable = true;
    } else {
      PatchSet ps = currentPatchSet();
      if (ps == null || !changeControl().isPatchVisible(ps, db)) {
        return null;
      }
      try (Repository repo = repoManager.openRepository(c.getProject())) {
        Ref ref = repo.getRefDatabase().exactRef(c.getDest().get());
        // The submit type must be resolvable before consulting the cache.
        SubmitTypeRecord rec = new SubmitRuleEvaluator(this)
            .getSubmitType();
        if (rec.status != SubmitTypeRecord.Status.OK) {
          throw new OrmException(
              "Error in mergeability check: " + rec.errorMessage);
        }
        String mergeStrategy = mergeUtilFactory
            .create(projectCache.get(c.getProject()))
            .mergeStrategyName();
        mergeable = mergeabilityCache.get(
            ObjectId.fromString(ps.getRevision().get()),
            ref, rec.type, mergeStrategy, c.getDest(), repo, db);
      } catch (IOException e) {
        throw new OrmException(e);
      }
    }
  }
  return mergeable;
}
/**
 * Returns the IDs of accounts that have a pending change edit for this
 * change, discovered by scanning the user-edit refs in the project
 * repository. An unloadable change yields an empty set; I/O failures are
 * wrapped as OrmException.
 */
public Set<Account.Id> editsByUser() throws OrmException {
  if (editsByUser == null) {
    Change c = change();
    if (c == null) {
      return Collections.emptySet();
    }
    editsByUser = new HashSet<>();
    // Use the locally loaded change consistently instead of re-reading the
    // "change" field (same object once change() succeeds, but clearer).
    Change.Id id = c.getId();
    try (Repository repo = repoManager.openRepository(c.getProject())) {
      for (String ref
          : repo.getRefDatabase().getRefs(RefNames.REFS_USERS).keySet()) {
        if (Change.Id.fromEditRefPart(ref).equals(id)) {
          editsByUser.add(Account.Id.fromRefPart(ref));
        }
      }
    } catch (IOException e) {
      throw new OrmException(e);
    }
  }
  return editsByUser;
}
/**
 * Returns accounts that reviewed the change since the owner's last activity:
 * change messages and patch set uploads are merged into one event stream,
 * sorted newest first, and authors are collected until the owner's own
 * event is reached. An unloadable change yields an empty set.
 */
public Set<Account.Id> reviewedBy() throws OrmException {
  if (reviewedBy == null) {
    Change c = change();
    if (c == null) {
      return Collections.emptySet();
    }
    List<ReviewedByEvent> events = new ArrayList<>();
    for (ChangeMessage msg : messages()) {
      // Messages without an author (e.g. system-generated) are skipped.
      if (msg.getAuthor() != null) {
        events.add(ReviewedByEvent.create(msg));
      }
    }
    for (PatchSet ps : patchSets()) {
      events.add(ReviewedByEvent.create(ps));
    }
    // Newest first; iteration stops at the owner's most recent event.
    Collections.sort(events, Collections.reverseOrder());
    reviewedBy = new LinkedHashSet<>();
    Account.Id owner = c.getOwner();
    for (ReviewedByEvent event : events) {
      if (owner.equals(event.author())) {
        break;
      }
      reviewedBy.add(event.author());
    }
  }
  return reviewedBy;
}
/** Pre-populates the reviewed-by cache. */
public void setReviewedBy(Set<Account.Id> reviewedBy) {
  this.reviewedBy = reviewedBy;
}
// Value type pairing an author with a timestamp, used by reviewedBy() to
// merge change messages and patch set uploads into one sortable stream.
@AutoValue
abstract static class ReviewedByEvent implements Comparable<ReviewedByEvent> {
  private static ReviewedByEvent create(PatchSet ps) {
    return new AutoValue_ChangeData_ReviewedByEvent(
        ps.getUploader(), ps.getCreatedOn());
  }

  private static ReviewedByEvent create(ChangeMessage msg) {
    return new AutoValue_ChangeData_ReviewedByEvent(
        msg.getAuthor(), msg.getWrittenOn());
  }

  public abstract Account.Id author();

  public abstract Timestamp ts();

  // Natural order is chronological; callers reverse it for newest-first.
  @Override
  public int compareTo(ReviewedByEvent other) {
    return ts().compareTo(other.ts());
  }
}
// Prefers the full Change row when loaded; otherwise falls back to the ID.
@Override
public String toString() {
  MoreObjects.ToStringHelper h = MoreObjects.toStringHelper(this);
  if (change != null) {
    h.addValue(change);
  } else {
    h.addValue(legacyId);
  }
  return h.toString();
}
/** Immutable pair of insertion and deletion counts for a patch set. */
public static class ChangedLines {
  public final int insertions;
  public final int deletions;

  ChangedLines(int insertions, int deletions) {
    this.insertions = insertions;
    this.deletions = deletions;
  }
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.shared.kerberos.codec;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
import java.nio.ByteBuffer;
import org.apache.directory.api.asn1.DecoderException;
import org.apache.directory.api.asn1.EncoderException;
import org.apache.directory.api.asn1.ber.Asn1Decoder;
import org.apache.directory.api.util.Strings;
import org.apache.directory.shared.kerberos.codec.paEncTsEnc.PaEncTsEncContainer;
import org.apache.directory.shared.kerberos.components.PaEncTsEnc;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
/**
* Test cases for PaEncTsEnc codec.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
public class PaEncTsEncDecoderTest
{
    /**
     * Decodes a complete PA-ENC-TS-ENC PDU (timestamp + microseconds),
     * checks the decoded values, then re-encodes and compares with the
     * original bytes.
     */
    @Test
    public void testDecodeFullPaEncTsEnc()
    {
        ByteBuffer stream = ByteBuffer.allocate( 0x1A );

        stream.put( new byte[]
            {
                0x30, 0x18,
                ( byte ) 0xA0, 0x11, // PaTimestamp
                0x18,
                0x0F,
                '2', '0', '1', '0', '1', '0', '1', '0',
                '2', '3', '4', '5', '4', '5', 'Z',
                ( byte ) 0xA1,
                0x03, // PaUsec
                0x02,
                0x01,
                0x01
            } );

        // Dump before flip(): array() is position-independent.
        String decodedPdu = Strings.dumpBytes( stream.array() );
        stream.flip();

        PaEncTsEncContainer paEncTsEncContainer = new PaEncTsEncContainer();

        try
        {
            Asn1Decoder.decode( stream, paEncTsEncContainer );
        }
        catch ( DecoderException de )
        {
            fail( de.getMessage() );
        }

        PaEncTsEnc paEncTsEnc = paEncTsEncContainer.getPaEncTsEnc();
        assertEquals( "20101010234545Z", paEncTsEnc.getPaTimestamp().toString() );
        assertEquals( 1, paEncTsEnc.getPausec() );

        // Round-trip: re-encode and compare byte dumps.
        ByteBuffer bb = ByteBuffer.allocate( paEncTsEnc.computeLength() );

        try
        {
            bb = paEncTsEnc.encode( bb );

            // Check the length
            assertEquals( 0x1A, bb.limit() );

            String encodedPdu = Strings.dumpBytes( bb.array() );
            assertEquals( encodedPdu, decodedPdu );
        }
        catch ( EncoderException ee )
        {
            fail();
        }
    }

    /** An empty SEQUENCE must be rejected by the decoder. */
    @Test
    public void testDecodePaEncTsEncWithEmptySeq() throws DecoderException
    {
        ByteBuffer stream = ByteBuffer.allocate( 2 );

        stream.put( new byte[]
            {
                0x30, 0x0
            } );

        stream.flip();

        PaEncTsEncContainer container = new PaEncTsEncContainer();

        Assertions.assertThrows( DecoderException.class, () -> {
            Asn1Decoder.decode(stream, container);
        } );
    }

    /** An empty [0] (patimestamp) tag must be rejected. */
    @Test
    public void testDecodePaEncTsEncEmptyPaTimestamp() throws DecoderException
    {
        ByteBuffer stream = ByteBuffer.allocate( 4 );

        stream.put( new byte[]
            {
                0x30, 0x02,
                ( byte ) 0xA0, 0x00
            } );

        stream.flip();

        PaEncTsEncContainer container = new PaEncTsEncContainer();

        Assertions.assertThrows( DecoderException.class, () -> {
            Asn1Decoder.decode(stream, container);
        } );
    }

    /** A zero-length GeneralizedTime inside [0] must be rejected. */
    @Test
    public void testDecodeAdAndOrNullPaTimestamp() throws DecoderException
    {
        ByteBuffer stream = ByteBuffer.allocate( 6 );

        stream.put( new byte[]
            {
                0x30, 0x04,
                ( byte ) 0xA0, 0x02,
                0x18, 0x00
            } );

        stream.flip();

        PaEncTsEncContainer container = new PaEncTsEncContainer();

        Assertions.assertThrows( DecoderException.class, () -> {
            Asn1Decoder.decode(stream, container);
        } );
    }

    /** A PDU missing the mandatory patimestamp ([1] only) must be rejected. */
    @Test
    public void testDecodeAdAndOrNoPaTimestamp() throws DecoderException
    {
        ByteBuffer stream = ByteBuffer.allocate( 0x07 );

        stream.put( new byte[]
            {
                0x30, 0x05,
                ( byte ) 0xA1, 0x03, // PaUsec
                0x02,
                0x01,
                0x01
            } );

        stream.flip();

        PaEncTsEncContainer container = new PaEncTsEncContainer();

        Assertions.assertThrows( DecoderException.class, () -> {
            Asn1Decoder.decode(stream, container);
        } );
    }

    /**
     * Decodes a PDU with only the timestamp (pausec is OPTIONAL); the absent
     * microseconds field is reported as -1. Also round-trips the encoding.
     */
    @Test
    public void testDecodePaEncTsEncNoPaUsec()
    {
        ByteBuffer stream = ByteBuffer.allocate( 0x15 );

        stream.put( new byte[]
            {
                0x30, 0x13,
                ( byte ) 0xA0, 0x11, // PaTimestamp
                0x18,
                0x0F,
                '2', '0', '1', '0', '1', '0', '1', '0',
                '2', '3', '4', '5', '4', '5', 'Z',
            } );

        String decodedPdu = Strings.dumpBytes( stream.array() );
        stream.flip();

        PaEncTsEncContainer paEncTsEncContainer = new PaEncTsEncContainer();

        try
        {
            Asn1Decoder.decode( stream, paEncTsEncContainer );
        }
        catch ( DecoderException de )
        {
            fail( de.getMessage() );
        }

        PaEncTsEnc paEncTsEnc = paEncTsEncContainer.getPaEncTsEnc();
        assertEquals( "20101010234545Z", paEncTsEnc.getPaTimestamp().toString() );
        // -1 signals the absent OPTIONAL pausec field.
        assertEquals( -1, paEncTsEnc.getPausec() );

        ByteBuffer bb = ByteBuffer.allocate( paEncTsEnc.computeLength() );

        try
        {
            bb = paEncTsEnc.encode( bb );

            // Check the length
            assertEquals( 0x15, bb.limit() );

            String encodedPdu = Strings.dumpBytes( bb.array() );
            assertEquals( encodedPdu, decodedPdu );
        }
        catch ( EncoderException ee )
        {
            fail();
        }
    }
}
|
|
/*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.util.trace;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.log.Logger;
import com.facebook.buck.util.HumanReadableException;
import com.facebook.buck.util.trace.ChromeTraceParser.ChromeTraceEventMatcher;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.FileVisitResult;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.attribute.FileTime;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
/**
* Utility to help with reading data from build trace files.
*/
/**
 * Utility to help with reading data from build trace files.
 */
public class BuildTraces {

  /**
   * Regex pattern that can be used as a parameter to {@link Pattern#compile(String)} to match a
   * valid trace id.
   */
  private static final String TRACE_ID_PATTERN_TEXT = "([0-9a-zA-Z-]+)";
  public static final Pattern TRACE_ID_PATTERN = Pattern.compile(TRACE_ID_PATTERN_TEXT);

  private static final Logger logger = Logger.get(BuildTraces.class);

  private static final Pattern TRACES_FILE_PATTERN = Pattern.compile("build\\..*\\.trace$");

  private final ProjectFilesystem projectFilesystem;

  public BuildTraces(ProjectFilesystem projectFilesystem) {
    this.projectFilesystem = projectFilesystem;
  }

  /** Metadata about a single trace file: the originating command and its mtime. */
  public static class TraceAttributes {
    // SimpleDateFormat is not thread-safe, so each thread gets its own instance.
    private static final ThreadLocal<DateFormat> DATE_FORMAT = new ThreadLocal<DateFormat>() {
      @Override
      protected DateFormat initialValue() {
        return new SimpleDateFormat("EEE, MMM d h:mm a");
      }
    };

    private final Optional<String> command;
    private final FileTime lastModifiedTime;

    public TraceAttributes(Optional<String> command, FileTime lastModifiedTime) {
      this.command = command;
      this.lastModifiedTime = lastModifiedTime;
    }

    /** @return the command that produced the trace, if it could be parsed. */
    public Optional<String> getCommand() {
      return command;
    }

    public FileTime getLastModifiedTime() {
      return lastModifiedTime;
    }

    /** @return the formatted mtime, or the empty string for an epoch (unset) mtime. */
    public String getFormattedDateTime() {
      if (lastModifiedTime.toMillis() != 0) {
        return DATE_FORMAT.get().format(Date.from(lastModifiedTime.toInstant()));
      } else {
        return "";
      }
    }
  }

  /** Opens an input stream for every trace file associated with the given build id. */
  public Iterable<InputStream> getInputsForTraces(String id) throws IOException {
    ImmutableList.Builder<InputStream> tracesBuilder = ImmutableList.builder();
    for (Path p : getPathsToTraces(id)) {
      tracesBuilder.add(projectFilesystem.getInputStreamForRelativePath(p));
    }
    return tracesBuilder.build();
  }

  /**
   * Returns attributes of the main trace file (name of form build.{id}.trace)
   * for the given build id.
   *
   * @throws HumanReadableException if no such trace exists.
   */
  public TraceAttributes getTraceAttributesFor(String id) throws IOException {
    for (Path p : getPathsToTraces(id)) {
      if (isTraceForBuild(p, id)) {
        return getTraceAttributesFor(p);
      }
    }
    throw new HumanReadableException("Could not find a build trace with id %s.", id);
  }

  /**
   * Parses a trace file and returns the command that the user executed to create the trace.
   * <p>
   * This method tries to be reasonably tolerant of changes to the .trace file schema, returning
   * {@link Optional#empty()} if it does not find the fields in the JSON that it expects.
   */
  public TraceAttributes getTraceAttributesFor(Path pathToTrace) throws IOException {
    FileTime lastModifiedTime = projectFilesystem.getLastModifiedTime(pathToTrace);
    Optional<String> command = parseCommandFrom(pathToTrace);
    return new TraceAttributes(command, lastModifiedTime);
  }

  // Extracts the launching command from the trace JSON; parse errors are
  // logged and reported as absent rather than propagated.
  private Optional<String> parseCommandFrom(Path pathToTrace) {
    Set<ChromeTraceParser.ChromeTraceEventMatcher<?>> matchers = ImmutableSet.of(
        ChromeTraceParser.COMMAND);
    ChromeTraceParser parser = new ChromeTraceParser(projectFilesystem);

    Map<ChromeTraceEventMatcher<?>, Object> results;
    try {
      results = parser.parse(pathToTrace, matchers);
    } catch (IOException e) {
      logger.error(e);
      return Optional.empty();
    }

    return ChromeTraceParser.getResultForMatcher(ChromeTraceParser.COMMAND, results);
  }

  // True if the file name is exactly "build.{id}.trace".
  private boolean isTraceForBuild(Path path, String id) {
    String testPrefix = "build.";
    String testSuffix = "." + id + ".trace";
    String name = path.getFileName().toString();
    return name.startsWith(testPrefix) && name.endsWith(testSuffix);
  }

  /**
   * The most recent trace (the one with the greatest last-modified time) will be listed first.
   */
  public List<Path> listTraceFilesByLastModified() throws IOException {
    final List<Path> allTraces = new ArrayList<>();

    projectFilesystem.walkFileTree(
        projectFilesystem.getBuckPaths().getLogDir(),
        new SimpleFileVisitor<Path>() {
          @Override
          public FileVisitResult visitFile(
              Path file, BasicFileAttributes attrs) throws IOException {
            Matcher matcher = TRACES_FILE_PATTERN.matcher(file.getFileName().toString());
            if (matcher.matches()) {
              allTraces.add(file);
            }
            return super.visitFile(file, attrs);
          }
        });

    // Sort by:
    // 1. Reverse chronological order.
    // 2. Alphabetical order.
    Collections.sort(allTraces, (path1, path2) -> {
      FileTime lastModifiedTime1;
      FileTime lastModifiedTime2;
      try {
        lastModifiedTime1 = projectFilesystem.getLastModifiedTime(path1);
        lastModifiedTime2 = projectFilesystem.getLastModifiedTime(path2);
      } catch (IOException e) {
        // Comparator cannot throw checked exceptions; surface as unchecked.
        throw new RuntimeException(e);
      }
      // (Fixed: removed a dead "result = 0" initialization that was
      // unconditionally overwritten.)
      int result = lastModifiedTime2.compareTo(lastModifiedTime1);
      if (result == 0) {
        return path2.toString().compareTo(path1.toString());
      } else {
        return result;
      }
    });

    return allTraces;
  }

  /**
   * Returns a collection of paths containing traces for the specified build ID.
   * <p>
   * A given build might have more than one trace file (for example,
   * the buck.py launcher has its own trace file).
   */
  private Collection<Path> getPathsToTraces(final String id) throws IOException {
    Preconditions.checkArgument(TRACE_ID_PATTERN.matcher(id).matches());

    List<Path> traces = listTraceFilesByLastModified().stream()
        .filter(input -> input.getFileName().toString().contains(id))
        .collect(Collectors.toList());

    if (traces.isEmpty()) {
      throw new HumanReadableException("Could not find a build trace with id %s.", id);
    } else {
      return traces;
    }
  }
}
|
|
/**
* Copyright 2016 Confluent Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.confluent.admin.utils;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.config.types.Password;
import org.apache.kafka.common.protocol.SecurityProtocol;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import javax.security.auth.login.Configuration;
import kafka.security.minikdc.MiniKdc;
import kafka.server.KafkaConfig;
import kafka.server.KafkaServer;
import kafka.utils.CoreUtils;
import kafka.utils.SystemTime$;
import kafka.utils.TestUtils;
import scala.Option;
import scala.Option$;
import scala.collection.JavaConversions;
/**
* This class is based on code from
* src/test/java/io/confluent/support/metrics/common/kafka/EmbeddedKafkaCluster.java
* at
* https://github.com/confluentinc/support-metrics-common/support-metrics-common/
*
* Starts an embedded Kafka cluster including a backing ZooKeeper ensemble. It adds support for
* 1. Zookeeper in clustered mode with SASL security
* 2. Kafka with SASL_SSL security
* <p>
* This class should be used for unit/integration testing only.
*/
public class EmbeddedKafkaCluster {
private static final Logger log = LoggerFactory.getLogger(EmbeddedKafkaCluster.class);
private static final Option<SecurityProtocol> INTER_BROKER_SECURITY_PROTOCOL = Option.apply
(SecurityProtocol.PLAINTEXT);
private static final boolean ENABLE_CONTROLLED_SHUTDOWN = true;
private static final boolean ENABLE_DELETE_TOPIC = false;
private static final int BROKER_PORT_BASE = 39092;
private static final boolean ENABLE_PLAINTEXT = true;
private static final boolean ENABLE_SASL_PLAINTEXT = false;
private static final int SASL_PLAINTEXT_PORT = 0;
private static final boolean ENABLE_SSL = false;
private static final int SSL_PORT = 0;
private static final int SASL_SSL_PORT_BASE = 49092;
private static Option<Properties> brokerSaslProperties = Option$.MODULE$.<Properties>empty();
private static MiniKdc kdc;
private static File trustStoreFile;
private static Properties saslProperties;
private final Map<Integer, KafkaServer> brokersById = new ConcurrentHashMap<>();
private File jaasFilePath = null;
private Option<File> brokerTrustStoreFile = Option$.MODULE$.<File>empty();
private boolean enableSASLSSL = false;
private EmbeddedZookeeperEnsemble zookeeper = null;
private int numBrokers;
private int numZookeeperPeers;
private boolean isRunning = false;
/** Creates a plaintext (no SASL_SSL) cluster. */
public EmbeddedKafkaCluster(int numBrokers, int numZookeeperPeers) throws IOException {
  this(numBrokers, numZookeeperPeers, false);
}
/** Creates a cluster with default JAAS file and MiniKdc working directory locations. */
public EmbeddedKafkaCluster(int numBrokers, int numZookeeperPeers, boolean enableSASLSSL)
    throws IOException {
  this(numBrokers, numZookeeperPeers, enableSASLSSL, null, null);
}
/**
 * Creates the cluster. When SASL_SSL is enabled, this starts a MiniKdc,
 * writes a JAAS config file, and installs the relevant JVM system
 * properties before the Zookeeper ensemble is constructed.
 *
 * @param jaasFilePath path for the generated jaas.conf, or null for a temp file
 * @param miniKDCDir   working directory for MiniKdc, or null for a temp dir
 */
public EmbeddedKafkaCluster(int numBrokers, int numZookeeperPeers, boolean enableSASLSSL,
    String jaasFilePath, String miniKDCDir) throws IOException {
  this.enableSASLSSL = enableSASLSSL;
  if (numBrokers <= 0 || numZookeeperPeers <= 0) {
    throw new IllegalArgumentException("number of servers must be >= 1");
  }
  if (jaasFilePath != null) {
    this.jaasFilePath = new File(jaasFilePath);
  }
  this.numBrokers = numBrokers;
  this.numZookeeperPeers = numZookeeperPeers;

  if (this.enableSASLSSL) {
    File workDir;
    if (miniKDCDir != null) {
      workDir = new File(miniKDCDir);
    } else {
      workDir = Files.createTempDirectory("kdc").toFile();
    }
    Properties kdcConf = MiniKdc.createConfig();
    kdc = new MiniKdc(kdcConf, workDir);
    kdc.start();
    String jaasFile = createJAASFile();
    // JVM-global: affects every JAAS consumer in this process.
    System.setProperty("java.security.auth.login.config", jaasFile);
    System.setProperty("zookeeper.authProvider.1",
        "org.apache.zookeeper.server.auth.SASLAuthenticationProvider");
    // Uncomment this to debug Kerberos issues.
    // System.setProperty("sun.security.krb5.debug","true");
    trustStoreFile = File.createTempFile("truststore", ".jks");
    saslProperties = new Properties();
    saslProperties.put(SaslConfigs.SASL_MECHANISM, "GSSAPI");
    saslProperties.put(SaslConfigs.SASL_ENABLED_MECHANISMS, "GSSAPI");
    this.brokerTrustStoreFile = Option.apply(trustStoreFile);
    // NOTE(review): brokerSaslProperties, trustStoreFile, saslProperties and
    // kdc are declared static but assigned per-instance here; two concurrent
    // clusters would clobber each other's values — confirm whether these
    // should be instance fields.
    this.brokerSaslProperties = Option.apply(saslProperties);
  }
  zookeeper = new EmbeddedZookeeperEnsemble(numZookeeperPeers);
}
/**
 * Generates a jaas.conf covering the Zookeeper server/client and Kafka
 * server/client login contexts, creating a keytab for each principal via
 * the running MiniKdc.
 *
 * @return absolute path of the written JAAS file
 */
private String createJAASFile() throws IOException {
  String zkServerPrincipal = "zookeeper/localhost";
  String zkClientPrincipal = "zkclient/localhost";
  String kafkaServerPrincipal = "kafka/localhost";
  String kafkaClientPrincipal = "client/localhost";

  if (jaasFilePath == null) {
    jaasFilePath = new File(Files.createTempDirectory("sasl").toFile(), "jaas.conf");
  }

  String template = "" +
      "Server {\n" +
      "  com.sun.security.auth.module.Krb5LoginModule required\n" +
      "  useKeyTab=true\n" +
      "  keyTab=\"$ZK_SERVER_KEYTAB$\"\n" +
      "  storeKey=true\n" +
      "  useTicketCache=false\n" +
      "  principal=\"$ZK_SERVER_PRINCIPAL$@EXAMPLE.COM\";\n" +
      "};\n" +
      "Client {\n" +
      "com.sun.security.auth.module.Krb5LoginModule required\n" +
      "  useKeyTab=true\n" +
      "  keyTab=\"$ZK_CLIENT_KEYTAB$\"\n" +
      "  storeKey=true\n" +
      "  useTicketCache=false\n" +
      "  principal=\"$ZK_CLIENT_PRINCIPAL$@EXAMPLE.COM\";" +
      "};" + "\n" +
      "KafkaServer {\n" +
      "  com.sun.security.auth.module.Krb5LoginModule required\n" +
      "  useKeyTab=true\n" +
      "  keyTab=\"$KAFKA_SERVER_KEYTAB$\"\n" +
      "  storeKey=true\n" +
      "  useTicketCache=false\n" +
      "  serviceName=kafka\n" +
      "  principal=\"$KAFKA_SERVER_PRINCIPAL$@EXAMPLE.COM\";\n" +
      "};\n" +
      "KafkaClient {\n" +
      "com.sun.security.auth.module.Krb5LoginModule required\n" +
      "  useKeyTab=true\n" +
      "  keyTab=\"$KAFKA_CLIENT_KEYTAB$\"\n" +
      "  storeKey=true\n" +
      "  useTicketCache=false\n" +
      "  serviceName=kafka\n" +
      "  principal=\"$KAFKA_CLIENT_PRINCIPAL$@EXAMPLE.COM\";" +
      "};" + "\n";
  String output = template
      .replace("$ZK_SERVER_KEYTAB$", createKeytab(zkServerPrincipal))
      .replace("$ZK_SERVER_PRINCIPAL$", zkServerPrincipal)
      .replace("$ZK_CLIENT_KEYTAB$", createKeytab(zkClientPrincipal))
      .replace("$ZK_CLIENT_PRINCIPAL$", zkClientPrincipal)
      .replace("$KAFKA_SERVER_KEYTAB$", createKeytab(kafkaServerPrincipal))
      .replace("$KAFKA_SERVER_PRINCIPAL$", kafkaServerPrincipal)
      .replace("$KAFKA_CLIENT_KEYTAB$", createKeytab(kafkaClientPrincipal))
      .replace("$KAFKA_CLIENT_PRINCIPAL$", kafkaClientPrincipal);
  log.debug("JAAS Config: " + output);

  // try-with-resources: the original FileWriter leaked if write() threw.
  try (FileWriter fwriter = new FileWriter(jaasFilePath)) {
    fwriter.write(output);
  }
  return jaasFilePath.getAbsolutePath();
}
// Creates a keytab file for the given principal via MiniKdc and returns its
// absolute path. Requires kdc to already be started.
private String createKeytab(String principal) {
  File keytabFile = TestUtils.tempFile();
  List<String> principals = new ArrayList<>();
  principals.add(principal);
  kdc.createPrincipal(keytabFile,
      JavaConversions.asScalaBuffer(principals).toList());
  log.debug("Keytab file for " + principal + " : " + keytabFile.getAbsolutePath());
  return keytabFile.getAbsolutePath();
}
/**
 * Command-line entry point: starts a cluster, writes client security
 * properties to the given path, and registers a shutdown hook that tears
 * the cluster down on JVM exit.
 */
public static void main(String... args) throws IOException {
  if (args.length != 6) {
    System.err.println("Usage : <command> <num_kafka_brokers> <num_zookeeper_nodes> " +
        "<sasl_ssl_enabled> <client properties path> <jaas_file> " +
        "<minikdc_working_dir>");
    System.exit(1);
  }

  int numBrokers = Integer.parseInt(args[0]);
  int numZKNodes = Integer.parseInt(args[1]);
  boolean isSASLSSLEnabled = Boolean.parseBoolean(args[2]);
  String clientPropsPath = args[3];
  String jaasConfigPath = args[4];
  String miniKDCDir = args[5];

  System.out.println("Starting a " + numBrokers + " node Kafka cluster with " + numZKNodes +
      " zookeeper nodes.");
  if (isSASLSSLEnabled) {
    System.out.println("SASL_SSL is enabled. jaas.conf=" + jaasConfigPath);
    System.out.println("SASL_SSL is enabled. krb.conf=" + miniKDCDir + "/krb.conf");
  }

  final EmbeddedKafkaCluster kafka = new EmbeddedKafkaCluster(numBrokers, numZKNodes,
      isSASLSSLEnabled, jaasConfigPath, miniKDCDir);

  System.out.println("Writing client properties to " + clientPropsPath);
  Properties props = kafka.getClientSecurityConfig();
  // Password.toString() masks the value; store the raw string instead.
  Password trustStorePassword = (Password) props.get("ssl.truststore.password");
  props.put("ssl.truststore.password", trustStorePassword.value());
  props.put("ssl.enabled.protocols", "TLSv1.2");
  // try-with-resources: the original FileOutputStream was never closed.
  try (FileOutputStream out = new FileOutputStream(clientPropsPath)) {
    props.store(out, null);
  }

  kafka.start();
  Runtime.getRuntime().addShutdownHook(new Thread() {
    public void run() {
      kafka.shutdown();
    }
  });
}
public Properties getClientSecurityConfig() {
if (enableSASLSSL) {
Properties clientSecurityProps = TestUtils.producerSecurityConfigs(
SecurityProtocol.SASL_SSL,
Option.apply(trustStoreFile),
Option.apply(saslProperties));
return clientSecurityProps;
} else {
return new Properties();
}
}
public void start() throws IOException {
initializeZookeeper();
for (int brokerId = 0; brokerId < numBrokers; brokerId++) {
log.debug("Starting broker with id {} ...", brokerId);
startBroker(brokerId, zookeeper.connectString());
}
isRunning = true;
}
public void shutdown() {
for (int brokerId : brokersById.keySet()) {
log.debug("Stopping broker with id {} ...", brokerId);
stopBroker(brokerId);
}
zookeeper.shutdown();
if (kdc != null) {
kdc.stop();
}
System.clearProperty("java.security.auth.login.config");
System.clearProperty("zookeeper.authProvider.1");
Configuration.setConfiguration(null);
isRunning = false;
}
    /**
     * Starts the embedded ZooKeeper ensemble, wrapping any startup IOException
     * in an unchecked exception (cause preserved).
     */
    private void initializeZookeeper() {
        try {
            zookeeper.start();
        } catch (IOException e) {
            // Keep the original exception as the cause so failures stay diagnosable.
            throw new RuntimeException(e);
        }
    }
    /**
     * Creates and starts one Kafka broker and registers it under {@code brokerId}.
     *
     * @param brokerId non-negative broker id; listener ports are offset by this id so
     *     multiple brokers can run side by side on one host
     * @param zkConnectString ZooKeeper connect string the broker should register with
     * @throws IllegalArgumentException if {@code brokerId} is negative
     * @throws IOException if broker startup fails
     */
    private void startBroker(int brokerId, String zkConnectString) throws IOException {
        if (brokerId < 0) {
            throw new IllegalArgumentException("broker id must not be negative");
        }
        // Positional Scala test helper: argument order must match
        // kafka.utils.TestUtils.createBrokerConfig exactly -- do not reorder.
        Properties props = TestUtils
            .createBrokerConfig(brokerId, zkConnectString, ENABLE_CONTROLLED_SHUTDOWN,
                ENABLE_DELETE_TOPIC, BROKER_PORT_BASE + brokerId,
                INTER_BROKER_SECURITY_PROTOCOL,
                this.brokerTrustStoreFile,
                this.brokerSaslProperties,
                ENABLE_PLAINTEXT,
                ENABLE_SASL_PLAINTEXT,
                SASL_PLAINTEXT_PORT,
                ENABLE_SSL,
                SSL_PORT,
                this.enableSASLSSL,
                SASL_SSL_PORT_BASE + brokerId,
                Option.<String>empty());
        KafkaServer broker = TestUtils.createServer(KafkaConfig.fromProps(props), SystemTime$
            .MODULE$);
        brokersById.put(brokerId, broker);
    }
private void stopBroker(int brokerId) {
if (brokersById.containsKey(brokerId)) {
KafkaServer broker = brokersById.get(brokerId);
broker.shutdown();
broker.awaitShutdown();
CoreUtils.delete(broker.config().logDirs());
brokersById.remove(brokerId);
}
}
    /** Overrides the path of the JAAS configuration file used by this cluster. */
    public void setJaasFilePath(File jaasFilePath) {
        this.jaasFilePath = jaasFilePath;
    }
public String getBootstrapBroker(SecurityProtocol securityProtocol) {
switch (securityProtocol) {
case PLAINTEXT:
// The first broker will always listen on this port.
return "localhost:" + BROKER_PORT_BASE;
case SASL_SSL:
// The first broker will always listen on this port.
return "localhost:" + SASL_SSL_PORT_BASE;
default:
throw new RuntimeException(securityProtocol.name() + " is not supported.");
}
}
    /** Returns {@code true} after {@link #start()} completes and until {@link #shutdown()}. */
    public boolean isRunning() {
        return isRunning;
    }
    /** Returns the client connect string of the embedded ZooKeeper ensemble. */
    public String getZookeeperConnectString() {
        return this.zookeeper.connectString();
    }
}
|
|
/*
* Copyright 2008 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.base.Preconditions;
import com.google.common.collect.Sets;
import com.google.javascript.jscomp.ControlFlowGraph.Branch;
import com.google.javascript.jscomp.Scope.Var;
import com.google.javascript.jscomp.graph.DiGraph.DiGraphEdge;
import com.google.javascript.jscomp.graph.LatticeElement;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import java.util.BitSet;
import java.util.List;
import java.util.Set;
/**
* Compute the "liveness" of all local variables. A variable is "live" at a
* point of a program if the value it is currently holding might be read later.
* Otherwise, the variable is considered "dead" if we know for sure that it will
* no longer be read. Dead variables are candidates for dead assignment
* elimination and variable name sharing. The worst case safe assumption is to
* assume that all variables are live. In that case, we will have no opportunity
* for optimizations. This is especially the case within a TRY block when an
* assignment is not guaranteed to take place. We bail out by assuming that
* all variables are live.
* <p>
* Due to the possibility of inner functions and closures, certain "local"
* variables can escape the function. These variables will be considered as
* global and they can be retrieved with {@link #getEscapedLocals()}.
*
*/
class LiveVariablesAnalysis extends
    DataFlowAnalysis<Node, LiveVariablesAnalysis.LiveVariableLattice> {
  // 100 = ((# of original Power Rangers) ^
  // (# years of Warren Harding in office)) *
  // (# of Ninja Turtles)
  static final int MAX_VARIABLES_TO_ANALYZE = 100;

  public static final String ARGUMENT_ARRAY_ALIAS = "arguments";

  /** Join operation: union of the live sets flowing in from each branch. */
  private static class LiveVariableJoinOp
      implements JoinOp<LiveVariableLattice> {
    @Override
    public LiveVariableLattice apply(List<LiveVariableLattice> in) {
      LiveVariableLattice result = new LiveVariableLattice(in.get(0));
      for (int i = 1; i < in.size(); i++) {
        result.liveSet.or(in.get(i).liveSet);
      }
      return result;
    }
  }

  /**
   * The lattice that stores the liveness of all local variables at a given
   * point in the program. The whole lattice is the power set of all local
   * variables and a variable is live if it is in the set.
   */
  static class LiveVariableLattice implements LatticeElement {
    private final BitSet liveSet;

    /**
     * @param numVars Number of all local variables.
     */
    private LiveVariableLattice(int numVars) {
      this.liveSet = new BitSet(numVars);
    }

    private LiveVariableLattice(LiveVariableLattice other) {
      Preconditions.checkNotNull(other);
      this.liveSet = (BitSet) other.liveSet.clone();
    }

    @Override
    public boolean equals(Object other) {
      // Per the Object.equals() contract, equals(null) must return false rather
      // than throw; the instanceof test handles null correctly, so the previous
      // checkNotNull(other) precondition was a contract violation and is removed.
      return (other instanceof LiveVariableLattice) &&
          this.liveSet.equals(((LiveVariableLattice) other).liveSet);
    }

    public boolean isLive(Var v) {
      Preconditions.checkNotNull(v);
      return liveSet.get(v.index);
    }

    public boolean isLive(int index) {
      return liveSet.get(index);
    }

    @Override
    public String toString() {
      return liveSet.toString();
    }

    @Override
    public int hashCode() {
      return liveSet.hashCode();
    }
  }

  // The scope of the function that we are analyzing.
  private final Scope jsScope;

  // Locals that escape into inner functions/closures; treated as always live.
  private final Set<Var> escaped;

  LiveVariablesAnalysis(ControlFlowGraph<Node> cfg, Scope jsScope,
      AbstractCompiler compiler) {
    super(cfg, new LiveVariableJoinOp());
    this.jsScope = jsScope;
    this.escaped = Sets.newHashSet();
    computeEscaped(jsScope, escaped, compiler);
  }

  /** Returns the local variables that escape the function and must be assumed live. */
  public Set<Var> getEscapedLocals() {
    return escaped;
  }

  /** Returns the bit index of the named variable in this function's scope. */
  public int getVarIndex(String var) {
    return jsScope.getVar(var).index;
  }

  @Override
  boolean isForward() {
    // Liveness is a backward dataflow analysis: facts flow from uses back to defs.
    return false;
  }

  @Override
  LiveVariableLattice createEntryLattice() {
    return new LiveVariableLattice(jsScope.getVarCount());
  }

  @Override
  LiveVariableLattice createInitialEstimateLattice() {
    return new LiveVariableLattice(jsScope.getVarCount());
  }

  @Override
  LiveVariableLattice flowThrough(Node node, LiveVariableLattice input) {
    final BitSet gen = new BitSet(input.liveSet.size());
    final BitSet kill = new BitSet(input.liveSet.size());

    // Make kills conditional if the node can end abruptly by an exception.
    boolean conditional = false;
    List<DiGraphEdge<Node, Branch>> edgeList = getCfg().getOutEdges(node);
    for (DiGraphEdge<Node, Branch> edge : edgeList) {
      if (Branch.ON_EX.equals(edge.getValue())) {
        conditional = true;
      }
    }
    computeGenKill(node, gen, kill, conditional);
    LiveVariableLattice result = new LiveVariableLattice(input);
    // L_in = L_out - Kill + Gen
    result.liveSet.andNot(kill);
    result.liveSet.or(gen);
    return result;
  }

  /**
   * Computes the GEN and KILL set.
   *
   * @param n Root node.
   * @param gen Local variables that are live because of the instruction at
   *     {@code n} will be added to this set.
   * @param kill Local variables that are killed because of the instruction at
   *     {@code n} will be added to this set.
   * @param conditional {@code true} if any assignments encountered are
   *     conditionally executed. These assignments might not kill a variable.
   */
  private void computeGenKill(Node n, BitSet gen, BitSet kill,
      boolean conditional) {
    switch (n.getType()) {
      case Token.SCRIPT:
      case Token.BLOCK:
      case Token.FUNCTION:
        return;

      case Token.WHILE:
      case Token.DO:
      case Token.IF:
        computeGenKill(NodeUtil.getConditionExpression(n), gen, kill,
            conditional);
        return;

      case Token.FOR:
        if (!NodeUtil.isForIn(n)) {
          computeGenKill(NodeUtil.getConditionExpression(n), gen, kill,
              conditional);
        } else {
          // for(x in y) {...}
          Node lhs = n.getFirstChild();
          if (lhs.isVar()) {
            // for(var x in y) {...}
            lhs = lhs.getLastChild();
          }
          if (lhs.isName()) {
            addToSetIfLocal(lhs, kill);
            addToSetIfLocal(lhs, gen);
          } else {
            computeGenKill(lhs, gen, kill, conditional);
          }
          // rhs is executed only once so we don't go into it every loop.
        }
        return;

      case Token.VAR:
        for (Node c = n.getFirstChild(); c != null; c = c.getNext()) {
          if (c.hasChildren()) {
            computeGenKill(c.getFirstChild(), gen, kill, conditional);
            if (!conditional) {
              addToSetIfLocal(c, kill);
            }
          }
        }
        return;

      case Token.AND:
      case Token.OR:
        computeGenKill(n.getFirstChild(), gen, kill, conditional);
        // May short circuit.
        computeGenKill(n.getLastChild(), gen, kill, true);
        return;

      case Token.HOOK:
        computeGenKill(n.getFirstChild(), gen, kill, conditional);
        // Assume both sides are conditional.
        computeGenKill(n.getFirstChild().getNext(), gen, kill, true);
        computeGenKill(n.getLastChild(), gen, kill, true);
        return;

      case Token.NAME:
        if (isArgumentsName(n)) {
          markAllParametersEscaped();
        } else {
          addToSetIfLocal(n, gen);
        }
        return;

      default:
        if (NodeUtil.isAssignmentOp(n) && n.getFirstChild().isName()) {
          Node lhs = n.getFirstChild();
          if (!conditional) {
            addToSetIfLocal(lhs, kill);
          }
          if (!n.isAssign()) {
            // assignments such as a += 1 reads a.
            addToSetIfLocal(lhs, gen);
          }
          computeGenKill(lhs.getNext(), gen, kill, conditional);
        } else {
          for (Node c = n.getFirstChild(); c != null; c = c.getNext()) {
            computeGenKill(c, gen, kill, conditional);
          }
        }
        return;
    }
  }

  /** Sets the variable's bit iff it is declared in this scope and has not escaped. */
  private void addToSetIfLocal(Node node, BitSet set) {
    Preconditions.checkState(node.isName());
    String name = node.getString();
    if (!jsScope.isDeclared(name, false)) {
      return;
    }
    Var var = jsScope.getVar(name);
    if (!escaped.contains(var)) {
      set.set(var.index);
    }
  }

  /**
   * Give up computing liveness of formal parameter by putting all the parameter
   * names in the escaped set.
   */
  void markAllParametersEscaped() {
    // assumes the function's root node is FUNCTION(name, param-list, body),
    // so the parameter list is the second child -- TODO confirm.
    Node lp = jsScope.getRootNode().getFirstChild().getNext();
    for (Node arg = lp.getFirstChild(); arg != null; arg = arg.getNext()) {
      escaped.add(jsScope.getVar(arg.getString()));
    }
  }

  /**
   * Returns true if the node is a reference to the {@code arguments} object
   * that is not shadowed by a declared variable of the same name.
   */
  private boolean isArgumentsName(Node n) {
    return n.isName()
        && n.getString().equals(ARGUMENT_ARRAY_ALIAS)
        && !jsScope.isDeclared(ARGUMENT_ARRAY_ALIAS, false);
  }
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.broker.admin.v2;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.util.Objects.isNull;
import static org.apache.commons.lang.StringUtils.defaultIfEmpty;
import static org.apache.pulsar.common.util.Codec.decode;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
import io.swagger.annotations.ApiOperation;
import java.time.Clock;
import java.util.Optional;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.Encoded;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.container.AsyncResponse;
import javax.ws.rs.container.Suspended;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.apache.pulsar.broker.admin.AdminResource;
import org.apache.pulsar.broker.service.Topic;
import org.apache.pulsar.broker.web.RestException;
import org.apache.pulsar.common.naming.TopicName;
import org.apache.pulsar.common.schema.DeleteSchemaResponse;
import org.apache.pulsar.common.schema.GetSchemaResponse;
import org.apache.pulsar.common.schema.PostSchemaPayload;
import org.apache.pulsar.common.schema.PostSchemaResponse;
import org.apache.pulsar.common.schema.SchemaData;
import org.apache.pulsar.common.schema.SchemaType;
import org.apache.pulsar.common.schema.SchemaVersion;
@Path("/schemas")
public class SchemasResource extends AdminResource {
private final Clock clock;
public SchemasResource() {
this(Clock.systemUTC());
}
@VisibleForTesting
public SchemasResource(Clock clock) {
super();
this.clock = clock;
}
@GET
@Path("/{tenant}/{namespace}/{topic}/schema")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation(value = "Get topic schema", response = GetSchemaResponse.class)
public void getSchema(
@PathParam("tenant") String tenant,
@PathParam("namespace") String namespace,
@PathParam("topic") String topic,
@Suspended final AsyncResponse response
) {
validateDestinationAndAdminOperation(tenant, namespace, topic);
String schemaId = buildSchemaId(tenant, namespace, topic);
pulsar().getSchemaRegistryService().getSchema(schemaId)
.handle((schema, error) -> {
if (isNull(error)) {
response.resume(
Response.ok()
.encoding(MediaType.APPLICATION_JSON)
.entity(GetSchemaResponse.builder()
.version(schema.version)
.type(schema.schema.getType())
.timestamp(schema.schema.getTimestamp())
.data(new String(schema.schema.getData()))
.properties(schema.schema.getProps())
.build()
)
.build()
);
} else {
response.resume(error);
}
return null;
});
}
@GET
@Path("/{tenant}/{namespace}/{topic}/schema/{version}")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation(value = "Get topic schema")
public void getSchema(
@PathParam("tenant") String tenant,
@PathParam("namespace") String namespace,
@PathParam("topic") String topic,
@PathParam("version") @Encoded String version,
@Suspended final AsyncResponse response
) {
validateDestinationAndAdminOperation(tenant, namespace, topic);
String schemaId = buildSchemaId(tenant, namespace, topic);
SchemaVersion v = pulsar().getSchemaRegistryService().versionFromBytes(version.getBytes());
pulsar().getSchemaRegistryService().getSchema(schemaId, v)
.handle((schema, error) -> {
if (isNull(error)) {
if (schema.schema.isDeleted()) {
response.resume(Response.noContent());
} else {
response.resume(
Response.ok()
.encoding(MediaType.APPLICATION_JSON)
.entity(GetSchemaResponse.builder()
.version(schema.version)
.type(schema.schema.getType())
.timestamp(schema.schema.getTimestamp())
.data(new String(schema.schema.getData()))
.properties(schema.schema.getProps())
.build()
).build()
);
}
} else {
response.resume(error);
}
return null;
});
}
@DELETE
@Path("/{tenant}/{namespace}/{topic}/schema")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation(value = "Delete topic schema")
public void deleteSchema(
@PathParam("tenant") String tenant,
@PathParam("namespace") String namespace,
@PathParam("topic") String topic,
@Suspended final AsyncResponse response
) {
validateDestinationAndAdminOperation(tenant, namespace, topic);
String schemaId = buildSchemaId(tenant, namespace, topic);
pulsar().getSchemaRegistryService().deleteSchema(schemaId, defaultIfEmpty(clientAppId(), ""))
.handle((version, error) -> {
if (isNull(error)) {
response.resume(
Response.ok().entity(
DeleteSchemaResponse.builder()
.version(version)
.build()
).build()
);
} else {
response.resume(error);
}
return null;
});
}
@POST
@Path("/{tenant}/{namespace}/{topic}/schema")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@ApiOperation(value = "Post topic schema")
public void postSchema(
@PathParam("tenant") String tenant,
@PathParam("namespace") String namespace,
@PathParam("topic") String topic,
PostSchemaPayload payload,
@Suspended final AsyncResponse response
) {
validateDestinationAndAdminOperation(tenant, namespace, topic);
pulsar().getSchemaRegistryService().putSchemaIfAbsent(
buildSchemaId(tenant, namespace, topic),
SchemaData.builder()
.data(payload.getSchema().getBytes(Charsets.UTF_8))
.isDeleted(false)
.timestamp(clock.millis())
.type(SchemaType.valueOf(payload.getType()))
.user(defaultIfEmpty(clientAppId(), ""))
.build()
).thenAccept(version ->
response.resume(
Response.accepted().entity(
PostSchemaResponse.builder()
.version(version)
.build()
).build()
)
);
}
private String buildSchemaId(String tenant, String namespace, String topic) {
return TopicName.get("persistent", tenant, namespace, topic).getSchemaName();
}
private void validateDestinationAndAdminOperation(String tenant, String namespace, String topic) {
TopicName destinationName = TopicName.get(
"persistent", tenant, namespace, decode(topic)
);
try {
validateAdminAccessForTenant(destinationName.getTenant());
validateTopicOwnership(destinationName, false);
} catch (RestException e) {
if (e.getResponse().getStatus() == Response.Status.UNAUTHORIZED.getStatusCode()) {
throw new RestException(Response.Status.NOT_FOUND, "Not Found");
} else {
throw e;
}
}
}
}
|
|
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.todolist.data;
import android.content.ComponentName;
import android.content.ContentResolver;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.Context;
import android.content.UriMatcher;
import android.content.pm.PackageManager;
import android.content.pm.ProviderInfo;
import android.database.sqlite.SQLiteDatabase;
import android.net.Uri;
import android.support.test.InstrumentationRegistry;
import android.support.test.runner.AndroidJUnit4;
import com.example.android.todolist.data.TaskContentProvider;
import com.example.android.todolist.data.TaskContract;
import com.example.android.todolist.data.TaskDbHelper;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.fail;
@RunWith(AndroidJUnit4.class)
public class TestTaskContentProvider {
/* Context used to access various parts of the system */
private final Context mContext = InstrumentationRegistry.getTargetContext();
/**
* Because we annotate this method with the @Before annotation, this method will be called
* before every single method with an @Test annotation. We want to start each test clean, so we
* delete all entries in the tasks directory to do so.
*/
@Before
public void setUp() {
/* Use TaskDbHelper to get access to a writable database */
TaskDbHelper dbHelper = new TaskDbHelper(mContext);
SQLiteDatabase database = dbHelper.getWritableDatabase();
database.delete(TaskContract.TaskEntry.TABLE_NAME, null, null);
}
//================================================================================
// Test ContentProvider Registration
//================================================================================
/**
* This test checks to make sure that the content provider is registered correctly in the
* AndroidManifest file. If it fails, you should check the AndroidManifest to see if you've
* added a <provider/> tag and that you've properly specified the android:authorities attribute.
*/
@Test
public void testProviderRegistry() {
/*
* A ComponentName is an identifier for a specific application component, such as an
* Activity, ContentProvider, BroadcastReceiver, or a Service.
*
* Two pieces of information are required to identify a component: the package (a String)
* it exists in, and the class (a String) name inside of that package.
*
* We will use the ComponentName for our ContentProvider class to ask the system
* information about the ContentProvider, specifically, the authority under which it is
* registered.
*/
String packageName = mContext.getPackageName();
String taskProviderClassName = TaskContentProvider.class.getName();
ComponentName componentName = new ComponentName(packageName, taskProviderClassName);
try {
/*
* Get a reference to the package manager. The package manager allows us to access
* information about packages installed on a particular device. In this case, we're
* going to use it to get some information about our ContentProvider under test.
*/
PackageManager pm = mContext.getPackageManager();
/* The ProviderInfo will contain the authority, which is what we want to test */
ProviderInfo providerInfo = pm.getProviderInfo(componentName, 0);
String actualAuthority = providerInfo.authority;
String expectedAuthority = packageName;
/* Make sure that the registered authority matches the authority from the Contract */
String incorrectAuthority =
"Error: TaskContentProvider registered with authority: " + actualAuthority +
" instead of expected authority: " + expectedAuthority;
assertEquals(incorrectAuthority,
actualAuthority,
expectedAuthority);
} catch (PackageManager.NameNotFoundException e) {
String providerNotRegisteredAtAll =
"Error: TaskContentProvider not registered at " + mContext.getPackageName();
/*
* This exception is thrown if the ContentProvider hasn't been registered with the
* manifest at all. If this is the case, you need to double check your
* AndroidManifest file
*/
fail(providerNotRegisteredAtAll);
}
}
//================================================================================
// Test UriMatcher
//================================================================================
private static final Uri TEST_TASKS = TaskContract.TaskEntry.CONTENT_URI;
// Content URI for a single task with id = 1
private static final Uri TEST_TASK_WITH_ID = TEST_TASKS.buildUpon().appendPath("1").build();
/**
* This function tests that the UriMatcher returns the correct integer value for
* each of the Uri types that the ContentProvider can handle. Uncomment this when you are
* ready to test your UriMatcher.
*/
@Test
public void testUriMatcher() {
/* Create a URI matcher that the TaskContentProvider uses */
UriMatcher testMatcher = TaskContentProvider.buildUriMatcher();
/* Test that the code returned from our matcher matches the expected TASKS int */
String tasksUriDoesNotMatch = "Error: The TASKS URI was matched incorrectly.";
int actualTasksMatchCode = testMatcher.match(TEST_TASKS);
int expectedTasksMatchCode = TaskContentProvider.TASKS;
assertEquals(tasksUriDoesNotMatch,
actualTasksMatchCode,
expectedTasksMatchCode);
/* Test that the code returned from our matcher matches the expected TASK_WITH_ID */
String taskWithIdDoesNotMatch =
"Error: The TASK_WITH_ID URI was matched incorrectly.";
int actualTaskWithIdCode = testMatcher.match(TEST_TASK_WITH_ID);
int expectedTaskWithIdCode = TaskContentProvider.TASK_WITH_ID;
assertEquals(taskWithIdDoesNotMatch,
actualTaskWithIdCode,
expectedTaskWithIdCode);
}
//================================================================================
// Test Insert
//================================================================================
// /**
// * Tests inserting a single row of data via a ContentResolver
// */
@Test
public void testInsert() {
/* Create values to insert */
ContentValues testTaskValues = new ContentValues();
testTaskValues.put(TaskContract.TaskEntry.COLUMN_DESCRIPTION, "Test description");
testTaskValues.put(TaskContract.TaskEntry.COLUMN_PRIORITY, 1);
/* TestContentObserver allows us to test if notifyChange was called appropriately */
TestUtilities.TestContentObserver taskObserver = TestUtilities.getTestContentObserver();
ContentResolver contentResolver = mContext.getContentResolver();
/* Register a content observer to be notified of changes to data at a given URI (tasks) */
contentResolver.registerContentObserver(
/* URI that we would like to observe changes to */
TaskContract.TaskEntry.CONTENT_URI,
/* Whether or not to notify us if descendants of this URI change */
true,
/* The observer to register (that will receive notifyChange callbacks) */
taskObserver);
Uri uri = contentResolver.insert(TaskContract.TaskEntry.CONTENT_URI, testTaskValues);
Uri expectedUri = ContentUris.withAppendedId(TaskContract.TaskEntry.CONTENT_URI, 1);
String insertProviderFailed = "Unable to insert item through Provider";
assertEquals(insertProviderFailed, uri, expectedUri);
/*
* If this fails, it's likely you didn't call notifyChange in your insert method from
* your ContentProvider.
*/
taskObserver.waitForNotificationOrFail();
/*
* waitForNotificationOrFail is synchronous, so after that call, we are done observing
* changes to content and should therefore unregister this observer.
*/
contentResolver.unregisterContentObserver(taskObserver);
}
//================================================================================
// Test Query (for tasks directory)
//================================================================================
// /**
// * Inserts data, then tests if a query for the tasks directory returns that data as a Cursor
// */
// @Test
// public void testQuery() {
//
// /* Get access to a writable database */
// TaskDbHelper dbHelper = new TaskDbHelper(mContext);
// SQLiteDatabase database = dbHelper.getWritableDatabase();
//
// /* Create values to insert */
// ContentValues testTaskValues = new ContentValues();
// testTaskValues.put(TaskContract.TaskEntry.COLUMN_DESCRIPTION, "Test description");
// testTaskValues.put(TaskContract.TaskEntry.COLUMN_PRIORITY, 1);
//
// /* Insert ContentValues into database and get a row ID back */
// long taskRowId = database.insert(
// /* Table to insert values into */
// TaskContract.TaskEntry.TABLE_NAME,
// null,
// /* Values to insert into table */
// testTaskValues);
//
// String insertFailed = "Unable to insert directly into the database";
// assertTrue(insertFailed, taskRowId != -1);
//
// /* We are done with the database, close it now. */
// database.close();
//
// /* Perform the ContentProvider query */
// Cursor taskCursor = mContext.getContentResolver().query(
// TaskContract.TaskEntry.CONTENT_URI,
// /* Columns; leaving this null returns every column in the table */
// null,
// /* Optional specification for columns in the "where" clause above */
// null,
// /* Values for "where" clause */
// null,
// /* Sort order to return in Cursor */
// null);
//
//
// String queryFailed = "Query failed to return a valid Cursor";
// assertTrue(queryFailed, taskCursor != null);
//
// /* We are done with the cursor, close it now. */
// taskCursor.close();
// }
//================================================================================
// Test Delete (for a single item)
//================================================================================
// /**
// * Tests deleting a single row of data via a ContentResolver
// */
// @Test
// public void testDelete() {
// /* Access writable database */
// TaskDbHelper helper = new TaskDbHelper(InstrumentationRegistry.getTargetContext());
// SQLiteDatabase database = helper.getWritableDatabase();
//
// /* Create a new row of task data */
// ContentValues testTaskValues = new ContentValues();
// testTaskValues.put(TaskContract.TaskEntry.COLUMN_DESCRIPTION, "Test description");
// testTaskValues.put(TaskContract.TaskEntry.COLUMN_PRIORITY, 1);
//
// /* Insert ContentValues into database and get a row ID back */
// long taskRowId = database.insert(
// /* Table to insert values into */
// TaskContract.TaskEntry.TABLE_NAME,
// null,
// /* Values to insert into table */
// testTaskValues);
//
// /* Always close the database when you're through with it */
// database.close();
//
// String insertFailed = "Unable to insert into the database";
// assertTrue(insertFailed, taskRowId != -1);
//
//
// /* TestContentObserver allows us to test if notifyChange was called appropriately */
// TestUtilities.TestContentObserver taskObserver = TestUtilities.getTestContentObserver();
//
// ContentResolver contentResolver = mContext.getContentResolver();
//
// /* Register a content observer to be notified of changes to data at a given URI (tasks) */
// contentResolver.registerContentObserver(
// /* URI that we would like to observe changes to */
// TaskContract.TaskEntry.CONTENT_URI,
// /* Whether or not to notify us if descendants of this URI change */
// true,
// /* The observer to register (that will receive notifyChange callbacks) */
// taskObserver);
//
//
//
// /* The delete method deletes the previously inserted row with id = 1 */
// Uri uriToDelete = TaskContract.TaskEntry.CONTENT_URI.buildUpon().appendPath("1").build();
// int tasksDeleted = contentResolver.delete(uriToDelete, null, null);
//
// String deleteFailed = "Unable to delete item in the database";
// assertTrue(deleteFailed, tasksDeleted != 0);
//
// /*
// * If this fails, it's likely you didn't call notifyChange in your delete method from
// * your ContentProvider.
// */
// taskObserver.waitForNotificationOrFail();
//
// /*
// * waitForNotificationOrFail is synchronous, so after that call, we are done observing
// * changes to content and should therefore unregister this observer.
// */
// contentResolver.unregisterContentObserver(taskObserver);
// }
}
|