package edu.buffalo.cse.cse486586.simpledynamo;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.UnknownHostException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Formatter;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import android.content.ContentProvider;
import android.content.ContentResolver;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.DatabaseUtils;
import android.database.MatrixCursor;
import android.database.sqlite.SQLiteDatabase;
import android.net.Uri;
import android.os.AsyncTask;
import android.telephony.TelephonyManager;
import android.util.Log;
public class SimpleDynamoProvider extends ContentProvider {
String TAG = SimpleDynamoProvider.class.getSimpleName();
static final int SERVER_PORT = 10000;
ArrayList<String> avdList = new ArrayList<String>();
final String[] ports = {"11124", "11112", "11108", "11116", "11120"};
final String[] nodeName = {"5562", "5556", "5554", "5558", "5560"};
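//nodeName lists the five AVDs in ring order (by genHash of the node id, see getNodeId());
//each key is stored on its coordinator and also replicated to the next two nodes on the ring.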
ArrayList<String> nodeList = null;
ArrayList<String> hashList = new ArrayList<String>();
String curNode;
ConcurrentHashMap<String, String> localDB = new ConcurrentHashMap<String, String>();
ConcurrentHashMap<String, String> replicaDB = new ConcurrentHashMap<String, String>();
// ConcurrentHashMap<String, String> replica2 = new ConcurrentHashMap<String, String>();
ArrayList<String> container = new ArrayList<String>();
String[] reqColumns = {DBHandler.COL_NAME_KEY, DBHandler.COL_NAME_VALUE};
DBHandler mDBhandler = null;
final String STORE_COR = "store_coordinator";
final String STORE_REPLICA_1 = "store_replica one";
final String STORE_REPLICA_2 = "store_replica two";
final String QUERY_CUSTOM = "query_custom";
final String QUERY_HM_RESULT = "query_hm_result";
final String QUERY_ONE = "query_one";
final String QUERY_FOUND_KEY = "query_found_key";
final String QUERY_FOUND_SUCC = "query_found_succ";
public static final String TYPE_COR = "cor";
public static final String TYPE_REP1 = "rep1";
public static final String TYPE_REP2 = "rep2";
public static final String DELETE_IN_COR = "delete in cor";
public static final String DELETE_IN_REPLICA_1 = "delete in replica 1";
public static final String DELETE_IN_REPLICA_2 = "delete in replica 2";
public static final String INITIAL = "INITIAL";
public static final String RECOVER_COR = "RECOVER_COR";
public static final String RECOVERY_REP = "RECOVER_REP";
public static final String QUERY_ONE_SUCC = "QUERY_ONE_SUCC";
public static final String QUERY_STAR_RESULT = "QUERY_STAR_RESULT";
public static final String QUERY_STAR_RESULT_NEW = "QUERY_STAR_RESULT_NEW";
String curKey ="";
ContentResolver cr = null;
Cursor cursor;
MatrixCursor mCursor = null;
String curPort = null;
Object lockInsert = new Object();
Object lockQuery = new Object();
volatile boolean stopFlag = false;
@Override
public String getType(Uri uri) {
// TODO Auto-generated method stub
return null;
}
@Override
public boolean onCreate() {
//initialization
mDBhandler = new DBHandler(getContext());
cr = (this.getContext()).getContentResolver();
TelephonyManager tel = (TelephonyManager) getContext().getSystemService(Context.TELEPHONY_SERVICE);
String portStr = tel.getLine1Number().substring(tel.getLine1Number().length() - 4);
curNode = String.valueOf((Integer.parseInt(portStr))); //5554
curPort = String.valueOf((Integer.parseInt(portStr) * 2)); //11108
try { //server task
ServerSocket serverSocket = new ServerSocket(SERVER_PORT);
new ServerTask().executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, serverSocket);
} catch (IOException e) {
Log.e("Exception", "server Socket Exception");
}
//adding elements with hash value..
nodeList = new ArrayList<String>(Arrays.asList(nodeName));
for(String n : nodeList) {
try {
hashList.add(genHash(n));
} catch (NoSuchAlgorithmException e) {
e.printStackTrace();
}
}
for(int i = 0; i < 5; i++) {
try {
Log.i(TAG, "Node and Hash values are " + i + "::" + nodeList.get(i) + "::" + genHash(nodeList.get(i)));
String portToSend = nodeList.get(i);
if(portToSend.compareTo(curNode) != 0) { //send only for other ports
String msgToSend = INITIAL + ";;" + curNode + "==" + (Integer.parseInt(portToSend) * 2);
Log.i(TAG, "broadcasting in on create with msg:" + msgToSend);
new ClientTask().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, msgToSend);
// Socket socket = new Socket(InetAddress.getByAddress(new byte[]{10, 0, 2, 2}), Integer.parseInt(portToSend) * 2);
// OutputStream outToAnother = socket.getOutputStream();
// // Log.i("Port Name", (Integer.parseInt(portToSend) * 2));
// outToAnother.write(msgToSend.getBytes());
// socket.close();
}
} catch (NoSuchAlgorithmException e) {
e.printStackTrace();
} catch (Exception e) {
Log.e(TAG, "exception in oncreate " + e);
}
}
for(int i = 0; i < 5; i++) {
Log.i(TAG, "avd and Hash values are " + nodeList.get(i) + "::" + hashList.get(i));
}
Log.i(TAG, "check getNodeid for 5562:" + getNodeId("5562"));
Log.i(TAG, "check getNodeid for 5556:" + getNodeId("5556"));
Log.i(TAG, "check getNodeid for 5554:" + getNodeId("5554"));
Log.i(TAG, "check getNodeid for 5558:" + getNodeId("5558"));
Log.i(TAG, "check getNodeid for 5560:" + getNodeId("5560"));
return false;
}
@Override
public Uri insert(Uri uri, ContentValues values) {
Log.i(TAG, "values are sent to insert " + values.toString());
Log.i(TAG, "values are sent to insert " + values.toString());
synchronized (lockInsert) {
try {
String key = (String) (values.get("key"));
String value = (String) (values.get("value"));
Log.i(TAG, "key is " + key + "; value is " + value);
String insertNode = getNodeId(key);
int index = nodeList.indexOf(insertNode);
String replica1 = nodeList.get((index + 1) % 5);
String replica2 = nodeList.get((index + 2) % 5);
Log.i(TAG, "index insert Node and replica nodes are " + index + ":" + insertNode + ":" + replica1 + ":" + replica2);
SQLiteDatabase db;
Log.i(TAG, "insert node is " + insertNode + ";; curNode is " + curNode);
if (insertNode.compareTo(curNode) == 0) { //insert locally
Log.i(TAG, "insert Node in the if loop");
try {
db = mDBhandler.getWritableDatabase();
localDB.put(key, value);
//setting the new content values with col TYPE
ContentValues cv = new ContentValues();
cv.put(DBHandler.COL_NAME_KEY, key);
cv.put(DBHandler.COL_NAME_VALUE, value);
// cv.put(DBHandler.COL_TYPE, TYPE_COR); //setting as coordinator
Log.i(TAG, "content values which are inserted : " + cv);
//hack for insert as well as updating the table with single entry
// synchronized(lockInsert) {
long rowId = db.insertWithOnConflict(DBHandler.TABLE_NAME, null, cv, android.database.sqlite.SQLiteDatabase.CONFLICT_REPLACE);
if (rowId == -1) { //value already exists
Log.i("Conflict", "Error inserting values in DB");
} else {
Log.i(TAG, "success fully inserted " + values.toString());
}
// }
} catch (Exception e) {
Log.v(TAG, "Exception while inserting :" + e);
}
//send to replica_1
// synchronized(lockInsert) {
Integer portToSend1 = Integer.parseInt(replica1) * 2;
String msgToSend1 = STORE_REPLICA_1 + ";;" + key + ";;" + value + "==" + portToSend1;
Log.i(TAG, "insert in if part :" + msgToSend1);
// Socket socket1 = new Socket(InetAddress.getByAddress(new byte[]{10, 0, 2, 2}), portToSend1);
// OutputStream outToAnother1 = socket1.getOutputStream();
// Log.i("Port Name", String.valueOf(portToSend1));
// outToAnother1.write(msgToSend1.getBytes());
// socket1.close();
new ClientTask().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, msgToSend1);
//send to replica2
Integer portToSend2 = Integer.parseInt(replica2) * 2; //to get the port numbers
String msgToSend2 = STORE_REPLICA_2 + ";;" + key + ";;" + value + "==" + portToSend2;
Log.i(TAG, "insert in if part :" + msgToSend2);
// Socket socket2 = new Socket(InetAddress.getByAddress(new byte[]{10, 0, 2, 2}), portToSend2);
// OutputStream outToAnother2 = socket2.getOutputStream();
// Log.i("Port Name", String.valueOf(portToSend2));
// outToAnother2.write(msgToSend2.getBytes());
// socket2.close();
new ClientTask().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, msgToSend2);
// }
return uri;
} else {
Log.i(TAG, "insert Node in the else loop");
// synchronized(lockInsert) {
Integer portToSend = Integer.parseInt(insertNode) * 2;
String msgToSend = STORE_COR + ";;" + key + ";;" + value + "==" + portToSend;
Log.i(TAG, "insert :" + msgToSend + "::" + portToSend);
// Socket socket = new Socket(InetAddress.getByAddress(new byte[]{10, 0, 2, 2}), portToSend);
// OutputStream outToAnother = socket.getOutputStream();
// Log.i("Port Name", String.valueOf(portToSend));
// outToAnother.write(msgToSend.getBytes());
// socket.close();
new ClientTask().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, msgToSend);
//send to replica_1
Integer portToSend1 = Integer.parseInt(replica1) * 2;
String msgToSend1 = STORE_REPLICA_1 + ";;" + key + ";;" + value + "==" + portToSend1;
Log.i(TAG, "insert in if part :" + msgToSend1);
// Socket socket1 = new Socket(InetAddress.getByAddress(new byte[]{10, 0, 2, 2}), portToSend1);
// OutputStream outToAnother1 = socket1.getOutputStream();
// Log.i("Port Name", String.valueOf(portToSend1));
// outToAnother1.write(msgToSend1.getBytes());
// socket1.close();
new ClientTask().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, msgToSend1);
//send to replica2
Integer portToSend2 = Integer.parseInt(replica2) * 2; //to get the port numbers
String msgToSend2 = STORE_REPLICA_2 + ";;" + key + ";;" + value + "==" + portToSend2;
Log.i(TAG, "insert in if part :" + msgToSend2);
// Socket socket2 = new Socket(InetAddress.getByAddress(new byte[]{10, 0, 2, 2}), portToSend2);
// OutputStream outToAnother2 = socket2.getOutputStream();
// Log.i("Port Name", String.valueOf(portToSend2));
// outToAnother2.write(msgToSend2.getBytes());
// socket2.close();
new ClientTask().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, msgToSend2);
// }
}
} catch (Exception e) {
Log.e(TAG, "Exception in on insert :" + e);
}
}
return null;
}
@Override
public Cursor query(Uri uri, String[] projection, String selection,
String[] selectionArgs, String sortOrder) {
synchronized (lockQuery) {
String[] reqColumns = {DBHandler.COL_NAME_KEY, DBHandler.COL_NAME_VALUE};
SQLiteDatabase db;
mCursor = new MatrixCursor(new String[]{DBHandler.COL_NAME_KEY, DBHandler.COL_NAME_VALUE});
try {
db = mDBhandler.getReadableDatabase();
String key = selection;
if (selection.equals("\"@\"")) {
Log.i(TAG, "Query: selecting all the data from the current AVD");
cursor = db.query(DBHandler.TABLE_NAME, reqColumns, null, null, null, null, null);
Log.i(TAG, "length of cursor :" + cursor.getColumnCount());
return cursor;
} else if (selection.equals("\"*\"")) {
Log.i(TAG, "Query: selecting data from ALL the AVD");
cursor = db.query(DBHandler.TABLE_NAME, reqColumns, null, null, null, null, null);
//add it to the matrix cursor
cursor.moveToFirst();
for (int move = 0; move < cursor.getCount(); move++) {
String keyTemp = cursor.getString(cursor.getColumnIndex(DBHandler.COL_NAME_KEY));
String valueTemp = cursor.getString(cursor.getColumnIndex(DBHandler.COL_NAME_VALUE));
mCursor.addRow(new String[]{keyTemp, valueTemp});
cursor.moveToNext();
}
container.clear(); //drop results left over from an earlier "*" query before collecting new ones
for (String port : ports) {
// String portToSend = String.valueOf((Integer.parseInt(port) * 2));
String msgToSend = QUERY_CUSTOM + ";;" + (Integer.parseInt(curNode) * 2);
Log.i(TAG, "query key-valuefor each loop :" + msgToSend);
Socket socket = new Socket(InetAddress.getByAddress(new byte[]{10, 0, 2, 2}), Integer.parseInt(port));
OutputStream outToAnother = socket.getOutputStream();
Log.i(TAG, "port in the query for loop:" + port);
outToAnother.write(msgToSend.getBytes());
socket.close();
}
Thread.sleep(2000);
Log.i(TAG, "container is NOT empty;; size:" + container.size());
for (String ele : container) {
mCursor.addRow(ele.split(";;")); //adding the elements in the cursor
}
Log.i(TAG, "matrix cursor count is :" + mCursor.getCount());
return mCursor;
} else {
Log.i(TAG, "Query: else part getting one key");
Log.i(TAG, "Query: Selection:" + selection);
if (localDB.containsKey(key) || replicaDB.containsKey(key)) { //contains the key
Log.i(TAG, "Query contains in the localDB " + key);
cursor = db.query(DBHandler.TABLE_NAME, null, DBHandler.COL_NAME_KEY + "=" + "'" + selection + "'", null, null, null, null);
cursor.moveToFirst();
Log.i(TAG, "cursor values for the key : " + key + "::" + DatabaseUtils.dumpCursorToString(cursor));
return cursor;
} else { //search in the next node
Log.i(TAG, "Query does not contain in the localDB " + selection);
String portToSend = getNodeId(key);
curKey = selection;
stopFlag = false;
String colnames[] = {"key", "value"};
mCursor = new MatrixCursor(colnames);
String msgToSend = QUERY_ONE + ";;" + key + ";;" + curPort + "==" + (Integer.parseInt(portToSend) * 2);
Log.i(TAG, "query key-value :" + msgToSend);
new ClientTask().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, msgToSend);
// send to successor
int index = nodeList.indexOf(portToSend);
String succOfToSend = nodeList.get((index + 1) % 5);
msgToSend = QUERY_ONE_SUCC + ";;" + key + ";;" + curPort + "==" + (Integer.parseInt(succOfToSend) * 2);
Log.i(TAG, "query key-value for the succ :" + msgToSend);
new ClientTask().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, msgToSend);
container.clear(); //empty the arraylist
while (!stopFlag) {
//wait till the key is obtained
}
Log.i(TAG, "stop Flag has been reset");
// for (String ele : container) {
// mCursor.addRow(ele.split(";;")); //adding the elements in the cursor
// }
curKey = "";
mCursor.moveToFirst();
System.out.println("FINAL" + DatabaseUtils.dumpCursorToString(mCursor));
return mCursor;
}
}
}catch(InterruptedException e){
e.printStackTrace();
}catch(UnknownHostException e){
e.printStackTrace();
}catch(IOException e){
e.printStackTrace();
}
}
return null;
}
@Override
public int delete(Uri uri, String selection, String[] selectionArgs) {
SQLiteDatabase db;
db = mDBhandler.getWritableDatabase();
String key = selection;
try {
Log.i(TAG, "key is " + key);
if (selection.equals("\"@\"") || selection.equals("\"*\"") ) {
Log.i(TAG, "Delete: selecting all the data from the current AVD");
db.delete(DBHandler.TABLE_NAME, null, null);
}
else if(localDB.containsKey(key)){ //this node coordinates the key
try {
db = mDBhandler.getWritableDatabase();
localDB.remove(key);
db.delete(DBHandler.TABLE_NAME, DBHandler.COL_NAME_KEY + "=" + "'" + key + "'", null);
//deleting the replica entries too
int index = nodeList.indexOf(curNode);
String replica1 = nodeList.get((index + 1) % 5);
String replica2 = nodeList.get((index + 2) % 5);
String msgToSend = DELETE_IN_REPLICA_1 + ";;" + key + "==" + (Integer.parseInt(replica1) * 2);
Log.i(TAG, "DELETING in replica 1 " + msgToSend);
new ClientTask().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, msgToSend);
String msgToSend2 = DELETE_IN_REPLICA_2 + ";;" + key + "==" + (Integer.parseInt(replica2) * 2);
Log.i(TAG, "DELETING in replica 2 " + msgToSend2);
new ClientTask().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, msgToSend2);
} catch (Exception e) {
Log.e(TAG, "Exception while deleting :" + e);
}
} else { //delete in the corresponding avd
try {
String deleteNode = getNodeId(key);
int index = nodeList.indexOf(deleteNode);
String replica1 = nodeList.get((index + 1) % 5);
String replica2 = nodeList.get((index + 2) % 5);
String msgToSend = DELETE_IN_COR + ";;" + key + "==" + (Integer.parseInt(deleteNode) * 2);
Log.i(TAG, "DELETING in cor " + msgToSend);
new ClientTask().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, msgToSend);
String msgToSend1 = DELETE_IN_REPLICA_1 + ";;" + key + "==" + (Integer.parseInt(replica1) * 2);
Log.i(TAG, "DELETING in replica 1 " + msgToSend1);
new ClientTask().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, msgToSend1);
String msgToSend2 = DELETE_IN_REPLICA_2 + ";;" + key + "==" + (Integer.parseInt(replica2) * 2);
Log.i(TAG, "DELETING in replica 2 " + msgToSend2);
new ClientTask().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, msgToSend2);
} catch (Exception e) {
Log.e(TAG, "Exception while deleting in else part " + e);
}
}
} catch (Exception e) {
Log.e(TAG, "Exception in delete " + e);
}
return 0;
}
@Override
public int update(Uri uri, ContentValues values, String selection,
String[] selectionArgs) {
// TODO Auto-generated method stub
return 0;
}
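/*
genHash returns the SHA-1 digest of the input as a 40-character lowercase hex string;
node ids (e.g. "5554") and keys are both hashed this way to position them on the ring.
*/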
private String genHash(String input) throws NoSuchAlgorithmException {
MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
byte[] sha1Hash = sha1.digest(input.getBytes());
Formatter formatter = new Formatter();
for (byte b : sha1Hash) {
formatter.format("%02x", b);
}
return formatter.toString();
}
//helper classes
class ServerTask extends AsyncTask<ServerSocket, String, Void> {
String TAG = ServerTask.class.getSimpleName();
@Override
protected Void doInBackground(ServerSocket... sockets) {
ServerSocket serverSocket = sockets[0];
Socket server;
try {
// While loop to repeatedly check for socket connectivity
while (true) {
server = serverSocket.accept();
InputStream inFromAnother = server.getInputStream();
StringBuilder sb = new StringBuilder();
int v;
while ((v = inFromAnother.read()) != -1) {
char ch = (char) v;
sb.append(ch);
}
String msgFromStream = sb.toString();
Log.i("output", sb.toString());
String msgRead[] = msgFromStream.trim().split(";;");
String signal = msgRead[0];
String key, value;
ContentValues cv = null;
Uri newUri;
SQLiteDatabase db;
Cursor tempCursor;
switch (signal) {
case INITIAL: //INITIAL + ";;" + curNode + "==" + (Integer.parseInt(portToSend) * 2);
db = mDBhandler.getReadableDatabase();
String[] reqColumns = {DBHandler.COL_NAME_KEY, DBHandler.COL_NAME_VALUE};
tempCursor = db.query(DBHandler.TABLE_NAME, reqColumns, null, null, null, null, null);
Log.i(TAG, "tempcursor count " + tempCursor.getCount());
if(tempCursor.getCount() > 0) { //test if the content provider is empty or not
String toSendPort = msgRead[1]; //555*
int index = nodeList.indexOf(toSendPort);
String succ1 = nodeList.get((index + 1) % 5);
String succ2 = nodeList.get((index + 2) % 5);
String pred1 = nodeList.get((index + 5 - 1) % 5);
String pred2 = nodeList.get((index + 5 - 2) % 5);
Log.i(TAG, "toSendport : " + toSendPort + " pred1 :" + pred1 + " pred2 :" + pred2 + " succ1 : " + succ1 + " succ2 : " + succ2);
if(curNode.compareTo(succ1) == 0 || curNode.compareTo(succ2) == 0) { //send the replica to get the cordinator
String corValues = replicaDB.toString();
String msgToSend = RECOVER_COR + ";;" + corValues + "==" + (Integer.parseInt(toSendPort) * 2);
Log.i(TAG, "recovery for cor is " + msgToSend);
new ClientTask().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, msgToSend);
} else if(curNode.compareTo(pred1) == 0 || curNode.compareTo(pred2) == 0) { //send the coordinator to get the replica
//send the replica
String repValues = localDB.toString();
String msgToSend = RECOVERY_REP + ";;" + repValues + "==" + (Integer.parseInt(toSendPort) * 2);
Log.i(TAG, "recovery for rep is " + msgToSend);
new ClientTask().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, msgToSend);
}
}
break;
case RECOVER_COR: //RECOVER_COR + ";;" + corValues + "==" + (Integer.parseInt(toSendPort) * 2);
Log.i(TAG, "update the values of local db cor");
new UpdateLocalDBTask().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, msgRead[1]);
break;
case RECOVERY_REP: //RECOVERY_REP + ";;" + repValues + "==" + (Integer.parseInt(toSendPort) * 2);
Log.i(TAG, "update the values in replica DB");
new UpdateReplica1DBTask().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, msgRead[1]);
break;
case STORE_COR: //STORE_COR + ";;" + key + ";;" + value + "==" + portToSend;
synchronized(lockInsert) {
key = msgRead[1];
value = msgRead[2];
localDB.put(key, value);
db = mDBhandler.getWritableDatabase();
cv = new ContentValues();
cv.put(DBHandler.COL_NAME_KEY, key);
cv.put(DBHandler.COL_NAME_VALUE, value);
// cv.put(DBHandler.COL_TYPE, TYPE_COR); //setting as coordinator
Log.i(TAG, "Content values in coordinator is " + cv);
long rowId = db.insertWithOnConflict(DBHandler.TABLE_NAME, null, cv, android.database.sqlite.SQLiteDatabase.CONFLICT_REPLACE);
Log.v("insert", "insert since one node" + cv.toString());
if (rowId == -1) { //value already exists
Log.i("Conflict", "Error inserting values in DB");
} else {
Log.i(TAG, "success fully inserted in replica 1" + cv.toString());
}
}
// newUri = cr.insert(
// buildUri("content", "edu.buffalo.cse.cse486586.simpledynamo.provider"),
// cv
// );
break;
case STORE_REPLICA_1: // STORE_REPLICA_2 + ";;" + key + ";;" + value
synchronized(lockInsert) {
String key1 = msgRead[1];
String value1 = msgRead[2];
replicaDB.put(key1, value1);
Log.i(TAG, "key values to be in replica 1 is " + key1 + "::" + value1);
db = mDBhandler.getWritableDatabase();
cv = new ContentValues();
cv.put(DBHandler.COL_NAME_KEY, key1);
cv.put(DBHandler.COL_NAME_VALUE, value1);
// cv.put(DBHandler.COL_TYPE, TYPE_REP1); //setting as Replica1
Log.i(TAG, "Content values in replica 1 is " + cv);
long rowId1 = db.insertWithOnConflict(DBHandler.TABLE_NAME, null, cv, android.database.sqlite.SQLiteDatabase.CONFLICT_REPLACE);
Log.v("insert", "insert since one node" + cv.toString());
if (rowId1 == -1) { //value already exists
Log.i("Conflict", "Error inserting values in DB");
} else {
Log.i(TAG, "success fully inserted in replica 1" + cv.toString());
}
}
//
// newUri = cr.insert(
// buildUri("content", "edu.buffalo.cse.cse486586.simpledynamo.provider"),
// cv
// );
//wrting with new Col type
// cv = new ContentValues();
// cv.put(DBHandler.COL_NAME_KEY, key1);
// cv.put(DBHandler.COL_NAME_VALUE, value1);
// cv.put(DBHandler.COL_TYPE, TYPE_REP1); //setting as replica 1
// Log.i(TAG, "Content values in replica 1 is " + cv);
// newUri = cr.insert(
// buildUri("content", "edu.buffalo.cse.cse486586.simpledynamo.provider"),
// cv
// );
break;
case STORE_REPLICA_2: // STORE_REPLICA_2 + ";;" + key + ";;" + value
synchronized(lockInsert) {
String key2 = msgRead[1];
String value2 = msgRead[2];
replicaDB.put(key2, value2);
Log.i(TAG, "key values to be in replica 2 is " + key2 + "::" + value2);
db = mDBhandler.getWritableDatabase();
cv = new ContentValues();
cv.put(DBHandler.COL_NAME_KEY, key2);
cv.put(DBHandler.COL_NAME_VALUE, value2);
// cv.put(DBHandler.COL_TYPE, TYPE_REP1); //setting as coordinator
long rowId2 = db.insertWithOnConflict(DBHandler.TABLE_NAME, null, cv, android.database.sqlite.SQLiteDatabase.CONFLICT_REPLACE);
Log.v("insert", "insert since one node" + cv.toString());
if (rowId2 == -1) { //value already exists
Log.i("Conflict", "Error inserting values in DB");
} else {
Log.i(TAG, "success fully inserted in replica 2" + cv.toString());
}
}
// Log.i(TAG, "Content values in coordinator is " + cv);
// newUri = cr.insert(
// buildUri("content", "edu.buffalo.cse.cse486586.simpledynamo.provider"),
// cv
// );
//writing with new col values
// cv = new ContentValues();
// cv.put(DBHandler.COL_NAME_KEY, key2);
// cv.put(DBHandler.COL_NAME_VALUE, value2);
// cv.put(DBHandler.COL_TYPE, TYPE_REP2); //setting as replica 2
// Log.i(TAG, "Content values in replica 2 is " + cv);
// newUri = cr.insert(
// buildUri("content", "edu.buffalo.cse.cse486586.simpledynamo.provider"),
// cv
// );
break;
case QUERY_CUSTOM: // QUERY_CUSTOM + ";;" + curNode.port + "==" + portToSend;
String oPort = msgRead[1];
if(!oPort.equals(curPort)) { //do not respond to our own broadcast; the origin already added its rows locally
// for (Map.Entry<String, String> map : localDB.entrySet()) {
// Log.i(TAG, " map values which are in * :" + map.getKey() + ";;" + map.getValue());
// String msgToOrigin = QUERY_HM_RESULT + ";;" + map.getKey() + ";;" + map.getValue();
// Socket socket = new Socket(InetAddress.getByAddress(new byte[]{10, 0, 2, 2}), Integer.parseInt(oPort));
// OutputStream outToAnother = socket.getOutputStream();
// Log.i(TAG, "the port :" + oPort);
// outToAnother.write(msgToOrigin.getBytes());
// socket.close();
// }
// //changing to content provider
// String corValues = localDB.toString();
// String msgToSend = QUERY_STAR_RESULT + ";;" + corValues + "==" + oPort;
// Log.i(TAG, "Query Star result");
// new ClientTask().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, msgToSend);
db = mDBhandler.getReadableDatabase();
String[] reqColumns2 = {DBHandler.COL_NAME_KEY, DBHandler.COL_NAME_VALUE};
cursor = db.query(DBHandler.TABLE_NAME, reqColumns2, null, null, null, null, null);
Log.i(TAG, "length of cursor :" + cursor.getColumnCount());
cursor.moveToFirst();
String msgToSend = QUERY_STAR_RESULT_NEW + ";;";
for (int move = 0; move < cursor.getCount(); move++) {
String keyTemp = cursor.getString(cursor.getColumnIndex(DBHandler.COL_NAME_KEY));
String valueTemp = cursor.getString(cursor.getColumnIndex(DBHandler.COL_NAME_VALUE));
msgToSend += keyTemp + "=" + valueTemp + ",";
cursor.moveToNext();
}
msgToSend += "==" + oPort;
Log.i(TAG, "Query_one_succ_new: found key " + msgToSend);
new ClientTask().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, msgToSend);
}
break;
case QUERY_STAR_RESULT_NEW:
if (msgRead.length < 2) break; //responder had no rows, so there is no payload to parse
String kvValues = msgRead[1];
String[] sp = kvValues.split(",");
for(String ele : sp) {
String x = ele.trim();
System.out.println(x);
String[] kvPairs = x.split("=");
if(kvPairs.length == 2) {
System.out.println("result to be stored in container is :" + kvPairs[0] + "::" + kvPairs[1]);
container.add(kvPairs[0] + ";;" + kvPairs[1]);
}
}
break;
case QUERY_ONE: // QUERY_ONE + ";;" + key + ";;" + curPort + "==" + portToSend;
// synchronized (lockQuery) {
String key_one = msgRead[1];
String oPort_one = msgRead[2];
if (localDB.containsKey(key_one)) {
// String value_one = localDB.get(key_one);
db = mDBhandler.getReadableDatabase();
String[] reqColumns1 = {DBHandler.COL_NAME_KEY, DBHandler.COL_NAME_VALUE};
Cursor c = null;
c = db.query(DBHandler.TABLE_NAME, reqColumns1, DBHandler.COL_NAME_KEY + "=" + "'" + key_one + "'", null, null, null, null);
c.moveToFirst();
String keyTemp = c.getString(c.getColumnIndex(DBHandler.COL_NAME_KEY));
String valueTemp = c.getString(c.getColumnIndex(DBHandler.COL_NAME_VALUE));
String msgToSend = QUERY_FOUND_KEY + ";;" + key_one + ";;" + valueTemp + "==" + oPort_one;
Log.i(TAG, "Query_next: found key :" + msgToSend);
new ClientTask().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, msgToSend);
} else { //send it to the corresponding port
}
// }
break;
case QUERY_ONE_SUCC: //QUERY_ONE_SUCC + ";;" + key + ";;" + curPort + "==" + (Integer.parseInt(succOfToSend) * 2);
String key_one_succ = msgRead[1];
String oPort_one_succ = msgRead[2];
Log.i(TAG, "Query_ONE_SUCC for key " + key_one_succ);
// if(replicaDB.containsKey(key_one_succ)) {
// String value_one_succ = replicaDB.get(key_one_succ);
// String msgToSend = QUERY_FOUND_KEY + ";;" + key_one_succ + ";;" + value_one_succ + "==" + oPort_one_succ;
// Log.i(TAG, "Query_next: found key :" + msgToSend);
// new ClientTask().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, msgToSend);
// } else if(localDB.containsKey(key_one_succ)) {
// String value_one_succ = localDB.get(key_one_succ);
// String msgToSend = QUERY_FOUND_KEY + ";;" + key_one_succ + ";;" + value_one_succ + "==" + oPort_one_succ;
// Log.i(TAG, "Query_next: found key :" + msgToSend);
// new ClientTask().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, msgToSend);
// }
db = mDBhandler.getReadableDatabase();
String[] reqColumns1 = {DBHandler.COL_NAME_KEY, DBHandler.COL_NAME_VALUE};
Cursor c =null;
c = db.query(DBHandler.TABLE_NAME, reqColumns1, DBHandler.COL_NAME_KEY + "=" + "'" + key_one_succ + "'", null, null, null, null);
// String value1;
// if (cursor.moveToFirst()) {
// do {
// String key1 = cursor.getString(cursor.getColumnIndex(DBHandler.COL_NAME_KEY));
// value1 = cursor.getString(cursor.getColumnIndex(DBHandler.COL_NAME_VALUE));
// } while (cursor.moveToNext());
// String msgToSend = QUERY_FOUND_KEY + ";;" + key_one_succ + ";;" + value1 + "==" + oPort_one_succ;
// Log.i(TAG, "Query_one_succ: found key :" + msgToSend);
// new ClientTask().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, msgToSend);
// }
// Log.i(TAG, "QUERY_ONE_SUCC value of cursor count " + cursor.getCount());
if (c.moveToFirst()) { //guard: this node may not hold a copy of the key
String keyTemp = c.getString(c.getColumnIndex(DBHandler.COL_NAME_KEY));
String valueTemp = c.getString(c.getColumnIndex(DBHandler.COL_NAME_VALUE));
String msgToSend = QUERY_FOUND_SUCC + ";;" + key_one_succ + ";;" + valueTemp + "==" + oPort_one_succ;
new ClientTask().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, msgToSend);
}
// for (int move = 0; move < cursor.getCount(); move++) {
//
// Log.i(TAG, "Query_one_succ: found key :" + msgToSend);
//
// cursor.moveToNext();
// }
break;
case QUERY_FOUND_SUCC:
String c1 = msgRead[1];
String c2 = msgRead[2];
if(curKey.equals(c1)) {
Log.i(TAG, "got the key value in succ" + c1 + "::" + c2);
curKey ="";
String colnames[] ={"key","value"};
mCursor = new MatrixCursor(colnames);
mCursor.addRow(new String[] {c1, c2});
stopFlag = true;
Log.i(TAG, "Query found key in succ" + c1 + "::" + c2);
}
break;
case QUERY_FOUND_KEY: //QUERY_FOUND_KEY + ";;" + key_one + ";;" + value_one + "==" + oPort_one;
//add to the array list
String column1 = msgRead[1];
String column2 = msgRead[2];
if(curKey.equals(column1)) {
Log.i(TAG, "got the key value " + column1 + "::" + column2);
curKey = "";
String colnames[] ={"key","value"};
mCursor = new MatrixCursor(colnames);
mCursor.addRow(new String[] {column1, column2});
stopFlag = true;
}
break;
case QUERY_HM_RESULT: // QUERY_HM_RESULT + ";;" + map.getKey() + ";;" + map.getValue() + "==" + oPort;
String result = msgRead[1] + ";;" + msgRead[2];
Log.i(TAG, "results stored in the container is " + result);
container.add(result);
break;
case QUERY_STAR_RESULT: //QUERY_STAR_RESULT + ";;" + corValues + "==" + oPort;
String hmValues = msgRead[1];
String res = hmValues.replaceAll("[\\[\\](){}]","");
Log.i(TAG, "star results of hashmap values " + res);
String[] kvSplit = res.split(",");
for(String ele : kvSplit) {
String x = ele.trim();
System.out.println(x);
String[] kvPairs = x.split("=");
if(kvPairs.length == 2) {
System.out.println("result to be stored in container is :" + kvPairs[0] + "::" + kvPairs[1]);
container.add(kvPairs[0] + ";;" + kvPairs[1]);
}
}
break;
case DELETE_IN_COR:
String key_to_delete = msgRead[1];
db = mDBhandler.getWritableDatabase();
localDB.remove(key_to_delete);
db.delete(DBHandler.TABLE_NAME, DBHandler.COL_NAME_KEY + "=" + "'" + key_to_delete + "'", null);
break;
case DELETE_IN_REPLICA_1: //DELETE_IN_REPLICA_1 + ";;" + key + "==" + (Integer.parseInt(replica1) * 2);
String key_to_delete_1 = msgRead[1];
db = mDBhandler.getWritableDatabase();
replicaDB.remove(key_to_delete_1);
db.delete(DBHandler.TABLE_NAME, DBHandler.COL_NAME_KEY + "=" + "'" + key_to_delete_1 + "'", null);
break;
case DELETE_IN_REPLICA_2: //DELETE_IN_REPLICA_1 + ";;" + key + "==" + (Integer.parseInt(replica1) * 2);
String key_to_delete_2 = msgRead[1];
db = mDBhandler.getWritableDatabase();
replicaDB.remove(key_to_delete_2);
db.delete(DBHandler.TABLE_NAME, DBHandler.COL_NAME_KEY + "=" + "'" + key_to_delete_2 + "'", null);
break;
}
}
} catch (IOException e) {
Log.e("TAG", "Server Socket creation failed");
}
return null;
}
}
/*
format: signalType;;field1;;field2...==destinationPort (fields separated by ";;", destination emulator port after "==")
*/
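/*
Example (taken from the insert() path above): a coordinator store for key "k1" with
value "v1", destined for emulator port 11108, is encoded as
    store_coordinator;;k1;;v1==11108
ClientTask splits on "==" to find the destination port and writes the left half to the
socket; ServerTask then splits that payload on ";;" to recover the signal, key, and value.
*/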
private class ClientTask extends AsyncTask<String, Void, Void> {
@Override
protected Void doInBackground(String... msgs) {
try {
//print socket address
Log.i(TAG, "got the socket address");
String[] msgWithPort = msgs[0].split("=="); //to get the port address
String remotePort = msgWithPort[1];
String dataToSend = msgWithPort[0];
Log.i("msgWithPort", msgWithPort[0]);
if(remotePort != null) {
Socket socket = new Socket(InetAddress.getByAddress(new byte[]{10, 0, 2, 2}), Integer.parseInt(remotePort));
OutputStream outToAnother = socket.getOutputStream();
Log.i("Port Name", remotePort);
outToAnother.write(dataToSend.getBytes());
socket.close();
}
} catch (UnknownHostException e) {
Log.e(TAG, "ClientTask UnknownHostException");
} catch (IOException e) {
Log.e(TAG, "ClientTask socket IOException :" + e);
}
return null;
}
}
/*
used to update the Local DB contents of the content provider
*/
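/*
Example input (the toString() of a successor's replica map, as sent with RECOVER_COR):
    {1=one, 2=two, 3=three}
The braces are stripped, entries are split on "," and then on "="; only pairs whose
coordinator (per getNodeId) is this node are written back into localDB and SQLite.
*/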
private class UpdateLocalDBTask extends AsyncTask<String, Void, Void> {
String TAG = UpdateLocalDBTask.class.getSimpleName();
@Override
protected Void doInBackground(String... msgs) {
Log.i(TAG, "Update localDB class with msg :" + msgs); //{1=one, 2=two, 3=three}
String corValues = msgs[0];
String res = corValues.replaceAll("[\\[\\](){}]","");
Log.i(TAG, "cor values after removing the braces" + res);
SQLiteDatabase db = mDBhandler.getWritableDatabase();
ContentValues cv;
String[] sp = res.split(",");
for(String ele : sp) {
String x = ele.trim();
System.out.println(x);
String[] kvPairs = x.split("=");
try {
String resNode = getNodeId(kvPairs[0]);
if(resNode.compareTo(curNode) == 0) { //store only if they are equal
System.out.println("result to be stored in cor are :" + kvPairs[0] + "::" + kvPairs[1]);
localDB.put(kvPairs[0], kvPairs[1]);
cv = new ContentValues();
cv.put(DBHandler.COL_NAME_KEY, kvPairs[0]);
cv.put(DBHandler.COL_NAME_VALUE, kvPairs[1]);
// cv.put(DBHandler.COL_TYPE, TYPE_COR); //setting as coordinator
long rowId1 = db.insertWithOnConflict(DBHandler.TABLE_NAME, null, cv, android.database.sqlite.SQLiteDatabase.CONFLICT_REPLACE);
Log.v("insert", "insert in the updateLocalDB Task " + cv.toString());
if (rowId1 == -1) { //value already exists
Log.i("Conflict", "Error inserting values in DB");
} else {
Log.i(TAG, "success fully inserted in Local DB" + cv.toString());
}
}
} catch (Exception e) {
Log.e(TAG, "Exception in Update local DB task : " + e);
}
}
// if(msgs[0].contains("==")) {
// String[] keyValueString = msgs[0].split("==");
// SQLiteDatabase db = mDBhandler.getWritableDatabase();;
// ContentValues cv;
// for(String kv : keyValueString) {
// String[] kvArr = kv.split("::");
// localDB.put(kvArr[0], kvArr[1]);
// Log.i(TAG, "inserted this pair in localDB :" + kvArr[0] + "," + kvArr[1]);
// cv = new ContentValues();
// cv.put(DBHandler.COL_NAME_KEY, kvArr[0]);
// cv.put(DBHandler.COL_NAME_VALUE, kvArr[1]);
// cv.put(DBHandler.COL_TYPE, TYPE_COR); //setting as coordinator
// long rowId1 = db.insertWithOnConflict(DBHandler.TABLE_NAME, null, cv, android.database.sqlite.SQLiteDatabase.CONFLICT_REPLACE);
// Log.v("insert", "insert since one node" + cv.toString());
// if (rowId1 == -1) { //value already exists
// Log.i("Conflict", "Error inserting values in DB");
// } else {
// Log.i(TAG, "success fully inserted in Local DB" + cv.toString());
// }
//
// }
// }
return null;
}
}
/*
used to update the replica 1 DB contents of the content provider
*/
private class UpdateReplica1DBTask extends AsyncTask<String, Void, Void> {
String TAG = UpdateReplica1DBTask.class.getSimpleName();
@Override
protected Void doInBackground(String... msgs) {
Log.i(TAG, "Update ReplicaDB class with msg :" + msgs); //{1=one, 2=two, 3=three}
String repValues = msgs[0];
String res = repValues.replaceAll("[\\[\\](){}]","");
Log.i(TAG, " replica values after removing the braces" + res);
SQLiteDatabase db = mDBhandler.getWritableDatabase();
ContentValues cv;
String[] sp = res.split(",");
for(String ele : sp) {
String x = ele.trim();
System.out.println(x);
String[] kvPairs = x.split("=");
if(kvPairs.length == 2) {
System.out.println("result to be stored in replica is :" + kvPairs[0] + "::" + kvPairs[1]);
replicaDB.put(kvPairs[0], kvPairs[1]);
cv = new ContentValues();
cv.put(DBHandler.COL_NAME_KEY, kvPairs[0]);
cv.put(DBHandler.COL_NAME_VALUE, kvPairs[1]);
// cv.put(DBHandler.COL_TYPE, TYPE_REP1); //setting as coordinator
long rowId1 = db.insertWithOnConflict(DBHandler.TABLE_NAME, null, cv, android.database.sqlite.SQLiteDatabase.CONFLICT_REPLACE);
Log.v("insert", "insert since one node" + cv.toString());
if (rowId1 == -1) { //value already exists
Log.i("Conflict", "Error inserting values in DB");
} else {
Log.i(TAG, "success fully inserted in replica DB" + cv.toString());
}
}
}
return null;
}
}
//helper functions
/*
@param: String: key value
@return: Node (coordinator) where the key has to be inserted
*/
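/*
The comparisons below assume the ring order (by genHash of the node id) matches nodeName:
5562 -> 5556 -> 5554 -> 5558 -> 5560. A key belongs to the first node whose hash is greater
than or equal to the key's hash, wrapping around to 5562 when the key hashes beyond 5560
(the key_62/key_60 corner case handled first below).
*/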
private String getNodeId(String key) {
String keyHash = "";
try {
keyHash = genHash(key);
Log.i(TAG, "in gerNodeid generated hash is "+ key + " :: " + keyHash);
} catch (NoSuchAlgorithmException e) {
Log.e(TAG, "No argument exception in getNode ID " + e);
}
String insertNode = null;
// boolean stop = false;
// for(int i = 0; !stop && i < 5; i++) { //check from starting
// int cmp = keyHash.compareTo(hashList.get(i)); //if greater then that is the node we need to insert on
// if(cmp > 0) {
// insertNode = nodeList.get(i);
// Log.i(TAG, "in getNode id inside if loop cmp value is " + cmp + "::" + insertNode);
// stop = true;
// }
// }
// if(insertNode == null) { //setting the last node as insert node if null
// insertNode = nodeList.get(4);
// Log.i(TAG, "in getNode id inside if insert node is null " + insertNode);
// }
int key_62 = keyHash.compareTo(hashList.get(0));
int key_56 = keyHash.compareTo(hashList.get(1));
int key_54 = keyHash.compareTo(hashList.get(2));
int key_58 = keyHash.compareTo(hashList.get(3));
int key_60 = keyHash.compareTo(hashList.get(4));
Log.i(TAG, "compare vaues are 62 56 54 58 60 " + key_62 + " " + key_56 + " " + key_54 + " " + key_58 + " " + key_60 );
if(key_62 <= 0 || key_60 > 0) { //cornor case
insertNode = "5562";
Log.i(TAG, "getNodeId insert node 1:" + insertNode);
} else if(key_62 > 0 && key_56 <= 0) {
insertNode = nodeList.get(1);
Log.i(TAG, "getNodeId insert node 2:" + insertNode);
} else if(key_56 > 0 && key_54 <= 0) {
insertNode = nodeList.get(2);
Log.i(TAG, "getNodeId insert node 3:" + insertNode);
} else if(key_54 > 0 && key_58 <= 0) {
insertNode = nodeList.get(3);
Log.i(TAG, "getNodeId insert node 4:" + insertNode);
} else if(key_58 > 0 && key_60 <= 0) {
insertNode = nodeList.get(4);
Log.i(TAG, "getNodeId insert node 5:" + insertNode);
}
return insertNode;
}
/*
Builds the URI for content resolver
*/
private Uri buildUri(String scheme, String authority) {
Uri.Builder uriBuilder = new Uri.Builder();
uriBuilder.authority(authority);
uriBuilder.scheme(scheme);
return uriBuilder.build();
}
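/*
Example client usage of this provider from another component (authority taken from the
commented-out cr.insert(...) calls above; the key/value literals are illustrative only):
    Uri uri = buildUri("content", "edu.buffalo.cse.cse486586.simpledynamo.provider");
    ContentValues cv = new ContentValues();
    cv.put("key", "key0");
    cv.put("value", "value0");
    cr.insert(uri, cv); //stored on the coordinator and its two successors
    Cursor one = cr.query(uri, null, "key0", null, null); //single-key read
    Cursor all = cr.query(uri, null, "\"*\"", null, null); //all keys on all nodes
*/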
}
/*
* Copyright (C) 2009 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sop4j.base.google.common.collect;
import static com.sop4j.base.google.common.base.Preconditions.checkNotNull;
import com.sop4j.base.google.common.annotations.GwtCompatible;
import com.sop4j.base.google.common.base.Objects;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
/**
* An immutable {@link Table} with reliable user-specified iteration order.
* Does not permit null keys or values.
*
* <p><b>Note</b>: Although this class is not final, it cannot be subclassed as
* it has no public or protected constructors. Thus, instances of this class are
* guaranteed to be immutable.
*
* <p>See the Guava User Guide article on <a href=
* "http://code.google.com/p/guava-libraries/wiki/ImmutableCollectionsExplained">
* immutable collections</a>.
*
* @author Gregory Kick
* @since 11.0
*/
@GwtCompatible
// TODO(gak): make serializable
public abstract class ImmutableTable<R, C, V> extends AbstractTable<R, C, V> {
private static final ImmutableTable<Object, Object, Object> EMPTY
= new SparseImmutableTable<Object, Object, Object>(
ImmutableList.<Cell<Object, Object, Object>>of(),
ImmutableSet.of(), ImmutableSet.of());
/** Returns an empty immutable table. */
@SuppressWarnings("unchecked")
public static <R, C, V> ImmutableTable<R, C, V> of() {
return (ImmutableTable<R, C, V>) EMPTY;
}
/** Returns an immutable table containing a single cell. */
public static <R, C, V> ImmutableTable<R, C, V> of(R rowKey,
C columnKey, V value) {
return new SingletonImmutableTable<R, C, V>(rowKey, columnKey, value);
}
/**
* Returns an immutable copy of the provided table.
*
* <p>The {@link Table#cellSet()} iteration order of the provided table
* determines the iteration ordering of all views in the returned table. Note
* that some views of the original table and the copied table may have
* different iteration orders. For more control over the ordering, create a
* {@link Builder} and call {@link Builder#orderRowsBy},
* {@link Builder#orderColumnsBy}, and {@link Builder#putAll}
*
* <p>Despite the method name, this method attempts to avoid actually copying
* the data when it is safe to do so. The exact circumstances under which a
* copy will or will not be performed are undocumented and subject to change.
*/
public static <R, C, V> ImmutableTable<R, C, V> copyOf(
Table<? extends R, ? extends C, ? extends V> table) {
if (table instanceof ImmutableTable) {
@SuppressWarnings("unchecked")
ImmutableTable<R, C, V> parameterizedTable
= (ImmutableTable<R, C, V>) table;
return parameterizedTable;
} else {
int size = table.size();
switch (size) {
case 0:
return of();
case 1:
Cell<? extends R, ? extends C, ? extends V> onlyCell
= Iterables.getOnlyElement(table.cellSet());
return ImmutableTable.<R, C, V>of(onlyCell.getRowKey(),
onlyCell.getColumnKey(), onlyCell.getValue());
default:
ImmutableSet.Builder<Cell<R, C, V>> cellSetBuilder
= ImmutableSet.builder();
for (Cell<? extends R, ? extends C, ? extends V> cell :
table.cellSet()) {
/*
* Must cast to be able to create a Cell<R, C, V> rather than a
* Cell<? extends R, ? extends C, ? extends V>
*/
cellSetBuilder.add(cellOf((R) cell.getRowKey(),
(C) cell.getColumnKey(), (V) cell.getValue()));
}
return RegularImmutableTable.forCells(cellSetBuilder.build());
}
}
}
/**
* Returns a new builder. The generated builder is equivalent to the builder
* created by the {@link Builder#Builder() ImmutableTable.Builder()} constructor.
*/
public static <R, C, V> Builder<R, C, V> builder() {
return new Builder<R, C, V>();
}
/**
* Verifies that {@code rowKey}, {@code columnKey} and {@code value} are
* non-null, and returns a new entry with those values.
*/
static <R, C, V> Cell<R, C, V> cellOf(R rowKey, C columnKey, V value) {
return Tables.immutableCell(checkNotNull(rowKey), checkNotNull(columnKey),
checkNotNull(value));
}
/**
* A builder for creating immutable table instances, especially {@code public
* static final} tables ("constant tables"). Example: <pre> {@code
*
* static final ImmutableTable<Integer, Character, String> SPREADSHEET =
* new ImmutableTable.Builder<Integer, Character, String>()
* .put(1, 'A', "foo")
* .put(1, 'B', "bar")
* .put(2, 'A', "baz")
* .build();}</pre>
*
* <p>By default, the order in which cells are added to the builder determines
* the iteration ordering of all views in the returned table, with {@link
* #putAll} following the {@link Table#cellSet()} iteration order. However, if
* {@link #orderRowsBy} or {@link #orderColumnsBy} is called, the views are
* sorted by the supplied comparators.
*
* For empty or single-cell immutable tables, {@link #of()} and
* {@link #of(Object, Object, Object)} are even more convenient.
*
* <p>Builder instances can be reused - it is safe to call {@link #build}
* multiple times to build multiple tables in series. Each table is a superset
* of the tables created before it.
*
* @since 11.0
*/
public static final class Builder<R, C, V> {
private final List<Cell<R, C, V>> cells = Lists.newArrayList();
private Comparator<? super R> rowComparator;
private Comparator<? super C> columnComparator;
/**
* Creates a new builder. The returned builder is equivalent to the builder
* generated by {@link ImmutableTable#builder}.
*/
public Builder() {}
/**
* Specifies the ordering of the generated table's rows.
*/
public Builder<R, C, V> orderRowsBy(Comparator<? super R> rowComparator) {
this.rowComparator = checkNotNull(rowComparator);
return this;
}
/**
* Specifies the ordering of the generated table's columns.
*/
public Builder<R, C, V> orderColumnsBy(
Comparator<? super C> columnComparator) {
this.columnComparator = checkNotNull(columnComparator);
return this;
}
/**
* Associates the ({@code rowKey}, {@code columnKey}) pair with {@code
* value} in the built table. Duplicate key pairs are not allowed and will
* cause {@link #build} to fail.
*/
public Builder<R, C, V> put(R rowKey, C columnKey, V value) {
cells.add(cellOf(rowKey, columnKey, value));
return this;
}
/**
* Adds the given {@code cell} to the table, making it immutable if
* necessary. Duplicate key pairs are not allowed and will cause {@link
* #build} to fail.
*/
public Builder<R, C, V> put(
Cell<? extends R, ? extends C, ? extends V> cell) {
if (cell instanceof Tables.ImmutableCell) {
checkNotNull(cell.getRowKey());
checkNotNull(cell.getColumnKey());
checkNotNull(cell.getValue());
@SuppressWarnings("unchecked") // all supported methods are covariant
Cell<R, C, V> immutableCell = (Cell<R, C, V>) cell;
cells.add(immutableCell);
} else {
put(cell.getRowKey(), cell.getColumnKey(), cell.getValue());
}
return this;
}
/**
* Associates all of the given table's keys and values in the built table.
* Duplicate row key column key pairs are not allowed, and will cause
* {@link #build} to fail.
*
* @throws NullPointerException if any key or value in {@code table} is null
*/
public Builder<R, C, V> putAll(
Table<? extends R, ? extends C, ? extends V> table) {
for (Cell<? extends R, ? extends C, ? extends V> cell : table.cellSet()) {
put(cell);
}
return this;
}
/**
* Returns a newly-created immutable table.
*
* @throws IllegalArgumentException if duplicate key pairs were added
*/
public ImmutableTable<R, C, V> build() {
int size = cells.size();
switch (size) {
case 0:
return of();
case 1:
return new SingletonImmutableTable<R, C, V>(
Iterables.getOnlyElement(cells));
default:
return RegularImmutableTable.forCells(
cells, rowComparator, columnComparator);
}
}
}
ImmutableTable() {}
@Override public ImmutableSet<Cell<R, C, V>> cellSet() {
return (ImmutableSet<Cell<R, C, V>>) super.cellSet();
}
@Override
abstract ImmutableSet<Cell<R, C, V>> createCellSet();
@Override
final UnmodifiableIterator<Cell<R, C, V>> cellIterator() {
throw new AssertionError("should never be called");
}
@Override
public ImmutableCollection<V> values() {
return (ImmutableCollection<V>) super.values();
}
@Override
abstract ImmutableCollection<V> createValues();
@Override
final Iterator<V> valuesIterator() {
throw new AssertionError("should never be called");
}
/**
* {@inheritDoc}
*
* @throws NullPointerException if {@code columnKey} is {@code null}
*/
@Override public ImmutableMap<R, V> column(C columnKey) {
checkNotNull(columnKey);
return Objects.firstNonNull(
(ImmutableMap<R, V>) columnMap().get(columnKey),
ImmutableMap.<R, V>of());
}
@Override public ImmutableSet<C> columnKeySet() {
return columnMap().keySet();
}
/**
* {@inheritDoc}
*
* <p>The value {@code Map<R, V>} instances in the returned map are
* {@link ImmutableMap} instances as well.
*/
@Override public abstract ImmutableMap<C, Map<R, V>> columnMap();
/**
* {@inheritDoc}
*
* @throws NullPointerException if {@code rowKey} is {@code null}
*/
@Override public ImmutableMap<C, V> row(R rowKey) {
checkNotNull(rowKey);
return Objects.firstNonNull(
(ImmutableMap<C, V>) rowMap().get(rowKey),
ImmutableMap.<C, V>of());
}
@Override public ImmutableSet<R> rowKeySet() {
return rowMap().keySet();
}
/**
* {@inheritDoc}
*
* <p>The value {@code Map<C, V>} instances in the returned map are
* {@link ImmutableMap} instances as well.
*/
@Override public abstract ImmutableMap<R, Map<C, V>> rowMap();
@Override
public boolean contains(@Nullable Object rowKey, @Nullable Object columnKey) {
return get(rowKey, columnKey) != null;
}
@Override
public boolean containsValue(@Nullable Object value) {
return values().contains(value);
}
/**
* Guaranteed to throw an exception and leave the table unmodified.
*
* @throws UnsupportedOperationException always
* @deprecated Unsupported operation.
*/
@Deprecated @Override public final void clear() {
throw new UnsupportedOperationException();
}
/**
* Guaranteed to throw an exception and leave the table unmodified.
*
* @throws UnsupportedOperationException always
* @deprecated Unsupported operation.
*/
@Deprecated @Override public final V put(R rowKey, C columnKey, V value) {
throw new UnsupportedOperationException();
}
/**
* Guaranteed to throw an exception and leave the table unmodified.
*
* @throws UnsupportedOperationException always
* @deprecated Unsupported operation.
*/
@Deprecated @Override public final void putAll(
Table<? extends R, ? extends C, ? extends V> table) {
throw new UnsupportedOperationException();
}
/**
* Guaranteed to throw an exception and leave the table unmodified.
*
* @throws UnsupportedOperationException always
* @deprecated Unsupported operation.
*/
@Deprecated @Override public final V remove(Object rowKey, Object columnKey) {
throw new UnsupportedOperationException();
}
}
// =================================================================================================
// Copyright 2011 Twitter, Inc.
// -------------------------------------------------------------------------------------------------
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this work except in compliance with the License.
// You may obtain a copy of the License in the LICENSE file, or at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// =================================================================================================
package com.twitter.common.logging;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.twitter.common.quantity.Amount;
import com.twitter.common.quantity.Time;
import com.twitter.common.stats.StatImpl;
import com.twitter.common.stats.Stats;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.logging.Logger;
/**
* Log that buffers requests before sending them to a wrapped log.
*
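* <p>A minimal usage sketch (the wrapped {@code underlying} log and the {@code Amount.of}
* factory call are assumptions for illustration, not taken from this class): <pre>   {@code
*
*   BufferedLog<String, Boolean> log = BufferedLog.<String, Boolean>builder()
*       .buffer(underlying)                            // the Log<String, Boolean> being wrapped
*       .withChunkLength(500)                          // flush once 500 entries are buffered
*       .withMaxBuffer(100000)                         // drop records beyond this backlog
*       .withFlushInterval(Amount.of(1, Time.SECONDS)) // also flush on a timer
*       .build();
*   log.log("a message");}</pre>
*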
* @author William Farner
*/
public class BufferedLog<T, R> implements Log<T, Void> {
private static final Logger LOG = Logger.getLogger(BufferedLog.class.getName());
private static final ExecutorService DEFAULT_EXECUTOR_SERVICE =
Executors.newSingleThreadExecutor(
new ThreadFactoryBuilder().setDaemon(true).setNameFormat("Log Pusher-%d").build());
private static final int DEFAULT_MAX_BUFFER_SIZE = 100000;
// TODO(William Farner): Change to use a ScheduledExecutorService instead of a timer.
private final TimerTask logPusher = new TimerTask() {
@Override public void run() {
flush();
}
};
// Local buffer of log messages.
private final List<T> localBuffer = Lists.newLinkedList();
// The log that is being buffered.
private Log<T, R> bufferedLog;
// Filter to determine when a log request should be retried.
private Predicate<R> retryFilter = null;
// Maximum number of log entries that can be buffered before truncation (lost messages).
private int maxBufferSize = DEFAULT_MAX_BUFFER_SIZE;
// Maximum buffer length before attempting to submit.
private int chunkLength;
// Maximum time for a message to sit in the buffer before attempting to flush.
private Amount<Integer, Time> flushInterval;
// Service to handle flushing the log.
private ExecutorService logSubmitService = DEFAULT_EXECUTOR_SERVICE;
private BufferedLog() {
// Created through builder.
Stats.export(new StatImpl<Integer>("scribe_buffer_size") {
public Integer read() { return getBacklog(); }
});
}
public static <T, R> Builder<T, R> builder() {
return new Builder<T, R>();
}
/**
* Starts the log submission service by scheduling a timer to periodically submit messages.
*/
private void start() {
long flushIntervalMillis = flushInterval.as(Time.MILLISECONDS);
new Timer(true).scheduleAtFixedRate(logPusher, flushIntervalMillis, flushIntervalMillis);
}
/**
* Gets the current number of messages in the local buffer.
*
* @return The number of backlogged messages.
*/
protected int getBacklog() {
synchronized (localBuffer) {
return localBuffer.size();
}
}
/**
* Stores a log entry, flushing immediately if the buffer length limit is exceeded.
*
* @param entry Entry to log.
*/
@Override
public Void log(T entry) {
synchronized (localBuffer) {
localBuffer.add(entry);
if (localBuffer.size() >= chunkLength) {
logSubmitService.submit(logPusher);
}
}
return null;
}
@Override
public Void log(List<T> entries) {
for (T entry : entries) log(entry);
return null;
}
@Override
public void flush() {
List<T> buffer = copyBuffer();
if (buffer.isEmpty()) return;
R result = bufferedLog.log(buffer);
// Restore the buffer if the write was not successful.
if (retryFilter != null && retryFilter.apply(result)) {
LOG.warning("Log request failed, restoring spooled messages.");
restoreToLocalBuffer(buffer);
}
}
/**
* Creates a snapshot of the local buffer and clears the local buffer.
*
* @return A snapshot of the local buffer.
*/
private List<T> copyBuffer() {
synchronized (localBuffer) {
List<T> bufferCopy = ImmutableList.copyOf(localBuffer);
localBuffer.clear();
return bufferCopy;
}
}
/**
* Restores log entries back to the local buffer. This can be used to commit entries back to the
* buffer after a flush operation failed.
*
* @param buffer The log entries to restore.
*/
private void restoreToLocalBuffer(List<T> buffer) {
synchronized (localBuffer) {
int restoreRecords = Math.min(buffer.size(), maxBufferSize - localBuffer.size());
if (restoreRecords != buffer.size()) {
LOG.severe((buffer.size() - restoreRecords) + " log records truncated!");
if (restoreRecords == 0) return;
}
localBuffer.addAll(0, buffer.subList(buffer.size() - restoreRecords, buffer.size()));
}
}
/**
* Configures a BufferedLog object.
*
* @param <T> Log message type.
* @param <R> Log result type.
*/
public static class Builder<T, R> {
private final BufferedLog<T, R> instance;
public Builder() {
instance = new BufferedLog<T, R>();
}
/**
* Specifies the log that should be buffered.
*
* @param bufferedLog Log to buffer requests to.
* @return A reference to the builder.
*/
public Builder<T, R> buffer(Log<T, R> bufferedLog) {
instance.bufferedLog = bufferedLog;
return this;
}
/**
* Adds a custom retry filter that will be used to determine whether a log result {@code R}
* should be used to indicate that a log request should be retried. Log submit retry behavior
* is not defined when the filter throws uncaught exceptions.
*
* @param retryFilter Filter to determine whether to retry.
* @return A reference to the builder.
*/
public Builder<T, R> withRetryFilter(Predicate<R> retryFilter) {
instance.retryFilter = retryFilter;
return this;
}
/**
* Specifies the maximum allowable buffer size, after which log records will be dropped to
* conserve memory.
*
* @param maxBufferSize Maximum buffer size.
* @return A reference to the builder.
*/
public Builder<T, R> withMaxBuffer(int maxBufferSize) {
instance.maxBufferSize = maxBufferSize;
return this;
}
/**
* Specifies the desired number of log records to submit in each request.
*
* @param chunkLength Maximum number of records to accumulate before trying to submit.
* @return A reference to the builder.
*/
public Builder<T, R> withChunkLength(int chunkLength) {
instance.chunkLength = chunkLength;
return this;
}
/**
* Specifies the maximum amount of time that a log entry may wait in the buffer before an
* attempt is made to flush the buffer.
*
* @param flushInterval Log flush interval.
* @return A reference to the builder.
*/
public Builder<T, R> withFlushInterval(Amount<Integer, Time> flushInterval) {
instance.flushInterval = flushInterval;
return this;
}
/**
* Specifies the executor service to use for (synchronously or asynchronously) sending
* log entries.
*
* @param logSubmitService Log submit executor service.
* @return A reference to the builder.
*/
public Builder<T, R> withExecutorService(ExecutorService logSubmitService) {
instance.logSubmitService = logSubmitService;
return this;
}
/**
* Creates the buffered log.
*
* @return The prepared buffered log.
*/
public BufferedLog<T, R> build() {
Preconditions.checkArgument(instance.chunkLength > 0);
Preconditions.checkArgument(instance.flushInterval.as(Time.MILLISECONDS) > 0);
Preconditions.checkNotNull(instance.logSubmitService);
Preconditions.checkArgument(instance.chunkLength <= instance.maxBufferSize);
instance.start();
return instance;
}
}
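// Usage sketch (illustrative only - the backing log, generic parameters, and values below are
// assumptions, not part of this source). A BufferedLog over some Log<String, Boolean> backend
// could be assembled roughly like this:
//
//   BufferedLog<String, Boolean> buffered = new BufferedLog.Builder<String, Boolean>()
//       .buffer(backendLog)                              // the Log<String, Boolean> to wrap
//       .withRetryFilter(result -> !result)              // retry when the backend reports failure
//       .withMaxBuffer(10000)
//       .withChunkLength(100)
//       .withFlushInterval(Amount.of(5, Time.SECONDS))
//       .withExecutorService(Executors.newSingleThreadExecutor())
//       .build();
//
// Entries submitted through the buffered log are then flushed to the backend in chunks of 100,
// or at least once every 5 seconds.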
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.phrase;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.search.suggest.DirectSpellcheckerSettings;
import org.elasticsearch.search.suggest.Suggester;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
class PhraseSuggestionContext extends SuggestionContext {
private final BytesRef SEPARATOR = new BytesRef(" ");
private float maxErrors = 0.5f;
private BytesRef separator = SEPARATOR;
private float realworldErrorLikelihood = 0.95f;
private List<DirectCandidateGenerator> generators = new ArrayList<>();
private int gramSize = 1;
private float confidence = 1.0f;
private int tokenLimit = NoisyChannelSpellChecker.DEFAULT_TOKEN_LIMIT;
private BytesRef preTag;
private BytesRef postTag;
private CompiledScript collateQueryScript;
private Map<String, Object> collateScriptParams = new HashMap<>(1);
private WordScorer.WordScorerFactory scorer;
private boolean requireUnigram = true;
private boolean prune = false;
public PhraseSuggestionContext(Suggester<? extends PhraseSuggestionContext> suggester) {
super(suggester);
}
public float maxErrors() {
return maxErrors;
}
public void setMaxErrors(Float maxErrors) {
this.maxErrors = maxErrors;
}
public BytesRef separator() {
return separator;
}
public void setSeparator(BytesRef separator) {
this.separator = separator;
}
public Float realworldErrorLikelyhood() {
return realworldErrorLikelihood;
}
public void setRealWordErrorLikelihood(Float realworldErrorLikelihood) {
this.realworldErrorLikelihood = realworldErrorLikelihood;
}
public void addGenerator(DirectCandidateGenerator generator) {
this.generators.add(generator);
}
public List<DirectCandidateGenerator> generators() {
return this.generators;
}
public void setGramSize(int gramSize) {
this.gramSize = gramSize;
}
public int gramSize() {
return gramSize;
}
public float confidence() {
return confidence;
}
public void setConfidence(float confidence) {
this.confidence = confidence;
}
public void setModel(WordScorer.WordScorerFactory scorer) {
this.scorer = scorer;
}
public WordScorer.WordScorerFactory model() {
return scorer;
}
static class DirectCandidateGenerator extends DirectSpellcheckerSettings {
private Analyzer preFilter;
private Analyzer postFilter;
private String field;
private int size = 5;
public String field() {
return field;
}
public void setField(String field) {
this.field = field;
}
public int size() {
return size;
}
public void size(int size) {
if (size <= 0) {
throw new IllegalArgumentException("Size must be positive");
}
this.size = size;
}
public Analyzer preFilter() {
return preFilter;
}
public void preFilter(Analyzer preFilter) {
this.preFilter = preFilter;
}
public Analyzer postFilter() {
return postFilter;
}
public void postFilter(Analyzer postFilter) {
this.postFilter = postFilter;
}
}
public void setRequireUnigram(boolean requireUnigram) {
this.requireUnigram = requireUnigram;
}
public boolean getRequireUnigram() {
return requireUnigram;
}
public void setTokenLimit(int tokenLimit) {
this.tokenLimit = tokenLimit;
}
public int getTokenLimit() {
return tokenLimit;
}
public void setPreTag(BytesRef preTag) {
this.preTag = preTag;
}
public BytesRef getPreTag() {
return preTag;
}
public void setPostTag(BytesRef postTag) {
this.postTag = postTag;
}
public BytesRef getPostTag() {
return postTag;
}
CompiledScript getCollateQueryScript() {
return collateQueryScript;
}
void setCollateQueryScript(CompiledScript collateQueryScript) {
this.collateQueryScript = collateQueryScript;
}
Map<String, Object> getCollateScriptParams() {
return collateScriptParams;
}
void setCollateScriptParams(Map<String, Object> collateScriptParams) {
this.collateScriptParams = collateScriptParams;
}
void setCollatePrune(boolean prune) {
this.prune = prune;
}
boolean collatePrune() {
return prune;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.hadoopImpl.mapreduce.lib;
import java.util.Arrays;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.accumulo.core.client.sample.SamplerConfiguration;
import org.apache.accumulo.core.client.summary.SummarizerConfiguration;
import org.apache.accumulo.core.conf.AccumuloConfiguration;
import org.apache.accumulo.core.conf.ConfigurationCopy;
import org.apache.accumulo.core.conf.DefaultConfiguration;
import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.core.sample.impl.SamplerConfigurationImpl;
import org.apache.hadoop.conf.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @since 1.6.0
*/
public class FileOutputConfigurator extends ConfiguratorBase {
private static final Logger log = LoggerFactory.getLogger(FileOutputConfigurator.class);
/**
* Configuration keys for {@link AccumuloConfiguration}.
*
* @since 1.6.0
*/
public static enum Opts {
ACCUMULO_PROPERTIES
}
/**
* Determines whether the given property is one of the supported Accumulo properties set by this
* OutputFormat, i.e., a property that changes the behavior of the RecordWriter.<br>
* These properties correspond to the supported public static setter methods available on this
* class.
*
* @param property
* the Accumulo property to check
* @since 1.6.0
*/
protected static Boolean isSupportedAccumuloProperty(Property property) {
switch (property) {
case TABLE_FILE_COMPRESSION_TYPE:
case TABLE_FILE_COMPRESSED_BLOCK_SIZE:
case TABLE_FILE_BLOCK_SIZE:
case TABLE_FILE_COMPRESSED_BLOCK_SIZE_INDEX:
case TABLE_FILE_REPLICATION:
return true;
default:
return false;
}
}
/**
* Helper for transforming Accumulo configuration properties into something that can be stored
* safely inside the Hadoop Job configuration.
*
* @param implementingClass
* the class whose name will be used as a prefix for the property configuration key
* @param conf
* the Hadoop configuration object to configure
* @param property
* the supported Accumulo property
* @param value
* the value of the property to set
* @since 1.6.0
*/
private static <T> void setAccumuloProperty(Class<?> implementingClass, Configuration conf,
Property property, T value) {
if (isSupportedAccumuloProperty(property)) {
String val = String.valueOf(value);
if (property.getType().isValidFormat(val)) {
String key =
enumToConfKey(implementingClass, Opts.ACCUMULO_PROPERTIES) + "." + property.getKey();
log.debug("Setting accumulo property {} = {} ", key, val);
conf.set(key, val);
} else
throw new IllegalArgumentException(
"Value is not appropriate for property type '" + property.getType() + "'");
} else
throw new IllegalArgumentException("Unsupported configuration property " + property.getKey());
}
/**
* This helper method provides an AccumuloConfiguration object constructed from the Accumulo
* defaults, and overridden with Accumulo properties that have been stored in the Job's
* configuration.
*
* @param implementingClass
* the class whose name will be used as a prefix for the property configuration key
* @param conf
* the Hadoop configuration object to configure
* @since 1.6.0
*/
public static AccumuloConfiguration getAccumuloConfiguration(Class<?> implementingClass,
Configuration conf) {
String prefix = enumToConfKey(implementingClass, Opts.ACCUMULO_PROPERTIES) + ".";
ConfigurationCopy acuConf = new ConfigurationCopy(DefaultConfiguration.getInstance());
for (Entry<String,String> entry : conf)
if (entry.getKey().startsWith(prefix)) {
String propString = entry.getKey().substring(prefix.length());
Property prop = Property.getPropertyByKey(propString);
if (prop != null) {
acuConf.set(prop, entry.getValue());
} else if (Property.isValidTablePropertyKey(propString)) {
acuConf.set(propString, entry.getValue());
} else {
throw new IllegalArgumentException("Unknown accumulo file property " + propString);
}
}
return acuConf;
}
/**
* Sets the compression type to use for data blocks. Specifying a compression may require
* additional libraries to be available to your Job.
*
* @param implementingClass
* the class whose name will be used as a prefix for the property configuration key
* @param conf
* the Hadoop configuration object to configure
* @param compressionType
* one of "none", "gz", "lzo", "snappy", or "zstd"
* @since 1.6.0
*/
public static void setCompressionType(Class<?> implementingClass, Configuration conf,
String compressionType) {
if (compressionType == null
|| !Arrays.asList("none", "gz", "lzo", "snappy", "zstd").contains(compressionType))
throw new IllegalArgumentException(
"Compression type must be one of: none, gz, lzo, snappy, zstd");
setAccumuloProperty(implementingClass, conf, Property.TABLE_FILE_COMPRESSION_TYPE,
compressionType);
}
/**
* Sets the size for data blocks within each file.<br>
* Data blocks are a span of key/value pairs stored in the file that are compressed and indexed as
* a group.
*
* <p>
* Making this value smaller may increase seek performance, but at the cost of increasing the size
* of the indexes (which can also affect seek performance).
*
* @param implementingClass
* the class whose name will be used as a prefix for the property configuration key
* @param conf
* the Hadoop configuration object to configure
* @param dataBlockSize
* the block size, in bytes
* @since 1.6.0
*/
public static void setDataBlockSize(Class<?> implementingClass, Configuration conf,
long dataBlockSize) {
setAccumuloProperty(implementingClass, conf, Property.TABLE_FILE_COMPRESSED_BLOCK_SIZE,
dataBlockSize);
}
/**
* Sets the size for file blocks in the file system; file blocks are managed, and replicated, by
* the underlying file system.
*
* @param implementingClass
* the class whose name will be used as a prefix for the property configuration key
* @param conf
* the Hadoop configuration object to configure
* @param fileBlockSize
* the block size, in bytes
* @since 1.6.0
*/
public static void setFileBlockSize(Class<?> implementingClass, Configuration conf,
long fileBlockSize) {
setAccumuloProperty(implementingClass, conf, Property.TABLE_FILE_BLOCK_SIZE, fileBlockSize);
}
/**
* Sets the size for index blocks within each file; smaller blocks means a deeper index hierarchy
* within the file, while larger blocks mean a more shallow index hierarchy within the file. This
* can affect the performance of queries.
*
* @param implementingClass
* the class whose name will be used as a prefix for the property configuration key
* @param conf
* the Hadoop configuration object to configure
* @param indexBlockSize
* the block size, in bytes
* @since 1.6.0
*/
public static void setIndexBlockSize(Class<?> implementingClass, Configuration conf,
long indexBlockSize) {
setAccumuloProperty(implementingClass, conf, Property.TABLE_FILE_COMPRESSED_BLOCK_SIZE_INDEX,
indexBlockSize);
}
/**
* Sets the file system replication factor for the resulting file, overriding the file system
* default.
*
* @param implementingClass
* the class whose name will be used as a prefix for the property configuration key
* @param conf
* the Hadoop configuration object to configure
* @param replication
* the number of replicas for produced files
* @since 1.6.0
*/
public static void setReplication(Class<?> implementingClass, Configuration conf,
int replication) {
setAccumuloProperty(implementingClass, conf, Property.TABLE_FILE_REPLICATION, replication);
}
/**
* @since 1.8.0
*/
public static void setSampler(Class<?> implementingClass, Configuration conf,
SamplerConfiguration samplerConfig) {
Map<String,String> props = new SamplerConfigurationImpl(samplerConfig).toTablePropertiesMap();
Set<Entry<String,String>> es = props.entrySet();
for (Entry<String,String> entry : es) {
conf.set(enumToConfKey(implementingClass, Opts.ACCUMULO_PROPERTIES) + "." + entry.getKey(),
entry.getValue());
}
}
public static void setSummarizers(Class<?> implementingClass, Configuration conf,
SummarizerConfiguration[] sumarizerConfigs) {
Map<String,String> props = SummarizerConfiguration.toTableProperties(sumarizerConfigs);
for (Entry<String,String> entry : props.entrySet()) {
conf.set(enumToConfKey(implementingClass, Opts.ACCUMULO_PROPERTIES) + "." + entry.getKey(),
entry.getValue());
}
}
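// Usage sketch (illustrative only; the output format class and values are assumptions): these
// static helpers are normally driven by an OutputFormat's own configuration methods, e.g.:
//
//   Configuration conf = job.getConfiguration();
//   FileOutputConfigurator.setCompressionType(AccumuloFileOutputFormat.class, conf, "gz");
//   FileOutputConfigurator.setDataBlockSize(AccumuloFileOutputFormat.class, conf, 256 * 1024);
//   FileOutputConfigurator.setReplication(AccumuloFileOutputFormat.class, conf, 3);
//   // the RecordWriter later reads back the merged view:
//   AccumuloConfiguration acuConf =
//       FileOutputConfigurator.getAccumuloConfiguration(AccumuloFileOutputFormat.class, conf);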
}
|
|
package com.github.lisicnu.libDroid.util;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.List;
/**
* <p/>
* <p/>
* Author: Eden Lee<p/>
* Date: 2014/11/24 <p/>
* Email: checkway@outlook.com <p/>
* Version: 1.0 <p/>
*/
public class ShellUtils {
public static final String COMMAND_SU = "su";
public static final String COMMAND_SH = "sh";
public static final String COMMAND_EXIT = "exit\n";
public static final String COMMAND_LINE_END = "\n";
private ShellUtils() {
}
/**
* Checks whether the current device has root permission.
*
* @return true if root permission is available, false otherwise
*/
public static boolean checkRootPermission() {
return execCommand("echo root", true, false).result == 0;
}
/**
* Executes a shell command and returns the result message by default.
*
* @param command command
* @param isRoot  whether the command needs to run as root
* @return the command result
* @see ShellUtils#execCommand(String[], boolean, boolean)
*/
public static CommandResult execCommand(String command, boolean isRoot) {
return execCommand(new String[]{command}, isRoot, true);
}
/**
* Executes shell commands and returns the result message by default.
*
* @param commands command list
* @param isRoot   whether the commands need to run as root
* @return the command result
* @see ShellUtils#execCommand(String[], boolean, boolean)
*/
public static CommandResult execCommand(List<String> commands, boolean isRoot) {
return execCommand(commands == null ? null : commands.toArray(new String[]{}), isRoot, true);
}
/**
* Executes shell commands and returns the result message by default.
*
* @param commands command array
* @param isRoot   whether the commands need to run as root
* @return the command result
* @see ShellUtils#execCommand(String[], boolean, boolean)
*/
public static CommandResult execCommand(String[] commands, boolean isRoot) {
return execCommand(commands, isRoot, true);
}
/**
* Executes a shell command.
*
* @param command         command
* @param isRoot          whether the command needs to run as root
* @param isNeedResultMsg whether the result message should be captured
* @return the command result
* @see ShellUtils#execCommand(String[], boolean, boolean)
*/
public static CommandResult execCommand(String command, boolean isRoot, boolean isNeedResultMsg) {
return execCommand(new String[]{command}, isRoot, isNeedResultMsg);
}
/**
* Executes shell commands.
*
* @param commands        command list
* @param isRoot          whether the commands need to run as root
* @param isNeedResultMsg whether the result message should be captured
* @return the command result
* @see ShellUtils#execCommand(String[], boolean, boolean)
*/
public static CommandResult execCommand(List<String> commands, boolean isRoot, boolean isNeedResultMsg) {
return execCommand(commands == null ? null : commands.toArray(new String[]{}), isRoot, isNeedResultMsg);
}
/**
* Executes shell commands.
*
* @param commands        command array
* @param isRoot          whether the commands need to run as root
* @param isNeedResultMsg whether the result message should be captured
* @return <ul>
* <li>if isNeedResultMsg is false, {@link CommandResult#successMsg} and
* {@link CommandResult#errorMsg} are null.</li>
* <li>if {@link CommandResult#result} is -1, an exception may have occurred.</li>
* </ul>
*/
public static CommandResult execCommand(String[] commands, boolean isRoot, boolean isNeedResultMsg) {
int result = -1;
if (commands == null || commands.length == 0) {
return new CommandResult(result, null, null);
}
Process process = null;
BufferedReader successResult = null;
BufferedReader errorResult = null;
StringBuilder successMsg = null;
StringBuilder errorMsg = null;
DataOutputStream os = null;
try {
process = Runtime.getRuntime().exec(isRoot ? COMMAND_SU : COMMAND_SH);
os = new DataOutputStream(process.getOutputStream());
for (String command : commands) {
if (command == null) {
continue;
}
// do not use os.writeBytes(command), to avoid charset errors with non-ASCII (e.g. Chinese) characters
os.write(command.getBytes());
os.writeBytes(COMMAND_LINE_END);
os.flush();
}
os.writeBytes(COMMAND_EXIT);
os.flush();
result = process.waitFor();
// get command result
if (isNeedResultMsg) {
successMsg = new StringBuilder();
errorMsg = new StringBuilder();
successResult = new BufferedReader(new InputStreamReader(process.getInputStream()));
errorResult = new BufferedReader(new InputStreamReader(process.getErrorStream()));
String s;
while ((s = successResult.readLine()) != null) {
successMsg.append(s);
}
while ((s = errorResult.readLine()) != null) {
errorMsg.append(s);
}
}
} catch (IOException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
if (os != null) {
os.close();
}
if (successResult != null) {
successResult.close();
}
if (errorResult != null) {
errorResult.close();
}
} catch (IOException e) {
e.printStackTrace();
}
if (process != null) {
process.destroy();
}
}
return new CommandResult(result, successMsg == null ? null : successMsg.toString(), errorMsg == null ? null
: errorMsg.toString());
}
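// Usage sketch (illustrative only; the command and log tag are arbitrary examples): run a
// command without root and inspect the outcome.
//
//   ShellUtils.CommandResult res = ShellUtils.execCommand("ls /sdcard", false, true);
//   if (res.result == 0) {
//       Log.d("ShellUtils", "stdout: " + res.successMsg);
//   } else {
//       Log.w("ShellUtils", "exit=" + res.result + ", stderr: " + res.errorMsg);
//   }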
/**
* result of command
* <ul>
* <li>{@link CommandResult#result} is the exit code of the command: 0 means success, any other value means an
* error, just as when executing in a Linux shell</li>
* <li>{@link CommandResult#successMsg} is the standard output captured from the command</li>
* <li>{@link CommandResult#errorMsg} is the standard error output captured from the command</li>
* </ul>
*
* @author <a href="http://www.trinea.cn" target="_blank">Trinea</a> 2013-5-16
*/
public static class CommandResult {
/**
* exit code of the command
*/
public int result;
/**
* standard output captured from the command
*/
public String successMsg;
/**
* standard error output captured from the command
*/
public String errorMsg;
public CommandResult(int result) {
this.result = result;
}
public CommandResult(int result, String successMsg, String errorMsg) {
this.result = result;
this.successMsg = successMsg;
this.errorMsg = errorMsg;
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.io;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.channels.Channel;
import org.apache.commons.codec.binary.ExtendedHex;
import org.apache.commons.io.output.LineLevelAppender;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.Validate;
/**
* An {@link OutputStream} that formats its written bytes in the same way that
* {@link HexDump#dump(byte[], long, OutputStream, int)} does by default. It
* can be configured to format the data slightly differently - e.g., to use lowercase instead of
* uppercase hex digits, or a {@code long} offset instead of an {@code int} one.
* @author Lyor G.
* @since Sep 25, 2013 9:40:27 AM
*/
public class HexDumpOutputStream extends OutputStream implements Channel {
private final LineLevelAppender _appender;
private final byte[] _workBuf, _oneByte=new byte[1];
private final boolean _lowercase, _longOffset;
private int _filledLen;
private boolean _closed;
private long _displayOffset;
private final StringBuilder _lineData;
public static final int DEFAULT_BYTES_PER_LINE=16;
public static final boolean DEFAULT_HEX_CASE=false, DEFAULT_LONG_OFFSET=false;
public HexDumpOutputStream(LineLevelAppender appender) {
this(appender, DEFAULT_BYTES_PER_LINE);
}
public HexDumpOutputStream(LineLevelAppender appender, int bytesPerLine) {
this(appender, bytesPerLine, DEFAULT_HEX_CASE, DEFAULT_LONG_OFFSET);
}
public HexDumpOutputStream(LineLevelAppender appender, boolean useLowercase) {
this(appender, DEFAULT_BYTES_PER_LINE, useLowercase, DEFAULT_LONG_OFFSET);
}
public HexDumpOutputStream(LineLevelAppender appender, int bytesPerLine, boolean useLowercase, boolean longOffset) {
_appender = Validate.notNull(appender, "No appender", ArrayUtils.EMPTY_OBJECT_ARRAY);
Validate.isTrue(bytesPerLine > 0, "Bad bytes-per-line value: %d", bytesPerLine);
_workBuf = new byte[bytesPerLine];
_lineData = new StringBuilder((2 /* HEX */ + 1 /* space */ + 1 /* character */) * bytesPerLine + Long.SIZE /* some extra for the offset */);
_lowercase = useLowercase;
_longOffset = longOffset;
}
public final LineLevelAppender getLineLevelAppender() {
return _appender;
}
public final boolean isLowercase() {
return _lowercase;
}
public final boolean isLongOffset() {
return _longOffset;
}
@Override
public boolean isOpen() {
return (!_closed);
}
@Override
public void write(int b) throws IOException {
_oneByte[0] = (byte) (b & 0xFF);
write(_oneByte, 0, 1);
}
@Override
public void write(byte[] b) throws IOException {
write(b, 0, b.length);
}
@Override
public void write(byte[] b, int off, int len) throws IOException {
if (!isOpen()) {
throw new IOException("Stream is closed");
}
if (len <= 0) {
return;
}
LineLevelAppender appender=getLineLevelAppender();
if (!appender.isWriteEnabled()) {
return;
}
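// First top up the partially filled work buffer left over from a previous write; once it holds
// a full line's worth of bytes, format and emit that line, then fall through to the remainder.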
int curPos=off, remain=len;
if (_filledLen < _workBuf.length) {
int avail=_workBuf.length - _filledLen, cpySize=Math.min(avail, remain);
System.arraycopy(b, curPos, _workBuf, _filledLen, cpySize);
if ((_filledLen += cpySize) >= _workBuf.length) {
try {
writeAccumulatedData(appender, _displayOffset, _workBuf, 0, _filledLen, _workBuf.length);
} finally {
_displayOffset += _filledLen;
_filledLen = 0;
}
}
curPos += cpySize;
remain -= cpySize;
}
while(remain > 0) {
int avail=Math.min(remain, _workBuf.length);
// If less than a full line, accumulate for next write iteration
if (avail < _workBuf.length) {
System.arraycopy(b, curPos, _workBuf, 0, avail);
_filledLen = avail;
} else {
try {
writeAccumulatedData(appender, _displayOffset, b, curPos, avail, _workBuf.length);
} finally {
_displayOffset += avail;
}
}
remain -= avail;
curPos += avail;
}
}
protected void writeAccumulatedData(LineLevelAppender appender, long offset, byte[] data, int off, int len, int bytesPerLine) throws IOException {
_lineData.setLength(0);
if (isLongOffset()) {
ExtendedHex.appendHex(_lineData, isLowercase(), offset);
} else {
ExtendedHex.appendHex(_lineData, isLowercase(), (int) offset);
}
_lineData.append(' ');
for (int index=0, pos=off; index < len; index++, pos++) {
ExtendedHex.appendHex(_lineData, isLowercase(), data[pos]).append(' ');
}
// if less than bytes per line, then pad with spaces
for (int index=len; index < bytesPerLine; index++) {
_lineData.append(" ");
}
for (int index=0, pos=off; index < len; index++, pos++) {
char ch=(char) (data[pos] & 0xFF);
if ((ch < ' ') || (ch > 0x7E)) {
_lineData.append('.');
} else {
_lineData.append(ch);
}
}
appender.writeLineData(_lineData);
}
@Override
public void close() throws IOException {
if (isOpen()) {
try {
if (_filledLen > 0) {
try {
LineLevelAppender appender=getLineLevelAppender();
if (appender.isWriteEnabled()) {
writeAccumulatedData(appender, _displayOffset, _workBuf, 0, _filledLen, _workBuf.length);
}
} finally {
_displayOffset += _filledLen;
_filledLen = 0;
}
}
} finally {
_closed = true;
}
}
}
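// Usage sketch (illustrative only; the LineLevelAppender wiring is an assumption - any appender
// that reports isWriteEnabled() and forwards each formatted line, e.g. to a logger, would do):
//
//   try (HexDumpOutputStream hexOut = new HexDumpOutputStream(appender)) {
//       hexOut.write("Hello, hex dump".getBytes(StandardCharsets.US_ASCII));
//   }   // close() flushes any partially filled final line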
}
|
|
package kafka.admin;
import com.alibaba.fastjson.JSONObject;
import com.google.common.base.Predicate;
import com.google.common.collect.*;
import kafka.api.PartitionMetadata;
import kafka.api.TopicMetadata;
import kafka.cluster.Broker;
import kafka.cluster.LogConfigs;
import kafka.common.*;
import kafka.utils.*;
import org.I0Itec.zkclient.ZkClient;
import org.I0Itec.zkclient.exception.ZkNodeExistsException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import static com.google.common.base.Preconditions.checkState;
public class AdminUtils {
public static Random rand = new Random();
public static String TopicConfigChangeZnodePrefix = "config_change_";
static Logger logger = LoggerFactory.getLogger(AdminUtils.class);
/**
* There are 2 goals of replica assignment:
* 1. Spread the replicas evenly among brokers.
* 2. For partitions assigned to a particular broker, their other replicas are spread over the other brokers.
* <p/>
* To achieve this goal, we:
* 1. Assign the first replica of each partition by round-robin, starting from a random position in the broker list.
* 2. Assign the remaining replicas of each partition with an increasing shift.
* <p/>
* Here is an example of assigning 10 partitions with a replication factor of 3 to 5 brokers:
* broker-0 broker-1 broker-2 broker-3 broker-4
* p0 p1 p2 p3 p4 (1st replica)
* p5 p6 p7 p8 p9 (1st replica)
* p4 p0 p1 p2 p3 (2nd replica)
* p8 p9 p5 p6 p7 (2nd replica)
* p3 p4 p0 p1 p2 (3rd replica)
* p7 p8 p9 p5 p6 (3rd replica)
*/
public static Multimap<Integer, Integer> assignReplicasToBrokers(List<Integer> brokerList, int nPartitions, int replicationFactor) {
return assignReplicasToBrokers(brokerList, nPartitions, replicationFactor, -1, -1);
}
public static Multimap<Integer, Integer> assignReplicasToBrokers(List<Integer> brokerList, int nPartitions, int replicationFactor, int fixedStartIndex/* = -1*/, int startPartitionId /*= -1*/) {
if (nPartitions <= 0)
throw new AdminOperationException("number of partitions must be larger than 0");
if (replicationFactor <= 0)
throw new AdminOperationException("replication factor must be larger than 0");
if (replicationFactor > brokerList.size())
throw new AdminOperationException("replication factor: " + replicationFactor + " larger than available brokers: " + brokerList.size());
Multimap<Integer, Integer> ret = HashMultimap.create();
int startIndex = (fixedStartIndex >= 0) ? fixedStartIndex : rand.nextInt(brokerList.size());
int currentPartitionId = (startPartitionId >= 0) ? startPartitionId : 0;
int nextReplicaShift = (fixedStartIndex >= 0) ? fixedStartIndex : rand.nextInt(brokerList.size());
for (int i = 0; i < nPartitions; ++i) {
if (currentPartitionId > 0 && (currentPartitionId % brokerList.size() == 0))
nextReplicaShift += 1;
int firstReplicaIndex = (currentPartitionId + startIndex) % brokerList.size();
List<Integer> replicaList = Lists.newArrayList(brokerList.get(firstReplicaIndex));
for (int j = 0; j < replicationFactor - 1; ++j)
replicaList.add(brokerList.get(replicaIndex(firstReplicaIndex, nextReplicaShift, j, brokerList.size())));
Collections.reverse(replicaList);
ret.putAll(currentPartitionId, replicaList);
currentPartitionId = currentPartitionId + 1;
}
return ret;
}
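// Worked example (illustrative; assumes startIndex = 0 and nextReplicaShift = 0, matching the
// javadoc table above): partition p0's first replica lands on broker-0; its second replica index
// is (0 + 1 + (0 + 0) % 4) % 5 = 1 -> broker-1, and its third is (0 + 1 + (0 + 1) % 4) % 5 = 2
// -> broker-2. Once currentPartitionId reaches a multiple of the broker count, nextReplicaShift
// is incremented so the later replica rows are shifted onto different brokers.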
public static void addPartitions(ZkClient zkClient, String topic, int numPartitions /* = 1*/, String replicaAssignmentStr/* = ""*/) {
Multimap<TopicAndPartition, Integer> existingPartitionsReplicaList = ZkUtils.getReplicaAssignmentForTopics(zkClient, Lists.newArrayList(topic));
if (existingPartitionsReplicaList.size() == 0)
throw new AdminOperationException("The topic %s does not exist", topic);
final Collection<Integer> existingReplicaList = Utils.head(existingPartitionsReplicaList)._2;
int partitionsToAdd = numPartitions - existingPartitionsReplicaList.size();
if (partitionsToAdd <= 0)
throw new AdminOperationException("The number of partitions for a topic can only be increased");
// create the new partition replication list
List<Integer> brokerList = ZkUtils.getSortedBrokerList(zkClient);
Multimap<Integer, Integer> newPartitionReplicaList = (replicaAssignmentStr == null || replicaAssignmentStr.isEmpty()) ? assignReplicasToBrokers(brokerList, partitionsToAdd, existingReplicaList.size(), Utils.head(existingReplicaList), existingPartitionsReplicaList.size()) : getManualReplicaAssignment(replicaAssignmentStr, Sets.newHashSet(brokerList), existingPartitionsReplicaList.size());
// check if manual assignment has the right replication factor
Multimap<Integer, Integer> unmatchedRepFactorList = Utils.filter(newPartitionReplicaList, new Predicate2<Integer, Collection<Integer>>() {
@Override
public boolean apply(Integer integer, Collection<Integer> p) {
return p.size() != existingReplicaList.size();
}
});
if (unmatchedRepFactorList.size() != 0)
throw new AdminOperationException("The replication factor in manual replication assignment " + " is not equal to the existing replication factor for the topic " + existingReplicaList.size());
logger.info("Add partition list for {} is {}", topic, newPartitionReplicaList);
Multimap<Integer, Integer> partitionReplicaList = Utils.map(existingPartitionsReplicaList, new Function2<TopicAndPartition, Collection<Integer>, Tuple2<Integer, Collection<Integer>>>() {
@Override
public Tuple2<Integer, Collection<Integer>> apply(TopicAndPartition _1, Collection<Integer> _2) {
return Tuple2.make(_1.partition, _2);
}
});
// add the new list
partitionReplicaList.putAll(newPartitionReplicaList);
createOrUpdateTopicPartitionAssignmentPathInZK(zkClient, topic, partitionReplicaList, new Properties(), /*update = */true);
}
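// Input format sketch (an assumption inferred from the parsing in getManualReplicaAssignment
// below): the manual assignment string lists one comma-separated entry per partition, each made
// of colon-separated broker ids, e.g. "0:1:2,1:2:3,2:3:4" assigns partition 0 to brokers
// {0, 1, 2}, partition 1 to {1, 2, 3}, and so on; every entry must use the same number of brokers.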
public static Multimap<Integer, Integer> getManualReplicaAssignment(String replicaAssignmentList, Set<Integer> availableBrokerList, int startPartitionId) {
String[] partitionList = replicaAssignmentList.split(",");
Multimap<Integer, Integer> ret = HashMultimap.create();
int partitionId = startPartitionId;
partitionList = Utils.takeRight(partitionList, (partitionList.length - partitionId));
for (int i = 0; i < partitionList.length; ++i) {
List<Integer> brokerList = Utils.mapList(partitionList[i].split(":"), new Function1<String, Integer>() {
@Override
public Integer apply(String s) {
return Integer.parseInt(s.trim());
}
});
if (brokerList.size() <= 0)
throw new AdminOperationException("replication factor must be larger than 0");
if (brokerList.size() != Sets.newHashSet(brokerList).size())
throw new AdminOperationException("duplicate brokers in replica assignment: " + brokerList);
if (!Utils.subsetOf(Sets.newHashSet(brokerList), availableBrokerList))
throw new AdminOperationException("some specified brokers not available. specified brokers: " + brokerList + "available broker:" + availableBrokerList);
ret.putAll(partitionId, brokerList);
if (ret.get(partitionId).size() != ret.get(startPartitionId).size())
throw new AdminOperationException("partition " + i + " has different replication factor: " + brokerList);
partitionId = partitionId + 1;
}
return ret;
}
public static void deleteTopic(ZkClient zkClient, String topic) {
zkClient.deleteRecursive(ZkUtils.getTopicPath(topic));
zkClient.deleteRecursive(ZkUtils.getTopicConfigPath(topic));
}
public static Boolean topicExists(ZkClient zkClient, String topic) {
return zkClient.exists(ZkUtils.getTopicPath(topic));
}
public static void createTopic(ZkClient zkClient, String topic, int partitions, int replicationFactor, Properties topicConfig /*= new Properties*/) {
List<Integer> brokerList = ZkUtils.getSortedBrokerList(zkClient);
Multimap<Integer, Integer> replicaAssignment = assignReplicasToBrokers(brokerList, partitions, replicationFactor);
createOrUpdateTopicPartitionAssignmentPathInZK(zkClient, topic, replicaAssignment, topicConfig, false);
}
public static void createOrUpdateTopicPartitionAssignmentPathInZK(ZkClient zkClient, String topic, final Multimap<Integer, Integer> partitionReplicaAssignment, Properties config /*= new Properties*/) {
createOrUpdateTopicPartitionAssignmentPathInZK(zkClient, topic, partitionReplicaAssignment, config, false);
}
public static void createOrUpdateTopicPartitionAssignmentPathInZK(ZkClient zkClient, String topic, final Multimap<Integer, Integer> partitionReplicaAssignment, Properties config /*= new Properties*/, boolean update/* = false*/) {
// validate arguments
Topic.validate(topic);
LogConfigs.validate(config);
checkState(Utils.mapSet(partitionReplicaAssignment, new Function2<Integer, Collection<Integer>, Integer>() {
@Override
public Integer apply(Integer arg1, Collection<Integer> _) {
return _.size();
}
}).size() == 1, "All partitions should have the same number of replicas.");
String topicPath = ZkUtils.getTopicPath(topic);
if (!update && zkClient.exists(topicPath))
throw new TopicExistsException("Topic \"%s\" already exists.", topic);
Utils.foreach(partitionReplicaAssignment, new Callable2<Integer, Collection<Integer>>() {
@Override
public void apply(Integer integer, Collection<Integer> reps) {
checkState(reps.size() == Sets.newHashSet(reps).size(), "Duplicate replica assignment found: " + partitionReplicaAssignment);
}
});
// write out the config if there is any, this isn't transactional with the partition assignments
writeTopicConfig(zkClient, topic, config);
// create the partition assignment
writeTopicPartitionAssignment(zkClient, topic, partitionReplicaAssignment, update);
}
private static void writeTopicPartitionAssignment(ZkClient zkClient, String topic, Multimap<Integer, Integer> replicaAssignment, boolean update) {
try {
String zkPath = ZkUtils.getTopicPath(topic);
String jsonPartitionData = ZkUtils.replicaAssignmentZkData(Utils.map(replicaAssignment, new Function2<Integer, Collection<Integer>, Tuple2<String, Collection<Integer>>>() {
@Override
public Tuple2<String, Collection<Integer>> apply(Integer _1, Collection<Integer> _2) {
return Tuple2.make(_1 + "", _2);
}
}));
if (!update) {
logger.info("Topic creation {}", jsonPartitionData);
ZkUtils.createPersistentPath(zkClient, zkPath, jsonPartitionData);
} else {
logger.info("Topic update {}", jsonPartitionData);
ZkUtils.updatePersistentPath(zkClient, zkPath, jsonPartitionData);
}
logger.debug("Updated path {} with {} for replica assignment", zkPath, jsonPartitionData);
} catch (ZkNodeExistsException e) {
throw new TopicExistsException("topic %s already exists", topic);
} catch (Throwable e) {
throw new AdminOperationException(e.toString());
}
}
/**
* Update the config for an existing topic and create a change notification so the change will propagate to other brokers
*
* @param zkClient: The ZkClient handle used to write the new config to zookeeper
* @param topic: The topic for which configs are being changed
* @param configs: The final set of configs that will be applied to the topic. If any new configs need to be added or
* existing configs need to be deleted, it should be done prior to invoking this API
*/
public static void changeTopicConfig(ZkClient zkClient, String topic, Properties configs) {
if (!topicExists(zkClient, topic))
throw new AdminOperationException("Topic \"%s\" does not exist.", (topic));
// remove the topic overrides
LogConfigs.validate(configs);
// write the new config--may not exist if there were previously no overrides
writeTopicConfig(zkClient, topic, configs);
// create the change notification
zkClient.createPersistentSequential(ZkUtils.TopicConfigChangesPath + "/" + TopicConfigChangeZnodePrefix, Json.encode(topic));
}
/**
* Write out the topic config to zk, if there is any
*/
private static void writeTopicConfig(ZkClient zkClient, String topic, Properties config) {
Map<String, Object> map = Maps.newHashMap();
map.put("version", 1);
map.put("config", config);
ZkUtils.updatePersistentPath(zkClient, ZkUtils.getTopicConfigPath(topic), Json.encode(map));
}
/**
* Read the topic config (if any) from zk
*/
public static Properties fetchTopicConfig(ZkClient zkClient, String topic) {
String str = zkClient.readData(ZkUtils.getTopicConfigPath(topic), true);
Properties props = new Properties();
if (str != null) {
JSONObject map = Json.parseFull(str);
if (map == null) {
// there are no config overrides
} else {
checkState(map.getInteger("version") == 1);
JSONObject config = map.getJSONObject("config");
if (config == null) {
throw new IllegalArgumentException("Invalid topic config: " + str);
}
props.putAll(config);
// case o => throw new IllegalArgumentException("Unexpected value in config: " + str)
}
}
return props;
}
public static Map<String, Properties> fetchAllTopicConfigs(final ZkClient zkClient) {
return Utils.map(ZkUtils.getAllTopics(zkClient), new Function1<String, Tuple2<String, Properties>>() {
@Override
public Tuple2<String, Properties> apply(String topic) {
return Tuple2.make(topic, fetchTopicConfig(zkClient, topic));
}
});
}
public static TopicMetadata fetchTopicMetadataFromZk(String topic, ZkClient zkClient) {
return fetchTopicMetadataFromZk(topic, zkClient, Maps.<Integer, Broker> newHashMap());
}
public static Set<TopicMetadata> fetchTopicMetadataFromZk(Set<String> topics, final ZkClient zkClient) {
final Map<Integer, Broker> cachedBrokerInfo = Maps.newHashMap();
return Utils.mapSet(topics, new Function1<String, TopicMetadata>() {
@Override
public TopicMetadata apply(String topic) {
return fetchTopicMetadataFromZk(topic, zkClient, cachedBrokerInfo);
}
});
}
private static TopicMetadata fetchTopicMetadataFromZk(final String topic, final ZkClient zkClient, final Map<Integer, Broker> cachedBrokerInfo) {
if (ZkUtils.pathExists(zkClient, ZkUtils.getTopicPath(topic))) {
Multimap<Integer, Integer> topicPartitionAssignment = ZkUtils.getPartitionAssignmentForTopics(zkClient, Lists.newArrayList(topic)).get(topic);
List<Tuple2<Integer, Collection<Integer>>> sortedPartitions = Utils.sortWith(Utils.toList(topicPartitionAssignment), new Comparator<Tuple2<Integer, Collection<Integer>>>() {
@Override
public int compare(Tuple2<Integer, Collection<Integer>> m1, Tuple2<Integer, Collection<Integer>> m2) {
return m1._1 < m2._1 ? -1 : (m1._1.equals(m2._1) ? 0 : 1);
}
});
List<PartitionMetadata> partitionMetadata = Utils.mapList(sortedPartitions, new Function1<Tuple2<Integer, Collection<Integer>>, PartitionMetadata>() {
@Override
public PartitionMetadata apply(Tuple2<Integer, Collection<Integer>> partitionMap) {
int partition = partitionMap._1;
Collection<Integer> replicas = partitionMap._2;
List<Integer> inSyncReplicas = ZkUtils.getInSyncReplicasForPartition(zkClient, topic, partition);
Integer leader = ZkUtils.getLeaderForPartition(zkClient, topic, partition);
logger.debug("replicas = " + replicas + ", in sync replicas = " + inSyncReplicas + ", leader = " + leader);
Broker leaderInfo = null;
List<Broker> replicaInfo = null;
List<Broker> isrInfo = null;
try {
if (leader == null) {
throw new LeaderNotAvailableException("No leader exists for partition " + partition);
}
try {
leaderInfo = Utils.head(getBrokerInfoFromCache(zkClient, cachedBrokerInfo, Lists.newArrayList(leader)));
} catch (Throwable e) {
throw new LeaderNotAvailableException(e, "Leader not available for partition [%s,%d]", topic, partition);
}
try {
replicaInfo = getBrokerInfoFromCache(zkClient, cachedBrokerInfo, replicas);
isrInfo = getBrokerInfoFromCache(zkClient, cachedBrokerInfo, inSyncReplicas);
} catch (Throwable e) {
throw new ReplicaNotAvailableException(e);
}
if (replicaInfo.size() < replicas.size()) {
final List<Integer> brokerIds = Utils.mapList(replicaInfo, new Function1<Broker, Integer>() {
@Override
public Integer apply(Broker _) {
return _.id;
}
});
throw new ReplicaNotAvailableException("Replica information not available for following brokers: " + Utils.filter(replicas, new Predicate<Integer>() {
@Override
public boolean apply(Integer _) {
return !brokerIds.contains(_);
}
}));
}
if (isrInfo.size() < inSyncReplicas.size()) {
final List<Integer> brokerIds = Utils.mapList(isrInfo, new Function1<Broker, Integer>() {
@Override
public Integer apply(Broker _) {
return _.id;
}
});
throw new ReplicaNotAvailableException("In Sync Replica information not available for following brokers: " + Utils.filter(inSyncReplicas, new Predicate<Integer>() {
@Override
public boolean apply(Integer _) {
return !brokerIds.contains(_);
}
}));
}
return new PartitionMetadata(partition, leaderInfo, replicaInfo, isrInfo, ErrorMapping.NoError);
} catch (Throwable e) {
logger.debug("Error while fetching metadata for partition [{},{}]", topic, partition, e);
return new PartitionMetadata(partition, leaderInfo, replicaInfo, isrInfo, ErrorMapping.codeFor(e.getClass()));
}
}
});
return new TopicMetadata(topic, partitionMetadata);
} else {
// topic doesn't exist, send appropriate error code
return new TopicMetadata(topic, Lists.<PartitionMetadata> newArrayList(), ErrorMapping.UnknownTopicOrPartitionCode);
}
}
private static List<Broker> getBrokerInfoFromCache(final ZkClient zkClient, final Map<Integer, Broker> cachedBrokerInfo, Collection<Integer> brokerIds) {
final List<Integer> failedBrokerIds = Lists.newArrayList();
final Map<Integer, Broker> finalCachedBrokerInfo = cachedBrokerInfo;
final List<Broker> brokerMetadata = Utils.mapList(brokerIds, new Function1<Integer, Broker>() {
@Override
public Broker apply(Integer id) {
Broker brokerInfo = finalCachedBrokerInfo.get(id);
if (brokerInfo != null)
return brokerInfo; // return broker info from the cache
// fetch it from zookeeper
brokerInfo = ZkUtils.getBrokerInfo(zkClient, id);
if (brokerInfo != null) {
cachedBrokerInfo.put(id, brokerInfo);
return brokerInfo;
}
failedBrokerIds.add(id);
return null;
}
});
return Utils.filter(brokerMetadata, new Predicate<Broker>() {
@Override
public boolean apply(Broker input) {
return input != null;
}
});
}
private static int replicaIndex(int firstReplicaIndex, int secondReplicaShift, int replicaIndex, int nBrokers) {
int shift = 1 + (secondReplicaShift + replicaIndex) % (nBrokers - 1);
return (firstReplicaIndex + shift) % nBrokers;
}
}
|
|
package org.jetbrains.plugins.scala.lang.formatting.settings;
import com.intellij.application.options.CodeStyleAbstractPanel;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.highlighter.EditorHighlighter;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.psi.PsiFile;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.uiDesigner.core.GridConstraints;
import com.intellij.uiDesigner.core.GridLayoutManager;
import com.intellij.uiDesigner.core.Spacer;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.plugins.scala.ScalaBundle;
import org.jetbrains.plugins.scala.ScalaFileType;
import org.jetbrains.plugins.scala.highlighter.ScalaEditorHighlighter;
import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.util.ResourceBundle;
/**
* User: Dmitry Naydanov
* Date: 4/23/12
*/
public class MultiLineStringCodeStylePanel extends CodeStyleAbstractPanel {
private JPanel panel1;
private JComboBox supportLevelChooser;
private JLabel supportLevelLabel;
private JTextField marginCharTextField;
private JLabel marginCharLabel;
private JCheckBox openingQuotesOnNewCheckBox;
private JCheckBox keepOpeningQuotesCheckBox;
private JSpinner marginIndentSpinner;
private JCheckBox processMarginCheckBox;
protected MultiLineStringCodeStylePanel(CodeStyleSettings settings) {
super(settings);
ScalaCodeStyleSettings scalaSettings = settings.getCustomSettings(ScalaCodeStyleSettings.class);
marginIndentSpinner.setModel(new SpinnerNumberModel(1, 1, null, 1));
setSettings(scalaSettings);
//validation
marginCharTextField.addKeyListener(new KeyAdapter() {
@Override
public void keyTyped(KeyEvent e) {
final String text = marginCharTextField.getText();
final String selectedText = marginCharTextField.getSelectedText();
if (isInvalidInput(text, selectedText, e)) {
e.consume();
}
}
});
marginCharTextField.addFocusListener(new NonEmptyFieldValidator(marginCharTextField));
supportLevelChooser.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
if (supportLevelChooser.getSelectedIndex() != ScalaCodeStyleSettings.MULTILINE_STRING_ALL) {
processMarginCheckBox.setSelected(false);
processMarginCheckBox.setEnabled(false);
} else {
processMarginCheckBox.setEnabled(true);
}
}
});
}
@Override
protected int getRightMargin() {
return 0;
}
@Override
protected String getTabTitle() {
return ScalaBundle.message("multi.line.string.settings.panel");
}
@Override
protected void prepareForReformat(PsiFile psiFile) {
}
@Override
protected EditorHighlighter createHighlighter(EditorColorsScheme scheme) {
return new ScalaEditorHighlighter(null, null, scheme);
}
@NotNull
@Override
protected FileType getFileType() {
return ScalaFileType.INSTANCE;
}
@Override
protected String getPreviewText() {
return "";
}
@Override
public void apply(CodeStyleSettings settings) {
if (!isModified(settings)) return;
ScalaCodeStyleSettings scalaSettings = settings.getCustomSettings(ScalaCodeStyleSettings.class);
scalaSettings.MULTILINE_STRING_SUPORT = supportLevelChooser.getSelectedIndex();
scalaSettings.MARGIN_CHAR = marginCharTextField.getText().charAt(0);
scalaSettings.MULTI_LINE_QUOTES_ON_NEW_LINE = openingQuotesOnNewCheckBox.isSelected();
scalaSettings.KEEP_MULTI_LINE_QUOTES = keepOpeningQuotesCheckBox.isSelected();
scalaSettings.MULTI_LINE_STRING_MARGIN_INDENT = (Integer) marginIndentSpinner.getValue();
scalaSettings.PROCESS_MARGIN_ON_COPY_PASTE = processMarginCheckBox.isSelected();
}
@Override
public boolean isModified(CodeStyleSettings settings) {
ScalaCodeStyleSettings scalaSettings = settings.getCustomSettings(ScalaCodeStyleSettings.class);
if (scalaSettings.MULTILINE_STRING_SUPORT != supportLevelChooser.getSelectedIndex()) return true;
if (!(scalaSettings.MARGIN_CHAR + "").equals(marginCharTextField.getText())) return true;
if (scalaSettings.MULTI_LINE_QUOTES_ON_NEW_LINE != openingQuotesOnNewCheckBox.isSelected()) return true;
if (scalaSettings.KEEP_MULTI_LINE_QUOTES != keepOpeningQuotesCheckBox.isSelected()) return true;
if (scalaSettings.MULTI_LINE_STRING_MARGIN_INDENT != (Integer) marginIndentSpinner.getValue()) return true;
if (scalaSettings.PROCESS_MARGIN_ON_COPY_PASTE != processMarginCheckBox.isSelected()) return true;
return false;
}
@Override
public JComponent getPanel() {
return panel1;
}
@Override
protected void resetImpl(CodeStyleSettings settings) {
ScalaCodeStyleSettings scalaSettings = settings.getCustomSettings(ScalaCodeStyleSettings.class);
setSettings(scalaSettings);
}
private void setSettings(ScalaCodeStyleSettings scalaSettings) {
supportLevelChooser.setSelectedIndex(scalaSettings.MULTILINE_STRING_SUPORT);
marginCharTextField.setText(scalaSettings.MARGIN_CHAR + "");
openingQuotesOnNewCheckBox.setSelected(scalaSettings.MULTI_LINE_QUOTES_ON_NEW_LINE);
keepOpeningQuotesCheckBox.setSelected(scalaSettings.KEEP_MULTI_LINE_QUOTES);
marginIndentSpinner.setValue(scalaSettings.MULTI_LINE_STRING_MARGIN_INDENT);
processMarginCheckBox.setSelected(scalaSettings.PROCESS_MARGIN_ON_COPY_PASTE);
}
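// Blocks further typing once the margin-char field already holds a character, unless the key is
// an action key, backspace, or delete, or the whole existing text is selected and about to be
// replaced.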
private static boolean isInvalidInput(@NotNull String text, String selectedText, KeyEvent e) {
return text.length() > 0 && !e.isActionKey() && e.getKeyChar() != KeyEvent.VK_BACK_SPACE &&
e.getKeyChar() != KeyEvent.VK_DELETE && !text.equals(selectedText);
}
{
// GUI initializer generated by IntelliJ IDEA GUI Designer
// >>> IMPORTANT!! <<<
// DO NOT EDIT OR ADD ANY CODE HERE!
$$$setupUI$$$();
}
/**
* Method generated by IntelliJ IDEA GUI Designer
* >>> IMPORTANT!! <<<
* DO NOT edit this method OR call it in your code!
*
* @noinspection ALL
*/
private void $$$setupUI$$$() {
panel1 = new JPanel();
panel1.setLayout(new GridLayoutManager(7, 3, new Insets(0, 0, 0, 0), -1, -1));
panel1.setBorder(BorderFactory.createTitledBorder(BorderFactory.createEmptyBorder(10, 10, 10, 10), null));
supportLevelLabel = new JLabel();
this.$$$loadLabelText$$$(supportLevelLabel, ResourceBundle.getBundle("org/jetbrains/plugins/scala/ScalaBundle").getString("multi.line.string.support.option"));
panel1.add(supportLevelLabel, new GridConstraints(0, 0, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
supportLevelChooser = new JComboBox();
final DefaultComboBoxModel defaultComboBoxModel1 = new DefaultComboBoxModel();
defaultComboBoxModel1.addElement("None");
defaultComboBoxModel1.addElement("Closing quotes on new line");
defaultComboBoxModel1.addElement("Insert margin char");
supportLevelChooser.setModel(defaultComboBoxModel1);
panel1.add(supportLevelChooser, new GridConstraints(0, 1, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_HORIZONTAL, GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
final Spacer spacer1 = new Spacer();
panel1.add(spacer1, new GridConstraints(0, 2, 1, 1, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_HORIZONTAL, GridConstraints.SIZEPOLICY_WANT_GROW, 1, null, null, null, 0, false));
marginCharLabel = new JLabel();
this.$$$loadLabelText$$$(marginCharLabel, ResourceBundle.getBundle("org/jetbrains/plugins/scala/ScalaBundle").getString("multi.line.string.support.margin.char.label"));
panel1.add(marginCharLabel, new GridConstraints(1, 0, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
marginCharTextField = new JTextField();
marginCharTextField.setText("|");
panel1.add(marginCharTextField, new GridConstraints(1, 1, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_HORIZONTAL, GridConstraints.SIZEPOLICY_WANT_GROW, GridConstraints.SIZEPOLICY_FIXED, null, new Dimension(150, -1), null, 0, false));
final JLabel label1 = new JLabel();
this.$$$loadLabelText$$$(label1, ResourceBundle.getBundle("org/jetbrains/plugins/scala/ScalaBundle").getString("multi.line.string.margin.char.indent"));
panel1.add(label1, new GridConstraints(2, 0, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
keepOpeningQuotesCheckBox = new JCheckBox();
this.$$$loadButtonText$$$(keepOpeningQuotesCheckBox, ResourceBundle.getBundle("org/jetbrains/plugins/scala/ScalaBundle").getString("multi.line.string.keep.opening.quotes"));
panel1.add(keepOpeningQuotesCheckBox, new GridConstraints(4, 0, 1, 2, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
final Spacer spacer2 = new Spacer();
panel1.add(spacer2, new GridConstraints(6, 1, 1, 1, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_VERTICAL, 1, GridConstraints.SIZEPOLICY_WANT_GROW, null, null, null, 0, false));
marginIndentSpinner = new JSpinner();
panel1.add(marginIndentSpinner, new GridConstraints(2, 1, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_HORIZONTAL, GridConstraints.SIZEPOLICY_WANT_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
openingQuotesOnNewCheckBox = new JCheckBox();
this.$$$loadButtonText$$$(openingQuotesOnNewCheckBox, ResourceBundle.getBundle("org/jetbrains/plugins/scala/ScalaBundle").getString("multi.line.string.opening.quotes.on.new.line"));
panel1.add(openingQuotesOnNewCheckBox, new GridConstraints(3, 0, 1, 2, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
processMarginCheckBox = new JCheckBox();
this.$$$loadButtonText$$$(processMarginCheckBox, ResourceBundle.getBundle("org/jetbrains/plugins/scala/ScalaBundle").getString("multi.line.string.process.margin.on.copy.paste"));
panel1.add(processMarginCheckBox, new GridConstraints(5, 0, 1, 2, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
}
/**
* @noinspection ALL
*/
private void $$$loadLabelText$$$(JLabel component, String text) {
StringBuffer result = new StringBuffer();
boolean haveMnemonic = false;
char mnemonic = '\0';
int mnemonicIndex = -1;
for (int i = 0; i < text.length(); i++) {
if (text.charAt(i) == '&') {
i++;
if (i == text.length()) break;
if (!haveMnemonic && text.charAt(i) != '&') {
haveMnemonic = true;
mnemonic = text.charAt(i);
mnemonicIndex = result.length();
}
}
result.append(text.charAt(i));
}
component.setText(result.toString());
if (haveMnemonic) {
component.setDisplayedMnemonic(mnemonic);
component.setDisplayedMnemonicIndex(mnemonicIndex);
}
}
/**
* @noinspection ALL
*/
private void $$$loadButtonText$$$(AbstractButton component, String text) {
StringBuffer result = new StringBuffer();
boolean haveMnemonic = false;
char mnemonic = '\0';
int mnemonicIndex = -1;
for (int i = 0; i < text.length(); i++) {
if (text.charAt(i) == '&') {
i++;
if (i == text.length()) break;
if (!haveMnemonic && text.charAt(i) != '&') {
haveMnemonic = true;
mnemonic = text.charAt(i);
mnemonicIndex = result.length();
}
}
result.append(text.charAt(i));
}
component.setText(result.toString());
if (haveMnemonic) {
component.setMnemonic(mnemonic);
component.setDisplayedMnemonicIndex(mnemonicIndex);
}
}
/**
* @noinspection ALL
*/
public JComponent $$$getRootComponent$$$() {
return panel1;
}
private static class NonEmptyFieldValidator extends FocusAdapter {
private String myOldText;
private final JTextField myField;
private NonEmptyFieldValidator(JTextField field) {
super();
myField = field;
}
@Override
public void focusGained(FocusEvent e) {
myOldText = myField.getText();
}
@Override
public void focusLost(FocusEvent e) {
if (myField.getText().length() == 0) {
myField.setText(myOldText);
}
}
}
}
|
|
package org.esupportail.smsu.dao.beans;
import java.io.Serializable;
/**
* The class that represents customized groups.
*/
public class CustomizedGroup implements Serializable {
/**
* Hibernate reference for customized group.
*/
public static final String REF = "CustomizedGroup";
/**
* Hibernate property for the account.
*/
public static final String PROP_ACCOUNT = "Account";
/**
* Hibernate property for the number of recipient by sms.
*/
public static final String PROP_QUOTA_ORDER = "QuotaOrder";
/**
* Hibernate property for the quota of sms.
*/
public static final String PROP_QUOTA_SMS = "QuotaSms";
/**
* Hibernate property for the number of consumed sms.
*/
public static final String PROP_CONSUMED_SMS = "ConsumedSms";
/**
* Hibernate property for the role.
*/
public static final String PROP_ROLE = "Role";
/**
* Hibernate property for the label.
*/
public static final String PROP_LABEL = "Label";
/**
* Hibernate property for the identifier.
*/
public static final String PROP_ID = "Id";
/**
* The serialization id.
*/
private static final long serialVersionUID = -3409241721229863372L;
/**
* customized group identifier.
*/
private java.lang.Integer id;
/**
* label of the group.
*/
private java.lang.String label;
/**
* quota of sms.
*/
private java.lang.Long quotaSms;
/**
* number of recipient by sms.
*/
private java.lang.Long quotaOrder;
/**
* number of consumed sms.
*/
private java.lang.Long consumedSms;
/**
* account associated to the group.
*/
private Account account;
/**
* role.
*/
private Role role;
/**
* collection of supervisors.
*/
private java.util.Set<Person> supervisors;
/**
* Bean constructor.
*/
public CustomizedGroup() {
super();
quotaSms = 0L;
quotaOrder = 0L;
consumedSms = 0L;
}
/**
* Constructor for required fields.
*/
public CustomizedGroup(
final java.lang.Integer id,
final Account account,
final Role role,
final java.lang.String label,
final java.lang.Long quotaSms,
final java.lang.Long quotaOrder,
final java.lang.Long consumedSms) {
this.setId(id);
this.setAccount(account);
this.setRole(role);
this.setLabel(label);
this.setQuotaSms(quotaSms);
this.setQuotaOrder(quotaOrder);
this.setConsumedSms(consumedSms);
}
/**
* Return the unique identifier of this class.
* @hibernate.id
* generator-class="native"
* column="CGR_ID"
*/
public java.lang.Integer getId() {
return id;
}
/**
* Set the unique identifier of this class.
* @param id the new ID
*/
public void setId(final java.lang.Integer id) {
this.id = id;
}
/**
* Return the value associated with the column: CGR_LABEL.
*/
public java.lang.String getLabel() {
return label;
}
/**
* Set the value related to the column: CGR_LABEL.
* @param label the CGR_LABEL value
*/
public void setLabel(final java.lang.String label) {
this.label = label;
}
/**
* Return the value associated with the column: CGR_QUOTA_SMS.
*/
public java.lang.Long getQuotaSms() {
return quotaSms;
}
/**
* Set the value related to the column: CGR_QUOTA_SMS.
* @param quotaSms the CGR_QUOTA_SMS value
*/
public void setQuotaSms(final java.lang.Long quotaSms) {
this.quotaSms = quotaSms;
}
	/**
	 * Check whether the remaining quota allows this group to send {@code nbToSend} messages.
	 */
public boolean checkQuotaSms(int nbToSend) {
long nbAvailable = getQuotaSms() - getConsumedSms();
return nbAvailable >= nbToSend;
}
/**
* Return the value associated with the column: CGR_QUOTA_ORDER.
*/
public java.lang.Long getQuotaOrder() {
return quotaOrder;
}
/**
* Set the value related to the column: CGR_QUOTA_ORDER.
* @param quotaOrder the CGR_QUOTA_ORDER value
*/
public void setQuotaOrder(final java.lang.Long quotaOrder) {
this.quotaOrder = quotaOrder;
}
/**
* Return the value associated with the column: CGR_CONSUMED_SMS.
*/
public java.lang.Long getConsumedSms() {
return consumedSms;
}
/**
* Set the value related to the column: CGR_CONSUMED_SMS.
* @param consumedSms the CGR_CONSUMED_SMS value
*/
public void setConsumedSms(final java.lang.Long consumedSms) {
this.consumedSms = consumedSms;
}
/**
* Return the value associated with the column: ACC_ID.
*/
public Account getAccount() {
return account;
}
/**
* Set the value related to the column: ACC_ID.
* @param account the ACC_ID value
*/
public void setAccount(final Account account) {
this.account = account;
}
/**
* Return the value associated with the column: ROL_ID.
*/
public Role getRole() {
return role;
}
/**
* Set the value related to the column: ROL_ID.
* @param role the ROL_ID value
*/
public void setRole(final Role role) {
this.role = role;
}
/**
* Return the value associated with the column: Supervisors.
*/
public java.util.Set<Person> getSupervisors() {
return supervisors;
}
/**
* Set the value related to the column: Supervisors.
* @param supervisors the Supervisors value
*/
public void setSupervisors(final java.util.Set<Person> supervisors) {
this.supervisors = supervisors;
}
/**
* add person to the collection of supervisors.
* @param person
*/
public void addToSupervisors(final Person person) {
if (null == getSupervisors()) {
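			// NOTE: TreeSet uses natural ordering, so Person must implement Comparable
			// (or a Comparator should be supplied); otherwise add() throws ClassCastException.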
setSupervisors(new java.util.TreeSet<Person>());
}
getSupervisors().add(person);
}
/**
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(final Object obj) {
if (null == obj) {
return false;
}
if (!(obj instanceof CustomizedGroup)) {
return false;
} else {
CustomizedGroup customizedGroup = (CustomizedGroup) obj;
if (null == this.getId() || null == customizedGroup.getId()) {
return false;
} else {
return this.getId().equals(customizedGroup.getId());
}
}
}
/**
* @see java.lang.Object#hashCode()
*/
	@Override
	public int hashCode() {
		// keep hashCode consistent with equals(), which compares by id
		return null == getId() ? super.hashCode() : getId().hashCode();
	}
/**
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
return "CustomizedGroup#" + hashCode() + "[id=[" + id + "], label=[" + label
+ "]]";
}
}
|
|
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.extractor;
import static com.google.common.truth.Truth.assertThat;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.C;
import org.junit.Test;
import org.junit.runner.RunWith;
/** Unit test for {@link ConstantBitrateSeekMap}. */
@RunWith(AndroidJUnit4.class)
public final class ConstantBitrateSeekMapTest {
private ConstantBitrateSeekMap constantBitrateSeekMap;
@Test
public void isSeekable_forKnownInputLength_returnSeekable() {
constantBitrateSeekMap =
new ConstantBitrateSeekMap(
/* inputLength= */ 1000,
/* firstFrameBytePosition= */ 0,
/* bitrate= */ 8_000,
/* frameSize= */ 100);
assertThat(constantBitrateSeekMap.isSeekable()).isTrue();
}
@Test
public void isSeekable_forUnknownInputLength_returnUnseekable() {
constantBitrateSeekMap =
new ConstantBitrateSeekMap(
/* inputLength= */ C.LENGTH_UNSET,
/* firstFrameBytePosition= */ 0,
/* bitrate= */ 8_000,
/* frameSize= */ 100);
assertThat(constantBitrateSeekMap.isSeekable()).isFalse();
}
@Test
public void getSeekPoints_forUnseekableInput_returnSeekPoint0() {
int firstBytePosition = 100;
constantBitrateSeekMap =
new ConstantBitrateSeekMap(
/* inputLength= */ C.LENGTH_UNSET,
/* firstFrameBytePosition= */ firstBytePosition,
/* bitrate= */ 8_000,
/* frameSize= */ 100);
SeekMap.SeekPoints seekPoints = constantBitrateSeekMap.getSeekPoints(/* timeUs= */ 123);
assertThat(seekPoints.first.timeUs).isEqualTo(0);
assertThat(seekPoints.first.position).isEqualTo(firstBytePosition);
assertThat(seekPoints.second).isEqualTo(seekPoints.first);
}
@Test
public void getDurationUs_forKnownInputLength_returnCorrectDuration() {
constantBitrateSeekMap =
new ConstantBitrateSeekMap(
/* inputLength= */ 2_300,
/* firstFrameBytePosition= */ 100,
/* bitrate= */ 8_000,
/* frameSize= */ 100);
// Bitrate = 8000 (bits/s) = 1000 (bytes/s)
// FrameSize = 100 (bytes), so 1 frame = 1s = 100_000 us
// Input length = 2300 (bytes), first frame = 100, so duration = 2_200_000 us.
assertThat(constantBitrateSeekMap.getDurationUs()).isEqualTo(2_200_000);
}
@Test
  public void getDurationUs_forUnknownInputLength_returnUnknownDuration() {
constantBitrateSeekMap =
new ConstantBitrateSeekMap(
/* inputLength= */ C.LENGTH_UNSET,
/* firstFrameBytePosition= */ 100,
/* bitrate= */ 8_000,
/* frameSize= */ 100);
assertThat(constantBitrateSeekMap.getDurationUs()).isEqualTo(C.TIME_UNSET);
}
@Test
public void getSeekPoints_forSeekableInput_forSyncPosition0_return1SeekPoint() {
int firstBytePosition = 100;
constantBitrateSeekMap =
new ConstantBitrateSeekMap(
/* inputLength= */ 2_300,
/* firstFrameBytePosition= */ firstBytePosition,
/* bitrate= */ 8_000,
/* frameSize= */ 100);
SeekMap.SeekPoints seekPoints = constantBitrateSeekMap.getSeekPoints(/* timeUs= */ 0);
assertThat(seekPoints.first.timeUs).isEqualTo(0);
assertThat(seekPoints.first.position).isEqualTo(firstBytePosition);
assertThat(seekPoints.second).isEqualTo(seekPoints.first);
}
@Test
public void getSeekPoints_forSeekableInput_forSeekPointAtSyncPosition_return1SeekPoint() {
constantBitrateSeekMap =
new ConstantBitrateSeekMap(
/* inputLength= */ 2_300,
/* firstFrameBytePosition= */ 100,
/* bitrate= */ 8_000,
/* frameSize= */ 100);
SeekMap.SeekPoints seekPoints = constantBitrateSeekMap.getSeekPoints(/* timeUs= */ 1_200_000);
// Bitrate = 8000 (bits/s) = 1000 (bytes/s)
// FrameSize = 100 (bytes), so 1 frame = 1s = 100_000 us
assertThat(seekPoints.first.timeUs).isEqualTo(1_200_000);
assertThat(seekPoints.first.position).isEqualTo(1300);
assertThat(seekPoints.second).isEqualTo(seekPoints.first);
}
@Test
public void getSeekPoints_forSeekableInput_forNonSyncSeekPosition_return2SeekPoints() {
constantBitrateSeekMap =
new ConstantBitrateSeekMap(
/* inputLength= */ 2_300,
/* firstFrameBytePosition= */ 100,
/* bitrate= */ 8_000,
/* frameSize= */ 100);
SeekMap.SeekPoints seekPoints = constantBitrateSeekMap.getSeekPoints(/* timeUs= */ 345_678);
// Bitrate = 8000 (bits/s) = 1000 (bytes/s)
// FrameSize = 100 (bytes), so 1 frame = 1s = 100_000 us
assertThat(seekPoints.first.timeUs).isEqualTo(300_000);
assertThat(seekPoints.first.position).isEqualTo(400);
assertThat(seekPoints.second.timeUs).isEqualTo(400_000);
assertThat(seekPoints.second.position).isEqualTo(500);
}
@Test
public void getSeekPoints_forSeekableInput_forSeekPointWithinLastFrame_return1SeekPoint() {
constantBitrateSeekMap =
new ConstantBitrateSeekMap(
/* inputLength= */ 2_300,
/* firstFrameBytePosition= */ 100,
/* bitrate= */ 8_000,
/* frameSize= */ 100);
SeekMap.SeekPoints seekPoints = constantBitrateSeekMap.getSeekPoints(/* timeUs= */ 2_123_456);
assertThat(seekPoints.first.timeUs).isEqualTo(2_100_000);
assertThat(seekPoints.first.position).isEqualTo(2_200);
assertThat(seekPoints.second).isEqualTo(seekPoints.first);
}
@Test
public void getSeekPoints_forSeekableInput_forSeekPointAtEndOfStream_return1SeekPoint() {
constantBitrateSeekMap =
new ConstantBitrateSeekMap(
/* inputLength= */ 2_300,
/* firstFrameBytePosition= */ 100,
/* bitrate= */ 8_000,
/* frameSize= */ 100);
SeekMap.SeekPoints seekPoints = constantBitrateSeekMap.getSeekPoints(/* timeUs= */ 2_200_000);
assertThat(seekPoints.first.timeUs).isEqualTo(2_100_000);
assertThat(seekPoints.first.position).isEqualTo(2_200);
assertThat(seekPoints.second).isEqualTo(seekPoints.first);
}
@Test
public void getTimeUsAtPosition_forPosition0_return0() {
constantBitrateSeekMap =
new ConstantBitrateSeekMap(
/* inputLength= */ 2_300,
/* firstFrameBytePosition= */ 100,
/* bitrate= */ 8_000,
/* frameSize= */ 100);
long timeUs = constantBitrateSeekMap.getTimeUsAtPosition(0);
assertThat(timeUs).isEqualTo(0);
}
@Test
public void getTimeUsAtPosition_forPositionWithinStream_returnCorrectTime() {
constantBitrateSeekMap =
new ConstantBitrateSeekMap(
/* inputLength= */ 2_300,
/* firstFrameBytePosition= */ 100,
/* bitrate= */ 8_000,
/* frameSize= */ 100);
long timeUs = constantBitrateSeekMap.getTimeUsAtPosition(1234);
assertThat(timeUs).isEqualTo(1_134_000);
}
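  // A further sanity check on the same linear byte-to-time mapping described in the
  // comments above (1 byte = 1_000 us at 8_000 bits/s); illustrative rather than exhaustive.
  @Test
  public void getTimeUsAtPosition_forPositionAtFrameBoundary_returnCorrectTime() {
    constantBitrateSeekMap =
        new ConstantBitrateSeekMap(
            /* inputLength= */ 2_300,
            /* firstFrameBytePosition= */ 100,
            /* bitrate= */ 8_000,
            /* frameSize= */ 100);
    // 600 - 100 = 500 bytes after the first frame; at 1_000 bytes/s that is 500_000 us.
    long timeUs = constantBitrateSeekMap.getTimeUsAtPosition(600);
    assertThat(timeUs).isEqualTo(500_000);
  }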
@Test
public void getTimeUsAtPosition_forPositionAtEndOfStream_returnStreamDuration() {
constantBitrateSeekMap =
new ConstantBitrateSeekMap(
/* inputLength= */ 2_300,
/* firstFrameBytePosition= */ 100,
/* bitrate= */ 8_000,
/* frameSize= */ 100);
long timeUs = constantBitrateSeekMap.getTimeUsAtPosition(2300);
assertThat(timeUs).isEqualTo(constantBitrateSeekMap.getDurationUs());
}
}
|
|
/**
* Copyright (C) 2017 thinh ho
* This file is part of 'keestore' which is released under the MIT license.
* See LICENSE at the project root directory.
*/
package keestore.crypto;
import static org.junit.Assert.assertTrue;
import java.io.UnsupportedEncodingException;
import java.security.KeyFactory;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.SecureRandom;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.KeySpec;
import java.security.spec.PKCS8EncodedKeySpec;
import java.security.spec.X509EncodedKeySpec;
import javax.crypto.Cipher;
import javax.crypto.SecretKey;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import org.junit.Before;
import org.junit.Test;
/**
* Base unit tests.
*
* @author thinh
*
*/
public abstract class KeyCryptoUnitTest {
/**
* Exception to be thrown around in unit tests.
*
*/
protected static final class ExpectedCryptoException extends CryptoException {
private static final long serialVersionUID = 2176114514661570549L;
}
protected static final String charset = "UTF-8";
protected static final String message = "this is a secret message!";
protected static final String defaultKeyPairAlgorithm = "RSA";
protected static final int defaultKeyPairKeySize = 1024;
protected KeyCrypto crypto;
protected Crypto cryptoContext;
protected String email;
protected String password;
/**
* <p>
* Each unit test is expected to be testing a specific crypto
* implementation.
* </p>
*
* @return
*/
abstract CryptoEngine getCryptoEngine();
/**
* <p>
     * The password value; this can be crypto-dependent (e.g. it may need to be
     * of a specific length).
* </p>
*
* @return
*/
abstract String getPasswordValue();
/**
* <p>
* Routine for re-loading the secret key outside of the underlying
* {@code CryptoEngine} implementation.
* </p>
*
* @return
* @throws Exception
*/
abstract byte[] loadSecretKey() throws Exception;
/**
* <p>
* Specific unit tests can override accordingly.
* </p>
*
* @throws Exception
*/
void initCryptoContext() throws Exception {
cryptoContext = crypto.createCrypto(password, defaultKeyPairAlgorithm, defaultKeyPairKeySize);
}
/**
* <p>
* Unit tests can override if the crypto context has a different
* implementation.
* </p>
*
* @return
* @throws InvalidKeySpecException
* @throws NoSuchAlgorithmException
*/
PublicKey getPublicKey() throws InvalidKeySpecException, NoSuchAlgorithmException {
KeySpec spec = new X509EncodedKeySpec(cryptoContext.getPublicKey());
PublicKey publicKey = KeyFactory.getInstance(defaultKeyPairAlgorithm).generatePublic(spec);
return publicKey;
}
/**
* <p>
* Unit tests can override if the crypto context has a different
* implementation.
* </p>
*
* @return
* @throws InvalidKeySpecException
* @throws NoSuchAlgorithmException
*/
PrivateKey getPrivateKey() throws InvalidKeySpecException, NoSuchAlgorithmException {
KeySpec spec = new PKCS8EncodedKeySpec(cryptoContext.getPrivateKey());
PrivateKey privateKey = KeyFactory.getInstance(defaultKeyPairAlgorithm).generatePrivate(spec);
return privateKey;
}
@Before
public void init() throws Exception {
// known email
email = "userkeygeneration@test.com";
// user password is only known to user
password = new String(getPasswordValue().getBytes(charset), charset);
// implementation of the crypto
crypto = new KeyCrypto(getCryptoEngine());
initCryptoContext();
}
@Test
public void initTest() {
assertTrue("Unit test must have a CryptoEngine implementation", getCryptoEngine() != null);
assertTrue(crypto != null);
assertTrue("Crypto Context not initialized", cryptoContext != null);
assertTrue(email != null && email.length() > 0);
assertTrue(password != null && password.length() > 0);
}
@Test
public void testCryptoEngineEncryptDecrypt() throws CryptoException, UnsupportedEncodingException {
byte[] encrypted = crypto.encrypt(cryptoContext.getSecretKey(), message.getBytes(charset));
assertTrue(encrypted != null && encrypted.length > 0);
byte[] decrypted = crypto.decrypt(cryptoContext.getSecretKey(), encrypted);
assertTrue(decrypted != null && decrypted.length > 0);
assertTrue("Decrypted message does not match original message", new String(decrypted, charset).equals(message));
}
@Test
public void testCryptoEngineGetRandomSecretKey1() {
byte[] key = crypto.generateKey();
assertTrue(key != null && key.length > 0);
}
@Test
public void testCryptoEngineGetRandomSecretKey2() {
SecretKey key = crypto.randomKey();
assertTrue(key != null);
}
@Test
public void testCryptoEngineEncryptDecryptRandomSecretKey1() throws CryptoException, UnsupportedEncodingException {
byte[] key = crypto.generateKey();
byte[] encrypted = crypto.encrypt(key, message.getBytes(charset));
assertTrue(encrypted != null && encrypted.length > 0);
byte[] decrypted = crypto.decrypt(key, encrypted);
assertTrue(decrypted != null && decrypted.length > 0);
assertTrue("Decrypted message does not match original message", new String(decrypted, charset).equals(message));
}
@Test
public void testCryptoEngineEncryptDecryptRandomSecretKey2() throws CryptoException, UnsupportedEncodingException {
SecretKey key = crypto.randomKey();
byte[] encrypted = crypto.encrypt(key.getEncoded(), message.getBytes(charset));
assertTrue(encrypted != null && encrypted.length > 0);
byte[] decrypted = crypto.decrypt(key.getEncoded(), encrypted);
assertTrue(decrypted != null && decrypted.length > 0);
assertTrue("Decrypted message does not match original message", new String(decrypted, charset).equals(message));
}
@Test(expected = ExpectedCryptoException.class)
public void testCryptoEngineInvalidKeyDecrypt() throws CryptoException, UnsupportedEncodingException {
byte[] encrypted = crypto.encrypt(cryptoContext.getSecretKey(), message.getBytes(charset));
assertTrue(encrypted != null && encrypted.length > 0);
try {
byte[] decrypted = crypto.decrypt("invalidcryptosymmetrickey".getBytes(charset), encrypted);
assertTrue("Decryption should not have gotten this far: " + new String(decrypted), false);
} catch (Exception e) {
throw new ExpectedCryptoException();
}
}
@Test
public void testStandardPublicKeyEncryption() throws Exception {
// encrypt the message using the public key
final Cipher encryptCipher = Cipher.getInstance(defaultKeyPairAlgorithm);
encryptCipher.init(Cipher.ENCRYPT_MODE, getPublicKey());
byte[] encrypted = encryptCipher.doFinal(message.getBytes(charset));
assertTrue(encrypted != null && encrypted.length > 0);
// decrypt using private key
final Cipher decryptCipher = Cipher.getInstance(defaultKeyPairAlgorithm);
decryptCipher.init(Cipher.DECRYPT_MODE, getPrivateKey());
byte[] decrypted = decryptCipher.doFinal(encrypted);
assertTrue(decrypted != null && decrypted.length > 0);
assertTrue("Decrypted message does not match original message", new String(decrypted, charset).equals(message));
}
protected SecretKey getSecretKey(String algorithm, String password) throws UnsupportedEncodingException {
byte[] key = Crypto.decode(password).get();
SecretKey k = new SecretKeySpec(key, 0, key.length, algorithm);
return k;
}
protected void doSymmetricEncryptDecryptWithInitializingVector(String algorithm, String password,
String cipherTransformation, int ivLength) throws Exception {
byte[] encrypted = null;
byte[] decrypted = null;
SecretKey k = getSecretKey(algorithm, password);
// encrypt
Cipher encryptCipher = Cipher.getInstance(cipherTransformation);
encryptCipher.init(Cipher.ENCRYPT_MODE, k, new IvParameterSpec(createInitializingVector(ivLength)));
encrypted = encryptCipher.doFinal(message.getBytes(charset));
// decrypt (different IV)
Cipher cipher1 = Cipher.getInstance(cipherTransformation);
cipher1.init(Cipher.DECRYPT_MODE, k, new IvParameterSpec(createInitializingVector(ivLength)));
decrypted = cipher1.doFinal(encrypted);
assertTrue("Decrypted message should not match because a different IV was used during decryption",
!(new String(decrypted, charset).equals(message)));
// decrypt (same IV)
Cipher cipher2 = Cipher.getInstance(cipherTransformation);
cipher2.init(Cipher.DECRYPT_MODE, k, new IvParameterSpec(encryptCipher.getIV()));
decrypted = cipher2.doFinal(encrypted);
assertTrue("Decrypted message does not match original message", new String(decrypted, charset).equals(message));
        // decrypt (different key)
Cipher cipher3 = Cipher.getInstance(cipherTransformation);
cipher3.init(Cipher.DECRYPT_MODE, getSecretKey(k.getAlgorithm(), getPasswordValue()),
new IvParameterSpec(encryptCipher.getIV()));
decrypted = cipher3.doFinal(encrypted);
assertTrue("Decrypted message should not match original message",
!(new String(decrypted, charset).equals(message)));
}
protected byte[] createInitializingVector(int size) {
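        // NOTE: SecureRandom.getInstanceStrong() may block while the OS gathers entropy
        // (notably on headless machines); acceptable for these unit tests.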
byte[] unique = new byte[size];
try {
SecureRandom.getInstanceStrong().nextBytes(unique);
} catch (NoSuchAlgorithmException e) {
throw new CryptoException(e);
}
return unique;
}
}
|
|
package com.github.javactic.futures;
import com.github.javactic.*;
import io.vavr.CheckedRunnable;
import io.vavr.Tuple;
import io.vavr.Tuple2;
import io.vavr.control.Option;
import io.vavr.control.Try;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.theories.DataPoints;
import org.junit.experimental.theories.Theories;
import org.junit.experimental.theories.Theory;
import org.junit.runner.RunWith;
import java.time.Duration;
import java.util.concurrent.CompletionException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@RunWith(Theories.class)
public class OrFutureTest {
ExecutionContext<String> CTX = ExecutionContext.of(ExecutionContext.OF_EXCEPTION_MESSAGE, Executors.newSingleThreadExecutor());
@DataPoints
public static Executor[] configs = {Executors.newSingleThreadExecutor(), Executors.newCachedThreadPool()};
private static final String FAIL = "fail";
@Theory
public void create(Executor es) throws InterruptedException {
ExecutionContext<String> ctx = ExecutionContext.of(ExecutionContext.OF_EXCEPTION_MESSAGE, es);
CountDownLatch latch = new CountDownLatch(1);
OrFuture<String, String> orf = ctx.future(() -> {
try {
latch.await();
} catch (Exception e) {
e.printStackTrace();
}
return Good.of("success");
});
assertEquals(Option.none(), orf.getOption());
latch.countDown();
CountDownLatch latch2 = new CountDownLatch(1);
orf.onComplete(or -> latch2.countDown());
latch2.await();
assertEquals(Good.of("success"), orf.getOption().get());
}
@Theory
public void filter(Executor es) throws Exception {
ExecutionContext<String> ctx = ExecutionContext.of(ExecutionContext.OF_EXCEPTION_MESSAGE, es);
OrFuture<Integer, String> orFuture = getF(ctx, 4)
.filter(i -> (i > 10) ? Pass.instance() : Fail.of(FAIL));
Or<Integer, String> or = orFuture.get(Duration.ofSeconds(10));
Assert.assertEquals(FAIL, or.getBad());
}
@Theory
public void map(Executor es) throws Exception {
ExecutionContext<String> ctx = ExecutionContext.of(ExecutionContext.OF_EXCEPTION_MESSAGE, es);
OrFuture<String, String> orFuture = getF(ctx, 5)
.map(i -> "" + i);
Assert.assertEquals("5", orFuture.get(Duration.ofSeconds(10)).get());
}
@Theory
public void badMap(Executor es) throws Exception {
ExecutionContext<String> ctx = ExecutionContext.of(ExecutionContext.OF_EXCEPTION_MESSAGE, es);
OrFuture<String, String> orFuture = ctx.future(() -> Bad.<String, String>of("bad"))
.badMap(s -> new StringBuilder(s).reverse().toString());
Assert.assertEquals("dab", orFuture.get(Duration.ofSeconds(10)).getBad());
}
@Theory
public void flatMap(Executor es) throws Exception {
ExecutionContext<String> ctx = ExecutionContext.of(ExecutionContext.OF_EXCEPTION_MESSAGE, es);
OrFuture<String, String> orFuture = getF(ctx, 6)
.flatMap(i -> ctx.future(() -> Good.of(i + "")));
Assert.assertEquals("6", orFuture.get(Duration.ofSeconds(10)).get());
orFuture = getF(ctx, 7).flatMap(i -> ctx.badFuture(FAIL));
Assert.assertEquals(FAIL, orFuture.get(Duration.ofSeconds(10)).getBad());
orFuture = ctx.badFuture(FAIL).flatMap(i -> ctx.goodFuture("7"));
assertEquals(FAIL, orFuture.getUnsafe().getBad());
}
@Theory
public void andThen(Executor es) throws Exception {
ExecutionContext<String> ctx = ExecutionContext.of(ExecutionContext.OF_EXCEPTION_MESSAGE, es);
OrFuture<String, String> future = ctx.goodFuture("good");
AtomicBoolean visited = new AtomicBoolean();
Or<String, String> result = future
.andThen(or -> {
throw new RuntimeException("Test throwing exception in andThen");
})
.andThen(or -> visited.set(true))
.getUnsafe();
assertTrue(visited.get());
assertEquals("good", result.get());
}
@Test
public void recover() throws Exception {
OrFuture<String, String> recover = CTX.<String, String>badFuture(FAIL).recover(f -> "5");
assertEquals("5", recover.get(Duration.ofSeconds(10)).get());
}
@Test
public void recoverWith() throws Exception {
OrFuture<String, String> recover = CTX.<String, String>badFuture(FAIL).recoverWith(f -> CTX.goodFuture("5"));
assertEquals("5", recover.get(Duration.ofSeconds(10)).get());
}
@Test
public void transform() throws Exception {
OrFuture<String, String> or = CTX.badFuture(FAIL);
OrFuture<Integer, Integer> transform = or.transform(s -> 5, f -> -5);
assertEquals(-5, transform.get(Duration.ofSeconds(10)).getBad().intValue());
}
@Test
public void zip() throws Exception {
OrFuture<String, Object> foo = CTX.goodFuture("foo");
OrFuture<Integer, Object> num = CTX.goodFuture(123);
OrFuture<Tuple2<String, Integer>, Every<Object>> zip = foo.zip(num);
assertEquals(Tuple.of("foo", 123), zip.get(Duration.ofSeconds(10)).get());
}
@Test
public void withContext() {
String startThread = "start";
String endThread = "end";
ExecutionContext<String> start = ExecutionContext.of(ExecutionContext.OF_EXCEPTION_MESSAGE, newExecutor(startThread));
ExecutionContext<String> end = ExecutionContext.of(ExecutionContext.OF_EXCEPTION_MESSAGE, newExecutor(endThread));
OrFuture<String, String> startGood = start.future(() -> Or.good("good"));
OrFuture<String, String> endGood = startGood.with(end);
SimpleSafe<String> startSafe = new SimpleSafe<>();
SimpleSafe<String> endSafe = new SimpleSafe<>();
startGood.onComplete(or -> startSafe.set(Thread.currentThread().getName()));
endGood.onComplete(or -> endSafe.set(Thread.currentThread().getName()));
assertEquals(startThread, startSafe.get());
assertEquals(endThread, endSafe.get());
}
@Test
public void getUnsafe() {
CountDownLatch latch = new CountDownLatch(1);
OrFuture<String, String> or = CTX.future(() -> {
try {
latch.await();
return Or.good("good");
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
});
Assert.assertEquals("OrFuture(N/A)", or.toString());
Thread thread = Thread.currentThread();
ForkJoinPool.commonPool().execute(() -> {
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
thread.interrupt();
});
try {
or.getUnsafe();
Assert.fail("should throw");
} catch(CompletionException ce) {
// expected
latch.countDown();
}
}
private <G> OrFuture<G, String> getF(ExecutionContext<String> ctx, G g) {
return ctx.future(() -> Good.of(g));
}
private Executor newExecutor(String name) {
return Executors.newSingleThreadExecutor(r -> new Thread(r, name));
}
private static class SimpleSafe<T> {
private final CountDownLatch latch = new CountDownLatch(1);
private T value;
void set(T value) {
this.value = value;
latch.countDown();
}
T get() {
sneakyRun(latch::await);
return value;
}
}
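  /**
   * Runs the given action, rethrowing any checked exception unchecked via the generic
   * "sneaky throw" trick: the cast to T is erased at runtime, so nothing is wrapped.
   */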
public static <T extends Throwable> void sneakyRun(CheckedRunnable r) throws T {
try {
r.run();
} catch (Throwable throwable) {
throw (T)throwable;
}
}
}
|
|
package com.winterwell.nlp.simpleparser;
import java.util.regex.MatchResult;
import java.util.regex.Pattern;
import com.winterwell.utils.TodoException;
import com.winterwell.utils.Utils;
import com.winterwell.utils.containers.Slice;
/**
* Parse a fixed bit of text. Sets the parsed word as the AST.getX() value.
*
* Use
* import static com.winterwell.nlp.simpleparser.Parsers.*;
*
* @author daniel
*
*/
class Lit extends Parser<String> {
private String word;
public Lit(String word) {
super();
this.word = word;
canBeZeroLength = word.length() == 0;
}
@Override
protected String sample() {
return word;
}
@Override
public ParseResult doParse(ParseState state) {
assert state.down == this;
Slice text = state.unparsed();
if (!text.startsWith(word))
return null;
Slice parsed = new Slice(text, 0, word.length());
assert parsed.toString().equals(word) : parsed + " != " + word;
AST<String> tree = new AST<String>(this, parsed);
tree.setX(word);
ParseResult r = new ParseResult(state, tree, state.text, state.posn
+ word.length());
return r;
}
@Override
public String toString() {
return word;
}
}
/**
* Not fast or clever, but easy to use. A parser combinator for recursive
* descent parsers with loop-checking. Use<br>
 * <code>import static com.winterwell.nlp.simpleparser.Parsers.*;</code><br>
* To get easy access to the combinators. Loosely inspired by the Scala Parser
* class.
*
* TODO back-tracking
*
 * @testedby ParserTest
* @author daniel
*/
public class Parsers {
/**
* Find by label, or null
* @param name
* @return
*/
public static Parser getParser(String name) {
return Parser.parsers.get(name);
}
static final class Opt<PT> extends First<PT> {
public Opt(Parser p) {
super(new Parser[] { p, lit("").label(null) });
}
@Override
public String toString() {
return Utils.isBlank(name) ? subs[0] + "?" : name;
}
}
static final class Word extends Parser<String> {
private final String word;
Word(String word) {
this.word = word;
canBeZeroLength = word.length() == 0;
}
@Override
protected ParseResult<String> doParse(ParseState state) {
Slice unp = state.unparsed();
if (!unp.startsWith(word))
return null;
AST<String> ast = new AST<String>(this, new Slice(unp, 0,
word.length()));
ast.setX(word);
ParseResult<String> r = new ParseResult<String>(state, ast,
state.text, state.posn + word.length());
if (unp.length() == word.length())
return r;
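			// enforce a whole-word match: reject if the next character would extend the word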
char c = unp.charAt(word.length());
if (Character.isLetterOrDigit(c))
return null;
return r;
}
@Override
public String toString() {
return word; // '"'+word+'"';
}
@Override
protected String sample() {
return word;
}
}
/**
* A mandatory space. Does not create a node in the AST
*/
public static final Parser space = regex("\\s+").label(null);
public static final Parser optSpace = opt(space);
static {
space.canBeZeroLength = false;
}
/**
* Matches *optional* brackets, e.g. "(body)" or plain "body", or
* "((body))". For obligatory brackets, just use {@link #seq(Parser...)}
*
* @param open
* @param body
* @param close
* @return
	 * @testedby ParserTest#testBracketed()
*/
public static <PT> Parser<PT> bracketed(String open, Parser<PT> body,
String close) {
// try for a nice name
String bs = Utils.isBlank(body.name) ? "_r" + Utils.getId() : body.name;
String name = open + bs + close;
if (Parser.parsers.containsKey(name)) {
// oh well - something unique
name = "_" + Utils.getUID();
}
Parser _open = lit(open).label(null);
Parser _close = lit(close).label(null);
Parser<PT> bp = first(
seq(_open, first(body, ref(name)), _close).label(name), body)
.setDesc("?brackets");
return bp;
}
public static ChainParser chain(Parser element, Parser separator) {
return new ChainParser(element, separator, 1, Integer.MAX_VALUE);
}
/**
* Like {@link #or(Parser...)} but without backtracking. The first match
* wins. This is more efficient (and easier to debug) than or.
*/
public static <X> Parser<X> first(Parser... parsers) {
return new First<X>(parsers);
}
/**
* Convenience for {@link #lit(String...)} with label null. This parser is
* NOT optional - one of the words must be present. But the nodes will not
* be added to the abstract syntax tree (AST), i.e. the results from the
* parse are ignored.
*
* @param words
* @return
*/
public static Parser<String> ignore(final String... words) {
return lit(words).label(null);
}
/**
* An anonymous literal if one String is provided. Otherwise a first() over
* the words
*
* @param name
* @param words
* @return
*/
public static Parser<String> lit(final String... words) {
if (words.length == 1)
return new Lit(words[0]);
assert words.length != 0 : words;
Parser[] lits = new Parser[words.length];
for (int i = 0; i < words.length; i++) {
String w = words[i];
Lit lit = new Lit(w);
lits[i] = lit;
}
return first(lits);
}
/**
* Match a number and create a double. Convenience for a common case.
*
* @param name
* 'cos you might easily have two of these & need different
* names, e.g. x + y
* @return
*/
public static PP<Number> num(String name) {
return new PP<Number>(regex("-?\\d+(\\.\\d+)?")) {
@Override
protected Number process(ParseResult<?> pr) {
return Double.valueOf(pr.parsed());
}
}.label(name);
}
/**
* Make a rule optional. E.g. opt(space) for optional whitespace. This is a
* convenience for {@link #first(Parser...)} - it is not a true or (no
* backtracking)
*
* @param parser
* @return A parser which never fails - returns the base result, or a blank
* tree.
*/
public static <PT> Parser<PT> opt(final Parser<PT> parser) {
return new Opt<PT>(parser);
}
@Deprecated
// Buggy! // TODO fix!
public static Parser or(Parser... parsers) {
return new Or(parsers);
}
/**
* Use to build recursive rules. This will lazily load a parser of the same
* name. Note: be careful that only *one* such parser gets defined!
*
* @param name
*/
public static Parser ref(String name) {
return new Ref(name);
}
/**
* Match a regular expression. ParseResult.getX() returns the successful
* {@link MatchResult} object.
*
* @param regex
* @return
*/
public static RegexParser regex(String regex) {
return new RegexParser(Pattern.compile("^" + regex));
}
public static Parser repeat(Parser parser) {
return repeat(parser, 0, Integer.MAX_VALUE);
}
public static Parser repeat(Parser parser, int min, int max) {
throw new TodoException();
}
public static Parser seq(final Parser... parsers) {
assert parsers.length != 0;
if (parsers.length == 1)
return parsers[0];
return new SeqnParser(parsers);
// Parser next = parsers[parsers.length-1];
// for (int i=parsers.length-2; i>=0; i--) {
// SeqnParser seqn = new SeqnParser(parsers[i], next);
// next = seqn;
// }
// return next;
}
/**
* Like lit(), but only matches whole-words. E.g. "hell" would match on
* "hell." or "hell " but not "hello". Note that "[s]hell" (s already
* parsed) would match.
* <p>
* Uses {@link #first(Parser...)} if there are multiple words.
* @return a fresh parser
*/
public static Parser<String> word(final String... words) {
if (words.length == 1)
return new Word(words[0]);
Parser[] lits = new Parser[words.length];
for (int i = 0; i < words.length; i++) {
String w = words[i];
Word lit = new Word(w);
lits[i] = lit;
}
return first(lits);
}
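	/**
	 * Illustrative sketch only (not part of the original API): shows how the combinators
	 * above compose into a tiny grammar. Nothing here runs a parse.
	 */
	static Parser exampleGreetingGrammar() {
		// "hello" or "hi", a mandatory space, then the whole word "world",
		// the whole thing optionally wrapped in brackets, e.g. "hello world" or "(hi world)"
		Parser greeting = seq(lit("hello", "hi"), space, word("world"));
		return bracketed("(", greeting, ")");
	}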
}
|
|
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skyframe;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.events.EventHandler;
import com.google.devtools.build.lib.packages.AspectDefinition;
import com.google.devtools.build.lib.packages.Attribute;
import com.google.devtools.build.lib.packages.DependencyFilter;
import com.google.devtools.build.lib.packages.NoSuchPackageException;
import com.google.devtools.build.lib.packages.NoSuchTargetException;
import com.google.devtools.build.lib.packages.NoSuchThingException;
import com.google.devtools.build.lib.packages.Rule;
import com.google.devtools.build.lib.skyframe.TransitiveTraversalFunction.FirstErrorMessageAccumulator;
import com.google.devtools.build.lib.util.GroupedList;
import com.google.devtools.build.skyframe.SkyFunction;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.SkyValue;
import com.google.devtools.build.skyframe.ValueOrException2;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
/**
* This class is like {@link TransitiveTargetFunction}, but the values it returns do not contain
* {@link NestedSet}s. It performs the side-effects of {@link TransitiveTargetFunction} (i.e.,
* ensuring that transitive targets and their packages have been loaded). It evaluates to a
* {@link TransitiveTraversalValue} that contains the first error message it encountered, and a
* set of names of providers if the target is a rule.
*/
public class TransitiveTraversalFunction
extends TransitiveBaseTraversalFunction<FirstErrorMessageAccumulator> {
@Override
Label argumentFromKey(SkyKey key) {
return (Label) key.argument();
}
@Override
SkyKey getKey(Label label) {
return TransitiveTraversalValue.key(label);
}
@Override
FirstErrorMessageAccumulator processTarget(Label label, TargetAndErrorIfAny targetAndErrorIfAny) {
NoSuchTargetException errorIfAny = targetAndErrorIfAny.getErrorLoadingTarget();
String errorMessageIfAny = errorIfAny == null ? null : errorIfAny.getMessage();
return new FirstErrorMessageAccumulator(errorMessageIfAny);
}
@Override
void processDeps(
FirstErrorMessageAccumulator accumulator,
EventHandler eventHandler,
TargetAndErrorIfAny targetAndErrorIfAny,
Iterable<Map.Entry<SkyKey, ValueOrException2<NoSuchPackageException, NoSuchTargetException>>>
depEntries) {
for (Map.Entry<SkyKey, ValueOrException2<NoSuchPackageException, NoSuchTargetException>> entry :
depEntries) {
TransitiveTraversalValue transitiveTraversalValue;
try {
transitiveTraversalValue = (TransitiveTraversalValue) entry.getValue().get();
if (transitiveTraversalValue == null) {
continue;
}
} catch (NoSuchPackageException | NoSuchTargetException e) {
accumulator.maybeSet(e.getMessage());
continue;
}
String firstErrorMessage = transitiveTraversalValue.getFirstErrorMessage();
if (firstErrorMessage != null) {
accumulator.maybeSet(firstErrorMessage);
}
}
}
@Override
protected Collection<Label> getAspectLabels(
Rule fromRule,
Attribute attr,
Label toLabel,
ValueOrException2<NoSuchPackageException, NoSuchTargetException> toVal,
Environment env) {
try {
if (toVal == null) {
return ImmutableList.of();
}
TransitiveTraversalValue traversalVal = (TransitiveTraversalValue) toVal.get();
if (traversalVal == null || traversalVal.getProviders() == null) {
return ImmutableList.of();
}
// Retrieve the providers of the dep from the TransitiveTraversalValue, so we can avoid
// issuing a dep on its defining Package.
return AspectDefinition.visitAspectsIfRequired(fromRule, attr,
traversalVal.getProviders(),
DependencyFilter.ALL_DEPS).values();
} catch (NoSuchThingException e) {
// Do nothing. This error was handled when we computed the corresponding
// TransitiveTargetValue.
return ImmutableList.of();
}
}
@Override
SkyValue computeSkyValue(TargetAndErrorIfAny targetAndErrorIfAny,
FirstErrorMessageAccumulator accumulator) {
boolean targetLoadedSuccessfully = targetAndErrorIfAny.getErrorLoadingTarget() == null;
String firstErrorMessage = accumulator.getFirstErrorMessage();
return targetLoadedSuccessfully
? TransitiveTraversalValue.forTarget(targetAndErrorIfAny.getTarget(), firstErrorMessage)
: TransitiveTraversalValue.unsuccessfulTransitiveTraversal(
firstErrorMessage, targetAndErrorIfAny.getTarget());
}
@Override
TargetMarkerValue getTargetMarkerValue(SkyKey targetMarkerKey, Environment env)
throws NoSuchTargetException, NoSuchPackageException, InterruptedException {
return TargetMarkerFunction.computeTargetMarkerValue(targetMarkerKey, env);
}
@Override
Collection<SkyKey> getLabelDepKeys(
SkyFunction.Environment env, TargetAndErrorIfAny targetAndErrorIfAny)
throws InterruptedException {
// As a performance optimization we may already know the deps we are about to request from
// last time #compute was called. By requesting these from the environment, we can avoid
// repeating the label visitation step. For TransitiveTraversalFunction#compute, the label deps
// dependency group is requested immediately after the package.
//
// IMPORTANT: No other package values should be requested inside
// TransitiveTraversalFunction#compute from this point forward.
Collection<SkyKey> oldDepKeys = getDepsAfterLastPackageDep(env, /*offset=*/ 1);
return oldDepKeys == null ? super.getLabelDepKeys(env, targetAndErrorIfAny) : oldDepKeys;
}
@Override
Iterable<SkyKey> getStrictLabelAspectDepKeys(
SkyFunction.Environment env,
Map<SkyKey, ValueOrException2<NoSuchPackageException, NoSuchTargetException>> depMap,
TargetAndErrorIfAny targetAndErrorIfAny)
throws InterruptedException {
// As a performance optimization we may already know the deps we are about to request from
// last time #compute was called. By requesting these from the environment, we can avoid
// repeating the label visitation step. For TransitiveTraversalFunction#compute, the label
// aspect deps dependency group is requested two groups after the package.
Collection<SkyKey> oldAspectDepKeys = getDepsAfterLastPackageDep(env, /*offset=*/ 2);
return oldAspectDepKeys == null
? super.getStrictLabelAspectDepKeys(env, depMap, targetAndErrorIfAny)
: oldAspectDepKeys;
}
@Nullable
private static Collection<SkyKey> getDepsAfterLastPackageDep(
SkyFunction.Environment env, int offset) {
GroupedList<SkyKey> temporaryDirectDeps = env.getTemporaryDirectDeps();
if (temporaryDirectDeps == null) {
return null;
}
int lastPackageDepIndex = getLastPackageValueIndex(temporaryDirectDeps);
if (lastPackageDepIndex == -1
|| temporaryDirectDeps.listSize() <= lastPackageDepIndex + offset) {
return null;
}
return temporaryDirectDeps.get(lastPackageDepIndex + offset);
}
private static int getLastPackageValueIndex(GroupedList<SkyKey> directDeps) {
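    // Scan dependency groups from the end, looking for the singleton group whose key is a
    // PACKAGE function; that marks where the package value was requested.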
int directDepsNumGroups = directDeps.listSize();
for (int i = directDepsNumGroups - 1; i >= 0; i--) {
List<SkyKey> depGroup = directDeps.get(i);
if (depGroup.size() == 1 && depGroup.get(0).functionName().equals(SkyFunctions.PACKAGE)) {
return i;
}
}
return -1;
}
/**
* Keeps track of the first error message encountered while traversing itself and its
* dependencies.
*/
static class FirstErrorMessageAccumulator {
@Nullable private String firstErrorMessage;
public FirstErrorMessageAccumulator(@Nullable String firstErrorMessage) {
this.firstErrorMessage = firstErrorMessage;
}
    /** Remembers {@code errorMessage} if it is the first error message. */
void maybeSet(String errorMessage) {
Preconditions.checkNotNull(errorMessage);
if (firstErrorMessage == null) {
firstErrorMessage = errorMessage;
}
}
@Nullable
String getFirstErrorMessage() {
return firstErrorMessage;
}
}
}
|
|
/**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.internal.operators.flowable;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import java.lang.reflect.Method;
import java.util.*;
import java.util.concurrent.*;
import org.junit.*;
import org.mockito.InOrder;
import org.reactivestreams.*;
import io.reactivex.*;
import io.reactivex.Flowable;
import io.reactivex.exceptions.*;
import io.reactivex.functions.LongConsumer;
import io.reactivex.internal.subscriptions.BooleanSubscription;
import io.reactivex.processors.PublishProcessor;
import io.reactivex.subscribers.*;
public class FlowableMergeDelayErrorTest {
Subscriber<String> stringObserver;
@Before
public void before() {
stringObserver = TestHelper.mockSubscriber();
}
@Test
public void testErrorDelayed1() {
final Flowable<String> o1 = Flowable.unsafeCreate(new TestErrorFlowable("four", null, "six")); // we expect to lose "six" from the source (and it should never be sent by the source since onError was called
final Flowable<String> o2 = Flowable.unsafeCreate(new TestErrorFlowable("one", "two", "three"));
Flowable<String> m = Flowable.mergeDelayError(o1, o2);
m.subscribe(stringObserver);
verify(stringObserver, times(1)).onError(any(NullPointerException.class));
verify(stringObserver, never()).onComplete();
verify(stringObserver, times(1)).onNext("one");
verify(stringObserver, times(1)).onNext("two");
verify(stringObserver, times(1)).onNext("three");
verify(stringObserver, times(1)).onNext("four");
verify(stringObserver, times(0)).onNext("five");
        // despite not expecting it ... we don't do anything to prevent it if the source Flowable keeps sending after onError
        // verify(stringObserver, times(1)).onNext("six");
        // inner Flowable errors are considered terminal for that source
}
@Test
public void testErrorDelayed2() {
final Flowable<String> o1 = Flowable.unsafeCreate(new TestErrorFlowable("one", "two", "three"));
final Flowable<String> o2 = Flowable.unsafeCreate(new TestErrorFlowable("four", null, "six")); // we expect to lose "six" from the source (and it should never be sent by the source since onError was called
final Flowable<String> o3 = Flowable.unsafeCreate(new TestErrorFlowable("seven", "eight", null));
final Flowable<String> o4 = Flowable.unsafeCreate(new TestErrorFlowable("nine"));
Flowable<String> m = Flowable.mergeDelayError(o1, o2, o3, o4);
m.subscribe(stringObserver);
verify(stringObserver, times(1)).onError(any(CompositeException.class));
verify(stringObserver, never()).onComplete();
verify(stringObserver, times(1)).onNext("one");
verify(stringObserver, times(1)).onNext("two");
verify(stringObserver, times(1)).onNext("three");
verify(stringObserver, times(1)).onNext("four");
verify(stringObserver, times(0)).onNext("five");
// despite not expecting it ... we don't do anything to prevent it if the source Flowable keeps sending after onError
// inner Flowable errors are considered terminal for that source
// verify(stringObserver, times(1)).onNext("six");
verify(stringObserver, times(1)).onNext("seven");
verify(stringObserver, times(1)).onNext("eight");
verify(stringObserver, times(1)).onNext("nine");
}
@Test
public void testErrorDelayed3() {
final Flowable<String> o1 = Flowable.unsafeCreate(new TestErrorFlowable("one", "two", "three"));
final Flowable<String> o2 = Flowable.unsafeCreate(new TestErrorFlowable("four", "five", "six"));
final Flowable<String> o3 = Flowable.unsafeCreate(new TestErrorFlowable("seven", "eight", null));
final Flowable<String> o4 = Flowable.unsafeCreate(new TestErrorFlowable("nine"));
Flowable<String> m = Flowable.mergeDelayError(o1, o2, o3, o4);
m.subscribe(stringObserver);
verify(stringObserver, times(1)).onError(any(NullPointerException.class));
verify(stringObserver, never()).onComplete();
verify(stringObserver, times(1)).onNext("one");
verify(stringObserver, times(1)).onNext("two");
verify(stringObserver, times(1)).onNext("three");
verify(stringObserver, times(1)).onNext("four");
verify(stringObserver, times(1)).onNext("five");
verify(stringObserver, times(1)).onNext("six");
verify(stringObserver, times(1)).onNext("seven");
verify(stringObserver, times(1)).onNext("eight");
verify(stringObserver, times(1)).onNext("nine");
}
@Test
public void testErrorDelayed4() {
final Flowable<String> o1 = Flowable.unsafeCreate(new TestErrorFlowable("one", "two", "three"));
final Flowable<String> o2 = Flowable.unsafeCreate(new TestErrorFlowable("four", "five", "six"));
final Flowable<String> o3 = Flowable.unsafeCreate(new TestErrorFlowable("seven", "eight"));
final Flowable<String> o4 = Flowable.unsafeCreate(new TestErrorFlowable("nine", null));
Flowable<String> m = Flowable.mergeDelayError(o1, o2, o3, o4);
m.subscribe(stringObserver);
verify(stringObserver, times(1)).onError(any(NullPointerException.class));
verify(stringObserver, never()).onComplete();
verify(stringObserver, times(1)).onNext("one");
verify(stringObserver, times(1)).onNext("two");
verify(stringObserver, times(1)).onNext("three");
verify(stringObserver, times(1)).onNext("four");
verify(stringObserver, times(1)).onNext("five");
verify(stringObserver, times(1)).onNext("six");
verify(stringObserver, times(1)).onNext("seven");
verify(stringObserver, times(1)).onNext("eight");
verify(stringObserver, times(1)).onNext("nine");
}
@Test
public void testErrorDelayed4WithThreading() {
final TestAsyncErrorFlowable o1 = new TestAsyncErrorFlowable("one", "two", "three");
final TestAsyncErrorFlowable o2 = new TestAsyncErrorFlowable("four", "five", "six");
final TestAsyncErrorFlowable o3 = new TestAsyncErrorFlowable("seven", "eight");
// throw the error at the very end so no onComplete will be called after it
final TestAsyncErrorFlowable o4 = new TestAsyncErrorFlowable("nine", null);
Flowable<String> m = Flowable.mergeDelayError(Flowable.unsafeCreate(o1), Flowable.unsafeCreate(o2), Flowable.unsafeCreate(o3), Flowable.unsafeCreate(o4));
m.subscribe(stringObserver);
try {
o1.t.join();
o2.t.join();
o3.t.join();
o4.t.join();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
verify(stringObserver, times(1)).onNext("one");
verify(stringObserver, times(1)).onNext("two");
verify(stringObserver, times(1)).onNext("three");
verify(stringObserver, times(1)).onNext("four");
verify(stringObserver, times(1)).onNext("five");
verify(stringObserver, times(1)).onNext("six");
verify(stringObserver, times(1)).onNext("seven");
verify(stringObserver, times(1)).onNext("eight");
verify(stringObserver, times(1)).onNext("nine");
verify(stringObserver, times(1)).onError(any(NullPointerException.class));
verify(stringObserver, never()).onComplete();
}
@Test
public void testCompositeErrorDelayed1() {
final Flowable<String> o1 = Flowable.unsafeCreate(new TestErrorFlowable("four", null, "six")); // we expect to lose "six" from the source (and it should never be sent by the source since onError was called
final Flowable<String> o2 = Flowable.unsafeCreate(new TestErrorFlowable("one", "two", null));
Flowable<String> m = Flowable.mergeDelayError(o1, o2);
m.subscribe(stringObserver);
verify(stringObserver, times(1)).onError(any(Throwable.class));
verify(stringObserver, never()).onComplete();
verify(stringObserver, times(1)).onNext("one");
verify(stringObserver, times(1)).onNext("two");
verify(stringObserver, times(0)).onNext("three");
verify(stringObserver, times(1)).onNext("four");
verify(stringObserver, times(0)).onNext("five");
// despite not expecting it ... we don't do anything to prevent it if the source Flowable keeps sending after onError
// inner Flowable errors are considered terminal for that source
// verify(stringObserver, times(1)).onNext("six");
}
@Test
public void testCompositeErrorDelayed2() {
final Flowable<String> o1 = Flowable.unsafeCreate(new TestErrorFlowable("four", null, "six")); // we expect to lose "six" from the source (and it should never be sent by the source since onError was called
final Flowable<String> o2 = Flowable.unsafeCreate(new TestErrorFlowable("one", "two", null));
Flowable<String> m = Flowable.mergeDelayError(o1, o2);
CaptureObserver w = new CaptureObserver();
m.subscribe(w);
assertNotNull(w.e);
int size = ((CompositeException)w.e).size();
if (size != 2) {
w.e.printStackTrace();
}
assertEquals(2, size);
// if (w.e instanceof CompositeException) {
// assertEquals(2, ((CompositeException) w.e).getExceptions().size());
// w.e.printStackTrace();
// } else {
// fail("Expecting CompositeException");
// }
}
/**
* The unit tests below are from OperationMerge and should ensure the normal merge functionality is correct.
*/
@Test
public void testMergeFlowableOfFlowables() {
final Flowable<String> o1 = Flowable.unsafeCreate(new TestSynchronousFlowable());
final Flowable<String> o2 = Flowable.unsafeCreate(new TestSynchronousFlowable());
        Flowable<Flowable<String>> flowableOfFlowables = Flowable.unsafeCreate(new Publisher<Flowable<String>>() {
@Override
public void subscribe(Subscriber<? super Flowable<String>> observer) {
observer.onSubscribe(new BooleanSubscription());
// simulate what would happen in a Flowable
observer.onNext(o1);
observer.onNext(o2);
observer.onComplete();
}
});
        Flowable<String> m = Flowable.mergeDelayError(flowableOfFlowables);
m.subscribe(stringObserver);
verify(stringObserver, never()).onError(any(Throwable.class));
verify(stringObserver, times(1)).onComplete();
verify(stringObserver, times(2)).onNext("hello");
}
@Test
public void testMergeArray() {
final Flowable<String> o1 = Flowable.unsafeCreate(new TestSynchronousFlowable());
final Flowable<String> o2 = Flowable.unsafeCreate(new TestSynchronousFlowable());
Flowable<String> m = Flowable.mergeDelayError(o1, o2);
m.subscribe(stringObserver);
verify(stringObserver, never()).onError(any(Throwable.class));
verify(stringObserver, times(2)).onNext("hello");
verify(stringObserver, times(1)).onComplete();
}
@Test
public void testMergeList() {
final Flowable<String> o1 = Flowable.unsafeCreate(new TestSynchronousFlowable());
final Flowable<String> o2 = Flowable.unsafeCreate(new TestSynchronousFlowable());
List<Flowable<String>> listOfFlowables = new ArrayList<Flowable<String>>();
listOfFlowables.add(o1);
listOfFlowables.add(o2);
Flowable<String> m = Flowable.mergeDelayError(Flowable.fromIterable(listOfFlowables));
m.subscribe(stringObserver);
verify(stringObserver, never()).onError(any(Throwable.class));
verify(stringObserver, times(1)).onComplete();
verify(stringObserver, times(2)).onNext("hello");
}
@Test
public void testMergeArrayWithThreading() {
final TestASynchronousFlowable o1 = new TestASynchronousFlowable();
final TestASynchronousFlowable o2 = new TestASynchronousFlowable();
Flowable<String> m = Flowable.mergeDelayError(Flowable.unsafeCreate(o1), Flowable.unsafeCreate(o2));
m.subscribe(stringObserver);
try {
o1.t.join();
o2.t.join();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
verify(stringObserver, never()).onError(any(Throwable.class));
verify(stringObserver, times(2)).onNext("hello");
verify(stringObserver, times(1)).onComplete();
}
@Test(timeout = 1000L)
public void testSynchronousError() {
final Flowable<Flowable<String>> o1 = Flowable.error(new RuntimeException("unit test"));
final CountDownLatch latch = new CountDownLatch(1);
Flowable.mergeDelayError(o1).subscribe(new DefaultSubscriber<String>() {
@Override
public void onComplete() {
fail("Expected onError path");
}
@Override
public void onError(Throwable e) {
latch.countDown();
}
@Override
public void onNext(String s) {
fail("Expected onError path");
}
});
try {
latch.await();
} catch (InterruptedException ex) {
fail("interrupted");
}
}
private static class TestSynchronousFlowable implements Publisher<String> {
@Override
public void subscribe(Subscriber<? super String> observer) {
observer.onSubscribe(new BooleanSubscription());
observer.onNext("hello");
observer.onComplete();
}
}
private static class TestASynchronousFlowable implements Publisher<String> {
Thread t;
@Override
public void subscribe(final Subscriber<? super String> observer) {
observer.onSubscribe(new BooleanSubscription());
t = new Thread(new Runnable() {
@Override
public void run() {
observer.onNext("hello");
observer.onComplete();
}
});
t.start();
}
}
private static class TestErrorFlowable implements Publisher<String> {
String[] valuesToReturn;
TestErrorFlowable(String... values) {
valuesToReturn = values;
}
@Override
public void subscribe(Subscriber<? super String> observer) {
observer.onSubscribe(new BooleanSubscription());
boolean errorThrown = false;
for (String s : valuesToReturn) {
if (s == null) {
System.out.println("throwing exception");
observer.onError(new NullPointerException());
errorThrown = true;
// purposefully not returning here so it will continue calling onNext
// so that we also test that we handle bad sequences like this
} else {
observer.onNext(s);
}
}
if (!errorThrown) {
observer.onComplete();
}
}
}
private static class TestAsyncErrorFlowable implements Publisher<String> {
String[] valuesToReturn;
TestAsyncErrorFlowable(String... values) {
valuesToReturn = values;
}
Thread t;
@Override
public void subscribe(final Subscriber<? super String> observer) {
observer.onSubscribe(new BooleanSubscription());
t = new Thread(new Runnable() {
@Override
public void run() {
for (String s : valuesToReturn) {
if (s == null) {
System.out.println("throwing exception");
try {
Thread.sleep(100);
} catch (Throwable e) {
}
observer.onError(new NullPointerException());
return;
} else {
observer.onNext(s);
}
}
System.out.println("subscription complete");
observer.onComplete();
}
});
t.start();
}
}
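    /** Subscriber that records the terminal error so tests can inspect it directly. */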
private static class CaptureObserver extends DefaultSubscriber<String> {
volatile Throwable e;
@Override
public void onComplete() {
}
@Override
public void onError(Throwable e) {
this.e = e;
}
@Override
public void onNext(String args) {
}
}
@Test
@Ignore("Subscribers should not throw")
public void testMergeSourceWhichDoesntPropagateExceptionBack() {
Flowable<Integer> source = Flowable.unsafeCreate(new Publisher<Integer>() {
@Override
public void subscribe(Subscriber<? super Integer> t1) {
t1.onSubscribe(new BooleanSubscription());
try {
t1.onNext(0);
} catch (Throwable swallow) {
}
t1.onNext(1);
t1.onComplete();
}
});
Flowable<Integer> result = Flowable.mergeDelayError(source, Flowable.just(2));
final Subscriber<Integer> o = TestHelper.mockSubscriber();
InOrder inOrder = inOrder(o);
result.subscribe(new DefaultSubscriber<Integer>() {
int calls;
@Override
public void onNext(Integer t) {
if (calls++ == 0) {
throw new TestException();
}
o.onNext(t);
}
@Override
public void onError(Throwable e) {
o.onError(e);
}
@Override
public void onComplete() {
o.onComplete();
}
});
/*
* If the child onNext throws, why would we keep accepting values from
* other sources?
*/
inOrder.verify(o).onNext(2);
inOrder.verify(o, never()).onNext(0);
inOrder.verify(o, never()).onNext(1);
inOrder.verify(o, never()).onNext(anyInt());
inOrder.verify(o).onError(any(TestException.class));
verify(o, never()).onComplete();
}
@Test
public void testErrorInParentFlowable() {
TestSubscriber<Integer> ts = new TestSubscriber<Integer>();
Flowable.mergeDelayError(
Flowable.just(Flowable.just(1), Flowable.just(2))
.startWith(Flowable.<Integer> error(new RuntimeException()))
).subscribe(ts);
ts.awaitTerminalEvent();
ts.assertTerminated();
ts.assertValues(1, 2);
assertEquals(1, ts.errorCount());
}
@Test
public void testErrorInParentFlowableDelayed() throws Exception {
for (int i = 0; i < 50; i++) {
final TestASynchronous1sDelayedFlowable o1 = new TestASynchronous1sDelayedFlowable();
final TestASynchronous1sDelayedFlowable o2 = new TestASynchronous1sDelayedFlowable();
Flowable<Flowable<String>> parentFlowable = Flowable.unsafeCreate(new Publisher<Flowable<String>>() {
@Override
public void subscribe(Subscriber<? super Flowable<String>> op) {
op.onSubscribe(new BooleanSubscription());
op.onNext(Flowable.unsafeCreate(o1));
op.onNext(Flowable.unsafeCreate(o2));
op.onError(new NullPointerException("throwing exception in parent"));
}
});
Subscriber<String> stringObserver = TestHelper.mockSubscriber();
TestSubscriber<String> ts = new TestSubscriber<String>(stringObserver);
Flowable<String> m = Flowable.mergeDelayError(parentFlowable);
m.subscribe(ts);
System.out.println("testErrorInParentFlowableDelayed | " + i);
ts.awaitTerminalEvent(2000, TimeUnit.MILLISECONDS);
ts.assertTerminated();
verify(stringObserver, times(2)).onNext("hello");
verify(stringObserver, times(1)).onError(any(NullPointerException.class));
verify(stringObserver, never()).onComplete();
}
}
private static class TestASynchronous1sDelayedFlowable implements Publisher<String> {
Thread t;
@Override
public void subscribe(final Subscriber<? super String> observer) {
observer.onSubscribe(new BooleanSubscription());
t = new Thread(new Runnable() {
@Override
public void run() {
try {
Thread.sleep(100);
} catch (InterruptedException e) {
observer.onError(e);
}
observer.onNext("hello");
observer.onComplete();
}
});
t.start();
}
}
@Test
public void testDelayErrorMaxConcurrent() {
final List<Long> requests = new ArrayList<Long>();
Flowable<Integer> source = Flowable.mergeDelayError(Flowable.just(
Flowable.just(1).hide(),
Flowable.<Integer>error(new TestException()))
.doOnRequest(new LongConsumer() {
@Override
public void accept(long t1) {
requests.add(t1);
}
}), 1);
TestSubscriber<Integer> ts = new TestSubscriber<Integer>();
source.subscribe(ts);
ts.assertValue(1);
ts.assertTerminated();
ts.assertError(TestException.class);
assertEquals(Arrays.asList(1L, 1L, 1L), requests);
}
// This is pretty much a clone of testMergeList but with the overloaded MergeDelayError for Iterables
@Test
public void mergeIterable() {
final Flowable<String> o1 = Flowable.unsafeCreate(new TestSynchronousFlowable());
final Flowable<String> o2 = Flowable.unsafeCreate(new TestSynchronousFlowable());
List<Flowable<String>> listOfFlowables = new ArrayList<Flowable<String>>();
listOfFlowables.add(o1);
listOfFlowables.add(o2);
Flowable<String> m = Flowable.mergeDelayError(listOfFlowables);
m.subscribe(stringObserver);
verify(stringObserver, never()).onError(any(Throwable.class));
verify(stringObserver, times(1)).onComplete();
verify(stringObserver, times(2)).onNext("hello");
}
@SuppressWarnings("unchecked")
@Test
public void iterableMaxConcurrent() {
TestSubscriber<Integer> ts = TestSubscriber.create();
PublishProcessor<Integer> ps1 = PublishProcessor.create();
PublishProcessor<Integer> ps2 = PublishProcessor.create();
Flowable.mergeDelayError(Arrays.asList(ps1, ps2), 1).subscribe(ts);
assertTrue("ps1 has no subscribers?!", ps1.hasSubscribers());
assertFalse("ps2 has subscribers?!", ps2.hasSubscribers());
ps1.onNext(1);
ps1.onComplete();
assertFalse("ps1 has subscribers?!", ps1.hasSubscribers());
assertTrue("ps2 has no subscribers?!", ps2.hasSubscribers());
ps2.onNext(2);
ps2.onComplete();
ts.assertValues(1, 2);
ts.assertNoErrors();
ts.assertComplete();
}
@SuppressWarnings("unchecked")
@Test
public void iterableMaxConcurrentError() {
TestSubscriber<Integer> ts = TestSubscriber.create();
PublishProcessor<Integer> ps1 = PublishProcessor.create();
PublishProcessor<Integer> ps2 = PublishProcessor.create();
Flowable.mergeDelayError(Arrays.asList(ps1, ps2), 1).subscribe(ts);
assertTrue("ps1 has no subscribers?!", ps1.hasSubscribers());
assertFalse("ps2 has subscribers?!", ps2.hasSubscribers());
ps1.onNext(1);
ps1.onError(new TestException());
assertFalse("ps1 has subscribers?!", ps1.hasSubscribers());
assertTrue("ps2 has no subscribers?!", ps2.hasSubscribers());
ps2.onNext(2);
ps2.onError(new TestException());
ts.assertValues(1, 2);
ts.assertError(CompositeException.class);
ts.assertNotComplete();
CompositeException ce = (CompositeException)ts.errors().get(0);
assertEquals(2, ce.getExceptions().size());
}
@SuppressWarnings("unchecked")
@Test
@Ignore("No 2-9 parameter mergeDelayError() overloads")
public void mergeMany() throws Exception {
for (int i = 2; i < 10; i++) {
Class<?>[] clazz = new Class[i];
Arrays.fill(clazz, Flowable.class);
Flowable<Integer>[] obs = new Flowable[i];
Arrays.fill(obs, Flowable.just(1));
Integer[] expected = new Integer[i];
Arrays.fill(expected, 1);
Method m = Flowable.class.getMethod("mergeDelayError", clazz);
TestSubscriber<Integer> ts = TestSubscriber.create();
((Flowable<Integer>)m.invoke(null, (Object[])obs)).subscribe(ts);
ts.assertValues(expected);
ts.assertNoErrors();
ts.assertComplete();
}
}
static <T> Flowable<T> withError(Flowable<T> source) {
return source.concatWith(Flowable.<T>error(new TestException()));
}
@SuppressWarnings("unchecked")
@Test
@Ignore("No 2-9 parameter mergeDelayError() overloads")
public void mergeManyError() throws Exception {
for (int i = 2; i < 10; i++) {
Class<?>[] clazz = new Class[i];
Arrays.fill(clazz, Flowable.class);
Flowable<Integer>[] obs = new Flowable[i];
for (int j = 0; j < i; j++) {
obs[j] = withError(Flowable.just(1));
}
Integer[] expected = new Integer[i];
Arrays.fill(expected, 1);
Method m = Flowable.class.getMethod("mergeDelayError", clazz);
TestSubscriber<Integer> ts = TestSubscriber.create();
((Flowable<Integer>)m.invoke(null, (Object[])obs)).subscribe(ts);
ts.assertValues(expected);
ts.assertError(CompositeException.class);
ts.assertNotComplete();
CompositeException ce = (CompositeException)ts.errors().get(0);
assertEquals(i, ce.getExceptions().size());
}
}
@Test
public void array() {
for (int i = 1; i < 100; i++) {
@SuppressWarnings("unchecked")
Flowable<Integer>[] sources = new Flowable[i];
Arrays.fill(sources, Flowable.just(1));
Integer[] expected = new Integer[i];
for (int j = 0; j < i; j++) {
expected[j] = 1;
}
Flowable.mergeArrayDelayError(sources)
.test()
.assertResult(expected);
}
}
@SuppressWarnings("unchecked")
@Test
public void mergeArrayDelayError() {
Flowable.mergeArrayDelayError(Flowable.just(1), Flowable.just(2))
.test()
.assertResult(1, 2);
}
@SuppressWarnings("unchecked")
@Test
public void mergeIterableDelayErrorWithError() {
Flowable.mergeDelayError(
Arrays.asList(Flowable.just(1).concatWith(Flowable.<Integer>error(new TestException())),
Flowable.just(2)))
.test()
.assertFailure(TestException.class, 1, 2);
}
@Test
public void mergeDelayError() {
Flowable.mergeDelayError(
Flowable.just(Flowable.just(1),
Flowable.just(2)))
.test()
.assertResult(1, 2);
}
@Test
public void mergeDelayErrorWithError() {
Flowable.mergeDelayError(
Flowable.just(Flowable.just(1).concatWith(Flowable.<Integer>error(new TestException())),
Flowable.just(2)))
.test()
.assertFailure(TestException.class, 1, 2);
}
@Test
public void mergeDelayErrorMaxConcurrency() {
Flowable.mergeDelayError(
Flowable.just(Flowable.just(1),
Flowable.just(2)), 1)
.test()
.assertResult(1, 2);
}
@Test
public void mergeDelayErrorWithErrorMaxConcurrency() {
Flowable.mergeDelayError(
Flowable.just(Flowable.just(1).concatWith(Flowable.<Integer>error(new TestException())),
Flowable.just(2)), 1)
.test()
.assertFailure(TestException.class, 1, 2);
}
@SuppressWarnings("unchecked")
@Test
public void mergeIterableDelayErrorMaxConcurrency() {
Flowable.mergeDelayError(
Arrays.asList(Flowable.just(1),
Flowable.just(2)), 1)
.test()
.assertResult(1, 2);
}
@SuppressWarnings("unchecked")
@Test
public void mergeIterableDelayErrorWithErrorMaxConcurrency() {
Flowable.mergeDelayError(
Arrays.asList(Flowable.just(1).concatWith(Flowable.<Integer>error(new TestException())),
Flowable.just(2)), 1)
.test()
.assertFailure(TestException.class, 1, 2);
}
@Test
public void mergeDelayError3() {
Flowable.mergeDelayError(
Flowable.just(1),
Flowable.just(2),
Flowable.just(3)
)
.test()
.assertResult(1, 2, 3);
}
@Test
public void mergeDelayError3WithError() {
Flowable.mergeDelayError(
Flowable.just(1),
Flowable.just(2).concatWith(Flowable.<Integer>error(new TestException())),
Flowable.just(3)
)
.test()
.assertFailure(TestException.class, 1, 2, 3);
}
@SuppressWarnings("unchecked")
@Test
public void mergeIterableDelayError() {
Flowable.mergeDelayError(Arrays.asList(Flowable.just(1), Flowable.just(2)))
.test()
.assertResult(1, 2);
}
}
|
|
/*
Derby - Class org.apache.derby.impl.sql.compile.GroupByList
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.impl.sql.compile;
import org.apache.derby.iapi.sql.compile.C_NodeTypes;
import org.apache.derby.iapi.services.sanity.SanityManager;
import org.apache.derby.iapi.error.StandardException;
import org.apache.derby.iapi.sql.dictionary.DataDictionary;
import org.apache.derby.iapi.reference.SQLState;
import org.apache.derby.iapi.reference.Limits;
import org.apache.derby.impl.sql.compile.ActivationClassBuilder;
import org.apache.derby.iapi.util.ReuseFactory;
import java.util.Vector;
/**
* A GroupByList represents the list of expressions in a GROUP BY clause in
* a SELECT statement.
*
*/
public class GroupByList extends OrderedColumnList
{
int numGroupingColsAdded = 0;
boolean rollup = false;
/**
Add a column to the list
@param column The column to add to the list
*/
public void addGroupByColumn(GroupByColumn column)
{
addElement(column);
}
/**
Get a column from the list
@param position The column to get from the list
*/
public GroupByColumn getGroupByColumn(int position)
{
if (SanityManager.DEBUG)
{
SanityManager.ASSERT(position >=0 && position < size(),
"position (" + position +
") expected to be between 0 and " + size());
}
return (GroupByColumn) elementAt(position);
}
public void setRollup()
{
rollup = true;
}
public boolean isRollup()
{
return rollup;
}
/**
* Get the number of grouping columns that need to be added to the SELECT list.
*
* @return int The number of grouping columns that need to be added to
* the SELECT list.
*/
public int getNumNeedToAddGroupingCols()
{
return numGroupingColsAdded;
}
/**
* Bind the group by list. Verify:
* o Number of grouping columns matches number of non-aggregates in
* SELECT's RCL.
* o Names in the group by list are unique
* o Names of grouping columns match names of non-aggregate
* expressions in SELECT's RCL.
*
* @param select The SelectNode
* @param aggregateVector The aggregate vector being built as we find AggregateNodes
*
* @exception StandardException Thrown on error
*/
public void bindGroupByColumns(SelectNode select,
Vector aggregateVector)
throws StandardException
{
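/* Illustrative example (hypothetical table t with columns a and b):
 * for
 *   SELECT a, SUM(b) FROM t GROUP BY a
 * the grouping column "a" is first bound against the FROM list and
 * then matched to the non-aggregate ColumnReference "a" in the
 * SELECT list, whose ResultColumn is marked as a grouping column.
 */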
FromList fromList = select.getFromList();
ResultColumnList selectRCL = select.getResultColumns();
SubqueryList dummySubqueryList =
(SubqueryList) getNodeFactory().getNode(
C_NodeTypes.SUBQUERY_LIST,
getContextManager());
int numColsAddedHere = 0;
int size = size();
/* Only 32677 columns allowed in GROUP BY clause */
if (size > Limits.DB2_MAX_ELEMENTS_IN_GROUP_BY)
{
throw StandardException.newException(SQLState.LANG_TOO_MANY_ELEMENTS);
}
/* Bind the grouping column */
for (int index = 0; index < size; index++)
{
GroupByColumn groupByCol = (GroupByColumn) elementAt(index);
groupByCol.bindExpression(fromList,
dummySubqueryList, aggregateVector);
}
int rclSize = selectRCL.size();
for (int index = 0; index < size; index++)
{
boolean matchFound = false;
GroupByColumn groupingCol = (GroupByColumn) elementAt(index);
/* Verify that this entry in the GROUP BY list matches a
* grouping column in the select list.
*/
for (int inner = 0; inner < rclSize; inner++)
{
ResultColumn selectListRC = (ResultColumn) selectRCL.elementAt(inner);
if (!(selectListRC.getExpression() instanceof ColumnReference)) {
continue;
}
ColumnReference selectListCR = (ColumnReference) selectListRC.getExpression();
if (selectListCR.isEquivalent(groupingCol.getColumnExpression())) {
/* Column positions for grouping columns are 1-based */
groupingCol.setColumnPosition(inner + 1);
/* Mark the RC in the SELECT list as a grouping column */
selectListRC.markAsGroupingColumn();
matchFound = true;
break;
}
}
/* If no match found in the SELECT list, then add a matching
* ResultColumn/ColumnReference pair to the SelectNode's RCL.
* However, don't add additional result columns if the query
* specified DISTINCT, because distinct processing considers
* the entire RCL and including extra columns could change the
* results: e.g. select distinct a,b from t group by a,b,c
* should not consider column c in distinct processing (DERBY-3613)
*/
if (! matchFound && !select.hasDistinct() &&
groupingCol.getColumnExpression() instanceof ColumnReference)
{
// only add matching columns for column references not
// expressions yet. See DERBY-883 for details.
ResultColumn newRC;
/* Get a new ResultColumn */
newRC = (ResultColumn) getNodeFactory().getNode(
C_NodeTypes.RESULT_COLUMN,
groupingCol.getColumnName(),
groupingCol.getColumnExpression().getClone(),
getContextManager());
newRC.setVirtualColumnId(selectRCL.size() + 1);
newRC.markGenerated();
newRC.markAsGroupingColumn();
/* Add the new RC/CR to the RCL */
selectRCL.addElement(newRC);
/* Set the columnPosition in the GroupByColumn, now that it
* has a matching entry in the SELECT list.
*/
groupingCol.setColumnPosition(selectRCL.size());
// a new hidden or generated column is added to this RCL
// i.e. that the size() of the RCL != visibleSize().
// Error checking done later should be aware of this
// special case.
selectRCL.setCountMismatchAllowed(true);
/*
** Track the number of columns that we have added
** in this routine. We track this separately
** from the total number of columns added by this
** object (numGroupingColsAdded) because we
** might be bound (though not gagged) more than
** once (in which case numGroupingColsAdded will
** already be set).
*/
numColsAddedHere++;
}
if (groupingCol.getColumnExpression() instanceof JavaToSQLValueNode)
{
// disallow any expression which involves native java computation.
// Not possible to consider java expressions for equivalence.
throw StandardException.newException(
SQLState.LANG_INVALID_GROUPED_SELECT_LIST);
}
}
/* Verify that no subqueries got added to the dummy list */
if (SanityManager.DEBUG)
{
SanityManager.ASSERT(dummySubqueryList.size() == 0,
"dummySubqueryList.size() is expected to be 0");
}
numGroupingColsAdded+= numColsAddedHere;
}
/**
* Find the matching grouping column if any for the given expression
*
* @param node an expression for which we are trying to find a match
* in the group by list.
*
* @return the matching GroupByColumn if one exists, null otherwise.
*
* @throws StandardException
*/
public GroupByColumn findGroupingColumn(ValueNode node)
throws StandardException
{
int sz = size();
for (int i = 0; i < sz; i++)
{
GroupByColumn gbc = (GroupByColumn)elementAt(i);
if (gbc.getColumnExpression().isEquivalent(node))
{
return gbc;
}
}
return null;
}
/**
* Remap all ColumnReferences in this tree to be clones of the
* underlying expression.
*
* @exception StandardException Thrown on error
*/
public void remapColumnReferencesToExpressions() throws StandardException
{
GroupByColumn gbc;
int size = size();
/* This method is called when flattening a FromTable. We should
* not be flattening a FromTable if the underlying expression that
* will get returned out, after chopping out the redundant ResultColumns,
* is not a ColumnReference. (See ASSERT below.)
*/
for (int index = 0; index < size; index++)
{
ValueNode retVN;
gbc = (GroupByColumn) elementAt(index);
retVN = gbc.getColumnExpression().remapColumnReferencesToExpressions();
if (SanityManager.DEBUG)
{
SanityManager.ASSERT(retVN instanceof ColumnReference,
"retVN expected to be instanceof ColumnReference, not " +
retVN.getClass().getName());
}
gbc.setColumnExpression(retVN);
}
}
/**
* Convert this object to a String. See comments in QueryTreeNode.java
* for how this should be done for tree printing.
*
* @return This object as a String
*/
public String toString()
{
if (SanityManager.DEBUG) {
return "numGroupingColsAdded: " + numGroupingColsAdded + "\n" +
super.toString();
} else {
return "";
}
}
public void preprocess(
int numTables, FromList fromList, SubqueryList whereSubquerys,
PredicateList wherePredicates) throws StandardException
{
for (int index = 0; index < size(); index++)
{
GroupByColumn groupingCol = (GroupByColumn) elementAt(index);
groupingCol.setColumnExpression(
groupingCol.getColumnExpression().preprocess(
numTables, fromList, whereSubquerys, wherePredicates));
}
}
}
|
|
/**
* Copyright John Asmuth and Rutgers University 2009, all rights reserved.
*/
package edu.rutgers.rl3.comp;
import org.rlcommunity.rlglue.codec.AgentInterface;
import org.rlcommunity.rlglue.codec.types.Action;
import org.rlcommunity.rlglue.codec.types.Observation;
import org.rlcommunity.rlglue.codec.util.AgentLoader;
import java.util.Date;
import java.util.Vector;
import java.util.Random;
import rlVizLib.general.ParameterHolder;
import rlVizLib.messaging.agentShell.TaskSpecResponsePayload;
/**
* A simple agent that:
* - never goes to the left
* - mostly goes to the right
* - tends to jump when under coins
* - jumps if it cannot walk to the right due to a block
* - tends to jump when there is a monster nearby
* - tends to jump when there is a pit nearby
* - tends to run when there is nothing nearby
*
* Also, it will remember the last trial, and repeat it exactly except
* for the last 7 steps (assuming there are at least 7 steps).
*
* @author jasmuth
*
*/
public class ExampleMarioAgent implements AgentInterface {
/**
* Returns the char representing the tile at the given location.
* If unknown, returns '\0'.
*
* Valid tiles:
* M - the tile mario is currently on. there is no tile for a monster.
* $ - a coin
* b - a smashable brick
* ? - a question block
* | - a pipe. gets its own tile because often there are piranha plants
* in them
* ! - the finish line
* And an integer in [1,7] is a 3 bit binary flag
* first bit is "cannot go through this tile from above"
* second bit is "cannot go through this tile from below"
* third bit is "cannot go through this tile from either side"
*
* @param xf
* @param yf
* @param obs
* @return
*/
public static char getTileAt(double xf, double yf, Observation obs) {
int x = (int) xf;
if (x < 0) {
return '7';
}
int y = 16 - (int) yf;
x -= obs.intArray[0];
if (x < 0 || x > 21 || y < 0 || y > 15) {
return '\0';
}
int index = y * 22 + x;
return obs.charArray[index];
}
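/*
 * A minimal sketch (hypothetical helper, not referenced anywhere else in this
 * agent) of one way to read the 3-bit solidity flags described in the
 * getTileAt javadoc, assuming the "first bit" is the least-significant bit of
 * the tile digit.
 * For example, tileBlocksFrom('3', 0) and tileBlocksFrom('3', 1) return true,
 * while tileBlocksFrom('3', 2) returns false.
 */
static boolean tileBlocksFrom(char tile, int bit) {
// only the digit tiles '1'..'7' carry solidity flags
if (tile < '1' || tile > '7') {
return false;
}
// bit 0: blocked from above, bit 1: blocked from below, bit 2: blocked from the sides
return ((tile - '0') & (1 << bit)) != 0;
}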
public static TaskSpecResponsePayload isCompatible(ParameterHolder P, String TaskSpec) {
if (TaskSpec.contains("Mario-v1")) {
return new TaskSpecResponsePayload(false, "");
} else {
return new TaskSpecResponsePayload(true, "ExampleMarioAgent only works with Mario.");
}
}
/**
* All you need to know about a monster.
*
* @author jasmuth
*
*/
static class Monster {
double x;
double y;
/**
* The instantaneous change in x per step
*/
double sx;
/**
* The instantaneous change in y per step
*/
double sy;
/**
* The monster type
* 0 - Mario
* 1 - Red Koopa
* 2 - Green Koopa
* 3 - Goomba
* 4 - Spikey
* 5 - Piranha Plant
* 6 - Mushroom
* 7 - Fire Flower
* 8 - Fireball
* 9 - Shell
* 10 - Big Mario
* 11 - Fiery Mario
*/
int type;
/**
* A human recognizable title for the monster
*/
String typeName;
/**
* Winged monsters bounce up and down
*/
boolean winged;
}
/**
* Gets all the monsters from the observation. Mario is included in this list.
*
* @param obs
* @return
*/
public static Monster[] getMonsters(Observation obs) {
Vector<Monster> monster_vec = new Vector<Monster>();
for (int i = 0; 1 + 2 * i < obs.intArray.length; i++) {
Monster m = new Monster();
m.type = obs.intArray[1 + 2 * i];
m.winged = obs.intArray[2 + 2 * i] != 0;
switch (m.type) {
case 0:
m.typeName = "Mario";
break;
case 1:
m.typeName = "Red Koopa";
break;
case 2:
m.typeName = "Green Koopa";
break;
case 3:
m.typeName = "Goomba";
break;
case 4:
m.typeName = "Spikey";
break;
case 5:
m.typeName = "Piranha Plant";
break;
case 6:
m.typeName = "Mushroom";
break;
case 7:
m.typeName = "Fire Flower";
break;
case 8:
m.typeName = "Fireball";
break;
case 9:
m.typeName = "Shell";
break;
case 10:
m.typeName = "Big Mario";
break;
case 11:
m.typeName = "Fiery Mario";
break;
}
m.x = obs.doubleArray[4 * i];
m.y = obs.doubleArray[4 * i + 1];
m.sx = obs.doubleArray[4 * i + 2];
m.sy = obs.doubleArray[4 * i + 3];
monster_vec.add(m);
}
return monster_vec.toArray(new Monster[0]);
}
/**
* Gets just mario's information.
*
* @param obs
* @return
*/
public static Monster getMario(Observation obs) {
Monster[] monsters = getMonsters(obs);
for (Monster m : monsters) {
if (m.type == 0 || m.type == 10 || m.type == 11) {
return m;
}
}
return null;
}
Random rand;
/**
* When this is true, Mario is pausing for some number of steps
*/
boolean walk_hesitating;
/**
* How many steps since the beginning of this trial
*/
int step_number;
/**
* How many steps since the beginning of this run
*/
int total_steps;
/**
* The time that the current trial began
*/
long trial_start;
/**
* The sequence of actions taken during the last trial
*/
Vector<Action> last_actions;
/**
* The sequence of actions taken so far during the current trial
*/
Vector<Action> this_actions;
public ExampleMarioAgent() {
rand = new Random(new java.util.Date().getTime());
last_actions = new Vector<Action>();
this_actions = new Vector<Action>();
}
public void agent_init(String task) {
total_steps = 0;
}
public void agent_cleanup() {
}
public Action agent_start(Observation o) {
trial_start = new Date().getTime();
step_number = 0;
return getAction(o);
}
public Action agent_step(double r, Observation o) {
step_number++;
total_steps++;
return getAction(o);
}
public void agent_end(double r) {
long time_passed = new Date().getTime() - trial_start;
if (this_actions.size() > 7) {
last_actions = this_actions;
last_actions.setSize(last_actions.size() - 7);
} else {
last_actions = new Vector<Action>();
}
this_actions = new Vector<Action>();
System.out.println("ended after " + total_steps + " total steps");
System.out.println("average " + 1000.0 * step_number / time_passed + " steps per second");
}
public String agent_message(String msg) {
// System.out.println("message asked:"+msg);
return null;
}
Action getAction(Observation o) {
if (last_actions.size() > step_number) {
Action act = last_actions.get(step_number);
this_actions.add(act);
return act;
}
Monster mario = ExampleMarioAgent.getMario(o);
Monster[] monsters = ExampleMarioAgent.getMonsters(o);
/*
* sometimes jump for no reason at all. at the end of this function,
* the value of this variable will be compared against a random number
* to see if mario jumps
*/
double jump_hesitation = .95;
/*
* Check the blocks in the area to mario's upper right
*/
for (int up = 0; up < 5; up++) {
for (int right = 0; right < 7; right++) {
char tile = ExampleMarioAgent.getTileAt(mario.x + right, mario.y + up, o);
if (tile == '$') {
// jump often if there is a coin
jump_hesitation *= .7;
} else if (tile == ' ' || tile == 'M' || tile == '\0') {
// don't worry if it is blank space
} else {
// tend to jump more if there is a block closer
jump_hesitation *= 1.0 * right / 7;
}
}
}
/*
* Search for a pit in front of mario.
*/
boolean is_pit = false;
for (int right = 0; !is_pit && right < 3; right++) {
boolean pit_col = true;
for (int down = 0; pit_col && mario.y - down >= 0; down++) {
char tile = ExampleMarioAgent.getTileAt(mario.x + right, mario.y - down, o);
if (tile != ' ' && tile != 'M' && tile != '\0') {
pit_col = false;
}
}
if (pit_col) {
is_pit = true;
}
}
if (is_pit) {
// always jump if there is a pit
jump_hesitation = 0;
}
/*
* Look for nearby monsters by checking the positions against mario's
*/
boolean monster_near = false;
for (Monster m : monsters) {
if (m.type == 0 || m.type == 10 || m.type == 11) {
// m is mario
continue;
}
double dx = m.x - mario.x;
double dy = m.y - mario.y;
if (dx > -1 && dx < 10 && dy > -4 && dy < 4) {
/* the more monsters and the closer they are, the more likely
* mario is to jump.
*/
jump_hesitation *= (dx + 2) / 12;
monster_near = true;
}
}
// hold down the jump button while in the air sometimes, to jump higher
if (mario.sy > .1) {
jump_hesitation *= .5;
}
// Sometimes hesitate if there is a monster near.
if (walk_hesitating) {
if (!monster_near || rand.nextDouble() > .8) {
walk_hesitating = false;
} else if (rand.nextDouble() > .9) {
walk_hesitating = false;
}
} else if (monster_near && rand.nextDouble() > .8) {
walk_hesitating = true;
} // sometimes hesitate even if there isn't one
else if (rand.nextDouble() > .9) {
walk_hesitating = true;
}
Action act = new Action(3, 0);
// -1, 0, 1 for direction, 1 is to the right
act.intArray[0] = walk_hesitating ? 0 : 1;
// 0, 1 for jump
act.intArray[1] = rand.nextDouble() > jump_hesitation ? 1 : 0;
// 0, 1 for speed
act.intArray[2] = (is_pit || !monster_near) ? 1 : 0;//rand.nextBoolean()?1:0;
//add the action to the trajectory being recorded, so it can be reused next trial
this_actions.add(act);
return act;
}
public static void main(String[] args) {
new AgentLoader(new ExampleMarioAgent()).run();
}
}
|
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ConnectorSplitSource;
import com.facebook.presto.spi.RecordCursor;
import com.facebook.presto.spi.RecordSet;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.connector.ConnectorTransactionHandle;
import com.facebook.presto.spi.predicate.Domain;
import com.facebook.presto.spi.predicate.Range;
import com.facebook.presto.spi.predicate.TupleDomain;
import com.facebook.presto.spi.predicate.ValueSet;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.facebook.presto.spi.type.VarcharType.createVarcharType;
import static com.facebook.presto.testing.TestingConnectorSession.SESSION;
import static com.google.common.collect.Iterables.getOnlyElement;
import static io.airlift.concurrent.MoreFutures.getFutureValue;
import static io.airlift.slice.Slices.utf8Slice;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
@Test
public class TestJdbcRecordSetProvider
{
private TestingDatabase database;
private JdbcClient jdbcClient;
private JdbcSplit split;
private JdbcTableHandle table;
private JdbcColumnHandle textColumn;
private JdbcColumnHandle textShortColumn;
private JdbcColumnHandle valueColumn;
@BeforeClass
public void setUp()
throws Exception
{
database = new TestingDatabase();
jdbcClient = database.getJdbcClient();
split = database.getSplit("example", "numbers");
table = jdbcClient.getTableHandle(new SchemaTableName("example", "numbers"));
Map<String, JdbcColumnHandle> columns = database.getColumnHandles("example", "numbers");
textColumn = columns.get("text");
textShortColumn = columns.get("text_short");
valueColumn = columns.get("value");
}
@AfterClass
public void tearDown()
throws Exception
{
database.close();
}
@Test
public void testGetRecordSet()
throws Exception
{
ConnectorTransactionHandle transaction = new JdbcTransactionHandle();
JdbcRecordSetProvider recordSetProvider = new JdbcRecordSetProvider(jdbcClient);
RecordSet recordSet = recordSetProvider.getRecordSet(transaction, SESSION, split, ImmutableList.of(textColumn, textShortColumn, valueColumn));
assertNotNull(recordSet, "recordSet is null");
RecordCursor cursor = recordSet.cursor();
assertNotNull(cursor, "cursor is null");
Map<String, Long> data = new LinkedHashMap<>();
while (cursor.advanceNextPosition()) {
data.put(cursor.getSlice(0).toStringUtf8(), cursor.getLong(2));
assertEquals(cursor.getSlice(0), cursor.getSlice(1));
}
assertEquals(data, ImmutableMap.<String, Long>builder()
.put("one", 1L)
.put("two", 2L)
.put("three", 3L)
.put("ten", 10L)
.put("eleven", 11L)
.put("twelve", 12L)
.build());
}
@Test
public void testTupleDomain()
throws Exception
{
// single value
getCursor(table, ImmutableList.of(textColumn, valueColumn), TupleDomain.withColumnDomains(
ImmutableMap.of(textColumn, Domain.singleValue(VARCHAR, utf8Slice("foo")))
));
// multiple values (string)
getCursor(table, ImmutableList.of(textColumn, valueColumn), TupleDomain.withColumnDomains(
ImmutableMap.of(textColumn, Domain.union(ImmutableList.of(Domain.singleValue(VARCHAR, utf8Slice("foo")), Domain.singleValue(VARCHAR, utf8Slice("bar")))))
));
// inequality (string)
getCursor(table, ImmutableList.of(textColumn, valueColumn), TupleDomain.withColumnDomains(
ImmutableMap.of(textColumn, Domain.create(ValueSet.ofRanges(Range.greaterThan(VARCHAR, utf8Slice("foo"))), false))
));
getCursor(table, ImmutableList.of(textColumn, valueColumn), TupleDomain.withColumnDomains(
ImmutableMap.of(textColumn, Domain.create(ValueSet.ofRanges(Range.greaterThanOrEqual(VARCHAR, utf8Slice("foo"))), false))
));
getCursor(table, ImmutableList.of(textColumn, valueColumn), TupleDomain.withColumnDomains(
ImmutableMap.of(textColumn, Domain.create(ValueSet.ofRanges(Range.lessThanOrEqual(VARCHAR, utf8Slice("foo"))), false))
));
getCursor(table, ImmutableList.of(textColumn, valueColumn), TupleDomain.withColumnDomains(
ImmutableMap.of(textColumn, Domain.create(ValueSet.ofRanges(Range.lessThan(VARCHAR, utf8Slice("foo"))), false))
));
// is null
getCursor(table, ImmutableList.of(textColumn, valueColumn), TupleDomain.withColumnDomains(
ImmutableMap.of(textColumn, Domain.onlyNull(VARCHAR))
));
// not null
getCursor(table, ImmutableList.of(textColumn, valueColumn), TupleDomain.withColumnDomains(
ImmutableMap.of(textColumn, Domain.notNull(VARCHAR))
));
// specific value or null
getCursor(table, ImmutableList.of(textColumn, valueColumn), TupleDomain.withColumnDomains(
ImmutableMap.of(textColumn, Domain.union(ImmutableList.of(Domain.singleValue(VARCHAR, utf8Slice("foo")), Domain.onlyNull(VARCHAR))))
));
getCursor(table, ImmutableList.of(textColumn, valueColumn), TupleDomain.withColumnDomains(
ImmutableMap.of(textColumn, Domain.create(ValueSet.ofRanges(Range.range(VARCHAR, utf8Slice("bar"), true, utf8Slice("foo"), true)), false))
));
getCursor(table, ImmutableList.of(textColumn, textShortColumn, valueColumn), TupleDomain.withColumnDomains(
ImmutableMap.of(
textColumn,
Domain.create(ValueSet.ofRanges(
Range.range(VARCHAR, utf8Slice("bar"), true, utf8Slice("foo"), true),
Range.range(VARCHAR, utf8Slice("hello"), false, utf8Slice("world"), false)),
false
),
textShortColumn,
Domain.create(ValueSet.ofRanges(
Range.range(createVarcharType(32), utf8Slice("bar"), true, utf8Slice("foo"), true),
Range.range(createVarcharType(32), utf8Slice("hello"), false, utf8Slice("world"), false)),
false
)
)
));
getCursor(table, ImmutableList.of(textColumn, valueColumn), TupleDomain.withColumnDomains(
ImmutableMap.of(
textColumn,
Domain.create(ValueSet.ofRanges(
Range.range(VARCHAR, utf8Slice("bar"), true, utf8Slice("foo"), true),
Range.range(VARCHAR, utf8Slice("hello"), false, utf8Slice("world"), false),
Range.equal(VARCHAR, utf8Slice("apple")),
Range.equal(VARCHAR, utf8Slice("banana")),
Range.equal(VARCHAR, utf8Slice("zoo"))),
false
),
valueColumn,
Domain.create(ValueSet.ofRanges(
Range.range(BIGINT, 1L, true, 5L, true),
Range.range(BIGINT, 10L, false, 20L, false)),
true
)
)
));
}
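/**
 * Helper for the tuple domain tests above: wraps the domain in a
 * JdbcTableLayoutHandle, takes the only split returned by the JdbcClient,
 * and opens a record cursor over the requested columns for that split.
 */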
private RecordCursor getCursor(JdbcTableHandle jdbcTableHandle, List<JdbcColumnHandle> columns, TupleDomain<ColumnHandle> domain)
throws InterruptedException
{
JdbcTableLayoutHandle layoutHandle = new JdbcTableLayoutHandle(jdbcTableHandle, domain);
ConnectorSplitSource splits = jdbcClient.getSplits(layoutHandle);
JdbcSplit split = (JdbcSplit) getOnlyElement(getFutureValue(splits.getNextBatch(1000)));
ConnectorTransactionHandle transaction = new JdbcTransactionHandle();
JdbcRecordSetProvider recordSetProvider = new JdbcRecordSetProvider(jdbcClient);
RecordSet recordSet = recordSetProvider.getRecordSet(transaction, SESSION, split, columns);
return recordSet.cursor();
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.datanode;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.StorageType;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.BlockListAsLongs;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.ReplicaOutputStreams;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetFactory;
import org.apache.hadoop.util.DataChecksum;
import org.junit.Before;
import org.junit.Test;
/**
* this class tests the methods of the SimulatedFSDataset.
*/
public class TestSimulatedFSDataset {
Configuration conf = null;
static final String bpid = "BP-TEST";
static final int NUMBLOCKS = 20;
static final int BLOCK_LENGTH_MULTIPLIER = 79;
@Before
public void setUp() throws Exception {
conf = new HdfsConfiguration();
SimulatedFSDataset.setFactory(conf);
}
long blockIdToLen(long blkid) {
return blkid*BLOCK_LENGTH_MULTIPLIER;
}
int addSomeBlocks(SimulatedFSDataset fsdataset, int startingBlockId)
throws IOException {
int bytesAdded = 0;
for (int i = startingBlockId; i < startingBlockId+NUMBLOCKS; ++i) {
ExtendedBlock b = new ExtendedBlock(bpid, i, 0, 0);
// we pass the expected length as zero - fsdataset should use the size of the
// actual data written
ReplicaInPipelineInterface bInfo = fsdataset.createRbw(
StorageType.DEFAULT, b, false).getReplica();
ReplicaOutputStreams out = bInfo.createStreams(true,
DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 512));
try {
OutputStream dataOut = out.getDataOut();
assertEquals(0, fsdataset.getLength(b));
for (int j=1; j <= blockIdToLen(i); ++j) {
dataOut.write(j);
assertEquals(j, bInfo.getBytesOnDisk()); // correct length even as we write
bytesAdded++;
}
} finally {
out.close();
}
b.setNumBytes(blockIdToLen(i));
fsdataset.finalizeBlock(b);
assertEquals(blockIdToLen(i), fsdataset.getLength(b));
}
return bytesAdded;
}
int addSomeBlocks(SimulatedFSDataset fsdataset ) throws IOException {
return addSomeBlocks(fsdataset, 1);
}
@Test
public void testFSDatasetFactory() {
final Configuration conf = new Configuration();
FsDatasetSpi.Factory<?> f = FsDatasetSpi.Factory.getFactory(conf);
assertEquals(FsDatasetFactory.class, f.getClass());
assertFalse(f.isSimulated());
SimulatedFSDataset.setFactory(conf);
FsDatasetSpi.Factory<?> s = FsDatasetSpi.Factory.getFactory(conf);
assertEquals(SimulatedFSDataset.Factory.class, s.getClass());
assertTrue(s.isSimulated());
}
@Test
public void testGetMetaData() throws IOException {
final SimulatedFSDataset fsdataset = getSimulatedFSDataset();
ExtendedBlock b = new ExtendedBlock(bpid, 1, 5, 0);
try {
assertTrue(fsdataset.getMetaDataInputStream(b) == null);
assertTrue("Expected an IO exception", false);
} catch (IOException e) {
// ok - as expected
}
addSomeBlocks(fsdataset); // Only need to add one but ....
b = new ExtendedBlock(bpid, 1, 0, 0);
InputStream metaInput = fsdataset.getMetaDataInputStream(b);
DataInputStream metaDataInput = new DataInputStream(metaInput);
short version = metaDataInput.readShort();
assertEquals(BlockMetadataHeader.VERSION, version);
DataChecksum checksum = DataChecksum.newDataChecksum(metaDataInput);
assertEquals(DataChecksum.Type.NULL, checksum.getChecksumType());
assertEquals(0, checksum.getChecksumSize());
}
@Test
public void testStorageUsage() throws IOException {
final SimulatedFSDataset fsdataset = getSimulatedFSDataset();
assertEquals(fsdataset.getDfsUsed(), 0);
assertEquals(fsdataset.getRemaining(), fsdataset.getCapacity());
int bytesAdded = addSomeBlocks(fsdataset);
assertEquals(bytesAdded, fsdataset.getDfsUsed());
assertEquals(fsdataset.getCapacity()-bytesAdded, fsdataset.getRemaining());
}
void checkBlockDataAndSize(SimulatedFSDataset fsdataset, ExtendedBlock b,
long expectedLen) throws IOException {
InputStream input = fsdataset.getBlockInputStream(b);
long lengthRead = 0;
int data;
while ((data = input.read()) != -1) {
assertEquals(SimulatedFSDataset.DEFAULT_DATABYTE, data);
lengthRead++;
}
assertEquals(expectedLen, lengthRead);
}
@Test
public void testWriteRead() throws IOException {
final SimulatedFSDataset fsdataset = getSimulatedFSDataset();
addSomeBlocks(fsdataset);
for (int i=1; i <= NUMBLOCKS; ++i) {
ExtendedBlock b = new ExtendedBlock(bpid, i, 0, 0);
assertTrue(fsdataset.isValidBlock(b));
assertEquals(blockIdToLen(i), fsdataset.getLength(b));
checkBlockDataAndSize(fsdataset, b, blockIdToLen(i));
}
}
@Test
public void testGetBlockReport() throws IOException {
SimulatedFSDataset fsdataset = getSimulatedFSDataset();
BlockListAsLongs blockReport = fsdataset.getBlockReport(bpid);
assertEquals(0, blockReport.getNumberOfBlocks());
addSomeBlocks(fsdataset);
blockReport = fsdataset.getBlockReport(bpid);
assertEquals(NUMBLOCKS, blockReport.getNumberOfBlocks());
for (Block b: blockReport) {
assertNotNull(b);
assertEquals(blockIdToLen(b.getBlockId()), b.getNumBytes());
}
}
@Test
public void testInjectionEmpty() throws IOException {
SimulatedFSDataset fsdataset = getSimulatedFSDataset();
BlockListAsLongs blockReport = fsdataset.getBlockReport(bpid);
assertEquals(0, blockReport.getNumberOfBlocks());
int bytesAdded = addSomeBlocks(fsdataset);
blockReport = fsdataset.getBlockReport(bpid);
assertEquals(NUMBLOCKS, blockReport.getNumberOfBlocks());
for (Block b: blockReport) {
assertNotNull(b);
assertEquals(blockIdToLen(b.getBlockId()), b.getNumBytes());
}
// Inject blocks into an empty fsdataset
// - injecting the blocks we got above.
SimulatedFSDataset sfsdataset = getSimulatedFSDataset();
sfsdataset.injectBlocks(bpid, blockReport);
blockReport = sfsdataset.getBlockReport(bpid);
assertEquals(NUMBLOCKS, blockReport.getNumberOfBlocks());
for (Block b: blockReport) {
assertNotNull(b);
assertEquals(blockIdToLen(b.getBlockId()), b.getNumBytes());
assertEquals(blockIdToLen(b.getBlockId()), sfsdataset
.getLength(new ExtendedBlock(bpid, b)));
}
assertEquals(bytesAdded, sfsdataset.getDfsUsed());
assertEquals(sfsdataset.getCapacity()-bytesAdded, sfsdataset.getRemaining());
}
@Test
public void testInjectionNonEmpty() throws IOException {
SimulatedFSDataset fsdataset = getSimulatedFSDataset();
BlockListAsLongs blockReport = fsdataset.getBlockReport(bpid);
assertEquals(0, blockReport.getNumberOfBlocks());
int bytesAdded = addSomeBlocks(fsdataset);
blockReport = fsdataset.getBlockReport(bpid);
assertEquals(NUMBLOCKS, blockReport.getNumberOfBlocks());
for (Block b: blockReport) {
assertNotNull(b);
assertEquals(blockIdToLen(b.getBlockId()), b.getNumBytes());
}
fsdataset = null;
// Inject blocks into an non-empty fsdataset
// - injecting the blocks we got above.
SimulatedFSDataset sfsdataset = getSimulatedFSDataset();
// Add some blocks whose block ids do not conflict with
// the ones we are going to inject.
bytesAdded += addSomeBlocks(sfsdataset, NUMBLOCKS+1);
sfsdataset.getBlockReport(bpid);
assertEquals(NUMBLOCKS, blockReport.getNumberOfBlocks());
sfsdataset.getBlockReport(bpid);
assertEquals(NUMBLOCKS, blockReport.getNumberOfBlocks());
sfsdataset.injectBlocks(bpid, blockReport);
blockReport = sfsdataset.getBlockReport(bpid);
assertEquals(NUMBLOCKS*2, blockReport.getNumberOfBlocks());
for (Block b: blockReport) {
assertNotNull(b);
assertEquals(blockIdToLen(b.getBlockId()), b.getNumBytes());
assertEquals(blockIdToLen(b.getBlockId()), sfsdataset
.getLength(new ExtendedBlock(bpid, b)));
}
assertEquals(bytesAdded, sfsdataset.getDfsUsed());
assertEquals(sfsdataset.getCapacity()-bytesAdded, sfsdataset.getRemaining());
// Now test that the dataset cannot be created if it does not have sufficient capacity
conf.setLong(SimulatedFSDataset.CONFIG_PROPERTY_CAPACITY, 10);
try {
sfsdataset = getSimulatedFSDataset();
sfsdataset.addBlockPool(bpid, conf);
sfsdataset.injectBlocks(bpid, blockReport);
assertTrue("Expected an IO exception", false);
} catch (IOException e) {
// ok - as expected
}
}
public void checkInvalidBlock(ExtendedBlock b) {
final SimulatedFSDataset fsdataset = getSimulatedFSDataset();
assertFalse(fsdataset.isValidBlock(b));
try {
fsdataset.getLength(b);
assertTrue("Expected an IO exception", false);
} catch (IOException e) {
// ok - as expected
}
try {
fsdataset.getBlockInputStream(b);
assertTrue("Expected an IO exception", false);
} catch (IOException e) {
// ok - as expected
}
try {
fsdataset.finalizeBlock(b);
assertTrue("Expected an IO exception", false);
} catch (IOException e) {
// ok - as expected
}
}
@Test
public void testInValidBlocks() throws IOException {
final SimulatedFSDataset fsdataset = getSimulatedFSDataset();
ExtendedBlock b = new ExtendedBlock(bpid, 1, 5, 0);
checkInvalidBlock(b);
// Now check an invalid block after adding some blocks
addSomeBlocks(fsdataset);
b = new ExtendedBlock(bpid, NUMBLOCKS + 99, 5, 0);
checkInvalidBlock(b);
}
@Test
public void testInvalidate() throws IOException {
final SimulatedFSDataset fsdataset = getSimulatedFSDataset();
int bytesAdded = addSomeBlocks(fsdataset);
Block[] deleteBlocks = new Block[2];
deleteBlocks[0] = new Block(1, 0, 0);
deleteBlocks[1] = new Block(2, 0, 0);
fsdataset.invalidate(bpid, deleteBlocks);
checkInvalidBlock(new ExtendedBlock(bpid, deleteBlocks[0]));
checkInvalidBlock(new ExtendedBlock(bpid, deleteBlocks[1]));
long sizeDeleted = blockIdToLen(1) + blockIdToLen(2);
assertEquals(bytesAdded-sizeDeleted, fsdataset.getDfsUsed());
assertEquals(fsdataset.getCapacity()-bytesAdded+sizeDeleted, fsdataset.getRemaining());
// Now make sure the rest of the blocks are valid
for (int i=3; i <= NUMBLOCKS; ++i) {
Block b = new Block(i, 0, 0);
assertTrue(fsdataset.isValidBlock(new ExtendedBlock(bpid, b)));
}
}
private SimulatedFSDataset getSimulatedFSDataset() {
SimulatedFSDataset fsdataset = new SimulatedFSDataset(null, conf);
fsdataset.addBlockPool(bpid, conf);
return fsdataset;
}
}
|
|
package com.smcpartners.shape.frameworks.data.dao.shape.impl;
import com.diffplug.common.base.Errors;
import com.smcpartners.shape.frameworks.data.dao.shape.OrganizationMeasureDAO;
import com.smcpartners.shape.frameworks.data.entitymodel.shape.MeasureEntity;
import com.smcpartners.shape.frameworks.data.entitymodel.shape.OrganizationEntity;
import com.smcpartners.shape.frameworks.data.entitymodel.shape.OrganizationMeasureEntity;
import com.smcpartners.shape.frameworks.data.entitymodel.shape.UserEntity;
import com.smcpartners.shape.frameworks.data.exceptions.DataAccessException;
import com.smcpartners.shape.frameworks.producers.annotations.ShapeDatabase;
import com.smcpartners.shape.shared.dto.shape.OrganizationMeasureDTO;
import javax.ejb.Stateless;
import javax.inject.Inject;
import javax.persistence.EntityManager;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
/**
* Responsible:<br/>
* 1. Implements OrganizationMeasureDAO<br/>
* <p>
* Created by johndestefano on 10/29/15.
* </p>
* <p>
* Changes:<br/>
* 1. <br/>
* </p>
*/
@Stateless
public class OrganizationMeasureDAOImpl extends AbstractCrudDAO<OrganizationMeasureDTO, OrganizationMeasureEntity, Integer> implements OrganizationMeasureDAO {
/**
* Constructor
*/
@Inject
public OrganizationMeasureDAOImpl(@ShapeDatabase EntityManager em) {
this.em = em;
}
@Override
public List<OrganizationMeasureDTO> findAllOrganizationMeasure() throws DataAccessException {
try {
List<OrganizationMeasureEntity> omLst = em.createNamedQuery("OrganizationMeasure.findAll").getResultList();
List<OrganizationMeasureDTO> retLst = new ArrayList<>();
if (omLst != null && omLst.size() > 0) {
omLst.forEach(Errors.rethrow().wrap(om -> {
OrganizationMeasureDTO dto = this.mapEntityToDTO(om);
retLst.add(dto);
}));
}
return retLst;
} catch (Exception e) {
log.logp(Level.SEVERE, this.getClass().getName(), "findAllOrganizationMeasure", e.getMessage(), e);
throw new DataAccessException(e);
}
}
@Override
public List<OrganizationMeasureDTO> findAllOrganizationMeasureByOrgId(int orgId) throws DataAccessException {
try {
OrganizationEntity oe = em.find(OrganizationEntity.class, orgId);
List<OrganizationMeasureEntity> omLst = em.createNamedQuery("OrganizationMeasure.findAllByOrgId")
.setParameter("org", oe)
.getResultList();
List<OrganizationMeasureDTO> retLst = new ArrayList<>();
if (omLst != null && omLst.size() > 0) {
omLst.forEach(Errors.rethrow().wrap(om -> {
OrganizationMeasureDTO dto = this.mapEntityToDTO(om);
retLst.add(dto);
}));
}
return retLst;
} catch (Exception e) {
log.logp(Level.SEVERE, this.getClass().getName(), "findAllOrganizationMeasureByOrgId", e.getMessage(), e);
throw new DataAccessException(e);
}
}
@Override
public List<OrganizationMeasureDTO> findOrgMeasureByMeasureIdAndYear(int measureId, int year) throws DataAccessException {
try {
MeasureEntity me = em.find(MeasureEntity.class, measureId);
List<OrganizationMeasureEntity> omLst = em.createNamedQuery("OrganizationMeasure.findByMeasAndYear")
.setParameter("meas", me)
.setParameter("year", year)
.getResultList();
List<OrganizationMeasureDTO> retLst = new ArrayList<>();
if (omLst != null && omLst.size() > 0) {
omLst.forEach(Errors.rethrow().wrap(om -> {
OrganizationMeasureDTO dto = this.mapEntityToDTO(om);
retLst.add(dto);
}));
}
return retLst;
} catch (Exception e) {
log.logp(Level.SEVERE, this.getClass().getName(), "findOrgMeasureByOrgIdAndMeasureId", e.getMessage(), e);
throw new DataAccessException(e);
}
}
@Override
public List<OrganizationMeasureDTO> findOrgMeasureByMeasureIdAndYearAndOrg(int measureId, int year, int orgId) throws DataAccessException {
try {
MeasureEntity me = em.find(MeasureEntity.class, measureId);
OrganizationEntity org = em.find(OrganizationEntity.class, orgId);
List<OrganizationMeasureEntity> omLst = em.createNamedQuery("OrganizationMeasure.findByMeasYearOrg")
.setParameter("meas", me)
.setParameter("year", year)
.setParameter("org", org)
.getResultList();
List<OrganizationMeasureDTO> retLst = new ArrayList<>();
if (omLst != null && omLst.size() > 0) {
omLst.forEach(Errors.rethrow().wrap(om -> {
OrganizationMeasureDTO dto = this.mapEntityToDTO(om);
retLst.add(dto);
}));
}
return retLst;
} catch (Exception e) {
log.logp(Level.SEVERE, this.getClass().getName(), "findOrgMeasureByMeasureIdAndYearAndOrg", e.getMessage(), e);
throw new DataAccessException(e);
}
}
@Override
protected OrganizationMeasureEntity mapDtoToEntity(OrganizationMeasureEntity et, OrganizationMeasureDTO dto) {
et.setAge1844Num(dto.getAge1844Num());
et.setAge1844Den(dto.getAge1844Den());
et.setAge4564Num(dto.getAge4564Num());
et.setAge4564Den(dto.getAge4564Den());
et.setAgeOver65Num(dto.getAgeOver65Num());
et.setAgeOver65Den(dto.getAgeOver65Den());
et.setDenominatorValue(dto.getDenominatorValue());
et.setEthnicityHispanicLatinoNum(dto.getEthnicityHispanicLatinoNum());
et.setEthnicityHispanicLatinoDen(dto.getEthnicityHispanicLatinoDen());
et.setEthnicityNotHispanicLatinoNum(dto.getEthnicityNotHispanicLatinoNum());
et.setEthnicityNotHispanicLatinoDen(dto.getEthnicityNotHispanicLatinoDen());
et.setGenderFemaleNum(dto.getGenderFemaleNum());
et.setGenderFemaleDen(dto.getGenderFemaleDen());
et.setGenderMaleNum(dto.getGenderMaleNum());
et.setGenderMaleDen(dto.getGenderMaleDen());
et.setId(dto.getId());
et.setNumeratorValue(dto.getNumeratorValue());
et.setRaceAfricanAmericanNum(dto.getRaceAfricanAmericanNum());
et.setRaceAfricanAmericanDen(dto.getRaceAfricanAmericanDen());
et.setRaceAmericanIndianNum(dto.getRaceAmericanIndianNum());
et.setRaceAmericanIndianDen(dto.getRaceAmericanIndianDen());
et.setRaceAsianNum(dto.getRaceAsianNum());
et.setRaceAsianDen(dto.getRaceAsianDen());
et.setRaceNativeHawaiianNum(dto.getRaceNativeHawaiianNum());
et.setRaceNativeHawaiianDen(dto.getRaceNativeHawaiianDen());
et.setRaceOtherNum(dto.getRaceOtherNum());
et.setRaceOtherDen(dto.getRaceOtherDen());
et.setRaceWhiteNum(dto.getRaceWhiteNum());
et.setRaceWhiteDen(dto.getRaceWhiteDen());
et.setReportPeriodYear(dto.getReportPeriodYear());
et.setRpDate(dto.getRpDate());
// Organization
OrganizationEntity org = em.find(OrganizationEntity.class, dto.getOrganizationId());
et.setOrganizationByOrganizationId(org);
// Measure
MeasureEntity measure = em.find(MeasureEntity.class, dto.getMeasureId());
et.setMeasureByMeasureId(measure);
// User
UserEntity user = em.find(UserEntity.class, dto.getUserId());
et.setUserByUserId(user);
// Return
return et;
}
@Override
protected Class<OrganizationMeasureEntity> getGenericEntityClass() throws Exception {
return OrganizationMeasureEntity.class;
}
@Override
protected OrganizationMeasureDTO mapEntityToDTO(OrganizationMeasureEntity e) throws Exception {
OrganizationMeasureDTO d = new OrganizationMeasureDTO();
d.setAge1844Num(e.getAge1844Num());
d.setAge1844Den(e.getAge1844Den());
d.setAge4564Num(e.getAge4564Num());
d.setAge4564Den(e.getAge4564Den());
d.setAgeOver65Num(e.getAgeOver65Num());
d.setAgeOver65Den(e.getAgeOver65Den());
d.setDenominatorValue(e.getDenominatorValue());
d.setEthnicityHispanicLatinoNum(e.getEthnicityHispanicLatinoNum());
d.setEthnicityHispanicLatinoDen(e.getEthnicityHispanicLatinoDen());
d.setEthnicityNotHispanicLatinoNum(e.getEthnicityNotHispanicLatinoNum());
d.setEthnicityNotHispanicLatinoDen(e.getEthnicityNotHispanicLatinoDen());
d.setGenderFemaleNum(e.getGenderFemaleNum());
d.setGenderFemaleDen(e.getGenderFemaleDen());
d.setGenderMaleNum(e.getGenderMaleNum());
d.setGenderMaleDen(e.getGenderMaleDen());
d.setId(e.getId());
d.setNumeratorValue(e.getNumeratorValue());
d.setRaceAfricanAmericanNum(e.getRaceAfricanAmericanNum());
d.setRaceAfricanAmericanDen(e.getRaceAfricanAmericanDen());
d.setRaceAmericanIndianNum(e.getRaceAmericanIndianNum());
d.setRaceAmericanIndianDen(e.getRaceAmericanIndianDen());
d.setRaceAsianNum(e.getRaceAsianNum());
d.setRaceAsianDen(e.getRaceAsianDen());
d.setRaceNativeHawaiianNum(e.getRaceNativeHawaiianNum());
d.setRaceNativeHawaiianDen(e.getRaceNativeHawaiianDen());
d.setRaceOtherNum(e.getRaceOtherNum());
d.setRaceOtherDen(e.getRaceOtherDen());
d.setRaceWhiteNum(e.getRaceWhiteNum());
d.setRaceWhiteDen(e.getRaceWhiteDen());
d.setId(e.getId());
d.setMeasureId(e.getMeasureByMeasureId().getId());
d.setOrganizationId(e.getOrganizationByOrganizationId().getId());
d.setUserId(e.getUserByUserId().getId());
d.setReportPeriodYear(e.getReportPeriodYear());
d.setRpDate(e.getRpDate());
return d;
}
}
|
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.auditmanager.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* The metadata that's associated with the delegation.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/auditmanager-2017-07-25/DelegationMetadata" target="_top">AWS
* API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DelegationMetadata implements Serializable, Cloneable, StructuredPojo {
/**
* <p>
* The unique identifier for the delegation.
* </p>
*/
private String id;
/**
* <p>
* The name of the associated assessment.
* </p>
*/
private String assessmentName;
/**
* <p>
* The unique identifier for the assessment.
* </p>
*/
private String assessmentId;
/**
* <p>
* The current status of the delegation.
* </p>
*/
private String status;
/**
* <p>
* The Amazon Resource Name (ARN) of the IAM role.
* </p>
*/
private String roleArn;
/**
* <p>
* Specifies when the delegation was created.
* </p>
*/
private java.util.Date creationTime;
/**
* <p>
* Specifies the name of the control set that was delegated for review.
* </p>
*/
private String controlSetName;
/**
* <p>
* The unique identifier for the delegation.
* </p>
*
* @param id
* The unique identifier for the delegation.
*/
public void setId(String id) {
this.id = id;
}
/**
* <p>
* The unique identifier for the delegation.
* </p>
*
* @return The unique identifier for the delegation.
*/
public String getId() {
return this.id;
}
/**
* <p>
* The unique identifier for the delegation.
* </p>
*
* @param id
* The unique identifier for the delegation.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DelegationMetadata withId(String id) {
setId(id);
return this;
}
/**
* <p>
* The name of the associated assessment.
* </p>
*
* @param assessmentName
* The name of the associated assessment.
*/
public void setAssessmentName(String assessmentName) {
this.assessmentName = assessmentName;
}
/**
* <p>
* The name of the associated assessment.
* </p>
*
* @return The name of the associated assessment.
*/
public String getAssessmentName() {
return this.assessmentName;
}
/**
* <p>
* The name of the associated assessment.
* </p>
*
* @param assessmentName
* The name of the associated assessment.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DelegationMetadata withAssessmentName(String assessmentName) {
setAssessmentName(assessmentName);
return this;
}
/**
* <p>
* The unique identifier for the assessment.
* </p>
*
* @param assessmentId
* The unique identifier for the assessment.
*/
public void setAssessmentId(String assessmentId) {
this.assessmentId = assessmentId;
}
/**
* <p>
* The unique identifier for the assessment.
* </p>
*
* @return The unique identifier for the assessment.
*/
public String getAssessmentId() {
return this.assessmentId;
}
/**
* <p>
* The unique identifier for the assessment.
* </p>
*
* @param assessmentId
* The unique identifier for the assessment.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DelegationMetadata withAssessmentId(String assessmentId) {
setAssessmentId(assessmentId);
return this;
}
/**
* <p>
* The current status of the delegation.
* </p>
*
* @param status
* The current status of the delegation.
* @see DelegationStatus
*/
public void setStatus(String status) {
this.status = status;
}
/**
* <p>
* The current status of the delegation.
* </p>
*
* @return The current status of the delegation.
* @see DelegationStatus
*/
public String getStatus() {
return this.status;
}
/**
* <p>
* The current status of the delegation.
* </p>
*
* @param status
* The current status of the delegation.
* @return Returns a reference to this object so that method calls can be chained together.
* @see DelegationStatus
*/
public DelegationMetadata withStatus(String status) {
setStatus(status);
return this;
}
/**
* <p>
* The current status of the delegation.
* </p>
*
* @param status
* The current status of the delegation.
* @return Returns a reference to this object so that method calls can be chained together.
* @see DelegationStatus
*/
public DelegationMetadata withStatus(DelegationStatus status) {
this.status = status.toString();
return this;
}
/**
* <p>
* The Amazon Resource Name (ARN) of the IAM role.
* </p>
*
* @param roleArn
* The Amazon Resource Name (ARN) of the IAM role.
*/
public void setRoleArn(String roleArn) {
this.roleArn = roleArn;
}
/**
* <p>
* The Amazon Resource Name (ARN) of the IAM role.
* </p>
*
* @return The Amazon Resource Name (ARN) of the IAM role.
*/
public String getRoleArn() {
return this.roleArn;
}
/**
* <p>
* The Amazon Resource Name (ARN) of the IAM role.
* </p>
*
* @param roleArn
* The Amazon Resource Name (ARN) of the IAM role.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DelegationMetadata withRoleArn(String roleArn) {
setRoleArn(roleArn);
return this;
}
/**
* <p>
* Specifies when the delegation was created.
* </p>
*
* @param creationTime
* Specifies when the delegation was created.
*/
public void setCreationTime(java.util.Date creationTime) {
this.creationTime = creationTime;
}
/**
* <p>
* Specifies when the delegation was created.
* </p>
*
* @return Specifies when the delegation was created.
*/
public java.util.Date getCreationTime() {
return this.creationTime;
}
/**
* <p>
* Specifies when the delegation was created.
* </p>
*
* @param creationTime
* Specifies when the delegation was created.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DelegationMetadata withCreationTime(java.util.Date creationTime) {
setCreationTime(creationTime);
return this;
}
/**
* <p>
* Specifies the name of the control set that was delegated for review.
* </p>
*
* @param controlSetName
* Specifies the name of the control set that was delegated for review.
*/
public void setControlSetName(String controlSetName) {
this.controlSetName = controlSetName;
}
/**
* <p>
* Specifies the name of the control set that was delegated for review.
* </p>
*
* @return Specifies the name of the control set that was delegated for review.
*/
public String getControlSetName() {
return this.controlSetName;
}
/**
* <p>
* Specifies the name of the control set that was delegated for review.
* </p>
*
* @param controlSetName
* Specifies the name of the control set that was delegated for review.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DelegationMetadata withControlSetName(String controlSetName) {
setControlSetName(controlSetName);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getId() != null)
sb.append("Id: ").append(getId()).append(",");
if (getAssessmentName() != null)
sb.append("AssessmentName: ").append(getAssessmentName()).append(",");
if (getAssessmentId() != null)
sb.append("AssessmentId: ").append(getAssessmentId()).append(",");
if (getStatus() != null)
sb.append("Status: ").append(getStatus()).append(",");
if (getRoleArn() != null)
sb.append("RoleArn: ").append(getRoleArn()).append(",");
if (getCreationTime() != null)
sb.append("CreationTime: ").append(getCreationTime()).append(",");
if (getControlSetName() != null)
sb.append("ControlSetName: ").append(getControlSetName());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof DelegationMetadata == false)
return false;
DelegationMetadata other = (DelegationMetadata) obj;
if (other.getId() == null ^ this.getId() == null)
return false;
if (other.getId() != null && other.getId().equals(this.getId()) == false)
return false;
if (other.getAssessmentName() == null ^ this.getAssessmentName() == null)
return false;
if (other.getAssessmentName() != null && other.getAssessmentName().equals(this.getAssessmentName()) == false)
return false;
if (other.getAssessmentId() == null ^ this.getAssessmentId() == null)
return false;
if (other.getAssessmentId() != null && other.getAssessmentId().equals(this.getAssessmentId()) == false)
return false;
if (other.getStatus() == null ^ this.getStatus() == null)
return false;
if (other.getStatus() != null && other.getStatus().equals(this.getStatus()) == false)
return false;
if (other.getRoleArn() == null ^ this.getRoleArn() == null)
return false;
if (other.getRoleArn() != null && other.getRoleArn().equals(this.getRoleArn()) == false)
return false;
if (other.getCreationTime() == null ^ this.getCreationTime() == null)
return false;
if (other.getCreationTime() != null && other.getCreationTime().equals(this.getCreationTime()) == false)
return false;
if (other.getControlSetName() == null ^ this.getControlSetName() == null)
return false;
if (other.getControlSetName() != null && other.getControlSetName().equals(this.getControlSetName()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getId() == null) ? 0 : getId().hashCode());
hashCode = prime * hashCode + ((getAssessmentName() == null) ? 0 : getAssessmentName().hashCode());
hashCode = prime * hashCode + ((getAssessmentId() == null) ? 0 : getAssessmentId().hashCode());
hashCode = prime * hashCode + ((getStatus() == null) ? 0 : getStatus().hashCode());
hashCode = prime * hashCode + ((getRoleArn() == null) ? 0 : getRoleArn().hashCode());
hashCode = prime * hashCode + ((getCreationTime() == null) ? 0 : getCreationTime().hashCode());
hashCode = prime * hashCode + ((getControlSetName() == null) ? 0 : getControlSetName().hashCode());
return hashCode;
}
@Override
public DelegationMetadata clone() {
try {
return (DelegationMetadata) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
com.amazonaws.services.auditmanager.model.transform.DelegationMetadataMarshaller.getInstance().marshall(this, protocolMarshaller);
}
}
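/*
 * Hedged usage sketch (not part of the generated SDK): the example class below and all literal
 * values are illustrative only; the accessors used are the DelegationMetadata methods defined
 * above. It shows the fluent "with*" pattern the generated model exposes.
 */
class DelegationMetadataUsageExample {
    public static void main(String[] args) {
        DelegationMetadata metadata = new DelegationMetadata()
                .withId("delegation-1234")                       // hypothetical identifiers
                .withAssessmentName("Quarterly security assessment")
                .withAssessmentId("assessment-5678")
                .withStatus("IN_PROGRESS")                       // string form; a DelegationStatus overload also exists
                .withRoleArn("arn:aws:iam::123456789012:role/AuditDelegate")
                .withCreationTime(new java.util.Date())
                .withControlSetName("Access control");
        // toString() prints the populated fields; equals()/hashCode() compare them field by field.
        System.out.println(metadata);
    }
}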
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import javax.cache.Cache;
import javax.cache.configuration.Factory;
import javax.cache.integration.CacheWriterException;
import javax.cache.processor.MutableEntry;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.CacheEntryProcessor;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.cache.CachePeekMode;
import org.apache.ignite.cache.CacheWriteSynchronizationMode;
import org.apache.ignite.cache.store.CacheStore;
import org.apache.ignite.cache.store.CacheStoreAdapter;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.DataStorageConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.configuration.NearCacheConfiguration;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.lang.IgniteBiInClosure;
import org.apache.ignite.resources.IgniteInstanceResource;
import org.apache.ignite.testframework.MvccFeatureChecker;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.junit.Before;
import org.junit.Test;
import static org.apache.ignite.IgniteSystemProperties.IGNITE_SKIP_CONFIGURATION_CONSISTENCY_CHECK;
/**
* Tests for cache client with and without store.
*/
public class CacheClientStoreSelfTest extends GridCommonAbstractTest {
/** */
private static final String CACHE_NAME = "test-cache";
/** */
private volatile boolean nearEnabled;
/** */
private volatile Factory<CacheStore> factory;
/** */
private volatile CacheMode cacheMode;
/** */
private static volatile boolean loadedFromClient;
/** */
@Before
public void beforeCacheClientStoreSelfTest() {
MvccFeatureChecker.skipIfNotSupported(MvccFeatureChecker.Feature.CACHE_STORE);
}
/** {@inheritDoc} */
@Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);
boolean client = igniteInstanceName != null && igniteInstanceName.startsWith("client");
cfg.setClientMode(client);
if (client)
cfg.setDataStorageConfiguration(new DataStorageConfiguration());
CacheConfiguration cc = new CacheConfiguration(DEFAULT_CACHE_NAME);
cc.setName(CACHE_NAME);
cc.setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL);
cc.setCacheMode(cacheMode);
cc.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC);
cc.setBackups(1);
cc.setCacheStoreFactory(factory);
if (factory instanceof Factory3)
cc.setReadThrough(true);
if (client && nearEnabled)
cc.setNearConfiguration(new NearCacheConfiguration());
cfg.setCacheConfiguration(cc);
return cfg;
}
/** {@inheritDoc} */
@Override protected void afterTest() throws Exception {
stopAllGrids();
loadedFromClient = false;
}
/**
* @throws Exception If failed.
*/
@Test
public void testCorrectStore() throws Exception {
nearEnabled = false;
cacheMode = CacheMode.PARTITIONED;
factory = new Factory1();
startGrids(2);
Ignite ignite = startGrid("client-1");
IgniteCache<Object, Object> cache = ignite.cache(CACHE_NAME);
cache.get(0);
cache.getAll(F.asSet(0, 1));
cache.getAndPut(0, 0);
cache.getAndPutIfAbsent(0, 0);
cache.getAndRemove(0);
cache.getAndReplace(0, 0);
cache.put(0, 0);
cache.putAll(F.asMap(0, 0, 1, 1));
cache.putIfAbsent(0, 0);
cache.remove(0);
cache.remove(0, 0);
cache.removeAll(F.asSet(0, 1));
cache.removeAll();
cache.invoke(0, new EP());
cache.invokeAll(F.asSet(0, 1), new EP());
}
/**
* @throws Exception If failed.
*/
@Test
public void testInvalidStore() throws Exception {
nearEnabled = false;
cacheMode = CacheMode.PARTITIONED;
factory = new Factory1();
startGrids(2);
factory = new Factory2();
startGrid("client-1");
}
/**
* @throws Exception If failed.
*/
@Test
public void testDisabledConsistencyCheck() throws Exception {
nearEnabled = false;
cacheMode = CacheMode.PARTITIONED;
factory = new Factory1();
startGrids(2);
factory = new Factory2();
System.setProperty(IGNITE_SKIP_CONFIGURATION_CONSISTENCY_CHECK, "true");
startGrid("client-1");
factory = new Factory1();
System.clearProperty(IGNITE_SKIP_CONFIGURATION_CONSISTENCY_CHECK);
startGrid("client-2");
}
/**
* @throws Exception If failed.
*/
@Test
public void testNoStoreNearDisabled() throws Exception {
nearEnabled = false;
cacheMode = CacheMode.PARTITIONED;
factory = new Factory1();
startGrids(2);
doTestNoStore();
}
/**
* @throws Exception If failed.
*/
@Test
public void testNoStoreNearEnabled() throws Exception {
nearEnabled = true;
cacheMode = CacheMode.PARTITIONED;
factory = new Factory1();
startGrids(2);
doTestNoStore();
}
/**
* @throws Exception If failed.
*/
private void doTestNoStore() throws Exception {
factory = null;
Ignite ignite = startGrid("client-1");
IgniteCache<Object, Object> cache = ignite.cache(CACHE_NAME);
cache.get(0);
cache.getAll(F.asSet(0, 1));
cache.getAndPut(0, 0);
cache.getAndPutIfAbsent(0, 0);
cache.getAndRemove(0);
cache.getAndReplace(0, 0);
cache.put(0, 0);
cache.putAll(F.asMap(0, 0, 1, 1));
cache.putIfAbsent(0, 0);
cache.remove(0);
cache.remove(0, 0);
cache.removeAll(F.asSet(0, 1));
cache.removeAll();
cache.invoke(0, new EP());
cache.invokeAll(F.asSet(0, 1), new EP());
}
/**
* Load a cache created on the client as LOCAL and check that it is only loaded on the client.
*
* @throws Exception If failed.
*/
@Test
public void testLocalLoadClient() throws Exception {
cacheMode = CacheMode.LOCAL;
factory = new Factory3();
startGrids(2);
Ignite client = startGrid("client-1");
IgniteCache<Object, Object> cache = client.cache(CACHE_NAME);
cache.loadCache(null);
assertEquals(10, cache.localSize(CachePeekMode.ALL));
assertEquals(0, grid(0).cache(CACHE_NAME).localSize(CachePeekMode.ALL));
assertEquals(0, grid(1).cache(CACHE_NAME).localSize(CachePeekMode.ALL));
assert loadedFromClient;
}
/**
* Load, from a server node, a cache created on the client as LOCAL and check that it is only loaded on that server.
*
* @throws Exception If failed.
*/
@Test
public void testLocalLoadServer() throws Exception {
cacheMode = CacheMode.LOCAL;
factory = new Factory3();
startGrids(2);
Ignite client = startGrid("client-1");
IgniteCache cache = grid(0).cache(CACHE_NAME);
cache.loadCache(null);
assertEquals(10, cache.localSize(CachePeekMode.ALL));
assertEquals(0, grid(1).cache(CACHE_NAME).localSize(CachePeekMode.ALL));
assertEquals(0, client.cache(CACHE_NAME).localSize(CachePeekMode.ALL));
assert !loadedFromClient : "Loaded data from client!";
}
/**
* Load a cache created on the client as REPLICATED and check that it is only loaded on the servers.
*/
@Test
public void testReplicatedLoadFromClient() throws Exception {
cacheMode = CacheMode.REPLICATED;
factory = new Factory3();
startGrids(2);
Ignite client = startGrid("client-1");
IgniteCache cache = client.cache(CACHE_NAME);
cache.loadCache(null);
assertEquals(0, cache.localSize(CachePeekMode.ALL));
assertEquals(10, grid(0).cache(CACHE_NAME).localSize(CachePeekMode.ALL));
assertEquals(10, grid(1).cache(CACHE_NAME).localSize(CachePeekMode.ALL));
assert !loadedFromClient : "Loaded data from client!";
}
/**
* Load a cache created on the client as PARTITIONED and check that it is only loaded on the servers.
*/
@Test
public void testPartitionedLoadFromClient() throws Exception {
cacheMode = CacheMode.PARTITIONED;
factory = new Factory3();
startGrids(2);
Ignite client = startGrid("client-1");
IgniteCache cache = client.cache(CACHE_NAME);
cache.loadCache(null);
assertEquals(0, cache.localSize(CachePeekMode.ALL));
assertEquals(10, grid(0).cache(CACHE_NAME).localSize(CachePeekMode.ALL));
assertEquals(10, grid(1).cache(CACHE_NAME).localSize(CachePeekMode.ALL));
assert !loadedFromClient : "Loaded data from client!";
}
/**
*/
private static class Factory1 implements Factory<CacheStore> {
/** {@inheritDoc} */
@Override public CacheStore create() {
return null;
}
}
/**
*/
private static class Factory2 implements Factory<CacheStore> {
/** {@inheritDoc} */
@Override public CacheStore create() {
return null;
}
}
/**
*/
private static class Factory3 implements Factory<CacheStore> {
/** {@inheritDoc} */
@Override public CacheStore create() {
return new TestStore();
}
}
/**
*/
private static class EP implements CacheEntryProcessor {
/** {@inheritDoc} */
@Override public Object process(MutableEntry entry, Object... arguments) {
return null;
}
}
/**
* Test store that loads 10 items.
*/
public static class TestStore extends CacheStoreAdapter<Object, Object> {
/** */
@IgniteInstanceResource
private Ignite ignite;
/** {@inheritDoc} */
@Override public Integer load(Object key) {
return null;
}
/** {@inheritDoc} */
@Override public void write(Cache.Entry<?, ?> entry) {
// No-op.
}
/** {@inheritDoc} */
@Override public void delete(Object key) throws CacheWriterException {
// No-op.
}
/** {@inheritDoc} */
@Override public void loadCache(IgniteBiInClosure<Object, Object> clo, Object... args) {
if (ignite.cluster().localNode().isClient())
loadedFromClient = true;
for (int i = 0; i < 10; i++)
clo.apply(i, i);
}
}
}
|
|
/**********************************************************************************
* $URL: https://source.sakaiproject.org/svn/providers/tags/sakai-10.6/imsent/src/java/org/sakaiproject/component/imsent/user/IMSEntUserDirectoryProvider.java $
* $Id: IMSEntUserDirectoryProvider.java 105079 2012-02-24 23:08:11Z ottenhoff@longsight.com $
***********************************************************************************
*
* Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.component.imsent.user;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.db.api.SqlReader;
import org.sakaiproject.db.api.SqlService;
import org.sakaiproject.user.api.UserDirectoryProvider;
import org.sakaiproject.user.api.UserEdit;
/**
* <p>
* IMSEntUserDirectoryProvider is a sample UserDirectoryProvider.
* </p>
*/
public class IMSEntUserDirectoryProvider implements UserDirectoryProvider
{
/** Our log (commons). */
private static Log M_log = LogFactory.getLog(IMSEntUserDirectoryProvider.class);
/**********************************************************************************************************************************************************************************************************************************************************
* Dependencies and their setter methods
*********************************************************************************************************************************************************************************************************************************************************/
/** Dependency: SqlService */
protected SqlService m_sqlService = null;
/**
* Dependency: SqlService.
*
* @param service
* The SqlService.
*/
public void setSqlService(SqlService service)
{
M_log.info("Setting Sql Service");
m_sqlService = service;
}
/** Configuration: to run the ddl on init or not. */
// TODO: Set back to false
protected boolean m_autoDdl = true;
/**
* Configuration: to run the ddl on init or not.
*
* @param value
* the auto ddl value.
*/
public void setAutoDdl(String value)
{
m_autoDdl = Boolean.parseBoolean(value);
}
/**********************************************************************************************************************************************************************************************************************************************************
* Init and Destroy
*********************************************************************************************************************************************************************************************************************************************************/
/**
* Final initialization, once all dependencies are set.
*/
public void init()
{
try
{
M_log.info("init()");
}
catch (Exception t)
{
M_log.info(this + ".init() - failed attempting to log " + t);
M_log.warn(".init(): " + t);
}
try
{
// if we are auto-creating our schema, check and create
if (m_autoDdl && m_sqlService != null)
{
m_sqlService.ddl(this.getClass().getClassLoader(), "imsent_provider");
M_log.info("Back from autoddl");
}
// Check to see if we are ready to run...
if (!isReady())
{
M_log.warn(".init(): Not properly initialized.");
}
}
catch (Exception t)
{
M_log.warn(".init(): ", t);
m_isReady = false;
}
// Check to see if we are ready to run...
if (!isReady())
{
M_log.warn(".init(): Not properly initialized.");
}
} // init
/**
* Returns the provider to an uninitialized state. You can use this method to release resources that your Service allocated when Turbine shuts down.
*/
public void destroy()
{
M_log.info("destroy()");
} // destroy
/**
* Determine if we are in a ready-to-go-state
*/
private boolean m_isReady = true;
private boolean m_firstCheck = true;
private boolean isReady()
{
// Only check things once
if (!m_firstCheck) return m_isReady;
m_firstCheck = false;
boolean retval = true;
if (m_sqlService == null)
{
M_log.warn("sqlService injection failed");
retval = false;
}
// Check all other injections here
// Return the value and set
m_isReady = retval;
return retval;
}
/**********************************************************************************************************************************************************************************************************************************************************
* UserDirectoryProvider implementation
*********************************************************************************************************************************************************************************************************************************************************/
public class SakaiIMSUser
{
// From User
public String eMail = null;
public String displayName = null;
public String sortName = null;
public String firstName = null;
public String lastName = null;
// From Resource
// public ResourceProperties getProperties;
public String id = null;
// For use locally
public String password = null;
// For debugging
public String toString()
{
String rv = "SakaiIMSUser Email=" + eMail + " DisplayName=" + displayName + " SortName=" + sortName + " FirstName="
+ firstName + " LastName=" + lastName + " Id=" + id + " Password=" + password;
return rv;
}
}
public SakaiIMSUser retrieveUser(final String userId, boolean isEmail)
{
String statement;
if (userId == null) return null;
if (isEmail)
{
// 1 2 3 4 5 6 7
statement = "select USERID,FN,SORT,PASSWORD,FAMILY,GIVEN,EMAIL from IMSENT_PERSON where EMAIL = ?";
}
else
{
statement = "select USERID,FN,SORT,PASSWORD,FAMILY,GIVEN,EMAIL from IMSENT_PERSON where USERID = ?";
}
Object fields[] = new Object[1];
fields[0] = userId;
M_log.info("SQL:" + statement);
List rv = m_sqlService.dbRead(statement, fields, new SqlReader()
{
public Object readSqlResultRecord(ResultSet result)
{
try
{
SakaiIMSUser rv = new SakaiIMSUser();
rv.id = result.getString(1);
rv.displayName = result.getString(2);
rv.sortName = result.getString(3);
if (rv.sortName == null) rv.sortName = rv.displayName;
rv.password = result.getString(4);
rv.lastName = result.getString(5);
rv.firstName = result.getString(6);
rv.eMail = result.getString(7);
M_log.info("Inside reader " + rv);
return rv;
}
catch (SQLException e)
{
M_log.warn(this + ".authenticateUser: " + userId + " : " + e);
return null;
}
}
});
if ((rv != null) && (rv.size() > 0))
{
M_log.info("Returning ");
M_log.info(" " + (SakaiIMSUser) rv.get(0));
return (SakaiIMSUser) rv.get(0);
}
return null;
}
/**
* Construct.
*/
public IMSEntUserDirectoryProvider()
{
} // IMSEntUserDirectoryProvider
/**
* Copy the information from our internal structure into the Sakai User structure.
*
* @param edit
* @param imsUser
*/
private void copyInfo(UserEdit edit, SakaiIMSUser imsUser)
{
edit.setId(imsUser.id);
edit.setFirstName(imsUser.firstName);
edit.setLastName(imsUser.lastName);
edit.setEmail(imsUser.eMail);
edit.setPassword(imsUser.password);
// Sakai currently creates sortname from first and last name
edit.setType("imsent");
}
/**
* Access a user object. Update the object with the information found.
*
* @param edit
* The user object (id is set) to fill in.
* @return true if the user object was found and information updated, false if not.
*/
public boolean getUser(UserEdit edit)
{
if (!isReady()) return false;
if (edit == null) return false;
String userId = edit.getEid();
M_log.info("getUser(" + userId + ")");
SakaiIMSUser rv = retrieveUser(userId, false);
if (rv == null) return false;
copyInfo(edit, rv);
return true;
} // getUser
/**
* Access a collection of UserEdit objects; if the user is found, update the information, otherwise remove the UserEdit object from the collection.
*
* @param users
* The UserEdit objects (with id set) to fill in or remove.
*/
public void getUsers(Collection users)
{
for (Iterator i = users.iterator(); i.hasNext();)
{
UserEdit user = (UserEdit) i.next();
if (!getUser(user))
{
i.remove();
}
}
}
/**
* Find a user object who has this email address. Update the object with the information found.
*
* @param email
* The email address string.
* @return true if the user object was found and information updated, false if not.
*/
public boolean findUserByEmail(UserEdit edit, String email)
{
if (!isReady()) return false;
if ((edit == null) || (email == null)) return false;
M_log.info("findUserByEmail(" + email + ")");
SakaiIMSUser rv = retrieveUser(email, true);
if (rv == null) return false;
copyInfo(edit, rv);
return true;
} // findUserByEmail
/**
* Authenticate a user / password. If the user edit exists it may be modified, and will be stored if...
*
* @param userId
* The user id.
* @param edit
* The UserEdit matching the id to be authenticated (and updated) if we have one.
* @param password
* The password.
* @return true if authenticated, false if not.
*/
public boolean authenticateUser(final String userId, UserEdit edit, String password)
{
if (!isReady()) return false;
if ((userId == null) || (password == null)) return false;
M_log.info("authenticateUser(" + userId + ")");
SakaiIMSUser rv = retrieveUser(userId, false);
if (rv == null) return false;
return (password.compareTo(rv.password) == 0);
} // authenticateUser
/**
* {@inheritDoc}
*/
public boolean authenticateWithProviderFirst(String id)
{
return false;
}
} // IMSEntUserDirectoryProvider
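/*
 * Hedged wiring sketch (illustrative only, not Sakai's actual component configuration): shows
 * the order in which dependencies would be injected before init() is called. The SqlService
 * instance is assumed to be supplied by the surrounding component manager.
 */
class IMSEntUserDirectoryProviderWiringExample {
    static IMSEntUserDirectoryProvider wire(SqlService sqlService) {
        IMSEntUserDirectoryProvider provider = new IMSEntUserDirectoryProvider();
        provider.setSqlService(sqlService); // dependency injection must precede init()
        provider.setAutoDdl("true");        // ask init() to run the imsent_provider ddl
        provider.init();
        return provider;
    }
}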
|
|
package com.jcwhatever.nucleus.providers.mysql.datanode;
import com.jcwhatever.nucleus.providers.mysql.datanode.SqlDataNode.SqlNodeValue;
import com.jcwhatever.nucleus.providers.sql.ISqlQueryResult;
import com.jcwhatever.nucleus.providers.sql.ISqlTable;
import com.jcwhatever.nucleus.providers.sql.ISqlTableDefinition;
import com.jcwhatever.nucleus.providers.sql.ISqlTableDefinition.ISqlTableColumn;
import com.jcwhatever.nucleus.providers.sql.datanode.ISqlDataNode;
import com.jcwhatever.nucleus.providers.sql.datanode.ISqlDataNodeBuilder;
import com.jcwhatever.nucleus.utils.PreCon;
import org.bukkit.plugin.Plugin;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.Nullable;
/**
* Implementation of {@link ISqlDataNodeBuilder}.
*/
public class SqlDataNodeBuilder implements ISqlDataNodeBuilder {
private final Plugin _plugin;
private final Map<String, SqlNodeValue> _valueMap;
private final Setter _setter = new Setter();
private final EmptySetter _emptySetter = new EmptySetter();
private ISqlTable _table;
private ISqlQueryResult _result;
private Object _pKeyValue;
/**
* Constructor.
*
* @param plugin  The owning plugin of the resulting data node.
*/
public SqlDataNodeBuilder(Plugin plugin) {
PreCon.notNull(plugin);
_plugin = plugin;
_valueMap = new HashMap<>(25);
}
@Override
public Setter fromSource(
ISqlTable table, ISqlQueryResult result, Object pKeyValue) {
PreCon.notNull(table);
PreCon.notNull(result);
PreCon.notNull(pKeyValue);
setSource(table, result, pKeyValue);
return _setter;
}
@Override
public EmptySetter withoutSource(ISqlTable table, Object pKeyValue) {
PreCon.notNull(table);
PreCon.notNull(pKeyValue);
setSource(table, null, pKeyValue);
return _emptySetter;
}
private void setSource(
ISqlTable table, @Nullable ISqlQueryResult result, Object pKeyValue) {
_table = table;
_result = result;
_pKeyValue = pKeyValue;
}
private void setAll() throws SQLException {
String[] columnNames;
if (_result == null) {
ISqlTableDefinition definition = _table.getDefinition();
columnNames = definition.getColumnNames();
}
else {
columnNames = _result.getColumns();
}
for (String columnName : columnNames) {
set(columnName, columnName);
}
}
private void set(String columnName, String nodeName) throws SQLException {
PreCon.notNullOrEmpty(columnName);
PreCon.notNull(nodeName);
Object value = null;
if (_result != null) {
value = _result.getObject(columnName);
}
else {
ISqlTableColumn column = _table.getDefinition().getColumn(columnName);
if (column == null)
throw new IllegalArgumentException("Column " + columnName + " not found.");
if (column.hasDefaultValue()) {
value = column.getDefaultValue();
}
}
SqlNodeValue nodeValue = new SqlNodeValue(
nodeName, columnName, _pKeyValue, _table, value);
_valueMap.put(nodeName, nodeValue);
}
private void set(int columnIndex, String nodeName) throws SQLException {
PreCon.notNull(nodeName);
assert _result != null;
Object value = _result.getObject(columnIndex);
String columnName = _result.getColumns()[columnIndex];
SqlNodeValue nodeValue = new SqlNodeValue(
nodeName, columnName, _pKeyValue, _table, value);
_valueMap.put(nodeName, nodeValue);
}
private class Setter implements ISqlDataNodeBuilderSetter {
@Override
public Setter fromSource(
ISqlTable table, ISqlQueryResult result, Object pKeyValue) {
return SqlDataNodeBuilder.this.fromSource(table, result, pKeyValue);
}
@Override
public EmptySetter withoutSource(ISqlTable table, Object pKeyValue) {
return SqlDataNodeBuilder.this.withoutSource(table, pKeyValue);
}
@Override
public Setter setAll() throws SQLException {
SqlDataNodeBuilder.this.setAll();
return this;
}
@Override
public Setter set(String columnName) throws SQLException {
SqlDataNodeBuilder.this.set(columnName, columnName);
return this;
}
@Override
public Setter set(String columnName, String nodeName) throws SQLException {
SqlDataNodeBuilder.this.set(columnName, nodeName);
return this;
}
@Override
public Setter set(int columnIndex, String nodeName) throws SQLException {
SqlDataNodeBuilder.this.set(columnIndex, nodeName);
return this;
}
@Override
public SqlDataNode build() {
return new SqlDataNode(_plugin, _valueMap);
}
}
private class EmptySetter implements ISqlDataNodeBuilderEmptySetter {
@Override
public EmptySetter setAll() {
try {
SqlDataNodeBuilder.this.setAll();
} catch (SQLException e) {
e.printStackTrace();
}
return this;
}
@Override
public EmptySetter set(String columnName) {
return set(columnName, columnName);
}
@Override
public EmptySetter set(String columnName, String nodeName) {
try {
SqlDataNodeBuilder.this.set(columnName, nodeName);
} catch (SQLException e) {
e.printStackTrace();
}
return this;
}
@Override
public EmptySetter set(String columnName, Object value) {
return set(columnName, columnName, value);
}
@Override
public EmptySetter set(String columnName, String nodeName, @Nullable Object value) {
PreCon.notNull(columnName);
PreCon.notNull(nodeName);
SqlNodeValue nodeValue = new SqlNodeValue(
nodeName, columnName, _pKeyValue, _table, value);
_valueMap.put(nodeName, nodeValue);
return this;
}
@Override
public ISqlDataNode build() {
return new SqlDataNode(_plugin, _valueMap);
}
@Override
public Setter fromSource(ISqlTable table, ISqlQueryResult result, Object pKeyValue) {
return SqlDataNodeBuilder.this.fromSource(table, result, pKeyValue);
}
@Override
public EmptySetter withoutSource(ISqlTable table, Object pKeyValue) {
return SqlDataNodeBuilder.this.withoutSource(table, pKeyValue);
}
}
}
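/*
 * Hedged usage sketch (illustrative only): demonstrates the builder above when no query result
 * is available. The plugin, table and primary-key value are assumed to be supplied by the
 * caller, and the "score" column is hypothetical; the chained calls go through the
 * ISqlDataNodeBuilder interface rather than the private setter classes.
 */
class SqlDataNodeBuilderUsageExample {
    static ISqlDataNode buildWithoutSource(Plugin plugin, ISqlTable table, Object primaryKey) {
        ISqlDataNodeBuilder builder = new SqlDataNodeBuilder(plugin);
        return builder
                .withoutSource(table, primaryKey) // no ISqlQueryResult backing the values
                .setAll()                         // seed every column with its default value
                .set("score", 0)                  // override a single (hypothetical) column value
                .build();
    }
}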
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.tomcat.model;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyAttribute;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
import javax.xml.namespace.QName;
import org.apache.catalina.Executor;
import org.apache.catalina.LifecycleListener;
import org.apache.catalina.Service;
import org.apache.catalina.connector.Connector;
import org.apache.geronimo.crypto.EncryptionManager;
import org.apache.geronimo.tomcat.TomcatServerGBean;
import org.apache.tomcat.util.IntrospectionUtils;
import org.apache.xbean.recipe.ObjectRecipe;
import org.apache.xbean.recipe.Option;
/**
* <p>Java class for ConnectorType complex type.
* <p/>
* <p>The following schema fragment specifies the expected content contained within this class.
* <p/>
* <pre>
* <complexType name="ConnectorType">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="Listener" type="{}ListenerType" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* <attribute name="className" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="allowTrace" type="{http://www.w3.org/2001/XMLSchema}boolean" />
* <attribute name="emptySessionPath" type="{http://www.w3.org/2001/XMLSchema}boolean" />
* <attribute name="enableLookups" type="{http://www.w3.org/2001/XMLSchema}boolean" />
* <attribute name="maxPostSize" type="{http://www.w3.org/2001/XMLSchema}int" />
* <attribute name="maxSavePostSize" type="{http://www.w3.org/2001/XMLSchema}int" />
* <attribute name="port" type="{http://www.w3.org/2001/XMLSchema}int" />
* <attribute name="protocol" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="protocolHandlerClassName" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="proxyName" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="proxyPort" type="{http://www.w3.org/2001/XMLSchema}int" />
* <attribute name="redirectPort" type="{http://www.w3.org/2001/XMLSchema}int" />
* <attribute name="scheme" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="secure" type="{http://www.w3.org/2001/XMLSchema}boolean" />
* <attribute name="encoding" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="useBodyEncodingForURI" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="xpoweredBy" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="useIPVHosts" type="{http://www.w3.org/2001/XMLSchema}boolean" />
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ConnectorType", propOrder = {
"listener"
})
public class ConnectorType {
@XmlElement(name = "Listener")
protected List<ListenerType> listener;
@XmlAttribute
protected String className = Connector.class.getName();
@XmlAttribute
protected Boolean allowTrace;
@XmlAttribute
protected Boolean emptySessionPath;
@XmlAttribute
protected Boolean enableLookups;
@XmlAttribute
protected Integer maxParameterCount;
@XmlAttribute
protected Integer maxPostSize;
@XmlAttribute
protected Integer maxSavePostSize;
@XmlAttribute
protected Integer port;
@XmlAttribute
protected String protocol;
@XmlAttribute
protected String protocolHandlerClassName;
@XmlAttribute
protected String proxyName;
@XmlAttribute
protected Integer proxyPort;
@XmlAttribute
protected Integer redirectPort;
@XmlAttribute
protected String scheme;
@XmlAttribute
protected Boolean secure;
@XmlAttribute
protected String encoding;
@XmlAttribute
protected String useBodyEncodingForURI;
@XmlAttribute
protected String xpoweredBy;
@XmlAttribute
protected Boolean useIPVHosts;
@XmlAnyAttribute
private Map<QName, String> otherAttributes = new HashMap<QName, String>();
private static final String EMPTY_SESSION_PATH = "emptySessionPath";
private static final String ENABLE_LOOKUPS = "enableLookups";
private static final String MAX_PARAMETER_COUNT = "maxParameterCount";
private static final String MAX_POST_SIZE = "maxPostSize";
private static final String MAX_SAVE_POST_SIZE = "maxSavePostSize";
private static final String PORT = "port";
private static final String PROTOCOL = "protocol";
private static final String PROTOCOL_HANDLER_CLASS_NAME = "protocolHandlerClassName";
private static final String PROXY_NAME = "proxyName";
private static final String PROXY_PORT = "proxyPort";
private static final String REDIRECT_PORT = "redirectPort";
private static final String SCHEME = "scheme";
private static final String SECURE = "secure";
private static final String ENCODING = "encoding";
private static final String USE_BODY_ENCODING_FOR_URI = "useBodyEncodingForURI";
private static final String X_POWERED_BY = "xPoweredBy";
private static final String USE_IPVHOSTS = "useIPVHosts";
private static final String ALLOW_TRACE = "allowTrace";
/**
* Gets the value of the listener property.
* <p/>
* <p/>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the listener property.
* <p/>
* <p/>
* For example, to add a new item, do as follows:
* <pre>
* getListener().add(newItem);
* </pre>
* <p/>
* <p/>
* <p/>
* Objects of the following type(s) are allowed in the list
* {@link ListenerType }
*/
public List<ListenerType> getListener() {
if (listener == null) {
listener = new ArrayList<ListenerType>();
}
return this.listener;
}
/**
* Gets the value of the className property.
*
* @return possible object is
* {@link String }
*/
public String getClassName() {
return className;
}
/**
* Sets the value of the className property.
*
* @param value allowed object is
* {@link String }
*/
public void setClassName(String value) {
this.className = value;
}
/**
* Gets the value of the allowTrace property.
*
* @return possible object is
* {@link Boolean }
*/
public Boolean isAllowTrace() {
return allowTrace;
}
/**
* Sets the value of the allowTrace property.
*
* @param value allowed object is
* {@link Boolean }
*/
public void setAllowTrace(Boolean value) {
this.allowTrace = value;
}
/**
* Gets the value of the emptySessionPath property.
*
* @return possible object is
* {@link Boolean }
*/
public Boolean isEmptySessionPath() {
return emptySessionPath;
}
/**
* Sets the value of the emptySessionPath property.
*
* @param value allowed object is
* {@link Boolean }
*/
public void setEmptySessionPath(Boolean value) {
this.emptySessionPath = value;
}
/**
* Gets the value of the enableLookups property.
*
* @return possible object is
* {@link Boolean }
*/
public Boolean isEnableLookups() {
return enableLookups;
}
/**
* Sets the value of the enableLookups property.
*
* @param value allowed object is
* {@link Boolean }
*/
public void setEnableLookups(Boolean value) {
this.enableLookups = value;
}
/**
* Gets the value of the maxParameterCount property.
*
* @return possible object is
* {@link Integer }
*/
public Integer getMaxParameterCount() {
return maxParameterCount;
}
/**
* Sets the value of the maxParameterCount property.
*
* @param value allowed object is
* {@link Integer }
*/
public void setMaxParameterCount(Integer value) {
this.maxParameterCount = value;
}
/**
* Gets the value of the maxPostSize property.
*
* @return possible object is
* {@link Integer }
*/
public Integer getMaxPostSize() {
return maxPostSize;
}
/**
* Sets the value of the maxPostSize property.
*
* @param value allowed object is
* {@link Integer }
*/
public void setMaxPostSize(Integer value) {
this.maxPostSize = value;
}
/**
* Gets the value of the maxSavePostSize property.
*
* @return possible object is
* {@link Integer }
*/
public Integer getMaxSavePostSize() {
return maxSavePostSize;
}
/**
* Sets the value of the maxSavePostSize property.
*
* @param value allowed object is
* {@link Integer }
*/
public void setMaxSavePostSize(Integer value) {
this.maxSavePostSize = value;
}
/**
* Gets the value of the port property.
*
* @return possible object is
* {@link Integer }
*/
public Integer getPort() {
return port;
}
/**
* Sets the value of the port property.
*
* @param value allowed object is
* {@link Integer }
*/
public void setPort(Integer value) {
this.port = value;
}
/**
* Gets the value of the protocol property.
*
* @return possible object is
* {@link String }
*/
public String getProtocol() {
return protocol;
}
/**
* Sets the value of the protocol property.
*
* @param value allowed object is
* {@link String }
*/
public void setProtocol(String value) {
this.protocol = value;
}
/**
* Gets the value of the protocolHandlerClassName property.
*
* @return possible object is
* {@link String }
*/
public String getProtocolHandlerClassName() {
return protocolHandlerClassName;
}
/**
* Sets the value of the protocolHandlerClassName property.
*
* @param value allowed object is
* {@link String }
*/
public void setProtocolHandlerClassName(String value) {
this.protocolHandlerClassName = value;
}
/**
* Gets the value of the proxyName property.
*
* @return possible object is
* {@link String }
*/
public String getProxyName() {
return proxyName;
}
/**
* Sets the value of the proxyName property.
*
* @param value allowed object is
* {@link String }
*/
public void setProxyName(String value) {
this.proxyName = value;
}
/**
* Gets the value of the proxyPort property.
*
* @return possible object is
* {@link Integer }
*/
public Integer getProxyPort() {
return proxyPort;
}
/**
* Sets the value of the proxyPort property.
*
* @param value allowed object is
* {@link Integer }
*/
public void setProxyPort(Integer value) {
this.proxyPort = value;
}
/**
* Gets the value of the redirectPort property.
*
* @return possible object is
* {@link Integer }
*/
public Integer getRedirectPort() {
return redirectPort;
}
/**
* Sets the value of the redirectPort property.
*
* @param value allowed object is
* {@link Integer }
*/
public void setRedirectPort(Integer value) {
this.redirectPort = value;
}
/**
* Gets the value of the scheme property.
*
* @return possible object is
* {@link String }
*/
public String getScheme() {
return scheme;
}
/**
* Sets the value of the scheme property.
*
* @param value allowed object is
* {@link String }
*/
public void setScheme(String value) {
this.scheme = value;
}
/**
* Gets the value of the secure property.
*
* @return possible object is
* {@link Boolean }
*/
public Boolean isSecure() {
return secure;
}
/**
* Sets the value of the secure property.
*
* @param value allowed object is
* {@link Boolean }
*/
public void setSecure(Boolean value) {
this.secure = value;
}
/**
* Gets the value of the encoding property.
*
* @return possible object is
* {@link String }
*/
public String getEncoding() {
return encoding;
}
/**
* Sets the value of the encoding property.
*
* @param value allowed object is
* {@link String }
*/
public void setEncoding(String value) {
this.encoding = value;
}
/**
* Gets the value of the useBodyEncodingForURI property.
*
* @return possible object is
* {@link String }
*/
public String getUseBodyEncodingForURI() {
return useBodyEncodingForURI;
}
/**
* Sets the value of the useBodyEncodingForURI property.
*
* @param value allowed object is
* {@link String }
*/
public void setUseBodyEncodingForURI(String value) {
this.useBodyEncodingForURI = value;
}
/**
* Gets the value of the xpoweredBy property.
*
* @return possible object is
* {@link String }
*/
public String getXpoweredBy() {
return xpoweredBy;
}
/**
* Sets the value of the xpoweredBy property.
*
* @param value allowed object is
* {@link String }
*/
public void setXpoweredBy(String value) {
this.xpoweredBy = value;
}
/**
* Gets the value of the useIPVHosts property.
*
* @return possible object is
* {@link Boolean }
*/
public Boolean isUseIPVHosts() {
return useIPVHosts;
}
/**
* Sets the value of the useIPVHosts property.
*
* @param value allowed object is
* {@link Boolean }
*/
public void setUseIPVHosts(Boolean value) {
this.useIPVHosts = value;
}
/**
* Gets a map that contains attributes that aren't bound to any typed property on this class.
* <p/>
* <p/>
* the map is keyed by the name of the attribute and
* the value is the string value of the attribute.
* <p/>
* the map returned by this method is live, and you can add new attributes
* by updating the map directly. Because of this design, there's no setter.
*
* @return always non-null
*/
public Map<QName, String> getOtherAttributes() {
return otherAttributes;
}
public Connector getConnector(ClassLoader cl, Service service) throws Exception {
Map<String, Object> properties = new HashMap<String, Object>();
if (isAllowTrace() != null) {
properties.put(ALLOW_TRACE, isAllowTrace());
}
if (isEmptySessionPath() != null) {
properties.put(EMPTY_SESSION_PATH, isEmptySessionPath());
}
if (isEnableLookups() != null) {
properties.put(ENABLE_LOOKUPS, isEnableLookups());
}
if (getMaxParameterCount() != null) {
properties.put(MAX_PARAMETER_COUNT, getMaxParameterCount());
}
if (getMaxPostSize() != null) {
properties.put(MAX_POST_SIZE, getMaxPostSize());
}
if (getMaxSavePostSize() != null) {
properties.put(MAX_SAVE_POST_SIZE, getMaxSavePostSize());
}
if (getPort() != null) {
properties.put(PORT, getPort());
}
if (getProtocol() != null) {
properties.put(PROTOCOL, getProtocol());
}
if (getProtocolHandlerClassName() != null) {
properties.put(PROTOCOL_HANDLER_CLASS_NAME, getProtocolHandlerClassName());
}
if (getProxyName() != null) {
properties.put(PROXY_NAME, getProxyName());
}
if (getProxyPort() != null) {
properties.put(PROXY_PORT, getProxyPort());
}
if (getRedirectPort() != null) {
properties.put(REDIRECT_PORT, getRedirectPort());
}
if (getScheme() != null) {
properties.put(SCHEME, getScheme());
}
if (isSecure() != null) {
properties.put(SECURE, isSecure());
}
if (getEncoding() != null) {
properties.put(ENCODING, getEncoding());
}
if (getUseBodyEncodingForURI() != null) {
properties.put(USE_BODY_ENCODING_FOR_URI, getUseBodyEncodingForURI());
}
if (getXpoweredBy() != null) {
properties.put(X_POWERED_BY, getXpoweredBy());
}
if (isUseIPVHosts() != null) {
properties.put(USE_IPVHOSTS, isUseIPVHosts());
}
ObjectRecipe recipe = new ObjectRecipe(className, properties);
recipe.allow(Option.IGNORE_MISSING_PROPERTIES);
recipe.setConstructorArgTypes(new Class[] { String.class });
recipe.setConstructorArgNames(new String[] { "protocol" });
Connector connector = (Connector) recipe.create(cl);
boolean executorSupported = !connector.getProtocolHandlerClassName().equals("org.apache.jk.server.JkCoyoteHandler");
for (Map.Entry<QName, String> entry : otherAttributes.entrySet()) {
String name = entry.getKey().getLocalPart();
String value = entry.getValue();
if (executorSupported && "executor".equals(name)) {
Executor executor = service.getExecutor(entry.getValue());
if (executor == null) {
throw new IllegalArgumentException("No executor found in service with name: " + value);
}
IntrospectionUtils.callMethod1(connector.getProtocolHandler(),
"setExecutor",
executor,
java.util.concurrent.Executor.class.getName(),
cl);
} else if ("name".equals(name)) {
//name attribute is held by Geronimo to identify the connector, it is not required by Tomcat
TomcatServerGBean.ConnectorName.put(connector, value);
} else {
if ("keystorePass".equals(name)) {
value = (String) EncryptionManager.decrypt(value);
}
connector.setProperty(name, value);
}
}
for (ListenerType listenerType : getListener()) {
LifecycleListener listener = listenerType.getLifecycleListener(cl);
connector.addLifecycleListener(listener);
TomcatServerGBean.LifecycleListeners.add(listener);
}
return connector;
}
}
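/*
 * Hedged usage sketch (illustrative only, not part of Geronimo): populates a ConnectorType the
 * way the JAXB unmarshaller would, adds an untyped attribute through the live otherAttributes
 * map described above, and materializes a Tomcat Connector from it. The Service instance and
 * the "maxThreads" attribute are assumptions for the example.
 */
class ConnectorTypeUsageExample {
    static Connector createHttpConnector(Service service) throws Exception {
        ConnectorType type = new ConnectorType();
        type.setPort(8080);
        type.setProtocol("HTTP/1.1");
        type.setScheme("http");
        // otherAttributes is live: this entry ends up in connector.setProperty("maxThreads", "200")
        type.getOtherAttributes().put(new QName("maxThreads"), "200");
        return type.getConnector(ConnectorTypeUsageExample.class.getClassLoader(), service);
    }
}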
|
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.planner;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.sql.analyzer.Type;
import com.facebook.presto.sql.planner.plan.AggregationNode;
import com.facebook.presto.sql.planner.plan.ExchangeNode;
import com.facebook.presto.sql.planner.plan.FilterNode;
import com.facebook.presto.sql.planner.plan.JoinNode;
import com.facebook.presto.sql.planner.plan.LimitNode;
import com.facebook.presto.sql.planner.plan.OutputNode;
import com.facebook.presto.sql.planner.plan.PlanFragmentId;
import com.facebook.presto.sql.planner.plan.PlanNode;
import com.facebook.presto.sql.planner.plan.PlanVisitor;
import com.facebook.presto.sql.planner.plan.ProjectNode;
import com.facebook.presto.sql.planner.plan.SemiJoinNode;
import com.facebook.presto.sql.planner.plan.SinkNode;
import com.facebook.presto.sql.planner.plan.SortNode;
import com.facebook.presto.sql.planner.plan.TableScanNode;
import com.facebook.presto.sql.planner.plan.TableWriterNode;
import com.facebook.presto.sql.planner.plan.TopNNode;
import com.facebook.presto.sql.planner.plan.UnionNode;
import com.facebook.presto.sql.planner.plan.WindowNode;
import com.facebook.presto.sql.tree.ComparisonExpression;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.FunctionCall;
import com.facebook.presto.sql.tree.QualifiedNameReference;
import com.facebook.presto.util.GraphvizPrinter;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.lang.String.format;
public class PlanPrinter
{
private final StringBuilder output = new StringBuilder();
private PlanPrinter(PlanNode plan, Map<Symbol, Type> types, Optional<Map<PlanFragmentId, PlanFragment>> fragmentsById)
{
checkNotNull(plan, "plan is null");
checkNotNull(types, "types is null");
checkNotNull(fragmentsById, "fragmentsById is null");
Visitor visitor = new Visitor(types, fragmentsById);
plan.accept(visitor, 0);
}
@Override
public String toString()
{
return output.toString();
}
public static String textLogicalPlan(PlanNode plan, Map<Symbol, Type> types)
{
return new PlanPrinter(plan, types, Optional.<Map<PlanFragmentId, PlanFragment>>absent()).toString();
}
public static String textDistributedPlan(SubPlan plan)
{
List<PlanFragment> fragments = plan.getAllFragments();
Map<PlanFragmentId, PlanFragment> fragmentsById = Maps.uniqueIndex(fragments, PlanFragment.idGetter());
PlanFragment fragment = plan.getFragment();
return new PlanPrinter(fragment.getRoot(), fragment.getSymbols(), Optional.of(fragmentsById)).toString();
}
public static String graphvizLogicalPlan(PlanNode plan, Map<Symbol, Type> types)
{
PlanFragment fragment = new PlanFragment(new PlanFragmentId("graphviz_plan"), plan.getId(), types, plan);
return GraphvizPrinter.printLogical(ImmutableList.of(fragment));
}
public static String graphvizDistributedPlan(SubPlan plan)
{
return GraphvizPrinter.printDistributed(plan);
}
private void print(int indent, String format, Object... args)
{
String value;
if (args.length == 0) {
value = format;
}
else {
value = format(format, args);
}
output.append(Strings.repeat(" ", indent)).append(value).append('\n');
}
private class Visitor
extends PlanVisitor<Integer, Void>
{
private final Map<Symbol, Type> types;
private final Optional<Map<PlanFragmentId, PlanFragment>> fragmentsById;
public Visitor(Map<Symbol, Type> types, Optional<Map<PlanFragmentId, PlanFragment>> fragmentsById)
{
this.types = types;
this.fragmentsById = fragmentsById;
}
@Override
public Void visitJoin(JoinNode node, Integer indent)
{
List<Expression> joinExpressions = new ArrayList<>();
for (JoinNode.EquiJoinClause clause : node.getCriteria()) {
joinExpressions.add(new ComparisonExpression(ComparisonExpression.Type.EQUAL,
new QualifiedNameReference(clause.getLeft().toQualifiedName()),
new QualifiedNameReference(clause.getRight().toQualifiedName())));
}
print(indent, "- %s[%s] => [%s]", node.getType().getJoinLabel(), Joiner.on(" AND ").join(joinExpressions), formatOutputs(node.getOutputSymbols()));
node.getLeft().accept(this, indent + 1);
node.getRight().accept(this, indent + 1);
return null;
}
@Override
public Void visitSemiJoin(SemiJoinNode node, Integer indent)
{
print(indent, "- SemiJoin[%s = %s] => [%s]", node.getSourceJoinSymbol(), node.getFilteringSourceJoinSymbol(), formatOutputs(node.getOutputSymbols()));
node.getSource().accept(this, indent + 1);
node.getFilteringSource().accept(this, indent + 1);
return null;
}
@Override
public Void visitLimit(LimitNode node, Integer indent)
{
print(indent, "- Limit[%s] => [%s]", node.getCount(), formatOutputs(node.getOutputSymbols()));
return processChildren(node, indent + 1);
}
@Override
public Void visitAggregation(AggregationNode node, Integer indent)
{
String type = "";
if (node.getStep() != AggregationNode.Step.SINGLE) {
type = format("(%s)", node.getStep().toString());
}
String key = "";
if (!node.getGroupBy().isEmpty()) {
key = node.getGroupBy().toString();
}
print(indent, "- Aggregate%s%s => [%s]", type, key, formatOutputs(node.getOutputSymbols()));
for (Map.Entry<Symbol, FunctionCall> entry : node.getAggregations().entrySet()) {
print(indent + 2, "%s := %s", entry.getKey(), entry.getValue());
}
return processChildren(node, indent + 1);
}
@Override
public Void visitWindow(final WindowNode node, Integer indent)
{
List<String> partitionBy = Lists.transform(node.getPartitionBy(), Functions.toStringFunction());
List<String> orderBy = Lists.transform(node.getOrderBy(), new Function<Symbol, String>()
{
@Override
public String apply(Symbol input)
{
return input + " " + node.getOrderings().get(input);
}
});
List<String> args = new ArrayList<>();
if (!partitionBy.isEmpty()) {
args.add(format("partition by (%s)", Joiner.on(", ").join(partitionBy)));
}
if (!orderBy.isEmpty()) {
args.add(format("order by (%s)", Joiner.on(", ").join(orderBy)));
}
print(indent, "- Window[%s] => [%s]", Joiner.on(", ").join(args), formatOutputs(node.getOutputSymbols()));
for (Map.Entry<Symbol, FunctionCall> entry : node.getWindowFunctions().entrySet()) {
print(indent + 2, "%s := %s(%s)", entry.getKey(), entry.getValue().getName(), Joiner.on(", ").join(entry.getValue().getArguments()));
}
return processChildren(node, indent + 1);
}
@Override
public Void visitTableScan(TableScanNode node, Integer indent)
{
print(indent, "- TableScan[%s, partition predicate=%s, upstream predicate=%s] => [%s]", node.getTable(), node.getPartitionPredicate(), node.getUpstreamPredicateHint(), formatOutputs(node.getOutputSymbols()));
for (Map.Entry<Symbol, ColumnHandle> entry : node.getAssignments().entrySet()) {
print(indent + 2, "%s := %s", entry.getKey(), entry.getValue());
}
return null;
}
@Override
public Void visitFilter(FilterNode node, Integer indent)
{
print(indent, "- Filter[%s] => [%s]", node.getPredicate(), formatOutputs(node.getOutputSymbols()));
return processChildren(node, indent + 1);
}
@Override
public Void visitProject(ProjectNode node, Integer indent)
{
print(indent, "- Project => [%s]", formatOutputs(node.getOutputSymbols()));
for (Map.Entry<Symbol, Expression> entry : node.getOutputMap().entrySet()) {
if (entry.getValue() instanceof QualifiedNameReference && ((QualifiedNameReference) entry.getValue()).getName().equals(entry.getKey().toQualifiedName())) {
// skip identity assignments
continue;
}
print(indent + 2, "%s := %s", entry.getKey(), entry.getValue());
}
return processChildren(node, indent + 1);
}
@Override
public Void visitOutput(OutputNode node, Integer indent)
{
print(indent, "- Output[%s]", Joiner.on(", ").join(node.getColumnNames()));
for (int i = 0; i < node.getColumnNames().size(); i++) {
String name = node.getColumnNames().get(i);
Symbol symbol = node.getOutputSymbols().get(i);
if (!name.equals(symbol.toString())) {
print(indent + 2, "%s := %s", name, symbol);
}
}
return processChildren(node, indent + 1);
}
@Override
public Void visitTopN(final TopNNode node, Integer indent)
{
Iterable<String> keys = Iterables.transform(node.getOrderBy(), new Function<Symbol, String>()
{
@Override
public String apply(Symbol input)
{
return input + " " + node.getOrderings().get(input);
}
});
print(indent, "- TopN[%s by (%s)] => [%s]", node.getCount(), Joiner.on(", ").join(keys), formatOutputs(node.getOutputSymbols()));
return processChildren(node, indent + 1);
}
@Override
public Void visitSort(final SortNode node, Integer indent)
{
Iterable<String> keys = Iterables.transform(node.getOrderBy(), new Function<Symbol, String>()
{
@Override
public String apply(Symbol input)
{
return input + " " + node.getOrderings().get(input);
}
});
print(indent, "- Sort[%s] => [%s]", Joiner.on(", ").join(keys), formatOutputs(node.getOutputSymbols()));
return processChildren(node, indent + 1);
}
@Override
public Void visitTableWriter(TableWriterNode node, Integer indent)
{
print(indent, "- TableWrite[%s] => [%s]", node.getTable(), formatOutputs(node.getOutputSymbols()));
for (Map.Entry<Symbol, ColumnHandle> entry : node.getColumns().entrySet()) {
print(indent + 2, "%s := %s", entry.getValue(), entry.getKey());
}
return processChildren(node, indent + 1);
}
@Override
public Void visitExchange(ExchangeNode node, Integer indent)
{
print(indent, "- Exchange[%s] => [%s]", node.getSourceFragmentIds(), formatOutputs(node.getOutputSymbols()));
return processExchange(node, indent + 1);
}
@Override
public Void visitSink(SinkNode node, Integer indent)
{
print(indent, "- Sink[%s] => [%s]", node.getId(), formatOutputs(node.getOutputSymbols()));
return processChildren(node, indent + 1);
}
@Override
public Void visitUnion(UnionNode node, Integer indent)
{
print(indent, "- Union => [%s]", formatOutputs(node.getOutputSymbols()));
return processChildren(node, indent + 1);
}
@Override
protected Void visitPlan(PlanNode node, Integer context)
{
throw new UnsupportedOperationException("not yet implemented");
}
private Void processChildren(PlanNode node, int indent)
{
for (PlanNode child : node.getSources()) {
child.accept(this, indent);
}
return null;
}
private Void processExchange(ExchangeNode node, int indent)
{
for (PlanFragmentId planFragmentId : node.getSourceFragmentIds()) {
PlanFragment target = fragmentsById.get().get(planFragmentId);
target.getRoot().accept(this, indent);
}
return null;
}
private String formatOutputs(List<Symbol> symbols)
{
return Joiner.on(", ").join(Iterables.transform(symbols, new Function<Symbol, String>()
{
@Override
public String apply(Symbol input)
{
return input + ":" + types.get(input);
}
}));
}
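// Illustrative example (hypothetical symbols and types, not in the original source): for output
// symbols [orderkey, totalprice] typed BIGINT and DOUBLE, formatOutputs returns something like
// "orderkey:bigint, totalprice:double" (the exact rendering depends on Type.toString()).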
}
}
|
|
package com.eas.designer.explorer;
import com.eas.designer.application.indexer.IndexerQuery;
import com.eas.designer.explorer.project.ui.PlatypusProjectNodesList;
import java.awt.Dialog;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.beans.PropertyVetoException;
import java.util.Set;
import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import org.openide.DialogDescriptor;
import org.openide.DialogDisplayer;
import org.openide.ErrorManager;
import org.openide.awt.Mnemonics;
import org.openide.explorer.ExplorerManager;
import org.openide.explorer.view.BeanTreeView;
import org.openide.filesystems.FileObject;
import org.openide.filesystems.FileUtil;
import org.openide.loaders.DataFilter;
import org.openide.loaders.DataFolder;
import org.openide.loaders.DataObject;
import org.openide.nodes.FilterNode;
import org.openide.nodes.Node;
import org.openide.util.HelpCtx;
import org.openide.util.NbBundle;
import org.openide.util.Utilities;
/**
* A file chooser allowing the user to choose a file or folder from the NetBeans
* filesystem. Can be used as a standalone panel or as a dialog.
*
* @author Tomas Pavek
*/
public class FileChooser extends JPanel implements ExplorerManager.Provider {
public static final Filter SELECT_FOLDERS_FILTER = new FolderFilter();
private boolean applicationElementMode;
private DataFolder rootFolder;
private ExplorerManager explorerManager;
private String selectedAppElementName;
private FileObject selectedFile;
private FileObject selectedFolder;
private boolean confirmed;
private BeanTreeView treeView;
private JButton newButton;
private JButton okButton;
private JButton cancelButton;
private JTextField fileNameTextField;
private Filter selectFilter;
public static final String PROP_SELECTED_FILE = "selectedFile"; // NOI18N
public interface Filter {
boolean accept(FileObject file);
}
public static class FolderFilter implements Filter {
@Override
public boolean accept(FileObject file) {
return file.isFolder();
}
}
/**
* Creates a new FileChooser. Can be used directly as a panel, or getDialog
* can be called to get it wrapped in a Dialog.
*
* @param aRootFile the root file object whose folder becomes the root of the
* displayed tree
* @param aDisplayFilter a filter for files to be displayed
* @param anSelectFilter a filter for files to be selected
* @param okCancelButtons defines whether the OK/Cancel buttons should be
* shown (typically true if used as a dialog and false if used as a panel)
*/
public FileChooser(FileObject aRootFile, final Filter aDisplayFilter, Filter anSelectFilter, boolean okCancelButtons) {
super();
selectFilter = anSelectFilter;
rootFolder = DataFolder.findFolder(aRootFile);
explorerManager = new ExplorerManager();
explorerManager.setRootContext(new FilterNode(rootFolder.getNodeDelegate(), rootFolder.createNodeChildren(new DataFilter() {
@Override
public boolean acceptDataObject(DataObject obj) {
return aDisplayFilter.accept(obj.getPrimaryFile());
}
})));
init(okCancelButtons);
}
/**
* Creates a new FileChooser for application elements. Can be used directly
* as a panel, or getDialog can be called to get it wrapped in a Dialog.
*
* @param aRootFile the root file object whose folder becomes the root of the
* displayed tree
* @param anSelectFilter a filter for files to be selected
* @param okCancelButtons defines whether the OK/Cancel buttons should be
* shown (typically true if used as a dialog and false if used as a panel)
*/
public FileChooser(FileObject aRootFile, Filter anSelectFilter, boolean okCancelButtons) {
super();
applicationElementMode = true;
selectFilter = anSelectFilter;
rootFolder = DataFolder.findFolder(aRootFile);
explorerManager = new ExplorerManager();
explorerManager.setRootContext(new FilterNode(rootFolder.getNodeDelegate(), rootFolder.createNodeChildren(PlatypusProjectNodesList.APPLICATION_TYPES_FILTER)));
init(okCancelButtons);
}
private boolean isChooseFolders() {
return selectFilter == SELECT_FOLDERS_FILTER;
}
private void init(boolean okCancelButtons) {
Listener listener = new Listener();
try {
explorerManager.setSelectedNodes(new Node[]{rootFolder.getNodeDelegate()});
} catch (PropertyVetoException ex) {
ErrorManager.getDefault().notify(ErrorManager.INFORMATIONAL, ex);
}
explorerManager.addPropertyChangeListener(listener);
if (isChooseFolders()) { // add a button allowing to create a new folder
newButton = new JButton();
Mnemonics.setLocalizedText(newButton, NbBundle.getMessage(FileChooser.class, "CTL_CreateNewButton")); // NOI18N
newButton.addActionListener(listener);
newButton.setEnabled(false);
newButton.setToolTipText(NbBundle.getMessage(FileChooser.class, "CTL_CreateNewButtonHint")); // NOI18N
}
if (okCancelButtons) {
okButton = new JButton(NbBundle.getMessage(FileChooser.class, "CTL_OKButton")); // NOI18N
okButton.addActionListener(listener);
okButton.setEnabled(false);
cancelButton = new JButton(NbBundle.getMessage(FileChooser.class, "CTL_CancelButton")); // NOI18N
}
treeView = new BeanTreeView();
treeView.setPopupAllowed(false);
treeView.setDefaultActionAllowed(false);
treeView.setBorder((Border) UIManager.get("Nb.ScrollPane.border")); // NOI18N
treeView.getAccessibleContext().setAccessibleName(NbBundle.getMessage(FileChooser.class, "ACSN_FileSelectorTreeView")); // NOI18N
treeView.getAccessibleContext().setAccessibleDescription(NbBundle.getMessage(FileChooser.class, "ACSD_FileSelectorTreeView")); // NOI18N
this.getAccessibleContext().setAccessibleDescription(NbBundle.getMessage(FileChooser.class, "ACSD_FileSelectorPanel")); // NOI18N
// label and text field with mnemonic
JLabel label = new JLabel();
Mnemonics.setLocalizedText(label, NbBundle.getMessage(FileChooser.class,
isChooseFolders() ? "LBL_FolderName" : applicationElementMode ? "LBL_AppName" : "LBL_FileName")); // NOI18N
fileNameTextField = new JTextField();
fileNameTextField.setEditable(false);
label.setLabelFor(fileNameTextField);
GroupLayout layout = new GroupLayout(this);
setLayout(layout);
layout.setAutoCreateGaps(true);
GroupLayout.SequentialGroup sq = layout.createSequentialGroup().addComponent(label).addComponent(fileNameTextField);
if (!okCancelButtons && newButton != null) // add newButton next to the text field
{
sq.addComponent(newButton);
}
layout.setHorizontalGroup(layout.createParallelGroup().addComponent(treeView, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE).addGroup(sq));
GroupLayout.ParallelGroup pq = layout.createParallelGroup(GroupLayout.Alignment.BASELINE).addComponent(label).addComponent(fileNameTextField);
if (!okCancelButtons && newButton != null) // add newButton next to the text field
{
pq.addComponent(newButton);
}
layout.setVerticalGroup(layout.createSequentialGroup().addComponent(treeView, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE).addGroup(pq));
}
/**
* Creates a modal dialog containing the file chooser with the given title. Use
* the ActionListener to be informed when the OK button is pressed, or call
* isConfirmed, which returns true if the OK button was pressed.
*
* @param title the title of the dialog
* @param listener ActionListener attached to the OK button (if not null)
*/
public Dialog getDialog(String title, ActionListener listener) {
if (okButton == null) {
throw new IllegalStateException("Can't create dialog for a chooser without OK and Cancel buttons."); // NOI18N
}
((GroupLayout) getLayout()).setAutoCreateContainerGaps(true);
DialogDescriptor dd = new DialogDescriptor(
this, title, true,
newButton != null
? new JButton[]{newButton, okButton, cancelButton}
: new JButton[]{okButton, cancelButton},
okButton,
DialogDescriptor.DEFAULT_ALIGN, HelpCtx.DEFAULT_HELP,
null);
dd.setClosingOptions(new JButton[]{okButton, cancelButton});
if (listener != null) {
okButton.addActionListener(listener);
}
return DialogDisplayer.getDefault().createDialog(dd);
}
@Override
public void addNotify() {
confirmed = false;
super.addNotify();
treeView.requestFocusInWindow();
}
/**
* Returns whether the user selected a file and confirmed with the OK button.
*
* @return true if the OK button has been pressed by the user since the last call
* to getDialog
*/
public boolean isConfirmed() {
return confirmed;
}
/**
* Returns the selected application element name.
*
* @return application element name
*/
public String getSelectedAppElementName() {
return selectedAppElementName;
}
/**
* Returns the file selected by the user (or set via setSelectedFile
* method).
*
* @return the FileObject selected in the chooser
*/
public FileObject getSelectedFile() {
return selectedFile;
}
/**
* Sets the selected file in the chooser. The tree view is expanded as
* needed and the corresponding node selected.
*
* @param file the FileObject to be selected in the chooser
*/
public void setSelectedFile(FileObject file) {
if (file != null) {
selectFileNode(file);
}
selectedFile = file;
}
private class Listener implements PropertyChangeListener, ActionListener, DocumentListener {
// called when the Create New or OK button is pressed
@Override
public void actionPerformed(ActionEvent e) {
if (e.getSource() == okButton) {
confirmed = true;
} else if (e.getSource() == newButton) {
if (selectedFolder == null || selectedFile != null) {
return;
}
String fileName = fileNameTextField.getText();
try { // create a new dir
selectedFile = selectedFolder.createFolder(fileName);
selectFileNode(selectedFile);
} catch (Exception ex) { // report failure (name should be OK from checkFileName)
ErrorManager.getDefault().notify(ex);
}
if (isChooseFolders() && selectedFile != null) {
firePropertyChange(PROP_SELECTED_FILE, null, selectedFile);
}
} else if (e.getSource() == fileNameTextField) { // enter pressed in the text field
if (selectedFile == null) { // nothing set from checkNameField
String fileName = fileNameTextField.getText();
if (fileName.startsWith("/")) // NOI18N
{
fileName = fileName.substring(1);
}
}
if (selectedFile != null) {
Node[] nodes = explorerManager.getSelectedNodes();
if (nodes.length != 1 || fileFromNode(nodes[0]) != selectedFile) {
selectFileNode(selectedFile);
treeView.requestFocus();
} else if (okButton != null) {
okButton.doClick();
return;
}
if (okButton != null) {
okButton.setEnabled(selectedFile != null && selectFilter.accept(selectedFile));
}
if (newButton != null) {
newButton.setEnabled(false);
}
}
}
}
// called from ExplorerManager when node selection changes
@Override
public void propertyChange(PropertyChangeEvent ev) {
if (ev.getPropertyName().equals(ExplorerManager.PROP_SELECTED_NODES)) {
Node[] nodes = explorerManager.getSelectedNodes();
FileObject oldSelected = selectedFile;
selectedFile = null;
selectedFolder = null;
if (nodes.length == 1) {
FileObject fo = fileFromNode(nodes[0]);
if (fo != null) {
if (applicationElementMode && !fo.isFolder()) {
selectedAppElementName = IndexerQuery.file2AppElementId(fo);
fileNameTextField.setText(selectedAppElementName != null ? selectedAppElementName : ""); // NOI18N
} else if (!applicationElementMode && !fo.isFolder()) {
fileNameTextField.setText(fo.getNameExt());
} else {
fileNameTextField.setText(""); // NOI18N
selectedAppElementName = null;
}
selectedFile = fo;
selectedFolder = fo.getParent();
}
}
if (okButton != null) {
okButton.setEnabled(selectedFile != null && (!selectedFile.isFolder() || isChooseFolders()) && selectFilter.accept(selectedFile));
}
if (newButton != null) {
newButton.setEnabled(false);
}
firePropertyChange(PROP_SELECTED_FILE, oldSelected, selectedFile);
}
}
// called when the user types in the text field (DocumentListener)
@Override
public void changedUpdate(DocumentEvent e) {
}
// called when the user types in the text field (DocumentListener)
@Override
public void insertUpdate(DocumentEvent e) {
checkNameField();
}
// called when the user types in the text field (DocumentListener)
@Override
public void removeUpdate(DocumentEvent e) {
checkNameField();
}
}
private void checkNameField() {
if (selectedFolder != null) {
selectedFile = null;
String fileName = fileNameTextField.getText();
Node[] nodes = explorerManager.getSelectedNodes();
if (nodes.length == 1) {
FileObject fo = fileFromNode(nodes[0]);
if (fo != null) {
if (!fo.isFolder()) {
fo = fo.getParent();
}
selectedFile = fo.getFileObject(fileName);
selectedFolder = fo;
}
}
if (okButton != null) {
okButton.setEnabled(selectedFile != null && (!selectedFile.isFolder() || isChooseFolders()));
}
if (newButton != null) {
newButton.setEnabled(selectedFile == null && isChooseFolders()
&& Utilities.isJavaIdentifier(fileName));
}
}
}
/**
* Implementation of ExplorerManager.Provider. Needed for the tree view to
* work.
*/
@Override
public ExplorerManager getExplorerManager() {
return explorerManager;
}
public static FileChooser createInstance(FileObject aRoot, FileObject aSelectedFile, Filter aDisplayFilter, Filter aSelectFilter) {
FileChooser chooser = new FileChooser(aRoot, aDisplayFilter, aSelectFilter, true);
try {
chooser.setSelectedFile(aSelectedFile);
} catch (IllegalArgumentException iaex) {
ErrorManager.getDefault().notify(ErrorManager.INFORMATIONAL, iaex);
}
return chooser;
}
public static FileChooser createInstance(FileObject aRoot, FileObject aSelectedFile, final Set<String> allowedMimeTypes) {
FileChooser chooser = new FileChooser(aRoot, getMimeTypeFilter(allowedMimeTypes), true);
try {
chooser.setSelectedFile(aSelectedFile);
} catch (IllegalArgumentException iaex) {
ErrorManager.getDefault().notify(ErrorManager.INFORMATIONAL, iaex);
}
return chooser;
}
private static Filter getMimeTypeFilter(final Set<String> allowedMimeTypes) {
return new Filter() {
@Override
public boolean accept(FileObject fo) {
return allowedMimeTypes == null || allowedMimeTypes.isEmpty() || allowedMimeTypes.contains(fo.getMIMEType()) || fo.isFolder();
}
};
}
public static FileObject selectFile(FileObject aRoot, FileObject aSelectedFile, Set<String> aDisplayMimeTypes, Set<String> aSelectMimeTypes) {
FileChooser chooser = createInstance(aRoot, aSelectedFile, getMimeTypeFilter(aDisplayMimeTypes), getMimeTypeFilter(aSelectMimeTypes));
chooser.getDialog(chooser.getTitle(), null)
.setVisible(true);
if (chooser.isConfirmed()) {
return chooser.getSelectedFile();
} else {
return aSelectedFile;
}
}
public static FileObject selectAppElement(FileObject aRoot, FileObject aSelectedFile, Set<String> allowedMimeTypes) {
FileChooser chooser = createInstance(aRoot, aSelectedFile, allowedMimeTypes);
chooser.getDialog(chooser.getTitle(), null)
.setVisible(true);
if (chooser.isConfirmed()) {
return chooser.getSelectedFile();
} else {
return null;
}
}
private String getTitle() {
return NbBundle.getMessage(FileChooser.class,
applicationElementMode
? "CTL_OpenAppDialogName" //NOI18N
: isChooseFolders()
? "CTL_OpenFolderDialogName" : "CTL_OpenFileDialogName"); //NOI18N
}
private static FileObject fileFromNode(Node n) {
DataObject dobj = n.getLookup().lookup(DataObject.class);
return dobj != null ? dobj.getPrimaryFile() : null;
}
private void selectFileNode(FileObject fo) {
selectNode(explorerManager.getRootContext(), fo);
}
private void selectNode(Node parent, FileObject fo) {
for (Node n : parent.getChildren().getNodes(true)) {
FileObject nodeFO = fileFromNode(n);
if (nodeFO == fo) {
try {
explorerManager.setSelectedNodes(new Node[]{n});
} catch (PropertyVetoException ex) { // should not happen
ErrorManager.getDefault().notify(ErrorManager.INFORMATIONAL, ex);
}
break;
} else if (FileUtil.isParentOf(nodeFO, fo)) {
selectNode(n, fo);
break;
}
}
}
private static class FilteredNode extends FilterNode {
FilteredNode(Node original, String displayName, Filter filter) {
super(original, new FilteredChildren(original, filter));
if (displayName != null) {
disableDelegation(DELEGATE_GET_DISPLAY_NAME | DELEGATE_SET_DISPLAY_NAME);
setDisplayName(displayName);
}
}
}
/**
* Mutually recursive children that ensure propagation of the filter to
* deeper levels of the hierarchy. That is, they create FilteredNodes filtered by
* the same filter.
*/
public static class FilteredChildren extends FilterNode.Children {
private Filter filter;
public FilteredChildren(Node original, Filter filter) {
super(original);
this.filter = filter;
}
@Override
protected Node copyNode(Node node) {
return filter != null ? new FilteredNode(node, null, filter)
: super.copyNode(node);
}
@Override
protected Node[] createNodes(Node key) {
if (filter != null) {
FileObject fo = fileFromNode(key);
if (fo == null || !filter.accept(fo)) {
return new Node[0];
}
}
return super.createNodes(key);
}
}
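// Illustrative sketch (not part of the original class): FilteredChildren can wrap any node so
// that the same Filter is applied recursively, e.g. showing only folders:
//   Node foldersOnly = new FilterNode(someRoot, new FilteredChildren(someRoot, SELECT_FOLDERS_FILTER));
// where someRoot is a hypothetical Node obtained elsewhere.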
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jmeter.protocol.http.proxy;
import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Map;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.jmeter.config.Arguments;
import org.apache.jmeter.protocol.http.config.MultipartUrlConfig;
import org.apache.jmeter.protocol.http.control.gui.HttpTestSampleGui;
import org.apache.jmeter.protocol.http.sampler.HTTPSamplerBase;
import org.apache.jmeter.protocol.http.sampler.HTTPSamplerFactory;
import org.apache.jmeter.protocol.http.sampler.PostWriter;
import org.apache.jmeter.protocol.http.util.ConversionUtils;
import org.apache.jmeter.protocol.http.util.HTTPConstants;
import org.apache.jmeter.protocol.http.util.HTTPFileArg;
import org.apache.jmeter.testelement.TestElement;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
/**
* Default implementation that handles classical textual HTTP and multipart requests
*/
public class DefaultSamplerCreator extends AbstractSamplerCreator {
private static final Logger log = LoggerFactory.getLogger(DefaultSamplerCreator.class);
/*
* Must be in the same order as in the org.apache.jmeter.protocol.http.proxy.gui.ProxyControlGui class, createHTTPSamplerPanel method
*/
private static final int SAMPLER_NAME_NAMING_MODE_PREFIX = 0; // $NON-NLS-1$
private static final int SAMPLER_NAME_NAMING_MODE_COMPLETE = 1; // $NON-NLS-1$
/**
*
*/
public DefaultSamplerCreator() {
}
/**
* @see org.apache.jmeter.protocol.http.proxy.SamplerCreator#getManagedContentTypes()
*/
@Override
public String[] getManagedContentTypes() {
return new String[0];
}
/**
*
* @see org.apache.jmeter.protocol.http.proxy.SamplerCreator#createSampler(org.apache.jmeter.protocol.http.proxy.HttpRequestHdr, java.util.Map, java.util.Map)
*/
@Override
public HTTPSamplerBase createSampler(HttpRequestHdr request,
Map<String, String> pageEncodings, Map<String, String> formEncodings) {
// Instantiate the sampler
HTTPSamplerBase sampler = HTTPSamplerFactory.newInstance(request.getHttpSamplerName());
sampler.setProperty(TestElement.GUI_CLASS, HttpTestSampleGui.class.getName());
// Defaults
sampler.setFollowRedirects(false);
sampler.setUseKeepAlive(true);
if (log.isDebugEnabled()) {
log.debug("getSampler: sampler path = " + sampler.getPath());
}
return sampler;
}
/**
* @see org.apache.jmeter.protocol.http.proxy.SamplerCreator#populateSampler(org.apache.jmeter.protocol.http.sampler.HTTPSamplerBase, org.apache.jmeter.protocol.http.proxy.HttpRequestHdr, java.util.Map, java.util.Map)
*/
@Override
public final void populateSampler(HTTPSamplerBase sampler,
HttpRequestHdr request, Map<String, String> pageEncodings,
Map<String, String> formEncodings) throws Exception{
computeFromHeader(sampler, request, pageEncodings, formEncodings);
computeFromPostBody(sampler, request);
if (log.isDebugEnabled()) {
log.debug("sampler path = " + sampler.getPath());
}
Arguments arguments = sampler.getArguments();
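// A single argument with an empty name means the body was captured as one unnamed value
// rather than as name=value parameters, so mark the sampler as using a raw post body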
if(arguments.getArgumentCount() == 1 && arguments.getArgument(0).getName().length()==0) {
sampler.setPostBodyRaw(true);
}
}
/**
* Compute sampler information from the request header
* @param sampler {@link HTTPSamplerBase}
* @param request {@link HttpRequestHdr}
* @param pageEncodings Map of page encodings
* @param formEncodings Map of form encodings
* @throws Exception when something fails
*/
protected void computeFromHeader(HTTPSamplerBase sampler,
HttpRequestHdr request, Map<String, String> pageEncodings,
Map<String, String> formEncodings) throws Exception {
computeDomain(sampler, request);
computeMethod(sampler, request);
computePort(sampler, request);
computeProtocol(sampler, request);
computeContentEncoding(sampler, request,
pageEncodings, formEncodings);
computePath(sampler, request);
computeSamplerName(sampler, request);
}
/**
* Compute sampler information from the request POST body
* @param sampler {@link HTTPSamplerBase}
* @param request {@link HttpRequestHdr}
* @throws Exception when something fails
*/
protected void computeFromPostBody(HTTPSamplerBase sampler,
HttpRequestHdr request) throws Exception {
// If it was an HTTP GET request, then all parameters in the URL
// have been handled by sampler.setPath above, so we only need
// to parse the rest of the request if it is not a GET (or CONNECT) request
if((!HTTPConstants.CONNECT.equals(request.getMethod())) && (!HTTPConstants.GET.equals(request.getMethod()))) {
// Check if it was a multipart http post request
final String contentType = request.getContentType();
MultipartUrlConfig urlConfig = request.getMultipartConfig(contentType);
String contentEncoding = sampler.getContentEncoding();
// Get the post data using the content encoding of the request
String postData = null;
if (log.isDebugEnabled()) {
if(!StringUtils.isEmpty(contentEncoding)) {
log.debug("Using encoding " + contentEncoding + " for request body");
}
else {
log.debug("No encoding found, using JRE default encoding for request body");
}
}
if (!StringUtils.isEmpty(contentEncoding)) {
postData = new String(request.getRawPostData(), contentEncoding);
} else {
// Use default encoding
postData = new String(request.getRawPostData(), PostWriter.ENCODING);
}
if (urlConfig != null) {
urlConfig.parseArguments(postData);
// Tell the sampler to do a multipart post
sampler.setDoMultipartPost(true);
// Remove the header for content-type and content-length, since
// those values will most likely be incorrect when the sampler
// performs the multipart request, because the boundary string
// will change
request.getHeaderManager().removeHeaderNamed(HttpRequestHdr.CONTENT_TYPE);
request.getHeaderManager().removeHeaderNamed(HttpRequestHdr.CONTENT_LENGTH);
// Set the form data
sampler.setArguments(urlConfig.getArguments());
// Set the file uploads
sampler.setHTTPFiles(urlConfig.getHTTPFileArgs().asArray());
sampler.setDoBrowserCompatibleMultipart(true); // we are parsing browser input here
// used when postData is pure xml (eg. an xml-rpc call) or for PUT
} else if (postData.trim().startsWith("<?")
|| HTTPConstants.PUT.equals(sampler.getMethod())
|| isPotentialXml(postData)) {
sampler.addNonEncodedArgument("", postData, "");
} else if (contentType == null ||
(contentType.startsWith(HTTPConstants.APPLICATION_X_WWW_FORM_URLENCODED) &&
!isBinaryContent(contentType))) {
// It is the most common post request, with parameter name and values
// We also assume this if no content type is present, to be most backwards compatible,
// but maybe we should only parse arguments if the content type is as expected
sampler.parseArguments(postData.trim(), contentEncoding); //standard name=value postData
} else if (postData.length() > 0) {
if (isBinaryContent(contentType)) {
try {
File tempDir = new File(getBinaryDirectory());
File out = File.createTempFile(request.getMethod(), getBinaryFileSuffix(), tempDir);
FileUtils.writeByteArrayToFile(out,request.getRawPostData());
HTTPFileArg [] files = {new HTTPFileArg(out.getPath(),"",contentType)};
sampler.setHTTPFiles(files);
} catch (IOException e) {
log.warn("Could not create binary file: "+e);
}
} else {
// Just put the whole postbody as the value of a parameter
sampler.addNonEncodedArgument("", postData, ""); //used when postData is pure xml (ex. an xml-rpc call)
}
}
}
}
/**
* Tries parsing to see if the content is well-formed XML
* @param postData String
* @return boolean
*/
private static boolean isPotentialXml(String postData) {
try {
SAXParserFactory spf = SAXParserFactory.newInstance();
SAXParser saxParser = spf.newSAXParser();
XMLReader xmlReader = saxParser.getXMLReader();
ErrorDetectionHandler detectionHandler =
new ErrorDetectionHandler();
xmlReader.setContentHandler(detectionHandler);
xmlReader.setErrorHandler(detectionHandler);
xmlReader.parse(new InputSource(new StringReader(postData)));
return !detectionHandler.isErrorDetected();
} catch (ParserConfigurationException | SAXException | IOException e) {
return false;
}
}
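// Illustrative examples (not in the original source): isPotentialXml("<a><b/></a>") returns true
// because the SAX parse completes without reported errors, while isPotentialXml("name=value&x=1")
// returns false because the parse fails.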
private static final class ErrorDetectionHandler extends DefaultHandler {
private boolean errorDetected = false;
public ErrorDetectionHandler() {
super();
}
/* (non-Javadoc)
* @see org.xml.sax.helpers.DefaultHandler#error(org.xml.sax.SAXParseException)
*/
@Override
public void error(SAXParseException e) throws SAXException {
this.errorDetected = true;
}
/* (non-Javadoc)
* @see org.xml.sax.helpers.DefaultHandler#fatalError(org.xml.sax.SAXParseException)
*/
@Override
public void fatalError(SAXParseException e) throws SAXException {
this.errorDetected = true;
}
/**
* @return the errorDetected
*/
public boolean isErrorDetected() {
return errorDetected;
}
}
/**
* Compute sampler name
* @param sampler {@link HTTPSamplerBase}
* @param request {@link HttpRequestHdr}
*/
protected void computeSamplerName(HTTPSamplerBase sampler,
HttpRequestHdr request) {
String prefix = request.getPrefix();
int httpSampleNameMode = request.getHttpSampleNameMode();
if (!HTTPConstants.CONNECT.equals(request.getMethod()) && isNumberRequests()) {
if(!StringUtils.isEmpty(prefix)) {
if (httpSampleNameMode==SAMPLER_NAME_NAMING_MODE_PREFIX) {
sampler.setName(prefix + incrementRequestNumberAndGet() + " " + sampler.getPath());
} else if (httpSampleNameMode==SAMPLER_NAME_NAMING_MODE_COMPLETE) {
sampler.setName(incrementRequestNumberAndGet() + " " + prefix);
} else {
log.debug("Sampler name naming mode not recognized");
}
} else {
sampler.setName(incrementRequestNumberAndGet() + " " + sampler.getPath());
}
} else {
if(!StringUtils.isEmpty(prefix)) {
if (httpSampleNameMode==SAMPLER_NAME_NAMING_MODE_PREFIX) {
sampler.setName(prefix+sampler.getPath());
} else if (httpSampleNameMode==SAMPLER_NAME_NAMING_MODE_COMPLETE) {
sampler.setName(prefix);
} else {
log.debug("Sampler name naming mode not recognized");
}
} else {
sampler.setName(sampler.getPath());
}
}
}
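// Illustrative example (hypothetical values, not in the original source): with request numbering
// enabled, prefix "FOO-", next request number 12 and sampler path "/login", PREFIX mode yields
// the name "FOO-12 /login" while COMPLETE mode yields "12 FOO-".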
/**
* Set path on sampler
* @param sampler {@link HTTPSamplerBase}
* @param request {@link HttpRequestHdr}
*/
protected void computePath(HTTPSamplerBase sampler, HttpRequestHdr request) {
if(sampler.getContentEncoding() != null) {
sampler.setPath(request.getPath(), sampler.getContentEncoding());
}
else {
// Although the spec says UTF-8 should be used for encoding URL parameters,
// most browsers use ISO-8859-1 by default if the encoding is not known.
// We use null for contentEncoding; the URL parameters will then be added
// with the value from the URL, and the "encode?" flag set to false
sampler.setPath(request.getPath(), null);
}
if (log.isDebugEnabled()) {
log.debug("Proxy: setting path: " + sampler.getPath());
}
}
/**
* Compute content encoding
* @param sampler {@link HTTPSamplerBase}
* @param request {@link HttpRequestHdr}
* @param pageEncodings Map of page encodings
* @param formEncodings Map of form encodings
* @throws MalformedURLException when no {@link URL} could be built from
* <code>sampler</code> and <code>request</code>
*/
protected void computeContentEncoding(HTTPSamplerBase sampler,
HttpRequestHdr request, Map<String, String> pageEncodings,
Map<String, String> formEncodings) throws MalformedURLException {
URL pageUrl = null;
if(sampler.isProtocolDefaultPort()) {
pageUrl = new URL(sampler.getProtocol(), sampler.getDomain(), request.getPath());
}
else {
pageUrl = new URL(sampler.getProtocol(), sampler.getDomain(),
sampler.getPort(), request.getPath());
}
String urlWithoutQuery = request.getUrlWithoutQuery(pageUrl);
String contentEncoding = computeContentEncoding(request, pageEncodings,
formEncodings, urlWithoutQuery);
// Set the content encoding
if(!StringUtils.isEmpty(contentEncoding)) {
sampler.setContentEncoding(contentEncoding);
}
}
/**
* Computes the content encoding from the request; if not found, uses pageEncodings
* and formEncodings to see if the URL was previously seen with a known content type
* @param request {@link HttpRequestHdr}
* @param pageEncodings Map of page encodings
* @param formEncodings Map of form encodings
* @param urlWithoutQuery the request URL without the query parameters
* @return String content encoding
*/
protected String computeContentEncoding(HttpRequestHdr request,
Map<String, String> pageEncodings,
Map<String, String> formEncodings, String urlWithoutQuery) {
// Check if the request itself tells us what the encoding is
String contentEncoding = null;
String requestContentEncoding = ConversionUtils.getEncodingFromContentType(
request.getContentType());
if(requestContentEncoding != null) {
contentEncoding = requestContentEncoding;
}
else {
// Check if we know the encoding of the page
if (pageEncodings != null) {
synchronized (pageEncodings) {
contentEncoding = pageEncodings.get(urlWithoutQuery);
}
}
// Check if we know the encoding of the form
if (formEncodings != null) {
synchronized (formEncodings) {
String formEncoding = formEncodings.get(urlWithoutQuery);
// Form encoding has priority over page encoding
if (formEncoding != null) {
contentEncoding = formEncoding;
}
}
}
}
return contentEncoding;
}
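// Illustrative precedence (hypothetical values, not in the original source): a request whose
// Content-Type carries "charset=UTF-8" yields "UTF-8" regardless of the maps; without a charset,
// formEncodings.get(urlWithoutQuery) wins over pageEncodings.get(urlWithoutQuery), and null is
// returned when neither map knows the URL.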
/**
* Set protocol on sampler
* @param sampler {@link HTTPSamplerBase}
* @param request {@link HttpRequestHdr}
*/
protected void computeProtocol(HTTPSamplerBase sampler,
HttpRequestHdr request) {
sampler.setProtocol(request.getProtocol(sampler));
}
/**
* Set Port on sampler
* @param sampler {@link HTTPSamplerBase}
* @param request {@link HttpRequestHdr}
*/
protected void computePort(HTTPSamplerBase sampler, HttpRequestHdr request) {
sampler.setPort(request.serverPort());
if (log.isDebugEnabled()) {
log.debug("Proxy: setting port: " + sampler.getPort());
}
}
/**
* Set method on sampler
* @param sampler {@link HTTPSamplerBase}
* @param request {@link HttpRequestHdr}
*/
protected void computeMethod(HTTPSamplerBase sampler, HttpRequestHdr request) {
sampler.setMethod(request.getMethod());
log.debug("Proxy: setting method: " + sampler.getMethod());
}
/**
* Set domain on sampler
* @param sampler {@link HTTPSamplerBase}
* @param request {@link HttpRequestHdr}
*/
protected void computeDomain(HTTPSamplerBase sampler, HttpRequestHdr request) {
sampler.setDomain(request.serverName());
if (log.isDebugEnabled()) {
log.debug("Proxy: setting server: " + sampler.getDomain());
}
}
}
|
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.hive;
import com.facebook.presto.spi.ColumnMetadata;
import com.facebook.presto.spi.ConnectorColumnHandle;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.spi.type.TypeManager;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Function;
import com.google.common.base.Objects;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Table;
import java.util.Map;
import static com.facebook.presto.hive.util.Types.checkType;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
public class HiveColumnHandle
implements ConnectorColumnHandle
{
public static final String SAMPLE_WEIGHT_COLUMN_NAME = "__presto__sample_weight__";
private final String clientId;
private final String name;
private final int ordinalPosition;
private final HiveType hiveType;
private final String typeName;
private final int hiveColumnIndex;
private final boolean partitionKey;
@JsonCreator
public HiveColumnHandle(
@JsonProperty("clientId") String clientId,
@JsonProperty("name") String name,
@JsonProperty("ordinalPosition") int ordinalPosition,
@JsonProperty("hiveType") HiveType hiveType,
@JsonProperty("typeName") String typeName,
@JsonProperty("hiveColumnIndex") int hiveColumnIndex,
@JsonProperty("partitionKey") boolean partitionKey)
{
this.clientId = checkNotNull(clientId, "clientId is null");
this.name = checkNotNull(name, "name is null");
checkArgument(ordinalPosition >= 0, "ordinalPosition is negative");
this.ordinalPosition = ordinalPosition;
checkArgument(hiveColumnIndex >= 0 || partitionKey, "hiveColumnIndex is negative");
this.hiveColumnIndex = hiveColumnIndex;
this.hiveType = checkNotNull(hiveType, "hiveType is null");
this.typeName = checkNotNull(typeName, "type is null");
this.partitionKey = partitionKey;
}
@JsonProperty
public String getClientId()
{
return clientId;
}
@JsonProperty
public String getName()
{
return name;
}
@JsonProperty
public int getOrdinalPosition()
{
return ordinalPosition;
}
@JsonProperty
public HiveType getHiveType()
{
return hiveType;
}
@JsonProperty
public int getHiveColumnIndex()
{
return hiveColumnIndex;
}
@JsonProperty
public boolean isPartitionKey()
{
return partitionKey;
}
public ColumnMetadata getColumnMetadata(TypeManager typeManager)
{
return new ColumnMetadata(name, typeManager.getType(typeName), ordinalPosition, partitionKey);
}
@JsonProperty
public String getTypeName()
{
return typeName;
}
@Override
public int hashCode()
{
return Objects.hashCode(clientId, name, hiveColumnIndex, hiveType, partitionKey);
}
@Override
public boolean equals(Object obj)
{
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
HiveColumnHandle other = (HiveColumnHandle) obj;
return Objects.equal(this.clientId, other.clientId) &&
Objects.equal(this.name, other.name) &&
Objects.equal(this.hiveColumnIndex, other.hiveColumnIndex) &&
Objects.equal(this.hiveType, other.hiveType) &&
Objects.equal(this.partitionKey, other.partitionKey);
}
@Override
public String toString()
{
return Objects.toStringHelper(this)
.add("clientId", clientId)
.add("name", name)
.add("ordinalPosition", ordinalPosition)
.add("hiveType", hiveType)
.add("hiveColumnIndex", hiveColumnIndex)
.add("partitionKey", partitionKey)
.toString();
}
public static Function<ConnectorColumnHandle, HiveColumnHandle> hiveColumnHandle()
{
return new Function<ConnectorColumnHandle, HiveColumnHandle>()
{
@Override
public HiveColumnHandle apply(ConnectorColumnHandle columnHandle)
{
return checkType(columnHandle, HiveColumnHandle.class, "columnHandle");
}
};
}
public static Function<HiveColumnHandle, Integer> hiveColumnIndexGetter()
{
return new Function<HiveColumnHandle, Integer>()
{
@Override
public Integer apply(HiveColumnHandle input)
{
return input.getHiveColumnIndex();
}
};
}
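// Illustrative sketch (not part of the original class): the Function factories above are meant
// for Guava transforms, e.g. projecting the hive column indexes out of a caller-supplied
// collection of handles.
public static Iterable<Integer> exampleHiveColumnIndexes(Iterable<HiveColumnHandle> handles)
{
return Iterables.transform(handles, hiveColumnIndexGetter());
}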
public static Function<HiveColumnHandle, ColumnMetadata> columnMetadataGetter(Table table, final TypeManager typeManager)
{
ImmutableMap.Builder<String, String> builder = ImmutableMap.builder();
for (FieldSchema field : Iterables.concat(table.getSd().getCols(), table.getPartitionKeys())) {
if (field.getComment() != null) {
builder.put(field.getName(), field.getComment());
}
}
final Map<String, String> columnComment = builder.build();
return new Function<HiveColumnHandle, ColumnMetadata>()
{
@Override
public ColumnMetadata apply(HiveColumnHandle input)
{
return new ColumnMetadata(
input.getName(),
typeManager.getType(input.getTypeName()),
input.getOrdinalPosition(),
input.isPartitionKey(),
columnComment.get(input.getName()),
false);
}
};
}
public static Function<HiveColumnHandle, Type> nativeTypeGetter(final TypeManager typeManager)
{
return new Function<HiveColumnHandle, Type>()
{
@Override
public Type apply(HiveColumnHandle input)
{
return typeManager.getType(input.getTypeName());
}
};
}
public static Predicate<HiveColumnHandle> isPartitionKeyPredicate()
{
return new Predicate<HiveColumnHandle>()
{
@Override
public boolean apply(HiveColumnHandle input)
{
return input.isPartitionKey();
}
};
}
}
|
|
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.gemstone.gemfire.internal.cache.wan.serial;
import java.util.HashMap;
import java.util.Map;
import com.gemstone.gemfire.internal.cache.wan.WANTestBase;
import com.gemstone.gemfire.internal.cache.wan.WANTestBase.MyGatewayEventFilter;
import dunit.AsyncInvocation;
public class SerialWANStatsDUnitTest extends WANTestBase {
private static final long serialVersionUID = 1L;
public SerialWANStatsDUnitTest(String name) {
super(name);
}
public void setUp() throws Exception {
super.setUp();
}
public void testReplicatedSerialPropagation() throws Exception {
Integer lnPort = (Integer)vm0.invoke(WANTestBase.class,
"createFirstLocatorWithDSId", new Object[] { 1 });
Integer nyPort = (Integer)vm1.invoke(WANTestBase.class,
"createFirstRemoteLocator", new Object[] { 2, lnPort });
vm2.invoke(WANTestBase.class, "createReceiver", new Object[] { nyPort });
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm5.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm6.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm7.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm4.invoke(WANTestBase.class, "createSender", new Object[] { "ln", 2,
false, 100, 10, false, false, null, true });
vm5.invoke(WANTestBase.class, "createSender", new Object[] { "ln", 2,
false, 100, 10, false, false, null, true });
vm2.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR", null, isOffHeap() });
vm4.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
vm4.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR", "ln", isOffHeap() });
vm5.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR", "ln", isOffHeap() });
vm6.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR", "ln", isOffHeap() });
vm7.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR", "ln", isOffHeap() });
vm5.invoke(WANTestBase.class, "doPuts", new Object[] { testName + "_RR",
1000 });
vm2.invoke(WANTestBase.class, "validateRegionSize", new Object[] {
testName + "_RR", 1000 });
pause(2000);
vm2.invoke(WANTestBase.class, "checkGatewayReceiverStats", new Object[] {100, 1000, 1000 });
vm4.invoke(WANTestBase.class, "checkQueueStats", new Object[] {"ln",
0, 1000, 1000, 1000});
vm4.invoke(WANTestBase.class, "checkBatchStats", new Object[] {"ln",
100});
vm5.invoke(WANTestBase.class, "checkQueueStats", new Object[] {"ln",
0, 1000, 0, 0});
vm5.invoke(WANTestBase.class, "checkBatchStats", new Object[] {"ln",
0});
}
public void testWANStatsTwoWanSites() throws Exception {
Integer lnPort = createFirstLocatorWithDSId(1);
Integer nyPort = (Integer)vm0.invoke(WANTestBase.class,
"createFirstRemoteLocator", new Object[] { 2, lnPort });
Integer tkPort = (Integer)vm1.invoke(WANTestBase.class,
"createFirstRemoteLocator", new Object[] { 3, lnPort });
vm2.invoke(WANTestBase.class, "createReceiver", new Object[] { nyPort });
vm3.invoke(WANTestBase.class, "createReceiver", new Object[] { tkPort });
vm4.invoke(WANTestBase.class, "createCache", new Object[] {lnPort });
vm5.invoke(WANTestBase.class, "createCache", new Object[] {lnPort });
vm6.invoke(WANTestBase.class, "createCache", new Object[] {lnPort });
vm7.invoke(WANTestBase.class, "createCache", new Object[] {lnPort });
vm4.invoke(WANTestBase.class, "createSender", new Object[] { "lnSerial1",
2, false, 100, 10, false, false, null, true });
vm5.invoke(WANTestBase.class, "createSender", new Object[] { "lnSerial1",
2, false, 100, 10, false, false, null, true });
vm4.invoke(WANTestBase.class, "createSender", new Object[] { "lnSerial2",
3, false, 100, 10, false, false, null, true });
vm5.invoke(WANTestBase.class, "createSender", new Object[] { "lnSerial2",
3, false, 100, 10, false, false, null, true });
vm2.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR", null, isOffHeap() });
vm3.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR", null, isOffHeap() });
vm4.invoke(WANTestBase.class, "startSender", new Object[] { "lnSerial1" });
vm5.invoke(WANTestBase.class, "startSender", new Object[] { "lnSerial1" });
vm4.invoke(WANTestBase.class, "startSender", new Object[] { "lnSerial2" });
vm5.invoke(WANTestBase.class, "startSender", new Object[] { "lnSerial2" });
vm4.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR", "lnSerial1,lnSerial2", isOffHeap() });
vm5.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR", "lnSerial1,lnSerial2", isOffHeap() });
vm6.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR", "lnSerial1,lnSerial2", isOffHeap() });
vm7.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR", "lnSerial1,lnSerial2", isOffHeap() });
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { testName + "_RR",
1000 });
vm2.invoke(WANTestBase.class, "validateRegionSize", new Object[] {
testName + "_RR", 1000 });
vm3.invoke(WANTestBase.class, "validateRegionSize", new Object[] {
testName + "_RR", 1000 });
pause(2000);
vm2.invoke(WANTestBase.class, "checkGatewayReceiverStats", new Object[] {100, 1000, 1000 });
vm3.invoke(WANTestBase.class, "checkGatewayReceiverStats", new Object[] {100, 1000, 1000 });
vm4.invoke(WANTestBase.class, "checkQueueStats", new Object[] {"lnSerial1",
0, 1000, 1000, 1000});
vm4.invoke(WANTestBase.class, "checkBatchStats", new Object[] {"lnSerial1",
100});
vm4.invoke(WANTestBase.class, "checkQueueStats", new Object[] {"lnSerial2",
0, 1000, 1000, 1000});
vm4.invoke(WANTestBase.class, "checkBatchStats", new Object[] {"lnSerial2",
100});
vm5.invoke(WANTestBase.class, "checkQueueStats", new Object[] {"lnSerial1",
0, 1000, 0, 0});
vm5.invoke(WANTestBase.class, "checkBatchStats", new Object[] {"lnSerial1",
0});
vm5.invoke(WANTestBase.class, "checkQueueStats", new Object[] {"lnSerial2",
0, 1000, 0, 0});
vm5.invoke(WANTestBase.class, "checkBatchStats", new Object[] {"lnSerial2",
0});
}
public void testReplicatedSerialPropagationHA() throws Exception {
Integer lnPort = (Integer)vm0.invoke(WANTestBase.class,
"createFirstLocatorWithDSId", new Object[] { 1 });
Integer nyPort = (Integer)vm1.invoke(WANTestBase.class,
"createFirstRemoteLocator", new Object[] { 2, lnPort });
vm2.invoke(WANTestBase.class, "createReceiver", new Object[] { nyPort });
vm4.invoke(WANTestBase.class, "createCache", new Object[] {lnPort });
vm5.invoke(WANTestBase.class, "createCache", new Object[] {lnPort });
vm6.invoke(WANTestBase.class, "createCache", new Object[] {lnPort });
vm7.invoke(WANTestBase.class, "createCache", new Object[] {lnPort });
vm4.invoke(WANTestBase.class, "createSender", new Object[] { "ln", 2,
false, 100, 10, false, false, null, true });
vm5.invoke(WANTestBase.class, "createSender", new Object[] { "ln", 2,
false, 100, 10, false, false, null, true });
vm2.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR", null, isOffHeap() });
vm4.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
vm4.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR", "ln", isOffHeap() });
vm5.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR", "ln", isOffHeap() });
vm6.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR", "ln", isOffHeap() });
vm7.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR", "ln", isOffHeap() });
AsyncInvocation inv1 = vm5.invokeAsync(WANTestBase.class, "doPuts",
new Object[] { testName + "_RR", 10000 });
pause(2000);
AsyncInvocation inv2 = vm4.invokeAsync(WANTestBase.class, "killSender", new Object[] { "ln" });
Boolean isKilled = Boolean.FALSE;
try {
isKilled = (Boolean)inv2.getResult();
}
catch (Throwable e) {
fail("Unexpected exception while killing a sender");
}
AsyncInvocation inv3 = null;
if(!isKilled){
inv3 = vm5.invokeAsync(WANTestBase.class, "killSender", new Object[] { "ln" });
inv3.join();
}
inv1.join();
inv2.join();
vm2.invoke(WANTestBase.class, "validateRegionSize", new Object[] {
testName + "_RR", 10000 });
vm2.invoke(WANTestBase.class, "checkGatewayReceiverStatsHA", new Object[] {1000, 10000, 10000 });
vm5.invoke(WANTestBase.class, "checkStats_Failover", new Object[] {"ln", 10000});
}
public void testReplicatedSerialPropagationUNPorcessedEvents() throws Exception {
Integer lnPort = (Integer)vm0.invoke(WANTestBase.class,
"createFirstLocatorWithDSId", new Object[] { 1 });
Integer nyPort = (Integer)vm1.invoke(WANTestBase.class,
"createFirstRemoteLocator", new Object[] { 2, lnPort });
//these are part of remote site
vm2.invoke(WANTestBase.class, "createReceiver", new Object[] { nyPort });
vm3.invoke(WANTestBase.class, "createReceiver", new Object[] { nyPort });
//these are part of local site
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm5.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm6.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm7.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
//senders are created on local site
vm4.invoke(WANTestBase.class, "createSender", new Object[] { "ln", 2,
false, 100, 20, false, false, null, true });
vm5.invoke(WANTestBase.class, "createSender", new Object[] { "ln", 2,
false, 100, 20, false, false, null, true });
//create one RR (RR_1) on remote site
vm2.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR_1", null, isOffHeap() });
vm3.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR_1", null, isOffHeap() });
//create another RR (RR_2) on remote site
vm2.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR_2", null, isOffHeap() });
vm3.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR_2", null, isOffHeap() });
//start the senders on local site
vm4.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
//create one RR (RR_1) on local site
vm4.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR_1", "ln", isOffHeap() });
vm5.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR_1", "ln", isOffHeap() });
vm6.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR_1", "ln", isOffHeap() });
vm7.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR_1", "ln", isOffHeap() });
//create another RR (RR_2) on local site
vm4.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR_2", "ln", isOffHeap() });
vm5.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR_2", "ln", isOffHeap() });
vm6.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR_2", "ln", isOffHeap() });
vm7.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR_2", "ln", isOffHeap() });
//do puts in RR_1
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { testName + "_RR_1", 1000 });
//do puts in RR_2 in main thread
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { testName + "_RR_2", 500 });
//sleep for some time to let all the events propagate to remote site
Thread.sleep(20);
//vm4.invoke(WANTestBase.class, "verifyQueueSize", new Object[] { "ln", 0 });
vm2.invoke(WANTestBase.class, "validateRegionSize", new Object[] {
testName + "_RR_1", 1000 });
vm3.invoke(WANTestBase.class, "validateRegionSize", new Object[] {
testName + "_RR_2", 500 });
pause(2000);
vm4.invoke(WANTestBase.class, "checkQueueStats", new Object[] {"ln",
0, 1500, 1500, 1500});
vm4.invoke(WANTestBase.class, "checkBatchStats", new Object[] {"ln",
75});
vm4.invoke(WANTestBase.class, "checkUnProcessedStats", new Object[] {"ln", 0});
vm5.invoke(WANTestBase.class, "checkQueueStats", new Object[] {"ln",
0, 1500, 0, 0});
vm5.invoke(WANTestBase.class, "checkBatchStats", new Object[] {"ln",
0});
vm5.invoke(WANTestBase.class, "checkUnProcessedStats", new Object[] {"ln", 1500});
}
/**
* One region and a sender are configured on the local site, and one region and a
* receiver are configured on the remote site. Puts to the local region are in progress
* while the remote region is destroyed in the middle.
*
* @throws Exception
*/
public void testReplicatedSerialPropagationWithRemoteRegionDestroy() throws Exception {
Integer lnPort = (Integer)vm0.invoke(WANTestBase.class,
"createFirstLocatorWithDSId", new Object[] { 1 });
Integer nyPort = (Integer)vm1.invoke(WANTestBase.class,
"createFirstRemoteLocator", new Object[] { 2, lnPort });
//these are part of remote site
vm2.invoke(WANTestBase.class, "createReceiver", new Object[] { nyPort });
//these are part of local site
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm5.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm6.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm7.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
//senders are created on local site
vm4.invoke(WANTestBase.class, "createSender", new Object[] { "ln", 2,
false, 100, 100, false, false, null, true });
vm5.invoke(WANTestBase.class, "createSender", new Object[] { "ln", 2,
false, 100, 100, false, false, null, true });
//create one RR (RR_1) on remote site
vm2.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR_1", null, isOffHeap() });
//start the senders on local site
vm4.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
//create one RR (RR_1) on local site
vm4.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR_1", "ln", isOffHeap() });
vm5.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR_1", "ln", isOffHeap() });
vm6.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR_1", "ln", isOffHeap() });
vm7.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
testName + "_RR_1", "ln", isOffHeap() });
//start puts in RR_1 in another thread
AsyncInvocation inv1 = vm4.invokeAsync(WANTestBase.class, "doPuts", new Object[] { testName + "_RR_1", 10000 });
//destroy RR_1 in remote site
pause(3000);
vm2.invoke(WANTestBase.class, "destroyRegion", new Object[] { testName + "_RR_1"});
try {
inv1.join();
} catch (InterruptedException e) {
e.printStackTrace();
fail();
}
//verify that all is well in local site. All the events should be present in local region
vm4.invoke(WANTestBase.class, "validateRegionSize", new Object[] {
testName + "_RR_1", 10000 });
//assuming some events might have been dispatched before the remote region was destroyed,
//the sender's region queue will have fewer than 10000 events, but it will not be empty.
//NOTE: this much verification might be sufficient in DUnit. Hydra will take care of
//more in depth validations.
vm4.invoke(WANTestBase.class, "verifyRegionQueueNotEmpty", new Object[] {"ln" });
vm4.invoke(WANTestBase.class, "checkBatchStats", new Object[] {"ln", true, true});
vm5.invoke(WANTestBase.class, "checkUnProcessedStats", new Object[] {"ln", 10000});
vm2.invoke(WANTestBase.class, "checkExcepitonStats", new Object[] {1});
}
public void testSerialPropagationWithFilter() throws Exception {
Integer lnPort = (Integer)vm0.invoke(WANTestBase.class, "createFirstLocatorWithDSId",
new Object[] {1});
Integer nyPort = (Integer)vm1.invoke(WANTestBase.class,
"createFirstRemoteLocator", new Object[] {2,lnPort });
vm2.invoke(WANTestBase.class, "createReceiver", new Object[] { nyPort });
vm3.invoke(WANTestBase.class, "createReceiver", new Object[] { nyPort });
vm4.invoke(WANTestBase.class, "createCache", new Object[] {lnPort });
vm5.invoke(WANTestBase.class, "createCache", new Object[] {lnPort });
vm6.invoke(WANTestBase.class, "createCache", new Object[] {lnPort });
vm7.invoke(WANTestBase.class, "createCache", new Object[] {lnPort });
vm4.invoke(WANTestBase.class, "createSender", new Object[] { "ln", 2,
false, 100, 10, false, false,
new MyGatewayEventFilter(), true });
vm5.invoke(WANTestBase.class, "createSender", new Object[] { "ln", 2,
false, 100, 10, false, false,
new MyGatewayEventFilter(), true });
vm4.invoke(WANTestBase.class, "createPartitionedRegion", new Object[] {
testName, "ln", 1, 100, isOffHeap() });
vm5.invoke(WANTestBase.class, "createPartitionedRegion", new Object[] {
testName, "ln", 1, 100, isOffHeap() });
vm6.invoke(WANTestBase.class, "createPartitionedRegion", new Object[] {
testName, "ln", 1, 100, isOffHeap() });
vm7.invoke(WANTestBase.class, "createPartitionedRegion", new Object[] {
testName, "ln", 1, 100, isOffHeap() });
vm4.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
vm2.invoke(WANTestBase.class, "createPartitionedRegion", new Object[] {
testName, null, 1, 100, isOffHeap() });
vm3.invoke(WANTestBase.class, "createPartitionedRegion", new Object[] {
testName, null, 1, 100, isOffHeap() });
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { testName, 1000 });
vm2.invoke(WANTestBase.class, "validateRegionSize", new Object[] {
testName, 800 });
pause(2000);
vm4.invoke(WANTestBase.class, "checkQueueStats", new Object[] {"ln",
0, 1000, 900, 800});
vm4.invoke(WANTestBase.class, "checkEventFilteredStats", new Object[] {"ln",
200});
vm4.invoke(WANTestBase.class, "checkBatchStats", new Object[] {"ln",
80});
vm4.invoke(WANTestBase.class, "checkUnProcessedStats", new Object[] {"ln", 0});
vm5.invoke(WANTestBase.class, "checkQueueStats", new Object[] {"ln",
0, 1000, 0, 0});
vm5.invoke(WANTestBase.class, "checkBatchStats", new Object[] {"ln",
0});
vm5.invoke(WANTestBase.class, "checkUnProcessedStats", new Object[] {"ln",900});
}
public void testSerialPropagationConflation() throws Exception {
Integer lnPort = (Integer)vm0.invoke(WANTestBase.class,
"createFirstLocatorWithDSId", new Object[] { 1 });
Integer nyPort = (Integer)vm1.invoke(WANTestBase.class,
"createFirstRemoteLocator", new Object[] { 2, lnPort });
vm2.invoke(WANTestBase.class, "createReceiver", new Object[] { nyPort });
vm3.invoke(WANTestBase.class, "createReceiver", new Object[] { nyPort });
vm4.invoke(WANTestBase.class, "createCache", new Object[] {lnPort });
vm5.invoke(WANTestBase.class, "createCache", new Object[] {lnPort });
vm6.invoke(WANTestBase.class, "createCache", new Object[] {lnPort });
vm7.invoke(WANTestBase.class, "createCache", new Object[] {lnPort });
vm4.invoke(WANTestBase.class, "createSender", new Object[] { "ln", 2,
false, 100, 10, true, false, null, true });
vm4.invoke(WANTestBase.class, "createPartitionedRegion", new Object[] {
testName, "ln", 0, 100, isOffHeap() });
vm5.invoke(WANTestBase.class, "createPartitionedRegion", new Object[] {
testName, "ln", 0, 100, isOffHeap() });
vm6.invoke(WANTestBase.class, "createPartitionedRegion", new Object[] {
testName, "ln", 0, 100, isOffHeap() });
vm7.invoke(WANTestBase.class, "createPartitionedRegion", new Object[] {
testName, "ln", 0, 100, isOffHeap() });
vm4.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
vm4.invoke(WANTestBase.class, "pauseSenderAndWaitForDispatcherToPause", new Object[] { "ln" });
vm2.invoke(WANTestBase.class, "createPartitionedRegion", new Object[] {
testName, null,1, 100, isOffHeap() });
vm3.invoke(WANTestBase.class, "createPartitionedRegion", new Object[] {
testName, null,1, 100, isOffHeap() });
final Map<Integer, Object> keyValues = new HashMap<Integer, Object>();
final Map<Integer, Object> updateKeyValues = new HashMap<Integer, Object>();
for(int i=0; i< 1000; i++) {
keyValues.put(i, i);
}
vm4.invoke(WANTestBase.class, "putGivenKeyValue", new Object[] { testName, keyValues });
pause(5000);
vm4.invoke(WANTestBase.class, "checkQueueSize", new Object[] { "ln", keyValues.size() });
for(int i=0;i<500;i++) {
updateKeyValues.put(i, i+"_updated");
}
vm4.invoke(WANTestBase.class, "putGivenKeyValue", new Object[] { testName, updateKeyValues });
pause(5000);
vm4.invoke(WANTestBase.class, "checkQueueSize", new Object[] { "ln", keyValues.size() + updateKeyValues.size() });
vm2.invoke(WANTestBase.class, "validateRegionSize", new Object[] {
testName, 0 });
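//nothing has reached the remote site yet because the sender is paused; re-putting the same 500 updates
//should be conflated, leaving the queue size unchanged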
vm4.invoke(WANTestBase.class, "putGivenKeyValue", new Object[] { testName, updateKeyValues });
pause(5000);
vm4.invoke(WANTestBase.class, "checkQueueSize", new Object[] { "ln", keyValues.size() + updateKeyValues.size() });
vm4.invoke(WANTestBase.class, "resumeSender", new Object[] { "ln" });
keyValues.putAll(updateKeyValues);
vm2.invoke(WANTestBase.class, "validateRegionSize", new Object[] {
testName, keyValues.size() });
vm3.invoke(WANTestBase.class, "validateRegionSize", new Object[] {
testName, keyValues.size() });
vm2.invoke(WANTestBase.class, "validateRegionContents", new Object[] {
testName, keyValues });
vm3.invoke(WANTestBase.class, "validateRegionContents", new Object[] {
testName, keyValues });
pause(2000);
vm4.invoke(WANTestBase.class, "checkQueueStats", new Object[] {"ln",
0, 2000, 2000, 1500});
vm4.invoke(WANTestBase.class, "checkConflatedStats", new Object[] {"ln",
500});
}
}
|
|
package org.apereo.cas.ticket.registry.support;
import com.google.common.base.Throwables;
import org.apereo.cas.config.CasCoreAuthenticationHandlersConfiguration;
import org.apereo.cas.config.CasCoreAuthenticationMetadataConfiguration;
import org.apereo.cas.config.CasCoreAuthenticationPolicyConfiguration;
import org.apereo.cas.config.CasCoreAuthenticationPrincipalConfiguration;
import org.apereo.cas.config.CasCoreAuthenticationServiceSelectionStrategyConfiguration;
import org.apereo.cas.config.CasCoreConfiguration;
import org.apereo.cas.config.CasCoreHttpConfiguration;
import org.apereo.cas.config.CasCoreServicesConfiguration;
import org.apereo.cas.config.CasCoreTicketCatalogConfiguration;
import org.apereo.cas.config.CasCoreTicketsConfiguration;
import org.apereo.cas.config.CasPersonDirectoryConfiguration;
import org.apereo.cas.config.JpaTicketRegistryConfiguration;
import org.apereo.cas.config.JpaTicketRegistryTicketCatalogConfiguration;
import org.apereo.cas.config.support.EnvironmentConversionServiceInitializer;
import org.apereo.cas.configuration.model.support.jpa.ticketregistry.JpaTicketRegistryProperties;
import org.apereo.cas.configuration.support.Beans;
import org.apereo.cas.logout.config.CasCoreLogoutConfiguration;
import org.apereo.cas.util.SchedulingUtils;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.cloud.autoconfigure.RefreshAutoConfiguration;
import org.springframework.context.ApplicationContext;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.orm.jpa.SharedEntityManagerCreator;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;
import javax.annotation.PostConstruct;
import javax.persistence.EntityManagerFactory;
import javax.sql.DataSource;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static org.junit.Assert.*;
/**
* Unit test for {@link JpaLockingStrategy}.
*
* @author Marvin S. Addison
* @since 3.0.0
*/
@RunWith(SpringRunner.class)
@SpringBootTest(classes = {
JpaLockingStrategyTests.JpaTestConfiguration.class,
RefreshAutoConfiguration.class,
CasCoreTicketsConfiguration.class,
CasCoreLogoutConfiguration.class,
CasCoreHttpConfiguration.class,
CasCoreServicesConfiguration.class,
CasCoreConfiguration.class,
CasCoreAuthenticationServiceSelectionStrategyConfiguration.class,
CasCoreAuthenticationPrincipalConfiguration.class,
CasCoreAuthenticationMetadataConfiguration.class,
CasCoreAuthenticationHandlersConfiguration.class,
CasCoreAuthenticationPolicyConfiguration.class,
CasCoreTicketCatalogConfiguration.class,
JpaTicketRegistryTicketCatalogConfiguration.class,
CasPersonDirectoryConfiguration.class,
JpaTicketRegistryConfiguration.class})
@ContextConfiguration(initializers = EnvironmentConversionServiceInitializer.class)
public class JpaLockingStrategyTests {
/**
* Number of clients contending for lock in concurrent test.
*/
private static final int CONCURRENT_SIZE = 13;
private static final Logger LOGGER = LoggerFactory.getLogger(JpaLockingStrategyTests.class);
@Autowired
@Qualifier("ticketTransactionManager")
private PlatformTransactionManager txManager;
@Autowired
@Qualifier("ticketEntityManagerFactory")
private EntityManagerFactory factory;
@Autowired
@Qualifier("dataSourceTicket")
private DataSource dataSource;
@TestConfiguration
public static class JpaTestConfiguration {
@Autowired
protected ApplicationContext applicationContext;
@PostConstruct
public void init() {
SchedulingUtils.prepScheduledAnnotationBeanPostProcessor(applicationContext);
}
}
/**
* Test basic acquire/release semantics.
*
* @throws Exception On errors.
*/
@Test
public void verifyAcquireAndRelease() throws Exception {
final String appId = "basic";
final String uniqueId = appId + "-1";
final LockingStrategy lock = newLockTxProxy(appId, uniqueId, JpaTicketRegistryProperties.DEFAULT_LOCK_TIMEOUT);
try {
assertTrue(lock.acquire());
assertEquals(uniqueId, getOwner(appId));
lock.release();
assertNull(getOwner(appId));
} catch (final Exception e) {
LOGGER.debug("testAcquireAndRelease produced an error", e);
fail("testAcquireAndRelease failed");
}
}
/**
* Test lock expiration.
*
* @throws Exception On errors.
*/
@Test
public void verifyLockExpiration() throws Exception {
final String appId = "expquick";
final String uniqueId = appId + "-1";
final LockingStrategy lock = newLockTxProxy(appId, uniqueId, "1");
try {
assertTrue(lock.acquire());
assertEquals(uniqueId, getOwner(appId));
assertFalse(lock.acquire());
Thread.sleep(1500);
assertTrue(lock.acquire());
assertEquals(uniqueId, getOwner(appId));
lock.release();
assertNull(getOwner(appId));
} catch (final Exception e) {
LOGGER.debug("testLockExpiration produced an error", e);
fail("testLockExpiration failed");
}
}
/**
* Verify non-reentrant behavior.
*/
@Test
public void verifyNonReentrantBehavior() {
final String appId = "reentrant";
final String uniqueId = appId + "-1";
final LockingStrategy lock = newLockTxProxy(appId, uniqueId, JpaTicketRegistryProperties.DEFAULT_LOCK_TIMEOUT);
try {
assertTrue(lock.acquire());
assertEquals(uniqueId, getOwner(appId));
assertFalse(lock.acquire());
lock.release();
assertNull(getOwner(appId));
} catch (final Exception e) {
LOGGER.debug("testNonReentrantBehavior produced an error", e);
fail("testNonReentrantBehavior failed.");
}
}
/**
* Test concurrent acquire/release semantics.
*/
@Test
public void verifyConcurrentAcquireAndRelease() throws Exception {
final ExecutorService executor = Executors.newFixedThreadPool(CONCURRENT_SIZE);
try {
testConcurrency(executor, Arrays.asList(getConcurrentLocks("concurrent-new")));
} catch (final Exception e) {
LOGGER.debug("testConcurrentAcquireAndRelease produced an error", e);
fail("testConcurrentAcquireAndRelease failed.");
} finally {
executor.shutdownNow();
}
}
/**
* Test concurrent acquire/release semantics for existing lock.
*/
@Test
public void verifyConcurrentAcquireAndReleaseOnExistingLock() throws Exception {
final LockingStrategy[] locks = getConcurrentLocks("concurrent-exists");
locks[0].acquire();
locks[0].release();
final ExecutorService executor = Executors.newFixedThreadPool(CONCURRENT_SIZE);
try {
testConcurrency(executor, Arrays.asList(locks));
} catch (final Exception e) {
LOGGER.debug("testConcurrentAcquireAndReleaseOnExistingLock produced an error", e);
fail("testConcurrentAcquireAndReleaseOnExistingLock failed.");
} finally {
executor.shutdownNow();
}
}
private LockingStrategy[] getConcurrentLocks(final String appId) {
final LockingStrategy[] locks = new LockingStrategy[CONCURRENT_SIZE];
IntStream.rangeClosed(1, locks.length)
.forEach(i -> locks[i - 1] = newLockTxProxy(appId, appId + '-' + i, JpaTicketRegistryProperties.DEFAULT_LOCK_TIMEOUT));
return locks;
}
private LockingStrategy newLockTxProxy(final String appId, final String uniqueId, final String ttl) {
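// Wrap the lock in a dynamic proxy so that every LockingStrategy call runs in its own transaction
// and is flushed to the database, making the result visible to other contenders (see TransactionalLockInvocationHandler).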
final JpaLockingStrategy lock = new JpaLockingStrategy(appId, uniqueId, Beans.newDuration(ttl).getSeconds());
lock.entityManager = SharedEntityManagerCreator.createSharedEntityManager(factory);
return (LockingStrategy) Proxy.newProxyInstance(
JpaLockingStrategy.class.getClassLoader(),
new Class[]{LockingStrategy.class},
new TransactionalLockInvocationHandler(lock, this.txManager));
}
private String getOwner(final String appId) {
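// Read the owner straight from the LOCKS table over JDBC so verification does not depend on the JPA session state.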
final JdbcTemplate simpleJdbcTemplate = new JdbcTemplate(dataSource);
final List<Map<String, Object>> results = simpleJdbcTemplate.queryForList(
"SELECT unique_id FROM locks WHERE application_id=?", appId);
if (results.isEmpty()) {
return null;
}
return (String) results.get(0).get("unique_id");
}
private static void testConcurrency(final ExecutorService executor,
final Collection<LockingStrategy> locks) throws Exception {
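// All contenders race for the same application lock; at most one acquire and at most one release are expected to succeed.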
final List<Locker> lockers = new ArrayList<>(locks.size());
lockers.addAll(locks.stream().map(Locker::new).collect(Collectors.toList()));
final long lockCount = executor.invokeAll(lockers).stream().filter(result -> {
try {
return result.get();
} catch (final InterruptedException | ExecutionException e) {
throw Throwables.propagate(e);
}
}).count();
assertTrue("Lock count should be <= 1 but was " + lockCount, lockCount <= 1);
final List<Releaser> releasers = new ArrayList<>(locks.size());
releasers.addAll(locks.stream().map(Releaser::new).collect(Collectors.toList()));
final long releaseCount = executor.invokeAll(releasers).stream().filter(result -> {
try {
return result.get();
} catch (final InterruptedException | ExecutionException e) {
throw Throwables.propagate(e);
}
}).count();
assertTrue("Release count should be <= 1 but was " + releaseCount, releaseCount <= 1);
}
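/*
 * Illustrative sketch only (not referenced by the tests above): the acquire/work/release pattern
 * a LockingStrategy is typically used for, e.g. ensuring a scheduled job runs on a single node.
 * The helper name runIfLeader is hypothetical.
 */
private static void runIfLeader(final LockingStrategy lock, final Runnable job) {
// Only the contender whose acquire() succeeds runs the job; the others skip it.
if (lock.acquire()) {
try {
job.run();
} finally {
// Release explicitly so other nodes do not have to wait for the lock TTL to expire.
lock.release();
}
}
}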
private static class TransactionalLockInvocationHandler implements InvocationHandler {
private static final Logger LOGGER = LoggerFactory.getLogger(TransactionalLockInvocationHandler.class);
private final JpaLockingStrategy jpaLock;
private final PlatformTransactionManager txManager;
TransactionalLockInvocationHandler(final JpaLockingStrategy lock,
final PlatformTransactionManager txManager) {
jpaLock = lock;
this.txManager = txManager;
}
public JpaLockingStrategy getLock() {
return this.jpaLock;
}
@Override
public Object invoke(final Object proxy, final Method method, final Object[] args) throws Throwable {
return new TransactionTemplate(txManager).execute(status -> {
try {
final Object result = method.invoke(jpaLock, args);
// Force result of transaction to database
jpaLock.entityManager.flush();
LOGGER.debug("Performed [{}] on [{}]", method.getName(), jpaLock);
return result;
} catch (final Exception e) {
throw new RuntimeException("Transactional method invocation failed.", e);
}
});
}
}
private static class Locker implements Callable<Boolean> {
private static final Logger LOGGER = LoggerFactory.getLogger(Locker.class);
private final LockingStrategy lock;
Locker(final LockingStrategy l) {
lock = l;
}
@Override
public Boolean call() throws Exception {
try {
return lock.acquire();
} catch (final Exception e) {
LOGGER.debug("[{}] failed to acquire lock", lock, e);
return false;
}
}
}
private static class Releaser implements Callable<Boolean> {
private static final Logger LOGGER = LoggerFactory.getLogger(Releaser.class);
private final LockingStrategy lock;
Releaser(final LockingStrategy l) {
lock = l;
}
@Override
public Boolean call() throws Exception {
try {
lock.release();
return true;
} catch (final Exception e) {
LOGGER.debug("[{}] failed to release lock", lock, e);
return false;
}
}
}
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.terms;
import org.apache.lucene.search.IndexSearcher;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.NonCollectingAggregator;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.aggregations.bucket.BucketUtils;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.BucketCountThresholds;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import java.io.IOException;
import java.util.List;
import java.util.Map;
public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<ValuesSource, TermsAggregatorFactory> {
private final Terms.Order order;
private final IncludeExclude includeExclude;
private final String executionHint;
private final SubAggCollectionMode collectMode;
private final TermsAggregator.BucketCountThresholds bucketCountThresholds;
private boolean showTermDocCountError;
public TermsAggregatorFactory(String name, Type type, ValuesSourceConfig<ValuesSource> config, Terms.Order order,
IncludeExclude includeExclude, String executionHint, SubAggCollectionMode collectMode,
TermsAggregator.BucketCountThresholds bucketCountThresholds, boolean showTermDocCountError, AggregationContext context,
AggregatorFactory<?> parent, AggregatorFactories.Builder subFactoriesBuilder, Map<String, Object> metaData) throws IOException {
super(name, type, config, context, parent, subFactoriesBuilder, metaData);
this.order = order;
this.includeExclude = includeExclude;
this.executionHint = executionHint;
this.collectMode = collectMode;
this.bucketCountThresholds = bucketCountThresholds;
this.showTermDocCountError = showTermDocCountError;
}
@Override
protected Aggregator createUnmapped(Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
throws IOException {
final InternalAggregation aggregation = new UnmappedTerms(name, order, config.format(), bucketCountThresholds.getRequiredSize(),
bucketCountThresholds.getShardSize(), bucketCountThresholds.getMinDocCount(), pipelineAggregators, metaData);
return new NonCollectingAggregator(name, context, parent, factories, pipelineAggregators, metaData) {
{
// even in the case of an unmapped aggregator, validate the
// order
InternalOrder.validate(order, this);
}
@Override
public InternalAggregation buildEmptyAggregation() {
return aggregation;
}
};
}
@Override
protected Aggregator doCreateInternal(ValuesSource valuesSource, Aggregator parent, boolean collectsFromSingleBucket,
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
if (collectsFromSingleBucket == false) {
return asMultiBucketAggregator(this, context, parent);
}
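// This terms aggregator collects into a single bucket only; when the parent collects into several
// buckets it is wrapped so that a separate instance handles each parent bucket.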
BucketCountThresholds bucketCountThresholds = new BucketCountThresholds(this.bucketCountThresholds);
if (!(order == InternalOrder.TERM_ASC || order == InternalOrder.TERM_DESC)
&& bucketCountThresholds.getShardSize() == TermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS.getShardSize()) {
// The user has not made a shardSize selection. Use default
// heuristic to avoid any wrong-ranking caused by distributed
// counting
bucketCountThresholds.setShardSize(BucketUtils.suggestShardSideQueueSize(bucketCountThresholds.getRequiredSize(),
context.searchContext().numberOfShards()));
}
bucketCountThresholds.ensureValidity();
if (valuesSource instanceof ValuesSource.Bytes) {
ExecutionMode execution = null;
if (executionHint != null) {
execution = ExecutionMode.fromString(executionHint, context.searchContext().parseFieldMatcher());
}
// In some cases, using ordinals is just not supported: override it
if (!(valuesSource instanceof ValuesSource.Bytes.WithOrdinals)) {
execution = ExecutionMode.MAP;
}
final long maxOrd;
final double ratio;
if (execution == null || execution.needsGlobalOrdinals()) {
ValuesSource.Bytes.WithOrdinals valueSourceWithOrdinals = (ValuesSource.Bytes.WithOrdinals) valuesSource;
IndexSearcher indexSearcher = context.searchContext().searcher();
maxOrd = valueSourceWithOrdinals.globalMaxOrd(indexSearcher);
ratio = maxOrd / ((double) indexSearcher.getIndexReader().numDocs());
} else {
maxOrd = -1;
ratio = -1;
}
// Let's try to use a good default
if (execution == null) {
// if there is a parent bucket aggregator the number of
// instances of this aggregator is going
// to be unbounded and most instances may only aggregate few
// documents, so use hashed based
// global ordinals to keep the bucket ords dense.
if (Aggregator.descendsFromBucketAggregator(parent)) {
execution = ExecutionMode.GLOBAL_ORDINALS_HASH;
} else {
if (factories == AggregatorFactories.EMPTY) {
if (ratio <= 0.5 && maxOrd <= 2048) {
// 0.5: At least we need reduce the number of global
// ordinals look-ups by half
// 2048: GLOBAL_ORDINALS_LOW_CARDINALITY has
// additional memory usage, which directly linked to
// maxOrd, so we need to limit.
execution = ExecutionMode.GLOBAL_ORDINALS_LOW_CARDINALITY;
} else {
execution = ExecutionMode.GLOBAL_ORDINALS;
}
} else {
execution = ExecutionMode.GLOBAL_ORDINALS;
}
}
}
SubAggCollectionMode cm = collectMode;
if (cm == null) {
cm = SubAggCollectionMode.DEPTH_FIRST;
if (factories != AggregatorFactories.EMPTY) {
cm = subAggCollectionMode(bucketCountThresholds.getShardSize(), maxOrd);
}
}
DocValueFormat format = config.format();
if ((includeExclude != null) && (includeExclude.isRegexBased()) && format != DocValueFormat.RAW) {
throw new AggregationExecutionException("Aggregation [" + name + "] cannot support regular expression style include/exclude "
+ "settings as they can only be applied to string fields. Use an array of values for include/exclude clauses");
}
return execution.create(name, factories, valuesSource, order, format, bucketCountThresholds, includeExclude, context, parent,
cm, showTermDocCountError, pipelineAggregators, metaData);
}
if ((includeExclude != null) && (includeExclude.isRegexBased())) {
throw new AggregationExecutionException("Aggregation [" + name + "] cannot support regular expression style include/exclude "
+ "settings as they can only be applied to string fields. Use an array of numeric values for include/exclude clauses used to filter numeric fields");
}
if (valuesSource instanceof ValuesSource.Numeric) {
IncludeExclude.LongFilter longFilter = null;
SubAggCollectionMode cm = collectMode;
if (cm == null) {
if (factories != AggregatorFactories.EMPTY) {
cm = subAggCollectionMode(bucketCountThresholds.getShardSize(), -1);
} else {
cm = SubAggCollectionMode.DEPTH_FIRST;
}
}
if (((ValuesSource.Numeric) valuesSource).isFloatingPoint()) {
if (includeExclude != null) {
longFilter = includeExclude.convertToDoubleFilter();
}
return new DoubleTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, config.format(), order,
bucketCountThresholds, context, parent, cm, showTermDocCountError, longFilter,
pipelineAggregators, metaData);
}
if (includeExclude != null) {
longFilter = includeExclude.convertToLongFilter(config.format());
}
return new LongTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, config.format(), order,
bucketCountThresholds, context, parent, cm, showTermDocCountError, longFilter, pipelineAggregators,
metaData);
}
throw new AggregationExecutionException("terms aggregation cannot be applied to field [" + config.fieldContext().field()
+ "]. It can only be applied to numeric or string fields.");
}
// return the SubAggCollectionMode that this aggregation should use based on the expected size
// and the cardinality of the field
static SubAggCollectionMode subAggCollectionMode(int expectedSize, long maxOrd) {
if (expectedSize == Integer.MAX_VALUE) {
// return all buckets
return SubAggCollectionMode.DEPTH_FIRST;
}
if (maxOrd == -1 || maxOrd > expectedSize) {
// use breadth_first if the cardinality is bigger than the expected size or unknown (-1)
return SubAggCollectionMode.BREADTH_FIRST;
}
return SubAggCollectionMode.DEPTH_FIRST;
}
public enum ExecutionMode {
MAP(new ParseField("map")) {
@Override
Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource, Terms.Order order,
DocValueFormat format, TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode,
boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
throws IOException {
final IncludeExclude.StringFilter filter = includeExclude == null ? null : includeExclude.convertToStringFilter(format);
return new StringTermsAggregator(name, factories, valuesSource, order, format, bucketCountThresholds, filter,
aggregationContext, parent, subAggCollectMode, showTermDocCountError, pipelineAggregators, metaData);
}
@Override
boolean needsGlobalOrdinals() {
return false;
}
},
GLOBAL_ORDINALS(new ParseField("global_ordinals")) {
@Override
Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource, Terms.Order order,
DocValueFormat format, TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode,
boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
throws IOException {
final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(format);
return new GlobalOrdinalsStringTermsAggregator(name, factories, (ValuesSource.Bytes.WithOrdinals) valuesSource, order,
format, bucketCountThresholds, filter, aggregationContext, parent, subAggCollectMode, showTermDocCountError,
pipelineAggregators, metaData);
}
@Override
boolean needsGlobalOrdinals() {
return true;
}
},
GLOBAL_ORDINALS_HASH(new ParseField("global_ordinals_hash")) {
@Override
Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource, Terms.Order order,
DocValueFormat format, TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode,
boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
throws IOException {
final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(format);
return new GlobalOrdinalsStringTermsAggregator.WithHash(name, factories, (ValuesSource.Bytes.WithOrdinals) valuesSource,
order, format, bucketCountThresholds, filter, aggregationContext, parent, subAggCollectMode, showTermDocCountError,
pipelineAggregators, metaData);
}
@Override
boolean needsGlobalOrdinals() {
return true;
}
},
GLOBAL_ORDINALS_LOW_CARDINALITY(new ParseField("global_ordinals_low_cardinality")) {
@Override
Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource, Terms.Order order,
DocValueFormat format, TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode,
boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
throws IOException {
if (includeExclude != null || factories.countAggregators() > 0
// we need the FieldData impl to be able to extract the
// segment to global ord mapping
|| valuesSource.getClass() != ValuesSource.Bytes.FieldData.class) {
return GLOBAL_ORDINALS.create(name, factories, valuesSource, order, format, bucketCountThresholds, includeExclude,
aggregationContext, parent, subAggCollectMode, showTermDocCountError, pipelineAggregators, metaData);
}
return new GlobalOrdinalsStringTermsAggregator.LowCardinality(name, factories,
(ValuesSource.Bytes.WithOrdinals) valuesSource, order, format, bucketCountThresholds, aggregationContext, parent,
subAggCollectMode, showTermDocCountError, pipelineAggregators, metaData);
}
@Override
boolean needsGlobalOrdinals() {
return true;
}
};
public static ExecutionMode fromString(String value, ParseFieldMatcher parseFieldMatcher) {
for (ExecutionMode mode : values()) {
if (parseFieldMatcher.match(value, mode.parseField)) {
return mode;
}
}
throw new IllegalArgumentException("Unknown `execution_hint`: [" + value + "], expected any of " + values());
}
private final ParseField parseField;
ExecutionMode(ParseField parseField) {
this.parseField = parseField;
}
abstract Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource, Terms.Order order,
DocValueFormat format, TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode,
boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
throws IOException;
abstract boolean needsGlobalOrdinals();
@Override
public String toString() {
return parseField.getPreferredName();
}
}
}
|
|
/*******************************************************************************
* Copyright 2017 Observational Health Data Sciences and Informatics
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package org.ohdsi.jCdmBuilder.etls.hcup;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.ohdsi.databases.RichConnection;
import org.ohdsi.jCdmBuilder.DbSettings;
import org.ohdsi.jCdmBuilder.EtlReport;
import org.ohdsi.jCdmBuilder.cdm.CdmV5NullableChecker;
import org.ohdsi.jCdmBuilder.utilities.CodeToDomainConceptMap;
import org.ohdsi.jCdmBuilder.utilities.CodeToDomainConceptMap.CodeDomainData;
import org.ohdsi.jCdmBuilder.utilities.CodeToDomainConceptMap.TargetConcept;
import org.ohdsi.jCdmBuilder.utilities.QCSampleConstructor;
import org.ohdsi.utilities.StringUtilities;
import org.ohdsi.utilities.collections.OneToManyList;
import org.ohdsi.utilities.files.ReadCSVFileWithHeader;
import org.ohdsi.utilities.files.Row;
/**
* Performs the ETL to CDM v5 for the HCUP Inpatient Sample. Assumes the vocabulary is already loaded in the target schema. In the HCUP IS all data used in the
* ETL comes from the CORE table. This table contains one row per hospital visit. Since there is no way to link patients across visits, each visit is assigned a
* unique personId.
*
* @author MSCHUEMI
*
*/
public class HCUPETLToV5 {
private static final double QC_SAMPLE_PROBABILITY = 0.000001;
public static int BATCH_SIZE = 10000;
public static String[] diagnoseFields = new String[] { "DX1", "DX2", "DX3", "DX4", "DX5", "DX6", "DX7", "DX8", "DX9", "DX10",
"DX11", "DX12", "DX13", "DX14", "DX15", "DX16", "DX17", "DX18", "DX19", "DX20", "DX21", "DX22", "DX23", "DX24", "DX25", "ECODE1", "ECODE2" };
public static String[] procedureFields = new String[] { "PR1", "PR2", "PR3", "PR4", "PR5", "PR6", "PR7", "PR8", "PR9", "PR10",
"PR11", "PR12", "PR13", "PR14", "PR15" };
public static String[] procedureDayFields = new String[] { "PRDAY1", "PRDAY2", "PRDAY3", "PRDAY4", "PRDAY5", "PRDAY6", "PRDAY7",
"PRDAY8", "PRDAY9", "PRDAY10", "PRDAY11", "PRDAY12", "PRDAY13", "PRDAY14", "PRDAY15" };
public static int[] diagnoseFieldConceptIds = new int[] { 38000184, 38000185, 38000186, 38000187, 38000188, 38000189, 38000190,
38000191, 38000192, 38000193, 38000194, 38000195, 38000196, 38000197, 38000198, 38000198, 38000198, 38000198, 38000198, 38000198, 38000198,
38000198, 38000198, 38000198, 38000198, 38000184, 38000185 };
public static int[] procedureFieldConceptIds = new int[] { 38000251, 38000252, 38000253, 38000254, 38000255, 38000256, 38000257,
38000258, 38000259, 38000260, 38000261, 38000262, 38000263, 38000264, 38000265 };
private RichConnection sourceConnection;
private RichConnection targetConnection;
private QCSampleConstructor qcSampleConstructor;
private EtlReport etlReport;
private CdmV5NullableChecker cdmv5NullableChecker = new CdmV5NullableChecker();
private OneToManyList<String, Row> tableToRows;
private long personId;
private long observationPeriodId;
private Integer locationId;
private long drugExposureId;
private long conditionOccurrenceId;
private long visitOccurrenceId;
private long procedureOccurrenceId;
private long deviceExposureId;
private long measurementId;
private long observationId;
private long visitStartDate;
private long visitEndDate;
private Map<String, Integer> stateCountyToLocationId;
private Set<Integer> careSiteIds;
private Map<String, String> codeToCounty;
private CodeToDomainConceptMap icd9ToConcept;
private CodeToDomainConceptMap icd9ToValueConcept;
private CodeToDomainConceptMap icd9ProcToConcept;
private CodeToDomainConceptMap drgYearToConcept;
public void process(String folder, DbSettings sourceDbSettings, DbSettings targetDbSettings, int maxPersons, int versionId) {
loadMappings(targetDbSettings);
sourceConnection = new RichConnection(sourceDbSettings.server, sourceDbSettings.domain, sourceDbSettings.user, sourceDbSettings.password,
sourceDbSettings.dbType);
sourceConnection.setContext(this.getClass());
sourceConnection.use(sourceDbSettings.database);
targetConnection = new RichConnection(targetDbSettings.server, targetDbSettings.domain, targetDbSettings.user, targetDbSettings.password,
targetDbSettings.dbType);
targetConnection.setContext(this.getClass());
targetConnection.use(targetDbSettings.database);
truncateTables(targetConnection);
targetConnection.execute("TRUNCATE TABLE _version");
String date = new SimpleDateFormat("yyyy-MM-dd").format(new Date());
targetConnection.execute("INSERT INTO _version (version_id, version_date) VALUES (" + versionId + ", '" + date + "')");
qcSampleConstructor = new QCSampleConstructor(folder + "/sample", QC_SAMPLE_PROBABILITY);
etlReport = new EtlReport(folder);
tableToRows = new OneToManyList<String, Row>();
stateCountyToLocationId = new HashMap<String, Integer>();
careSiteIds = new HashSet<Integer>();
personId = 0;
drugExposureId = 0;
conditionOccurrenceId = 0;
visitOccurrenceId = 0;
procedureOccurrenceId = 0;
deviceExposureId = 0;
observationPeriodId = 0;
measurementId = 0;
observationId = 0;
StringUtilities.outputWithTime("Populating CDM_Source table");
populateCdmSourceTable();
StringUtilities.outputWithTime("Processing persons");
for (Row row : sourceConnection.query("SELECT * FROM core ORDER BY [key]")) {
processPerson(row);
if (personId == maxPersons) {
System.out.println("Reached limit of " + maxPersons + " persons, terminating");
break;
}
if (personId % BATCH_SIZE == 0) {
insertBatch();
System.out.println("Processed " + personId + " persons");
}
}
insertBatch();
System.out.println("Processed " + personId + " persons");
qcSampleConstructor.addCdmData(targetConnection, targetDbSettings.database);
String etlReportName = etlReport.generateETLReport(icd9ToConcept, icd9ProcToConcept, drgYearToConcept);
System.out.println("An ETL report was generated and written to :" + etlReportName);
if (etlReport.getTotalProblemCount() > 0) {
String etlProblemListname = etlReport.generateProblemReport();
System.out.println("An ETL problem list was generated and written to :" + etlProblemListname);
}
StringUtilities.outputWithTime("Finished ETL");
}
private void populateCdmSourceTable() {
SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd");
targetConnection.executeResource("PopulateCdmSourceTable.sql", "@today", df.format(new Date()));
}
private void truncateTables(RichConnection targetConnection) {
StringUtilities.outputWithTime("Truncating tables");
String[] tables = new String[] { "attribute_definition", "care_site", "cdm_source", "cohort", "cohort_attribute", "cohort_definition", "condition_era",
"condition_occurrence", "death", "cost", "device_exposure", "dose_era", "drug_era", "drug_exposure", "fact_relationship", "location",
"measurement", "note", "observation", "observation_period", "payer_plan_period", "person", "procedure_occurrence", "provider", "specimen",
"visit_occurrence" };
for (String table : tables)
targetConnection.execute("TRUNCATE TABLE " + table);
}
private void processPerson(Row row) {
if (!row.get("KEY_NIS").equals("")) {
// New data format: transform names to old format:
row.set("KEY", row.get("KEY_NIS"));
row.set("HOSPID", row.get("HOSP_NIS"));
}
etlReport.registerIncomingData("core", row);
personId++;
visitOccurrenceId++;
observationPeriodId++;
visitStartDate = computeVisitStartDate(row.get("YEAR"), row.get("AMONTH"), row.get("AWEEKEND"), row.get("KEY"));
visitEndDate = computeVisitEndDate(visitStartDate, row.get("LOS"));
qcSampleConstructor.registerPersonData("core", row, row.getLong("KEY"));
if (addToPerson(row)) {
List<Row> stemTable = createStemTable(row);
addToLocation(row);
addToCareSite(row);
addToVisitOccurrence(row);
addToObservationPeriod(row);
addToConditionOccurrence(stemTable);
addToDeath(row);
addToDrugExposure(stemTable);
addToDeviceExposure(stemTable);
addToProcedureOccurrence(stemTable);
addToMeasurement(stemTable);
addToObservation(stemTable);
addToObservation(row);
}
}
private List<Row> createStemTable(Row row) {
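// Build one "stem" row per mapped source code; the row's domain_id later decides which CDM table it is
// routed to (condition_occurrence, procedure_occurrence, drug_exposure, device_exposure, measurement or observation).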
List<Row> stemTable = new ArrayList<Row>();
for (int i = 0; i < diagnoseFields.length; i++)
if (row.get(diagnoseFields[i]).trim().length() != 0) {
CodeDomainData data = icd9ToConcept.getCodeData(row.get(diagnoseFields[i]).trim());
for (TargetConcept targetConcept : data.targetConcepts) {
if (targetConcept.conceptId == 4014295 && row.getInt("AGE") < 12) { // 4014295 = Single live birth
etlReport.reportProblem("Condition_occurrence", "Person < 12 years old with live birth. Removing condition_occurrence", row.get("KEY"));
continue;
}
if (targetConcept.conceptId == 4014295 && row.get("FEMALE").equals("0")) { // 4014295 = Single live birth
etlReport.reportProblem("Condition_occurrence", "Male with live birth. Removing condition_occurrence", row.get("KEY"));
continue;
}
Row stemRow = new Row();
stemRow.add("person_id", personId);
stemRow.add("source_value", row.get(diagnoseFields[i]));
stemRow.add("source_concept_id", data.sourceConceptId);
stemRow.add("concept_id", targetConcept.conceptId);
stemRow.add("domain_id", targetConcept.domainId);
stemRow.add("type_concept_id", diagnoseFieldConceptIds[i]);
stemRow.add("start_date", StringUtilities.daysToDatabaseDateString(visitStartDate));
stemRow.add("visit_occurrence_id", visitOccurrenceId);
stemRow.add("index", i);
stemTable.add(stemRow);
}
}
for (int i = 0; i < procedureFields.length; i++)
if (row.get(procedureFields[i]).trim().length() != 0) {
int day = row.getInt(procedureDayFields[i]);
if (day < 0)
day = 0;
if (day > visitEndDate - visitStartDate) {
etlReport.reportProblem("Procedure", "Procedure date beyond length of stay, removing procedure", row.get("KEY"));
continue;
}
CodeDomainData data = icd9ProcToConcept.getCodeData(row.get(procedureFields[i]).trim());
for (TargetConcept targetConcept : data.targetConcepts) {
Row stemRow = new Row();
stemRow.add("person_id", personId);
stemRow.add("source_value", row.get(procedureFields[i]));
stemRow.add("source_concept_id", data.sourceConceptId);
stemRow.add("concept_id", targetConcept.conceptId);
stemRow.add("domain_id", targetConcept.domainId);
stemRow.add("type_concept_id", procedureFieldConceptIds[i]);
stemRow.add("start_date", StringUtilities.daysToDatabaseDateString(visitStartDate + day));
stemRow.add("visit_occurrence_id", visitOccurrenceId);
stemRow.add("index", i);
stemTable.add(stemRow);
}
}
String drgYear = row.get("DRG").trim() + "_" + row.get("YEAR");
CodeDomainData data = drgYearToConcept.getCodeData(drgYear);
for (TargetConcept targetConcept : data.targetConcepts) {
Row stemRow = new Row();
stemRow.add("person_id", personId);
stemRow.add("source_value", drgYear);
stemRow.add("source_concept_id", data.sourceConceptId);
stemRow.add("concept_id", targetConcept.conceptId);
stemRow.add("domain_id", targetConcept.domainId);
stemRow.add("type_concept_id", 38000276); // Problem list from EHR
stemRow.add("start_date", StringUtilities.daysToDatabaseDateString(visitStartDate));
stemRow.add("visit_occurrence_id", visitOccurrenceId);
stemRow.add("index", 0);
stemTable.add(stemRow);
}
return stemTable;
}
private void addToLocation(Row row) {
if (row.get("HOSPST").equals("") && row.get("HOSPSTCO").equals("")) {
locationId = null;
return;
}
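// A HOSPSTCO of -9999 is a sentinel for an unknown county, so treat it as blank when building the state/county key.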
String stateCounty = row.get("HOSPST") + "\t" + (row.get("HOSPSTCO").equals("-9999") ? "" : row.get("HOSPSTCO"));
locationId = stateCountyToLocationId.get(stateCounty);
if (locationId == null) {
locationId = stateCountyToLocationId.size() + 1;
stateCountyToLocationId.put(stateCounty, locationId);
Row location = new Row();
location.add("location_id", locationId);
location.add("state", row.get("HOSPST"));
String county = codeToCounty.get(row.get("HOSPSTCO"));
if (county == null)
county = "";
if (county.length() > 20)
county = county.substring(0, 20); // County field in CDM limited to 20 chars
location.add("county", county);
location.add("location_source_value", row.get("HOSPSTCO"));
tableToRows.put("location", location);
}
}
private void addToCareSite(Row row) {
if (careSiteIds.add(row.getInt("HOSPID"))) {
Row careSite = new Row();
careSite.add("care_site_id", row.get("HOSPID"));
careSite.add("care_site_source_value", row.get("HOSPID"));
if (locationId == null)
careSite.add("location_id", "");
else
careSite.add("location_id", locationId);
careSite.add("place_of_service_concept_id", 9201); // Inpatient visit
tableToRows.put("care_site", careSite);
}
}
private void addToVisitOccurrence(Row row) {
Row visitOccurrence = new Row();
visitOccurrence.add("person_id", personId);
visitOccurrence.add("visit_occurrence_id", visitOccurrenceId);
visitOccurrence.add("visit_start_date", StringUtilities.daysToDatabaseDateString(visitStartDate));
visitOccurrence.add("visit_end_date", StringUtilities.daysToDatabaseDateString(visitEndDate));
visitOccurrence.add("care_site_id", row.get("HOSPID"));
visitOccurrence.add("visit_concept_id", 9201); // Inpatient visit
visitOccurrence.add("visit_type_concept_id", 44818517); // Visit derived from encounter on claim
tableToRows.put("visit_occurrence", visitOccurrence);
}
private void addToObservationPeriod(Row row) {
Row observationPeriod = new Row();
observationPeriod.add("observation_period_id", observationPeriodId);
observationPeriod.add("person_id", personId);
observationPeriod.add("observation_period_start_date", StringUtilities.daysToDatabaseDateString(visitStartDate));
observationPeriod.add("observation_period_end_date", StringUtilities.daysToDatabaseDateString(visitEndDate));
observationPeriod.add("period_type_concept_id", 44814724); // Period covering healthcare encounters
tableToRows.put("observation_period", observationPeriod);
}
private boolean addToPerson(Row row) {
if (row.getInt("AGE") < 0) { // No age specified. Cannot create person, since birth year is required field
etlReport.reportProblem("Person", "No age specified so cannot create row", row.get("KEY"));
return false;
}
Row person = new Row();
person.add("person_id", personId);
person.add("person_source_value", row.get("KEY"));
person.add("gender_source_value", row.get("FEMALE"));
person.add("gender_concept_id", row.get("FEMALE").equals("1") ? "8532" : row.get("FEMALE").equals("0") ? "8507" : "0");
if (row.getInt("AGE") > 0) {
int yearOfBirth = Integer.parseInt(StringUtilities.daysToCalendarYear(visitStartDate)) - row.getInt("AGE");
person.add("year_of_birth", yearOfBirth);
person.add("month_of_birth", "");
person.add("day_of_birth", "");
} else if (row.get("AGEDAY").equals("") && row.get("AGE_NEONATE").equals("1")) {
int yearOfBirth = Integer.parseInt(StringUtilities.daysToCalendarYear(visitStartDate));
person.add("year_of_birth", yearOfBirth);
person.add("month_of_birth", "");
person.add("day_of_birth", "");
} else if ((row.get("AGEDAY").equals("") && !row.get("AGE_NEONATE").equals("1")) || row.getInt("AGEDAY") < 0) {
long dateOfBirth = visitStartDate - 180;
person.add("year_of_birth", StringUtilities.daysToCalendarYear(dateOfBirth));
person.add("month_of_birth", "");
person.add("day_of_birth", "");
} else if (row.getInt("AGEDAY") >= 0) {
long dateOfBirth = visitStartDate - row.getInt("AGEDAY");
person.add("year_of_birth", StringUtilities.daysToCalendarYear(dateOfBirth));
person.add("month_of_birth", StringUtilities.daysToCalendarMonth(dateOfBirth));
person.add("day_of_birth", StringUtilities.daysToCalendarDayOfMonth(dateOfBirth));
} else {
person.add("year_of_birth", "");
person.add("month_of_birth", "");
person.add("day_of_birth", "");
}
person.add("race_source_value", row.get("RACE"));
if (row.get("RACE").equals("1")) // White
person.add("race_concept_id", "8527");
else if (row.get("RACE").equals("2")) // Black
person.add("race_concept_id", "8516");
else if (row.get("RACE").equals("4")) // Pacific islander
person.add("race_concept_id", "8557");
else if (row.get("RACE").equals("5")) // Native American
person.add("race_concept_id", "8657");
else if (row.get("RACE").equals("3")) // Hispanic, should be coded as 'other'
person.add("race_concept_id", "8522");
else if (row.get("RACE").equals("6")) // Other
person.add("race_concept_id", "0");
else
person.add("race_concept_id", "0");
if (row.get("RACE").equals("3")) {// Hispanic
person.add("ethnicity_source_value", "3");
person.add("ethnicity_concept_id", "38003563");
} else {
person.add("ethnicity_source_value", "");
person.add("ethnicity_concept_id", "0");
}
tableToRows.put("person", person);
return true;
}
private void addToConditionOccurrence(List<Row> stemTable) {
for (Row stemRow : stemTable) {
if (stemRow.get("domain_id").equals("Condition")) {
Row conditionOccurrence = new Row();
conditionOccurrence.add("person_id", stemRow.get("person_id"));
conditionOccurrence.add("condition_occurrence_id", ++conditionOccurrenceId);
conditionOccurrence.add("condition_source_value", stemRow.get("source_value"));
conditionOccurrence.add("condition_source_concept_id", stemRow.get("source_concept_id"));
conditionOccurrence.add("condition_concept_id", stemRow.get("concept_id"));
conditionOccurrence.add("condition_type_concept_id", stemRow.get("type_concept_id"));
conditionOccurrence.add("condition_start_date", stemRow.get("start_date"));
conditionOccurrence.add("visit_occurrence_id", stemRow.get("visit_occurrence_id"));
tableToRows.put("condition_occurrence", conditionOccurrence);
}
}
}
private void addToDeviceExposure(List<Row> stemTable) {
for (Row stemRow : stemTable) {
if (stemRow.get("domain_id").equals("Device")) {
Row deviceExposure = new Row();
deviceExposure.add("person_id", stemRow.get("person_id"));
deviceExposure.add("device_exposure_id", ++deviceExposureId);
deviceExposure.add("device_source_value", stemRow.get("source_value"));
deviceExposure.add("device_source_concept_id", stemRow.get("source_concept_id"));
deviceExposure.add("device_concept_id", stemRow.get("concept_id"));
deviceExposure.add("device_type_concept_id", stemRow.get("type_concept_id"));
deviceExposure.add("device_exposure_start_date", stemRow.get("start_date"));
deviceExposure.add("visit_occurrence_id", stemRow.get("visit_occurrence_id"));
tableToRows.put("device_exposure", deviceExposure);
}
}
}
private void addToDeath(Row row) {
if (row.get("DIED").equals("1")) {
Row death = new Row();
death.add("person_id", personId);
death.add("death_date", StringUtilities.daysToDatabaseDateString(visitEndDate));
death.add("death_type_concept_id", 38003566); // EHR record patient status "Deceased"
tableToRows.put("death", death);
}
}
private void addToDrugExposure(List<Row> stemTable) {
for (Row stemRow : stemTable) {
if (stemRow.get("domain_id").equals("Drug")) {
Row drugExposure = new Row();
drugExposure.add("person_id", stemRow.get("person_id"));
drugExposure.add("device_exposure_id", ++drugExposureId);
drugExposure.add("drug_source_value", stemRow.get("source_value"));
drugExposure.add("drug_source_concept_id", stemRow.get("source_concept_id"));
drugExposure.add("drug_concept_id", stemRow.get("concept_id"));
drugExposure.add("drug_type_concept_id", stemRow.get("type_concept_id"));
drugExposure.add("drug_exposure_start_date", stemRow.get("start_date"));
drugExposure.add("visit_occurrence_id", stemRow.get("visit_occurrence_id"));
tableToRows.put("drug_exposure", drugExposure);
}
}
}
private void addToProcedureOccurrence(List<Row> stemTable) {
for (Row stemRow : stemTable) {
if (stemRow.get("domain_id").equals("Procedure")) {
Row procedureOccurrence = new Row();
procedureOccurrence.add("person_id", stemRow.get("person_id"));
procedureOccurrence.add("procedure_occurrence_id", ++procedureOccurrenceId);
procedureOccurrence.add("procedure_source_value", stemRow.get("source_value"));
procedureOccurrence.add("procedure_source_concept_id", stemRow.get("source_concept_id"));
procedureOccurrence.add("procedure_concept_id", stemRow.get("concept_id"));
procedureOccurrence.add("procedure_type_concept_id", stemRow.get("type_concept_id"));
procedureOccurrence.add("procedure_date", stemRow.get("start_date"));
procedureOccurrence.add("visit_occurrence_id", stemRow.get("visit_occurrence_id"));
tableToRows.put("procedure_occurrence", procedureOccurrence);
}
}
}
private void addToMeasurement(List<Row> stemTable) {
for (Row stemRow : stemTable) {
if (stemRow.get("domain_id").equals("Measurement")) {
Row measurement = new Row();
measurement.add("person_id", stemRow.get("person_id"));
measurement.add("measurement_id", ++measurementId);
measurement.add("measurement_source_value", stemRow.get("source_value"));
measurement.add("measurement_source_concept_id", stemRow.get("source_concept_id"));
measurement.add("measurement_concept_id", stemRow.get("concept_id"));
measurement.add("measurement_type_concept_id", stemRow.get("type_concept_id"));
measurement.add("measurement_date", stemRow.get("start_date"));
measurement.add("visit_occurrence_id", stemRow.get("visit_occurrence_id"));
CodeDomainData codeData = icd9ToValueConcept.getCodeData(stemRow.get("source_value"));
if (codeData.targetConcepts.get(0).conceptId == 0) {
measurement.add("value_as_concept_id", 4181412); // 'Present'
} else {
measurement.add("value_as_concept_id", codeData.targetConcepts.get(0).conceptId);
}
tableToRows.put("measurement", measurement);
}
}
}
private void addToObservation(List<Row> stemTable) {
for (Row stemRow : stemTable) {
if (stemRow.get("domain_id").equals("Observation")) {
Row observation = new Row();
observation.add("person_id", stemRow.get("person_id"));
observation.add("observation_id", ++observationId);
observation.add("value_as_number", "");
observation.add("observation_source_value", stemRow.get("source_value"));
observation.add("observation_source_concept_id", stemRow.get("source_concept_id"));
observation.add("observation_concept_id", stemRow.get("concept_id"));
observation.add("observation_type_concept_id", stemRow.get("type_concept_id"));
observation.add("observation_date", stemRow.get("start_date"));
CodeDomainData codeData = icd9ToValueConcept.getCodeData(stemRow.get("source_value"));
if (codeData.targetConcepts.get(0).conceptId == 0) {
observation.add("value_as_concept_id", 45877994); // 'Yes'
} else {
observation.add("value_as_concept_id", codeData.targetConcepts.get(0).conceptId);
}
observation.add("visit_occurrence_id", stemRow.get("visit_occurrence_id"));
tableToRows.put("observation", observation);
}
}
}
private void addToObservation(Row row) {
Row observation = new Row();
observation.add("person_id", personId);
observation.add("observation_id", ++observationId);
observation.add("value_as_number", row.get("DISCWT"));
observation.add("observation_source_value", "DISCWT");
observation.add("value_as_concept_id", "");
observation.add("observation_source_concept_id", "");
observation.add("observation_concept_id", "0");
observation.add("observation_type_concept_id", "900000003");
observation.add("observation_date", StringUtilities.daysToDatabaseDateString(visitStartDate));
observation.add("visit_occurrence_id", visitOccurrenceId);
tableToRows.put("observation", observation);
}
private long computeVisitStartDate(String year, String amonth, String aweekend, String key) {
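// An AMONTH below 1 means the admission month is missing; derive a stable pseudo-random month (1-12)
// from the record key so the visit still gets a usable start date.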
if (Integer.parseInt(amonth) < 1)
amonth = Integer.toString(Math.abs(hash(key) % 12) + 1);
boolean isWeekend = aweekend.equals("1");
Calendar calendar = Calendar.getInstance();
calendar.set(Integer.parseInt(year), Integer.parseInt(amonth) - 1, 1);
while (isWeekend(calendar) != isWeekend)
calendar.add(Calendar.DATE, 1);
long time = calendar.getTimeInMillis();
time += calendar.getTimeZone().getOffset(time);
// MILLENIUM is added because, for negative numbers, integer division truncates toward zero (-8 / 10 = 0 rather than -1).
// The offset is reversed in the daysToDatabaseDateString function
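// For example (illustrative, assuming MILLENIUM is an exact multiple of DAY equal to 1000 * 365 days):
// a timestamp 12 hours before 1970-01-01 gives time / DAY == 0 with plain division, whereas
// ((MILLENIUM + time) / DAY) - (1000 * 365) == -1, the intended (floored) day number.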
return (((StringUtilities.MILLENIUM + time) / StringUtilities.DAY) - (1000 * 365));
}
private long computeVisitEndDate(long visitStartDate, String los) {
int lengthOfStay = Integer.parseInt(los);
if (lengthOfStay < 0)
lengthOfStay = 0;
return visitStartDate + lengthOfStay;
}
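// Bit-spreading hash: the same supplemental mixing step used by older java.util.HashMap implementations.
// computeVisitStartDate() uses it to derive a deterministic pseudo-random month from the record key when AMONTH is missing.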
private int hash(String string) {
int hashCode = string.hashCode();
hashCode ^= (hashCode >>> 20) ^ (hashCode >>> 12);
return hashCode ^ (hashCode >>> 7) ^ (hashCode >>> 4);
}
private boolean isWeekend(Calendar calendar) {
int dayOfWeek = calendar.get(Calendar.DAY_OF_WEEK);
return (dayOfWeek == Calendar.SATURDAY || dayOfWeek == Calendar.SUNDAY);
}
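// Flushes the buffered rows: drops (and reports) rows that violate NOT NULL constraints, bulk-inserts the
// remainder into each target table, and clears the buffer for the next batch.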
private void insertBatch() {
removeRowsWithNonNullableNulls();
etlReport.registerOutgoingData(tableToRows);
for (String table : tableToRows.keySet())
targetConnection.insertIntoTable(tableToRows.get(table).iterator(), table, false, true);
tableToRows.clear();
}
private void loadMappings(DbSettings dbSettings) {
StringUtilities.outputWithTime("Loading mappings from server");
RichConnection connection = new RichConnection(dbSettings.server, dbSettings.domain, dbSettings.user, dbSettings.password, dbSettings.dbType);
connection.setContext(this.getClass());
connection.use(dbSettings.database);
System.out.println("- Loading ICD-9 to concept_id mapping");
icd9ToConcept = new CodeToDomainConceptMap("ICD-9 to concept_id mapping", "Condition");
for (Row row : connection.queryResource("icd9ToConditionProcMeasObsDevice.sql")) {
row.upperCaseFieldNames();
icd9ToConcept.add(row.get("SOURCE_CODE"), row.get("SOURCE_NAME"), row.getInt("SOURCE_CONCEPT_ID"), row.getInt("TARGET_CONCEPT_ID"),
row.get("TARGET_CODE"), row.get("TARGET_NAME"), row.get("DOMAIN_ID"));
}
System.out.println("- Loading ICD-9 to observation value concept_id mapping");
icd9ToValueConcept = new CodeToDomainConceptMap("ICD-9 to value concept_id mapping", "Observation");
for (Row row : connection.queryResource("icd9ToObservationValue.sql")) {
row.upperCaseFieldNames();
icd9ToValueConcept.add(row.get("SOURCE_CODE"), row.get("SOURCE_NAME"), row.getInt("SOURCE_CONCEPT_ID"), row.getInt("TARGET_CONCEPT_ID"),
row.get("TARGET_CODE"), row.get("TARGET_NAME"), row.get("DOMAIN_ID"));
}
System.out.println("- Loading ICD-9 Procedure to concept_id mapping");
icd9ProcToConcept = new CodeToDomainConceptMap("ICD-9 Procedure to concept_id mapping", "Procedure");
for (Row row : connection.queryResource("icd9ProcToProcMeasObsDrugCondition.sql")) {
row.upperCaseFieldNames();
icd9ProcToConcept.add(row.get("SOURCE_CODE"), row.get("SOURCE_NAME"), row.getInt("SOURCE_CONCEPT_ID"), row.getInt("TARGET_CONCEPT_ID"),
row.get("TARGET_CODE"), row.get("TARGET_NAME"), row.get("DOMAIN_ID"));
}
System.out.println("- Loading DRG to concept_id mapping");
drgYearToConcept = new CodeToDomainConceptMap("DRG to concept_id mapping", "Observation");
// Need to create drg_year combinations for every year for easy retrieval later on:
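// Example (hypothetical values): a row with SOURCE_CODE "470" and SOURCE_END_YEAR 2003 yields the keys
// 470_1999 through 470_2003, so a DRG can later be looked up with a combined "<code>_<year>" key.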
String oldSourceCode = "";
int year = 1999;
for (Row row : connection.queryResource("drgToConcept.sql")) {
row.upperCaseFieldNames();
if (!row.get("SOURCE_CODE").equals(oldSourceCode)) {
oldSourceCode = row.get("SOURCE_CODE");
year = 1999;
}
for (; year <= row.getInt("SOURCE_END_YEAR"); year++)
drgYearToConcept.add(row.get("SOURCE_CODE") + "_" + year, row.get("SOURCE_NAME"), row.getInt("SOURCE_CONCEPT_ID"),
row.getInt("TARGET_CONCEPT_ID"), row.get("TARGET_CODE"), row.get("TARGET_NAME"), row.get("TARGET_DOMAIN"));
}
System.out.println("- Loading county code to name mapping");
codeToCounty = new HashMap<String, String>();
for (Row row : new ReadCSVFileWithHeader(this.getClass().getResourceAsStream("national_county.txt")))
codeToCounty.put(row.get("State ANSI") + row.get("County ANSI"), row.get("County Name"));
StringUtilities.outputWithTime("Finished loading mappings");
}
private void removeRowsWithNonNullableNulls() {
for (String table : tableToRows.keySet()) {
Iterator<Row> iterator = tableToRows.get(table).iterator();
while (iterator.hasNext()) {
Row row = iterator.next();
String nonAllowedNullField = cdmv5NullableChecker.findNonAllowedNull(table, row);
if (nonAllowedNullField != null) {
if (row.getFieldNames().contains("person_id"))
etlReport.reportProblem(table, "Column " + nonAllowedNullField + " is null, could not create row", row.get("person_id"));
else
etlReport.reportProblem(table, "Column " + nonAllowedNullField + " is null, could not create row", "");
iterator.remove();
}
}
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache;
import static org.junit.Assert.*;
import org.apache.geode.InternalGemFireException;
import org.apache.geode.cache.AttributesFactory;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheException;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.CacheLoader;
import org.apache.geode.cache.CacheLoaderException;
import org.apache.geode.cache.EntryNotFoundException;
import org.apache.geode.cache.LoaderHelper;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.Scope;
import org.apache.geode.distributed.internal.ClusterDistributionManager;
import org.apache.geode.distributed.internal.ClusterDistributionManagerDUnitTest;
import org.apache.geode.distributed.internal.InternalDistributedSystem;
import org.apache.geode.internal.Assert;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.LogWriterUtils;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.internal.JUnit4DistributedTestCase;
/**
* This is the abstract superclass of tests that validate the functionality of GemFire's distributed
* caches. It provides a number of convenient helper methods.
*/
public abstract class DistributedCacheTestCase extends JUnit4DistributedTestCase {
/** The current cache in this VM */
protected static Cache cache = null;
@Override
public final void postSetUp() throws Exception {
setUpDistributedCacheTestCase(true);
}
/**
* Creates the {@link Cache} and root region in each remote VM and, if createLocalCache, in this
* VM.
*/
private final void setUpDistributedCacheTestCase(boolean createLocalCache) throws Exception {
if (createLocalCache) {
try {
remoteCreateCache();
assertTrue(cache != null);
} catch (Exception ex) {
String s = "While creating cache in this VM";
throw new InternalGemFireException(s, ex);
}
} else {
this.getSystem(); // make sure we have a connected DistributedSystem
}
for (int h = 0; h < Host.getHostCount(); h++) {
Host host = Host.getHost(h);
for (int v = 0; v < host.getVMCount(); v++) {
VM vm = host.getVM(v);
vm.invoke(() -> this.remoteCreateCache());
}
}
}
/**
* Creates the root region in a remote VM
*/
private static void remoteCreateCache() throws Exception {
Assert.assertTrue(cache == null, "cache should be null");
DistributedCacheTestCase x = new DistributedCacheTestCase() {};
cache = CacheFactory.create(x.getSystem());
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.DISTRIBUTED_NO_ACK);
cache.createRegion("root", factory.create());
}
/**
* Closes the cache in this VM and each remote VM
*/
@Override
public final void preTearDown() throws Exception {
StringBuffer problems = new StringBuffer();
if (cache != null) {
try {
if (remoteCloseCache()) {
problems.append("An exception occurred trying to close the cache.");
}
assertTrue(cache == null);
} catch (Exception ex) {
String s = "While closing the cache in this VM";
throw new InternalGemFireException(s, ex);
}
}
for (int h = 0; h < Host.getHostCount(); h++) {
Host host = Host.getHost(h);
for (int v = 0; v < host.getVMCount(); v++) {
VM vm = host.getVM(v);
boolean exceptionInThreads = vm.invoke(() -> this.remoteCloseCache());
if (exceptionInThreads) {
String s = "An exception occurred in GemFire system";
problems.append(s);
}
}
}
assertEquals("Problems while tearing down", "", problems.toString().trim());
}
/**
* Closes the Cache for the current VM. Returns whether or not an exception occurred in the
* distribution manager to which this VM is attached. Note that the exception flag is cleared by
* this method.
*
* @see ClusterDistributionManager#exceptionInThreads()
*/
private static boolean remoteCloseCache() throws CacheException {
Assert.assertTrue(cache != null, "No cache on this VM?");
Assert.assertTrue(!cache.isClosed(), "Who closed my cache?");
InternalDistributedSystem system = (InternalDistributedSystem) cache.getDistributedSystem();
ClusterDistributionManager dm = (ClusterDistributionManager) system.getDistributionManager();
boolean exceptionInThreads = dm.exceptionInThreads();
dm.clearExceptionInThreads();
cache.close();
cache = null;
return exceptionInThreads;
}
/**
* Returns the root region of the cache. We assume that the {@link Cache} and the root region have
* already been created.
*/
protected static Region getRootRegion() throws CacheException {
if (cache == null) {
String s = "Cache not created yet!";
throw new IllegalStateException(s);
}
return cache.getRegion("root");
}
/**
* Returns the distribution manager associated with the cache
*
* @since GemFire 2.1
*/
protected static ClusterDistributionManager getDistributionManager() {
if (cache == null) {
String s = "Cache not created yet!";
throw new IllegalStateException(s);
}
InternalDistributedSystem system =
(InternalDistributedSystem) ((GemFireCacheImpl) cache).getDistributedSystem();
return (ClusterDistributionManager) system.getDistributionManager();
}
/**
* Creates a new sub-Region of the root Region in a remote VM with default scope, SCOPE_LOCAL.
*
* @param name The name of the newly-created sub-Region. It is recommended that the name of the
* Region be the {@link #getUniqueName()} of the test.
*/
protected static void remoteCreateRegion(String name) throws CacheException {
remoteCreateRegion(name, Scope.LOCAL);
}
/**
* Creates a new sub-Region of the root Region in a remote VM.
*
* @param name The name of the newly-created sub-Region. It is recommended that the name of the
* Region be the {@link #getUniqueName()} of the test.
* @param scope create the region attributes with this scope
*/
protected static void remoteCreateRegion(String name, Scope scope) throws CacheException {
Region root = getRootRegion();
AttributesFactory factory = new AttributesFactory();
factory.setScope(scope);
Region newRegion = root.createSubregion(name, factory.create());
LogWriterUtils.getLogWriter().info("Created Region '" + newRegion.getFullPath() + "'");
}
/**
* Defines an entry in the Region with the given name and scope.
*
* @param regionName The name of a region that is a sub-region of the root region, or a global
* name
* @param entryName Must be {@link java.io.Serializable}
*/
protected static void remoteDefineEntry(String regionName, String entryName, Scope scope)
throws CacheException {
remoteDefineEntry(regionName, entryName, scope, true);
}
/**
* Defines an entry in the Region with the given name and scope. In 3.0 this method creates a
* subregion named <code>entryName</code> (with the appropriate attributes) that contains an entry
* named <code>entryName</code>.
*
* @param regionName The name of a region that is a sub-region of the root region, or a global
* name
* @param entryName Must be {@link java.io.Serializable}
* @param doNetSearch Will the distributed region perform a netSearch when looking for objects? If
* <code>false</code> a {@link CacheException} will be thrown if an entry in the region is
* asked for and it is not there.
*/
protected static void remoteDefineEntry(String regionName, String entryName, Scope scope,
boolean doNetSearch) throws CacheException {
Region root = getRootRegion();
Region region = root.getSubregion(regionName);
AttributesFactory factory = new AttributesFactory();
factory.setScope(scope);
if (!doNetSearch) {
factory.setCacheLoader(new CacheLoader() {
public Object load(LoaderHelper helper) throws CacheLoaderException {
String s = "Should not be loading \"" + helper.getKey() + "\" in \""
+ helper.getRegion().getFullPath() + "\"";
throw new CacheLoaderException(s);
}
public void close() {}
});
}
Region sub = region.createSubregion(entryName, factory.create());
sub.create(entryName, null);
LogWriterUtils.getLogWriter()
.info("Defined Entry named '" + entryName + "' in region '" + sub.getFullPath() + "'");
}
/**
* Puts (or creates) a value in a subregion of <code>region</code> named <code>entryName</code>.
*/
protected static void remotePut(String regionName, String entryName, Object value, Scope scope)
throws CacheException {
Region root = getRootRegion();
Region region = root.getSubregion(regionName);
Region sub = region.getSubregion(entryName);
if (sub == null) {
AttributesFactory factory = new AttributesFactory();
factory.setScope(scope);
sub = region.createSubregion(entryName, factory.create());
}
sub.put(entryName, value);
LogWriterUtils.getLogWriter().info("Put value " + value + " in entry " + entryName
+ " in region '" + region.getFullPath() + "'");
}
/**
* Does a put with the given value, defining a DISTRIBUTED_NO_ACK entry in the Region with the
* given name.
*
* @param regionName The name of a region that is a sub-region of the root region, or a global
* name
* @param entryName Must be {@link java.io.Serializable}
*/
protected static void remotePutDistributed(String regionName, String entryName, Object value)
throws CacheException {
remotePut(regionName, entryName, value, Scope.DISTRIBUTED_NO_ACK);
}
/**
* Replaces the value of an entry in a region in a remote VM
*
* @param regionName The name of a region that is a sub-region of the root region
* @param entryName Must be {@link java.io.Serializable}
* @param value The value used to replace
*/
protected static void remoteReplace(String regionName, String entryName, Object value)
throws CacheException {
Region root = getRootRegion();
Region region = root.getSubregion(regionName);
Region sub = region.getSubregion(entryName);
if (sub == null) {
String s = "Entry \"" + entryName + "\" does not exist";
throw new EntryNotFoundException(s);
}
sub.put(entryName, value);
LogWriterUtils.getLogWriter().info("Replaced value " + value + "in entry " + entryName
+ " in region '" + region.getFullPath() + "'");
}
/**
* Invalidates the value of an entry in a region in a remote VM
*
* @param regionName The name of a region that is a sub-region of the root region
* @param entryName Must be {@link java.io.Serializable}
*/
protected static void remoteInvalidate(String regionName, String entryName)
throws CacheException {
Region root = getRootRegion();
Region region = root.getSubregion(regionName);
Region sub = region.getSubregion(entryName);
if (sub == null) {
String s = "Entry \"" + entryName + "\" does not exist";
throw new EntryNotFoundException(s);
}
sub.invalidate(entryName);
}
/**
* Destroys the value of an entry in a region in a remote VM
*
* @param regionName The name of a region that is a sub-region of the root region
* @param entryName Must be {@link java.io.Serializable}
*/
protected static void remoteDestroy(String regionName, String entryName) throws CacheException {
Region root = getRootRegion();
Region region = root.getSubregion(regionName);
Region sub = region.getSubregion(entryName);
if (sub == null) {
String s = "Entry \"" + entryName + "\" does not exist";
throw new EntryNotFoundException(s);
}
assertNotNull(sub.getEntry(entryName));
sub.destroy(entryName);
assertNull(sub.getEntry(entryName));
}
/**
* Asserts that the value of an entry in a region is what we expect it to be.
*
* @param regionName The name of a region that is a sub-region of the root region
* @param entryName Must be {@link java.io.Serializable}
*/
protected static void remoteAssertEntryValue(String regionName, String entryName, Object expected)
throws CacheException {
Region root = getRootRegion();
Region region = root.getSubregion(regionName);
Region sub = region.getSubregion(entryName);
if (sub == null) {
String s = "Entry \"" + entryName + "\" does not exist";
throw new EntryNotFoundException(s);
}
assertEquals(expected, sub.get(entryName));
}
/**
* Assumes there is only one host, and invokes the given method in every VM that host knows about.
*/
public void forEachVMInvoke(String methodName, Object[] args) {
forEachVMInvoke(getClass(), methodName, args);
}
/**
* Assumes there is only one host, and invokes the given method in every VM that host knows about.
*/
public void forEachVMInvoke(Class<?> targetClass, String methodName, Object[] args) {
Host host = Host.getHost(0);
int vmCount = host.getVMCount();
for (int i = 0; i < vmCount; i++) {
LogWriterUtils.getLogWriter().info("Invoking " + methodName + "on VM#" + i);
host.getVM(i).invoke(targetClass, methodName, args);
}
}
}
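// Illustrative sketch (not part of the original test suite): a minimal concrete subclass showing how the
// helper methods above are typically combined. The class and test names are hypothetical; the sketch relies
// on the cache and root region created by postSetUp().
class ExampleDistributedCacheDUnitTest extends DistributedCacheTestCase {
@org.junit.Test
public void testPutThenRead() throws Exception {
String regionName = getUniqueName();
// Create a distributed sub-region of the root region in this VM
remoteCreateRegion(regionName, Scope.DISTRIBUTED_NO_ACK);
// remotePutDistributed creates the "entry" sub-region on demand and stores the value in it
remotePutDistributed(regionName, "entry", "value");
// Read the value back through the same helper layer
remoteAssertEntryValue(regionName, "entry", "value");
}
}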
|
|
/*
* Copyright 2014-2018 Groupon, Inc
* Copyright 2014-2018 The Billing Project, LLC
*
* The Billing Project licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.catalog;
import java.io.ByteArrayInputStream;
import java.math.BigDecimal;
import javax.xml.bind.JAXBException;
import org.joda.time.DateTime;
import org.killbill.billing.ErrorCode;
import org.killbill.billing.callcontext.InternalTenantContext;
import org.killbill.billing.catalog.api.BillingActionPolicy;
import org.killbill.billing.catalog.api.BillingAlignment;
import org.killbill.billing.catalog.api.BillingMode;
import org.killbill.billing.catalog.api.CatalogApiException;
import org.killbill.billing.catalog.api.Currency;
import org.killbill.billing.catalog.api.InternationalPrice;
import org.killbill.billing.catalog.api.PhaseType;
import org.killbill.billing.catalog.api.Plan;
import org.killbill.billing.catalog.api.PlanAlignmentChange;
import org.killbill.billing.catalog.api.PlanAlignmentCreate;
import org.killbill.billing.catalog.api.PriceListSet;
import org.killbill.billing.catalog.api.Product;
import org.killbill.billing.catalog.api.ProductCategory;
import org.killbill.billing.catalog.api.SimplePlanDescriptor;
import org.killbill.billing.catalog.api.TimeUnit;
import org.killbill.billing.catalog.rules.DefaultCaseBillingAlignment;
import org.killbill.billing.catalog.rules.DefaultCaseCancelPolicy;
import org.killbill.billing.catalog.rules.DefaultCaseChangePlanAlignment;
import org.killbill.billing.catalog.rules.DefaultCaseChangePlanPolicy;
import org.killbill.billing.catalog.rules.DefaultCaseCreateAlignment;
import org.killbill.billing.catalog.rules.DefaultCasePriceList;
import org.killbill.billing.catalog.rules.DefaultPlanRules;
import org.killbill.xmlloader.ValidationException;
import org.killbill.xmlloader.XMLLoader;
import org.killbill.xmlloader.XMLWriter;
import com.google.common.collect.ImmutableList;
public class CatalogUpdater {
public static String DEFAULT_CATALOG_NAME = "DEFAULT";
private final DefaultMutableStaticCatalog catalog;
public CatalogUpdater(final StandaloneCatalog standaloneCatalog) {
this.catalog = new DefaultMutableStaticCatalog(standaloneCatalog);
this.catalog.setRecurringBillingMode(BillingMode.IN_ADVANCE);
}
public CatalogUpdater(final DateTime effectiveDate, final Currency... currencies) {
final DefaultPriceList defaultPriceList = new DefaultPriceList().setName(PriceListSet.DEFAULT_PRICELIST_NAME);
final StandaloneCatalog tmp = new StandaloneCatalog()
.setCatalogName(DEFAULT_CATALOG_NAME)
.setEffectiveDate(effectiveDate.toDate())
.setRecurringBillingMode(BillingMode.IN_ADVANCE)
.setProducts(ImmutableList.<Product>of())
.setPlans(ImmutableList.<Plan>of())
.setPriceLists(new DefaultPriceListSet(defaultPriceList, new DefaultPriceList[0]))
.setPlanRules(getSaneDefaultPlanRules(defaultPriceList));
if (currencies != null && currencies.length > 0) {
tmp.setSupportedCurrencies(currencies);
} else {
tmp.setSupportedCurrencies(new Currency[0]);
}
tmp.initialize(tmp);
this.catalog = new DefaultMutableStaticCatalog(tmp);
}
public StandaloneCatalog getCatalog() {
return catalog;
}
public String getCatalogXML(final InternalTenantContext internalTenantContext) throws CatalogApiException {
try {
final String newCatalog = XMLWriter.writeXML(catalog, StandaloneCatalog.class);
// Verify we can deserialize this catalog before we commit it to disk
XMLLoader.getObjectFromStream(new ByteArrayInputStream(newCatalog.getBytes()), StandaloneCatalog.class);
return newCatalog;
} catch (ValidationException e) {
throw new CatalogApiException(e, ErrorCode.CAT_INVALID_FOR_TENANT, internalTenantContext.getTenantRecordId());
} catch (JAXBException e) {
throw new CatalogApiException(e, ErrorCode.CAT_INVALID_FOR_TENANT, internalTenantContext.getTenantRecordId());
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public void addSimplePlanDescriptor(final SimplePlanDescriptor desc) throws CatalogApiException {
// We need at least a planId
if (desc == null || desc.getPlanId() == null) {
throw new CatalogApiException(ErrorCode.CAT_INVALID_SIMPLE_PLAN_DESCRIPTOR, desc);
}
DefaultPlan plan = (DefaultPlan) getExistingPlan(desc.getPlanId());
if (plan == null && desc.getProductName() == null) {
throw new CatalogApiException(ErrorCode.CAT_INVALID_SIMPLE_PLAN_DESCRIPTOR, desc);
}
validateNewPlanDescriptor(desc);
DefaultProduct product = plan != null ? (DefaultProduct) plan.getProduct() : (DefaultProduct) getExistingProduct(desc.getProductName());
if (product == null) {
product = new DefaultProduct();
product.setName(desc.getProductName());
product.setCatagory(desc.getProductCategory());
product.initialize(catalog);
catalog.addProduct(product);
}
if (plan == null) {
plan = new DefaultPlan();
plan.setName(desc.getPlanId());
plan.setPriceListName(PriceListSet.DEFAULT_PRICELIST_NAME);
plan.setProduct(product);
plan.setRecurringBillingMode(catalog.getRecurringBillingMode());
if (desc.getTrialLength() > 0 && desc.getTrialTimeUnit() != TimeUnit.UNLIMITED) {
final DefaultPlanPhase trialPhase = new DefaultPlanPhase();
trialPhase.setPhaseType(PhaseType.TRIAL);
trialPhase.setDuration(new DefaultDuration().setUnit(desc.getTrialTimeUnit()).setNumber(desc.getTrialLength()));
trialPhase.setFixed(new DefaultFixed().setFixedPrice(new DefaultInternationalPrice().setPrices(new DefaultPrice[]{new DefaultPrice().setCurrency(desc.getCurrency()).setValue(BigDecimal.ZERO)})));
plan.setInitialPhases(new DefaultPlanPhase[]{trialPhase});
}
plan.initialize(catalog);
catalog.addPlan(plan);
} else {
validateExistingPlan(plan, desc);
}
//
// At this point we have an old or newly created **simple** Plan and we need to either create the recurring section or add a new currency.
//
if (!isCurrencySupported(desc.getCurrency())) {
catalog.addCurrency(desc.getCurrency());
// Reset the fixed prices to null so the isZero() logic re-evaluates every currency, including the new one, and sets the zero price for all of them
if (plan.getInitialPhases().length == 1) {
((DefaultInternationalPrice) plan.getInitialPhases()[0].getFixed().getPrice()).setPrices(null);
}
}
DefaultPlanPhase evergreenPhase = plan.getFinalPhase();
if (evergreenPhase == null) {
evergreenPhase = new DefaultPlanPhase();
evergreenPhase.setPhaseType(PhaseType.EVERGREEN);
evergreenPhase.setDuration(new DefaultDuration()
.setUnit(TimeUnit.UNLIMITED));
plan.setFinalPhase(evergreenPhase);
}
DefaultRecurring recurring = (DefaultRecurring) evergreenPhase.getRecurring();
if (recurring == null) {
recurring = new DefaultRecurring();
recurring.setBillingPeriod(desc.getBillingPeriod());
recurring.setRecurringPrice(new DefaultInternationalPrice().setPrices(new DefaultPrice[0]));
evergreenPhase.setRecurring(recurring);
}
if (!isPriceForCurrencyExists(recurring.getRecurringPrice(), desc.getCurrency())) {
catalog.addRecurringPriceToPlan(recurring.getRecurringPrice(), new DefaultPrice().setCurrency(desc.getCurrency()).setValue(desc.getAmount()));
}
if (desc.getProductCategory() == ProductCategory.ADD_ON) {
for (final String bp : desc.getAvailableBaseProducts()) {
final Product targetBasePlan = getExistingProduct(bp);
boolean found = false;
for (Product cur : targetBasePlan.getAvailable()) {
if (cur.getName().equals(product.getName())) {
found = true;
break;
}
}
if (!found) {
catalog.addProductAvailableAO(getExistingProduct(bp), product);
}
}
}
// Reinit catalog
catalog.initialize(catalog);
}
private boolean isPriceForCurrencyExists(final InternationalPrice price, final Currency currency) {
if (price.getPrices().length == 0) {
return false;
}
try {
price.getPrice(currency);
} catch (CatalogApiException ignore) {
return false;
}
return true;
}
private void validateExistingPlan(final DefaultPlan plan, final SimplePlanDescriptor desc) throws CatalogApiException {
boolean failedValidation = false;
//
// TRIAL VALIDATION
//
// We only support adding new Plans with NO TRIAL or a $0 TRIAL. Existing Plans not matching these criteria are incompatible
if (plan.getInitialPhases().length > 1 ||
(plan.getInitialPhases().length == 1 &&
(plan.getInitialPhases()[0].getPhaseType() != PhaseType.TRIAL || !plan.getInitialPhases()[0].getFixed().getPrice().isZero()))) {
failedValidation = true;
} else if (desc.getTrialLength() != null && desc.getTrialTimeUnit() != null) { // If desc includes trial info we verify this is valid
final boolean isDescConfiguredWithTrial = desc.getTrialLength() > 0 && desc.getTrialTimeUnit() != TimeUnit.UNLIMITED;
final boolean isPlanConfiguredWithTrial = plan.getInitialPhases().length == 1;
// Current plan has trial and desc does not or reverse
if ((isDescConfiguredWithTrial && !isPlanConfiguredWithTrial) ||
(!isDescConfiguredWithTrial && isPlanConfiguredWithTrial)) {
failedValidation = true;
// Both have trials, check that they match
} else if (isDescConfiguredWithTrial && isPlanConfiguredWithTrial) {
if (plan.getInitialPhases()[0].getDuration().getUnit() != desc.getTrialTimeUnit() ||
plan.getInitialPhases()[0].getDuration().getNumber() != desc.getTrialLength()) {
failedValidation = true;
}
}
}
//
// RECURRING VALIDATION
//
if (!failedValidation) {
// Desc only supports EVERGREEN Phase
if (plan.getFinalPhase().getPhaseType() != PhaseType.EVERGREEN) {
failedValidation = true;
} else {
// Should be same recurring BillingPeriod
if (desc.getBillingPeriod() != null && plan.getFinalPhase().getRecurring().getBillingPeriod() != desc.getBillingPeriod()) {
failedValidation = true;
} else if (desc.getCurrency() != null && desc.getAmount() != null) {
try {
final BigDecimal currentAmount = plan.getFinalPhase().getRecurring().getRecurringPrice().getPrice(desc.getCurrency());
if (currentAmount.compareTo(desc.getAmount()) != 0) {
failedValidation = true;
}
} catch (CatalogApiException ignoreIfCurrencyIsCurrentlyUndefined) {
}
}
}
}
if (failedValidation) {
throw new CatalogApiException(ErrorCode.CAT_FAILED_SIMPLE_PLAN_VALIDATION, plan.toString(), desc.toString());
}
}
private boolean isCurrencySupported(final Currency targetCurrency) {
if (catalog.getSupportedCurrencies() != null) {
for (final Currency input : catalog.getSupportedCurrencies()) {
if (input.equals(targetCurrency)) {
return true;
}
}
}
return false;
}
private void validateNewPlanDescriptor(final SimplePlanDescriptor desc) throws CatalogApiException {
final boolean invalidPlan = desc.getPlanId() == null && (desc.getProductCategory() == null || desc.getBillingPeriod() == null);
final boolean invalidPrice = (desc.getAmount() == null || desc.getAmount().compareTo(BigDecimal.ZERO) < 0) ||
desc.getCurrency() == null;
if (invalidPlan || invalidPrice) {
throw new CatalogApiException(ErrorCode.CAT_INVALID_SIMPLE_PLAN_DESCRIPTOR, desc);
}
if (desc.getProductCategory() == ProductCategory.ADD_ON) {
if (desc.getAvailableBaseProducts() == null || desc.getAvailableBaseProducts().isEmpty()) {
throw new CatalogApiException(ErrorCode.CAT_INVALID_SIMPLE_PLAN_DESCRIPTOR, desc);
}
for (final String cur : desc.getAvailableBaseProducts()) {
if (getExistingProduct(cur) == null) {
throw new CatalogApiException(ErrorCode.CAT_INVALID_SIMPLE_PLAN_DESCRIPTOR, desc);
}
}
}
}
private Product getExistingProduct(final String productName) {
try {
return catalog.findProduct(productName);
} catch (final CatalogApiException e) {
return null;
}
}
private Plan getExistingPlan(final String planName) {
try {
return catalog.findPlan(planName);
} catch (CatalogApiException e) {
return null;
}
}
private DefaultPlanRules getSaneDefaultPlanRules(final DefaultPriceList defaultPriceList) {
final DefaultCaseChangePlanPolicy[] changePolicy = new DefaultCaseChangePlanPolicy[1];
changePolicy[0] = new DefaultCaseChangePlanPolicy();
changePolicy[0].setPolicy(BillingActionPolicy.IMMEDIATE);
final DefaultCaseChangePlanAlignment[] changeAlignment = new DefaultCaseChangePlanAlignment[1];
changeAlignment[0] = new DefaultCaseChangePlanAlignment();
changeAlignment[0].setAlignment(PlanAlignmentChange.START_OF_BUNDLE);
final DefaultCaseCancelPolicy[] cancelPolicy = new DefaultCaseCancelPolicy[1];
cancelPolicy[0] = new DefaultCaseCancelPolicy();
cancelPolicy[0].setPolicy(BillingActionPolicy.IMMEDIATE);
final DefaultCaseCreateAlignment[] createAlignment = new DefaultCaseCreateAlignment[1];
createAlignment[0] = new DefaultCaseCreateAlignment();
createAlignment[0].setAlignment(PlanAlignmentCreate.START_OF_BUNDLE);
final DefaultCaseBillingAlignment[] billingAlignmentCase = new DefaultCaseBillingAlignment[1];
billingAlignmentCase[0] = new DefaultCaseBillingAlignment();
billingAlignmentCase[0].setAlignment(BillingAlignment.ACCOUNT);
final DefaultCasePriceList[] priceList = new DefaultCasePriceList[1];
priceList[0] = new DefaultCasePriceList();
priceList[0].setToPriceList(defaultPriceList);
return new DefaultPlanRules()
.setChangeCase(changePolicy)
.setChangeAlignmentCase(changeAlignment)
.setCancelCase(cancelPolicy)
.setCreateAlignmentCase(createAlignment)
.setBillingAlignmentCase(billingAlignmentCase)
.setPriceListCase(priceList);
}
}
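// Illustrative sketch (not part of the original file): typical use of CatalogUpdater when a tenant adds its
// first simple plan. The descriptor and tenant context are assumed to be supplied by the caller; only the
// public methods defined above are used.
class CatalogUpdaterUsageSketch {
String createInitialCatalogXml(final SimplePlanDescriptor desc, final InternalTenantContext context) throws CatalogApiException {
// Start from an empty default catalog that supports the descriptor's currency
final CatalogUpdater updater = new CatalogUpdater(new DateTime(), desc.getCurrency());
// Add (or merge) the simple plan described by the descriptor
updater.addSimplePlanDescriptor(desc);
// Serialize the catalog and round-trip validate it before returning the XML
return updater.getCatalogXML(context);
}
}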
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.test.integration.functions.binary.matrix_full_other;
import java.util.HashMap;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.apache.sysml.api.DMLScript;
import org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM;
import org.apache.sysml.lops.LopProperties.ExecType;
import org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex;
import org.apache.sysml.test.integration.AutomatedTestBase;
import org.apache.sysml.test.integration.TestConfiguration;
import org.apache.sysml.test.utils.TestUtils;
/**
* The main purpose of this test is to verify various input combinations for
* matrix-matrix logical operations that internally translate to binary operations.
*
*/
public class FullLogicalMatrixTest extends AutomatedTestBase
{
private final static String TEST_NAME1 = "LogicalMatrixTest";
private final static String TEST_DIR = "functions/binary/matrix_full_other/";
private final static String TEST_CLASS_DIR = TEST_DIR + FullLogicalMatrixTest.class.getSimpleName() + "/";
private final static double eps = 1e-10;
private final static int rows1 = 1383;
private final static int cols1 = 1432;
private final static double sparsity1 = 0.7;
private final static double sparsity2 = 0.01;
public enum Type{
GREATER,
LESS,
EQUALS,
NOT_EQUALS,
GREATER_EQUALS,
LESS_EQUALS,
}
@Override
public void setUp()
{
addTestConfiguration( TEST_NAME1, new TestConfiguration(TEST_CLASS_DIR, TEST_NAME1, new String[] { "C" }) );
TestUtils.clearAssertionInformation();
if (TEST_CACHE_ENABLED) {
setOutAndExpectedDeletionDisabled(true);
}
}
@BeforeClass
public static void init()
{
TestUtils.clearDirectory(TEST_DATA_DIR + TEST_CLASS_DIR);
}
@AfterClass
public static void cleanUp()
{
if (TEST_CACHE_ENABLED) {
TestUtils.clearDirectory(TEST_DATA_DIR + TEST_CLASS_DIR);
}
}
@Test
public void testLogicalGreaterDenseDenseCP()
{
runLogicalTest(Type.GREATER, false, false, ExecType.CP);
}
@Test
public void testLogicalGreaterDenseSparseCP()
{
runLogicalTest(Type.GREATER, false, true, ExecType.CP);
}
@Test
public void testLogicalGreaterSparseDenseCP()
{
runLogicalTest(Type.GREATER, true, false, ExecType.CP);
}
@Test
public void testLogicalGreaterSparseSparseCP()
{
runLogicalTest(Type.GREATER, true, true, ExecType.CP);
}
@Test
public void testLogicalGreaterEqualsDenseDenseCP()
{
runLogicalTest(Type.GREATER_EQUALS, false, false, ExecType.CP);
}
@Test
public void testLogicalGreaterEqualsDenseSparseCP()
{
runLogicalTest(Type.GREATER_EQUALS, false, true, ExecType.CP);
}
@Test
public void testLogicalGreaterEqualsSparseDenseCP()
{
runLogicalTest(Type.GREATER_EQUALS, true, false, ExecType.CP);
}
@Test
public void testLogicalGreaterEqualsSparseSparseCP()
{
runLogicalTest(Type.GREATER_EQUALS, true, true, ExecType.CP);
}
@Test
public void testLogicalEqualsDenseDenseCP()
{
runLogicalTest(Type.EQUALS, false, false, ExecType.CP);
}
@Test
public void testLogicalEqualsDenseSparseCP()
{
runLogicalTest(Type.EQUALS, false, true, ExecType.CP);
}
@Test
public void testLogicalEqualsSparseDenseCP()
{
runLogicalTest(Type.EQUALS, true, false, ExecType.CP);
}
@Test
public void testLogicalEqualsSparseSparseCP()
{
runLogicalTest(Type.EQUALS, true, true, ExecType.CP);
}
@Test
public void testLogicalNotEqualsDenseDenseCP()
{
runLogicalTest(Type.NOT_EQUALS, false, false, ExecType.CP);
}
@Test
public void testLogicalNotEqualsDenseSparseCP()
{
runLogicalTest(Type.NOT_EQUALS, false, true, ExecType.CP);
}
@Test
public void testLogicalNotEqualsSparseDenseCP()
{
runLogicalTest(Type.NOT_EQUALS, true, false, ExecType.CP);
}
@Test
public void testLogicalNotEqualsSparseSparseCP()
{
runLogicalTest(Type.NOT_EQUALS, true, true, ExecType.CP);
}
@Test
public void testLogicalLessDenseDenseCP()
{
runLogicalTest(Type.LESS, false, false, ExecType.CP);
}
@Test
public void testLogicalLessDenseSparseCP()
{
runLogicalTest(Type.LESS, false, true, ExecType.CP);
}
@Test
public void testLogicalLessSparseDenseCP()
{
runLogicalTest(Type.LESS, true, false, ExecType.CP);
}
@Test
public void testLogicalLessSparseSparseCP()
{
runLogicalTest(Type.LESS, true, true, ExecType.CP);
}
@Test
public void testLogicalLessEqualsDenseDenseCP()
{
runLogicalTest(Type.LESS_EQUALS, false, false, ExecType.CP);
}
@Test
public void testLogicalLessEqualsDenseSparseCP()
{
runLogicalTest(Type.LESS_EQUALS, false, true, ExecType.CP);
}
@Test
public void testLogicalLessEqualsSparseDenseCP()
{
runLogicalTest(Type.LESS_EQUALS, true, false, ExecType.CP);
}
@Test
public void testLogicalLessEqualsSparseSparseCP()
{
runLogicalTest(Type.LESS_EQUALS, true, true, ExecType.CP);
}
// ------------------------
@Test
public void testLogicalGreaterDenseDenseSP()
{
runLogicalTest(Type.GREATER, false, false, ExecType.SPARK);
}
@Test
public void testLogicalGreaterDenseSparseSP()
{
runLogicalTest(Type.GREATER, false, true, ExecType.SPARK);
}
@Test
public void testLogicalGreaterSparseDenseSP()
{
runLogicalTest(Type.GREATER, true, false, ExecType.SPARK);
}
@Test
public void testLogicalGreaterSparseSparseSP()
{
runLogicalTest(Type.GREATER, true, true, ExecType.SPARK);
}
@Test
public void testLogicalGreaterEqualsDenseDenseSP()
{
runLogicalTest(Type.GREATER_EQUALS, false, false, ExecType.SPARK);
}
@Test
public void testLogicalGreaterEqualsDenseSparseSP()
{
runLogicalTest(Type.GREATER_EQUALS, false, true, ExecType.SPARK);
}
@Test
public void testLogicalGreaterEqualsSparseDenseSP()
{
runLogicalTest(Type.GREATER_EQUALS, true, false, ExecType.SPARK);
}
@Test
public void testLogicalGreaterEqualsSparseSparseSP()
{
runLogicalTest(Type.GREATER_EQUALS, true, true, ExecType.SPARK);
}
@Test
public void testLogicalEqualsDenseDenseSP()
{
runLogicalTest(Type.EQUALS, false, false, ExecType.SPARK);
}
@Test
public void testLogicalEqualsDenseSparseSP()
{
runLogicalTest(Type.EQUALS, false, true, ExecType.SPARK);
}
@Test
public void testLogicalEqualsSparseDenseSP()
{
runLogicalTest(Type.EQUALS, true, false, ExecType.SPARK);
}
@Test
public void testLogicalEqualsSparseSparseSP()
{
runLogicalTest(Type.EQUALS, true, true, ExecType.SPARK);
}
@Test
public void testLogicalNotEqualsDenseDenseSP()
{
runLogicalTest(Type.NOT_EQUALS, false, false, ExecType.SPARK);
}
@Test
public void testLogicalNotEqualsDenseSparseSP()
{
runLogicalTest(Type.NOT_EQUALS, false, true, ExecType.SPARK);
}
@Test
public void testLogicalNotEqualsSparseDenseSP()
{
runLogicalTest(Type.NOT_EQUALS, true, false, ExecType.SPARK);
}
@Test
public void testLogicalNotEqualsSparseSparseSP()
{
runLogicalTest(Type.NOT_EQUALS, true, true, ExecType.SPARK);
}
@Test
public void testLogicalLessDenseDenseSP()
{
runLogicalTest(Type.LESS, false, false, ExecType.SPARK);
}
@Test
public void testLogicalLessDenseSparseSP()
{
runLogicalTest(Type.LESS, false, true, ExecType.SPARK);
}
@Test
public void testLogicalLessSparseDenseSP()
{
runLogicalTest(Type.LESS, true, false, ExecType.SPARK);
}
@Test
public void testLogicalLessSparseSparseSP()
{
runLogicalTest(Type.LESS, true, true, ExecType.SPARK);
}
@Test
public void testLogicalLessEqualsDenseDenseSP()
{
runLogicalTest(Type.LESS_EQUALS, false, false, ExecType.SPARK);
}
@Test
public void testLogicalLessEqualsDenseSparseSP()
{
runLogicalTest(Type.LESS_EQUALS, false, true, ExecType.SPARK);
}
@Test
public void testLogicalLessEqualsSparseDenseSP()
{
runLogicalTest(Type.LESS_EQUALS, true, false, ExecType.SPARK);
}
@Test
public void testLogicalLessEqualsSparseSparseSP()
{
runLogicalTest(Type.LESS_EQUALS, true, true, ExecType.SPARK);
}
// ----------------------
@Test
public void testLogicalGreaterDenseDenseMR()
{
runLogicalTest(Type.GREATER, false, false, ExecType.MR);
}
@Test
public void testLogicalGreaterDenseSparseMR()
{
runLogicalTest(Type.GREATER, false, true, ExecType.MR);
}
@Test
public void testLogicalGreaterSparseDenseMR()
{
runLogicalTest(Type.GREATER, true, false, ExecType.MR);
}
@Test
public void testLogicalGreaterSparseSparseMR()
{
runLogicalTest(Type.GREATER, true, true, ExecType.MR);
}
@Test
public void testLogicalGreaterEqualsDenseDenseMR()
{
runLogicalTest(Type.GREATER_EQUALS, false, false, ExecType.MR);
}
@Test
public void testLogicalGreaterEqualsDenseSparseMR()
{
runLogicalTest(Type.GREATER_EQUALS, false, true, ExecType.MR);
}
@Test
public void testLogicalGreaterEqualsSparseDenseMR()
{
runLogicalTest(Type.GREATER_EQUALS, true, false, ExecType.MR);
}
@Test
public void testLogicalGreaterEqualsSparseSparseMR()
{
runLogicalTest(Type.GREATER_EQUALS, true, true, ExecType.MR);
}
@Test
public void testLogicalEqualsDenseDenseMR()
{
runLogicalTest(Type.EQUALS, false, false, ExecType.MR);
}
@Test
public void testLogicalEqualsDenseSparseMR()
{
runLogicalTest(Type.EQUALS, false, true, ExecType.MR);
}
@Test
public void testLogicalEqualsSparseDenseMR()
{
runLogicalTest(Type.EQUALS, true, false, ExecType.MR);
}
@Test
public void testLogicalEqualsSparseSparseMR()
{
runLogicalTest(Type.EQUALS, true, true, ExecType.MR);
}
@Test
public void testLogicalNotEqualsDenseDenseMR()
{
runLogicalTest(Type.NOT_EQUALS, false, false, ExecType.MR);
}
@Test
public void testLogicalNotEqualsDenseSparseMR()
{
runLogicalTest(Type.NOT_EQUALS, false, true, ExecType.MR);
}
@Test
public void testLogicalNotEqualsSparseDenseMR()
{
runLogicalTest(Type.NOT_EQUALS, true, false, ExecType.MR);
}
@Test
public void testLogicalNotEqualsSparseSparseMR()
{
runLogicalTest(Type.NOT_EQUALS, true, true, ExecType.MR);
}
@Test
public void testLogicalLessDenseDenseMR()
{
runLogicalTest(Type.LESS, false, false, ExecType.MR);
}
@Test
public void testLogicalLessDenseSparseMR()
{
runLogicalTest(Type.LESS, false, true, ExecType.MR);
}
@Test
public void testLogicalLessSparseDenseMR()
{
runLogicalTest(Type.LESS, true, false, ExecType.MR);
}
@Test
public void testLogicalLessSparseSparseMR()
{
runLogicalTest(Type.LESS, true, true, ExecType.MR);
}
@Test
public void testLogicalLessEqualsDenseDenseMR()
{
runLogicalTest(Type.LESS_EQUALS, false, false, ExecType.MR);
}
@Test
public void testLogicalLessEqualsDenseSparseMR()
{
runLogicalTest(Type.LESS_EQUALS, false, true, ExecType.MR);
}
@Test
public void testLogicalLessEqualsSparseDenseMR()
{
runLogicalTest(Type.LESS_EQUALS, true, false, ExecType.MR);
}
@Test
public void testLogicalLessEqualsSparseSparseMR()
{
runLogicalTest(Type.LESS_EQUALS, true, true, ExecType.MR);
}
private void runLogicalTest( Type type, boolean sp1, boolean sp2, ExecType et )
{
String TEST_NAME = TEST_NAME1;
int rows = rows1;
int cols = cols1;
RUNTIME_PLATFORM platformOld = rtplatform;
switch( et ){
case MR: rtplatform = RUNTIME_PLATFORM.HADOOP; break;
case SPARK: rtplatform = RUNTIME_PLATFORM.SPARK; break;
default: rtplatform = RUNTIME_PLATFORM.HYBRID; break;
}
boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
if( rtplatform == RUNTIME_PLATFORM.SPARK )
DMLScript.USE_LOCAL_SPARK_CONFIG = true;
double sparsityLeft = sp1 ? sparsity2 : sparsity1;
double sparsityRight = sp2 ? sparsity2 : sparsity1;
String TEST_CACHE_DIR = "";
if (TEST_CACHE_ENABLED) {
TEST_CACHE_DIR = type.ordinal() + "_" + rows + "_" + cols + "_" + sparsityLeft + "_" + sparsityRight + "/";
}
try
{
TestConfiguration config = getTestConfiguration(TEST_NAME);
loadTestConfiguration(config, TEST_CACHE_DIR);
/* This is for running the junit test the new way, i.e., construct the arguments directly */
String HOME = SCRIPT_DIR + TEST_DIR;
fullDMLScriptName = HOME + TEST_NAME + ".dml";
programArgs = new String[]{"-args", input("A"), input("B"),
Integer.toString(type.ordinal()), output("C") };
fullRScriptName = HOME + TEST_NAME + ".R";
rCmd = "Rscript" + " " + fullRScriptName + " " + inputDir() + " " + type.ordinal() + " " + expectedDir();
//generate actual dataset
double[][] A = getRandomMatrix(rows, cols, -10, 10, sparsityLeft, 7);
writeInputMatrixWithMTD("A", A, true);
double[][] B = getRandomMatrix(rows, cols, -15, 15, sparsityRight, 3);
writeInputMatrixWithMTD("B", B, true);
//run tests
runTest(true, false, null, -1);
runRScript(true);
//compare matrices
HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("C");
HashMap<CellIndex, Double> rfile = readRMatrixFromFS("C");
TestUtils.compareMatrices(dmlfile, rfile, eps, "Stat-DML", "Stat-R");
}
finally
{
rtplatform = platformOld;
DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
}
}
}
|
|
/**
* SIX OVAL - https://nakamura5akihito.github.io/
* Copyright (C) 2010 Akihito Nakamura
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.opensec.six.oval.model.windows;
import io.opensec.six.oval.model.ComponentType;
import io.opensec.six.oval.model.Family;
import io.opensec.six.oval.model.sc.EntityItemBoolType;
import io.opensec.six.oval.model.sc.EntityItemIntType;
import io.opensec.six.oval.model.sc.EntityItemStringType;
import io.opensec.six.oval.model.sc.ItemType;
import io.opensec.six.oval.model.sc.StatusEnumeration;
/**
* This item stores information about Windows services that are present on the system.
*
* @author Akihito Nakamura, AIST
* @see <a href="http://oval.mitre.org/language/">OVAL Language</a>
*/
public class ServiceItem
extends ItemType
{
//{0..1}
private EntityItemStringType service_name;
private EntityItemStringType display_name;
private EntityItemStringType description;
private EntityItemServiceTypeType service_type;
private EntityItemServiceStartTypeType start_type;
private EntityItemServiceCurrentStateType current_state;
private EntityItemServiceControlsAcceptedType controls_accepted;
private EntityItemStringType start_name;
private EntityItemStringType path;
private EntityItemIntType pid;
private EntityItemBoolType service_flag;
private EntityItemStringType dependencies;
/**
* Constructor.
*/
public ServiceItem()
{
this( 0 );
}
public ServiceItem(
final int id
)
{
this( id, null );
}
public ServiceItem(
final int id,
final StatusEnumeration status
)
{
super( id, status );
_oval_family = Family.WINDOWS;
_oval_component = ComponentType.SERVICE;
}
/**
*/
public void setServiceName(
final EntityItemStringType service_name
)
{
this.service_name = service_name;
}
public EntityItemStringType getServiceName()
{
return service_name;
}
/**
*/
public void setDisplayName(
final EntityItemStringType display_name
)
{
this.display_name = display_name;
}
public EntityItemStringType getDisplayName()
{
return display_name;
}
/**
*/
public void setDescription(
final EntityItemStringType description
)
{
this.description = description;
}
public EntityItemStringType getDescription()
{
return description;
}
/**
*/
public void setServiceType(
final EntityItemServiceTypeType service_type
)
{
this.service_type = service_type;
}
public EntityItemServiceTypeType getServiceType()
{
return service_type;
}
/**
*/
public void setStartType(
final EntityItemServiceStartTypeType start_type
)
{
this.start_type = start_type;
}
public EntityItemServiceStartTypeType getStartType()
{
return start_type;
}
/**
*/
public void setCurrentState(
final EntityItemServiceCurrentStateType current_state
)
{
this.current_state = current_state;
}
public EntityItemServiceCurrentStateType getCurrentState()
{
return current_state;
}
/**
*/
public void setControlsAccepted(
final EntityItemServiceControlsAcceptedType controls_accepted
)
{
this.controls_accepted = controls_accepted;
}
public EntityItemServiceControlsAcceptedType getControlsAccepted()
{
return controls_accepted;
}
/**
*/
public void setStartName(
final EntityItemStringType start_name
)
{
this.start_name = start_name;
}
public EntityItemStringType getStartName()
{
return start_name;
}
/**
*/
public void setPath(
final EntityItemStringType path
)
{
this.path = path;
}
public EntityItemStringType getPath()
{
return path;
}
/**
*/
public void setPid(
final EntityItemIntType pid
)
{
this.pid = pid;
}
public EntityItemIntType getPid()
{
return pid;
}
/**
*/
public void setServiceFlag(
final EntityItemBoolType service_flag
)
{
this.service_flag = service_flag;
}
public EntityItemBoolType getServiceFlag()
{
return service_flag;
}
/**
*/
public void setDependencies(
final EntityItemStringType dependencies
)
{
this.dependencies = dependencies;
}
public EntityItemStringType getDependencies()
{
return dependencies;
}
//**************************************************************
// java.lang.Object
//**************************************************************
@Override
public String toString()
{
return "service_item[" + super.toString()
+ ", service_name=" + getServiceName()
+ ", display_name=" + getDisplayName()
+ ", description=" + getDescription()
+ ", service_type=" + getServiceType()
+ ", start_type=" + getStartType()
+ ", current_state=" + getCurrentState()
+ ", controls_accepted=" + getControlsAccepted()
+ ", start_name=" + getStartName()
+ ", path=" + getPath()
+ ", pid=" + getPid()
+ ", service_flag=" + getServiceFlag()
+ ", dependencies=" + getDependencies()
+ "]";
}
}
|
|
/*
* Copyright 2011, 2012 Thomas Amsler
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thomasamsler.android.flashcards.pager;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import org.thomasamsler.android.flashcards.ActionBusListener;
import org.thomasamsler.android.flashcards.AppConstants;
import org.thomasamsler.android.flashcards.MainApplication;
import org.thomasamsler.android.flashcards.R;
import org.thomasamsler.android.flashcards.activity.MainActivity;
import org.thomasamsler.android.flashcards.db.DataSource;
import org.thomasamsler.android.flashcards.dialog.HelpDialog;
import org.thomasamsler.android.flashcards.fragment.CardFragment;
import org.thomasamsler.android.flashcards.model.Card;
import org.thomasamsler.android.flashcards.model.CardSet;
import android.content.Context;
import android.os.Bundle;
import android.os.Handler;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentStatePagerAdapter;
import android.support.v4.view.ViewPager;
import android.support.v4.view.ViewPager.OnPageChangeListener;
import android.util.SparseArray;
import android.view.ViewGroup;
import android.widget.Toast;
public class CardsPager extends Fragment implements ActionBusListener, AppConstants {
private static final Integer NEG_ONE = Integer.valueOf(-1);
private ViewPager mViewPager;
private MyFragmentPagerAdapter mMyFragmentPagerAdapter;
private Random mRandom;
private List<Card> mCards;
private List<Integer> mRandomCardPositionList;
private List<Integer> mAvailableCardPositionList;
private int mNumberOfCards;
private int mHelpContext;
private int mFontSize;
private CardSet mCardSet;
private DataSource mDataSource;
private MainActivity mActivity;
private Context mApplicationContext;
private MainApplication mMainApplication;
private Handler mHandler;
public static CardsPager newInstance(CardSet cardSet, int fontSize) {
CardsPager cardsPager = new CardsPager();
cardsPager.init(cardSet, fontSize);
return cardsPager;
}
public void init(CardSet cardSet, int fontSize) {
mCardSet = cardSet;
mFontSize = fontSize;
}
public void clear() {
if(null != mViewPager) {
mViewPager.removeAllViews();
}
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
mActivity = (MainActivity) getActivity();
mApplicationContext = getActivity().getApplicationContext();
mMainApplication = (MainApplication) getActivity().getApplication();
mDataSource = ((MainActivity) getActivity()).getDataSource();
mHelpContext = HELP_CONTEXT_VIEW_CARD;
mMainApplication.registerAction(this, ACTION_EDIT_CARD, ACTION_ZOOM_IN_CARD, ACTION_ZOOM_OUT_CARD, ACTION_SHOW_CARD_INFO, ACTION_DELETE_CARD, ACTION_UPDATE_CARD);
mRandom = new Random();
try {
mCards = mDataSource.getCards(mCardSet.getId());
}
catch (Exception e) {
mMainApplication.doAction(ACTION_SHOW_CARD_SETS, Boolean.TRUE);
}
if (0 == mCards.size()) {
Toast.makeText(mApplicationContext, R.string.view_cards_emtpy_set_message, Toast.LENGTH_SHORT).show();
}
mNumberOfCards = mCards.size();
mRandomCardPositionList = new ArrayList<Integer>();
mAvailableCardPositionList = new ArrayList<Integer>();
// Initialize the position lists: every pager position starts unassigned (NEG_ONE) and later draws a random, unused card index
for (int i = 0; i < mNumberOfCards; i++) {
mRandomCardPositionList.add(NEG_ONE);
mAvailableCardPositionList.add(Integer.valueOf(i));
}
mViewPager = (ViewPager) mActivity.findViewById(R.id.viewpager);
mMyFragmentPagerAdapter = new MyFragmentPagerAdapter(mActivity.getSupportFragmentManager());
mHandler = new Handler();
mHandler.post(new Runnable() {
public void run() {
mViewPager.setAdapter(mMyFragmentPagerAdapter);
}
});
/*
* Use page change listener to magnify and reduce the word's font size
*/
mViewPager.setOnPageChangeListener(new OnPageChangeListener() {
public void onPageSelected(int currentIndex) {
CardFragment cardFragment = mMyFragmentPagerAdapter.getFragment(currentIndex);
if (null != cardFragment) {
cardFragment.setFontSize(mFontSize);
}
}
public void onPageScrolled(int arg0, float arg1, int arg2) {
// Nothing to do here
}
public void onPageScrollStateChanged(int state) {
// Nothing to do here
}
});
}
protected void showHelp() {
HelpDialog helpDialog = new HelpDialog(mActivity);
switch (mHelpContext) {
case HELP_CONTEXT_DEFAULT:
helpDialog.setHelp(mActivity.getResources().getString(R.string.help_content_default));
break;
case HELP_CONTEXT_VIEW_CARD:
helpDialog.setHelp(mActivity.getResources().getString(R.string.help_content_view_card));
break;
default:
helpDialog.setHelp(mActivity.getResources().getString(R.string.help_content_default));
}
helpDialog.show();
}
private void updateCard(Card updatedCard) {
/*
* First, we update the in memory list of cards
*/
Card card = mCards.get(mRandomCardPositionList.get(updatedCard.getRandomCardIndex()));
card.setQuestion(updatedCard.getQuestion());
card.setAnswer(updatedCard.getAnswer());
mDataSource.updateCard(card);
}
private void editCard() {
int currentIndex = mViewPager.getCurrentItem();
CardFragment cardFragment = mMyFragmentPagerAdapter.getFragment(currentIndex);
if (null != cardFragment) {
cardFragment.onEdit();
}
}
/*
* Called from action bar
*/
private void zoom(int action) {
int currentIndex = mViewPager.getCurrentItem();
CardFragment cardFragment = mMyFragmentPagerAdapter.getFragment(currentIndex);
if (null == cardFragment) {
return;
}
if (ACTION_ZOOM_IN_CARD == action) {
mFontSize += FONT_SIZE_ZOOM_CHANGE;
}
else if (ACTION_ZOOM_OUT_CARD == action) {
mFontSize -= FONT_SIZE_ZOOM_CHANGE;
}
cardFragment.setFontSize(mFontSize);
}
private void showCardInformation() {
String message = String.format(mActivity.getResources().getString(R.string.card_information), mCardSet.getTitle());
Toast.makeText(mApplicationContext, message, Toast.LENGTH_SHORT).show();
}
/*
* Called from action bar
*/
public void deleteCard() {
// Get the current card index
int currentIndex = mViewPager.getCurrentItem();
// Reduce the card counter by one
mNumberOfCards -= 1;
Card card = null;
if (mRandomCardPositionList.size() > 0) {
// Mark card as deleted. The saveCards(...) method ignores null or
// empty string cards
card = mCards.set(mRandomCardPositionList.get(currentIndex), null);
// Delete card
mDataSource.deleteCard(card);
// Remove the deleted card position
mRandomCardPositionList.remove(currentIndex);
}
/*
* Determine all remaining random card positions
*/
int randomNum;
if (mAvailableCardPositionList.size() > 0) {
for (int i = 0; i < mRandomCardPositionList.size(); i++) {
if (NEG_ONE.compareTo(mRandomCardPositionList.get(i)) == 0 && mAvailableCardPositionList.size() > 0) {
randomNum = mRandom.nextInt(mAvailableCardPositionList.size());
mRandomCardPositionList.set(i, mAvailableCardPositionList.remove(randomNum));
}
}
}
mMyFragmentPagerAdapter.notifyDataSetChanged();
// When we delete the last card in a card set, we return to the list
if (mRandomCardPositionList.size() == 0) {
String message = String.format(mActivity.getResources().getString(R.string.delete_last_card_message), mCardSet.getTitle());
Toast.makeText(mApplicationContext, message, Toast.LENGTH_SHORT).show();
mMainApplication.doAction(ACTION_SHOW_CARD_SETS, Boolean.TRUE);
}
else {
Toast.makeText(mApplicationContext, R.string.delete_card, Toast.LENGTH_SHORT).show();
}
if (null != card) {
// Notify CardSet that we have just deleted a card
mMainApplication.doAction(ACTION_DELETE_CARD_UPDATE_CARD_SET, Long.valueOf(card.getCardSetId()));
}
}
/*
* Classes
*/
private class MyFragmentPagerAdapter extends FragmentStatePagerAdapter {
private SparseArray<WeakReference<CardFragment>> mPageReferenceMap = new SparseArray<WeakReference<CardFragment>>();
public MyFragmentPagerAdapter(FragmentManager fm) {
super(fm);
}
@Override
public Fragment getItem(int index) {
return getFragment(index);
}
@Override
public int getCount() {
return mNumberOfCards;
}
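        /*
         * Lazily assign a random card to this page (if none has been assigned yet),
         * build its fragment and keep a weak reference to it so getFragment(...)
         * can hand it back to getItem(...) and to the edit/zoom actions.
         */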
        @Override
        public Object instantiateItem(ViewGroup container, int position) {
int randomNum;
if (mRandomCardPositionList.get(position).compareTo(NEG_ONE) == 0) {
randomNum = mRandom.nextInt(mAvailableCardPositionList.size());
mRandomCardPositionList.set(position, mAvailableCardPositionList.remove(randomNum));
}
CardFragment cardFragment = CardFragment.newInstance(mCards.get(mRandomCardPositionList.get(position)), position, mNumberOfCards, mFontSize);
mPageReferenceMap.put(Integer.valueOf(position), new WeakReference<CardFragment>(cardFragment));
return super.instantiateItem(container, position);
}
@Override
public void destroyItem(ViewGroup container, int position, Object object) {
super.destroyItem(container, position, object);
mPageReferenceMap.remove(Integer.valueOf(position));
}
/*
* Overriding this method in conjunction with calling
* notifyDataSetChanged removes a page from the pager.
*/
@Override
public int getItemPosition(Object object) {
return POSITION_NONE;
}
public CardFragment getFragment(int key) {
WeakReference<CardFragment> weakReference = mPageReferenceMap.get(key);
if (null != weakReference) {
return (CardFragment) weakReference.get();
}
else {
return null;
}
}
}
public void doAction(Integer action, Object data) {
switch (action) {
case ACTION_EDIT_CARD:
editCard();
break;
case ACTION_ZOOM_IN_CARD:
zoom(ACTION_ZOOM_IN_CARD);
break;
case ACTION_ZOOM_OUT_CARD:
zoom(ACTION_ZOOM_OUT_CARD);
break;
case ACTION_SHOW_CARD_INFO:
showCardInformation();
break;
case ACTION_DELETE_CARD:
deleteCard();
break;
case ACTION_UPDATE_CARD:
updateCard((Card) data);
break;
}
}
}
|
|
package edu.wpi.first.wpilibj.templates;
//<editor-fold defaultstate="collapsed" desc="Imports">
import edu.wpi.first.wpilibj.SimpleRobot;
import edu.wpi.first.wpilibj.Victor;
import edu.wpi.first.wpilibj.Jaguar;
import edu.wpi.first.wpilibj.RobotDrive;
import edu.wpi.first.wpilibj.Compressor;
import edu.wpi.first.wpilibj.Solenoid;
import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.DigitalInput;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.camera.AxisCamera;
import edu.wpi.first.wpilibj.AnalogChannel;
import edu.wpi.first.wpilibj.Relay;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="Initializations">
public class RobotTemplate extends SimpleRobot {
int BckRghtWhl = 9;
int BckLftWhl = 2;
int FrntRghtWhl = 1;
int FrntLftWhl = 10;
RobotDrive driveRobot = new RobotDrive (
FrntLftWhl,
BckLftWhl,
FrntRghtWhl,
BckRghtWhl);
int Sol = 1;
Solenoid FrisbeeLauncher = new Solenoid(Sol);
int Sol2 = 8;
Solenoid FrisbeeLifter = new Solenoid(Sol2);
int Sol3 = 3;
Solenoid VerticalMovement = new Solenoid(Sol3);
int Sol4 = 4;
Solenoid VerticalDown = new Solenoid(Sol4);
int PressureSensor = 1;
int RelayComp = 4;
Compressor Compressor = new Compressor(PressureSensor, RelayComp);
int Switch = 2;
DigitalInput FrisbeeSwitch = new DigitalInput(Switch);
int Switch2 = 5;
DigitalInput LoaderSwitch = new DigitalInput(Switch2);
int ConveyorMotorLow = 3;
Victor Conveyor1 = new Victor(ConveyorMotorLow);
int Angle = 7;
Victor AngleMotor = new Victor(Angle);
int Shoot1 = 4;
Jaguar Shooter1 = new Jaguar(Shoot1);
int Shoot2 = 5;
Jaguar Shooter2 = new Jaguar(Shoot2);
int JStick = 1;
int JStick2 = 2;
int JStick3 = 3;
Joystick Joystick1 = new Joystick(JStick);
Joystick Joystick2 = new Joystick(JStick2);
Joystick Joystick3 = new Joystick(JStick3);
Timer timer = new Timer();
double Time = 0;
double TotalTime = 1;
double Presses = 0;
double Speed = 0;
int Spot = 7;
AnalogChannel SPOTY = new AnalogChannel(Spot);
AxisCamera camera;
int Switch3 = 9;
DigitalInput LimitSwitch = new DigitalInput(Switch3);
int LEDlIgHtShue = 8;
Relay WeLookSoCool = new Relay(LEDlIgHtShue);
int LEye = 6;
Relay LeftEye = new Relay(LEye);
int REye = 2;
Relay RightEye = new Relay(REye);
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="Autonomous">
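    // Autonomous routine: raise the shooter, spin up both shooter wheels, fire
    // three frisbees with the pneumatic launcher, then spin down and lower the
    // shooter again. All timing comes from fixed Timer.delay() calls.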
public void autonomous() {
Compressor.start();
VerticalMovement.set(true);
VerticalDown.set(false);
Timer.delay(2);
Shooter1.set(-1);
Shooter2.set(-1);
Timer.delay(3);
FrisbeeLauncher.set(true);
Timer.delay(1);
FrisbeeLauncher.set(false);
Timer.delay(1.5);
FrisbeeLauncher.set(true);
Timer.delay(1);
FrisbeeLauncher.set(false);
Timer.delay(1.5);
FrisbeeLauncher.set(true);
Timer.delay(1);
FrisbeeLauncher.set(false);
Timer.delay(1);
Shooter1.set(0);
Shooter2.set(0);
Timer.delay(.5);
VerticalMovement.set(false);
VerticalDown.set(true);
Compressor.stop();
}
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="Teleoperated">
public void operatorControl() {
//WeLookSoCool.set(Relay.Value.kOn);
boolean LED = true;
boolean Up = false;
boolean Down = false;
boolean Righty = true;
boolean Lefty = true;
LeftEye.set(Relay.Value.kOn);
RightEye.set(Relay.Value.kOn);
String Message = "YOU GUYS GOT THIS WRECK EVERYONE AROUND YOU";
int Angle = 0;
while (isOperatorControl() && isEnabled()) {
Compressor.start();
if(Joystick2.getRawButton(1) == true) {
if(LED == true) {
WeLookSoCool.set(Relay.Value.kOff);
LED = false;
} else {
WeLookSoCool.set(Relay.Value.kOn);
LED = true;
}
}
if(Joystick1.getRawButton(5) == true) {
if(Righty == true) {
RightEye.set(Relay.Value.kOff);
Righty = false;
} else {
RightEye.set(Relay.Value.kOn);
Righty = true;
}
}
if(Joystick1.getRawButton(4) == true) {
if(Lefty == true) {
LeftEye.set(Relay.Value.kOff);
Lefty = false;
} else {
LeftEye.set(Relay.Value.kOn);
Lefty = true;
}
}
System.out.println(Righty);
System.out.println(Lefty);
driveRobot.tankDrive(Joystick1.getY() * -1.0, Joystick2.getY() * -1.0);
double potato = SPOTY.getVoltage();
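            // "potato" is the shooter-angle potentiometer voltage; each preset button
            // below runs AngleMotor until the voltage sits inside a narrow band around
            // its target (hang, 15, 19, 21 and 53 degrees).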
//System.out.println("Spot:" + potato);
//Hang
if(Joystick3.getRawButton(7) == true) {
if((potato < 4.83) == true) {
AngleMotor.set(1);
Message = "Angle is changing.";
}
else if((potato > 4.85) == true) {
AngleMotor.set(-1);
Message = "Angle is changing.";
}
else {
AngleMotor.set(0);
Angle = 0;
Message = "Can't go any lower";
}
}
//15 degree
if(Joystick3.getRawButton(8) == true) {
if((potato < 5.62) == true) {
AngleMotor.set(1);
Message = "Angle is changing";
}
else if((potato > 5.64) == true) {
AngleMotor.set(-1);
Message = "Angle is changing";
}
else {
AngleMotor.set(0);
Message = "Angle is not changing";
Angle = 15;
}
}
//19 degree
if(Joystick3.getRawButton(9) == true) {
if((potato < 5.97) == true) {
AngleMotor.set(1);
Message = "Angle is changing";
}
else if((potato > 5.99) == true) {
AngleMotor.set(-1);
Message = "Angle is changing";
}
else {
AngleMotor.set(0);
Message = "Angle is not changing";
Angle = 19;
}
}
//21 degree
if(Joystick3.getRawButton(10) == true) {
if((potato < 6.04) == true) {
AngleMotor.set(1);
Message = "Angle is changing.";
}
else if((potato > 6.06) == true) {
AngleMotor.set(-1);
Message = "Angle is changing.";
}
else {
AngleMotor.set(0);
Message = "Angle is not changing.";
Angle = 21;
}
}
//53 degree
if(Joystick3.getRawButton(11) == true) {
if((potato < 7.66) == true) {
AngleMotor.set(1);
Message = "Angle is changing.";
}
else if((potato > 7.68) == true) {
AngleMotor.set(-1);
Message = "Angle is changing.";
}
else {
AngleMotor.set(0);
Angle = 53;
Message = "Angle is not changing.";
}
}
//Angle Movement Controls
if(Joystick2.getRawButton(3) == true){
if((potato < 7.68) == true) {
AngleMotor.set(1.0);
Message = "Increasing Angle";
}
else {
AngleMotor.set(0);
Message = "Can't go higher";
}
}
if(Joystick2.getRawButton(2) == true) {
if(potato > 4.83) {
AngleMotor.set(-1);
Message = "Decreasing Angle";
} else {
AngleMotor.set(0.0);
Message = "Can't go any lower";
}
}
if(Joystick2.getRawButton(3) == false && Joystick2.getRawButton(2) == false && Joystick3.getRawButton(10) == false && Joystick3.getRawButton(8) == false && Joystick3.getRawButton(7) == false && Joystick3.getRawButton(11) == false && Joystick3.getRawButton(9) == false){
AngleMotor.set(0);
}
//Infinite Shooter
while(Joystick3.getRawButton(1) == true){
Shooter1.set(-1);
Shooter2.set(-1);
Timer.delay(1.7);
FrisbeeLauncher.set(true);
Timer.delay(1);
FrisbeeLauncher.set(false);
Timer.delay(1.5);
FrisbeeLauncher.set(true);
Timer.delay(1);
FrisbeeLauncher.set(false);
Timer.delay(1);
Shooter1.set(0);
Shooter2.set(0);
}
/*
* AngleLift = Joystick3.getY();
* if (AngleLift >= .1) {
* AngleLift = 1.0;
* }
* if (AngleLift <= -.1) {
* AngleLift = -1.0;
* }
* if (AngleLift < .1 && AngleLift > -.1) {
* AngleLift = 0;
* }
* AngleMotor.set(AngleLift);
*/
//Motor Speed Mechanism
if(Joystick3.getRawButton(3) == true) {
Shooter1.set(-1.0);
Shooter2.set(-1.0);
}
if(Joystick3.getRawButton(4) == true) {
Shooter1.set(0.0);
Shooter2.set(0.0);
}
if(Joystick3.getRawButton(6) == true) {
Shooter1.set(1.0);
Shooter2.set(1.0);
}
if(Joystick3.getRawButton(5) == true){
Shooter1.set(-0.5);
Shooter2.set(-0.5);
}
            //Flipper Fingers
if(Joystick2.getRawButton(10) == true) {
FrisbeeLifter.set(false);
}
if(Joystick2.getRawButton(11) == true) {
FrisbeeLifter.set(true);
}
//Alternative Shooter
/*if(Joystick3.getRawButton(7) == true){
Shooter1.set(-1);
Shooter2.set(-1);
Timer.delay(1.7);
FrisbeeLauncher.set(true);
Timer.delay(1);
FrisbeeLauncher.set(false);
Shooter1.set(0);
Shooter2.set(0);
}
*/
//Conveyor
if(Joystick2.getRawButton(8) == true) {
Conveyor1.set(1.0);
}
if(Joystick2.getRawButton(9) == true) {
Conveyor1.set(-1.0);
}
if(Joystick2.getRawButton(8) == false && Joystick2.getRawButton(9) == false) {
Conveyor1.set(0.0);
}
//Shooter Vertical Movement
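            // Note: Up and Down are initialised to false and never reassigned, so
            // button 3 always extends VerticalMovement and button 2 always sets
            // VerticalDown; the inner if/else branches are effectively constant.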
if(Joystick1.getRawButton(3) == true) {
VerticalMovement.set(true);
if(Up == true) {
VerticalMovement.set(false);
} else {
VerticalMovement.set(true);
}
}
if(Joystick1.getRawButton(2) == true) {
VerticalMovement.set(false);
if(Down == true) {
VerticalDown.set(false);
} else {
VerticalDown.set(true);
}
}
//Launcher
if(Joystick3.getRawButton(2) == true){
FrisbeeLauncher.set(true);
} else {
FrisbeeLauncher.set(false);
}
//One Button Shooter Maximum
/*
* if(Joystick2.getRawButton(11) == true) {
* Shooter1.set(-1);
* Shooter2.set(-1);
* Timer.delay(2.5);
* FrisbeeLifter.set(true);
* Timer.delay(1);
* FrisbeeLauncher.set(true);
* Timer.delay(1);
* FrisbeeLauncher.set(false);
* Timer.delay(1);
* FrisbeeLifter.set(false);
* Shooter1.set(0);
* Shooter2.set(0);
* }
*
* //One Button Shooter Medium
* if(Joystick2.getRawButton(10) == true) {
* Shooter1.set(-1);
* Shooter2.set(-1);
* Timer.delay(1);
* FrisbeeLifter.set(true);
* Timer.delay(1);
* FrisbeeLauncher.set(true);
* Timer.delay(1);
* FrisbeeLauncher.set(false);
* Timer.delay(1);
* FrisbeeLifter.set(false);
* Shooter1.set(0);
* Shooter2.set(0);
* }
*
* //One Button Shooter Low
* if(Joystick2.getRawButton(8) == true) {
* Shooter1.set(-1);
* Shooter2.set(-1);
* Timer.delay(1);
* FrisbeeLifter.set(true);
* Timer.delay(1);
* FrisbeeLauncher.set(true);
* Timer.delay(1);
* FrisbeeLauncher.set(false);
* Timer.delay(1);
* FrisbeeLifter.set(false);
* Shooter1.set(0);
* Shooter2.set(0);
* }
*/
SmartDashboard.putBoolean("Are LEDs on?", LED);
SmartDashboard.putNumber("Angle: ", Angle);
SmartDashboard.putString("Messages:", Message);
}
Compressor.stop();
}
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="Test">
    public void test() {
    }
    //</editor-fold>
}
|
|
package uk.ac.york.minesweeper;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.RenderingHints;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import javax.swing.JComponent;
import javax.swing.SwingUtilities;
/**
* A component which can display a minefield graphically and handle various events
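 *
 * <p>Minimal usage sketch ({@code minefield}, {@code frame} and {@code myListener}
 * are illustrative locals, not part of this class):
 * <pre>
 *   MinefieldPanel panel = new MinefieldPanel(minefield);
 *   panel.addStateChangeListener(myListener);   // optional: react to game state changes
 *   frame.add(panel);                           // the panel sizes itself from the minefield
 * </pre>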
*/
public class MinefieldPanel extends JComponent
{
private static final long serialVersionUID = 1L;
/** Size of all the tiles */
private static final int TILE_SIZE = 32;
/** Width of the bevel */
private static final int BEVEL_WIDTH = 2;
/** Font vertical offset (from top to BASELINE) */
private static final int FONT_VOFFSET = 24;
/** The font to draw numbers with */
private static final Font FONT = new Font(Font.MONOSPACED, Font.BOLD, 24);
/** Default background colour */
private static final Color COLOUR_BACKGROUND = new Color(0xC0, 0xC0, 0xC0);
/** Light grey for bevels */
private static final Color COLOUR_LIGHT = new Color(0xE0, 0xE0, 0xE0);
/** Dark grey for bevels */
private static final Color COLOUR_DARK = new Color(0x80, 0x80, 0x80);
/** Colour of question marks */
private static final Color COLOUR_QUESTION = Color.WHITE;
/** The colours of the numbers (0 is unused) */
private static final Color[] COLOUR_NUMBERS = new Color[]
{
null, // 0 = Unused
new Color(0x00, 0x00, 0xFF), // 1 = Blue
new Color(0x00, 0x7F, 0x00), // 2 = Green
new Color(0xFF, 0x00, 0x00), // 3 = Red
new Color(0x2F, 0x2F, 0x9F), // 4 = Dark Blue
new Color(0x7F, 0x00, 0x00), // 5 = Maroon
new Color(0x9F, 0x9F, 0x2F), // 6 = Turquoise
new Color(0x00, 0x00, 0x00), // 7 = Black
new Color(0x7F, 0x7F, 0x7F), // 8 = Grey
};
/** Current minefield */
private Minefield minefield;
/** Currently selected tile (null most of the time) */
private Point selectedTile;
/** List of state change listeners */
private ArrayList<MinefieldStateChangeListener> listeners = new ArrayList<MinefieldStateChangeListener>();
/**
* Initializes a new MinefieldPanel with the given Minefield
*
* There must always be a minefield to display (you cannot pass null)
*
* @param minefield minefield to display
*/
public MinefieldPanel(Minefield minefield)
{
this.addMouseListener(new MouseEventListener());
this.setBackground(COLOUR_BACKGROUND);
this.setOpaque(true);
this.setFont(FONT);
this.setMinefield(minefield);
}
/**
 * Adds a listener which will receive game state change events
*
* @param listener listener to add
*/
public void addStateChangeListener(MinefieldStateChangeListener listener)
{
if (!listeners.contains(listener))
listeners.add(listener);
}
/**
 * Removes a listener which receives game state change events
*
* @param listener listener to remove
*/
public void removeStateChangeListener(MinefieldStateChangeListener listener)
{
listeners.remove(listener);
}
/**
* Fires the state changed event
*/
private void fireStateChangeEvent()
{
MinefieldStateChangeEvent event = new MinefieldStateChangeEvent(this);
for (MinefieldStateChangeListener listener : listeners)
listener.stateChanged(event);
}
/**
* Gets the current minefield
*
* @return current minefield
*/
public Minefield getMinefield()
{
return minefield;
}
/**
* Sets a new minefield for the component
*
* @param newMinefield the new minefield
*/
public void setMinefield(Minefield newMinefield)
{
if (newMinefield == null)
throw new IllegalArgumentException("newMinefield cannot be null");
this.minefield = newMinefield;
// Reset selected tile
this.selectedTile = null;
// Update all visuals
this.setSize(getPreferredSize());
this.repaint();
// Fire event
this.fireStateChangeEvent();
}
/**
* Draws a character on a tile
*
* @param g graphics object
* @param x x position of top-left of tile
* @param y y position of top-left of tile
* @param c character to draw
*/
private static void drawCharacter(Graphics g, int x, int y, char c)
{
// Get coordinates to draw at
int drawX = x + (TILE_SIZE - g.getFontMetrics().charWidth(c)) / 2;
int drawY = y + FONT_VOFFSET;
// Draw the character
g.drawChars(new char[] { c }, 0, 1, drawX, drawY);
}
/**
* Draws an image at the given tile location
*
* @param g graphics object
 * @param tileX x position of top-left of tile
 * @param tileY y position of top-left of tile
* @param img image to draw
*/
private static void drawImage(Graphics g, int tileX, int tileY, BufferedImage img)
{
int xOff = tileX + (TILE_SIZE - img.getWidth()) / 2;
int yOff = tileY + (TILE_SIZE - img.getHeight()) / 2;
g.drawImage(img, xOff, yOff, null);
}
@Override
public void paintComponent(Graphics gOld)
{
Graphics2D g = (Graphics2D) gOld;
// Get selected tile position
int selectedX = (selectedTile == null ? -1 : selectedTile.x);
int selectedY = (selectedTile == null ? -1 : selectedTile.y);
// Make the numbers look a little nicer
g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
// Draw background
if (isOpaque())
{
g.setColor(getBackground());
g.fillRect(0, 0, getWidth(), getHeight());
}
// Draw all the tiles
for (int x = 0; x < minefield.getWidth(); x++)
{
for (int y = 0; y < minefield.getHeight(); y++)
{
int graphicsX1 = x * TILE_SIZE;
int graphicsY1 = y * TILE_SIZE;
// Draw standard background
g.setColor(COLOUR_DARK);
g.drawLine(graphicsX1, graphicsY1, graphicsX1 + TILE_SIZE, graphicsY1);
g.drawLine(graphicsX1, graphicsY1, graphicsX1, graphicsY1 + TILE_SIZE);
// Covered or uncovered?
if (minefield.getTileState(x, y) == TileState.UNCOVERED)
{
// Draw the correct symbol
int tileValue = minefield.getTileValue(x, y);
if (tileValue < 0)
{
drawImage(g, graphicsX1, graphicsY1, Images.MINE);
}
else if (tileValue > 0)
{
g.setColor(COLOUR_NUMBERS[tileValue]);
drawCharacter(g, graphicsX1, graphicsY1, (char) ('0' + tileValue));
}
}
else
{
// Only draw the bevel background if this is NOT the selected tile
if (x != selectedX || y != selectedY)
{
int bevelX2 = graphicsX1 + TILE_SIZE - BEVEL_WIDTH;
int bevelY2 = graphicsY1 + TILE_SIZE - BEVEL_WIDTH;
g.setColor(COLOUR_LIGHT);
g.fillRect(graphicsX1, graphicsY1, TILE_SIZE, BEVEL_WIDTH);
g.fillRect(graphicsX1, graphicsY1, BEVEL_WIDTH, TILE_SIZE);
g.setColor(COLOUR_DARK);
g.fillRect(graphicsX1, bevelY2, TILE_SIZE, BEVEL_WIDTH);
g.fillRect(bevelX2, graphicsY1, BEVEL_WIDTH, TILE_SIZE);
}
// Draw flag or question mark if needed
if (minefield.getTileState(x, y) == TileState.FLAGGED)
{
drawImage(g, graphicsX1, graphicsY1, Images.FLAG);
}
else if (minefield.getTileState(x, y) == TileState.QUESTION)
{
g.setColor(COLOUR_QUESTION);
drawCharacter(g, graphicsX1, graphicsY1, '?');
}
}
}
}
}
@Override
public Dimension getPreferredSize()
{
return new Dimension(TILE_SIZE * minefield.getWidth(),
TILE_SIZE * minefield.getHeight());
}
@Override
public Dimension getMaximumSize()
{
return getPreferredSize();
}
@Override
public Dimension getMinimumSize()
{
return getPreferredSize();
}
/**
* Handles all mouse events within the game area
*/
private class MouseEventListener extends MouseAdapter
{
/**
* Calculates the selected tile from a mouse event
*/
private Point getTileFromEvent(MouseEvent e)
{
return new Point(e.getX() / TILE_SIZE, e.getY() / TILE_SIZE);
}
@Override
public void mouseExited(MouseEvent e)
{
// Clear selected tile
if (selectedTile != null)
{
selectedTile = null;
repaint();
}
}
@Override
public void mousePressed(MouseEvent e)
{
// Ignore if finished
if (minefield.isFinished())
return;
// Get tile position
Point tile = getTileFromEvent(e);
// Right or left click?
if (SwingUtilities.isLeftMouseButton(e))
{
// Do not select tiles with flags on
if (minefield.getTileState(tile.x, tile.y) == TileState.FLAGGED)
return;
// Set new selected tile
selectedTile = tile;
}
else if (SwingUtilities.isRightMouseButton(e))
{
TileState newState;
// Change flagged state
switch(minefield.getTileState(tile.x, tile.y))
{
                case COVERED:   newState = TileState.FLAGGED;   break;
                case FLAGGED:   newState = TileState.QUESTION;  break;
                case UNCOVERED: newState = TileState.UNCOVERED; break;
                default:        newState = TileState.COVERED;   break;
}
minefield.setTileState(tile.x, tile.y, newState);
}
repaint();
}
@Override
public void mouseReleased(MouseEvent e)
{
// Ignore if finished
if (minefield.isFinished())
return;
// Ensure there was a tile selected
if (selectedTile != null)
{
// Ensure the tile was the same as the one clicked on
if (selectedTile.equals(getTileFromEvent(e)))
{
// Either chord or uncover depending on the number of clicks
GameState state = minefield.getGameState();
if (e.getClickCount() == 2)
minefield.chord(selectedTile.x, selectedTile.y);
else if (e.getClickCount() == 1)
minefield.uncover(selectedTile.x, selectedTile.y);
// Fire state changed event if needed
if (minefield.getGameState() != state)
fireStateChangeEvent();
}
// Clear selected tile
selectedTile = null;
repaint();
}
}
}
}
|
|
package com.jivesoftware.os.amza.ui.region;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.jivesoftware.os.amza.api.partition.PartitionName;
import com.jivesoftware.os.amza.api.partition.VersionedPartitionName;
import com.jivesoftware.os.amza.api.ring.RingMemberAndHost;
import com.jivesoftware.os.amza.service.AmzaRingStoreReader;
import com.jivesoftware.os.amza.service.replication.AmzaAquariumProvider;
import com.jivesoftware.os.amza.service.ring.AmzaRingReader;
import com.jivesoftware.os.amza.service.ring.RingTopology;
import com.jivesoftware.os.amza.ui.region.AquariumPluginRegion.AquariumPluginRegionInput;
import com.jivesoftware.os.amza.ui.soy.SoyRenderer;
import com.jivesoftware.os.aquarium.AquariumStats;
import com.jivesoftware.os.aquarium.Liveliness;
import com.jivesoftware.os.aquarium.Member;
import com.jivesoftware.os.aquarium.State;
import com.jivesoftware.os.aquarium.Waterline;
import com.jivesoftware.os.lab.guts.LABSparseCircularMetricBuffer;
import com.jivesoftware.os.mlogger.core.MetricLogger;
import com.jivesoftware.os.mlogger.core.MetricLoggerFactory;
import java.awt.Color;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.LongAdder;
/**
*
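 * Renders the "Aquarium" status page: per-state current/desired waveforms,
 * liveliness of ring members, and per-member current/desired waterlines for a
 * selected partition.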
*/
// soy.page.aquariumPluginRegion
public class AquariumPluginRegion implements PageRegion<AquariumPluginRegionInput> {
private static final MetricLogger LOG = MetricLoggerFactory.getLogger();
private final NumberFormat numberFormat = NumberFormat.getInstance();
private final String template;
private final SoyRenderer renderer;
private final AmzaRingStoreReader ringReader;
private final AmzaAquariumProvider aquariumProvider;
private final Liveliness liveliness;
private final AquariumStats aquariumStats;
public final EnumMap<State, LABSparseCircularMetricBuffer> currentStateMetricBuffer = new EnumMap<>(State.class);
public final EnumMap<State, LABSparseCircularMetricBuffer> desiredStateMetricBuffer = new EnumMap<>(State.class);
public AquariumPluginRegion(String template,
SoyRenderer renderer,
AmzaRingStoreReader ringReader,
AmzaAquariumProvider aquariumProvider,
Liveliness liveliness,
AquariumStats aquariumStats) {
this.template = template;
this.renderer = renderer;
this.ringReader = ringReader;
this.aquariumProvider = aquariumProvider;
this.liveliness = liveliness;
this.aquariumStats = aquariumStats;
for (State state : State.values()) {
currentStateMetricBuffer.put(state, new LABSparseCircularMetricBuffer(120, 0, 1_000));
desiredStateMetricBuffer.put(state, new LABSparseCircularMetricBuffer(120, 0, 1_000));
}
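        // Sample the aquarium state counters every 500 ms so the waveform buffers
        // above always hold recent data when the page is rendered.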
Executors.newSingleThreadScheduledExecutor().scheduleAtFixedRate(() -> {
try {
refresh();
} catch (Exception x) {
LOG.warn("Refresh aquarium stats failed", x);
}
}, 0, 500, TimeUnit.MILLISECONDS);
}
public void refresh() {
long timestamp = System.currentTimeMillis();
for (Entry<State, LongAdder> e : aquariumStats.currentState.entrySet()) {
currentStateMetricBuffer.get(e.getKey()).set(timestamp, e.getValue());
}
for (Entry<State, LongAdder> e : aquariumStats.desiredState.entrySet()) {
desiredStateMetricBuffer.get(e.getKey()).set(timestamp, e.getValue());
}
}
public static class AquariumPluginRegionInput {
final String ringName;
final String partitionName;
final String hexPartitionVersion;
public AquariumPluginRegionInput(String ringName, String partitionName, String hexPartitionVersion) {
this.ringName = ringName;
this.partitionName = partitionName;
this.hexPartitionVersion = hexPartitionVersion;
}
}
@Override
public String render(AquariumPluginRegionInput input) {
Map<String, Object> data = Maps.newHashMap();
try {
data.put("ringName", input.ringName);
data.put("partitionName", input.partitionName);
data.put("partitionVersion", input.hexPartitionVersion);
List<Map<String, String>> currentStats = Lists.newArrayList();
currentStats.add(ImmutableMap.of("name", "getMyCurrentWaterline", "value", numberFormat.format(aquariumStats.getMyCurrentWaterline.longValue())));
currentStats.add(ImmutableMap.of("name", "getOthersCurrentWaterline", "value", numberFormat.format(aquariumStats.getOthersCurrentWaterline
.longValue())));
currentStats
.add(ImmutableMap.of("name", "acknowledgeCurrentOther", "value", numberFormat.format(aquariumStats.acknowledgeCurrentOther.longValue())));
List<Map<String, String>> desiredStats = Lists.newArrayList();
desiredStats.add(ImmutableMap.of("name", "getMyDesiredWaterline", "value", numberFormat.format(aquariumStats.getMyDesiredWaterline.longValue())));
desiredStats.add(ImmutableMap.of("name", "getOthersDesiredWaterline", "value", numberFormat.format(aquariumStats.getOthersDesiredWaterline
.longValue())));
desiredStats
.add(ImmutableMap.of("name", "acknowledgeDesiredOther", "value", numberFormat.format(aquariumStats.acknowledgeDesiredOther.longValue())));
Map<String, Object> stats = Maps.newHashMap();
stats.put("tapTheGlass", numberFormat.format(aquariumStats.tapTheGlass.longValue()));
stats.put("tapTheGlassNotified", numberFormat.format(aquariumStats.tapTheGlassNotified.longValue()));
stats.put("captureEndState", numberFormat.format(aquariumStats.captureEndState.longValue()));
stats.put("suggestState", numberFormat.format(aquariumStats.suggestState.longValue()));
stats.put("getLivelyEndState", numberFormat.format(aquariumStats.getLivelyEndState.longValue()));
stats.put("getStateForMember", numberFormat.format(aquariumStats.getStateForMember.longValue()));
stats.put("isLivelyStateForMember", numberFormat.format(aquariumStats.isLivelyStateForMember.longValue()));
stats.put("isLivelyEndStateForMember", numberFormat.format(aquariumStats.isLivelyEndStateForMember.longValue()));
stats.put("feedTheFish", numberFormat.format(aquariumStats.feedTheFish.longValue()));
stats.put("acknowledgeOther", numberFormat.format(aquariumStats.acknowledgeOther.longValue()));
stats.put("awaitOnline", numberFormat.format(aquariumStats.awaitOnline.longValue()));
stats.put("awaitTimedOut", numberFormat.format(aquariumStats.awaitTimedOut.longValue()));
stats.put("current", currentStats);
stats.put("desired", desiredStats);
data.put("stats", stats);
List<Map<String, Object>> wavformGroups = Lists.newArrayList();
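            // Interleave a "current-<state>" and a "desired-<state>" series per aquarium
            // state so both metric buffers end up in the same waveform group below.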
State[] aquariumStates = State.values();
String[] names = new String[aquariumStates.length * 2];
LABSparseCircularMetricBuffer[] waves = new LABSparseCircularMetricBuffer[aquariumStates.length * 2];
boolean[] fills = new boolean[aquariumStates.length * 2];
for (int i = 0; i < aquariumStates.length; i++) {
int j = i * 2;
fills[j] = false;
fills[j + 1] = false;
names[j] = "current-" + aquariumStates[i];
names[j + 1] = "desired-" + aquariumStates[i];
waves[j] = currentStateMetricBuffer.get(aquariumStates[i]);
waves[j + 1] = desiredStateMetricBuffer.get(aquariumStates[i]);
}
wavformGroups.addAll(wavformGroup("states", null, "aquarium-states", stateColors, names, waves, fills));
data.put("wavestats", wavformGroups);
long now = System.currentTimeMillis();
List<Map<String, Object>> live = new ArrayList<>();
RingTopology ring = ringReader.getRing(AmzaRingReader.SYSTEM_RING, -1);
for (RingMemberAndHost entry : ring.entries) {
long aliveUntilTimestamp = liveliness.aliveUntilTimestamp(entry.ringMember.asAquariumMember());
live.add(ImmutableMap.of(
"member", entry.ringMember.getMember(),
"host", entry.ringHost.toCanonicalString(),
"liveliness", (aliveUntilTimestamp > now) ? "alive for " + String.valueOf(aliveUntilTimestamp - now) : "dead for " + String.valueOf(
aliveUntilTimestamp - now)
));
}
data.put("liveliness", live);
byte[] ringNameBytes = input.ringName.getBytes();
byte[] partitionNameBytes = input.partitionName.getBytes();
PartitionName partitionName = (ringNameBytes.length > 0 && partitionNameBytes.length > 0)
? new PartitionName(false, ringNameBytes, partitionNameBytes) : null;
long partitionVersion = Long.parseLong(input.hexPartitionVersion, 16);
VersionedPartitionName versionedPartitionName = partitionName != null ? new VersionedPartitionName(partitionName, partitionVersion) : null;
if (versionedPartitionName != null) {
List<Map<String, Object>> states = new ArrayList<>();
aquariumProvider.tx(versionedPartitionName, (readCurrent, readDesired, writeCurrent, writeDesired) -> {
for (RingMemberAndHost entry : ring.entries) {
Member asMember = entry.ringMember.asAquariumMember();
Map<String, Object> state = new HashMap<>();
state.put("partitionName", input.partitionName);
state.put("ringName", input.ringName);
state.put("partitionVersion", input.hexPartitionVersion);
if (readCurrent != null) {
Waterline current = readCurrent.get(asMember);
if (current != null) {
state.put("current", asMap(liveliness, current));
}
}
if (readDesired != null) {
Waterline desired = readDesired.get(asMember);
if (desired != null) {
state.put("desired", asMap(liveliness, desired));
}
}
states.add(state);
}
return true;
});
data.put("aquarium", states);
}
} catch (Exception e) {
LOG.error("Unable to retrieve data", e);
}
return renderer.render(template, data);
}
private Color[] stateColors = new Color[]{
new Color(0, 0, 128),
Color.blue,
new Color(128, 0, 0),
Color.red,
new Color(128, 100, 0),
Color.orange,
new Color(0, 128, 0),
Color.green,
new Color(0, 64, 64), // teal
new Color(0, 128, 128), // teal
new Color(128, 0, 128),
Color.magenta,
Color.darkGray,
Color.gray
};
private static Map<String, Object> asMap(Liveliness liveliness, Waterline waterline) throws Exception {
State state = waterline.getState();
Map<String, Object> map = new HashMap<>();
map.put("state", state == null ? "null" : state.name());
map.put("member", new String(waterline.getMember().getMember()));
map.put("timestamp", String.valueOf(waterline.getTimestamp()));
map.put("version", String.valueOf(waterline.getVersion()));
map.put("alive", String.valueOf(liveliness.isAlive(waterline.getMember())));
map.put("quorum", waterline.isAtQuorum());
return map;
}
private List<Map<String, Object>> wavformGroup(String group, String filter, String title, Color[] colors, String[] waveName,
LABSparseCircularMetricBuffer[] waveforms,
boolean[] fill) {
if (filter != null && filter.length() > 0 && !title.contains(filter)) {
return Collections.emptyList();
}
String total = "";
List<String> ls = new ArrayList<>();
int s = 1;
for (double m : waveforms[0].metric()) {
ls.add("\"" + s + "\"");
s++;
}
List<Map<String, Object>> ws = new ArrayList<>();
for (int i = 0; i < waveName.length; i++) {
List<String> values = Lists.newArrayList();
double[] metric = waveforms[i].metric();
boolean nonZero = false;
for (double m : metric) {
if (m < 0.0d || m > 0.0d) {
nonZero = true;
}
values.add("\"" + String.valueOf(m) + "\"");
}
if (!nonZero) {
continue;
}
ws.add(waveform(waveName[i], new Color[]{colors[i]}, 1f, values, fill[i], false));
                if (total.length() > 0) {
                    total += ", ";
                }
Color c = colors[i];
int r = c.getRed();
int g = c.getGreen();
int b = c.getBlue();
String colorDiv = "<div style=\"display:inline-block; width:10px; height:10px; background:rgb(" + r + "," + g + "," + b + ");\"></div>";
total += colorDiv + waveName[i] + "=" + numberFormat.format(waveforms[i].total());
}
List<Map<String, Object>> listOfwaveformGroups = Lists.newArrayList();
List<Map<String, Object>> ows = new ArrayList<>();
List<String> ols = new ArrayList<>();
List<String> ovalues = Lists.newArrayList();
Color[] ocolors = new Color[waveforms.length];
for (int i = 0; i < waveforms.length; i++) {
ovalues.add("\"" + String.valueOf(waveforms[i].total()) + "\"");
ols.add("\"" + waveName[i] + "\"");
ocolors[i] = colors[i];
}
ows.add(waveform(title + "-overview", ocolors, 1f, ovalues, true, false));
Map<String, Object> overViewMap = new HashMap<>();
overViewMap.put("group", group);
overViewMap.put("filter", title);
overViewMap.put("title", title + "-overview");
overViewMap.put("total", "");
overViewMap.put("height", String.valueOf(150));
overViewMap.put("width", String.valueOf(ls.size() * 10));
overViewMap.put("id", title + "-overview");
overViewMap.put("graphType", "bar");
overViewMap.put("waveform", ImmutableMap.of("labels", ols, "datasets", ows));
listOfwaveformGroups.add(overViewMap);
Map<String, Object> map = new HashMap<>();
map.put("group", group);
map.put("filter", title);
map.put("title", title);
map.put("total", total);
if (filter != null && filter.length() > 0) {
map.put("height", String.valueOf(800));
} else {
map.put("height", String.valueOf(300));
}
map.put("width", String.valueOf(ls.size() * 10));
map.put("id", title);
map.put("graphType", "line");
map.put("waveform", ImmutableMap.of("labels", ls, "datasets", ws));
listOfwaveformGroups.add(map);
return listOfwaveformGroups;
}
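    /**
     * Builds the dataset map for one named waveform: label, colours, point and
     * line styling, and the string-encoded values to plot (the keys appear to
     * follow Chart.js dataset option names).
     */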
public Map<String, Object> waveform(String label, Color[] color, float alpha, List<String> values, boolean fill, boolean stepped) {
Map<String, Object> waveform = new HashMap<>();
waveform.put("label", "\"" + label + "\"");
Object c = "\"rgba(" + color[0].getRed() + "," + color[0].getGreen() + "," + color[0].getBlue() + "," + String.valueOf(alpha) + ")\"";
if (color.length > 1) {
List<String> colorStrings = Lists.newArrayList();
for (int i = 0; i < color.length; i++) {
colorStrings.add("\"rgba(" + color[i].getRed() + "," + color[i].getGreen() + "," + color[i].getBlue() + "," + String.valueOf(alpha) + ")\"");
}
c = colorStrings;
}
waveform.put("fill", fill);
waveform.put("steppedLine", stepped);
waveform.put("lineTension", "0.1");
waveform.put("backgroundColor", c);
waveform.put("borderColor", c);
waveform.put("borderCapStyle", "'butt'");
waveform.put("borderDash", "[]");
waveform.put("borderDashOffset", 0.0);
waveform.put("borderJoinStyle", "'miter'");
waveform.put("pointBorderColor", c);
waveform.put("pointBackgroundColor", "\"#fff\"");
waveform.put("pointBorderWidth", 1);
waveform.put("pointHoverRadius", 5);
waveform.put("pointHoverBackgroundColor", c);
waveform.put("pointHoverBorderColor", c);
waveform.put("pointHoverBorderWidth", 2);
waveform.put("pointRadius", 1);
waveform.put("pointHitRadius", 10);
waveform.put("spanGaps", false);
waveform.put("data", values);
return waveform;
}
@Override
public String getTitle() {
return "Aquarium";
}
}
|
|
package org.apache.lucene.codecs.lucene40;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.TermVectorsWriter;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.StringHelper;
import static org.apache.lucene.codecs.lucene40.Lucene40TermVectorsReader.*;
/**
* Writer for 4.0 term vectors format for testing
* @deprecated for test purposes only
*/
@Deprecated
final class Lucene40TermVectorsWriter extends TermVectorsWriter {
private final Directory directory;
private final String segment;
private IndexOutput tvx = null, tvd = null, tvf = null;
/** Sole constructor. */
public Lucene40TermVectorsWriter(Directory directory, String segment, IOContext context) throws IOException {
this.directory = directory;
this.segment = segment;
boolean success = false;
try {
// Open files for TermVector storage
tvx = directory.createOutput(IndexFileNames.segmentFileName(segment, "", Lucene40TermVectorsReader.VECTORS_INDEX_EXTENSION), context);
CodecUtil.writeHeader(tvx, CODEC_NAME_INDEX, VERSION_CURRENT);
tvd = directory.createOutput(IndexFileNames.segmentFileName(segment, "", Lucene40TermVectorsReader.VECTORS_DOCUMENTS_EXTENSION), context);
CodecUtil.writeHeader(tvd, CODEC_NAME_DOCS, VERSION_CURRENT);
tvf = directory.createOutput(IndexFileNames.segmentFileName(segment, "", Lucene40TermVectorsReader.VECTORS_FIELDS_EXTENSION), context);
CodecUtil.writeHeader(tvf, CODEC_NAME_FIELDS, VERSION_CURRENT);
assert HEADER_LENGTH_INDEX == tvx.getFilePointer();
assert HEADER_LENGTH_DOCS == tvd.getFilePointer();
assert HEADER_LENGTH_FIELDS == tvf.getFilePointer();
success = true;
} finally {
if (!success) {
IOUtils.closeWhileHandlingException(this);
}
}
}
@Override
public void startDocument(int numVectorFields) throws IOException {
lastFieldName = null;
this.numVectorFields = numVectorFields;
tvx.writeLong(tvd.getFilePointer());
tvx.writeLong(tvf.getFilePointer());
tvd.writeVInt(numVectorFields);
fieldCount = 0;
fps = ArrayUtil.grow(fps, numVectorFields);
}
private long fps[] = new long[10]; // pointers to the tvf before writing each field
private int fieldCount = 0; // number of fields we have written so far for this document
private int numVectorFields = 0; // total number of fields we will write for this document
private String lastFieldName;
@Override
public void startField(FieldInfo info, int numTerms, boolean positions, boolean offsets, boolean payloads) throws IOException {
assert lastFieldName == null || info.name.compareTo(lastFieldName) > 0: "fieldName=" + info.name + " lastFieldName=" + lastFieldName;
lastFieldName = info.name;
this.positions = positions;
this.offsets = offsets;
this.payloads = payloads;
lastTerm.clear();
lastPayloadLength = -1; // force first payload to write its length
fps[fieldCount++] = tvf.getFilePointer();
tvd.writeVInt(info.number);
tvf.writeVInt(numTerms);
byte bits = 0x0;
if (positions)
bits |= Lucene40TermVectorsReader.STORE_POSITIONS_WITH_TERMVECTOR;
if (offsets)
bits |= Lucene40TermVectorsReader.STORE_OFFSET_WITH_TERMVECTOR;
if (payloads)
bits |= Lucene40TermVectorsReader.STORE_PAYLOAD_WITH_TERMVECTOR;
tvf.writeByte(bits);
}
@Override
public void finishDocument() throws IOException {
assert fieldCount == numVectorFields;
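        // Only the gaps between consecutive per-field tvf pointers go into tvd; the
        // first field's tvf position was already recorded in tvx by startDocument().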
for (int i = 1; i < fieldCount; i++) {
tvd.writeVLong(fps[i] - fps[i-1]);
}
}
private final BytesRefBuilder lastTerm = new BytesRefBuilder();
// NOTE: we override addProx, so we don't need to buffer when indexing.
// we also don't buffer during bulk merges.
private int offsetStartBuffer[] = new int[10];
private int offsetEndBuffer[] = new int[10];
private BytesRefBuilder payloadData = new BytesRefBuilder();
private int bufferedIndex = 0;
private int bufferedFreq = 0;
private boolean positions = false;
private boolean offsets = false;
private boolean payloads = false;
@Override
public void startTerm(BytesRef term, int freq) throws IOException {
final int prefix = StringHelper.bytesDifference(lastTerm.get(), term);
final int suffix = term.length - prefix;
tvf.writeVInt(prefix);
tvf.writeVInt(suffix);
tvf.writeBytes(term.bytes, term.offset + prefix, suffix);
tvf.writeVInt(freq);
lastTerm.copyBytes(term);
lastPosition = lastOffset = 0;
if (offsets && positions) {
// we might need to buffer if its a non-bulk merge
offsetStartBuffer = ArrayUtil.grow(offsetStartBuffer, freq);
offsetEndBuffer = ArrayUtil.grow(offsetEndBuffer, freq);
}
bufferedIndex = 0;
bufferedFreq = freq;
payloadData.clear();
}
int lastPosition = 0;
int lastOffset = 0;
int lastPayloadLength = -1; // force first payload to write its length
BytesRefBuilder scratch = new BytesRefBuilder(); // used only by this optimized flush below
@Override
public void addProx(int numProx, DataInput positions, DataInput offsets) throws IOException {
if (payloads) {
// TODO, maybe overkill and just call super.addProx() in this case?
// we do avoid buffering the offsets in RAM though.
for (int i = 0; i < numProx; i++) {
int code = positions.readVInt();
if ((code & 1) == 1) {
int length = positions.readVInt();
scratch.grow(length);
scratch.setLength(length);
positions.readBytes(scratch.bytes(), 0, scratch.length());
writePosition(code >>> 1, scratch.get());
} else {
writePosition(code >>> 1, null);
}
}
tvf.writeBytes(payloadData.bytes(), 0, payloadData.length());
} else if (positions != null) {
// pure positions, no payloads
for (int i = 0; i < numProx; i++) {
tvf.writeVInt(positions.readVInt() >>> 1);
}
}
if (offsets != null) {
for (int i = 0; i < numProx; i++) {
tvf.writeVInt(offsets.readVInt());
tvf.writeVInt(offsets.readVInt());
}
}
}
@Override
public void addPosition(int position, int startOffset, int endOffset, BytesRef payload) throws IOException {
if (positions && (offsets || payloads)) {
// write position delta
writePosition(position - lastPosition, payload);
lastPosition = position;
// buffer offsets
if (offsets) {
offsetStartBuffer[bufferedIndex] = startOffset;
offsetEndBuffer[bufferedIndex] = endOffset;
}
bufferedIndex++;
} else if (positions) {
// write position delta
writePosition(position - lastPosition, payload);
lastPosition = position;
} else if (offsets) {
// write offset deltas
tvf.writeVInt(startOffset - lastOffset);
tvf.writeVInt(endOffset - startOffset);
lastOffset = endOffset;
}
}
@Override
public void finishTerm() throws IOException {
if (bufferedIndex > 0) {
// dump buffer
assert positions && (offsets || payloads);
assert bufferedIndex == bufferedFreq;
if (payloads) {
tvf.writeBytes(payloadData.bytes(), 0, payloadData.length());
}
if (offsets) {
for (int i = 0; i < bufferedIndex; i++) {
tvf.writeVInt(offsetStartBuffer[i] - lastOffset);
tvf.writeVInt(offsetEndBuffer[i] - offsetStartBuffer[i]);
lastOffset = offsetEndBuffer[i];
}
}
}
}
private void writePosition(int delta, BytesRef payload) throws IOException {
if (payloads) {
int payloadLength = payload == null ? 0 : payload.length;
if (payloadLength != lastPayloadLength) {
lastPayloadLength = payloadLength;
tvf.writeVInt((delta<<1)|1);
tvf.writeVInt(payloadLength);
} else {
tvf.writeVInt(delta << 1);
}
if (payloadLength > 0) {
if (payloadLength + payloadData.length() < 0) {
// we overflowed the payload buffer, just throw UOE
// having > Integer.MAX_VALUE bytes of payload for a single term in a single doc is nuts.
throw new UnsupportedOperationException("A term cannot have more than Integer.MAX_VALUE bytes of payload data in a single document");
}
payloadData.append(payload);
}
} else {
tvf.writeVInt(delta);
}
}
@Override
public void finish(FieldInfos fis, int numDocs) {
long indexFP = tvx.getFilePointer();
if (HEADER_LENGTH_INDEX+((long) numDocs)*16 != indexFP)
// This is most likely a bug in Sun JRE 1.6.0_04/_05;
// we detect that the bug has struck, here, and
// throw an exception to prevent the corruption from
// entering the index. See LUCENE-1282 for
// details.
throw new RuntimeException("tvx size mismatch: mergedDocs is " + numDocs + " but tvx size is " + indexFP + " (wrote numDocs=" + ((indexFP - HEADER_LENGTH_INDEX)/16.0) + " file=" + tvx.toString() + "; now aborting this merge to prevent index corruption");
}
/** Close all streams. */
@Override
public void close() throws IOException {
// make an effort to close all streams we can but remember and re-throw
// the first exception encountered in this process
IOUtils.close(tvx, tvd, tvf);
tvx = tvd = tvf = null;
}
}
|
|
package org.gestern.gringotts.data;
import org.bukkit.Bukkit;
import org.bukkit.Location;
import org.bukkit.World;
import org.bukkit.block.Block;
import org.bukkit.block.Sign;
import org.gestern.gringotts.*;
import org.gestern.gringotts.accountholder.AccountHolder;
import java.sql.*;
import java.util.LinkedList;
import java.util.List;
import java.util.logging.Logger;
import static org.gestern.gringotts.Configuration.CONF;
/**
* The Data Access Object provides access to the datastore.
* This implementation uses the Apache Derby embedded DB.
*
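 * <p>Minimal usage sketch ({@code account} stands for an existing
 * {@link GringottsAccount}; only methods declared in this class are used):
 * <pre>
 *   DerbyDAO dao = DerbyDAO.getDao();            // null if the embedded Derby driver is missing
 *   if (dao != null) {
 *       long cents = dao.getCents(account);      // stored balance in cents
 *       dao.storeCents(account, cents + 100);    // write an updated balance
 *   }
 * </pre>
 *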
* @author jast
*
*/
public class DerbyDAO implements DAO {
/** Singleton DAO instance. */
private static DerbyDAO dao;
private final Logger log = Gringotts.G.getLogger();
private final Driver driver;
private Connection connection;
private PreparedStatement
storeAccountChest, destroyAccountChest,
storeAccount, getAccount, getChests,
getChestsForAccount, getCents, storeCents;
private static final String dbName = "GringottsDB";
/** Full connection string for database, without connect options. */
private final String dbString;
private DerbyDAO() {
String dbPath = Gringotts.G.getDataFolder().getAbsolutePath();
dbString = "jdbc:derby:" + dbPath+"/"+dbName;
String connectString = dbString + ";create=true";
try {
driver = DriverManager.getDriver(connectString);
connection = driver.connect(connectString, null);
checkConnection();
setupDB(connection);
prepareStatements();
log.fine("DAO setup successfully.");
} catch (SQLException e) {
throw new GringottsStorageException("Failed to initialize database connection.", e);
}
}
/**
* Configure DB for use with gringotts, if it isn't already.
* @param connection Connection to the db
* @throws SQLException
*/
private void setupDB(Connection connection) throws SQLException {
// create tables only if they don't already exist. use metadata to determine this.
DatabaseMetaData dbmd = connection.getMetaData();
ResultSet rs1 = dbmd.getTables(null, null, "ACCOUNT", null);
if(!rs1.next()) {
String createAccount =
"create table account (" +
"id INTEGER NOT NULL GENERATED ALWAYS AS IDENTITY (START WITH 1, INCREMENT BY 1), " +
"type varchar(64), owner varchar(64), cents int not null, " +
"primary key (id), constraint unique_type_owner unique(type, owner))";
int updated = connection.createStatement().executeUpdate(createAccount);
if (updated > 0)
log.info("created table ACCOUNT");
}
ResultSet rs2 = dbmd.getTables(null, null, "ACCOUNTCHEST", null);
if(!rs2.next()) {
String createAccountChest =
"create table accountchest (" +
"id INTEGER NOT NULL GENERATED ALWAYS AS IDENTITY (START WITH 1, INCREMENT BY 1)," +
"world varchar(64), x integer, y integer, z integer, account integer not null, " +
"primary key(id), constraint unique_location unique(world,x,y,z), constraint fk_account foreign key(account) references account(id))";
int updated = connection.createStatement().executeUpdate(createAccountChest);
if (updated > 0)
log.info("created table ACCOUNTCHEST");
}
}
/**
* Prepare sql statements for use in DAO.
*
* @throws SQLException
*/
private void prepareStatements() throws SQLException {
storeAccountChest = connection.prepareStatement(
"insert into accountchest (world,x,y,z,account) values (?, ?, ?, ?, (select id from account where owner=? and type=?))");
destroyAccountChest = connection.prepareStatement(
"delete from accountchest where world = ? and x = ? and y = ? and z = ?");
storeAccount = connection.prepareStatement(
"insert into account (type, owner, cents) values (?,?,?)");
getAccount = connection.prepareStatement(
"select * from account where owner = ? and type = ?");
getChests = connection.prepareStatement(
"SELECT ac.world, ac.x, ac.y, ac.z, a.type, a.owner " +
"FROM accountchest ac JOIN account a ON ac.account = a.id ");
getChestsForAccount = connection.prepareStatement(
"SELECT ac.world, ac.x, ac.y, ac.z " +
"FROM accountchest ac JOIN account a ON ac.account = a.id " +
"WHERE a.owner = ? and a.type = ?");
getCents = connection.prepareStatement(
"SELECT cents FROM account WHERE owner = ? and type = ?");
storeCents = connection.prepareStatement(
"UPDATE account SET cents = ? WHERE owner = ? and type = ?");
}
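    /**
     * Re-establishes the connection and re-prepares all statements if the
     * current connection is null or has been closed.
     */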
private void checkConnection() throws SQLException {
if (connection == null || connection.isClosed()) {
connection = driver.connect(dbString, null);
prepareStatements();
log.warning("Database connection lost. Reinitialized DB.");
}
}
/* (non-Javadoc)
* @see org.gestern.gringotts.data.DAO#storeAccountChest(org.gestern.gringotts.AccountChest)
*/
@Override
public boolean storeAccountChest(AccountChest chest) {
GringottsAccount account = chest.getAccount();
Location loc = chest.sign.getLocation();
log.info("storing account chest: " + chest + " for account: " + account);
try {
checkConnection();
storeAccountChest.setString(1, loc.getWorld().getName());
storeAccountChest.setInt(2, loc.getBlockX());
storeAccountChest.setInt(3, loc.getBlockY());
storeAccountChest.setInt(4, loc.getBlockZ());
storeAccountChest.setString(5, account.owner.getId());
storeAccountChest.setString(6, account.owner.getType());
int updated = storeAccountChest.executeUpdate();
return updated > 0;
} catch (SQLException e) {
// unique constraint failed: chest already exists
if (e.getErrorCode() == 23505) {
log.warning("Unable to store account chest: " + e.getMessage());
return false;
}
// other more serious error probably
throw new GringottsStorageException("Failed to store account chest: " + chest, e);
}
}
/* (non-Javadoc)
* @see org.gestern.gringotts.data.DAO#destroyAccountChest(org.gestern.gringotts.AccountChest)
*/
@Override
public boolean destroyAccountChest(AccountChest chest) {
Location loc = chest.sign.getLocation();
try {
checkConnection();
return deleteAccountChest(loc.getWorld().getName(), loc.getBlockX(), loc.getBlockY(), loc.getBlockZ());
} catch (SQLException e) {
throw new GringottsStorageException("Failed to delete account chest: " + chest, e);
}
}
@SuppressWarnings("SuspiciousNameCombination")
private boolean deleteAccountChest(String world, int x, int y, int z) throws SQLException {
destroyAccountChest.setString(1, world);
destroyAccountChest.setInt(2, x);
destroyAccountChest.setInt(3, y);
destroyAccountChest.setInt(4, z);
int updated = destroyAccountChest.executeUpdate();
return updated > 0;
}
/* (non-Javadoc)
* @see org.gestern.gringotts.data.DAO#storeAccount(org.gestern.gringotts.GringottsAccount)
*/
@Override
public boolean storeAccount(GringottsAccount account) {
AccountHolder owner = account.owner;
// don't store/overwrite if it's already there
if (hasAccount(owner))
return false;
try {
checkConnection();
storeAccount.setString(1, owner.getType());
storeAccount.setString(2, owner.getId());
// TODO this is business logic and should probably be outside of the DAO implementation.
// also find a more elegant way of handling different account types
double value = 0;
String type = account.owner.getType();
switch (type) {
case "player":
value = CONF.startBalancePlayer;
break;
case "faction":
value = CONF.startBalanceFaction;
break;
case "town":
value = CONF.startBalanceTown;
break;
case "nation":
value = CONF.startBalanceNation;
break;
}
storeAccount.setLong(3, CONF.currency.centValue(value));
int updated = storeAccount.executeUpdate();
return updated > 0;
} catch (SQLException e) {
throw new GringottsStorageException("Failed to store account: " + account, e);
}
}
/* (non-Javadoc)
* @see org.gestern.gringotts.data.DAO#getAccount(org.gestern.gringotts.accountholder.AccountHolder)
*/
@Override
public boolean hasAccount(AccountHolder accountHolder) {
ResultSet result = null;
try {
checkConnection();
getAccount.setString(1, accountHolder.getId());
getAccount.setString(2, accountHolder.getType());
result = getAccount.executeQuery();
return result.next();
} catch (SQLException e) {
throw new GringottsStorageException("Failed to get account for owner: " + accountHolder, e);
} finally {
try { if (result!=null) result.close(); } catch (SQLException ignored) {}
}
}
/**
* Migration method: Get all account raw data.
* @return list of {{DerbyAccount}} describing the accounts
*/
public List<DerbyAccount> getAccountsRaw() {
List<DerbyAccount> accounts = new LinkedList<>();
ResultSet result = null;
try {
checkConnection();
result = connection.createStatement().executeQuery("select * from account");
while (result.next()) {
int id = result.getInt("id");
String type = result.getString("type");
String owner = result.getString("owner");
long cents = result.getLong("cents");
accounts.add(new DerbyAccount(id, type, owner, cents));
}
} catch (SQLException e) {
throw new GringottsStorageException("Failed to get set of accounts", e);
} finally {
try { if (result!=null) result.close(); } catch (SQLException ignored) {}
}
return accounts;
}
/**
* Migration method: Get all accountchest raw data.
* @return ...
*/
public List<DerbyAccountChest> getChestsRaw() {
List<DerbyAccountChest> chests = new LinkedList<>();
ResultSet result = null;
try {
checkConnection();
result = connection.createStatement().executeQuery("select * from accountchest");
while (result.next()) {
int id = result.getInt("id");
String world = result.getString("world");
int x = result.getInt("x");
int y = result.getInt("y");
int z = result.getInt("z");
int account = result.getInt("account");
chests.add(new DerbyAccountChest(id,world,x,y,z,account));
}
} catch (SQLException e) {
throw new GringottsStorageException("Failed to get set of accounts", e);
} finally {
try { if (result!=null) result.close(); } catch (SQLException ignored) {}
}
return chests;
}
/* (non-Javadoc)
* @see org.gestern.gringotts.data.DAO#getChests()
*/
@Override
public List<AccountChest> getChests() {
List<AccountChest> chests = new LinkedList<>();
ResultSet result = null;
try {
checkConnection();
result = getChests.executeQuery();
while (result.next()) {
String worldName = result.getString("world");
int x = result.getInt("x");
int y = result.getInt("y");
int z = result.getInt("z");
String type = result.getString("type");
String ownerId = result.getString("owner");
                World world = Bukkit.getWorld(worldName);
                if (world == null) {
                    Gringotts.G.getLogger().warning("Vault " + type + ":" + ownerId + " located on a non-existent world. Skipping.");
                    continue;
                }
                Location loc = new Location(world, x, y, z);
Block signBlock = loc.getBlock();
if (Util.isSignBlock(signBlock)) {
AccountHolder owner = Gringotts.G.accountHolderFactory.get(type, ownerId);
if (owner == null) {
// FIXME this logic really doesn't belong in DAO, I think?
log.info("AccountHolder "+type+":"+ownerId+" is not valid. Deleting associated account chest at " + signBlock.getLocation());
deleteAccountChest(signBlock.getWorld().getName(), signBlock.getX(), signBlock.getY(), signBlock.getZ());
} else {
GringottsAccount ownerAccount = new GringottsAccount(owner);
Sign sign = (Sign) signBlock.getState();
chests.add(new AccountChest(sign, ownerAccount));
}
} else {
// remove accountchest from storage if it is not a valid chest
deleteAccountChest(signBlock.getWorld().getName(), signBlock.getX(), signBlock.getY(), signBlock.getZ());
}
}
} catch (SQLException e) {
throw new GringottsStorageException("Failed to get list of all chests", e);
} finally {
try { if (result!=null) result.close(); } catch (SQLException ignored) {}
}
return chests;
}
/* (non-Javadoc)
* @see org.gestern.gringotts.data.DAO#getChests(org.gestern.gringotts.GringottsAccount)
*/
@Override
public List<AccountChest> getChests(GringottsAccount account) {
AccountHolder owner = account.owner;
List<AccountChest> chests = new LinkedList<>();
ResultSet result = null;
try {
checkConnection();
getChestsForAccount.setString(1, owner.getId());
getChestsForAccount.setString(2, owner.getType());
result = getChestsForAccount.executeQuery();
while (result.next()) {
String worldName = result.getString("world");
int x = result.getInt("x");
int y = result.getInt("y");
int z = result.getInt("z");
                World world = Bukkit.getWorld(worldName);
                if (world == null) {
                    // the original call passed x in place of z, so the row never matched and the orphaned vault was never removed
                    deleteAccountChest(worldName, x, y, z);
                    Gringotts.G.getLogger().severe("Vault of " + account.owner.getName() + " located on a non-existent world. Deleting Vault on world " + worldName);
                    continue;
                }
                Location loc = new Location(world, x, y, z);
Block signBlock = loc.getBlock();
if (Util.isSignBlock(signBlock)) {
Sign sign = (Sign) loc.getBlock().getState();
chests.add(new AccountChest(sign, account));
} else {
// remove accountchest from storage if it is not a valid chest
deleteAccountChest(signBlock.getWorld().getName(), signBlock.getX(), signBlock.getY(), signBlock.getZ());
}
}
} catch (SQLException e) {
throw new GringottsStorageException("Failed to get list of all chests", e);
} finally {
try { if (result!=null) result.close(); } catch (SQLException ignored) {}
}
return chests;
}
/* (non-Javadoc)
* @see org.gestern.gringotts.data.DAO#storeCents(org.gestern.gringotts.GringottsAccount, long)
*/
@Override
public boolean storeCents(GringottsAccount account, long amount) {
try {
checkConnection();
storeCents.setLong(1, amount);
storeCents.setString(2, account.owner.getId());
storeCents.setString(3, account.owner.getType());
int updated = storeCents.executeUpdate();
return updated > 0;
} catch (SQLException e) {
throw new GringottsStorageException("Failed to get cents for account: " + account, e);
}
}
/* (non-Javadoc)
* @see org.gestern.gringotts.data.DAO#getCents(org.gestern.gringotts.GringottsAccount)
*/
@Override
public long getCents(GringottsAccount account) {
ResultSet result = null;
try {
checkConnection();
getCents.setString(1, account.owner.getId());
getCents.setString(2, account.owner.getType());
result = getCents.executeQuery();
if (result.next()) {
return result.getLong("cents");
}
return 0;
} catch (SQLException e) {
throw new GringottsStorageException("Failed to get stored cents for account: " + account, e);
} finally {
try { if (result!=null) result.close(); } catch (SQLException ignored) {}
}
}
/**
* Get a DAO instance.
* @return the DAO instance
*/
public static DerbyDAO getDao() {
if (dao != null) return dao;
// load derby embedded driver
String driver = "org.apache.derby.jdbc.EmbeddedDriver";
try {
Class.forName(driver);
} catch (ClassNotFoundException ignored) {
return null;
}
dao = new DerbyDAO();
return dao;
}
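/*
 * Minimal usage sketch (for illustration only; connection setup and the prepared
 * statements are initialized elsewhere in this class and are not shown here):
 *
 *   DerbyDAO dao = DerbyDAO.getDao();        // null if the embedded Derby driver is missing
 *   if (dao != null) {
 *       List<AccountChest> chests = dao.getChests();
 *       dao.shutdown();                      // disconnect from Derby when finished
 *   }
 */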
/* (non-Javadoc)
* @see org.gestern.gringotts.data.DAO#shutdown()
*/
@Override
public void shutdown() {
try {
log.info("shutting down database connection");
// disconnect from derby completely
String disconnectString = "jdbc:derby:;shutdown=true";
DriverManager.getConnection(disconnectString);
} catch (SQLException e) {
// yes, derby actually throws an exception as a shutdown message ...
log.info("Derby shutdown: " + e.getSQLState() + ": " + e.getMessage());
System.gc();
}
}
/* (non-Javadoc)
* @see org.gestern.gringotts.data.DAO#finalize()
*/
@Override
public void finalize() throws Throwable {
super.finalize();
shutdown();
}
/* (non-Javadoc)
* @see org.gestern.gringotts.data.DAO#deleteAccount(org.gestern.gringotts.GringottsAccount)
*/
@Override
public void deleteAccount(GringottsAccount acc) {
// TODO implement this, mayhaps?
throw new RuntimeException("delete account not yet implemented");
}
/**
* Utility class to support migration of Derby database.
*/
public static class DerbyAccount {
public final int id;
public final String type;
public final String owner;
public final long cents;
public DerbyAccount(int id, String type, String owner, long cents) {
this.id = id;
this.type = type;
this.owner = owner;
this.cents = cents;
}
}
public static class DerbyAccountChest {
public final int id;
public final String world;
public final int x, y, z;
public final int account;
public DerbyAccountChest(int id, String world, int x, int y, int z, int account) {
this.id = id;
this.world = world;
this.x = x;
this.y = y;
this.z = z;
this.account = account;
}
}
}
|
|
package AST;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.io.File;
import java.util.*;
import beaver.*;
import java.util.ArrayList;
import java.util.zip.*;
import java.io.*;
import java.util.Stack;
import java.util.regex.Pattern;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.Transformer;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Element;
import org.w3c.dom.Document;
import java.util.HashMap;
import java.util.Map.Entry;
import javax.xml.transform.TransformerException;
import javax.xml.parsers.ParserConfigurationException;
import java.util.Collection;
/**
* @ast node
* @declaredat Granule.ast:24
*/
public class CheckFitnessExpr extends Access implements Cloneable {
/**
* @apilvl low-level
*/
public void flushCache() {
super.flushCache();
}
/**
* @apilvl internal
*/
public void flushCollectionCache() {
super.flushCollectionCache();
}
/**
* @apilvl internal
*/
@SuppressWarnings({"unchecked", "cast"})
public CheckFitnessExpr clone() throws CloneNotSupportedException {
CheckFitnessExpr node = (CheckFitnessExpr)super.clone();
node.in$Circle(false);
node.is$Final(false);
return node;
}
/**
* @apilvl internal
*/
@SuppressWarnings({"unchecked", "cast"})
public CheckFitnessExpr copy() {
try {
CheckFitnessExpr node = (CheckFitnessExpr)clone();
if(children != null) node.children = (ASTNode[])children.clone();
return node;
} catch (CloneNotSupportedException e) {
}
System.err.println("Error: Could not clone node of type " + getClass().getName() + "!");
return null;
}
/**
* @apilvl low-level
*/
@SuppressWarnings({"unchecked", "cast"})
public CheckFitnessExpr fullCopy() {
CheckFitnessExpr res = (CheckFitnessExpr)copy();
for(int i = 0; i < getNumChildNoTransform(); i++) {
ASTNode node = getChildNoTransform(i);
if(node != null) node = node.fullCopy();
res.setChild(node, i);
}
return res;
}
/**
* @ast method
* @declaredat Granule.ast:1
*/
public CheckFitnessExpr() {
super();
setChild(new List(), 0);
}
/**
* @ast method
* @declaredat Granule.ast:8
*/
public CheckFitnessExpr(List<Expr> p0) {
setChild(p0, 0);
}
/**
* @apilvl low-level
* @ast method
* @declaredat Granule.ast:14
*/
protected int numChildren() {
return 1;
}
/**
* @apilvl internal
* @ast method
* @declaredat Granule.ast:20
*/
public boolean mayHaveRewrite() {
return false;
}
/**
* Setter for ArgList
* @apilvl high-level
* @ast method
* @declaredat Granule.ast:5
*/
public void setArgList(List<Expr> list) {
setChild(list, 0);
}
/**
* @return number of children in ArgList
* @apilvl high-level
* @ast method
* @declaredat Granule.ast:12
*/
public int getNumArg() {
return getArgList().getNumChild();
}
/**
* Getter for child in list ArgList
* @apilvl high-level
* @ast method
* @declaredat Granule.ast:19
*/
@SuppressWarnings({"unchecked", "cast"})
public Expr getArg(int i) {
return (Expr)getArgList().getChild(i);
}
/**
* Add element to list ArgList
* @apilvl high-level
* @ast method
* @declaredat Granule.ast:27
*/
public void addArg(Expr node) {
List<Expr> list = (parent == null || state == null) ? getArgListNoTransform() : getArgList();
list.addChild(node);
}
/**
* @apilvl low-level
* @ast method
* @declaredat Granule.ast:34
*/
public void addArgNoTransform(Expr node) {
List<Expr> list = getArgListNoTransform();
list.addChild(node);
}
/**
* Setter for child in list ArgList
* @apilvl high-level
* @ast method
* @declaredat Granule.ast:42
*/
public void setArg(Expr node, int i) {
List<Expr> list = getArgList();
list.setChild(node, i);
}
/**
* Getter for Arg list.
* @apilvl high-level
* @ast method
* @declaredat Granule.ast:50
*/
public List<Expr> getArgs() {
return getArgList();
}
/**
* @apilvl low-level
* @ast method
* @declaredat Granule.ast:56
*/
public List<Expr> getArgsNoTransform() {
return getArgListNoTransform();
}
/**
* Getter for list ArgList
* @apilvl high-level
* @ast method
* @declaredat Granule.ast:63
*/
@SuppressWarnings({"unchecked", "cast"})
public List<Expr> getArgList() {
List<Expr> list = (List<Expr>)getChild(0);
list.getNumChild();
return list;
}
/**
* @apilvl low-level
* @ast method
* @declaredat Granule.ast:72
*/
@SuppressWarnings({"unchecked", "cast"})
public List<Expr> getArgListNoTransform() {
return (List<Expr>)getChildNoTransform(0);
}
/**
* @apilvl internal
*/
public ASTNode rewriteTo() {
return super.rewriteTo();
}
}
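/*
 * Construction sketch for this generated node type; `someExpr` is a placeholder
 * for any AST.Expr instance built elsewhere, not an identifier in this file:
 *
 *   CheckFitnessExpr fitness = new CheckFitnessExpr();
 *   fitness.addArg(someExpr);       // appends to the ArgList child
 *   int n = fitness.getNumArg();    // == 1
 *   Expr first = fitness.getArg(0);
 */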
|
|
package com.matchandtrade.rest.v1.validator;
import com.matchandtrade.persistence.entity.ArticleEntity;
import com.matchandtrade.persistence.entity.MembershipEntity;
import com.matchandtrade.persistence.entity.UserEntity;
import com.matchandtrade.rest.RestException;
import com.matchandtrade.rest.service.ArticleService;
import com.matchandtrade.rest.service.MembershipService;
import com.matchandtrade.rest.service.UserService;
import com.matchandtrade.rest.v1.json.ArticleJson;
import com.matchandtrade.rest.v1.transformer.ArticleTransformer;
import com.matchandtrade.test.StringRandom;
import com.matchandtrade.test.helper.SearchHelper;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.springframework.http.HttpStatus;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class ArticleValidatorUT {
@Mock
private ArticleService articleService;
private ArticleTransformer articleTransformer = new ArticleTransformer();
private ArticleEntity existingArticleOwnedByDifferentUser;
private ArticleEntity existingListedArticle;
private UserEntity existingUser;
private ArticleJson givenExistingArticle;
private ArticleValidator fixture;
@Mock
private MembershipService mockMembershipService;
@Mock
private UserService mockUserService;
@Before
public void before() {
fixture = new ArticleValidator();
existingUser = new UserEntity();
existingUser.setUserId(1);
givenExistingArticle = new ArticleJson();
givenExistingArticle.setArticleId(20);
givenExistingArticle.setName("existing-article-name");
ArticleEntity existingArticle = articleTransformer.transform(givenExistingArticle);
existingListedArticle = new ArticleEntity();
existingListedArticle.setArticleId(21);
existingArticleOwnedByDifferentUser = new ArticleEntity();
existingArticleOwnedByDifferentUser.setArticleId(22);
when(mockUserService.findByUserId(existingUser.getUserId())).thenReturn(existingUser);
fixture.userService = mockUserService;
when(articleService.findByArticleId(givenExistingArticle.getArticleId())).thenReturn(existingArticle);
when(articleService.findByUserIdAndArticleId(existingUser.getUserId(), existingArticle.getArticleId())).thenReturn(existingArticle);
when(articleService.findByUserIdAndArticleId(existingUser.getUserId(), existingListedArticle.getArticleId())).thenReturn(existingListedArticle);
fixture.articleService = articleService;
MembershipEntity existingListedMembership = new MembershipEntity();
existingListedMembership.setMembershipId(30);
existingListedMembership.getArticles().add(existingListedArticle);
when(mockMembershipService.findByArticleIdId(existingListedArticle.getArticleId(), 1, 10))
.thenReturn(SearchHelper.buildSearchResult(existingListedMembership));
when(mockMembershipService.findByArticleIdId(givenExistingArticle.getArticleId(), 1, 10))
.thenReturn(SearchHelper.buildEmptySearchResult());
fixture.membershipService = mockMembershipService;
}
@Test(expected = RestException.class)
public void validateDelete_When_ArticleIsListed_Then_Forbidden() {
try {
fixture.validateDelete(existingUser.getUserId(), existingListedArticle.getArticleId());
} catch (RestException e) {
assertEquals(HttpStatus.FORBIDDEN, e.getHttpStatus());
assertEquals("Article.articleId: 21 is listed on Membership.membershipId: [30]", e.getDescription());
throw e;
}
}
@Test(expected = RestException.class)
public void validateDelete_When_UserDoesNotOwnArticle_Then_BadRequest() {
try {
fixture.validateDelete(existingUser.getUserId(), existingArticleOwnedByDifferentUser.getArticleId());
} catch (RestException e) {
assertEquals(HttpStatus.BAD_REQUEST, e.getHttpStatus());
assertEquals("User.userId: 1 does not own Article.articleId: 22", e.getDescription());
throw e;
}
}
@Test
public void validateDelete_When_ArticleIsNotListed_Then_Succeeds() {
fixture.validateDelete(existingUser.getUserId(), givenExistingArticle.getArticleId());
}
@Test
public void validateGet_When_ArticleExist_Then_Succeeds() {
fixture.validateGet(givenExistingArticle.getArticleId());
}
@Test(expected = RestException.class)
public void validateGet_When_ArticleDoesNotExist_Then_NotFound() {
try {
fixture.validateGet(0);
} catch (RestException e) {
assertEquals(HttpStatus.NOT_FOUND, e.getHttpStatus());
throw e;
}
}
@Test
public void validatePost_When_NameLengthIs150_Then_Succeeds() {
givenExistingArticle.setName(StringRandom.sequentialNumericString(150));
fixture.validatePost(existingUser.getUserId(), givenExistingArticle);
}
@Test
public void validatePost_When_NameLengthIs3_Then_Succeeds() {
givenExistingArticle.setName(StringRandom.sequentialNumericString(3));
fixture.validatePost(existingUser.getUserId(), givenExistingArticle);
}
@Test(expected = RestException.class)
public void validatePost_When_NameIsNull_Then_BadRequest() {
givenExistingArticle.setName(null);
try {
fixture.validatePost(existingUser.getUserId(), givenExistingArticle);
} catch (RestException e) {
assertEquals(HttpStatus.BAD_REQUEST, e.getHttpStatus());
throw (e);
}
}
@Test(expected = RestException.class)
public void validatePost_When_NameLengthIs2_Then_BadRequest() {
givenExistingArticle.setName(StringRandom.sequentialNumericString(2));
try {
fixture.validatePost(existingUser.getUserId(), givenExistingArticle);
} catch (RestException e) {
assertEquals(HttpStatus.BAD_REQUEST, e.getHttpStatus());
throw (e);
}
}
@Test(expected = RestException.class)
public void validatePost_When_NameLengthIs151_Then_BadRequest() {
givenExistingArticle.setName(StringRandom.sequentialNumericString(151));
try {
fixture.validatePost(existingUser.getUserId(), givenExistingArticle);
} catch (RestException e) {
assertEquals(HttpStatus.BAD_REQUEST, e.getHttpStatus());
throw (e);
}
}
@Test
public void validatePost_When_DescriptionLengthIs2000_Then_Succeeds() {
givenExistingArticle.setDescription(StringRandom.sequentialNumericString(2000));
fixture.validatePost(existingUser.getUserId(), givenExistingArticle);
}
@Test
public void validatePost_When_DescriptionIsNull_Then_Succeeds() {
fixture.validatePost(existingUser.getUserId(), givenExistingArticle);
}
@Test(expected = RestException.class)
public void validatePost_When_DescriptionLengthIs2001_Then_BadRequest() {
givenExistingArticle.setDescription(StringRandom.sequentialNumericString(2001));
try {
fixture.validatePost(existingUser.getUserId(), givenExistingArticle);
} catch (RestException e) {
assertEquals(HttpStatus.BAD_REQUEST, e.getHttpStatus());
throw e;
}
}
@Test(expected = RestException.class)
public void validatePost_When_UserDoesNotExist_Then_BadRequest() {
try {
fixture.validatePost(0, givenExistingArticle);
} catch (RestException e) {
assertEquals(HttpStatus.BAD_REQUEST, e.getHttpStatus());
throw e;
}
}
@Test
public void validatePut_When_ArticleExists_Then_Succeeds() {
fixture.validatePut(existingUser.getUserId(), givenExistingArticle);
}
@Test(expected = RestException.class)
public void validatePut_When_ArticleDoesNotExist_Then_BadRequest() {
ArticleJson articleOwnedByDifferentUser = articleTransformer.transform(existingArticleOwnedByDifferentUser);
try {
fixture.validatePut(existingUser.getUserId(), articleOwnedByDifferentUser);
} catch (RestException e) {
assertEquals(HttpStatus.BAD_REQUEST, e.getHttpStatus());
throw e;
}
}
@Test(expected = RestException.class)
public void validatePut_When_UserDoesNotExist_Then_BadRequest() {
try {
fixture.validatePut(0, givenExistingArticle);
} catch (RestException e) {
assertEquals(HttpStatus.BAD_REQUEST, e.getHttpStatus());
throw e;
}
}
}
|
|
/*
* Copyright (c) 2008-2009 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.remote;
import java.util.Map;
import java.util.NoSuchElementException;
import org.neo4j.graphdb.Direction;
import org.neo4j.graphdb.NotInTransactionException;
import org.neo4j.graphdb.RelationshipType;
import org.neo4j.graphdb.ReturnableEvaluator;
import org.neo4j.graphdb.StopEvaluator;
import org.neo4j.graphdb.TraversalPosition;
import org.neo4j.graphdb.Traverser.Order;
final class RemoteGraphDbEngine
{
private final ThreadLocal<RemoteTransaction> current = new ThreadLocal<RemoteTransaction>();
private final RemoteConnection connection;
private final ConfigurationFactory factory;
private final Map<String, RelationshipType> typesCache = null;
private final LocalTraversalService traversal = new LocalTraversalService();
RemoteGraphDbEngine( RemoteConnection connection, ConfigurationModule module )
{
this.connection = connection;
this.factory = new ConfigurationFactory( module, connection
.configure( Configuration.of( module ) ) );
}
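// Transactions are tracked per calling thread: beginTx() stores a new (possibly
// nested) RemoteTransaction in the ThreadLocal, endTx() restores the previous
// one, and current() returns the innermost open transaction or throws
// NotInTransactionException when there is none.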
RemoteTransaction beginTx()
{
RemoteTransaction transaction = current.get();
if ( transaction != null )
{
transaction = factory.createNestedTransaction( transaction );
}
else
{
transaction = new RemoteTransaction( this, connection
.beginTransaction() );
}
current.set( transaction );
return transaction;
}
void endTx( RemoteTransaction previous )
{
current.set( previous );
}
RemoteTransaction current()
{
return current( false );
}
RemoteTransaction current( boolean read_only )
{
RemoteTransaction transaction = current.get();
if ( transaction == null )
{
throw new NotInTransactionException();
}
return transaction;
}
void shutdown()
{
connection.close();
}
private <T> T receive( RemoteResponse<T> response )
{
return response.value();
}
void commit( int txId )
{
connection.commit( txId );
}
void rollback( int txId )
{
connection.rollback( txId );
}
RelationshipType type( String name )
{
RelationshipType type = null;
if ( typesCache != null )
{
type = typesCache.get( name );
}
if ( type == null )
{
type = new RelType( name );
if ( typesCache != null )
{
typesCache.put( name, type );
}
}
return type;
}
Iterable<String> getRelationshipTypes( final int txId )
{
return new BatchIterable<String>()
{
@Override
IterableSpecification<String> init()
{
return receive( connection.getRelationshipTypes( txId ) );
}
@Override
IterableSpecification<String> more( int requestToken )
{
return receive( connection.getMoreRelationshipTypes( txId,
requestToken ) );
}
@Override
void done( int requestToken )
{
receive( connection.closeRelationshipTypeIterator( txId,
requestToken ) );
}
};
}
Iterable<NodeSpecification> getAllNodes( final int txId )
{
return new BatchIterable<NodeSpecification>()
{
@Override
IterableSpecification<NodeSpecification> init()
{
return receive( connection.getAllNodes( txId ) );
}
@Override
IterableSpecification<NodeSpecification> more( int requestToken )
{
return receive( connection.getMoreNodes( txId, requestToken ) );
}
@Override
void done( int requestToken )
{
receive( connection.closeNodeIterator( txId, requestToken ) );
}
};
}
// --- RemoteTransaction interface ---
long createNode( int txId )
{
return receive( connection.createNode( txId ) ).id;
}
long getReferenceNode( int txId )
{
return receive( connection.getReferenceNode( txId ) ).id;
}
boolean hasNodeWithId( int txId, long nodeId )
{
return receive( connection.hasNodeWithId( txId, nodeId ) );
}
void deleteNode( int txId, long nodeId )
{
receive( connection.deleteNode( txId, nodeId ) );
}
RelationshipSpecification createRelationship( int txId, String typeName,
long startId, long endId )
{
return receive( connection.createRelationship( txId, typeName, startId,
endId ) );
}
RelationshipSpecification getRelationshipById( int txId, long relationshipId )
{
return receive( connection.getRelationshipById( txId, relationshipId ) );
}
Iterable<RelationshipSpecification> getAllRelationships( final int txId,
final long nodeId, final Direction dir )
{
return new BatchIterable<RelationshipSpecification>()
{
@Override
IterableSpecification<RelationshipSpecification> init()
{
return receive( connection.getAllRelationships( txId, nodeId,
dir ) );
}
@Override
IterableSpecification<RelationshipSpecification> more(
int requestToken )
{
return receive( connection.getMoreRelationships( txId,
requestToken ) );
}
@Override
void done( int requestToken )
{
receive( connection.closeRelationshipIterator( txId,
requestToken ) );
}
};
}
Iterable<RelationshipSpecification> getRelationships( final int txId,
final long nodeId, final Direction dir, final String[] typeNames )
{
return new BatchIterable<RelationshipSpecification>()
{
@Override
IterableSpecification<RelationshipSpecification> init()
{
return receive( connection.getRelationships( txId, nodeId, dir,
typeNames ) );
}
@Override
IterableSpecification<RelationshipSpecification> more(
int requestToken )
{
return receive( connection.getMoreRelationships( txId,
requestToken ) );
}
@Override
void done( int requestToken )
{
receive( connection.closeRelationshipIterator( txId,
requestToken ) );
}
};
}
void deleteRelationship( int txId, long relationshipId )
{
receive( connection.deleteRelationship( txId, relationshipId ) );
}
Iterable<TraversalPosition> traverse( final int txId, RemoteNode startNode,
Order order, StopEvaluator stopEvaluator,
ReturnableEvaluator returnableEvaluator,
RelationshipType[] relationshipTypes, Direction[] directions )
{
return traversal.performExternalEvaluatorTraversal( startNode, order,
stopEvaluator, returnableEvaluator, relationshipTypes, directions );
}
Object getNodeProperty( int txId, long nodeId, String key )
{
return receive( connection.getNodeProperty( txId, nodeId, key ) );
}
Object getRelationshipProperty( int txId, long relationshipId, String key )
{
return receive( connection.getRelationshipProperty( txId,
relationshipId, key ) );
}
void setNodeProperty( int txId, long nodeId, String key, Object value )
{
receive( connection.setNodeProperty( txId, nodeId, key, value ) );
}
void setRelationshipProperty( int txId, long relationshipId, String key,
Object value )
{
receive( connection.setRelationshipProperty( txId, relationshipId, key,
value ) );
}
Iterable<String> getNodePropertyKeys( final int txId, final long nodeId )
{
return new BatchIterable<String>()
{
@Override
IterableSpecification<String> init()
{
return receive( connection.getNodePropertyKeys( txId, nodeId ) );
}
@Override
IterableSpecification<String> more( int requestToken )
{
return receive( connection.getMorePropertyKeys( txId,
requestToken ) );
}
@Override
void done( int requestToken )
{
receive( connection.closePropertyKeyIterator( txId,
requestToken ) );
}
};
}
Iterable<String> getRelationshipPropertyKeys( final int txId,
final long relationshipId )
{
return new BatchIterable<String>()
{
@Override
IterableSpecification<String> init()
{
return receive( connection.getRelationshipPropertyKeys( txId,
relationshipId ) );
}
@Override
IterableSpecification<String> more( int requestToken )
{
return receive( connection.getMorePropertyKeys( txId,
requestToken ) );
}
@Override
void done( int requestToken )
{
receive( connection.closePropertyKeyIterator( txId,
requestToken ) );
}
};
}
boolean hasNodeProperty( int txId, long nodeId, String key )
{
return receive( connection.hasNodeProperty( txId, nodeId, key ) );
}
boolean hasRelationshipProperty( int txId, long relationshipId, String key )
{
return receive( connection.hasRelationshipProperty( txId,
relationshipId, key ) );
}
Object removeNodeProperty( int txId, long nodeId, String key )
{
return receive( connection.removeNodeProperty( txId, nodeId, key ) );
}
Object removeRelationshipProperty( int txId, long relationshipId, String key )
{
return receive( connection.removeRelationshipProperty( txId,
relationshipId, key ) );
}
// indexing
BatchIterable<NodeSpecification> getIndexNodes( final int txId,
final int indexId, final String key, final Object value )
{
return new BatchIterable<NodeSpecification>()
{
@Override
IterableSpecification<NodeSpecification> init()
{
return receive( connection.getIndexNodes( txId, indexId, key,
value ) );
}
@Override
IterableSpecification<NodeSpecification> more( int requestToken )
{
return receive( connection.getMoreNodes( txId, requestToken ) );
}
@Override
void done( int requestToken )
{
receive( connection.closeNodeIterator( txId, requestToken ) );
}
};
}
void indexNode( int txId, int indexId, long nodeId, String key, Object value )
{
receive( connection.indexNode( txId, indexId, nodeId, key, value ) );
}
void removeIndexNode( int txId, int indexId, long nodeId, String key,
Object value )
{
receive( connection.removeIndexNode( txId, indexId, nodeId, key, value ) );
}
interface CloseableIteratorWithSize<T> extends CloseableIterator<T>
{
long size();
}
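// BatchIterable captures the batched-iteration protocol used with the server:
// init() fetches the first IterableSpecification, more(token) fetches further
// batches while hasMore is set, and done(token) releases the server-side
// iterator when the client closes or exhausts it.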
static abstract class BatchIterable<T> implements Iterable<T>
{
public final CloseableIteratorWithSize<T> iterator()
{
final IterableSpecification<T> spec = init();
return new CloseableIteratorWithSize<T>()
{
int index = 0;
T[] content = spec.content;
int token = spec.token;
boolean hasMore = spec.hasMore;
long size = spec.size;
public boolean hasNext()
{
return content != null
&& ( index < content.length || hasMore );
}
public T next()
{
if ( content != null && index < content.length )
{
return content[ index++ ];
}
else if ( hasMore )
{
index = 0;
@SuppressWarnings( "hiding" )
IterableSpecification<T> spec = more( token );
content = spec.content;
token = spec.token;
hasMore = spec.hasMore;
return content[ index++ ];
}
else
{
throw new NoSuchElementException();
}
}
public long size()
{
if ( size < 0 )
{
throw new UnsupportedOperationException(
"This iterator has no size." );
}
return size;
}
public void close()
{
if ( content != null )
{
done( token );
hasMore = false;
content = null;
}
}
public void remove()
{
throw new UnsupportedOperationException();
}
@Override
protected void finalize()
{
close(); // Make sure that the iterator is closed
}
};
}
abstract IterableSpecification<T> init();
abstract IterableSpecification<T> more( int requestToken );
abstract void done( int requestToken );
}
int getIndexId( String name )
{
return receive( connection.getIndexServiceId( name ) );
}
}
|
|
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide;
import com.intellij.ide.presentation.Presentation;
import com.intellij.ide.presentation.PresentationProvider;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.util.ClassExtension;
import com.intellij.openapi.util.IconLoader;
import com.intellij.openapi.util.NullableLazyValue;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.containers.ConcurrentFactoryMap;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
/**
* @author peter
*/
public class TypePresentationServiceImpl extends TypePresentationService {
private static final ExtensionPointName<PresentationProvider> PROVIDER_EP = ExtensionPointName.create("com.intellij.presentationProvider");
private static final ClassExtension<PresentationProvider> PROVIDERS = new ClassExtension<>(PROVIDER_EP.getName());
@Override
public Icon getIcon(Object o) {
return getIcon(o.getClass(), o);
}
@Override @Nullable
public Icon getTypeIcon(Class type) {
return getIcon(type, null);
}
private Icon getIcon(Class type, Object o) {
Set<PresentationTemplate> templates = mySuperClasses.get(type);
for (PresentationTemplate template : templates) {
Icon icon = template.getIcon(o, 0);
if (icon != null) return icon;
}
return null;
}
@Override @Nullable
public String getTypePresentableName(Class type) {
Set<PresentationTemplate> templates = mySuperClasses.get(type);
for (PresentationTemplate template : templates) {
String typeName = template.getTypeName();
if (typeName != null) return typeName;
}
return getDefaultTypeName(type);
}
@Override
public String getTypeName(Object o) {
Set<PresentationTemplate> templates = mySuperClasses.get(o.getClass());
for (PresentationTemplate template : templates) {
String typeName = template.getTypeName(o);
if (typeName != null) return typeName;
}
return null;
}
public TypePresentationServiceImpl() {
for(TypeIconEP ep: TypeIconEP.EP_NAME.getExtensionList()) {
myIcons.put(ep.className, ep.getIcon());
}
for(TypeNameEP ep: TypeNameEP.EP_NAME.getExtensionList()) {
myNames.put(ep.className, ep.getTypeName());
}
}
@Nullable
private PresentationTemplate createPresentationTemplate(Class<?> type) {
Presentation presentation = type.getAnnotation(Presentation.class);
if (presentation != null) {
return new AnnotationBasedTemplate(presentation, type);
}
PresentationProvider provider = PROVIDERS.forClass(type);
if (provider != null) {
return new ProviderBasedTemplate(provider);
}
final NullableLazyValue<Icon> icon = myIcons.get(type.getName());
final NullableLazyValue<String> typeName = myNames.get(type.getName());
if (icon != null || typeName != null) {
return new PresentationTemplate() {
@Override
public Icon getIcon(Object o, int flags) {
return icon == null ? null : icon.getValue();
}
@Override
public String getName(Object o) {
return null;
}
@Override
public String getTypeName() {
return typeName == null ? null : typeName.getValue();
}
@Override
public String getTypeName(Object o) {
return getTypeName();
}
};
}
return null;
}
private final Map<String, NullableLazyValue<Icon>> myIcons = new HashMap<>();
private final Map<String, NullableLazyValue<String>> myNames = new HashMap<>();
private final Map<Class, Set<PresentationTemplate>> mySuperClasses = ConcurrentFactoryMap.createMap(key-> {
LinkedHashSet<PresentationTemplate> templates = new LinkedHashSet<>();
walkSupers(key, new LinkedHashSet<>(), templates);
return templates;
}
);
private void walkSupers(Class aClass, Set<? super Class> classes, Set<? super PresentationTemplate> templates) {
if (!classes.add(aClass)) {
return;
}
ContainerUtil.addIfNotNull(templates, createPresentationTemplate(aClass));
final Class superClass = aClass.getSuperclass();
if (superClass != null) {
walkSupers(superClass, classes, templates);
}
for (Class intf : aClass.getInterfaces()) {
walkSupers(intf, classes, templates);
}
}
/** @noinspection unchecked*/
public static class ProviderBasedTemplate implements PresentationTemplate {
private final PresentationProvider myProvider;
public ProviderBasedTemplate(PresentationProvider provider) {
myProvider = provider;
}
@Nullable
@Override
public Icon getIcon(Object o, int flags) {
return myProvider instanceof PresentationTemplate ?
((PresentationTemplate)myProvider).getIcon(o, flags) :
myProvider.getIcon(o);
}
@Nullable
@Override
public String getName(Object o) {
return myProvider.getName(o);
}
@Nullable
@Override
public String getTypeName() {
return myProvider instanceof PresentationTemplate ?
((PresentationTemplate)myProvider).getTypeName() : null;
}
@Override
public String getTypeName(Object o) {
return myProvider.getTypeName(o);
}
}
public static class PresentationTemplateImpl extends ProviderBasedTemplate {
public PresentationTemplateImpl(Presentation presentation, Class<?> aClass) {
super(new AnnotationBasedTemplate(presentation, aClass));
}
}
/** @noinspection unchecked*/
private static class AnnotationBasedTemplate extends PresentationProvider<Object> implements PresentationTemplate {
private final Presentation myPresentation;
private final Class<?> myClass;
AnnotationBasedTemplate(Presentation presentation, Class<?> aClass) {
myPresentation = presentation;
myClass = aClass;
}
@Override
@Nullable
public Icon getIcon(Object o) {
return getIcon(o, 0);
}
@Override
@Nullable
public Icon getIcon(Object o, int flags) {
if (o == null) return myIcon.getValue();
PresentationProvider provider = myPresentationProvider.getValue();
if (provider == null) {
return myIcon.getValue();
}
else {
Icon icon = provider.getIcon(o);
return icon == null ? myIcon.getValue() : icon;
}
}
@Override
@Nullable
public String getTypeName() {
return StringUtil.isEmpty(myPresentation.typeName()) ? null : myPresentation.typeName();
}
@Override
public String getTypeName(Object o) {
PresentationProvider provider = myPresentationProvider.getValue();
if (provider != null) {
String typeName = provider.getTypeName(o);
if (typeName != null) return typeName;
}
return getTypeName();
}
@Override
@Nullable
public String getName(Object o) {
PresentationProvider namer = myPresentationProvider.getValue();
return namer == null ? null : namer.getName(o);
}
private final NullableLazyValue<Icon> myIcon = new NullableLazyValue<Icon>() {
@Override
protected Icon compute() {
if (StringUtil.isEmpty(myPresentation.icon())) return null;
return IconLoader.getIcon(myPresentation.icon(), myClass);
}
};
private final NullableLazyValue<PresentationProvider> myPresentationProvider = new NullableLazyValue<PresentationProvider>() {
@Override
protected PresentationProvider compute() {
Class<? extends PresentationProvider> aClass = myPresentation.provider();
try {
return aClass == PresentationProvider.class ? null : aClass.newInstance();
}
catch (Exception e) {
return null;
}
}
};
}
interface PresentationTemplate {
@Nullable
Icon getIcon(Object o, int flags);
@Nullable
String getName(Object o);
@Nullable
String getTypeName();
String getTypeName(Object o);
}
}
|
|
/*
* Copyright (c) 2010-2016 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.gui.api.component.result;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.wicket.AttributeModifier;
import org.apache.wicket.Component;
import org.apache.wicket.Page;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.markup.html.AjaxLink;
import org.apache.wicket.behavior.AttributeAppender;
import org.apache.wicket.feedback.FeedbackMessage;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.link.DownloadLink;
import org.apache.wicket.markup.html.list.ListItem;
import org.apache.wicket.markup.html.list.ListView;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.model.AbstractReadOnlyModel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.PropertyModel;
import org.apache.wicket.model.StringResourceModel;
import com.evolveum.midpoint.gui.api.component.BasePanel;
import com.evolveum.midpoint.gui.api.model.LoadableModel;
import com.evolveum.midpoint.gui.api.util.WebComponentUtil;
import com.evolveum.midpoint.schema.constants.ObjectTypes;
import com.evolveum.midpoint.schema.util.ObjectTypeUtil;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import com.evolveum.midpoint.web.component.dialog.Popupable;
import com.evolveum.midpoint.web.component.util.VisibleEnableBehaviour;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectReferenceType;
/**
* @author katkav
*/
public class OperationResultPanel extends BasePanel<OpResult> implements Popupable{
private static final long serialVersionUID = 1L;
private static final String ID_DETAILS_BOX = "detailsBox";
private static final String ID_ICON_TYPE = "iconType";
private static final String ID_MESSAGE = "message";
private static final String ID_MESSAGE_LABEL = "messageLabel";
private static final String ID_PARAMS = "params";
private static final String ID_BACKGROUND_TASK = "backgroundTask";
private static final String ID_SHOW_ALL = "showAll";
private static final String ID_HIDE_ALL = "hideAll";
private static final String ID_ERROR_STACK_TRACE = "errorStackTrace";
static final String OPERATION_RESOURCE_KEY_PREFIX = "operation.";
private static final Trace LOGGER = TraceManager.getTrace(OperationResultPanel.class);
public OperationResultPanel(String id, IModel<OpResult> model, Page parentPage) {
super(id, model);
initLayout(parentPage);
}
public void initLayout(Page parentPage) {
WebMarkupContainer detailsBox = new WebMarkupContainer(ID_DETAILS_BOX);
detailsBox.setOutputMarkupId(true);
detailsBox.add(AttributeModifier.append("class", createHeaderCss()));
add(detailsBox);
initHeader(detailsBox);
initDetails(detailsBox, parentPage);
}
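// The panel is a single "details box": initHeader() contributes the status icon,
// message link, show/hide-all controls, close button and XML download link, while
// initDetails() contributes the collapsible section with operation, params,
// contexts, subresults and the error stack trace.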
private void initHeader(WebMarkupContainer box) {
WebMarkupContainer iconType = new WebMarkupContainer(ID_ICON_TYPE);
iconType.setOutputMarkupId(true);
iconType.add(new AttributeAppender("class", new AbstractReadOnlyModel<String>() {
private static final long serialVersionUID = 1L;
@Override
public String getObject() {
StringBuilder sb = new StringBuilder();
OpResult message = getModelObject();
switch (message.getStatus()) {
case IN_PROGRESS:
case NOT_APPLICABLE:
sb.append(" fa-info");
break;
case SUCCESS:
sb.append(" fa-check");
break;
case FATAL_ERROR:
sb.append(" fa-ban");
break;
case PARTIAL_ERROR:
case UNKNOWN:
case WARNING:
case HANDLED_ERROR:
default:
sb.append(" fa-warning");
}
return sb.toString();
}
}));
box.add(iconType);
Label message = createMessage();
AjaxLink<String> showMore = new AjaxLink<String>(ID_MESSAGE) {
private static final long serialVersionUID = 1L;
@Override
public void onClick(AjaxRequestTarget target) {
OpResult result = OperationResultPanel.this.getModelObject();
result.setShowMore(!result.isShowMore());
result.setAlreadyShown(false); // hack to be able to expand/collapse OpResult after rendered.
target.add(OperationResultPanel.this);
}
};
showMore.add(message);
box.add(showMore);
AjaxLink<String> backgroundTask = new AjaxLink<String>(ID_BACKGROUND_TASK) {
private static final long serialVersionUID = 1L;
@Override
public void onClick(AjaxRequestTarget target) {
final OpResult opResult = OperationResultPanel.this.getModelObject();
String oid = opResult.getBackgroundTaskOid();
if (oid == null || !opResult.isBackgroundTaskVisible()) {
return; // just for safety
}
ObjectReferenceType ref = ObjectTypeUtil.createObjectRef(oid, ObjectTypes.TASK);
WebComponentUtil.dispatchToObjectDetailsPage(ref, getPageBase(), false);
}
};
backgroundTask.add(new VisibleEnableBehaviour() {
private static final long serialVersionUID = 1L;
@Override
public boolean isVisible() {
return getModelObject().getBackgroundTaskOid() != null
&& getModelObject().isBackgroundTaskVisible();
}
});
box.add(backgroundTask);
AjaxLink<String> showAll = new AjaxLink<String>(ID_SHOW_ALL) {
private static final long serialVersionUID = 1L;
@Override
public void onClick(AjaxRequestTarget target) {
showHideAll(true, OperationResultPanel.this.getModelObject(), target);
}
};
showAll.add(new VisibleEnableBehaviour() {
private static final long serialVersionUID = 1L;
@Override
public boolean isVisible() {
return !OperationResultPanel.this.getModelObject().isShowMore();
}
});
box.add(showAll);
AjaxLink<String> hideAll = new AjaxLink<String>(ID_HIDE_ALL) {
private static final long serialVersionUID = 1L;
@Override
public void onClick(AjaxRequestTarget target) {
showHideAll(false, OperationResultPanel.this.getModel().getObject(), target);
}
};
hideAll.add(new VisibleEnableBehaviour() {
private static final long serialVersionUID = 1L;
@Override
public boolean isVisible() {
return OperationResultPanel.this.getModelObject().isShowMore();
}
});
box.add(hideAll);
AjaxLink<String> close = new AjaxLink<String>("close") {
private static final long serialVersionUID = 1L;
@Override
public void onClick(AjaxRequestTarget target) {
close(target);
}
};
box.add(close);
DownloadLink downloadXml = new DownloadLink("downloadXml", new AbstractReadOnlyModel<File>() {
private static final long serialVersionUID = 1L;
@Override
public File getObject() {
String home = getPageBase().getMidpointConfiguration().getMidpointHome();
File f = new File(home, "result");
DataOutputStream dos = null;
try {
dos = new DataOutputStream(new FileOutputStream(f));
dos.writeBytes(OperationResultPanel.this.getModel().getObject().getXml());
} catch (IOException e) {
LOGGER.error("Could not download result: {}", e.getMessage(), e);
} finally {
IOUtils.closeQuietly(dos);
}
return f;
}
});
downloadXml.setDeleteAfterDownload(true);
box.add(downloadXml);
}
public void close(AjaxRequestTarget target) {
this.setVisible(false);
target.add(this);
}
private Label createMessage() {
Label message = null;
if (StringUtils.isNotBlank(getModelObject().getMessage())) {
PropertyModel<String> messageModel = new PropertyModel<String>(getModel(), "message");
message = new Label(ID_MESSAGE_LABEL, messageModel);
} else {
message = new Label(ID_MESSAGE_LABEL, new LoadableModel<Object>() {
private static final long serialVersionUID = 1L;
@Override
protected Object load() {
OpResult result = OperationResultPanel.this.getModelObject();
String resourceKey = OPERATION_RESOURCE_KEY_PREFIX + result.getOperation();
return getPage().getString(resourceKey, null, resourceKey);
}
});
}
//message.setRenderBodyOnly(true);
message.setOutputMarkupId(true);
return message;
}
private void initDetails(WebMarkupContainer box, Page parentPage) {
final WebMarkupContainer details = new WebMarkupContainer("details", getModel());
details.setOutputMarkupId(true);
details.add(new VisibleEnableBehaviour() {
private static final long serialVersionUID = 1L;
@Override
public boolean isVisible() {
return getModel().getObject().isShowMore();
}
});
box.add(details);
WebMarkupContainer operationPanel = new WebMarkupContainer("type");
operationPanel.setOutputMarkupId(true);
operationPanel.add(new AttributeAppender("class", new LoadableModel<String>() {
private static final long serialVersionUID = 1L;
@Override
protected String load() {
return getLabelCss(getModel());
}
}, " "));
details.add(operationPanel);
Label operationLabel = new Label("operationLabel",
parentPage.getString("FeedbackAlertMessageDetails.operation"));
operationLabel.setOutputMarkupId(true);
operationPanel.add(operationLabel);
Label operation = new Label("operation", new LoadableModel<Object>() {
private static final long serialVersionUID = 1L;
@Override
protected Object load() {
OpResult result = getModelObject();
String resourceKey = OPERATION_RESOURCE_KEY_PREFIX + result.getOperation();
return getPage().getString(resourceKey, null, resourceKey);
}
});
operation.setOutputMarkupId(true);
operationPanel.add(operation);
Label count = new Label("countLabel", parentPage.getString("FeedbackAlertMessageDetails.count"));
count.add(new VisibleEnableBehaviour() {
private static final long serialVersionUID = 1L;
@Override
public boolean isVisible() {
OpResult result = getModelObject();
return result.getCount() > 1;
}
});
operationPanel.add(count);
operationPanel.add(initCountPanel(getModel()));
Label message = new Label("resultMessage",
new PropertyModel<String>(getModel(), "message").getObject());// PageBase.new
// PropertyModel<String>(model,
// "message"));
message.setOutputMarkupId(true);
message.add(new VisibleEnableBehaviour() {
private static final long serialVersionUID = 1L;
@Override
public boolean isVisible() {
return StringUtils.isNotBlank(getModelObject().getMessage());
}
});
operationPanel.add(message);
Label messageLabel = new Label("messageLabel", parentPage.getString("FeedbackAlertMessageDetails.message"));
messageLabel.setOutputMarkupId(true);
messageLabel.add(new VisibleEnableBehaviour() {
private static final long serialVersionUID = 1L;
@Override
public boolean isVisible() {
return StringUtils.isNotBlank(getModelObject().getMessage());
}
});
operationPanel.add(messageLabel);
initParams(operationPanel, getModel(), parentPage);
initContexts(operationPanel, getModel(), parentPage);
initError(operationPanel, getModel(), parentPage);
}
private void initParams(WebMarkupContainer operationContent, final IModel<OpResult> model, Page parentPage) {
Label paramsLabel = new Label("paramsLabel", parentPage.getString("FeedbackAlertMessageDetails.params"));
paramsLabel.setOutputMarkupId(true);
paramsLabel.add(new VisibleEnableBehaviour() {
private static final long serialVersionUID = 1L;
@Override
public boolean isVisible() {
return CollectionUtils.isNotEmpty(model.getObject().getParams());
}
});
operationContent.add(paramsLabel);
ListView<Param> params = new ListView<Param>(ID_PARAMS, createParamsModel(model)) {
private static final long serialVersionUID = 1L;
@Override
protected void populateItem(ListItem<Param> item) {
item.add(new Label("paramName", new PropertyModel<Object>(item.getModel(), "name")));
item.add(new Label("paramValue", new PropertyModel<Object>(item.getModel(), "value")));
}
};
params.setOutputMarkupId(true);
params.add(new VisibleEnableBehaviour() {
private static final long serialVersionUID = 1L;
@Override
public boolean isVisible() {
return CollectionUtils.isNotEmpty(model.getObject().getParams());
}
});
operationContent.add(params);
ListView<OpResult> subresults = new ListView<OpResult>("subresults", createSubresultsModel(model)) {
private static final long serialVersionUID = 1L;
@Override
protected void populateItem(final ListItem<OpResult> item) {
Panel subresult = new OperationResultPanel("subresult", item.getModel(), getPage());
subresult.setOutputMarkupId(true);
item.add(subresult);
}
};
subresults.setOutputMarkupId(true);
subresults.add(new VisibleEnableBehaviour() {
private static final long serialVersionUID = 1L;
@Override
public boolean isVisible() {
return CollectionUtils.isNotEmpty(model.getObject().getSubresults());
}
});
operationContent.add(subresults);
}
private void initContexts(WebMarkupContainer operationContent, final IModel<OpResult> model, Page parentPage) {
Label contextsLabel = new Label("contextsLabel", parentPage.getString("FeedbackAlertMessageDetails.contexts"));
contextsLabel.setOutputMarkupId(true);
contextsLabel.add(new VisibleEnableBehaviour() {
private static final long serialVersionUID = 1L;
@Override
public boolean isVisible() {
return CollectionUtils.isNotEmpty(model.getObject().getContexts());
}
});
operationContent.add(contextsLabel);
ListView<Context> contexts = new ListView<Context>("contexts", createContextsModel(model)) {
private static final long serialVersionUID = 1L;
@Override
protected void populateItem(ListItem<Context> item) {
item.add(new Label("contextName", new PropertyModel<Object>(item.getModel(), "name")));
item.add(new Label("contextValue", new PropertyModel<Object>(item.getModel(), "value")));
}
};
contexts.setOutputMarkupId(true);
contexts.add(new VisibleEnableBehaviour() {
private static final long serialVersionUID = 1L;
@Override
public boolean isVisible() {
return CollectionUtils.isNotEmpty(model.getObject().getContexts());
}
});
operationContent.add(contexts);
}
private void initError(WebMarkupContainer operationPanel, final IModel<OpResult> model, Page parentPage) {
Label errorLabel = new Label("errorLabel", parentPage.getString("FeedbackAlertMessageDetails.error"));
errorLabel.add(new VisibleEnableBehaviour() {
private static final long serialVersionUID = 1L;
@Override
public boolean isVisible() {
// return true;
return StringUtils.isNotBlank(model.getObject().getExceptionsStackTrace());
}
});
errorLabel.setOutputMarkupId(true);
operationPanel.add(errorLabel);
Label errorMessage = new Label("errorMessage", new PropertyModel<String>(model, "exceptionMessage"));
errorMessage.add(new VisibleEnableBehaviour() {
private static final long serialVersionUID = 1L;
@Override
public boolean isVisible() {
// return true;
return StringUtils.isNotBlank(model.getObject().getExceptionsStackTrace());
}
});
errorMessage.setOutputMarkupId(true);
operationPanel.add(errorMessage);
final Label errorStackTrace = new Label(ID_ERROR_STACK_TRACE,
new PropertyModel<String>(model, "exceptionsStackTrace"));
errorStackTrace.add(new VisibleEnableBehaviour() {
private static final long serialVersionUID = 1L;
@Override
public boolean isVisible() {
// return true;
return model.getObject().isShowError();
}
});
errorStackTrace.setOutputMarkupId(true);
operationPanel.add(errorStackTrace);
AjaxLink errorStackTraceLink = new AjaxLink("errorStackTraceLink") {
private static final long serialVersionUID = 1L;
@Override
public void onClick(AjaxRequestTarget target) {
OpResult result = OperationResultPanel.this.getModelObject();
result.setShowError(!model.getObject().isShowError());
result.setAlreadyShown(false); // hack to be able to expand/collapse OpResult after rendered.
// model.getObject().setShowError(!model.getObject().isShowError());
target.add(OperationResultPanel.this);
}
};
errorStackTraceLink.setOutputMarkupId(true);
errorStackTraceLink.add(new VisibleEnableBehaviour() {
private static final long serialVersionUID = 1L;
@Override
public boolean isVisible() {
return StringUtils.isNotBlank(model.getObject().getExceptionsStackTrace());
}
});
operationPanel.add(errorStackTraceLink);
}
private Label initCountPanel(final IModel<OpResult> model) {
Label count = new Label("count", new PropertyModel<String>(model, "count"));
count.add(new VisibleEnableBehaviour() {
private static final long serialVersionUID = 1L;
@Override
public boolean isVisible() {
OpResult result = model.getObject();
return result.getCount() > 1;
}
});
return count;
}
private void showHideAll(final boolean show, OpResult opresult, AjaxRequestTarget target) {
opresult.setShowMoreAll(show);
opresult.setAlreadyShown(false); // hack to be able to expand/collapse OpResult after rendered.
target.add(OperationResultPanel.this);
}
private IModel<String> createHeaderCss() {
return new AbstractReadOnlyModel<String>() {
private static final long serialVersionUID = 1L;
@Override
public String getObject() {
OpResult result = getModelObject();
if (result == null || result.getStatus() == null) {
return " box-warning";
}
switch (result.getStatus()) {
case IN_PROGRESS:
case NOT_APPLICABLE:
return " box-info";
case SUCCESS:
return " box-success";
case FATAL_ERROR:
return " box-danger";
case UNKNOWN:
case PARTIAL_ERROR:
case HANDLED_ERROR: // TODO:
case WARNING:
default:
return " box-warning";
}
}
};
}
static IModel<List<Param>> createParamsModel(final IModel<OpResult> model) {
return new LoadableModel<List<Param>>(false) {
private static final long serialVersionUID = 1L;
@Override
protected List<Param> load() {
OpResult result = model.getObject();
return result.getParams();
}
};
}
static IModel<List<Context>> createContextsModel(final IModel<OpResult> model) {
return new LoadableModel<List<Context>>(false) {
private static final long serialVersionUID = 1L;
@Override
protected List<Context> load() {
OpResult result = model.getObject();
return result.getContexts();
}
};
}
private IModel<List<OpResult>> createSubresultsModel(final IModel<OpResult> model) {
return new LoadableModel<List<OpResult>>(false) {
private static final long serialVersionUID = 1L;
@Override
protected List<OpResult> load() {
OpResult result = model.getObject();
List<OpResult> subresults = result.getSubresults();
if (subresults == null) {
subresults = new ArrayList<OpResult>();
}
return subresults;
}
};
}
private String getLabelCss(final IModel<OpResult> model) {
OpResult result = model.getObject();
if (result == null || result.getStatus() == null) {
return "messages-warn-content";
}
switch (result.getStatus()) {
case IN_PROGRESS:
case NOT_APPLICABLE:
return "left-info";
case SUCCESS:
return "left-success";
case FATAL_ERROR:
return "left-danger";
case UNKNOWN:
case PARTIAL_ERROR:
case HANDLED_ERROR: // TODO:
case WARNING:
default:
return "left-warning";
}
}
private String getIconCss(final IModel<OpResult> model) {
OpResult result = model.getObject();
if (result == null || result.getStatus() == null) {
return "fa-warning text-warning";
}
switch (result.getStatus()) {
case IN_PROGRESS:
case NOT_APPLICABLE:
return "fa-info-circle text-info";
case SUCCESS:
return "fa-check-circle-o text-success";
case FATAL_ERROR:
return "fa-times-circle-o text-danger";
case UNKNOWN:
case PARTIAL_ERROR:
case HANDLED_ERROR: // TODO:
case WARNING:
default:
return "fa-warning text-warning";
}
}
static String createMessageTooltip(final IModel<FeedbackMessage> model) {
FeedbackMessage message = model.getObject();
switch (message.getLevel()) {
case FeedbackMessage.INFO:
return "info";
case FeedbackMessage.SUCCESS:
return "success";
case FeedbackMessage.ERROR:
return "partialError";
case FeedbackMessage.FATAL:
return "fatalError";
case FeedbackMessage.UNDEFINED:
return "undefined";
case FeedbackMessage.DEBUG:
return "debug";
case FeedbackMessage.WARNING:
default:
return "warn";
}
}
@Override
public int getWidth() {
return 900;
}
@Override
public int getHeight() {
return 500;
}
@Override
public StringResourceModel getTitle() {
return new StringResourceModel("OperationResultPanel.result");
}
@Override
public Component getComponent() {
return this;
}
}
|
|
/*
* Copyright 2007 Yusuke Yamamoto
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package twitter4j;
import twitter4j.conf.Configuration;
import twitter4j.internal.async.Dispatcher;
import twitter4j.internal.http.HttpResponse;
import twitter4j.internal.json.DataObjectFactoryUtil;
import twitter4j.internal.json.z_T4JInternalFactory;
import twitter4j.internal.json.z_T4JInternalJSONImplFactory;
import twitter4j.internal.logging.Logger;
import twitter4j.internal.org.json.JSONArray;
import twitter4j.internal.org.json.JSONException;
import twitter4j.internal.org.json.JSONObject;
import twitter4j.json.JSONObjectType;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
/**
* @author Yusuke Yamamoto - yusuke at mac.com
* @since Twitter4J 2.1.8
*/
abstract class StatusStreamBase implements StatusStream {
protected static final Logger logger = Logger.getLogger(StatusStreamImpl.class);
private boolean streamAlive = true;
private BufferedReader br;
private InputStream is;
private HttpResponse response;
protected final Dispatcher dispatcher;
protected final Configuration CONF;
protected z_T4JInternalFactory factory;
/*package*/
StatusStreamBase(Dispatcher dispatcher, InputStream stream, Configuration conf) throws IOException {
this.is = stream;
this.br = new BufferedReader(new InputStreamReader(stream, "UTF-8"));
this.dispatcher = dispatcher;
this.CONF = conf;
this.factory = new z_T4JInternalJSONImplFactory(conf);
}
/*package*/
StatusStreamBase(Dispatcher dispatcher, HttpResponse response, Configuration conf) throws IOException {
this(dispatcher, response.asStream(), conf);
this.response = response;
}
protected String parseLine(String line) {
return line;
}
abstract class StreamEvent implements Runnable {
String line;
StreamEvent(String line) {
this.line = line;
}
}
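// handleNextElement() reads a single line from the stream on the calling thread
// and hands it to the Dispatcher; JSON parsing and the per-event callbacks
// (status, delete, limit, friends, ...) then run asynchronously inside the
// StreamEvent submitted below.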
protected void handleNextElement(final StreamListener[] listeners,
final RawStreamListener[] rawStreamListeners) throws TwitterException {
if (!streamAlive) {
throw new IllegalStateException("Stream already closed.");
}
try {
String line = br.readLine();
if (null == line) {
//invalidate this status stream
throw new IOException("the end of the stream has been reached");
}
dispatcher.invokeLater(new StreamEvent(line) {
public void run() {
try {
if (rawStreamListeners.length > 0) {
onMessage(line, rawStreamListeners);
}
// SiteStreamsImpl will parse "forUser" attribute
line = parseLine(line);
if (line != null && line.length() > 0) {
// parsing JSON is an expensive process and can be avoided when all listeners are instanceof RawStreamListener
if (listeners.length > 0) {
if (CONF.isJSONStoreEnabled()) {
DataObjectFactoryUtil.clearThreadLocalMap();
}
JSONObject json = new JSONObject(line);
JSONObjectType.Type event = JSONObjectType.determine(json);
if (logger.isDebugEnabled()) {
logger.debug("Received:", CONF.isPrettyDebugEnabled() ? json.toString(1) : json.toString());
}
switch (event) {
case SENDER:
onSender(json, listeners);
break;
case STATUS:
onStatus(json, listeners);
break;
case DIRECT_MESSAGE:
onDirectMessage(json, listeners);
break;
case DELETE:
onDelete(json, listeners);
break;
case LIMIT:
onLimit(json, listeners);
break;
case STALL_WARNING:
onStallWarning(json, listeners);
break;
case SCRUB_GEO:
onScrubGeo(json, listeners);
break;
case FRIENDS:
onFriends(json, listeners);
break;
case FAVORITE:
onFavorite(json.getJSONObject("source"), json.getJSONObject("target"), json.getJSONObject("target_object"), listeners);
break;
case UNFAVORITE:
onUnfavorite(json.getJSONObject("source"), json.getJSONObject("target"), json.getJSONObject("target_object"), listeners);
break;
case FOLLOW:
onFollow(json.getJSONObject("source"), json.getJSONObject("target"), listeners);
break;
case UNFOLLOW:
onUnfollow(json.getJSONObject("source"), json.getJSONObject("target"), listeners);
break;
case USER_LIST_MEMBER_ADDED:
onUserListMemberAddition(json.getJSONObject("target"), json.getJSONObject("source"), json.getJSONObject("target_object"), listeners);
break;
case USER_LIST_MEMBER_DELETED:
onUserListMemberDeletion(json.getJSONObject("target"), json.getJSONObject("source"), json.getJSONObject("target_object"), listeners);
break;
case USER_LIST_SUBSCRIBED:
onUserListSubscription(json.getJSONObject("source"), json.getJSONObject("target"), json.getJSONObject("target_object"), listeners);
break;
case USER_LIST_UNSUBSCRIBED:
onUserListUnsubscription(json.getJSONObject("source"), json.getJSONObject("target"), json.getJSONObject("target_object"), listeners);
break;
case USER_LIST_CREATED:
onUserListCreation(json.getJSONObject("source"), json.getJSONObject("target"), listeners);
break;
case USER_LIST_UPDATED:
onUserListUpdated(json.getJSONObject("source"), json.getJSONObject("target"), listeners);
break;
case USER_LIST_DESTROYED:
onUserListDestroyed(json.getJSONObject("source"), json.getJSONObject("target"), listeners);
break;
case USER_UPDATE:
onUserUpdate(json.getJSONObject("source"), json.getJSONObject("target"), listeners);
break;
case BLOCK:
onBlock(json.getJSONObject("source"), json.getJSONObject("target"), listeners);
break;
case UNBLOCK:
onUnblock(json.getJSONObject("source"), json.getJSONObject("target"), listeners);
break;
case DISCONNECTION:
onDisconnectionNotice(line, listeners);
break;
case UNKNOWN:
default:
logger.warn("Received unknown event:", CONF.isPrettyDebugEnabled() ? json.toString(1) : json.toString());
}
}
}
} catch (Exception ex) {
onException(ex, listeners);
}
}
});
} catch (IOException ioe) {
try {
is.close();
} catch (IOException ignore) {
}
boolean isUnexpectedException = streamAlive;
streamAlive = false;
if (isUnexpectedException) {
throw new TwitterException("Stream closed.", ioe);
}
}
}
protected void onMessage(String rawString, RawStreamListener[] listeners) throws TwitterException {
logger.warn("Unhandled event: onMessage");
}
protected void onSender(JSONObject json, StreamListener[] listeners) throws TwitterException {
logger.warn("Unhandled event: onSender");
}
protected void onStatus(JSONObject json, StreamListener[] listeners) throws TwitterException {
logger.warn("Unhandled event: onStatus");
}
protected void onDirectMessage(JSONObject json, StreamListener[] listeners) throws TwitterException, JSONException {
logger.warn("Unhandled event: onDirectMessage");
}
protected void onDelete(JSONObject json, StreamListener[] listeners) throws TwitterException, JSONException {
logger.warn("Unhandled event: onDelete");
}
protected void onLimit(JSONObject json, StreamListener[] listeners) throws TwitterException, JSONException {
logger.warn("Unhandled event: onLimit");
}
protected void onStallWarning(JSONObject json, StreamListener[] listeners) throws TwitterException, JSONException {
logger.warn("Unhandled event: onStallWarning");
}
protected void onScrubGeo(JSONObject json, StreamListener[] listeners) throws TwitterException, JSONException {
logger.warn("Unhandled event: onScrubGeo");
}
protected void onFriends(JSONObject json, StreamListener[] listeners) throws TwitterException, JSONException {
logger.warn("Unhandled event: onFriends");
}
protected void onFavorite(JSONObject source, JSONObject target, JSONObject targetObject, StreamListener[] listeners) throws TwitterException {
logger.warn("Unhandled event: onFavorite");
}
protected void onUnfavorite(JSONObject source, JSONObject target, JSONObject targetObject, StreamListener[] listeners) throws TwitterException {
logger.warn("Unhandled event: onUnfavorite");
}
protected void onFollow(JSONObject source, JSONObject target, StreamListener[] listeners) throws TwitterException {
logger.warn("Unhandled event: onFollow");
}
protected void onUnfollow(JSONObject source, JSONObject target, StreamListener[] listeners) throws TwitterException {
logger.warn("Unhandled event: onUnfollow");
}
protected void onUserListMemberAddition(JSONObject addedMember, JSONObject owner, JSONObject userList, StreamListener[] listeners) throws TwitterException, JSONException {
logger.warn("Unhandled event: onUserListMemberAddition");
}
protected void onUserListMemberDeletion(JSONObject deletedMember, JSONObject owner, JSONObject userList, StreamListener[] listeners) throws TwitterException, JSONException {
logger.warn("Unhandled event: onUserListMemberDeletion");
}
protected void onUserListSubscription(JSONObject source, JSONObject owner, JSONObject userList, StreamListener[] listeners) throws TwitterException, JSONException {
logger.warn("Unhandled event: onUserListSubscription");
}
protected void onUserListUnsubscription(JSONObject source, JSONObject owner, JSONObject userList, StreamListener[] listeners) throws TwitterException, JSONException {
logger.warn("Unhandled event: onUserListUnsubscription");
}
protected void onUserListCreation(JSONObject source, JSONObject userList, StreamListener[] listeners) throws TwitterException, JSONException {
logger.warn("Unhandled event: onUserListCreation");
}
protected void onUserListUpdated(JSONObject source, JSONObject userList, StreamListener[] listeners) throws TwitterException, JSONException {
logger.warn("Unhandled event: onUserListUpdated");
}
protected void onUserListDestroyed(JSONObject source, JSONObject userList, StreamListener[] listeners) throws TwitterException {
logger.warn("Unhandled event: onUserListDestroyed");
}
protected void onUserUpdate(JSONObject source, JSONObject target, StreamListener[] listeners) throws TwitterException {
logger.warn("Unhandled event: onUserUpdate");
}
protected void onBlock(JSONObject source, JSONObject target, StreamListener[] listeners) throws TwitterException {
logger.warn("Unhandled event: onBlock");
}
protected void onUnblock(JSONObject source, JSONObject target, StreamListener[] listeners) throws TwitterException {
logger.warn("Unhandled event: onUnblock");
}
protected void onDisconnectionNotice(String line, StreamListener[] listeners) {
logger.warn("Unhandled event: ", line);
}
protected void onException(Exception e, StreamListener[] listeners) {
logger.warn("Unhandled event: ", e.getMessage());
}
public void close() throws IOException {
streamAlive = false;
is.close();
br.close();
if (response != null) {
response.disconnect();
}
}
protected Status asStatus(JSONObject json) throws TwitterException {
Status status = factory.createStatus(json);
if (CONF.isJSONStoreEnabled()) {
DataObjectFactoryUtil.registerJSONObject(status, json);
}
return status;
}
protected DirectMessage asDirectMessage(JSONObject json) throws TwitterException {
DirectMessage directMessage;
try {
directMessage = factory.createDirectMessage(json.getJSONObject("direct_message"));
} catch (JSONException e) {
throw new TwitterException(e);
}
if (CONF.isJSONStoreEnabled()) {
DataObjectFactoryUtil.registerJSONObject(directMessage, json);
}
return directMessage;
}
protected long[] asFriendList(JSONObject json) throws TwitterException {
JSONArray friends;
try {
friends = json.getJSONArray("friends");
long[] friendIds = new long[friends.length()];
for (int i = 0; i < friendIds.length; ++i) {
friendIds[i] = Long.parseLong(friends.getString(i));
}
return friendIds;
} catch (JSONException e) {
throw new TwitterException(e);
}
}
protected User asUser(JSONObject json) throws TwitterException {
User user = factory.createUser(json);
if (CONF.isJSONStoreEnabled()) {
DataObjectFactoryUtil.registerJSONObject(user, json);
}
return user;
}
protected UserList asUserList(JSONObject json) throws TwitterException {
UserList userList = factory.createAUserList(json);
if (CONF.isJSONStoreEnabled()) {
DataObjectFactoryUtil.registerJSONObject(userList, json);
}
return userList;
}
public abstract void next(StatusListener listener) throws TwitterException;
public abstract void next(StreamListener[] listeners, RawStreamListener[] rawStreamListeners) throws TwitterException;
public void onException(Exception e, StreamListener[] listeners, RawStreamListener[] rawStreamListeners) {
for (StreamListener listener : listeners) {
listener.onException(e);
}
for (RawStreamListener listener : rawStreamListeners) {
listener.onException(e);
}
}
}
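/*
 * Minimal subclass sketch (not part of the original source; the class name is illustrative).
 * A concrete stream only needs to funnel its next() calls into handleNextElement(), which
 * reads one line from the underlying stream and dispatches the parsed event to the listeners.
 */
class SimpleStatusStreamSketch extends StatusStreamBase {
    /*package*/ SimpleStatusStreamSketch(Dispatcher dispatcher, InputStream stream, Configuration conf) throws IOException {
        super(dispatcher, stream, conf);
    }
    @Override
    public void next(StatusListener listener) throws TwitterException {
        // Wrap the single listener; StatusListener extends StreamListener.
        handleNextElement(new StreamListener[]{listener}, new RawStreamListener[0]);
    }
    @Override
    public void next(StreamListener[] listeners, RawStreamListener[] rawStreamListeners) throws TwitterException {
        handleNextElement(listeners, rawStreamListeners);
    }
}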
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.nbandroid.netbeans.gradle.v2.adb.nodes.actions;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.ConnectException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.SwingUtilities;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import org.nbandroid.netbeans.gradle.v2.adb.nodes.DevicesNode;
import org.nbandroid.netbeans.gradle.v2.sdk.AndroidSdkProvider;
import org.netbeans.modules.dlight.terminal.action.TerminalSupportImpl;
import org.netbeans.modules.dlight.terminal.ui.TerminalContainerTopComponent;
import org.netbeans.modules.nativeexecution.api.ExecutionEnvironment;
import org.netbeans.modules.nativeexecution.api.ExecutionEnvironmentFactory;
import org.netbeans.modules.nativeexecution.api.HostInfo;
import org.netbeans.modules.nativeexecution.api.NativeProcess;
import org.netbeans.modules.nativeexecution.api.NativeProcessBuilder;
import org.netbeans.modules.nativeexecution.api.execution.NativeExecutionDescriptor;
import org.netbeans.modules.nativeexecution.api.execution.NativeExecutionService;
import org.netbeans.modules.nativeexecution.api.pty.PtySupport;
import org.netbeans.modules.nativeexecution.api.util.ConnectionManager;
import org.netbeans.modules.nativeexecution.api.util.HostInfoUtils;
import org.openide.DialogDisplayer;
import org.openide.NotifyDescriptor;
import org.openide.awt.ActionID;
import org.openide.awt.ActionReference;
import org.openide.awt.ActionReferences;
import org.openide.awt.ActionRegistration;
import org.openide.nodes.Node;
import org.openide.util.Exceptions;
import org.openide.util.HelpCtx;
import org.openide.util.Lookup;
import org.openide.util.NbBundle;
import org.openide.util.RequestProcessor;
import org.openide.util.WeakListeners;
import org.openide.util.actions.NodeAction;
import org.openide.windows.IOContainer;
import org.openide.windows.IOProvider;
import org.openide.windows.InputOutput;
import org.openide.windows.OutputEvent;
import org.openide.windows.OutputListener;
import org.openide.windows.OutputWriter;
import org.openide.windows.WindowManager;
/**
*
* @author arsi
*/
@ActionID(
category = "ADB/MobileDevice",
id = "org.nbandroid.netbeans.gradle.v2.adb.nodes.actions.AndroidShellAdbAction"
)
@ActionRegistration(
displayName = "", lazy = false
)
@ActionReferences({
@ActionReference(path = "Android/ADB/MobileDevice", position = 9970),
@ActionReference(path = "Android/ADB/EmulatorDevice", position = 9970),})
public class AndroidShellAdbAction extends NodeAction {
public static final ExecutorService POOL = Executors.newCachedThreadPool();
private static final RequestProcessor RP = new RequestProcessor("Terminal Action RP", 100); // NOI18N
private static final ClassLoader CLASS_LOADER = Lookup.getDefault().lookup(ClassLoader.class);
@Override
protected void performAction(Node[] activatedNodes) {
Runnable runnable = new Runnable() {
@Override
public void run() {
for (Node activatedNode : activatedNodes) {
DevicesNode.MobileDeviceHolder holder = activatedNode.getLookup().lookup(DevicesNode.MobileDeviceHolder.class);
final TerminalContainerTopComponent instance = TerminalContainerTopComponent.findInstance();
instance.open();
instance.requestActive();
final IOContainer ioContainer = instance.getIOContainer();
final IOProvider term = IOProvider.get("Terminal"); // NOI18N
if (term != null) {
final ExecutionEnvironment env = ExecutionEnvironmentFactory.getLocal();
if (env != null) {
openTerminalImpl(ioContainer, env, null, false, false, 0, holder);
}
}
}
}
};
WindowManager.getDefault().invokeWhenUIReady(runnable);
}
@Override
protected boolean enable(Node[] activatedNodes) {
for (Node activatedNode : activatedNodes) {
DevicesNode.MobileDeviceHolder holder = activatedNode.getLookup().lookup(DevicesNode.MobileDeviceHolder.class);
if (holder == null) {
return false;
}
}
return true;
}
@Override
public String getName() {
return "Shell";
}
@Override
public HelpCtx getHelpCtx() {
return null;
}
/**
 * Hack of TerminalSupportImpl.openTerminalImpl, adapted to open an adb shell for the selected device
*
* @param ioContainer
* @param env
* @param dir
* @param silentMode
* @param pwdFlag
* @param termId
* @param holder
*/
public static void openTerminalImpl(
final IOContainer ioContainer,
final ExecutionEnvironment env,
final String dir,
final boolean silentMode,
final boolean pwdFlag,
final long termId,
final DevicesNode.MobileDeviceHolder holder) {
final IOProvider ioProvider = IOProvider.get("Terminal"); // NOI18N
if (ioProvider != null) {
final AtomicReference<InputOutput> ioRef = new AtomicReference<>();
InputOutput io = ioProvider.getIO(holder.getSerialNumber(), null, ioContainer);
ioRef.set(io);
final AtomicBoolean destroyed = new AtomicBoolean(false);
final Runnable runnable = new Runnable() {
private final Runnable delegate = new Runnable() {
@Override
public void run() {
if (SwingUtilities.isEventDispatchThread()) {
ioContainer.requestActive();
} else {
doWork();
}
}
};
@Override
public void run() {
delegate.run();
}
private final HyperlinkAdapter retryLink = new HyperlinkAdapter() {
@Override
public void outputLineAction(OutputEvent ev) {
RP.post(delegate);
}
};
private void doWork() {
boolean verbose = env.isRemote(); // can use silentMode instead
OutputWriter out = ioRef.get().getOut();
if (!ConnectionManager.getInstance().isConnectedTo(env)) {
try {
if (verbose) {
out.println(NbBundle.getMessage(TerminalSupportImpl.class, "LOG_ConnectingTo", env.getDisplayName()));
}
ConnectionManager.getInstance().connectTo(env);
} catch (IOException ex) {
if (!destroyed.get()) {
if (verbose) {
try {
out.print(NbBundle.getMessage(TerminalSupportImpl.class, "LOG_ConnectionFailed"));
out.println(NbBundle.getMessage(TerminalSupportImpl.class, "LOG_Retry"), retryLink);
} catch (IOException ignored) {
}
}
String error = ex.getCause() == null ? ex.getMessage() : ex.getCause().getMessage();
String msg = NbBundle.getMessage(TerminalSupportImpl.class, "TerminalAction.FailedToStart.text", error); // NOI18N
DialogDisplayer.getDefault().notify(new NotifyDescriptor.Message(msg, NotifyDescriptor.ERROR_MESSAGE));
}
return;
} catch (ConnectionManager.CancellationException ex) {
if (verbose) {
try {
out.print(NbBundle.getMessage(TerminalSupportImpl.class, "LOG_Canceled"));
out.println(NbBundle.getMessage(TerminalSupportImpl.class, "LOG_Retry"), retryLink);
} catch (IOException ignored) {
}
}
return;
}
}
final HostInfo hostInfo;
try {
if (!ConnectionManager.getInstance().isConnectedTo(env)) {
return;
}
try {
if (dir != null && !HostInfoUtils.directoryExists(env, dir)) {
out.print(NbBundle.getMessage(TerminalSupportImpl.class, "LOG_DirNotExist", dir, env.getDisplayName()));
return;
}
} catch (ConnectException | InterruptedException ex) {
Exceptions.printStackTrace(ex);
}
hostInfo = HostInfoUtils.getHostInfo(env);
boolean isSupported = PtySupport.isSupportedFor(env);
if (!isSupported && !(hostInfo.getOSFamily() == HostInfo.OSFamily.WINDOWS)) {
//it might work under windows ;)
if (!silentMode) {
String message;
if (hostInfo.getOSFamily() == HostInfo.OSFamily.WINDOWS) {
message = NbBundle.getMessage(TerminalSupportImpl.class, "LocalTerminalNotSupported.error.nocygwin"); // NOI18N
} else {
message = NbBundle.getMessage(TerminalSupportImpl.class, "LocalTerminalNotSupported.error"); // NOI18N
}
NotifyDescriptor nd = new NotifyDescriptor.Message(message, NotifyDescriptor.INFORMATION_MESSAGE);
DialogDisplayer.getDefault().notify(nd);
}
return;
}
} catch (IOException | ConnectionManager.CancellationException ex) {
Exceptions.printStackTrace(ex);
return;
}
try {
NativeProcessBuilder npb = NativeProcessBuilder.newProcessBuilder(env);
npb.getEnvironment().put("LD_LIBRARY_PATH", "");// NOI18N
npb.getEnvironment().put("DYLD_LIBRARY_PATH", "");// NOI18N
npb.addNativeProcessListener(new NativeProcessListener(ioRef.get(), destroyed));
if (dir != null) {
npb.setWorkingDirectory(dir);
}
//override to call adb executable
npb.setExecutable(AndroidSdkProvider.getAdbPath());
npb.setArguments("-s", holder.getMasterDevice().getSerialNumber(), "shell");
NativeExecutionDescriptor descr;
descr = new NativeExecutionDescriptor().controllable(true).frontWindow(true).inputVisible(true).inputOutput(ioRef.get());
descr.postExecution(new Runnable() {
@Override
public void run() {
ioRef.get().closeInputOutput();
}
});
NativeExecutionService es = NativeExecutionService.newService(npb, descr, "Terminal Emulator"); // NOI18N
Future<Integer> result = es.run();
// ask terminal to become active
SwingUtilities.invokeLater(this);
try {
Integer rc = result.get(10, TimeUnit.SECONDS);
if (rc != 0) {
Logger.getLogger(TerminalSupportImpl.class.getName())
.log(Level.INFO, "{0}{1}", new Object[]{NbBundle.getMessage(TerminalSupportImpl.class, "LOG_ReturnCode"), rc});
}
} catch (java.util.concurrent.TimeoutException ex) {
} catch (InterruptedException ex) {
Exceptions.printStackTrace(ex);
} catch (ExecutionException ex) {
if (!destroyed.get()) {
String error = ex.getCause() == null ? ex.getMessage() : ex.getCause().getMessage();
String msg = NbBundle.getMessage(TerminalSupportImpl.class, "TerminalAction.FailedToStart.text", error); // NOI18N
DialogDisplayer.getDefault().notify(new NotifyDescriptor.Message(msg, NotifyDescriptor.ERROR_MESSAGE));
}
}
} catch (java.util.concurrent.CancellationException ex) {
Exceptions.printStackTrace(ex);
reportInIO(ioRef.get(), ex);
}
}
private void reportInIO(InputOutput io, Exception ex) {
if (io != null && ex != null) {
io.getErr().print(ex.getLocalizedMessage());
}
}
};
RP.post(runnable);
}
}
private final static class NativeProcessListener implements ChangeListener, PropertyChangeListener {
private final AtomicReference<NativeProcess> processRef;
private final AtomicBoolean destroyed;
public NativeProcessListener(InputOutput io, AtomicBoolean destroyed) {
assert destroyed != null;
this.destroyed = destroyed;
this.processRef = new AtomicReference<>();
try {
Class<?> iONotifier = CLASS_LOADER.loadClass("org.netbeans.modules.terminal.api.IONotifier");
Method m = iONotifier.getDeclaredMethod("addPropertyChangeListener", InputOutput.class, PropertyChangeListener.class);
m.invoke(null, io, WeakListeners.propertyChange(NativeProcessListener.this, io));
} catch (ClassNotFoundException | NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) {
Exceptions.printStackTrace(ex);
}
}
@Override
public void stateChanged(ChangeEvent e) {
NativeProcess process = processRef.get();
if (process == null && e.getSource() instanceof NativeProcess) {
processRef.compareAndSet(null, (NativeProcess) e.getSource());
}
}
@Override
public void propertyChange(PropertyChangeEvent evt) {
if ("IOVisibility.PROP_VISIBILITY".equals(evt.getPropertyName()) && Boolean.FALSE.equals(evt.getNewValue())) {
if (destroyed.compareAndSet(false, true)) {
final NativeProcess proc = processRef.get();
if (proc != null) {
RP.submit(new Runnable() {
@Override
public void run() {
try {
proc.destroy();
} catch (Throwable th) {
}
}
});
}
}
}
}
}
private static class HyperlinkAdapter implements OutputListener {
@Override
public void outputLineSelected(OutputEvent ev) {
}
@Override
public void outputLineAction(OutputEvent ev) {
}
@Override
public void outputLineCleared(OutputEvent ev) {
}
}
}
|
|
package ai.subut.kurjun.web.controllers;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.io.FilenameUtils;
import com.google.common.base.Optional;
import com.google.common.io.ByteStreams;
import com.google.common.io.Files;
import com.google.inject.Inject;
import ninja.AssetsController;
import ninja.Context;
import ninja.Renderable;
import ninja.Result;
import ninja.Results;
import ninja.params.PathParam;
import ninja.utils.HttpCacheToolkit;
import ninja.utils.MimeTypes;
import ninja.utils.NinjaProperties;
import ninja.utils.ResponseStreams;
public class DownloadController {
private final static Logger LOG = LoggerFactory.getLogger( AssetsController.class );
public final static String ASSETS_DIR = "assets";
public final static String FILENAME_PATH_PARAM = "fileName";
/** Used for dev mode streaming directly from src dir without jetty reload. */
final String srcDir =
System.getProperty( "user.dir" ) + File.separator + "src" + File.separator + "main" + File.separator
+ "java";
private final String defaultAssetBaseDir;
private Optional<String> assetBaseDir;
private final MimeTypes mimeTypes;
private final HttpCacheToolkit httpCacheToolkit;
private final NinjaProperties ninjaProperties;
@Inject
public DownloadController( HttpCacheToolkit httpCacheToolkit, MimeTypes mimeTypes, NinjaProperties ninjaProperties )
{
this.httpCacheToolkit = httpCacheToolkit;
this.mimeTypes = mimeTypes;
this.ninjaProperties = ninjaProperties;
this.assetBaseDir = getNormalizedAssetPath( ninjaProperties );
this.defaultAssetBaseDir = srcDir + File.separator + ASSETS_DIR + File.separator;
}
/**
* Serves resources from the assets directory of your application.
*
 * For instance, with the route /robots.txt, a request to /robots.txt will be served from /assets/robots.txt.
 *
 * You can also use a wildcard route such as /assets/{fileName: .*} to serve files:
 * it matches /assets/app/app.css and will return /assets/app/app.css (from your jar).
*/
public Result serveStatic(Context context , @PathParam( "fileName" ) String fileType)
{
assetBaseDir = getNormalizedAssetPath( ninjaProperties, fileType );
Object renderable = new Renderable()
{
@Override
public void render( Context context, Result result )
{
String fileName = getFileNameFromPathOrReturnRequestPath( context );
URL url = getStaticFileFromAssetsDir( context, fileName );
streamOutUrlEntity( url, context, result );
}
};
return Results.ok().render( renderable );
}
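    /*
     * Illustrative wiring sketch (an assumption, not part of the original source): in a Ninja
     * application this controller is typically bound in conf/Routes.java, for example:
     *
     *   router.GET().route("/assets/{fileName: .*}").with(DownloadController.class, "serveStatic");
     *
     * so that a request for /assets/app/app.css reaches serveStatic() above with fileName=app/app.css.
     */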
private void streamOutUrlEntity( URL url, Context context, Result result )
{
// check if stream exists. if not print a notfound exception
if ( url == null )
{
context.finalizeHeadersWithoutFlashAndSessionCookie( Results.notFound() );
}
else
{
try
{
URLConnection urlConnection = url.openConnection();
Long lastModified = urlConnection.getLastModified();
httpCacheToolkit.addEtag( context, result, lastModified );
if ( result.getStatusCode() == Result.SC_304_NOT_MODIFIED )
{
// Do not stream anything out. Simply return 304
context.finalizeHeadersWithoutFlashAndSessionCookie( result );
}
else
{
result.status( 200 );
// Try to set the mimetype:
String mimeType = mimeTypes.getContentType( context, url.getFile() );
if ( mimeType != null && !mimeType.isEmpty() )
{
result.contentType( mimeType );
}
// finalize headers:
ResponseStreams responseStreams = context.finalizeHeadersWithoutFlashAndSessionCookie( result );
try (
InputStream inputStream = urlConnection.getInputStream();
OutputStream outputStream = responseStreams.getOutputStream() )
{
ByteStreams.copy( inputStream, outputStream );
}
}
}
catch ( FileNotFoundException e )
{
LOG.error( "error streaming file", e );
}
catch ( IOException e )
{
LOG.error( "error streaming file", e );
}
}
}
/**
 * Loads files from the assets directory. This is Ninja's default location for static content,
 * usually src/main/java/assets/. If the user wants to use a directory outside of the application
 * project, the base directory can be overridden via static.asset.base.dir in the application conf file.
*/
private URL getStaticFileFromAssetsDir( Context context, String fileName )
{
String finalNameWithoutLeadingSlash = normalizePathWithoutTrailingSlash( fileName );
Optional<URL> url = Optional.absent();
//Serve from the static asset base directory specified by user in application conf.
if ( assetBaseDir.isPresent() )
{
String p = assetBaseDir.get();
String fileSeparator = File.separator;
File possibleFile = new File( p + fileSeparator + finalNameWithoutLeadingSlash );
if ( possibleFile.exists() )
{
url = getUrlForFile( possibleFile );
}
}
        // If no asset base dir is specified by the user, this streams assets directly from the src directory
        // in dev mode, so Jetty does not have to reload. Especially useful when developing JS apps inside the assets folder.
if ( ninjaProperties.isDev() && !url.isPresent() )
{
File possibleFile = new File( defaultAssetBaseDir + finalNameWithoutLeadingSlash );
if ( possibleFile.exists() )
{
url = getUrlForFile( possibleFile );
}
}
if ( !url.isPresent() )
{
            // In test and prod mode, if static.asset.base.dir is not specified, we stream via the classloader.
            //
            // In dev mode: if we cannot find the file in src, we also look for the file
            // on the classpath (which can be the case for plugins that ship their own assets).
url = Optional.fromNullable(
this.getClass().getClassLoader().getResource( ASSETS_DIR + "/" + finalNameWithoutLeadingSlash ) );
}
return url.orNull();
}
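    /*
     * Configuration sketch (an assumption, not part of the original source): to serve assets from a
     * directory outside the project, the application conf file can contain a line such as
     *
     *   static.asset.base.dir=/srv/kurjun/assets
     *
     * where the path is purely illustrative; without it, the defaults described above apply.
     */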
private Optional<URL> getUrlForFile( File possibleFileInSrc )
{
try
{
return Optional.fromNullable( possibleFileInSrc.toURI().toURL() );
}
catch ( MalformedURLException malformedURLException )
{
LOG.error( "Error in dev mode while streaming files from src dir. ", malformedURLException );
}
return Optional.absent();
}
/**
 * If we get, for whatever reason, a relative path like assets/../conf/application.conf, we expand it to the
 * "real" path; in the example above that is conf/application.conf.
 *
 * The assets prefix should then be added afterwards.
 *
 * Otherwise an attacker could craft such a path and read arbitrary resources of our app. Expanding and
 * normalizing the incoming path makes this impossible.
*
* @param fileName A potential "fileName"
*
* @return A normalized fileName.
*/
public String normalizePathWithoutTrailingSlash( String fileName )
{
// We need simplifyPath to remove relative paths before we process it.
// Otherwise an attacker can read out arbitrary urls via ".."
String fileNameNormalized = Files.simplifyPath( fileName );
if ( fileNameNormalized.charAt( 0 ) == '/' )
{
return fileNameNormalized.substring( 1 );
}
return fileNameNormalized;
}
public static String getFileNameFromPathOrReturnRequestPath( Context context )
{
String fileName = context.getPathParameter( FILENAME_PATH_PARAM );
if ( fileName == null )
{
fileName = context.getRequestPath();
}
return fileName;
}
private Optional<String> getNormalizedAssetPath( NinjaProperties ninjaProperties )
{
return Optional.fromNullable( FilenameUtils.normalizeNoEndSeparator( ASSETS_DIR ) );
}
private Optional<String> getNormalizedAssetPath( NinjaProperties ninjaProperties, String fileType )
{
return Optional.fromNullable( FilenameUtils.normalizeNoEndSeparator( ASSETS_DIR ) );
}
}
|
|
/*
Copyright (c) 2016 Sebastian Schmidt
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
package de.swm.nis.logicaldecoding.parser.domain;
/**
 * Represents a single cell inside a database table: the entry in a column at a specific row.
* Contains name, type and value information.
* @author Schmidt.Sebastian2
*
*/
public class Cell {
public enum Type {
text(true, true),
varchar(true, true),
character(true, false),
bool(false, false),
integer(false, false),
bigint(false, false),
real(false, false),
doubleprec(false, false),
numeric(false, false),
date(true, true),
timestamp(true, false),
timestamptz(true, false),
interval(true, false),
geometry(true, true),
json(false, false),
jsonb(false, false),
tsvector(true, false),
uuid(true, false);
private boolean quotedInJson;
private boolean quotedInInput;
private Type(boolean quotedInJson, boolean quotedInInput) {
this.quotedInJson = quotedInJson;
this.quotedInInput = quotedInInput;
}
boolean isQuotedInJson() {
return quotedInJson;
}
boolean isQuotedInInput() {
return quotedInInput;
        }
    }
private String name;
private String value;
private Type type;
public Cell() {
}
public Cell(Cell.Type type, String name, String value) {
this.value = value;
this.type = type;
this.name = name;
unquoteStrings();
}
public Cell(String type, String name, String value) {
this.name = name;
setType(type);
this.value = value;
unquoteStrings();
}
    private void unquoteStrings() {
        // Guard against null values and the literal "null" before stripping the surrounding quotes.
        if (value != null && !value.equals("null")) {
            if (this.type.isQuotedInInput()) {
                int length = value.length();
                value = value.substring(1, length - 1);
            }
        }
    }
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
public Type getType() {
return type;
}
public void setType(Type type) {
this.type = type;
}
public void setType(String typeAsString) {
switch(typeAsString)
{
case "boolean": {
this.type = Type.valueOf("bool");
break;
}
case "double precision": {
this.type = Type.valueOf("doubleprec");
break;
}
case "character varying": {
this.type = Type.valueOf("varchar");
break;
}
case "timestamp without time zone": {
this.type = Type.valueOf("timestamp");
break;
}
case "timestamp with time zone": {
this.type = Type.valueOf("timestamptz");
break;
}
default:
this.type = Type.valueOf(typeAsString);
}
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
    public String getJson() {
        String jsonKey = "\"" + getName() + "\": ";
        String jsonValue = getValue();
        if (getType().isQuotedInJson()) {
            jsonValue = "\"" + getValue() + "\"";
        }
        if (getValue().equals("null")) {
            jsonValue = "null";
        }
        return jsonKey + jsonValue;
    }
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((name == null) ? 0 : name.hashCode());
result = prime * result + ((type == null) ? 0 : type.hashCode());
result = prime * result + ((value == null) ? 0 : value.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
Cell other = (Cell) obj;
if (name == null) {
if (other.name != null)
return false;
} else if (!name.equals(other.name))
return false;
if (type != other.type)
return false;
if (value == null) {
if (other.value != null)
return false;
} else if (!value.equals(other.value))
return false;
return true;
}
@Override
public String toString() {
return "Cell [name=" + name + ", type=" + type + ", value=" + value + "]";
}
}
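/*
 * A minimal usage sketch (not part of the original source; class name and values are illustrative):
 * shows how a cell built from a Postgres type string is unquoted on input and serialized by getJson().
 */
class CellJsonDemo {
    public static void main(String[] args) {
        // "character varying" is mapped to Type.varchar, which is quoted in the input and in JSON.
        Cell cell = new Cell("character varying", "city", "'Berlin'");
        // Prints: "city": "Berlin"
        System.out.println(cell.getJson());
    }
}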
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.hadoop;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;
import java.util.UUID;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class SolrOutputFormat<K, V> extends FileOutputFormat<K, V> {
private static final Logger LOG = LoggerFactory.getLogger(SolrOutputFormat.class);
/**
* The parameter used to pass the solr config zip file information. This will
* be the hdfs path to the configuration zip file
*/
public static final String SETUP_OK = "solr.output.format.setup";
/** The key used to pass the zip file name through the configuration. */
public static final String ZIP_NAME = "solr.zip.name";
/**
* The base name of the zip file containing the configuration information.
* This file is passed via the distributed cache using a unique name, obtained
* via {@link #getZipName(Configuration jobConf)}.
*/
public static final String ZIP_FILE_BASE_NAME = "solr.zip";
/**
 * The key used to pass the boolean configuration parameter that selects
 * regular directory or zip file output
*/
public static final String OUTPUT_ZIP_FILE = "solr.output.zip.format";
static int defaultSolrWriterThreadCount = 0;
public static final String SOLR_WRITER_THREAD_COUNT = "solr.record.writer.num.threads";
static int defaultSolrWriterQueueSize = 1;
public static final String SOLR_WRITER_QUEUE_SIZE = "solr.record.writer.max.queues.size";
static int defaultSolrBatchSize = 20;
public static final String SOLR_RECORD_WRITER_BATCH_SIZE = "solr.record.writer.batch.size";
public static final String SOLR_RECORD_WRITER_MAX_SEGMENTS = "solr.record.writer.maxSegments";
public static String getSetupOk() {
return SETUP_OK;
}
  /** Set the number of threads used for index writing. */
public static void setSolrWriterThreadCount(int count, Configuration conf) {
conf.setInt(SOLR_WRITER_THREAD_COUNT, count);
}
  /** Get the number of threads used for index writing. */
public static int getSolrWriterThreadCount(Configuration conf) {
return conf.getInt(SOLR_WRITER_THREAD_COUNT, defaultSolrWriterThreadCount);
}
/**
 * Set the maximum size of the queue for documents to be written to the
* index.
*/
public static void setSolrWriterQueueSize(int count, Configuration conf) {
conf.setInt(SOLR_WRITER_QUEUE_SIZE, count);
}
/** Return the maximum size for the number of documents pending index writing. */
public static int getSolrWriterQueueSize(Configuration conf) {
return conf.getInt(SOLR_WRITER_QUEUE_SIZE, defaultSolrWriterQueueSize);
}
/**
* Return the file name portion of the configuration zip file, from the
* configuration.
*/
public static String getZipName(Configuration conf) {
return conf.get(ZIP_NAME, ZIP_FILE_BASE_NAME);
}
/**
 * Configure the job to output zip files of the output index, or full
 * directory trees. Zip files are about 1/5th the size of the raw index, and
 * much faster to write, but take more CPU to create.
 *
 * @param output true if zip files should be output
* @param conf to use
*/
public static void setOutputZipFormat(boolean output, Configuration conf) {
conf.setBoolean(OUTPUT_ZIP_FILE, output);
}
/**
 * Return true if the output should be a zip file of the index, rather than
 * the raw index.
*
* @param conf to use
* @return true if output zip files is on
*/
public static boolean isOutputZipFormat(Configuration conf) {
return conf.getBoolean(OUTPUT_ZIP_FILE, false);
}
public static String getOutputName(JobContext job) {
return FileOutputFormat.getOutputName(job);
}
@Override
public void checkOutputSpecs(JobContext job) throws IOException {
super.checkOutputSpecs(job);
if (job.getConfiguration().get(SETUP_OK) == null) {
throw new IOException("Solr home cache not set up!");
}
}
@Override
public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context) throws IOException, InterruptedException {
Utils.getLogConfigFile(context.getConfiguration());
Path workDir = getDefaultWorkFile(context, "");
int batchSize = getBatchSize(context.getConfiguration());
return new SolrRecordWriter<>(context, workDir, batchSize);
}
public static void setupSolrHomeCache(File solrHomeDir, Job job) throws IOException{
File solrHomeZip = createSolrHomeZip(solrHomeDir);
addSolrConfToDistributedCache(job, solrHomeZip);
}
public static File createSolrHomeZip(File solrHomeDir) throws IOException {
return createSolrHomeZip(solrHomeDir, false);
}
private static File createSolrHomeZip(File solrHomeDir, boolean safeToModify) throws IOException {
if (solrHomeDir == null || !(solrHomeDir.exists() && solrHomeDir.isDirectory())) {
throw new IOException("Invalid solr home: " + solrHomeDir);
}
File solrHomeZip = File.createTempFile("solr", ".zip");
createZip(solrHomeDir, solrHomeZip);
return solrHomeZip;
}
public static void addSolrConfToDistributedCache(Job job, File solrHomeZip)
throws IOException {
// Make a reasonably unique name for the zip file in the distributed cache
// to avoid collisions if multiple jobs are running.
String hdfsZipName = UUID.randomUUID().toString() + '.'
+ ZIP_FILE_BASE_NAME;
Configuration jobConf = job.getConfiguration();
jobConf.set(ZIP_NAME, hdfsZipName);
Path zipPath = new Path("/tmp", getZipName(jobConf));
FileSystem fs = FileSystem.get(jobConf);
fs.copyFromLocalFile(new Path(solrHomeZip.toString()), zipPath);
final URI baseZipUrl = fs.getUri().resolve(
zipPath.toString() + '#' + getZipName(jobConf));
DistributedCache.addCacheArchive(baseZipUrl, jobConf);
LOG.debug("Set Solr distributed cache: {}", Arrays.asList(job.getCacheArchives()));
LOG.debug("Set zipPath: {}", zipPath);
// Actually send the path for the configuration zip file
jobConf.set(SETUP_OK, zipPath.toString());
}
private static void createZip(File dir, File out) throws IOException {
HashSet<File> files = new HashSet<>();
// take only conf/ and lib/
for (String allowedDirectory : SolrRecordWriter
.getAllowedConfigDirectories()) {
File configDir = new File(dir, allowedDirectory);
boolean configDirExists;
      // If the directory does not exist and is required, bail out
if (!(configDirExists = configDir.exists())
&& SolrRecordWriter.isRequiredConfigDirectory(allowedDirectory)) {
throw new IOException(String.format(Locale.ENGLISH,
"required configuration directory %s is not present in %s",
allowedDirectory, dir));
}
if (!configDirExists) {
continue;
}
listFiles(configDir, files); // Store the files in the existing, allowed
// directory configDir, in the list of files
// to store in the zip file
}
Files.deleteIfExists(out.toPath());
int subst = dir.toString().length();
ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(out));
byte[] buf = new byte[1024];
for (File f : files) {
ZipEntry ze = new ZipEntry(f.toString().substring(subst));
zos.putNextEntry(ze);
InputStream is = new FileInputStream(f);
int cnt;
while ((cnt = is.read(buf)) >= 0) {
zos.write(buf, 0, cnt);
}
is.close();
zos.flush();
zos.closeEntry();
}
ZipEntry ze = new ZipEntry("solr.xml");
zos.putNextEntry(ze);
zos.write("<cores><core name=\"collection1\" instanceDir=\".\"/></cores>".getBytes(StandardCharsets.UTF_8));
zos.flush();
zos.closeEntry();
zos.close();
}
private static void listFiles(File dir, Set<File> files) throws IOException {
File[] list = dir.listFiles();
if (list == null && dir.isFile()) {
files.add(dir);
return;
}
for (File f : list) {
if (f.isFile()) {
files.add(f);
} else {
listFiles(f, files);
}
}
}
public static int getBatchSize(Configuration jobConf) {
return jobConf.getInt(SolrOutputFormat.SOLR_RECORD_WRITER_BATCH_SIZE,
defaultSolrBatchSize);
}
public static void setBatchSize(int count, Configuration jobConf) {
jobConf.setInt(SOLR_RECORD_WRITER_BATCH_SIZE, count);
}
}
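/*
 * Illustrative driver sketch (not part of the original source; paths and sizes are assumptions):
 * shows how the static helpers above are typically combined when configuring an indexing job.
 */
class SolrOutputFormatUsageSketch {
    static Job configureJob(Configuration conf, File solrHomeDir) throws IOException {
        Job job = Job.getInstance(conf);
        // Zip the solr home (conf/ and lib/) and ship it to the tasks via the distributed cache.
        SolrOutputFormat.setupSolrHomeCache(solrHomeDir, job);
        // Emit zipped indexes: roughly 1/5th the size of the raw index, but more CPU to create.
        SolrOutputFormat.setOutputZipFormat(true, job.getConfiguration());
        // Send documents to the record writer in batches of 100.
        SolrOutputFormat.setBatchSize(100, job.getConfiguration());
        job.setOutputFormatClass(SolrOutputFormat.class);
        return job;
    }
}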
|
|
/*******************************************************************************
* <copyright>
*
* Copyright (c) 2005, 2010 SAP AG.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* SAP AG - initial API, implementation and documentation
*
* </copyright>
*
*******************************************************************************/
package org.activiti.designer.eclipse.util;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.eclipse.bpmn2.BaseElement;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IFolder;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.graphiti.mm.pictograms.Diagram;
import org.eclipse.graphiti.mm.pictograms.PictogramElement;
/**
* Collection of general static helper methods.
*/
public class Util {
private static final String DIAGRAM_NAME_PATTERN = "name=\"%s\"";
private static final String DIAGRAM_NAME_DEFAULT = String.format(DIAGRAM_NAME_PATTERN, "my_bpmn2_diagram");
/**
 * Moves the object at the source index of the list to the target index of
* the list and returns the moved object.
*
* @param targetIndex
* the new position for the object in the list.
* @param sourceIndex
* the old position of the object in the list.
* @return the moved object.
* @exception IndexOutOfBoundsException
* if either index isn't within the size range.
*/
public static Object moveElementInList(List<Object> list, int targetIndex, int sourceIndex) {
if (targetIndex >= list.size() || targetIndex < 0)
throw new IndexOutOfBoundsException("targetIndex=" + targetIndex + ", size=" + list.size()); //$NON-NLS-1$ //$NON-NLS-2$
if (sourceIndex >= list.size() || sourceIndex < 0)
throw new IndexOutOfBoundsException("sourceIndex=" + sourceIndex + ", size=" + list.size()); //$NON-NLS-1$ //$NON-NLS-2$
Object object = list.get(sourceIndex);
if (targetIndex != sourceIndex) {
list.remove(sourceIndex);
list.add(targetIndex, object);
}
return object;
}
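  /*
   * Illustrative example (not part of the original source): for a list [a, b, c],
   * moveElementInList(list, 2, 0) moves "a" from index 0 to index 2, leaving [b, c, a], and returns "a".
   */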
/**
* Returns true, if the given objects equal, while null is also a valid value.
* In detail the check is: (o1 == null && o2 == null) || (o1.equals(o2)).
*
* @param o1
* The first Object to compare.
* @param o2
* The second Object to compare.
* @return true, if the given objects equal, while null is also a valid value.
*/
public static boolean equalsWithNull(Object o1, Object o2) {
if (o1 == null && o2 == null)
return true;
if (o1 == null || o2 == null)
return false;
return o1.equals(o2);
}
public static BaseElement[] getAllBpmnElements(IProject project, ResourceSet rSet) {
// FIXME: always unload to have our resources refreshed, this is highly
// non-performant
EList<Resource> resources = rSet.getResources();
for (Resource resource : resources) {
resource.unload();
}
IFolder folder = project.getFolder("src");
IFolder folderDiagrams = project.getFolder("src/diagrams");
Collection<Diagram> diagrams = new ArrayList<Diagram>();
Set<BaseElement> bpmnElements = new HashSet<BaseElement>();
if (folder.exists()) {
List<IResource> membersList = new ArrayList<IResource>();
try {
membersList.addAll(Arrays.asList(folder.members()));
membersList.addAll(Arrays.asList(folderDiagrams.members()));
} catch (CoreException e) {
return new BaseElement[0];
}
for (IResource resource : membersList) {
if (resource instanceof IFile) {
IFile file = (IFile) resource;
if ("diagram".equals(file.getFileExtension()) || file.getName().equals("Predefined.data")) {
// The following call extracts the diagram from the
          // given file. For the Tutorial, diagrams always reside
// in a file of their own and are the first root object.
// This may of course be different in a concrete tool
// implementation, so tool builders should use their own
// way of retrieval here
Diagram diag = org.eclipse.graphiti.ui.internal.services.GraphitiUiInternal.getEmfService().getDiagramFromFile(file, rSet);
if (diag != null) {
diagrams.add(diag);
} else {
// The following call tries to retrieve a URI from
// any of the found files to check if there are any
// EClasses inside this file. Concrete tools should
// use their own logic to browse through their files
// (e.g. known by a special extension or residing in
// a special folder) instead of this generic logic.
URI uri = org.eclipse.graphiti.ui.internal.services.GraphitiUiInternal.getEmfService().getFileURI(file, rSet);
Resource fileResource = rSet.getResource(uri, true);
if (fileResource != null) {
EList<EObject> contents = fileResource.getContents();
for (EObject object : contents) {
if (object instanceof BaseElement && !(object instanceof PictogramElement)) {
bpmnElements.add((BaseElement) object);
}
}
}
}
}
}
}
}
for (Diagram diagram : diagrams) {
Resource resource = diagram.eResource();
if (resource == null)
return new BaseElement[0];
EList<EObject> contents = resource.getContents();
for (EObject object : contents) {
if (object instanceof BaseElement && !(object instanceof PictogramElement)) {
bpmnElements.add((BaseElement) object);
}
}
}
return bpmnElements.toArray(new BaseElement[bpmnElements.size()]);
}
public static InputStream getContentStream(final Content content) {
return Util.class.getClassLoader().getResourceAsStream(content.getContentPath());
}
public enum Content {
NEW_DIAGRAM_CONTENT("src/main/resources/content/new-diagram-content.xml"), NEW_SUBPROCESS_CONTENT("src/main/resources/content/new-subprocess-content.xml");
private final String contentPath;
private Content(String contentPath) {
this.contentPath = contentPath;
}
public String getContentPath() {
return contentPath;
}
}
/**
* Gets the {@link URI} where the diagram resource for a subprocess should be
* stored.
*
* @param diagram
* the parent diagram for the subprocess
* @param subprocessId
* the id of the subprocess
* @return the {@link URI} for the subprocess' resource
*/
public static final URI getSubProcessURI(Diagram diagram, String subprocessId) {
final URI baseURI = diagram.eResource().getURI().trimFileExtension();
final URI subProcessURI = baseURI.appendFileExtension(subprocessId).appendFileExtension(diagram.eResource().getURI().fileExtension());
return subProcessURI;
}
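  /*
   * Illustrative example (not part of the original source): for a parent diagram resource
   * my_process.diagram and subprocessId "sub1", the returned URI points to my_process.sub1.diagram.
   */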
/**
* Replaces the document name in the provided contentStream's content and
* returns a new stream containing the new content.
*
* @param diagramName
* the name of the document to use
* @param contentStream
* the original content stream
* @return
*/
public static InputStream swapStreamContents(final String diagramName, final InputStream contentStream) {
InputStream result = null;
try {
Writer writer = new StringWriter();
char[] buffer = new char[1024];
try {
Reader reader = new BufferedReader(new InputStreamReader(contentStream, "UTF-8"));
int n;
while ((n = reader.read(buffer)) != -1) {
writer.write(buffer, 0, n);
}
} finally {
contentStream.close();
}
String contentString = writer.toString();
contentString = contentString.replace(DIAGRAM_NAME_DEFAULT, String.format(DIAGRAM_NAME_PATTERN, diagramName));
result = new ByteArrayInputStream(contentString.getBytes());
} catch (Exception e) {
// TODO
}
return result;
}
}
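/*
 * Minimal usage sketch (not part of the original source; the class name is illustrative):
 * loads the bundled new-diagram template and renames the diagram inside it before it is written out.
 */
class UtilContentSketch {
    static InputStream newDiagramStream(String diagramName) {
        InputStream template = Util.getContentStream(Util.Content.NEW_DIAGRAM_CONTENT);
        // Replaces the default name="my_bpmn2_diagram" attribute with the given diagram name.
        return Util.swapStreamContents(diagramName, template);
    }
}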
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.planner.sql;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.apache.calcite.sql.type.SqlOperandTypeChecker;
import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlSyntax;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.type.SqlReturnTypeInference;
import org.apache.drill.exec.expr.fn.DrillFuncHolder;
public class DrillSqlOperator extends SqlFunction {
private final boolean isDeterministic;
private final boolean isNiladic;
private final List<DrillFuncHolder> functions;
/**
 * This constructor exists for legacy reasons.
 *
 * It is needed because Drill cannot access the DrillOperatorTable at the place where this constructor is called.
 * In principle, if Drill needs a DrillSqlOperator, it is supposed to obtain it from the DrillOperatorTable.
*/
@Deprecated
public DrillSqlOperator(final String name, final int argCount, final boolean isDeterministic, final boolean isNiladic) {
this(name,
argCount,
isDeterministic,
DynamicReturnType.INSTANCE,
isNiladic);
}
/**
 * This constructor exists for legacy reasons.
 *
 * It is needed because Drill cannot access the DrillOperatorTable at the place where this constructor is called.
 * In principle, if Drill needs a DrillSqlOperator, it is supposed to obtain it from the DrillOperatorTable.
*/
@Deprecated
public DrillSqlOperator(final String name, final int argCount, final boolean isDeterministic,
final SqlReturnTypeInference sqlReturnTypeInference, final boolean isNiladic) {
this(name,
new ArrayList<>(),
Checker.getChecker(argCount, argCount),
isDeterministic,
sqlReturnTypeInference,
isNiladic);
}
/**
 * This constructor exists for legacy reasons.
 *
 * It is needed because Drill cannot access the DrillOperatorTable at the place where this constructor is called.
 * In principle, if Drill needs a DrillSqlOperator, it is supposed to obtain it from the DrillOperatorTable.
*/
@Deprecated
public DrillSqlOperator(final String name, final int argCount, final boolean isDeterministic, final RelDataType type, final boolean isNiladic) {
this(name,
new ArrayList<>(),
Checker.getChecker(argCount, argCount),
isDeterministic, opBinding -> type, isNiladic);
}
protected DrillSqlOperator(String name, List<DrillFuncHolder> functions,
SqlOperandTypeChecker operandTypeChecker, boolean isDeterministic,
SqlReturnTypeInference sqlReturnTypeInference, boolean isNiladic) {
super(new SqlIdentifier(name, SqlParserPos.ZERO),
sqlReturnTypeInference,
null,
operandTypeChecker,
null,
SqlFunctionCategory.USER_DEFINED_FUNCTION);
this.functions = functions;
this.isDeterministic = isDeterministic;
this.isNiladic = isNiladic;
}
@Override
public boolean isDeterministic() {
return isDeterministic;
}
public boolean isNiladic() {
return isNiladic;
}
public List<DrillFuncHolder> getFunctions() {
return functions;
}
@Override
public SqlSyntax getSyntax() {
if(isNiladic) {
return SqlSyntax.FUNCTION_ID;
}
return super.getSyntax();
}
public static class DrillSqlOperatorBuilder {
private String name;
private final List<DrillFuncHolder> functions = Lists.newArrayList();
private int argCountMin = Integer.MAX_VALUE;
private int argCountMax = Integer.MIN_VALUE;
private boolean isDeterministic = true;
private boolean isNiladic = false;
private boolean isVarArg = false;
public DrillSqlOperatorBuilder setName(final String name) {
this.name = name;
return this;
}
public DrillSqlOperatorBuilder addFunctions(Collection<DrillFuncHolder> functions) {
this.functions.addAll(functions);
return this;
}
public DrillSqlOperatorBuilder setArgumentCount(final int argCountMin, final int argCountMax) {
this.argCountMin = Math.min(this.argCountMin, argCountMin);
this.argCountMax = Math.max(this.argCountMax, argCountMax);
return this;
}
public DrillSqlOperatorBuilder setVarArg(boolean isVarArg) {
this.isVarArg = isVarArg;
return this;
}
public DrillSqlOperatorBuilder setDeterministic(boolean isDeterministic) {
      /* By the logic here, we group the entire Collection into a single DrillSqlOperator,
       * and if any DrillFuncHolder is non-deterministic, we claim this DrillSqlOperator is non-deterministic.
       *
       * In fact, separating the DrillFuncHolders into two DrillSqlOperators
       * (one deterministic and the other non-deterministic) would not help, since DrillOperatorTable.lookupOperatorOverloads()
       * is not passed the parameter list. So even with two DrillSqlOperators, DrillOperatorTable.lookupOperatorOverloads()
       * does not have enough information to pick the one matching the argument list.
       */
if(this.isDeterministic) {
this.isDeterministic = isDeterministic;
}
return this;
}
public DrillSqlOperatorBuilder setNiladic(boolean isNiladic) {
/*
       * Sets the operand type-checking strategy for an operator which takes no operands and needs to be invoked
       * without parentheses. E.g.: session_id
       *
       * Niladic functions override columns that have the same name as a niladic function. Such columns cannot be
       * queried without table qualification. The value of the niladic function is returned when table
       * qualification is not used.
       *
       * For example, in the case of session_id:
*
* select session_id from <table> -> returns the value of niladic function session_id
* select t1.session_id from <table> t1 -> returns session_id column value from <table>
*
*/
this.isNiladic = isNiladic;
return this;
}
public DrillSqlOperator build() {
if(name == null || functions.isEmpty()) {
throw new AssertionError("The fields, name and functions, need to be set before building DrillSqlOperator");
}
return new DrillSqlOperator(
name,
functions,
isVarArg ? VarArgOperandTypeChecker.INSTANCE : Checker.getChecker(argCountMin, argCountMax),
isDeterministic,
TypeInferenceUtils.getDrillSqlReturnTypeInference(
name,
functions),
isNiladic);
}
}
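// A minimal usage sketch of the builder above (illustrative only; "myFunc" and "holders" are
// hypothetical placeholders for a function name and a Collection<DrillFuncHolder>):
//
//   DrillSqlOperator op = new DrillSqlOperator.DrillSqlOperatorBuilder()
//       .setName("myFunc")
//       .addFunctions(holders)
//       .setArgumentCount(1, 2)      // min/max are folded across repeated calls
//       .setDeterministic(true)
//       .setNiladic(false)
//       .build();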
}
|
|
package edu.tufts.cs.tripsharing;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v4.content.ContextCompat;
import android.support.v4.view.ViewPager;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.view.Gravity;
import android.view.View;
import android.widget.AdapterView;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.Toast;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.location.places.PlacePhotoMetadata;
import com.google.android.gms.location.places.PlacePhotoMetadataBuffer;
import com.google.android.gms.location.places.PlacePhotoMetadataResult;
import com.google.android.gms.location.places.Places;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.ValueEventListener;
import java.util.ArrayList;
import java.util.List;
/* The screen showing a search result, including its photos and comments. */
public class Info extends AppCompatActivity implements GoogleApiClient.OnConnectionFailedListener {
ViewPager viewPager;
LinearLayout slideDotspanel;
private int dotscount;
private ImageView[] dots;
private GoogleApiClient mGoogleApiClient;
ListView listView;
FirebaseDatabase database;
DatabaseReference myRef;
private FirebaseAuth firebaseAuth;
Context context;
String place_id;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_info);
firebaseAuth = FirebaseAuth.getInstance();
context = this;
place_id = getIntent().getExtras().getString("PlaceId");
database = FirebaseDatabase.getInstance();
myRef = database.getReference();
DatabaseReference place = myRef.child("Place").child(place_id);
final DatabaseReference user = myRef.child("users").
child(firebaseAuth.getCurrentUser().getUid().toString());
final DatabaseReference Comment = place.child("Comments");
viewPager = (ViewPager) findViewById(R.id.viewPager);
slideDotspanel = (LinearLayout) findViewById(R.id.SlideDots);
mGoogleApiClient = new GoogleApiClient
.Builder(this)
.addApi(Places.GEO_DATA_API)
.addApi(Places.PLACE_DETECTION_API)
.enableAutoManage(this, this)
.build();
placePhotosTask(place_id, viewPager, this);
Comment.addValueEventListener(new ValueEventListener() {
@Override
public void onDataChange(DataSnapshot dataSnapshot) {
List<String> usernames = new ArrayList<>();
List<String> contents = new ArrayList<>();
for(DataSnapshot comment : dataSnapshot.getChildren()) {
usernames.add(comment.child("user").getValue(String.class));
contents.add(comment.child("Content").getValue(String.class));
}
alert(usernames.size() + "");
final String[] userName = new String[usernames.size() + 1];
String[] comments = new String[contents.size() + 1];
for(int i = 0; i < usernames.size(); i++) {
userName[i] = usernames.get(i);
comments[i] = contents.get(i);
}
final CustomListAdapter listAdapter =
new CustomListAdapter(context, userName, comments);
listView = (ListView) findViewById(R.id.listView);
listView.setAdapter(listAdapter);
listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
if(i == listAdapter.getCount() - 1) {
AlertDialog.Builder mBuilder = new AlertDialog.Builder(Info.this);
View mView = getLayoutInflater().inflate(R.layout.dialog_comments, null);
final EditText editText = (EditText)mView.findViewById(R.id.editText);
Button btn = (Button) mView.findViewById(R.id.btnComment);
mBuilder.setView(mView);
final AlertDialog dialog = mBuilder.create();
dialog.show();
btn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
// Read the user once so the comment is not re-posted every time the user node changes.
user.addListenerForSingleValueEvent(new ValueEventListener() {
@Override
public void onDataChange(DataSnapshot dataSnapshot) {
String userName;
if (dataSnapshot.child("name").getValue() == null)
userName = "Anonymity";
else
userName = dataSnapshot.child("name").getValue().toString();
String comments = editText.getText().toString();
Comments c = new Comments(userName, comments);
String id = Comment.push().getKey();
Comment.child(id).setValue(c);
alert(comments);
dialog.dismiss();
}
@Override
public void onCancelled(DatabaseError databaseError) {
}
});
}
});
}
}
});
}
@Override
public void onCancelled(DatabaseError databaseError) {
alert(databaseError.toString());
}
});
}
public void viewOnClick(View v) {
Intent i = new Intent(getApplicationContext(), ShowDetail.class);
i.putExtra("PlaceId", place_id);
startActivity(i);
}
public void alert(String s) {
Toast toast = Toast.makeText(getApplicationContext(), s, Toast.LENGTH_LONG);
toast.setGravity(Gravity.CENTER, 0, 0);
toast.show();
}
public void update(String userName, String comments) {
}
@Override
public void onConnectionFailed(@NonNull ConnectionResult connectionResult) {
}
abstract class PhotoTask extends AsyncTask<String, Void, PhotoTask.AttributedPhoto> {
private int mHeight;
private int mWidth;
public PhotoTask(int width, int height) {
mHeight = height;
mWidth = width;
}
// Loads up to five photos for a place id from the Geo Data API.
// The place id must be the first (and only) parameter.
@Override
protected AttributedPhoto doInBackground(String... params) {
if (params.length != 1) {
return null;
}
final String placeId = params[0];
AttributedPhoto attributedPhoto = null;
ArrayList<CharSequence> attributionList = new ArrayList<CharSequence>();
ArrayList<Bitmap> bitmapList = new ArrayList<Bitmap>();
PlacePhotoMetadataResult result = Places.GeoDataApi
.getPlacePhotos(mGoogleApiClient, placeId).await();
if (result.getStatus().isSuccess()) {
PlacePhotoMetadataBuffer photoMetadataBuffer = result.getPhotoMetadata();
for (int i = 0; i < 5; i++) { // adjust the loop bound to load a different number of photos
if (photoMetadataBuffer.getCount() > i && !isCancelled()) {
// Get this photo's metadata and its attributions.
PlacePhotoMetadata photo = photoMetadataBuffer.get(i);
attributionList.add(photo.getAttributions());
// Load a scaled bitmap for this photo.
bitmapList.add(photo.getScaledPhoto(mGoogleApiClient, mWidth, mHeight).await()
.getBitmap());
}
}
attributedPhoto = new AttributedPhoto(attributionList, bitmapList);
// Release the PlacePhotoMetadataBuffer.
photoMetadataBuffer.release();
}
return attributedPhoto;
}
// Holder for the loaded photos and their attributions.
class AttributedPhoto {
public final ArrayList<CharSequence> attribution;
public final ArrayList<Bitmap> bitmap;
public AttributedPhoto(ArrayList<CharSequence> attribution, ArrayList<Bitmap> bitmap) {
this.attribution = attribution;
this.bitmap = bitmap;
}
}
}
private void placePhotosTask(String placeId, final ViewPager viewPager, final Context context) {
// Create a new AsyncTask that displays the bitmap and attribution once loaded.
new PhotoTask(500, 500) {
@Override
protected void onPreExecute() {
// Display a temporary image to show while bitmap is loading.
//mImageView.setImageResource(R.drawable.empty_photo);
}
@Override
protected void onPostExecute(AttributedPhoto attributedPhoto) {
if (attributedPhoto != null) {
// Size the array to the number of photos actually loaded to avoid trailing null pages.
Bitmap[] images = new Bitmap[attributedPhoto.bitmap.size()];
for(int i = 0; i < attributedPhoto.bitmap.size(); i++) {
images[i] = attributedPhoto.bitmap.get(i);
}
ViewPagerAdapter viewPagerAdapter = new ViewPagerAdapter(context, images);
viewPager.setAdapter(viewPagerAdapter);
dotscount = viewPagerAdapter.getCount();
dots = new ImageView[dotscount];
for(int i = 0; i < dotscount; i++) {
dots[i] = new ImageView(context);
dots[i].setImageDrawable(ContextCompat.getDrawable(getApplicationContext(),
R.drawable.nonactive_dot));
LinearLayout.LayoutParams params = new LinearLayout.LayoutParams
(LinearLayout.LayoutParams.WRAP_CONTENT, LinearLayout.LayoutParams.WRAP_CONTENT);
params.setMargins(8, 0, 8, 0);
slideDotspanel.addView(dots[i], params);
}
dots[0].setImageDrawable(ContextCompat.getDrawable(getApplicationContext(),
R.drawable.active_dot));
viewPager.addOnPageChangeListener(new ViewPager.OnPageChangeListener() {
@Override
public void onPageScrolled(int position, float positionOffset,
int positionOffsetPixels) {
}
@Override
public void onPageSelected(int position) {
for(int i = 0; i < dotscount; i++) {
dots[i].setImageDrawable(ContextCompat.getDrawable
(getApplicationContext(), R.drawable.nonactive_dot));
}
dots[position].setImageDrawable(ContextCompat.getDrawable
(getApplicationContext(), R.drawable.active_dot));
}
@Override
public void onPageScrollStateChanged(int state) {
}
});
}
}
}.execute(placeId);
}
public void back_to_search(View v) {
Intent i = new Intent(Info.this, MainActivity.class);
startActivity(i);
}
}
class Comments{
String user;
String Content;
public Comments(String user, String comments) {
this.Content = comments;
this.user = user;
}
}
|
|
package gov.va.med.interactive;
import java.lang.reflect.Array;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
/**
* Value object describing each parameter of a command.
* @author VHAISWBECKEC
*
*/
public class CommandParametersDescription<T>
{
private final String parameterName;
private final Class<T> parameterClass;
private final boolean required;
private final boolean variant; // true only for the last parameter, allows repeating
private String description;
public CommandParametersDescription(Class<T> parameterClass, boolean required)
{
this(null, parameterClass, required, false, null);
}
public CommandParametersDescription(String name, Class<T> parameterClass, boolean required)
{
this(name, parameterClass, required, false, null);
}
public CommandParametersDescription(String name, Class<T> parameterClass, boolean required, boolean variant)
{
this(name, parameterClass, required, variant, null);
}
public CommandParametersDescription(String name, Class<T> parameterClass, boolean required, String description)
{
this(name, parameterClass, required, false, description);
}
/**
* The only real constructor; all other constructors delegate here.
* @param name
* @param parameterClass
* @param required
* @param variant
* @param description
*/
public CommandParametersDescription(
String name,
Class<T> parameterClass,
boolean required,
boolean variant,
String description)
{
this.parameterName = name;
this.parameterClass = parameterClass;
this.required = required;
this.variant = variant;
this.description = description;
}
/**
* @return the parameterName
*/
public String getParameterName()
{
return this.parameterName;
}
/**
* @return true if the parameter is required
*/
public boolean isRequired()
{
return this.required;
}
/**
* @return the variant
*/
public boolean isVariant()
{
return this.variant;
}
/**
* @return the class of the parameter
*/
public Class<T> getParameterClass()
{
return this.parameterClass;
}
/**
* If getParameterClass() returns an array type, this returns
* the member type
* else this returns the same as getParameterClass().
*
* @return
*/
public Class<?> getParameterArrayMemberClass()
{
if(getParameterClass().isArray())
return getParameterClass().getComponentType();
else
return getParameterClass();
}
/**
* @return the description
*/
public String getDescription()
{
return this.description;
}
/**
* @param description the description to set
*/
void setDescription(String description)
{
this.description = description;
}
/**
* Convert a raw value (a String) into the type specified by the parameter class.
* This method tries two ways to do the type conversion: first it looks for
* a constructor that takes a single String argument, then for a static
* "parse" method that takes a single String argument. If neither exists, or both
* throw an exception, then a CommandTypeValidationException is thrown.
*
* @param rawValue
* @return
* @throws CommandTypeValidationException
*/
public T getValue(String rawValue)
throws CommandTypeValidationException
{
if(getParameterClass().isArray())
return getArrayValue(rawValue);
else
return getSingleValue(rawValue);
}
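// A minimal usage sketch (illustrative only; the parameter names and raw values are hypothetical,
// not taken from this codebase). Integer has a single-String constructor, so conversion goes
// through the constructor path; for an array type the raw value is split on ",".
//
//   CommandParametersDescription<Integer> port =
//       new CommandParametersDescription<Integer>("port", Integer.class, true);
//   Integer p = port.getValue("8080");        // -> 8080
//
//   CommandParametersDescription<Integer[]> ids =
//       new CommandParametersDescription<Integer[]>("ids", Integer[].class, false);
//   Integer[] all = ids.getValue("1,2,3");    // -> {1, 2, 3}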
@SuppressWarnings("unchecked")
private T getArrayValue(String rawValue)
throws CommandTypeValidationException
{
String[] rawValues = rawValue.split(",");
T values = (T)Array.newInstance(this.getParameterClass().getComponentType(), rawValues.length);
int index = 0;
for(String rawValuesMember : rawValues)
{
Array.set(values, index, getSingleValue(rawValuesMember) );
index++;
}
return values;
}
@SuppressWarnings("unchecked")
private T getSingleValue(String rawValue)
throws CommandTypeValidationException
{
return (T)getSingleValueInternal(rawValue);
}
private Object getSingleValueInternal(String rawValue)
throws CommandTypeValidationException
{
// note that getParameterArrayMemberClass() returns the same as
// getParameterClass() if the type is not an array
Class<?> resultClass = getParameterArrayMemberClass();
// special case for Boolean values, where the presence indicates TRUE and lack thereof
// indicates FALSE
if(rawValue == null && Boolean.class == resultClass)
return Boolean.FALSE;
if(String.class == resultClass)
return rawValue;
try
{
Constructor<?> parameterConstructor = resultClass.getConstructor(String.class);
return parameterConstructor.newInstance(rawValue);
}
catch (Exception x)
{
try
{
Method parseMethod = resultClass.getMethod("parse", String.class);
if( resultClass.equals(parseMethod.getReturnType()) && Modifier.isStatic(parseMethod.getModifiers()) )
return parseMethod.invoke(null, rawValue);
}
catch (Exception x2)
{
throw new CommandTypeValidationException(getParameterName(), resultClass, rawValue);
}
}
return null;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.core.postoffice;
import java.util.Collection;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Stream;
import org.apache.activemq.artemis.api.core.Message;
import org.apache.activemq.artemis.api.core.Pair;
import org.apache.activemq.artemis.api.core.QueueConfiguration;
import org.apache.activemq.artemis.api.core.RoutingType;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.core.filter.Filter;
import org.apache.activemq.artemis.core.server.ActiveMQComponent;
import org.apache.activemq.artemis.core.server.MessageReference;
import org.apache.activemq.artemis.core.server.Queue;
import org.apache.activemq.artemis.core.server.RoutingContext;
import org.apache.activemq.artemis.core.server.cluster.impl.MessageLoadBalancingType;
import org.apache.activemq.artemis.core.server.impl.AddressInfo;
import org.apache.activemq.artemis.core.transaction.Transaction;
/**
* A PostOffice instance maintains a mapping from a String address to a Queue. Multiple Queue instances can be bound
* to the same String address.
*
* Given a message and an address, a PostOffice instance will route that message to all the Queue instances that are
* registered with that address.
*
* Addresses can be any String instance.
*
* A Queue instance can only be bound against a single address in the post office.
*/
public interface PostOffice extends ActiveMQComponent {
/**
* @param addressInfo
* @return true if the address was added, false if it wasn't added
*/
boolean addAddressInfo(AddressInfo addressInfo) throws Exception;
default void reloadAddressInfo(AddressInfo addressInfo) throws Exception {
addAddressInfo(addressInfo);
}
AddressInfo removeAddressInfo(SimpleString address) throws Exception;
AddressInfo removeAddressInfo(SimpleString address, boolean force) throws Exception;
AddressInfo getAddressInfo(SimpleString address);
AddressInfo updateAddressInfo(SimpleString addressName, EnumSet<RoutingType> routingTypes) throws Exception;
@Deprecated
QueueBinding updateQueue(SimpleString name,
RoutingType routingType,
Filter filter,
Integer maxConsumers,
Boolean purgeOnNoConsumers,
Boolean exclusive,
Boolean groupRebalance,
Integer groupBuckets,
SimpleString groupFirstKey,
Boolean nonDestructive,
Integer consumersBeforeDispatch,
Long delayBeforeDispatch,
SimpleString user,
Boolean configurationManaged) throws Exception;
@Deprecated
QueueBinding updateQueue(SimpleString name,
RoutingType routingType,
Filter filter,
Integer maxConsumers,
Boolean purgeOnNoConsumers,
Boolean exclusive,
Boolean groupRebalance,
Integer groupBuckets,
SimpleString groupFirstKey,
Boolean nonDestructive,
Integer consumersBeforeDispatch,
Long delayBeforeDispatch,
SimpleString user,
Boolean configurationManaged,
Long ringSize) throws Exception;
QueueBinding updateQueue(QueueConfiguration queueConfiguration) throws Exception;
/**
* @param queueConfiguration
* @param forceUpdate Setting to <code>true</code> will make <code>null</code> values override current values too
* @return
* @throws Exception
*/
QueueBinding updateQueue(QueueConfiguration queueConfiguration, boolean forceUpdate) throws Exception;
List<Queue> listQueuesForAddress(SimpleString address) throws Exception;
void addBinding(Binding binding) throws Exception;
Binding removeBinding(SimpleString uniqueName, Transaction tx, boolean deleteData) throws Exception;
/**
* It will look up the Bindings for an address without creating an entry if none exists.
*
* @param address
* @throws Exception
*/
Bindings lookupBindingsForAddress(SimpleString address) throws Exception;
/**
* Unlike lookupBindingsForAddress, this will always create a new element if one does not exist.
*
* @param address
* @throws Exception
*/
Bindings getBindingsForAddress(SimpleString address) throws Exception;
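// A minimal sketch of the difference between the two lookups above (illustrative only;
// "postOffice" and "address" stand for an available PostOffice instance and a SimpleString
// address, neither of which is defined in this file):
//
//   Bindings existing = postOffice.lookupBindingsForAddress(address); // returns only what is already bound
//   Bindings created  = postOffice.getBindingsForAddress(address);    // creates the entry if none exists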
Binding getBinding(SimpleString uniqueName);
Collection<Binding> getMatchingBindings(SimpleString address) throws Exception;
Collection<Binding> getDirectBindings(SimpleString address) throws Exception;
Stream<Binding> getAllBindings();
SimpleString getMatchingQueue(SimpleString address, RoutingType routingType) throws Exception;
SimpleString getMatchingQueue(SimpleString address, SimpleString queueName, RoutingType routingType) throws Exception;
RoutingStatus route(Message message, boolean direct) throws Exception;
RoutingStatus route(Message message,
Transaction tx,
boolean direct) throws Exception;
RoutingStatus route(Message message,
Transaction tx,
boolean direct,
boolean rejectDuplicates) throws Exception;
RoutingStatus route(Message message,
Transaction tx,
boolean direct,
boolean rejectDuplicates,
Binding binding) throws Exception;
RoutingStatus route(Message message,
RoutingContext context,
boolean direct) throws Exception;
RoutingStatus route(Message message,
RoutingContext context,
boolean direct,
boolean rejectDuplicates,
Binding binding) throws Exception;
/**
* This method was renamed as reload, use the new method instead
* @param message
* @param queue
* @param tx
* @return
* @throws Exception
*/
@Deprecated
default MessageReference reroute(Message message, Queue queue, Transaction tx) throws Exception {
return reload(message, queue, tx);
}
MessageReference reload(Message message, Queue queue, Transaction tx) throws Exception;
Pair<RoutingContext, Message> redistribute(Message message,
Queue originatingQueue,
Transaction tx) throws Exception;
void processRoute(Message message, RoutingContext context, boolean direct) throws Exception;
DuplicateIDCache getDuplicateIDCache(SimpleString address);
void sendQueueInfoToQueue(SimpleString queueName, SimpleString address) throws Exception;
Object getNotificationLock();
// we can't start the expiry scanner until the system is loaded, otherwise we may get weird races - https://issues.jboss.org/browse/HORNETQ-1142
void startExpiryScanner();
void startAddressQueueScanner();
boolean isAddressBound(SimpleString address) throws Exception;
Set<SimpleString> getAddresses();
void updateMessageLoadBalancingTypeForAddress(SimpleString address, MessageLoadBalancingType messageLoadBalancingType) throws Exception;
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.test;
import static org.apache.pig.ExecType.LOCAL;
import static org.apache.pig.ExecType.MAPREDUCE;
import static org.apache.pig.builtin.mock.Storage.resetData;
import static org.apache.pig.builtin.mock.Storage.tuple;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.pig.ExecType;
import org.apache.pig.PigServer;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.backend.hadoop.datastorage.ConfigurationUtil;
import org.apache.pig.builtin.mock.Storage;
import org.apache.pig.builtin.mock.Storage.Data;
import org.apache.pig.data.Tuple;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import com.google.common.collect.Sets;
public class TestParser {
protected final Log log = LogFactory.getLog(getClass());
protected ExecType execType = MAPREDUCE;
private static MiniCluster cluster;
protected PigServer pigServer;
@Before
public void setUp() throws Exception {
String execTypeString = System.getProperty("test.exectype");
if (execTypeString != null && execTypeString.length() > 0) {
execType = ExecType.fromString(execTypeString);
}
if (execType == MAPREDUCE) {
cluster = MiniCluster.buildCluster();
pigServer = new PigServer(MAPREDUCE, cluster.getProperties());
} else {
pigServer = new PigServer(LOCAL);
}
}
@After
public void tearDown() throws Exception {
pigServer.shutdown();
}
@AfterClass
public static void oneTimeTearDown() throws Exception {
if (cluster != null)
cluster.shutDown();
}
@Test(expected = IOException.class)
public void testLoadingNonexistentFile() throws ExecException, IOException {
// FIXME : this should be tested in all modes
if (execType == ExecType.LOCAL)
return;
pigServer.registerQuery("vals = load 'nonexistentfile';");
pigServer.openIterator("vals");
}
@Test
public void testRemoteServerList() throws ExecException, IOException {
Properties pigProperties = pigServer.getPigContext().getProperties();
pigProperties.setProperty("fs.default.name", "hdfs://a.com:8020");
Configuration conf;
Data data = Storage.resetData(pigServer.getPigContext());
data.set("/user/pig/1.txt");// no data
pigServer.registerQuery("a = load '/user/pig/1.txt' using mock.Storage;");
conf = ConfigurationUtil.toConfiguration(pigProperties);
assertTrue(conf.get("mapreduce.job.hdfs-servers") == null ||
conf.get("mapreduce.job.hdfs-servers").equals(pigProperties.get("fs.default.name"))||
conf.get("mapreduce.job.hdfs-servers").equals(pigProperties.get("fs.defaultFS")));
pigServer.registerQuery("a = load 'hdfs://a.com/user/pig/1.txt' using mock.Storage;");
conf = ConfigurationUtil.toConfiguration(pigProperties);
assertTrue(pigProperties.getProperty("mapreduce.job.hdfs-servers") == null ||
conf.get("mapreduce.job.hdfs-servers").equals(pigProperties.get("fs.default.name"))||
conf.get("mapreduce.job.hdfs-servers").equals(pigProperties.get("fs.defaultFS")));
pigServer.registerQuery("a = load 'har:///1.txt' using mock.Storage;");
conf = ConfigurationUtil.toConfiguration(pigProperties);
assertTrue(pigProperties.getProperty("mapreduce.job.hdfs-servers") == null ||
conf.get("mapreduce.job.hdfs-servers").equals(pigProperties.get("fs.default.name"))||
conf.get("mapreduce.job.hdfs-servers").equals(pigProperties.get("fs.defaultFS")));
pigServer.registerQuery("a = load 'hdfs://b.com/user/pig/1.txt' using mock.Storage;");
conf = ConfigurationUtil.toConfiguration(pigProperties);
assertTrue(conf.get("mapreduce.job.hdfs-servers") != null &&
conf.get("mapreduce.job.hdfs-servers").contains("hdfs://b.com"));
pigServer.registerQuery("a = load 'har://hdfs-c.com/user/pig/1.txt' using mock.Storage;");
conf = ConfigurationUtil.toConfiguration(pigProperties);
assertTrue(conf.get("mapreduce.job.hdfs-servers") != null &&
conf.get("mapreduce.job.hdfs-servers").contains("hdfs://c.com"));
pigServer.registerQuery("a = load 'hdfs://d.com:8020/user/pig/1.txt' using mock.Storage;");
conf = ConfigurationUtil.toConfiguration(pigProperties);
assertTrue(conf.get("mapreduce.job.hdfs-servers") != null &&
conf.get("mapreduce.job.hdfs-servers").contains("hdfs://d.com:8020"));
}
@Test
public void testRemoteServerList2() throws ExecException, IOException {
Properties pigProperties = pigServer.getPigContext().getProperties();
pigProperties.setProperty("fs.default.name", "hdfs://a.com:8020");
Configuration conf;
pigServer.setBatchOn();
Data data = Storage.resetData(pigServer.getPigContext());
data.set("/user/pig/1.txt");// no data
pigServer.registerQuery("a = load '/user/pig/1.txt' using mock.Storage;");
pigServer.registerQuery("store a into '/user/pig/1.txt';");
System.out.println("hdfs-servers: "
+ pigProperties.getProperty("mapreduce.job.hdfs-servers"));
conf = ConfigurationUtil.toConfiguration(pigProperties);
assertTrue(conf.get("mapreduce.job.hdfs-servers") == null ||
conf.get("mapreduce.job.hdfs-servers").equals(pigProperties.get("fs.default.name"))||
conf.get("mapreduce.job.hdfs-servers").equals(pigProperties.get("fs.defaultFS")));
pigServer.registerQuery("store a into 'hdfs://b.com/user/pig/1.txt' using mock.Storage;");
System.out.println("hdfs-servers: "
+ pigProperties.getProperty("mapreduce.job.hdfs-servers"));
conf = ConfigurationUtil.toConfiguration(pigProperties);
assertTrue(conf.get("mapreduce.job.hdfs-servers") != null &&
conf.get("mapreduce.job.hdfs-servers").contains("hdfs://b.com"));
pigServer.registerQuery("store a into 'har://hdfs-c.com:8020/user/pig/1.txt' using mock.Storage;");
System.out.println("hdfs-servers: "
+ pigProperties.getProperty("mapreduce.job.hdfs-servers"));
conf = ConfigurationUtil.toConfiguration(pigProperties);
assertTrue(conf.get("mapreduce.job.hdfs-servers") != null &&
conf.get("mapreduce.job.hdfs-servers").contains("hdfs://c.com:8020"));
pigServer.registerQuery("store a into 'hdfs://d.com:8020/user/pig/1.txt' using mock.Storage;");
System.out.println("hdfs-servers: "
+ pigProperties.getProperty("mapreduce.job.hdfs-servers"));
conf = ConfigurationUtil.toConfiguration(pigProperties);
assertTrue(conf.get("mapreduce.job.hdfs-servers") != null &&
conf.get("mapreduce.job.hdfs-servers").contains("hdfs://d.com:8020"));
}
@Test
public void testRestrictedColumnNamesWhitelist() throws Exception {
pigServer = new PigServer(LOCAL);
Data data = resetData(pigServer);
Set<Tuple> tuples = Sets.newHashSet(tuple(1),tuple(2),tuple(3));
data.set("foo",
"x:int",
tuples
);
pigServer.registerQuery("a = load 'foo' using mock.Storage();");
pigServer.registerQuery("a = foreach a generate x as rank;");
pigServer.registerQuery("a = foreach a generate rank as cube;");
pigServer.registerQuery("a = foreach a generate cube as y;");
pigServer.registerQuery("rank = a;");
pigServer.registerQuery("cube = rank;");
pigServer.registerQuery("rank = cube;");
pigServer.registerQuery("cube = foreach rank generate y as cube;");
pigServer.registerQuery("store cube into 'baz' using mock.Storage();");
List<Tuple> tuples2 = data.get("baz");
assertEquals(tuples.size(), tuples2.size());
for (Tuple t : tuples2) {
tuples.remove(t);
}
assertTrue(tuples.isEmpty());
}
}
|
|
package cz.muni.fi.civ.newohybat.persistence.entities;
import java.io.Serializable;
import java.util.Set;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
/**
* Entity implementation class for Entity: City
*
*/
@Entity
public class City implements Serializable {
@Id
@GeneratedValue
private Long id;
private String name;
@ManyToMany(fetch=FetchType.EAGER)
@JoinTable(
name="CITY_CITY_IMPROVEMENTS",
joinColumns={@JoinColumn(name="CITY_ID", referencedColumnName="ID")},
inverseJoinColumns={@JoinColumn(name="CITY_IMPROVEMENT_IDENT", referencedColumnName="IDENT")})
private Set<CityImprovement> improvements;
@ManyToMany(fetch=FetchType.EAGER)
@JoinTable(
name="CITY_ENABLED_CITY_IMPROVEMENTS",
joinColumns={@JoinColumn(name="CITY_ID", referencedColumnName="ID")},
inverseJoinColumns={@JoinColumn(name="CITY_IMPROVEMENT_IDENT", referencedColumnName="IDENT")})
private Set<CityImprovement> enabledImprovements;
@ManyToOne
private CityImprovement currentImprovement;
@ManyToOne
private UnitType currentUnit;
@OneToMany(fetch=FetchType.EAGER)
private Set<Unit> homeUnits;
@ManyToMany(fetch=FetchType.EAGER)
@JoinTable(
name="CITY_ENABLED_UNIT_TYPES",
joinColumns={@JoinColumn(name="CITY_ID", referencedColumnName="ID")},
inverseJoinColumns={@JoinColumn(name="UNIT_TYPE_IDENT", referencedColumnName="IDENT")})
private Set<UnitType> enabledUnitTypes;
@OneToMany(fetch=FetchType.EAGER)
private Set<TradeRoute> tradeRoutes;
@OneToOne
private Tile cityCentre;
@ManyToOne
private Player owner;
@OneToMany(fetch=FetchType.EAGER)
private Set<Tile> managedTiles;
@Column(nullable=false)
private Integer size;
private Boolean weLoveDay;
private Boolean disorder;
private Integer foodProduction;
private Integer foodConsumption;
private Integer foodSurplus;
private Integer foodStock;
private Integer resourcesProduction;
private Integer resourcesConsumption;
private Integer resourcesSurplus;
private Integer tradeProduction;
private Integer corruption; // loss of trade
private Integer luxuriesAmount;
private Integer luxuriesSpent;
private Integer taxesAmount;
private Integer researchAmount;
private Integer peopleHappy;
private Integer peopleUnhappy;
private Integer peopleContent;
private Integer peopleEntertainers;
private Integer peopleTaxmen;
private Integer peopleScientists;
private Integer pollutionChance;
private Integer improvementsUpkeep; // deducted from taxesAmount
private Integer unitsSupport;
private Integer unitsFoodSupport;
private static final long serialVersionUID = 1L;
public City() {
super();
}
public Long getId() {
return this.id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return this.name;
}
public void setName(String name) {
this.name = name;
}
public Tile getCityCentre() {
return cityCentre;
}
public void setCityCentre(Tile cityCentre) {
this.cityCentre = cityCentre;
}
public Set<CityImprovement> getImprovements() {
return improvements;
}
public void setImprovements(Set<CityImprovement> improvements) {
this.improvements = improvements;
}
public Set<CityImprovement> getEnabledImprovements() {
return enabledImprovements;
}
public void setEnabledImprovements(Set<CityImprovement> enabledImprovements) {
this.enabledImprovements = enabledImprovements;
}
public Set<Unit> getHomeUnits() {
return homeUnits;
}
public void setHomeUnits(Set<Unit> homeUnits) {
this.homeUnits = homeUnits;
}
public Set<UnitType> getEnabledUnitTypes() {
return enabledUnitTypes;
}
public void setEnabledUnitTypes(Set<UnitType> enabledUnitTypes) {
this.enabledUnitTypes = enabledUnitTypes;
}
public Set<TradeRoute> getTradeRoutes() {
return tradeRoutes;
}
public void setTradeRoutes(Set<TradeRoute> tradeRoutes) {
this.tradeRoutes = tradeRoutes;
}
public Player getOwner() {
return owner;
}
public void setOwner(Player owner) {
this.owner = owner;
}
/**
* @return the managedTiles
*/
public Set<Tile> getManagedTiles() {
return managedTiles;
}
/**
* @param managedTiles the managedTiles to set
*/
public void setManagedTiles(Set<Tile> managedTiles) {
this.managedTiles = managedTiles;
}
public CityImprovement getCurrentImprovement() {
return currentImprovement;
}
public void setCurrentImprovement(CityImprovement currentImprovement) {
this.currentImprovement = currentImprovement;
}
public UnitType getCurrentUnit() {
return currentUnit;
}
public void setCurrentUnit(UnitType currentUnit) {
this.currentUnit = currentUnit;
}
public Integer getSize() {
return size;
}
public void setSize(Integer size) {
this.size = size;
}
public Boolean getWeLoveDay() {
return weLoveDay;
}
public void setWeLoveDay(Boolean weLoveDay) {
this.weLoveDay = weLoveDay;
}
public Boolean getDisorder() {
return disorder;
}
public void setDisorder(Boolean disorder) {
this.disorder = disorder;
}
public Integer getFoodProduction() {
return foodProduction;
}
public void setFoodProduction(Integer foodProduction) {
this.foodProduction = foodProduction;
}
public Integer getFoodConsumption() {
return foodConsumption;
}
public void setFoodConsumption(Integer foodConsumption) {
this.foodConsumption = foodConsumption;
}
public Integer getFoodSurplus() {
return foodSurplus;
}
public void setFoodSurplus(Integer foodSurplus) {
this.foodSurplus = foodSurplus;
}
public Integer getFoodStock() {
return foodStock;
}
public void setFoodStock(Integer foodStock) {
this.foodStock = foodStock;
}
public Integer getResourcesProduction() {
return resourcesProduction;
}
public void setResourcesProduction(Integer resourcesProduction) {
this.resourcesProduction = resourcesProduction;
}
public Integer getResourcesConsumption() {
return resourcesConsumption;
}
public void setResourcesConsumption(Integer resourcesConsumption) {
this.resourcesConsumption = resourcesConsumption;
}
public Integer getResourcesSurplus() {
return resourcesSurplus;
}
public void setResourcesSurplus(Integer resourcesSurplus) {
this.resourcesSurplus = resourcesSurplus;
}
public Integer getTradeProduction() {
return tradeProduction;
}
public void setTradeProduction(Integer tradeProduction) {
this.tradeProduction = tradeProduction;
}
public Integer getCorruption() {
return corruption;
}
public void setCorruption(Integer corruption) {
this.corruption = corruption;
}
public Integer getLuxuriesAmount() {
return luxuriesAmount;
}
public void setLuxuriesAmount(Integer luxuriesAmount) {
this.luxuriesAmount = luxuriesAmount;
}
public Integer getLuxuriesSpent() {
return luxuriesSpent;
}
public void setLuxuriesSpent(Integer luxuriesSpent) {
this.luxuriesSpent = luxuriesSpent;
}
public Integer getTaxesAmount() {
return taxesAmount;
}
public void setTaxesAmount(Integer taxesAmount) {
this.taxesAmount = taxesAmount;
}
public Integer getResearchAmount() {
return researchAmount;
}
public void setResearchAmount(Integer researchAmount) {
this.researchAmount = researchAmount;
}
public Integer getPeopleHappy() {
return peopleHappy;
}
public void setPeopleHappy(Integer peopleHappy) {
this.peopleHappy = peopleHappy;
}
public Integer getPeopleUnhappy() {
return peopleUnhappy;
}
public void setPeopleUnhappy(Integer peopleUnhappy) {
this.peopleUnhappy = peopleUnhappy;
}
public Integer getPeopleContent() {
return peopleContent;
}
public void setPeopleContent(Integer peopleContent) {
this.peopleContent = peopleContent;
}
public Integer getPeopleEntertainers() {
return peopleEntertainers;
}
public void setPeopleEntertainers(Integer peopleEntertainers) {
this.peopleEntertainers = peopleEntertainers;
}
public Integer getPeopleTaxmen() {
return peopleTaxmen;
}
public void setPeopleTaxmen(Integer peopleTaxmen) {
this.peopleTaxmen = peopleTaxmen;
}
public Integer getPeopleScientists() {
return peopleScientists;
}
public void setPeopleScientists(Integer peopleScientists) {
this.peopleScientists = peopleScientists;
}
public Integer getPollutionChance() {
return pollutionChance;
}
public void setPollutionChance(Integer pollutionChance) {
this.pollutionChance = pollutionChance;
}
public Integer getImprovementsUpkeep() {
return improvementsUpkeep;
}
public void setImprovementsUpkeep(Integer improvementsUpkeep) {
this.improvementsUpkeep = improvementsUpkeep;
}
public Integer getUnitsSupport() {
return unitsSupport;
}
public void setUnitsSupport(Integer unitsSupport) {
this.unitsSupport = unitsSupport;
}
/**
* @return the unitsFoodSupport
*/
public Integer getUnitsFoodSupport() {
return unitsFoodSupport;
}
/**
* @param unitsFoodSupport the unitsFoodSupport to set
*/
public void setUnitsFoodSupport(Integer unitsFoodSupport) {
this.unitsFoodSupport = unitsFoodSupport;
}
/* (non-Javadoc)
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result
+ ((cityCentre == null) ? 0 : cityCentre.hashCode());
result = prime * result + ((id == null) ? 0 : id.hashCode());
result = prime * result + ((owner == null) ? 0 : owner.hashCode());
return result;
}
/* (non-Javadoc)
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
City other = (City) obj;
if (cityCentre == null) {
if (other.cityCentre != null)
return false;
} else if (!cityCentre.equals(other.cityCentre))
return false;
if (id == null) {
if (other.id != null)
return false;
} else if (!id.equals(other.id))
return false;
if (owner == null) {
if (other.owner != null)
return false;
} else if (!owner.equals(other.owner))
return false;
return true;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.segment.transform;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.impl.DimensionsSpec;
import org.apache.druid.data.input.impl.InputRowParser;
import org.apache.druid.data.input.impl.MapInputRowParser;
import org.apache.druid.data.input.impl.TimeAndDimsParseSpec;
import org.apache.druid.data.input.impl.TimestampSpec;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.query.expression.TestExprMacroTable;
import org.apache.druid.query.filter.AndDimFilter;
import org.apache.druid.query.filter.SelectorDimFilter;
import org.apache.druid.segment.TestHelper;
import org.apache.druid.testing.InitializedNullHandlingTest;
import org.junit.Assert;
import org.junit.Test;
import java.util.Map;
public class TransformSpecTest extends InitializedNullHandlingTest
{
private static final MapInputRowParser PARSER = new MapInputRowParser(
new TimeAndDimsParseSpec(
new TimestampSpec("t", "auto", DateTimes.of("2000-01-01")),
new DimensionsSpec(
DimensionsSpec.getDefaultSchemas(ImmutableList.of("f", "x", "y")),
null,
null
)
)
);
private static final Map<String, Object> ROW1 = ImmutableMap.<String, Object>builder()
.put("x", "foo")
.put("y", "bar")
.put("a", 2.0)
.put("b", 3L)
.build();
private static final Map<String, Object> ROW2 = ImmutableMap.<String, Object>builder()
.put("x", "foo")
.put("y", "baz")
.put("a", 2.0)
.put("b", 4L)
.build();
@Test
public void testTransforms()
{
final TransformSpec transformSpec = new TransformSpec(
null,
ImmutableList.of(
new ExpressionTransform("f", "concat(x,y)", TestExprMacroTable.INSTANCE),
new ExpressionTransform("g", "a + b", TestExprMacroTable.INSTANCE),
new ExpressionTransform("h", "concat(f,g)", TestExprMacroTable.INSTANCE)
)
);
Assert.assertEquals(
ImmutableSet.of("x", "y", "a", "b", "f", "g"),
transformSpec.getRequiredColumns()
);
final InputRowParser<Map<String, Object>> parser = transformSpec.decorate(PARSER);
final InputRow row = parser.parseBatch(ROW1).get(0);
Assert.assertNotNull(row);
Assert.assertEquals(DateTimes.of("2000-01-01").getMillis(), row.getTimestampFromEpoch());
Assert.assertEquals(DateTimes.of("2000-01-01"), row.getTimestamp());
Assert.assertEquals(ImmutableList.of("f", "x", "y"), row.getDimensions());
Assert.assertEquals(ImmutableList.of("foo"), row.getDimension("x"));
Assert.assertEquals(3.0, row.getMetric("b").doubleValue(), 0);
Assert.assertEquals("foobar", row.getRaw("f"));
Assert.assertEquals(ImmutableList.of("foobar"), row.getDimension("f"));
Assert.assertEquals(ImmutableList.of("5.0"), row.getDimension("g"));
Assert.assertEquals(ImmutableList.of(), row.getDimension("h"));
Assert.assertEquals(5L, row.getMetric("g").longValue());
}
@Test
public void testTransformOverwriteField()
{
// Transforms are allowed to overwrite fields, and to refer to the fields they overwrite; double-check this.
final TransformSpec transformSpec = new TransformSpec(
null,
ImmutableList.of(
new ExpressionTransform("x", "concat(x,y)", TestExprMacroTable.INSTANCE)
)
);
Assert.assertEquals(
ImmutableSet.of("x", "y"),
transformSpec.getRequiredColumns()
);
final InputRowParser<Map<String, Object>> parser = transformSpec.decorate(PARSER);
final InputRow row = parser.parseBatch(ROW1).get(0);
Assert.assertNotNull(row);
Assert.assertEquals(DateTimes.of("2000-01-01").getMillis(), row.getTimestampFromEpoch());
Assert.assertEquals(DateTimes.of("2000-01-01"), row.getTimestamp());
Assert.assertEquals(ImmutableList.of("f", "x", "y"), row.getDimensions());
Assert.assertEquals(ImmutableList.of("foobar"), row.getDimension("x"));
Assert.assertEquals(3.0, row.getMetric("b").doubleValue(), 0);
Assert.assertNull(row.getRaw("f"));
}
@Test
public void testFilterOnTransforms()
{
// Filters are allowed to refer to transformed fields; double-check this.
final TransformSpec transformSpec = new TransformSpec(
new AndDimFilter(
ImmutableList.of(
new SelectorDimFilter("x", "foo", null),
new SelectorDimFilter("f", "foobar", null),
new SelectorDimFilter("g", "5.0", null)
)
),
ImmutableList.of(
new ExpressionTransform("f", "concat(x,y)", TestExprMacroTable.INSTANCE),
new ExpressionTransform("g", "a + b", TestExprMacroTable.INSTANCE)
)
);
Assert.assertEquals(
ImmutableSet.of("x", "f", "g", "y", "a", "b"),
transformSpec.getRequiredColumns()
);
final InputRowParser<Map<String, Object>> parser = transformSpec.decorate(PARSER);
Assert.assertNotNull(parser.parseBatch(ROW1).get(0));
Assert.assertNull(parser.parseBatch(ROW2).get(0));
}
@Test
public void testTransformTimeFromOtherFields()
{
final TransformSpec transformSpec = new TransformSpec(
null,
ImmutableList.of(
new ExpressionTransform("__time", "(a + b) * 3600000", TestExprMacroTable.INSTANCE)
)
);
Assert.assertEquals(
ImmutableSet.of("a", "b"),
transformSpec.getRequiredColumns()
);
final InputRowParser<Map<String, Object>> parser = transformSpec.decorate(PARSER);
final InputRow row = parser.parseBatch(ROW1).get(0);
Assert.assertNotNull(row);
Assert.assertEquals(DateTimes.of("1970-01-01T05:00:00Z"), row.getTimestamp());
Assert.assertEquals(DateTimes.of("1970-01-01T05:00:00Z").getMillis(), row.getTimestampFromEpoch());
}
@Test
public void testTransformTimeFromTime()
{
final TransformSpec transformSpec = new TransformSpec(
null,
ImmutableList.of(
new ExpressionTransform("__time", "__time + 3600000", TestExprMacroTable.INSTANCE)
)
);
Assert.assertEquals(
ImmutableSet.of("__time"),
transformSpec.getRequiredColumns()
);
final InputRowParser<Map<String, Object>> parser = transformSpec.decorate(PARSER);
final InputRow row = parser.parseBatch(ROW1).get(0);
Assert.assertNotNull(row);
Assert.assertEquals(DateTimes.of("2000-01-01T01:00:00Z"), row.getTimestamp());
Assert.assertEquals(DateTimes.of("2000-01-01T01:00:00Z").getMillis(), row.getTimestampFromEpoch());
}
@Test
public void testSerde() throws Exception
{
final TransformSpec transformSpec = new TransformSpec(
new AndDimFilter(
ImmutableList.of(
new SelectorDimFilter("x", "foo", null),
new SelectorDimFilter("f", "foobar", null),
new SelectorDimFilter("g", "5.0", null)
)
),
ImmutableList.of(
new ExpressionTransform("f", "concat(x,y)", TestExprMacroTable.INSTANCE),
new ExpressionTransform("g", "a + b", TestExprMacroTable.INSTANCE)
)
);
final ObjectMapper jsonMapper = TestHelper.makeJsonMapper();
Assert.assertEquals(
transformSpec,
jsonMapper.readValue(jsonMapper.writeValueAsString(transformSpec), TransformSpec.class)
);
}
}
|
|
package com.defterp.modules.purchases.controllers;
import com.defterp.modules.accounting.entities.Account;
import com.defterp.util.JsfUtil;
import com.defterp.translation.annotations.Status;
import com.defterp.modules.inventory.entities.DeliveryOrder;
import com.defterp.modules.inventory.entities.DeliveryOrderLine;
import com.defterp.modules.accounting.entities.Invoice;
import com.defterp.modules.accounting.entities.InvoiceLine;
import com.defterp.modules.accounting.entities.InvoiceTax;
import com.defterp.modules.accounting.entities.Journal;
import com.defterp.modules.accounting.queryBuilders.AccountQueryBuilder;
import com.defterp.modules.accounting.queryBuilders.JournalQueryBuilder;
import com.defterp.modules.partners.entities.Partner;
import com.defterp.modules.inventory.entities.Product;
import com.defterp.modules.purchases.entities.PurchaseOrder;
import com.defterp.modules.purchases.entities.PurchaseOrderLine;
import com.defterp.modules.commonClasses.AbstractController;
import com.defterp.modules.commonClasses.QueryWrapper;
import com.defterp.modules.inventory.queryBuilders.ProductQueryBuilder;
import com.defterp.modules.partners.queryBuilders.PartnerQueryBuilder;
import com.defterp.modules.purchases.queryBuilders.PurchaseOrderLineQueryBuilder;
import com.defterp.modules.purchases.queryBuilders.PurchaseOrderQueryBuilder;
import com.defterp.modules.commonClasses.IdGenerator;
import java.io.IOException;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.ResourceBundle;
import javax.faces.context.ExternalContext;
import javax.faces.context.FacesContext;
import javax.faces.event.ActionEvent;
import javax.faces.view.ViewScoped;
import javax.inject.Inject;
import javax.inject.Named;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;
import net.sf.jasperreports.engine.JREmptyDataSource;
import net.sf.jasperreports.engine.JRException;
import net.sf.jasperreports.engine.JasperExportManager;
import net.sf.jasperreports.engine.JasperFillManager;
import net.sf.jasperreports.engine.JasperPrint;
import org.apache.commons.lang.SerializationUtils;
import org.primefaces.context.RequestContext;
/**
*
* @author MOHAMMED BOUNAGA
*
* github.com/medbounaga
*/
@Named(value = "purchaseOrderController")
@ViewScoped
public class PurchaseOrderController extends AbstractController {
@Inject
@Status
private HashMap<String, String> statuses;
private List<PurchaseOrder> filteredPurchaseOrders;
private List<PurchaseOrder> purchaseOrders;
private PurchaseOrder purchaseOrder;
private List<PurchaseOrderLine> purchaseOrderLines;
private List<PurchaseOrderLine> filteredPurchaseOrderLines;
private PurchaseOrderLine purchaseOrderLine;
private DeliveryOrder deliveryOrder;
private List<DeliveryOrderLine> deliverOrderLines;
private Invoice invoice;
private List<InvoiceLine> invoiceLines;
private String invoiceMethod;
private String purchaseId;
private String purchaseLineId;
private String partnerId;
private String productId;
private String partialListType;
private int uninvoicedlines;
private int rowIndex;
private List<Partner> topNActiveVendors;
private List<Partner> activeVendors;
private List<Partner> filteredActiveVendors;
private List<Product> topNActivePurchasedProducts;
private List<Product> activePurchasedProducts;
private List<Product> filteredActivePurchasedProducts;
private Product product;
private Partner supplier;
private String currentForm;
private String currentList;
private QueryWrapper query;
public PurchaseOrderController() {
super("/sc/purchaseOrder/");
}
public void setRowIndex() {
Map<String, String> params = FacesContext.getCurrentInstance().getExternalContext().getRequestParameterMap();
rowIndex = Integer.valueOf(params.get("rowIndex"));
}
// public List<Product> getTopPurchasedNProducts() {
// if (topNActivePurchasedProducts == null) {
// topNActivePurchasedProducts = purchaseOrderFacade.findTopNPurchasedProducts(4);
// }
// return topNActivePurchasedProducts;
// }
//
// public List<Partner> getTopNSuppliers() {
// if (topNActiveVendors == null) {
// topNActiveVendors = purchaseOrderFacade.findTopNSuppliers(4);
// }
// return topNActiveVendors;
// }
public void onSelectSupplier() {
if ((supplier != null) && (!topNActiveVendors.contains(supplier))) {
topNActiveVendors.add(supplier);
}
purchaseOrder.setPartner(supplier);
}
public void onSelectProduct() {
if ((product != null)) {
if (!topNActivePurchasedProducts.contains(product)) {
topNActivePurchasedProducts.add(product);
}
if (rowIndex < 0) {
purchaseOrderLine.setProduct(product);
purchaseOrderLine.setPrice(product.getPurchasePrice());
purchaseOrderLine.setUom(product.getUom().getName());
RequestContext.getCurrentInstance().update("mainForm:productMenuTwo");
RequestContext.getCurrentInstance().update("mainForm:uom");
RequestContext.getCurrentInstance().update("mainForm:price");
} else {
purchaseOrderLines.get(rowIndex).setProduct(product);
purchaseOrderLines.get(rowIndex).setPrice(product.getPurchasePrice());
purchaseOrderLines.get(rowIndex).setUom(product.getUom().getName());
RequestContext.getCurrentInstance().update("mainForm:datalist:" + rowIndex + ":productMenu");
RequestContext.getCurrentInstance().update("mainForm:datalist:" + rowIndex + ":uomm");
RequestContext.getCurrentInstance().update("mainForm:datalist:" + rowIndex + ":pricee");
}
}
}
public void updateOrder() {
if (getOrderStatus(purchaseOrder.getId()) != null) {
if (!getOrderStatus(purchaseOrder.getId()).equals("Quotation")) {
JsfUtil.addWarningMessageDialog("InvalidAction", "ErrorProceedEditQuotation");
currentForm = "/sc/purchaseOrder/View.xhtml";
} else if (purchaseOrderLines.isEmpty()) {
JsfUtil.addWarningMessageDialog("InvalidAction", "AtLeastOnePurchaseOrderLineUpdate");
} else {
for (PurchaseOrderLine OrderLine : purchaseOrderLines) {
OrderLine.setPurchaseOrder(purchaseOrder);
}
purchaseOrder.setPurchaseOrderLines(purchaseOrderLines);
purchaseOrder = super.updateItem(purchaseOrder);
if (partialListType == null && purchaseOrders != null) {
purchaseOrders.set(purchaseOrders.indexOf(purchaseOrder), purchaseOrder);
} else {
query = PurchaseOrderQueryBuilder.getFindAllQuery();
purchaseOrders = super.findWithQuery(query);
partialListType = null;
}
currentForm = "/sc/purchaseOrder/View.xhtml";
}
}
}
public void createOrder() {
if (purchaseOrderLines.isEmpty()) {
JsfUtil.addWarningMessageDialog("InvalidAction", "AtLeastOnePurchaseOrderLineCreate");
} else {
for (PurchaseOrderLine OrderLine : purchaseOrderLines) {
OrderLine.setPurchaseOrder(purchaseOrder);
}
purchaseOrder.setState("Quotation");
purchaseOrder.setPurchaseOrderLines(purchaseOrderLines);
purchaseOrder = super.createItem(purchaseOrder);
purchaseOrder.setName(IdGenerator.generatePurchaseId(purchaseOrder.getId()));
purchaseOrder = super.updateItem(purchaseOrder);
if (partialListType == null && purchaseOrders != null) {
purchaseOrders.add(purchaseOrder);
} else {
query = PurchaseOrderQueryBuilder.getFindAllQuery();
purchaseOrders = super.findWithQuery(query);
partialListType = null;
}
currentForm = "/sc/purchaseOrder/View.xhtml";
}
}
public void deleteOrder() {
if (purchaseOrderExist(purchaseOrder.getId())) {
if (purchaseOrder.getState().equals("Cancelled")) {
cancelRelations();
boolean deleted = super.deleteItem(purchaseOrder);
if (deleted) {
JsfUtil.addSuccessMessage("ItemDeleted");
currentForm = VIEW_URL;
if (purchaseOrders != null && purchaseOrders.size() > 1) {
purchaseOrders.remove(purchaseOrder);
purchaseOrder = purchaseOrders.get(0);
} else {
partialListType = null;
query = PurchaseOrderQueryBuilder.getFindAllQuery();
purchaseOrders = super.findWithQuery(query);
if (purchaseOrders != null && !purchaseOrders.isEmpty()) {
purchaseOrder = purchaseOrders.get(0);
}
}
} else {
JsfUtil.addWarningMessageDialog("InvalidAction", "ErrorDelete3");
}
} else {
JsfUtil.addWarningMessageDialog("InvalidAction", "ErrorDelete");
}
}
}
private void cancelRelations() {
if (!purchaseOrder.getInvoices().isEmpty()) {
for (Invoice invoice : purchaseOrder.getInvoices()) {
invoice.setPurchaseOrder(null);
super.updateItem(invoice);
}
purchaseOrder.getInvoices().clear();
}
if (!purchaseOrder.getDeliveryOrders().isEmpty()) {
for (DeliveryOrder deliveryOrder : purchaseOrder.getDeliveryOrders()) {
deliveryOrder.setPurchaseOrder(null);
super.updateItem(deliveryOrder);
}
purchaseOrder.getDeliveryOrders().clear();
}
}
public void createDeliveryOrder() {
if (purchaseOrderExist(purchaseOrder.getId())) {
if (purchaseOrder.getState().equals("Purchase Order") && purchaseOrder.getDeliveryOrders() != null && purchaseOrder.getDeliveryCreated() != true) {
deliveryOrder = new DeliveryOrder(new Date(), purchaseOrder.getName(), "Draft", "Purchase", Boolean.TRUE, "Complete", null, purchaseOrder.getPartner(), purchaseOrder);
deliverOrderLines = new ArrayList<>();
for (PurchaseOrderLine purchaseLine : purchaseOrder.getPurchaseOrderLines()) {
deliverOrderLines.add(new DeliveryOrderLine(
purchaseLine.getProduct(),
purchaseLine.getQuantity(),
0d,
purchaseLine.getUom(),
"New",
"Purchase",
Boolean.TRUE,
purchaseOrder.getPartner(),
purchaseLine.getPrice(),
deliveryOrder));
}
deliveryOrder.setDeliveryOrderLines(deliverOrderLines);
purchaseOrder.getDeliveryOrders().add(deliveryOrder);
purchaseOrder.setDeliveryCreated(true);
deliveryOrder = super.createItem(deliveryOrder);
deliveryOrder.setName(IdGenerator.generateDeliveryInId(deliveryOrder.getId()));
super.updateItem(deliveryOrder);
purchaseOrder = super.updateItem(purchaseOrder);
purchaseOrders.set(purchaseOrders.indexOf(purchaseOrder), purchaseOrder);
} else {
JsfUtil.addWarningMessageDialog("InvalidAction", "ErrorAlreadyModified");
}
}
}
private void refreshLinesToInvoice() {
if (purchaseOrderLines != null && !purchaseOrderLines.isEmpty()) {
List<PurchaseOrderLine> selectedLines = new ArrayList<>();
selectedLines.addAll(purchaseOrderLines);
prepareInvoicing();
ListIterator<PurchaseOrderLine> iterator = purchaseOrderLines.listIterator();
while (iterator.hasNext()) {
PurchaseOrderLine purchaseLine = iterator.next();
boolean exist = false;
for (PurchaseOrderLine line : selectedLines) {
if (java.util.Objects.equals(purchaseLine.getId(), line.getId())) {
exist = true;
break;
}
}
if (!exist) {
iterator.remove();
}
}
for (PurchaseOrderLine line : purchaseOrderLines) {
System.out.println("id: " + line.getId());
}
}
}
public void createInvoice(Integer id, boolean redirect) throws IOException {
if (purchaseOrderExist(id)) {
if ((purchaseOrder.getState().equals("Purchase Order")) && (purchaseOrder.getInvoiceMethod().equals("Partial"))) {
refreshLinesToInvoice();
if (purchaseOrderLines != null && !purchaseOrderLines.isEmpty()) {
String invoiceReference = ((purchaseOrder.getReference() == null) || (purchaseOrder.getReference().isEmpty())) ? purchaseOrder.getName() : purchaseOrder.getReference();
invoice = new Invoice(
new Date(),
"Purchase",
purchaseOrder.getName(),
"Draft",
Boolean.TRUE,
purchaseOrder.getPartner(),
purchaseOrder,
(Account) super.findSingleWithQuery(AccountQueryBuilder.getFindByNameQuery("Account Payable")),
(Journal) super.findSingleWithQuery(JournalQueryBuilder.getFindByCodeQuery("BILL")),
invoiceReference);
SumUpInvoice();
invoiceLines = new ArrayList<>();
for (PurchaseOrderLine lineToInvoice : purchaseOrderLines) {
for (PurchaseOrderLine purchaseLine : purchaseOrder.getPurchaseOrderLines()) {
if (java.util.Objects.equals(lineToInvoice.getId(), purchaseLine.getId())) {
purchaseLine.setInvoiced(Boolean.TRUE);
invoiceLines.add(new InvoiceLine(
new Date(),
purchaseLine.getUom(),
purchaseLine.getPrice(),
purchaseLine.getSubTotal(),
0d,
purchaseLine.getQuantity(),
Boolean.TRUE,
invoice,
purchaseOrder.getPartner(),
purchaseLine.getProduct(),
purchaseLine.getTax(),
(Account) super.findSingleWithQuery(AccountQueryBuilder.getFindByNameQuery("Product Purchases"))));
}
}
}
if (purchaseOrderLines.size() == uninvoicedlines) {
purchaseOrder.setInvoiceMethod("Complete");
}
//account_id not null in invoice line
invoice.setInvoiceLines(invoiceLines);
generateInvoiceTaxes();
purchaseOrder.getInvoices().add(invoice);
invoice = super.createItem(invoice);
invoice.setName(IdGenerator.generateBillId(invoice.getId()));
invoice = super.updateItem(invoice);
purchaseOrder = super.updateItem(purchaseOrder);
purchaseOrders.set(purchaseOrders.indexOf(purchaseOrder), purchaseOrder);
if (redirect) {
ExternalContext context = FacesContext.getCurrentInstance().getExternalContext();
context.redirect(context.getRequestContextPath() + "/sc/supInvoice/index.xhtml?id=" + invoice.getId());
}
}
} else {
JsfUtil.addWarningMessageDialog("InvalidAction", "ErrorAlreadyModified");
}
}
}
private void SumUpInvoice() {
invoice.setAmountUntaxed(0d);
invoice.setAmountTax(0d);
invoice.setAmountTotal(0d);
for (PurchaseOrderLine orderLine : purchaseOrderLines) {
invoice.setAmountUntaxed(invoice.getAmountUntaxed() + orderLine.getSubTotal());
if (orderLine.getTax() != null) {
invoice.setAmountTax(invoice.getAmountTax() + (orderLine.getSubTotal() * orderLine.getTax().getAmount()));
}
}
invoice.setAmountUntaxed(JsfUtil.round(invoice.getAmountUntaxed()));
invoice.setAmountTax(JsfUtil.round(invoice.getAmountTax()));
BigDecimal amountUntaxed = BigDecimal.valueOf(invoice.getAmountUntaxed());
BigDecimal amountTax = BigDecimal.valueOf(invoice.getAmountTax());
BigDecimal amountTotal = amountUntaxed.add(amountTax);
invoice.setAmountTotal(JsfUtil.round(amountTotal.doubleValue()));
invoice.setResidual(invoice.getAmountTotal());
}
private void generateInvoiceTaxes() {
List<InvoiceTax> invoiceTaxes = new ArrayList<>();
for (InvoiceLine invoiceline : invoice.getInvoiceLines()) {
if (invoiceline.getTax() != null) {
double taxAmount = JsfUtil.round(invoiceline.getPriceSubtotal() * invoiceline.getTax().getAmount());
// invoiceTaxes.add(new InvoiceTax(
// invoice.getDate(),
// taxAmount,
// invoiceline.getPriceSubtotal(),
// Boolean.TRUE,
// purchaseOrderFacade.findAccount("Tax Paid"),
// invoice,
// invoiceline.getTax()));
}
}
invoice.setInvoiceTaxes(invoiceTaxes);
}
public void prepareInvoicing() {
purchaseOrderLines = new ArrayList<>();
uninvoicedlines = 0;
for (PurchaseOrderLine purchaseLine : purchaseOrder.getPurchaseOrderLines()) {
if (!purchaseLine.getInvoiced()) {
uninvoicedlines++;
purchaseOrderLines.add(purchaseLine);
}
}
}
public void prepareInvoicing(Integer id) {
if (purchaseOrderExist(id)) {
if ((purchaseOrder.getState().equals("Purchase Order")) && (!purchaseOrder.getInvoiceMethod().equals("Complete"))) {
purchaseOrderLines = new ArrayList<>();
invoiceMethod = "Complete";
uninvoicedlines = 0;
for (PurchaseOrderLine purchaseLine : purchaseOrder.getPurchaseOrderLines()) {
if (!purchaseLine.getInvoiced()) {
uninvoicedlines++;
purchaseOrderLines.add(purchaseLine);
}
}
} else {
FacesContext.getCurrentInstance().validationFailed();
JsfUtil.addWarningMessageDialog("InvalidAction", "ErrorAlreadyModified");
}
}
}
public void updateInvoiceMethod() {
purchaseOrderLines = new ArrayList<>();
for (PurchaseOrderLine purchaseLine : purchaseOrder.getPurchaseOrderLines()) {
if (!purchaseLine.getInvoiced()) {
purchaseOrderLines.add(purchaseLine);
}
}
}
public void removeLineToInvoice(int rowIndex) {
if (rowIndex >= 0 && rowIndex < purchaseOrderLines.size()) {
purchaseOrderLines.remove(rowIndex);
}
}
public void confirmPurchase() {
if (purchaseOrderExist(purchaseOrder.getId())) {
if (purchaseOrder.getState().equals("Quotation")) {
purchaseOrder.setState("Purchase Order");
purchaseOrder = super.updateItem(purchaseOrder);
purchaseOrders.set(purchaseOrders.indexOf(purchaseOrder), purchaseOrder);
} else {
JsfUtil.addWarningMessageDialog("InvalidAction", "ErrorAlreadyModified");
}
}
}
public void cancelOrder() {
if (purchaseOrderExist(purchaseOrder.getId())) {
if ((!purchaseOrder.getState().equals("Cancelled")) && (!purchaseOrder.getState().equals("Done"))) {
boolean canBeCancelled = true;
if (!purchaseOrder.getDeliveryOrders().isEmpty()) {
if (!isDeliveryCancelled()) {
canBeCancelled = false;
JsfUtil.addWarningMessageDialog("CannotCancelPurchaseOrder", "CannotCancelPurchaseOrder_cancelDelivery");
} else if (!purchaseOrder.getInvoices().isEmpty()) {
if (!isInvoiceCancelled()) {
canBeCancelled = false;
JsfUtil.addWarningMessageDialog("CannotCancelPurchaseOrder", "CannotCancelPurchaseOrder_cancelInvoice");
}
}
} else if (!purchaseOrder.getInvoices().isEmpty()) {
if (!isInvoiceCancelled()) {
canBeCancelled = false;
JsfUtil.addWarningMessageDialog("CannotCancelPurchaseOrder", "CannotCancelPurchaseOrder_cancelInvoice");
}
}
if (canBeCancelled == true) {
purchaseOrder.setState("Cancelled");
purchaseOrder = super.updateItem(purchaseOrder);
purchaseOrders.set(purchaseOrders.indexOf(purchaseOrder), purchaseOrder);
}
} else if (purchaseOrder.getState().equals("Done")) {
JsfUtil.addWarningMessageDialog("InvalidAction", "ErrorCancelRecordDone");
} else {
JsfUtil.addWarningMessageDialog("InvalidAction", "ErrorAlreadyCancelled");
}
}
}
private boolean isDeliveryCancelled() {
for (DeliveryOrder delivery : purchaseOrder.getDeliveryOrders()) {
if (!(delivery.getState().equals("Cancelled"))) {
return false;
}
}
return true;
}
private boolean isInvoiceCancelled() {
for (Invoice invoice : purchaseOrder.getInvoices()) {
if (!(invoice.getState().equals("Cancelled"))) {
return false;
}
}
return true;
}
public void showForm() {
if (purchaseOrders != null && purchaseOrders.size() > 0) {
purchaseOrder = purchaseOrders.get(0);
currentForm = "/sc/purchaseOrder/View.xhtml";
}
}
public void showForm(Integer id) {
if (id != null) {
purchaseOrder = super.findItemById(id, PurchaseOrder.class);
if (purchaseOrder != null) {
partialListType = null;
purchaseOrderLines = null;
purchaseOrderLine = null;
query = PurchaseOrderQueryBuilder.getFindAllQuery();
purchaseOrders = super.findWithQuery(query);
currentForm = "/sc/purchaseOrder/View.xhtml";
currentList = "/sc/purchaseOrder/List.xhtml";
} else {
JsfUtil.addWarningMessage("ItemDoesNotExist");
}
}
}
public void showOrderLineForm() {
if (purchaseOrderLines != null && purchaseOrderLines.size() > 0) {
purchaseOrderLine = purchaseOrderLines.get(0);
currentForm = "/sc/purchaseOrder/ViewByProduct.xhtml";
}
}
public void showOrderList() {
purchaseOrder = null;
purchaseOrderLine = null;
purchaseOrderLines = null;
topNActiveVendors = null;
topNActivePurchasedProducts = null;
currentList = "/sc/purchaseOrder/List.xhtml";
}
public void prepareCreate() {
purchaseOrder = new PurchaseOrder();
purchaseOrder.setDate(new Date());
purchaseOrderLines = new ArrayList<>();
purchaseOrderLine = new PurchaseOrderLine();
loadActiveVendors();
loadActivePurchasedProducts();
if (topNActivePurchasedProducts != null && !topNActivePurchasedProducts.isEmpty()) {
purchaseOrderLine.setProduct(topNActivePurchasedProducts.get(0));
purchaseOrderLine.setPrice(purchaseOrderLine.getProduct().getPurchasePrice());
purchaseOrderLine.setUom(purchaseOrderLine.getProduct().getUom().getName());
}
currentForm = "/sc/purchaseOrder/Create.xhtml";
}
public void prepareEdit() {
if (purchaseOrderExist(purchaseOrder.getId())) {
if (purchaseOrder.getState().equals("Quotation")) {
purchaseOrderLine = new PurchaseOrderLine();
purchaseOrderLines = purchaseOrder.getPurchaseOrderLines();
loadActiveVendors();
loadActivePurchasedProducts();
if (topNActivePurchasedProducts != null && !topNActivePurchasedProducts.isEmpty()) {
purchaseOrderLine.setProduct(topNActivePurchasedProducts.get(0));
purchaseOrderLine.setPrice(purchaseOrderLine.getProduct().getPurchasePrice());
purchaseOrderLine.setUom(purchaseOrderLine.getProduct().getUom().getName());
}
if (!topNActiveVendors.contains(purchaseOrder.getPartner())) {
topNActiveVendors.add(purchaseOrder.getPartner());
}
for (PurchaseOrderLine orderLine : purchaseOrderLines) {
if (!topNActivePurchasedProducts.contains(orderLine.getProduct())) {
topNActivePurchasedProducts.add(orderLine.getProduct());
}
}
currentForm = "/sc/purchaseOrder/Edit.xhtml";
} else {
JsfUtil.addWarningMessageDialog("InvalidAction", "ErrorEditQuotation");
}
}
}
public void prepareView() {
if (purchaseOrder != null) {
if (purchaseOrderExist(purchaseOrder.getId())) {
currentForm = "/sc/purchaseOrder/View.xhtml";
}
}
}
public void cancelCreate() {
purchaseOrderLine = null;
purchaseOrderLines = null;
topNActiveVendors = null;
topNActivePurchasedProducts = null;
if (purchaseOrders != null && !purchaseOrders.isEmpty()) {
purchaseOrder = purchaseOrders.get(0);
currentForm = "/sc/purchaseOrder/View.xhtml";
}
}
public void cancelEdit() {
purchaseOrderLine = null;
purchaseOrderLines = null;
topNActiveVendors = null;
topNActivePurchasedProducts = null;
if (purchaseOrderExist(purchaseOrder.getId())) {
currentForm = "/sc/purchaseOrder/View.xhtml";
}
}
public void resolveRequestParams() {
if (JsfUtil.isNumeric(purchaseId)) {
Integer id = Integer.valueOf(purchaseId);
purchaseOrder = super.findItemById(id, PurchaseOrder.class);
if (purchaseOrder != null) {
query = PurchaseOrderQueryBuilder.getFindAllQuery();
purchaseOrders = super.findWithQuery(query);
currentList = "/sc/purchaseOrder/List.xhtml";
currentForm = "/sc/purchaseOrder/View.xhtml";
return;
}
}
if (JsfUtil.isNumeric(partnerId)) {
Integer id = Integer.valueOf(partnerId);
query = PurchaseOrderQueryBuilder.getFindByVendorQuery(id);
purchaseOrders = super.findWithQuery(query);
if (purchaseOrders != null && !purchaseOrders.isEmpty()) {
purchaseOrder = purchaseOrders.get(0);
currentList = "/sc/purchaseOrder/List.xhtml";
currentForm = "/sc/purchaseOrder/View.xhtml";
partialListType = "partner";
return;
}
}
if (JsfUtil.isNumeric(productId)) {
Integer id = Integer.valueOf(productId);
query = PurchaseOrderLineQueryBuilder.getFindByProductQuery(id);
purchaseOrderLines = super.findWithQuery(query);
if (purchaseOrderLines != null && !purchaseOrderLines.isEmpty()) {
purchaseOrderLine = purchaseOrderLines.get(0);
currentList = "/sc/purchaseOrder/ListByProduct.xhtml";
currentForm = "/sc/purchaseOrder/ViewByProduct.xhtml";
return;
}
}
if (JsfUtil.isNumeric(purchaseLineId)) {
Integer id = Integer.valueOf(purchaseLineId);
purchaseOrderLine = super.findItemById(id, PurchaseOrderLine.class);
if (purchaseOrderLine != null) {
query = PurchaseOrderLineQueryBuilder.getFindByProductQuery(purchaseOrderLine.getProduct().getId());
purchaseOrderLines = super.findWithQuery(query);
productId = Integer.toString(purchaseOrderLine.getProduct().getId());
currentList = "/sc/purchaseOrder/ListByProduct.xhtml";
currentForm = "/sc/purchaseOrder/ViewByProduct.xhtml";
return;
}
}
currentList = "/sc/purchaseOrder/List.xhtml";
currentForm = "/sc/purchaseOrder/View.xhtml";
query = PurchaseOrderQueryBuilder.getFindAllQuery();
purchaseOrders = super.findWithQuery(query);
if (purchaseOrders != null && !purchaseOrders.isEmpty()) {
purchaseOrder = purchaseOrders.get(0);
}
}
public void duplicatePurchaseOrder() {
if (purchaseOrderExist(purchaseOrder.getId())) {
purchaseOrder.getPurchaseOrderLines().size(); // presumably forces initialization of the lazy line collection before cloning
PurchaseOrder newPurchaseOrder = (PurchaseOrder) SerializationUtils.clone(purchaseOrder);
newPurchaseOrder.setId(null);
newPurchaseOrder.setDeliveryOrders(null);
newPurchaseOrder.setInvoiceMethod("Partial");
newPurchaseOrder.setInvoices(null);
newPurchaseOrder.setPaid(Boolean.FALSE);
newPurchaseOrder.setShipped(Boolean.FALSE);
newPurchaseOrder.setDeliveryCreated(Boolean.FALSE);
newPurchaseOrder.setDate(new Date());
newPurchaseOrder.setNotes(null);
//newPurchaseOrder.setDiscount(null);
newPurchaseOrder.setName(null);
newPurchaseOrder.setReference(null);
newPurchaseOrder.setState("Quotation");
newPurchaseOrder.setActive(Boolean.TRUE);
for (PurchaseOrderLine line : newPurchaseOrder.getPurchaseOrderLines()) {
line.setId(null);
line.setInvoiced(Boolean.FALSE);
line.setPurchaseOrder(newPurchaseOrder);
}
purchaseOrder = newPurchaseOrder;
purchaseOrderLine = new PurchaseOrderLine();
purchaseOrderLines = purchaseOrder.getPurchaseOrderLines();
loadActiveVendors();
loadActivePurchasedProducts();
if (topNActivePurchasedProducts != null && !topNActivePurchasedProducts.isEmpty()) {
purchaseOrderLine.setProduct(topNActivePurchasedProducts.get(0));
purchaseOrderLine.setPrice(purchaseOrderLine.getProduct().getPurchasePrice());
purchaseOrderLine.setUom(purchaseOrderLine.getProduct().getUom().getName());
}
if (!topNActiveVendors.contains(purchaseOrder.getPartner())) {
topNActiveVendors.add(purchaseOrder.getPartner());
}
for (PurchaseOrderLine orderLine : purchaseOrderLines) {
if (!topNActivePurchasedProducts.contains(orderLine.getProduct())) {
topNActivePurchasedProducts.add(orderLine.getProduct());
}
}
currentForm = "/sc/purchaseOrder/Create.xhtml";
}
}
public void printOrder(ActionEvent actionEvent) throws IOException, JRException {
for (PurchaseOrderLine orderLine : purchaseOrder.getPurchaseOrderLines()) {
orderLine.setProductName(orderLine.getProduct().getName());
if (orderLine.getTax() != null) {
orderLine.setTaxName(orderLine.getTax().getName());
} else {
orderLine.setTaxName("");
}
}
ResourceBundle bundle = JsfUtil.getBundle();
String name = bundle.getString("PurchaseOrder");
String currency = bundle.getString("Currency");
Map<String, Object> params = new HashMap<>();
params.put("purchaseOrder", purchaseOrder);
params.put("partner", purchaseOrder.getPartner());
params.put("orderLines", purchaseOrder.getPurchaseOrderLines());
params.put("currency", currency);
params.put("SUBREPORT_DIR", FacesContext.getCurrentInstance().getExternalContext().getRealPath("/reports/") + "/");
String reportPath = FacesContext.getCurrentInstance().getExternalContext().getRealPath("/reports/purchaseOrder.jasper");
JasperPrint jasperPrint = JasperFillManager.fillReport(reportPath, params, new JREmptyDataSource());
// JasperPrint jasperPrint = JasperFillManager.fillReport(reportPath, new HashMap<String,Object>(), new JRBeanArrayDataSource(new SaleOrder[]{saleOrder}));
HttpServletResponse httpServletResponse = (HttpServletResponse) FacesContext.getCurrentInstance().getExternalContext().getResponse();
httpServletResponse.addHeader("Content-disposition", "attachment; filename=" + name + "_" + purchaseOrder.getName() + ".pdf");
ServletOutputStream servletOutputStream = httpServletResponse.getOutputStream();
JasperExportManager.exportReportToPdfStream(jasperPrint, servletOutputStream);
FacesContext.getCurrentInstance().responseComplete();
}
public void onRowAdd(ActionEvent event) {
purchaseOrderLine.setActive(Boolean.TRUE);
purchaseOrderLine.setInvoiced(Boolean.FALSE);
purchaseOrderLine.setPrice(JsfUtil.round(purchaseOrderLine.getPrice()));
purchaseOrderLine.setQuantity(JsfUtil.round(purchaseOrderLine.getQuantity(), purchaseOrderLine.getProduct().getUom().getDecimals()));
if (purchaseOrderLine.getQuantity() == 0d) {
purchaseOrderLine.setQuantity(1d);
}
if (purchaseOrderLine.getPrice() == 0d) {
purchaseOrderLine.setTax(null);
}
purchaseOrderLine.setSubTotal(JsfUtil.round((purchaseOrderLine.getPrice()) * (purchaseOrderLine.getQuantity())));
purchaseOrderLines.add(purchaseOrderLine);
SumUpOrder();
purchaseOrderLine = new PurchaseOrderLine();
if (topNActivePurchasedProducts != null && !topNActivePurchasedProducts.isEmpty()) {
purchaseOrderLine.setProduct(topNActivePurchasedProducts.get(0));
purchaseOrderLine.setPrice(topNActivePurchasedProducts.get(0).getPurchasePrice());
purchaseOrderLine.setUom(topNActivePurchasedProducts.get(0).getUom().getName());
}
}
public void onRowDelete(int index) {
purchaseOrderLines.remove(index);
SumUpOrder();
}
public void onRowEditInit(PurchaseOrderLine orderLine) {
purchaseOrderLine = (PurchaseOrderLine) SerializationUtils.clone(orderLine);
}
public void onRowEdit(int index) {
purchaseOrderLines.get(index).setQuantity(JsfUtil.round(purchaseOrderLines.get(index).getQuantity(), purchaseOrderLines.get(index).getProduct().getUom().getDecimals()));
purchaseOrderLines.get(index).setPrice(JsfUtil.round(purchaseOrderLines.get(index).getPrice()));
if (purchaseOrderLines.get(index).getQuantity() == 0d) {
purchaseOrderLines.get(index).setQuantity(1d);
}
if (purchaseOrderLines.get(index).getPrice() == 0d) {
purchaseOrderLines.get(index).setTax(null);
}
purchaseOrderLines.get(index).setSubTotal(JsfUtil.round(purchaseOrderLines.get(index).getPrice() * purchaseOrderLines.get(index).getQuantity()));
SumUpOrder();
}
public void onRowCancel(int index) {
purchaseOrderLines.remove(index);
purchaseOrderLines.add(index, purchaseOrderLine);
purchaseOrderLine = new PurchaseOrderLine();
if (topNActivePurchasedProducts != null && !topNActivePurchasedProducts.isEmpty()) {
purchaseOrderLine.setProduct(topNActivePurchasedProducts.get(0));
purchaseOrderLine.setPrice(purchaseOrderLine.getProduct().getPurchasePrice());
purchaseOrderLine.setUom(purchaseOrderLine.getProduct().getUom().getName());
}
}
public void onRowCancel() {
purchaseOrderLine = new PurchaseOrderLine();
if (topNActivePurchasedProducts != null && !topNActivePurchasedProducts.isEmpty()) {
purchaseOrderLine.setProduct(topNActivePurchasedProducts.get(0));
purchaseOrderLine.setPrice(purchaseOrderLine.getProduct().getPurchasePrice());
purchaseOrderLine.setUom(purchaseOrderLine.getProduct().getUom().getName());
}
}
public void onProductChange() {
purchaseOrderLine.setPrice(purchaseOrderLine.getProduct().getPurchasePrice());
purchaseOrderLine.setUom(purchaseOrderLine.getProduct().getUom().getName());
}
public void onProductChange(int rowIndex) {
purchaseOrderLines.get(rowIndex).setPrice(purchaseOrderLines.get(rowIndex).getProduct().getPurchasePrice());
purchaseOrderLines.get(rowIndex).setUom(purchaseOrderLines.get(rowIndex).getProduct().getUom().getName());
}
private void SumUpOrder() {
purchaseOrder.setAmountUntaxed(0d);
purchaseOrder.setAmountTax(0d);
purchaseOrder.setAmountTotal(0d);
for (PurchaseOrderLine orderLine : purchaseOrderLines) {
purchaseOrder.setAmountUntaxed(purchaseOrder.getAmountUntaxed() + orderLine.getSubTotal());
if (orderLine.getTax() != null) {
purchaseOrder.setAmountTax(purchaseOrder.getAmountTax() + (orderLine.getSubTotal() * orderLine.getTax().getAmount()));
}
}
purchaseOrder.setAmountUntaxed(JsfUtil.round(purchaseOrder.getAmountUntaxed()));
purchaseOrder.setAmountTax(JsfUtil.round(purchaseOrder.getAmountTax()));
BigDecimal amountUntaxed = BigDecimal.valueOf(purchaseOrder.getAmountUntaxed());
BigDecimal amountTax = BigDecimal.valueOf(purchaseOrder.getAmountTax());
BigDecimal amountTotal = amountUntaxed.add(amountTax);
purchaseOrder.setAmountTotal(JsfUtil.round(amountTotal.doubleValue()));
}
public String getStatus(String status) {
return statuses.get(status);
}
public String getStatusColor(String status) {
switch (status) {
case "Quotation":
return "#009fd4";
case "Purchase Order":
return "#406098";
case "Done":
return "#3477db";
default:
return "#6d8891";
}
}
private boolean purchaseOrderExist(Integer id) {
if (id != null) {
PurchaseOrder stale = purchaseOrder;
purchaseOrder = super.findItemById(id, PurchaseOrder.class);
if (purchaseOrder == null) {
JsfUtil.addWarningMessage("ItemDoesNotExist");
purchaseOrders.remove(stale); // drop the stale in-memory copy, not the null lookup result
purchaseOrder = purchaseOrders.get(0);
// partialListType = null; (new modification)
currentForm = "/sc/purchaseOrder/View.xhtml"; // purchaseOrderLines !!!
currentList = "/sc/purchaseOrder/List.xhtml"; // purchaseOrderLines !!!
return false;
} else {
return true;
}
} else {
return false;
}
}
private String getOrderStatus(Integer id) {
if (id != null) {
PurchaseOrder purchaseOrder = super.findItemById(id, PurchaseOrder.class);
if (purchaseOrder != null) {
return purchaseOrder.getState();
} else {
JsfUtil.addWarningMessage("ItemDoesNotExist");
purchaseOrders.remove(this.purchaseOrder); // remove the stale field value; the local purchaseOrder is null here
this.purchaseOrder = purchaseOrders.get(0);
// partialListType = null;
currentForm = "/sc/purchaseOrder/View.xhtml"; // purchaseOrderLines !!!
currentList = "/sc/purchaseOrder/List.xhtml"; // purchaseOrderLines !!!
return null;
}
}
return null;
}
public Double getLineTax() {
if (purchaseOrderLine != null && purchaseOrderLine.getTax() != null) {
return JsfUtil.round(purchaseOrderLine.getQuantity() * purchaseOrderLine.getPrice() * purchaseOrderLine.getTax().getAmount());
} else {
return 0d;
}
}
public Double getLineTotal() {
return getLineTax() + purchaseOrderLine.getSubTotal();
}
private void loadActiveVendors() {
query = PartnerQueryBuilder.getFindActiveVendorsQuery();
activeVendors = super.findWithQuery(query);
if (activeVendors != null && activeVendors.size() > MAX_DROPDOWN_ITEMS) {
topNActiveVendors = activeVendors.subList(0, MAX_DROPDOWN_ITEMS);
} else {
topNActiveVendors = activeVendors;
}
}
private void loadActivePurchasedProducts() {
query = ProductQueryBuilder.getFindActivePurchasedProductsQuery();
activePurchasedProducts = super.findWithQuery(query);
if (activePurchasedProducts != null && activePurchasedProducts.size() > MAX_DROPDOWN_ITEMS) {
topNActivePurchasedProducts = activePurchasedProducts.subList(0, MAX_DROPDOWN_ITEMS);
} else {
topNActivePurchasedProducts = activePurchasedProducts;
}
}
public PurchaseOrder getPurchaseOrder() {
if (purchaseOrder == null) {
return purchaseOrder = new PurchaseOrder();
}
return purchaseOrder;
}
public void setPurchaseOrder(PurchaseOrder purchaseOrder) {
this.purchaseOrder = purchaseOrder;
}
public PurchaseOrderLine getPurchaseOrderLine() {
if (purchaseOrderLine == null) {
purchaseOrderLine = new PurchaseOrderLine();
}
return purchaseOrderLine;
}
public void setPurchaseOrderLine(PurchaseOrderLine purchaseOrderLine) {
this.purchaseOrderLine = purchaseOrderLine;
}
public List<PurchaseOrder> getPurchaseOrders() {
if (purchaseOrders == null) {
query = PurchaseOrderQueryBuilder.getFindAllQuery();
purchaseOrders = super.findWithQuery(query);
}
return purchaseOrders;
}
public List<PurchaseOrder> getFilteredPurchaseOrders() {
return filteredPurchaseOrders;
}
public void setFilteredPurchaseOrders(List<PurchaseOrder> filteredPurchaseOrders) {
this.filteredPurchaseOrders = filteredPurchaseOrders;
}
public void setPurchaseOrders(List<PurchaseOrder> purchaseOrders) {
this.purchaseOrders = purchaseOrders;
}
public List<PurchaseOrderLine> getPurchaseOrderLines() {
if (purchaseOrderLines == null) {
purchaseOrderLines = new ArrayList<>();
}
return purchaseOrderLines;
}
public void setPurchaseOrderLines(List<PurchaseOrderLine> purchaseOrderLines) {
this.purchaseOrderLines = purchaseOrderLines;
}
public List<PurchaseOrderLine> getFilteredPurchaseOrderLines() {
return filteredPurchaseOrderLines;
}
public void setFilteredPurchaseOrderLines(List<PurchaseOrderLine> filteredPurchaseOrderLines) {
this.filteredPurchaseOrderLines = filteredPurchaseOrderLines;
}
public String getPurchaseId() {
return purchaseId;
}
public void setPurchaseId(String purchaseId) {
this.purchaseId = purchaseId;
}
public String getPurchaseLineId() {
return purchaseLineId;
}
public void setPurchaseLineId(String purchaseLineId) {
this.purchaseLineId = purchaseLineId;
}
public String getPartnerId() {
return partnerId;
}
public void setPartnerId(String partnerId) {
this.partnerId = partnerId;
}
public String getProductId() {
return productId;
}
public void setProductId(String productId) {
this.productId = productId;
}
public String getPartialListType() {
return partialListType;
}
public void setPartialListType(String partialListType) {
this.partialListType = partialListType;
}
public String getCurrentForm() {
return currentForm;
}
public void setCurrentForm(String currentForm) {
this.currentForm = currentForm;
}
public String getCurrentList() {
return currentList;
}
public void setCurrentList(String currentList) {
this.currentList = currentList;
}
public String getInvoiceMethod() {
return invoiceMethod;
}
public void setInvoiceMethod(String invoiceMethod) {
this.invoiceMethod = invoiceMethod;
}
public int getPurchaseOrderIndex() {
if (purchaseOrders != null && purchaseOrder != null) {
return purchaseOrders.indexOf(purchaseOrder) + 1;
}
return 0;
}
public void nextPurchaseOrder() {
if (purchaseOrders.indexOf(purchaseOrder) == (purchaseOrders.size() - 1)) {
purchaseOrder = purchaseOrders.get(0);
} else {
purchaseOrder = purchaseOrders.get(purchaseOrders.indexOf(purchaseOrder) + 1);
}
}
public void previousPurchaseOrder() {
if (purchaseOrders.indexOf(purchaseOrder) == 0) {
purchaseOrder = purchaseOrders.get(purchaseOrders.size() - 1);
} else {
purchaseOrder = purchaseOrders.get(purchaseOrders.indexOf(purchaseOrder) - 1);
}
}
// public void showOrderLineList() {
// purchaseOrderLine = null;
// currentList = "/sc/purchaseOrder/ListByProduct.xhtml";
// }
public void prepareViewOrderByProduct() {
if (purchaseOrderLine != null) {
currentForm = "/sc/purchaseOrder/ViewByProduct.xhtml";
} else {
JsfUtil.addWarningMessage("ItemDoesNotExist");
}
}
public int getOrderLineIndex() {
if (purchaseOrderLines != null && purchaseOrderLine != null) {
return purchaseOrderLines.indexOf(purchaseOrderLine) + 1;
}
return 0;
}
public void nextOrderLine() {
if (purchaseOrderLines.indexOf(purchaseOrderLine) == (purchaseOrderLines.size() - 1)) {
purchaseOrderLine = purchaseOrderLines.get(0);
} else {
purchaseOrderLine = purchaseOrderLines.get(purchaseOrderLines.indexOf(purchaseOrderLine) + 1);
}
}
public void previousOrderLine() {
if (purchaseOrderLines.indexOf(purchaseOrderLine) == 0) {
purchaseOrderLine = purchaseOrderLines.get(purchaseOrderLines.size() - 1);
} else {
purchaseOrderLine = purchaseOrderLines.get(purchaseOrderLines.indexOf(purchaseOrderLine) - 1);
}
}
public List<Partner> getTopNActiveVendors() {
return topNActiveVendors;
}
public List<Partner> getActiveVendors() {
return activeVendors;
}
public List<Partner> getFilteredActiveVendors() {
return filteredActiveVendors;
}
public List<Product> getTopNActivePurchasedProducts() {
return topNActivePurchasedProducts;
}
public List<Product> getActivePurchasedProducts() {
return activePurchasedProducts;
}
public List<Product> getFilteredActivePurchasedProducts() {
return filteredActivePurchasedProducts;
}
public Product getProduct() {
return product;
}
public void setProduct(Product product) {
this.product = product;
}
public Partner getSupplier() {
return supplier;
}
public void setSupplier(Partner supplier) {
this.supplier = supplier;
}
}
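/*
 * Illustrative sketch (not part of the original controller): the amount roll-up performed by
 * SumUpOrder/SumUpInvoice above, reduced to plain JDK arithmetic so it can be run standalone.
 * The line subtotals, the 19% tax rate and the two-decimal rounding below are assumptions for
 * the example; the controller itself delegates rounding to JsfUtil.round.
 */
class PurchaseAmountRollUpExample {
    public static void main(String[] args) {
        double[] subTotals = {100.00, 49.99}; // hypothetical line subtotals
        double taxRate = 0.19;                // hypothetical per-line tax amount
        double untaxed = 0d;
        double tax = 0d;
        for (double subTotal : subTotals) {
            untaxed += subTotal;              // amountUntaxed accumulates the line subtotals
            tax += subTotal * taxRate;        // amountTax accumulates subTotal * tax amount
        }
        // the grand total is added via BigDecimal, mirroring SumUpOrder, to avoid double drift
        java.math.BigDecimal total = java.math.BigDecimal.valueOf(untaxed)
                .add(java.math.BigDecimal.valueOf(tax))
                .setScale(2, java.math.RoundingMode.HALF_UP);
        System.out.println("untaxed=" + untaxed + " tax=" + tax + " total=" + total);
    }
}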
|
|
package org.devocative.ares.web.dpage;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.model.CompoundPropertyModel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.model.ResourceModel;
import org.devocative.ares.AresPrivilegeKey;
import org.devocative.ares.entity.EServerOS;
import org.devocative.ares.entity.OServer;
import org.devocative.ares.entity.oservice.OServiceInstance;
import org.devocative.ares.iservice.IOServerService;
import org.devocative.ares.vo.filter.OServerFVO;
import org.devocative.ares.web.AresIcon;
import org.devocative.ares.web.dpage.oservice.OSIUserFormDPage;
import org.devocative.ares.web.dpage.oservice.OServiceInstanceFormDPage;
import org.devocative.demeter.web.DPage;
import org.devocative.demeter.web.component.DAjaxButton;
import org.devocative.demeter.web.component.grid.OEditAjaxColumn;
import org.devocative.wickomp.WModel;
import org.devocative.wickomp.form.WSelectionInput;
import org.devocative.wickomp.form.WTextInput;
import org.devocative.wickomp.form.range.WDateRangeInput;
import org.devocative.wickomp.form.range.WNumberRangeInput;
import org.devocative.wickomp.formatter.ODateFormatter;
import org.devocative.wickomp.formatter.ONumberFormatter;
import org.devocative.wickomp.grid.IGridDataSource;
import org.devocative.wickomp.grid.OGrid;
import org.devocative.wickomp.grid.WDataGrid;
import org.devocative.wickomp.grid.WSortField;
import org.devocative.wickomp.grid.column.OColumnList;
import org.devocative.wickomp.grid.column.OPropertyColumn;
import org.devocative.wickomp.grid.column.link.OAjaxLinkColumn;
import org.devocative.wickomp.grid.toolbar.OExportExcelButton;
import org.devocative.wickomp.grid.toolbar.OGridGroupingButton;
import org.devocative.wickomp.html.WAjaxLink;
import org.devocative.wickomp.html.WFloatTable;
import org.devocative.wickomp.html.icon.FontAwesome;
import org.devocative.wickomp.html.window.WModalWindow;
import org.devocative.wickomp.opt.IStyler;
import org.devocative.wickomp.opt.OSize;
import org.devocative.wickomp.opt.OStyle;
import javax.inject.Inject;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
public class OServerListDPage extends DPage implements IGridDataSource<OServer> {
private static final long serialVersionUID = -1874204417L;
@Inject
private IOServerService oServerService;
private OServerFVO filter;
private boolean formVisible = true;
private String[] invisibleFormItems;
private WDataGrid<OServer> grid;
private String[] removeColumns;
private Boolean gridFit;
private boolean gridEnabled = false;
private OSize gridHeight = OSize.fixed(600);
private OSize gridWidth = OSize.percent(100);
// ------------------------------
// Panel Call - New Filter
public OServerListDPage(String id) {
this(id, Collections.<String>emptyList(), new OServerFVO());
}
// Panel Call - Open Filter
public OServerListDPage(String id, OServerFVO filter) {
this(id, Collections.<String>emptyList(), filter);
}
// REST Call - New Filter
public OServerListDPage(String id, List<String> params) {
this(id, params, new OServerFVO());
}
// Main Constructor
private OServerListDPage(String id, List<String> params, OServerFVO filter) {
super(id, params);
this.filter = filter;
}
// ------------------------------
@Override
protected void onInitialize() {
super.onInitialize();
final WModalWindow window = new WModalWindow("window");
add(window);
add(new WAjaxLink("add", AresIcon.ADD) {
private static final long serialVersionUID = 1460678018L;
@Override
public void onClick(AjaxRequestTarget target) {
window.setContent(new OServerFormDPage(window.getContentId()));
window.show(target);
}
}.setVisible(hasPermission(AresPrivilegeKey.OServerAdd)));
WFloatTable floatTable = new WFloatTable("floatTable");
floatTable.add(new WTextInput("name")
.setLabel(new ResourceModel("OServer.name")));
floatTable.add(new WTextInput("address")
.setLabel(new ResourceModel("OServer.address")));
floatTable.add(new WSelectionInput("function", oServerService.getFunctionList(), true)
.setLabel(new ResourceModel("OServer.function")));
floatTable.add(new WNumberRangeInput("counter", Integer.class)
.setLabel(new ResourceModel("OServer.counter")));
floatTable.add(new WSelectionInput("environment", oServerService.getEnvironmentList(), true)
.setLabel(new ResourceModel("OServer.environment")));
floatTable.add(new WSelectionInput("location", oServerService.getLocationList(), true)
.setLabel(new ResourceModel("OServer.location")));
floatTable.add(new WSelectionInput("company", oServerService.getCompanyList(), true)
.setLabel(new ResourceModel("OServer.company")));
floatTable.add(new WTextInput("vmId")
.setLabel(new ResourceModel("OServer.vmId")));
floatTable.add(new WSelectionInput("serverOS", EServerOS.list(), true)
.setLabel(new ResourceModel("OServer.serverOS")));
floatTable.add(new WSelectionInput("hypervisor", oServerService.getHypervisorList(), true)
.setLabel(new ResourceModel("OServer.hypervisor")));
floatTable.add(new WSelectionInput("owner", oServerService.getOwnerList(), true)
.setLabel(new ResourceModel("OServer.owner")));
floatTable.add(new WDateRangeInput("creationDate")
.setTimePartVisible(true)
.setLabel(new ResourceModel("entity.creationDate")));
floatTable.add(new WSelectionInput("creatorUser", oServerService.getCreatorUserList(), true)
.setLabel(new ResourceModel("entity.creatorUser")));
floatTable.add(new WDateRangeInput("modificationDate")
.setTimePartVisible(true)
.setLabel(new ResourceModel("entity.modificationDate")));
floatTable.add(new WSelectionInput("modifierUser", oServerService.getModifierUserList(), true)
.setLabel(new ResourceModel("entity.modifierUser")));
Form<OServerFVO> form = new Form<>("form", new CompoundPropertyModel<>(filter));
form.add(floatTable);
form.add(new DAjaxButton("search", new ResourceModel("label.search"), AresIcon.SEARCH) {
private static final long serialVersionUID = -1235411353L;
@Override
protected void onSubmit(AjaxRequestTarget target) {
grid.setEnabled(true);
grid.loadData(target);
}
});
add(form);
OColumnList<OServer> columnList = new OColumnList<>();
columnList.add(new OPropertyColumn<>(new ResourceModel("OServer.name"), "name"));
columnList.add(new OPropertyColumn<>(new ResourceModel("OServer.address"), "address"));
columnList.add(new OPropertyColumn<>(new ResourceModel("OServer.services"), "services"));
columnList.add(new OPropertyColumn<>(new ResourceModel("OServer.function"), "function"));
columnList.add(new OPropertyColumn<OServer>(new ResourceModel("OServer.counter"), "counter")
.setFormatter(ONumberFormatter.integer())
.setStyle("direction:ltr"));
columnList.add(new OPropertyColumn<>(new ResourceModel("OServer.environment"), "environment"));
columnList.add(new OPropertyColumn<>(new ResourceModel("OServer.location"), "location"));
columnList.add(new OPropertyColumn<>(new ResourceModel("OServer.company"), "company"));
columnList.add(new OPropertyColumn<>(new ResourceModel("OServer.vmId"), "vmId"));
columnList.add(new OPropertyColumn<>(new ResourceModel("OServer.serverOS"), "serverOS"));
columnList.add(new OPropertyColumn<>(new ResourceModel("OServer.hypervisor"), "hypervisor"));
columnList.add(new OPropertyColumn<>(new ResourceModel("OServer.owner"), "owner"));
columnList.add(new OPropertyColumn<OServer>(new ResourceModel("entity.creationDate"), "creationDate")
.setFormatter(ODateFormatter.getDateTimeByUserPreference())
.setStyle("direction:ltr"));
columnList.add(new OPropertyColumn<>(new ResourceModel("entity.creatorUser"), "creatorUser"));
columnList.add(new OPropertyColumn<OServer>(new ResourceModel("entity.modificationDate"), "modificationDate")
.setFormatter(ODateFormatter.getDateTimeByUserPreference())
.setStyle("direction:ltr"));
columnList.add(new OPropertyColumn<>(new ResourceModel("entity.modifierUser"), "modifierUser"));
columnList.add(new OPropertyColumn<OServer>(new ResourceModel("entity.version"), "version")
.setFormatter(ONumberFormatter.integer())
.setStyle("direction:ltr"));
if (hasPermission(AresPrivilegeKey.OServerEdit)) {
columnList.add(new OEditAjaxColumn<OServer>() {
private static final long serialVersionUID = 585801344L;
@Override
public void onClick(AjaxRequestTarget target, IModel<OServer> rowData) {
window.setContent(new OServerFormDPage(window.getContentId(), rowData.getObject()));
window.show(target);
}
});
}
if (hasPermission(AresPrivilegeKey.OServiceInstanceAdd)) {
columnList.add(new OAjaxLinkColumn<OServer>(new Model<>(), AresIcon.ADD.setTooltip(new Model<>("Add Service Instance"))) {
private static final long serialVersionUID = 6041498077363834924L;
@Override
public void onClick(AjaxRequestTarget target, IModel<OServer> rowData) {
window.setContent(new OServiceInstanceFormDPage(window.getContentId(), new OServiceInstance(null, rowData.getObject(), null)));
window.show(target);
}
});
}
if (hasPermission(AresPrivilegeKey.OSIUserAdd)) {
columnList.add(new OAjaxLinkColumn<OServer>(new Model<>(), AresIcon.ADD_USER.setTooltip(new Model<>("Add Service Instance User"))) {
private static final long serialVersionUID = 6041498077363834924L;
@Override
public void onClick(AjaxRequestTarget target, IModel<OServer> rowData) {
window.setContent(new OSIUserFormDPage(window.getContentId()).setServerId(rowData.getObject().getId()));
window.show(target);
}
});
}
OGrid<OServer> oGrid = new OGrid<>();
oGrid
.setColumns(columnList)
.setMultiSort(false)
.setPageList(Arrays.asList(20, 40, 100, 200))
.setRowStyler((IStyler<OServer> & Serializable) (bean, id) ->
OStyle.style(bean.getHypervisorId() == null ? "background-color:#f0e68c" : null))
.addToolbarButton(new OGridGroupingButton<>(new FontAwesome("expand"), new FontAwesome("compress")))
.addToolbarButton(new OExportExcelButton<>(new FontAwesome("file-excel-o", new Model<>("Export to excel")).setColor("green"), this))
.setHeight(gridHeight)
.setWidth(gridWidth)
.setFit(gridFit);
grid = new WDataGrid<>("grid", oGrid, this);
add(grid);
// ---------------
form.setVisible(formVisible);
grid.setEnabled(gridEnabled || !formVisible);
if (invisibleFormItems != null) {
for (String formItem : invisibleFormItems) {
floatTable.get(formItem).setVisible(false);
}
}
if (removeColumns != null) {
for (String column : removeColumns) {
columnList.removeColumn(column);
}
}
}
// ------------------------------
public OServerListDPage setFormVisible(boolean formVisible) {
this.formVisible = formVisible;
return this;
}
public OServerListDPage setInvisibleFormItems(String... invisibleFormItems) {
this.invisibleFormItems = invisibleFormItems;
return this;
}
public OServerListDPage setGridHeight(OSize gridHeight) {
this.gridHeight = gridHeight;
return this;
}
public OServerListDPage setGridWidth(OSize gridWidth) {
this.gridWidth = gridWidth;
return this;
}
public OServerListDPage setGridFit(Boolean gridFit) {
this.gridFit = gridFit;
return this;
}
public OServerListDPage setGridEnabled(boolean gridEnabled) {
this.gridEnabled = gridEnabled;
return this;
}
public OServerListDPage setRemoveColumns(String... removeColumns) {
this.removeColumns = removeColumns;
return this;
}
// ------------------------------ IGridDataSource
@Override
public List<OServer> list(long pageIndex, long pageSize, List<WSortField> sortFields) {
return oServerService.search(filter, pageIndex, pageSize);
}
@Override
public long count() {
return oServerService.count(filter);
}
@Override
public IModel<OServer> model(OServer object) {
return new WModel<>(object);
}
}
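/*
 * Usage sketch (not part of the original source): how a caller might embed this list page as a
 * pre-filtered, read-only grid using the fluent setters defined above. The component id
 * "serverList" and the idea of pre-filling the filter are assumptions for the example.
 *
 *     OServerFVO filter = new OServerFVO();
 *     OServerListDPage page = new OServerListDPage("serverList", filter)
 *         .setFormVisible(false)                    // hide the search form entirely
 *         .setGridEnabled(true)                     // let the grid load without pressing search
 *         .setGridHeight(OSize.fixed(400))
 *         .setRemoveColumns("vmId", "hypervisor");  // drop columns that are not needed here
 */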
|
|
package schemaConversion;
import com.google.gson.JsonParser;
import production.Reagan_Main_V4;
import java.io.*;
import java.util.*;
import java.util.regex.Pattern;
/**
* Class that translates the graph database schema that Neo4J uses, into a set of relations
* that can be executed on a relational database. This includes relations for all the nodes
* and relationships, as well as more specific ones for each label type and type of
* relationship. Additional metafiles are created to help the translator and output module
* of Reagan.
* <p>
 * The class makes use of parallel processing on the original dump file to speed up the conversion.
* The original dump file from Neo4J needs to be parsed beforehand, as it contains unnecessary
* line breaks, as well as characters that will not work in SQL. The config.properties file
* states the location of the 'work area', which is used as scratch space for this method.
*/
public class SchemaTranslate {
// storing all the labels for nodes and edges.
public static List<String> nodeRelLabels = Collections.synchronizedList(new ArrayList<>());
public static List<String> edgesRelLabels = Collections.synchronizedList(new ArrayList<>());
// storing separate information for types of nodes
public static Map<String, String> labelMappings = Collections.synchronizedMap(new HashMap<>());
// storing separate information on the types of relationships
public static List<String> relTypes = Collections.synchronizedList(new ArrayList<>());
// workspace area for both nodes and edges
public static String nodesFile = Reagan_Main_V4.workspaceArea + "/nodes.txt";
public static String edgesFile = Reagan_Main_V4.workspaceArea + "/edges.txt";
// JSON Parser for creating JSON objects from the text file.
// passed to all of the threads
static JsonParser parser = new JsonParser();
private static char[] alphabet = "abcdefghijklmnopqrstuvwxyz".toCharArray();
// regex for deciding whether a line is a node or a relationship
private static String patternForNode = "(_\\d+:.*)";
static Pattern patternN = Pattern.compile(patternForNode);
// regex for deciding whether relationship has properties
private static String patternForRel = "\\{.+\\}";
static Pattern patternR = Pattern.compile(patternForRel);
/**
* Main method for translating the schema.
*
* @param file Dump File from Neo4J.
*/
public static void translate(String file) {
// perform initial preprocess of the file to remove content such as new file markers
// and other aspects that will break the schema converter.
// return number of lines if no issue, else -1
long startTimePP = System.nanoTime();
int count = performPreProcessFile(file);
long finTimePP = System.nanoTime();
if (count == -1) return;
// number of concurrent threads that work on the dump file (currently 8)
final int segments = 8;
// divide as doubles so the ceiling is taken before the cast truncates
final int amountPerSeg = (int) Math.ceil((double) count / segments);
ArrayList<String>[] group = new ArrayList[segments];
for (int i = 0; i < segments; i++) {
group[i] = new ArrayList<>();
}
try {
long startTimeSP = System.nanoTime();
// open correctly preparsed file
FileInputStream fis = new FileInputStream(file.replace(".txt", "_new.txt"));
//Construct BufferedReader from InputStreamReader
BufferedReader br = new BufferedReader(new InputStreamReader(fis));
String line;
int segNum = 0;
int amountInSeg = 0;
System.out.println("***SPLITTING FILE INTO SEGMENTS***");
while ((line = br.readLine()) != null) {
if (amountInSeg++ <= amountPerSeg) {
group[segNum].add(line);
} else {
segNum++;
group[segNum].add(line);
amountInSeg = 1;
}
}
System.out.println("\n***SPLITTING COMPLETE***\n");
// file indicators for the threads to output on.
String[] files = new String[segments];
for (int j = 0; j < segments; j++) {
files[j] = String.valueOf(alphabet[j]);
}
Thread[] ts = new Thread[segments];
for (int i = 0; i < ts.length; i++) {
ts[i] = new Thread(new PerformWork(group[i], files[i]));
}
System.out.println("***PARSING***");
for (Thread q : ts) {
q.start();
}
int done = 0;
while (done < ts.length) {
try {
ts[done].join();
done++;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
System.out.println("***PARSING COMPLETE***\n");
combineWork(files);
// remove strange duplicates appearing in the ArrayList
Set<String> hs = new HashSet<>();
hs.addAll(nodeRelLabels);
nodeRelLabels.clear();
nodeRelLabels.addAll(hs);
hs.clear();
hs.addAll(edgesRelLabels);
edgesRelLabels.clear();
edgesRelLabels.addAll(hs);
hs.clear();
hs.addAll(relTypes);
relTypes.clear();
relTypes.addAll(hs);
long finTimeSP = System.nanoTime();
System.out.println("Time to initially preprocess file : "
+ (finTimePP - startTimePP) / 1000000.0 + "ms.");
System.out.println("Time to parse and process dump file : "
+ (finTimeSP - startTimeSP) / 1000000.0 + "ms.");
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* Preprocess the original file to remove weird line breaks and to format
* correctly for insertion into a relational backend.
*
* @param file Location of the dump file.
* @return Number of lines in the file, or -1 if there was a failure in this method.
*/
private static int performPreProcessFile(String file) {
System.out.println("***PRE PROCESSING FILE***");
FileInputStream fis;
FileOutputStream fos;
int count = 0;
try {
fis = new FileInputStream(file);
//Construct BufferedReader from InputStreamReader
BufferedReader br = new BufferedReader(new InputStreamReader(fis));
String line;
long totalBytes = new File(file).length();
long bytesRead = 0;
int previousPercent = 0;
fos = new FileOutputStream(file.replace(".txt", "_new.txt"));
//Construct BufferedWriter from OutputStreamWriter
BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(fos));
String output = "";
boolean firstLine = true;
while ((line = br.readLine()) != null) {
count++;
// escape character in SQL (' replaced with '')
line = line.replace("'", "''");
if (line.startsWith("create")) {
if (firstLine) {
firstLine = false;
} else {
bw.write(output);
bw.newLine();
}
output = line;
} else if (line.isEmpty()) {
// do nothing intentionally
} else {
output += line;
}
bytesRead += line.length();
int percent = (int) (bytesRead * 100 / totalBytes);
if ((previousPercent + 10) < percent) {
System.out.println(percent + "% read.");
previousPercent = percent;
}
}
br.close();
bw.write(output);
bw.flush();
bw.close();
} catch (IOException e) {
e.printStackTrace();
return -1;
}
// delete the original file as no longer needed.
File f = new File(file);
f.delete();
System.out.println("***PRE PROCESSING COMPLETE***\n");
return count;
}
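/*
 * Example of the rewriting performed above (values are hypothetical). A dump fragment whose
 * statement was wrapped across two physical lines, e.g.
 *
 *     create (_1:Person {name: 'O'Brien',
 *      age: 42})
 *
 * is joined back into a single line in the *_new.txt output, with every single quote doubled
 * so it can later be embedded in SQL string literals:
 *
 *     create (_1:Person {name: ''O''Brien'', age: 42})
 */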
/**
* Concatenate result of individual threads to one file. One method call does this
* for both the nodes and relationships.
*
* @param files n files resulted from reading initial dump. (where n is the number of files created from
* the last step).
* @throws IOException Error with the text files being written to.
*/
private static void combineWork(String[] files) throws IOException {
System.out.println("***COMBINING FILES***");
OutputStream out = null;
byte[] buf;
for (int i = 0; i < 2; i++) {
String file = (i == 0) ? nodesFile : edgesFile;
out = new FileOutputStream(file);
buf = new byte[1024];
for (String indivFile : files) {
InputStream in = new FileInputStream(file.replace(".txt", indivFile + ".txt"));
int b;
while ((b = in.read(buf)) >= 0) {
out.write(buf, 0, b);
out.flush();
}
in.close();
File f = new File(file.replace(".txt", indivFile + ".txt"));
f.delete();
}
// close the stream for the current target file before starting the next one
out.close();
}
System.out.println("\n***COMBINING COMPLETE***");
}
}
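/*
 * Usage sketch (not part of the original source): assuming config.properties points
 * Reagan_Main_V4.workspaceArea at a writable scratch directory and the Neo4j dump has been
 * exported to a plain-text file, the translation could be driven like this (the dump path is
 * hypothetical):
 *
 *     SchemaTranslate.translate("/data/neo4j-dump.txt");
 *     System.out.println(SchemaTranslate.nodeRelLabels.size() + " node label sets, "
 *             + SchemaTranslate.relTypes.size() + " relationship types found.");
 *
 * translate() rewrites the dump to a *_new.txt file, splits the lines across 8 worker threads,
 * and leaves the combined node and edge rows in nodes.txt and edges.txt under the workspace area.
 */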
|
|
package com.smeanox.games.sg002.view;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.GlyphLayout;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.graphics.g2d.TextureRegion;
import com.badlogic.gdx.math.Vector2;
import com.smeanox.games.sg002.player.Player;
import com.smeanox.games.sg002.util.Assets;
import com.smeanox.games.sg002.util.Consts;
import com.smeanox.games.sg002.world.Action;
import com.smeanox.games.sg002.world.GameObject;
import com.smeanox.games.sg002.world.GameWorld;
import com.smeanox.games.sg002.world.MapObject;
import com.smeanox.games.sg002.data.Point;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
* Render the GameWorld
*
* @author Benjamin Schmid
*/
public class GameView {
private GameWorld gameWorld;
private GlyphLayout glyphLayout;
private float zoom;
private float aFieldSizeX;
private float aFieldSizeY;
private int activeX;
private int activeY;
private boolean needCacheRefresh;
private Map<Action.ActionType, Set<Point>> canActionToCache;
private TextureRegion[][] backgroundRegions;
/**
* Create a new instance
*
* @param gameWorld the gameWorld to display
*/
public GameView(GameWorld gameWorld) {
this.gameWorld = gameWorld;
activeX = -1;
activeY = -1;
zoom = 0.1f;
canActionToCache = new HashMap<Action.ActionType, Set<Point>>();
for(Action.ActionType actionType : Action.ActionType.values()){
canActionToCache.put(actionType, new HashSet<Point>());
}
glyphLayout = new GlyphLayout();
initBackgroundRegions();
}
/**
* Initialize the list of background regions. The background is split into several texture regions
*/
private void initBackgroundRegions() {
backgroundRegions = new TextureRegion[Consts.backgroundFieldsY][Consts.backgroundFieldsX];
int backgroundRegionWidth, backgroundRegionHeight;
backgroundRegionWidth = Assets.background.getWidth() / Consts.backgroundFieldsX;
backgroundRegionHeight = Assets.background.getHeight() / Consts.backgroundFieldsY;
for (int y = 0; y < Consts.backgroundFieldsY; y++) {
for (int x = 0; x < Consts.backgroundFieldsX; x++) {
backgroundRegions[Consts.backgroundFieldsY - y - 1][x] = new TextureRegion(Assets.background,
x * backgroundRegionWidth, y * backgroundRegionHeight,
backgroundRegionWidth, backgroundRegionHeight);
}
}
}
public float getZoom() {
return zoom;
}
public void setZoom(float zoom) {
this.zoom = zoom;
}
public void zoomIn() {
zoom *= Consts.zoomStep;
}
public void zoomOut() {
zoom /= Consts.zoomStep;
}
public int getActiveX() {
return activeX;
}
public void setActiveX(int activeX) {
this.activeX = activeX;
needCacheRefresh = true;
}
public int getActiveY() {
return activeY;
}
public void setActiveY(int activeY) {
this.activeY = activeY;
needCacheRefresh = true;
}
/**
 * Return the GameObject at the position [activeX, activeY]
*
* @return the GameObject or null if there is no GameObject
*/
public GameObject getActiveGameObject() {
return gameWorld.getWorldGameObject(activeX, activeY);
}
/**
* Return the Field that belongs to the given coordinates in world space
*
* @param x coordinates in world space
* @param y coordinates in world space
* @return the coordinates of the field
*/
public Point getFieldByPosition(float x, float y) {
int newActiveX, newActiveY;
newActiveX = (int) (x / (Consts.fieldSizeX * Consts.devScaleY * zoom));
newActiveY = (int) (y / (Consts.fieldSizeY * Consts.devScaleY * zoom));
return new Point(newActiveX, newActiveY);
}
/**
* Return the Field that belongs to the given coordinates in world space
*
* @param vector2 coordinates in world space
* @return the coordinates of the field
*/
public Point getFieldByPosition(Vector2 vector2) {
return getFieldByPosition(vector2.x, vector2.y);
}
/**
* Return the coordinates in world space of the lower left corner of the given field
*
* @param x coordinates of the field
* @param y coordinates of the field
* @return coordinates in world space
*/
public Vector2 getPositionByField(int x, int y) {
float positionX, positionY;
positionX = x * Consts.fieldSizeX * Consts.devScaleY * zoom;
positionY = y * Consts.fieldSizeY * Consts.devScaleY * zoom; // use the Y field size, matching getFieldByPosition
return new Vector2(positionX, positionY);
}
/**
* Set the active field to the field at the given coordinates
*
* @param x coordinates in world space
* @param y coordinates in world space
*/
public void setActiveByPosition(float x, float y) {
int newActiveX, newActiveY;
newActiveX = (int) (x / (Consts.fieldSizeX * Consts.devScaleY * zoom));
newActiveY = (int) (y / (Consts.fieldSizeY * Consts.devScaleY * zoom)); // Y axis uses fieldSizeY, as in getFieldByPosition
if (x >= 0 && newActiveX < gameWorld.getMapSizeX()
&& y >= 0 && newActiveY < gameWorld.getMapSizeY()) {
activeX = newActiveX;
activeY = newActiveY;
}
needCacheRefresh = true;
}
/**
* Set the active field to the field at the given coordinates
*
* @param vector2 coordinates in world space
*/
public void setActiveByPosition(Vector2 vector2) {
setActiveByPosition(vector2.x, vector2.y);
}
/**
* render a Texture on a given field
*
* @param spriteBatch spriteBatch to use
* @param texture the texture to draw
* @param x coordinate of the field to draw
* @param y coordinate of the field to draw
*/
private void renderField(SpriteBatch spriteBatch, Texture texture, int x, int y) {
renderField(spriteBatch, texture, x, y, 0f, 0f, 1f, 1f);
}
/**
* render a Texture on a given field with a specified offset and size
*
* @param spriteBatch spriteBatch to use
* @param texture the texture to draw
* @param x coordinate of the field to draw
* @param y coordinate of the field to draw
* @param offX relative offset of the texture, 0.0 is left border, 1.0 is right border of the field
 * @param offY   relative offset of the texture, 0.0 is bottom border, 1.0 is top border of the field
* @param width relative width of the texture, 1.0 is width of the field
* @param height relative height of the texture, 1.0 is height of the field
*/
private void renderField(SpriteBatch spriteBatch, Texture texture, int x, int y, float offX, float offY, float width, float height) {
spriteBatch.draw(texture,
(x + offX) * aFieldSizeX,
(y + offY) * aFieldSizeY,
aFieldSizeX * width,
aFieldSizeY * height);
}
/**
* render the background
*
* @param spriteBatch spriteBatch
*/
private void renderBackground(SpriteBatch spriteBatch) {
spriteBatch.setColor(Color.WHITE);
for (int y = 0; y < gameWorld.getMapSizeY(); y++) {
for (int x = 0; x < gameWorld.getMapSizeX(); x++) {
spriteBatch.draw(backgroundRegions[y % Consts.backgroundFieldsY][x % Consts.backgroundFieldsX],
x * aFieldSizeX, y * aFieldSizeY, aFieldSizeX, aFieldSizeY);
}
}
}
/**
* render the grid
*
* @param spriteBatch spriteBatch
*/
private void renderGrid(SpriteBatch spriteBatch) {
spriteBatch.setColor(Consts.gridColor);
for (int y = 0; y < gameWorld.getMapSizeY(); y++) {
for (int x = 0; x < gameWorld.getMapSizeX(); x++) {
renderField(spriteBatch, Assets.grid, x, y);
}
}
}
/**
* render the GameWorld
*
* @param spriteBatch spriteBatch
* @param activePlayer the player that is playing
*/
public void render(SpriteBatch spriteBatch, Player activePlayer) {
aFieldSizeX = (Consts.fieldSizeX * Consts.devScaleY * zoom);
aFieldSizeY = (Consts.fieldSizeY * Consts.devScaleY * zoom);
renderBackground(spriteBatch);
renderGrid(spriteBatch);
if(needCacheRefresh){
rebuildCache();
}
GameObject gameObject;
MapObject mapObject;
GameObject activeGameObject = gameWorld.getWorldGameObject(activeX, activeY);
for(int y = 0; y < gameWorld.getMapSizeY(); y++){
for(int x = 0; x < gameWorld.getMapSizeX(); x++){
//render MapObjects
mapObject = gameWorld.getWorldMapObject(x, y);
spriteBatch.setColor(Color.WHITE);
renderField(spriteBatch, mapObject.getMapObjectType().getTexture(), x, y);
//render GameObjects
gameObject = gameWorld.getWorldGameObject(x, y);
if (gameObject != null) {
spriteBatch.setColor(((float)gameObject.getHp() / gameObject.getGameObjectType().getDefaultHP() > 0.4) ? Consts.hpFullColor : Consts.hpLowColor);
renderField(spriteBatch, Assets.healthbar, x, y, 0f, 0f, (float)gameObject.getHp() / gameObject.getGameObjectType().getDefaultHP(), 1f);
spriteBatch.setColor(gameObject.getPlayer().getColor());
if(gameWorld.wasUsed(x, y) && gameWorld.getWorldGameObject(x, y).getPlayer().equals(activePlayer)
&& !gameObject.getGameObjectType().isCanDoAction(Action.ActionType.NONE)){
spriteBatch.setColor(Consts.usedColor);
}
renderField(spriteBatch, gameObject.getGameObjectType().getTexture(), x, y);
if (zoom >= Consts.hpDisplayMinZoom) {
Assets.liberationMicroShadow.bitmapFont.setColor(Consts.hpColor);
glyphLayout.setText(Assets.liberationMicroShadow.bitmapFont, "" + gameObject.getHp());
Assets.liberationMicroShadow.bitmapFont.draw(spriteBatch, glyphLayout,
x * aFieldSizeX + (aFieldSizeX - glyphLayout.width) / 2f,
y * aFieldSizeY + aFieldSizeY * 0.95f);
}
}
if (x == activeX && y == activeY) {
spriteBatch.setColor(activePlayer.getColor());
renderField(spriteBatch, Assets.selection, x, y);
}
}
}
if(activeGameObject != null) {
for (Action.ActionType actionType : Action.ActionType.values()) {
Color actionColor = Consts.usedColor;
switch (actionType) {
case MOVE:
actionColor = Consts.canMoveColor;
break;
case FIGHT:
actionColor = Consts.canFightColor;
break;
case PRODUCE:
actionColor = Consts.canProduceColor;
break;
}
spriteBatch.setColor(activeGameObject.wasUsed(actionType) ? Consts.usedColor : actionColor);
Texture fieldTexture = null;
switch (actionType) {
case MOVE:
fieldTexture = Assets.possibleFieldMove;
break;
case FIGHT:
fieldTexture = Assets.possibleFieldFight;
break;
case PRODUCE:
fieldTexture = Assets.possibleFieldProduce;
break;
}
if (fieldTexture != null) {
for (Point point : canActionToCache.get(actionType)) {
renderField(spriteBatch, fieldTexture, point.x, point.y);
}
}
}
}
}
/**
* Rebuild the cache of fields that are available for a given action
*/
private void rebuildCache(){
needCacheRefresh = false;
for(Action.ActionType actionType : Action.ActionType.values()){
canActionToCache.get(actionType).clear();
}
GameObject activeGameObject = gameWorld.getWorldGameObject(activeX, activeY);
if(activeGameObject == null) {
return;
}
for(int y = 0; y < gameWorld.getMapSizeY(); y++) {
for (int x = 0; x < gameWorld.getMapSizeX(); x++) {
if (activeGameObject.canMoveTo(x, y)) {
canActionToCache.get(Action.ActionType.MOVE).add(new Point(x, y));
}
if (activeGameObject.canFightTo(x, y)) {
canActionToCache.get(Action.ActionType.FIGHT).add(new Point(x, y));
}
if (activeGameObject.canProduceTo(x, y)) {
canActionToCache.get(Action.ActionType.PRODUCE).add(new Point(x, y));
}
}
}
}
}
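/*
 * A minimal usage sketch (not part of the original file; names such as "worldRenderer",
 * "camera" and "activePlayer" are illustrative placeholders). Assuming the caller owns the
 * SpriteBatch lifecycle, a libGDX Screen might drive this renderer once per frame like so:
 *
 *   spriteBatch.setProjectionMatrix(camera.combined);
 *   spriteBatch.begin();
 *   worldRenderer.render(spriteBatch, activePlayer);
 *   spriteBatch.end();
 */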
|
|
/*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.screens.server.management.client.box;
import java.util.Collections;
import java.util.HashMap;
import org.guvnor.common.services.project.model.GAV;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.screens.server.management.client.events.ContainerInfoUpdateEvent;
import org.kie.workbench.common.screens.server.management.events.ContainerStarted;
import org.kie.workbench.common.screens.server.management.events.ContainerStopped;
import org.kie.workbench.common.screens.server.management.events.ContainerUpdated;
import org.kie.workbench.common.screens.server.management.events.ServerConnected;
import org.kie.workbench.common.screens.server.management.events.ServerOnError;
import org.kie.workbench.common.screens.server.management.model.ConnectionType;
import org.kie.workbench.common.screens.server.management.model.Container;
import org.kie.workbench.common.screens.server.management.model.ContainerRef;
import org.kie.workbench.common.screens.server.management.model.ContainerStatus;
import org.kie.workbench.common.screens.server.management.model.impl.ContainerImpl;
import org.kie.workbench.common.screens.server.management.model.impl.ContainerRefImpl;
import org.kie.workbench.common.screens.server.management.model.impl.ServerImpl;
import org.kie.workbench.common.screens.server.management.model.impl.ServerRefImpl;
import org.mockito.Mock;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.runners.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import org.uberfire.client.mvp.PlaceManager;
import org.uberfire.mocks.EventSourceMock;
import org.uberfire.mvp.impl.DefaultPlaceRequest;
import static junit.framework.Assert.*;
import static org.mockito.Mockito.*;
@RunWith(MockitoJUnitRunner.class)
public class BoxPresenterTest {
private BoxPresenter boxPresenter;
@Mock
private BoxPresenter.View boxView;
@Mock
private PlaceManager placeManager;
@Mock
private EventSourceMock<ContainerInfoUpdateEvent> event;
@Before
public void setup() {
boxPresenter = new BoxPresenter( boxView, placeManager, event );
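// Route the mocked view's onSelect()/onDeselect() callbacks back to the presenter,
// so tests can drive selection exactly as the real view would.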
doAnswer( new Answer<Void>() {
public Void answer( InvocationOnMock invocation ) {
boxPresenter.onSelect();
return null;
}
} ).when( boxView ).onSelect();
doAnswer( new Answer<Void>() {
public Void answer( InvocationOnMock invocation ) {
boxPresenter.onUnSelect();
return null;
}
} ).when( boxView ).onDeselect();
assertEquals( boxView, boxPresenter.getView() );
}
@Test
public void testSetupLoadingServerRef() {
boxPresenter.setup( new ServerRefImpl( "my_id", "http://localhost", "my_server", "admin", null, ContainerStatus.LOADING, ConnectionType.REMOTE, new HashMap<String, String>() {{
put( "version", "0.1" );
}}, Collections.<ContainerRef>emptyList() ) );
assertEquals( "(id: 'my_id', version: 0.1)", boxPresenter.getDescription() );
generalServerTest();
}
@Test
public void testSetupLoadingServer() {
boxPresenter.setup( new ServerImpl( "my_id", "http://localhost", "my_server", null, null, ContainerStatus.LOADING, ConnectionType.REMOTE, Collections.<Container>emptyList(), Collections.<String, String>emptyMap(), Collections.<ContainerRef>emptyList() ) );
assertEquals( "(id: 'my_id', version: unknown)", boxPresenter.getDescription() );
generalServerTest();
}
@Test
public void testSetupLoadingContainerRef() {
boxPresenter.setup( new ContainerRefImpl( "my_id", "my_container_id", ContainerStatus.LOADING, new GAV( "com.example", "example-artifact", "0.1.Final" ), null, null ) );
assertEquals( "my_container_id", boxPresenter.getName() );
assertEquals( BoxType.CONTAINER, boxPresenter.getType() );
assertEquals( "com.example:example-artifact-0.1.Final", boxPresenter.getDescription() );
boxPresenter.openBoxInfo();
verify( event, times( 1 ) ).fire( any( ContainerInfoUpdateEvent.class ) );
verify( placeManager, times( 1 ) ).goTo( "ContainerInfo" );
boxPresenter.openAddScreen();
verify( placeManager, times( 0 ) ).goTo( "NewContainerForm" );
testSelection();
testVisibility( "my_container_id" );
assertEquals( ContainerStatus.LOADING, boxPresenter.getStatus() );
boxPresenter.onContainerStopped( new ContainerStopped( new ContainerRefImpl( "x_my_id", "my_container_id", ContainerStatus.STOPPED, new GAV( "com.example", "example-artifact", "0.1.Final" ), null, null ) ) );
assertEquals( ContainerStatus.LOADING, boxPresenter.getStatus() );
boxPresenter.onContainerStopped( new ContainerStopped( new ContainerRefImpl( "my_id", "x_my_container_id", ContainerStatus.STOPPED, new GAV( "com.example", "example-artifact", "0.1.Final" ), null, null ) ) );
assertEquals( ContainerStatus.LOADING, boxPresenter.getStatus() );
boxPresenter.onContainerStopped( new ContainerStopped( new ContainerRefImpl( "my_id", "my_container_id", ContainerStatus.STOPPED, new GAV( "com.example", "example-artifact", "0.1.Final" ), null, null ) ) );
assertEquals( ContainerStatus.STOPPED, boxPresenter.getStatus() );
}
@Test
public void testSetupLoadingContainerRefNoGAV() {
boxPresenter.setup( new ContainerRefImpl( "my_id", "my_container_id", ContainerStatus.LOADING, null, null, null ) );
assertEquals( "my_container_id", boxPresenter.getName() );
assertEquals( BoxType.CONTAINER, boxPresenter.getType() );
assertEquals( "Unknown Container", boxPresenter.getDescription() );
assertEquals( ContainerStatus.LOADING, boxPresenter.getStatus() );
boxPresenter.onContainerStopped( new ContainerStopped( new ContainerRefImpl( "x_my_id", "my_container_id", ContainerStatus.STOPPED, new GAV( "com.example", "example-artifact", "0.1.Final" ), null, null ) ) );
assertEquals( ContainerStatus.LOADING, boxPresenter.getStatus() );
boxPresenter.onContainerStopped( new ContainerStopped( new ContainerRefImpl( "my_id", "x_my_container_id", ContainerStatus.STOPPED, null, null, null ) ) );
assertEquals( ContainerStatus.LOADING, boxPresenter.getStatus() );
boxPresenter.onContainerStopped( new ContainerStopped( new ContainerRefImpl( "my_id", "my_container_id", ContainerStatus.STOPPED, null, null, null ) ) );
assertEquals( ContainerStatus.STOPPED, boxPresenter.getStatus() );
}
@Test
public void testSetupLoadingContainer() {
final Container container = new ContainerImpl( "my_id", "my_container_id", ContainerStatus.LOADING, new GAV( "com.example", "example-artifact", "LATEST" ), null, null, new GAV( "com.example", "example-artifact", "0.1.Final" ) );
boxPresenter.setup( container );
assertEquals( "my_container_id", boxPresenter.getName() );
assertEquals( BoxType.CONTAINER, boxPresenter.getType() );
assertEquals( "com.example:example-artifact-0.1.Final(com.example:example-artifact-LATEST)", boxPresenter.getDescription() );
boxPresenter.openBoxInfo();
verify( event, times( 1 ) ).fire( any( ContainerInfoUpdateEvent.class ) );
verify( placeManager, times( 1 ) ).goTo( "ContainerInfo" );
boxPresenter.openAddScreen();
verify( placeManager, times( 0 ) ).goTo( "NewContainerForm" );
testSelection();
testVisibility( "my_container_id" );
assertEquals( ContainerStatus.LOADING, boxPresenter.getStatus() );
container.setStatus( ContainerStatus.STARTED );
boxPresenter.onContainerStarted( new ContainerStarted( container ) );
assertEquals( ContainerStatus.STARTED, boxPresenter.getStatus() );
container.getResolvedReleasedId().setVersion( "0.2.Final" );
boxPresenter.onContainerUpdated( new ContainerUpdated( container ) );
assertEquals( "com.example:example-artifact-0.2.Final(com.example:example-artifact-LATEST)", boxPresenter.getDescription() );
boxPresenter.onContainerStopped( new ContainerStopped( new ContainerRefImpl( "my_id", "x_my_container_id", ContainerStatus.STOPPED, new GAV( "com.example", "example-artifact", "0.1.Final" ), null, null ) ) );
assertEquals( ContainerStatus.STARTED, boxPresenter.getStatus() );
boxPresenter.onContainerStopped( new ContainerStopped( new ContainerRefImpl( "my_id", "my_container_id", ContainerStatus.STOPPED, new GAV( "com.example", "example-artifact", "0.1.Final" ), null, null ) ) );
assertEquals( ContainerStatus.STOPPED, boxPresenter.getStatus() );
}
private void testVisibility( String id ) {
assertEquals( true, boxPresenter.isVisible() );
boxPresenter.filter( id.substring( 0, 2 ) );
assertEquals( true, boxPresenter.isVisible() );
boxPresenter.filter( id );
assertEquals( true, boxPresenter.isVisible() );
boxPresenter.filter( id + "xx" );
assertEquals( false, boxPresenter.isVisible() );
}
private void testSelection() {
assertEquals( false, boxPresenter.isSelected() );
boxPresenter.select( true );
assertEquals( true, boxPresenter.isSelected() );
assertEquals( true, boxPresenter.isSelected() );
boxPresenter.select( false );
assertEquals( false, boxPresenter.isSelected() );
}
private void generalServerTest() {
assertEquals( "my_server", boxPresenter.getName() );
assertEquals( BoxType.SERVER, boxPresenter.getType() );
boxPresenter.openBoxInfo();
verify( event, times( 0 ) ).fire( any( ContainerInfoUpdateEvent.class ) );
verify( placeManager, times( 0 ) ).goTo( "ContainerInfo" );
boxPresenter.openAddScreen(); // containers can be added while the server is offline, so every call to openAddScreen is allowed
verify( placeManager, times( 1 ) ).goTo(new DefaultPlaceRequest( "NewContainerForm" ).addParameter( "serverId", "my_id" ));
testSelection();
testVisibility( "my_server" );
assertEquals( ContainerStatus.LOADING, boxPresenter.getStatus() );
boxPresenter.onServerConnected( new ServerConnected( new ServerImpl( "my_id", "http://localhost", "my_server", "admin", null, ContainerStatus.STARTED, ConnectionType.REMOTE, Collections.<Container>emptyList(), Collections.<String, String>emptyMap(), Collections.<ContainerRef>emptyList() ) ) );
assertEquals( ContainerStatus.STARTED, boxPresenter.getStatus() );
boxPresenter.openAddScreen();
verify( placeManager, times( 2 ) ).goTo( new DefaultPlaceRequest( "NewContainerForm" ).addParameter( "serverId", "my_id" ) );
boxPresenter.onServerConnected( new ServerConnected( new ServerImpl( "my_id", "http://localhost", "my_server", "admin", null, ContainerStatus.LOADING, ConnectionType.REMOTE, Collections.<Container>emptyList(), Collections.<String, String>emptyMap(), Collections.<ContainerRef>emptyList() ) ) );
assertEquals( ContainerStatus.LOADING, boxPresenter.getStatus() );
boxPresenter.openAddScreen();
verify( placeManager, times( 3 ) ).goTo( new DefaultPlaceRequest( "NewContainerForm" ).addParameter( "serverId", "my_id" ) );
boxPresenter.onServerConnected( new ServerConnected( new ServerImpl( "xmy_id", "http://localhost", "my_server", "admin", null, ContainerStatus.STARTED, ConnectionType.REMOTE, Collections.<Container>emptyList(), Collections.<String, String>emptyMap(), Collections.<ContainerRef>emptyList() ) ) );
boxPresenter.openAddScreen();
assertEquals( ContainerStatus.LOADING, boxPresenter.getStatus() );
verify( placeManager, times( 4 ) ).goTo( new DefaultPlaceRequest( "NewContainerForm" ).addParameter( "serverId", "my_id" ) );
boxPresenter.onServerOnError( new ServerOnError( new ServerRefImpl( "my_id", "http://localhost", "my_server", "admin", null, ContainerStatus.ERROR, ConnectionType.REMOTE, Collections.<String, String>emptyMap(), Collections.<ContainerRef>emptyList() ), "message" ) );
assertEquals( ContainerStatus.ERROR, boxPresenter.getStatus() );
boxPresenter.openAddScreen();
verify( placeManager, times( 5 ) ).goTo( new DefaultPlaceRequest( "NewContainerForm" ).addParameter( "serverId", "my_id" ) );
}
}
|
|
package com.scand.realmbrowser;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v4.app.DialogFragment;
import android.text.InputType;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.Button;
import android.widget.DatePicker;
import android.widget.EditText;
import android.widget.RadioButton;
import android.widget.RadioGroup;
import android.widget.TabHost;
import android.widget.TextView;
import android.widget.TimePicker;
import java.lang.reflect.Field;
import java.util.Calendar;
import java.util.Date;
import io.realm.Realm;
import io.realm.RealmObject;
/**
* Created by Slabodeniuk on 6/29/15.
*/
public class EditDialogFragment extends DialogFragment {
public interface OnFieldEditDialogInteraction {
void onRowWasEdit(int position);
}
private static final String ARG_POSITION = "realm object position";
private RealmObject mRealmObject;
private Field mField;
private int mPosition;
private OnFieldEditDialogInteraction mListener;
// for text edit dialog
private EditText mEditText;
private TextView mErrorText;
// for date edit dialog
private TabHost mTabHost;
private DatePicker mDatePicker;
private TimePicker mTimePicker;
// for boolean edit dialog
private RadioGroup mRadioGroup;
// for byte[] edit dialog
private TextView mByteTextView;
public static EditDialogFragment createInstance(RealmObject obj, Field field, int position) {
RealmObjectHolder realmObjectHolder = RealmObjectHolder.getInstance();
realmObjectHolder.setObject(obj);
realmObjectHolder.setField(field);
Bundle args = new Bundle();
args.putInt(ARG_POSITION, position);
EditDialogFragment fragment = new EditDialogFragment();
fragment.setArguments(args);
return fragment;
}
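/*
 * A minimal usage sketch (an assumption, not part of the original class): the host Activity
 * must implement OnFieldEditDialogInteraction (onAttach casts it) and can show the dialog via
 * the factory method, for example:
 *
 *   EditDialogFragment.createInstance(realmObject, field, position)
 *           .show(getSupportFragmentManager(), "edit_field");
 *
 * onRowWasEdit(position) is called back once a new value has been saved or reset to null.
 */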
@Override
public void onAttach(Activity activity) {
super.onAttach(activity);
mListener = (OnFieldEditDialogInteraction) activity;
}
@Override
public void onDetach() {
super.onDetach();
mListener = null;
}
@NonNull
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
mRealmObject = RealmObjectHolder.getInstance().getObject();
mField = RealmObjectHolder.getInstance().getField();
mPosition = getArguments().getInt(ARG_POSITION);
if (mRealmObject == null || mField == null) {
throw new IllegalArgumentException("Use RealmObjectHolder to store data");
}
int layoutId = -1;
Class<?> type = mField.getType();
if (type == String.class
|| type == Short.class || type == short.class
|| type == Integer.class || type == int.class
|| type == Long.class || type == long.class
|| type == Float.class || type == float.class
|| type == Double.class || type == double.class) {
layoutId = R.layout.realm_browser_text_edit_layout;
} else if (type == Boolean.class || type == boolean.class) {
layoutId = R.layout.realm_browser_boolean_edit_layout;
} else if (type == Date.class) {
layoutId = R.layout.realm_browser_date_edit_layout;
} else if (type == Byte[].class || type == byte[].class) {
layoutId = R.layout.realm_browser_byte_array_edit_layout;
}
AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
if (layoutId == -1) {
// no matching layout for this field type; inflating -1 would throw, so just report it
builder.setMessage("Unknown field type.");
} else {
LayoutInflater inflater = LayoutInflater.from(getActivity());
View root = inflater.inflate(layoutId, null);
findViews(root);
initUI(mRealmObject, mField, type);
builder.setView(root);
}
builder.setPositiveButton(R.string.realm_browser_ok, null);
if (type != Byte[].class && type != byte[].class) {
builder.setNegativeButton(R.string.realm_browser_cancel, mCancelClickListener);
}
if (isTypeNullable(type)) {
builder.setNeutralButton(R.string.realm_browser_reset_to_null, null);
}
AlertDialog dialog = builder.create();
dialog.setOnShowListener(new DialogInterface.OnShowListener() {
@Override
public void onShow(DialogInterface dialog) {
Button positiveButton = ((AlertDialog) dialog).getButton(DialogInterface.BUTTON_POSITIVE);
positiveButton.setOnClickListener(mOkClickListener);
Button resetToNull = ((AlertDialog) dialog).getButton(DialogInterface.BUTTON_NEUTRAL);
if (resetToNull != null) {
resetToNull.setOnClickListener(mResetToNullClickListener);
}
}
});
return dialog;
}
private void findViews(View root) {
mEditText = (EditText) root.findViewById(R.id.text_edit_dialog);
mErrorText = (TextView) root.findViewById(R.id.error_message);
mRadioGroup = (RadioGroup) root.findViewById(R.id.edit_boolean_group);
mTabHost = (TabHost) root.findViewById(R.id.tabHost);
mDatePicker = (DatePicker) root.findViewById(R.id.tab_date);
mTimePicker = (TimePicker) root.findViewById(R.id.tab_time);
mByteTextView = (TextView) root.findViewById(R.id.array);
}
private void initUI(RealmObject obj, Field field, Class<?> type) {
if (type == String.class
|| type == Short.class || type == short.class
|| type == Integer.class || type == int.class
|| type == Long.class || type == long.class
|| type == Float.class || type == float.class
|| type == Double.class || type == double.class) {
Object valueObj = RealmUtils.getNotParamFieldValue(obj, field);
mEditText.setText(valueObj == null ? "" : valueObj.toString());
int inputType;
if (type == String.class) {
inputType = InputType.TYPE_CLASS_TEXT;
} else if (type == Float.class || type == float.class
|| type == Double.class || type == double.class) {
inputType = InputType.TYPE_CLASS_NUMBER
| InputType.TYPE_NUMBER_FLAG_DECIMAL
| InputType.TYPE_NUMBER_FLAG_SIGNED;
} else {
inputType = InputType.TYPE_CLASS_NUMBER | InputType.TYPE_NUMBER_FLAG_SIGNED;
}
mEditText.setInputType(inputType);
} else if (type == Boolean.class || type == boolean.class) {
Boolean valueObj = (Boolean) RealmUtils.getNotParamFieldValue(obj, field);
int checkedId;
if (valueObj == null) {
checkedId = -1;
} else if (valueObj) {
checkedId = R.id.edit_boolean_true;
} else {
checkedId = R.id.edit_boolean_false;
}
if (checkedId != -1)
((RadioButton) mRadioGroup.findViewById(checkedId)).setChecked(true);
} else if (type == Date.class) {
mTabHost.setup();
// create date tab
TabHost.TabSpec specDate = mTabHost.newTabSpec("Date");
specDate.setIndicator("Date");
specDate.setContent(R.id.tab_date);
mTabHost.addTab(specDate);
// create time tab
TabHost.TabSpec specTime = mTabHost.newTabSpec("Time");
specTime.setIndicator("Time");
specTime.setContent(R.id.tab_time);
mTabHost.addTab(specTime);
Date valueObj = (Date) RealmUtils.getNotParamFieldValue(obj, field);
Calendar c = Calendar.getInstance();
c.setTime(valueObj != null ? valueObj : new Date());
mDatePicker.updateDate(c.get(Calendar.YEAR),
c.get(Calendar.MONTH),
c.get(Calendar.DAY_OF_MONTH));
// use HOUR_OF_DAY so the value matches the 24-hour view and the HOUR_OF_DAY field used when saving
mTimePicker.setCurrentHour(c.get(Calendar.HOUR_OF_DAY));
mTimePicker.setCurrentMinute(c.get(Calendar.MINUTE));
mTimePicker.setIs24HourView(true);
} else if (type == Byte[].class || type == byte[].class) {
byte[] valueObj = (byte[]) RealmUtils.getNotParamFieldValue(obj, field);
if (valueObj == null) {
mByteTextView.setText(R.string.realm_browser_byte_array_is_null);
} else {
for (byte b : valueObj) {
mByteTextView.append(String.format("0x%02X", b) + " ");
}
}
}
}
private Boolean isTypeNullable(Class type) {
return (type == Date.class
|| type == Boolean.class
|| type == String.class
|| type == Short.class
|| type == Integer.class
|| type == Long.class
|| type == Float.class
|| type == Double.class);
}
private final View.OnClickListener mResetToNullClickListener = new View.OnClickListener() {
@Override
public void onClick(View v) {
saveNewValue(null);
mListener.onRowWasEdit(mPosition);
EditDialogFragment.this.dismiss();
}
};
private final View.OnClickListener mOkClickListener = new View.OnClickListener() {
@Override
public void onClick(View view) {
Class<?> type = mField.getType();
Object newValue;
if (type == String.class) {
newValue = mEditText.getText().toString();
} else if (type == Boolean.class || type == boolean.class) {
newValue = mRadioGroup.getCheckedRadioButtonId() == R.id.edit_boolean_true;
} else if (type == Short.class || type == short.class) {
try {
newValue = Short.valueOf(mEditText.getText().toString());
} catch (NumberFormatException e) {
e.printStackTrace();
newValue = null;
}
} else if (type == Integer.class || type == int.class) {
try {
newValue = Integer.valueOf(mEditText.getText().toString());
} catch (NumberFormatException e) {
e.printStackTrace();
newValue = null;
}
} else if (type == Long.class || type == long.class) {
try {
newValue = Long.valueOf(mEditText.getText().toString());
} catch (NumberFormatException e) {
e.printStackTrace();
newValue = null;
}
} else if (type == Float.class || type == float.class) {
try {
newValue = Float.valueOf(mEditText.getText().toString());
} catch (NumberFormatException e) {
e.printStackTrace();
newValue = null;
}
} else if (type == Double.class || type == double.class) {
try {
newValue = Double.valueOf(mEditText.getText().toString());
} catch (NumberFormatException e) {
e.printStackTrace();
newValue = null;
}
} else if (type == Date.class) {
Class objClass = mRealmObject.getClass().getSuperclass();
Realm realm = Realm.getInstance(RealmBrowser.getInstance().getRealmConfig(objClass));
Date currentValue = (Date) RealmUtils.getNotParamFieldValue(mRealmObject, mField);
realm.close();
Calendar calendar = Calendar.getInstance();
if (currentValue != null)
calendar.setTime(currentValue);
calendar.set(Calendar.YEAR, mDatePicker.getYear());
calendar.set(Calendar.MONTH, mDatePicker.getMonth());
calendar.set(Calendar.DATE, mDatePicker.getDayOfMonth());
calendar.set(Calendar.HOUR_OF_DAY, mTimePicker.getCurrentHour());
calendar.set(Calendar.MINUTE, mTimePicker.getCurrentMinute());
newValue = calendar.getTime();
} else if (type == Byte[].class || type == byte[].class) {
EditDialogFragment.this.dismiss();
return;
} else {
newValue = null;
}
if (newValue != null) {
saveNewValue(newValue);
mListener.onRowWasEdit(mPosition);
EditDialogFragment.this.dismiss();
} else {
showError(type);
}
}
};
private final DialogInterface.OnClickListener mCancelClickListener = new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
// nothing to do here
}
};
private void saveNewValue(Object newValue) {
Class objClass = mRealmObject.getClass().getSuperclass();
Realm realm = Realm.getInstance(RealmBrowser.getInstance().getRealmConfig(objClass));
realm.beginTransaction();
RealmUtils.setNotParamFieldValue(mRealmObject, mField, newValue);
realm.commitTransaction();
realm.close();
}
private void showError(Class<?> clazz) {
String notFormatted = getString(R.string.realm_browser_value_edit_error);
String error = String.format(notFormatted, mEditText.getText().toString(),
clazz.getSimpleName());
mErrorText.setText(error);
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred.gridmix;
import java.io.IOException;
import java.util.Formatter;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Delayed;
import java.util.concurrent.TimeUnit;
import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.RawComparator;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.tools.rumen.JobStory;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* Synthetic job generated from a trace description.
*/
abstract class GridmixJob implements Callable<Job>, Delayed {
public static final String JOBNAME = "GRIDMIX";
public static final String ORIGNAME = "gridmix.job.name.original";
public static final Log LOG = LogFactory.getLog(GridmixJob.class);
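// Per-thread Formatter backed by a StringBuilder that always starts with JOBNAME;
// the constructors trim it back to JOBNAME's length before appending the zero-padded sequence number.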
private static final ThreadLocal<Formatter> nameFormat =
new ThreadLocal<Formatter>() {
@Override
protected Formatter initialValue() {
final StringBuilder sb = new StringBuilder(JOBNAME.length() + 5);
sb.append(JOBNAME);
return new Formatter(sb);
}
};
protected final int seq;
protected final Path outdir;
protected final Job job;
protected final JobStory jobdesc;
protected final UserGroupInformation ugi;
protected final long submissionTimeNanos;
private static final ConcurrentHashMap<Integer,List<InputSplit>> descCache =
new ConcurrentHashMap<Integer,List<InputSplit>>();
protected static final String GRIDMIX_JOB_SEQ = "gridmix.job.seq";
protected static final String GRIDMIX_USE_QUEUE_IN_TRACE =
"gridmix.job-submission.use-queue-in-trace";
protected static final String GRIDMIX_DEFAULT_QUEUE =
"gridmix.job-submission.default-queue";
private static void setJobQueue(Job job, String queue) {
if (queue != null) {
job.getConfiguration().set(MRJobConfig.QUEUE_NAME, queue);
}
}
public GridmixJob(final Configuration conf, long submissionMillis,
final JobStory jobdesc, Path outRoot, UserGroupInformation ugi,
final int seq) throws IOException {
this.ugi = ugi;
this.jobdesc = jobdesc;
this.seq = seq;
((StringBuilder)nameFormat.get().out()).setLength(JOBNAME.length());
try {
job = this.ugi.doAs(new PrivilegedExceptionAction<Job>() {
public Job run() throws IOException {
Job ret =
new Job(conf,
nameFormat.get().format("%05d", seq).toString());
ret.getConfiguration().setInt(GRIDMIX_JOB_SEQ, seq);
String jobId = null == jobdesc.getJobID()
? "<unknown>"
: jobdesc.getJobID().toString();
ret.getConfiguration().set(ORIGNAME, jobId);
if (conf.getBoolean(GRIDMIX_USE_QUEUE_IN_TRACE, false)) {
setJobQueue(ret, jobdesc.getQueueName());
} else {
setJobQueue(ret, conf.get(GRIDMIX_DEFAULT_QUEUE));
}
return ret;
}
});
} catch (InterruptedException e) {
throw new IOException(e);
}
submissionTimeNanos = TimeUnit.NANOSECONDS.convert(
submissionMillis, TimeUnit.MILLISECONDS);
outdir = new Path(outRoot, "" + seq);
}
protected GridmixJob(final Configuration conf, long submissionMillis,
final String name) throws IOException {
submissionTimeNanos = TimeUnit.NANOSECONDS.convert(
submissionMillis, TimeUnit.MILLISECONDS);
jobdesc = null;
outdir = null;
seq = -1;
ugi = UserGroupInformation.getCurrentUser();
try {
job = this.ugi.doAs(new PrivilegedExceptionAction<Job>() {
public Job run() throws IOException {
Job ret = new Job(conf, name);
ret.getConfiguration().setInt(GRIDMIX_JOB_SEQ, seq);
setJobQueue(ret, conf.get(GRIDMIX_DEFAULT_QUEUE));
return ret;
}
});
} catch (InterruptedException e) {
throw new IOException(e);
}
}
public UserGroupInformation getUgi() {
return ugi;
}
public String toString() {
return job.getJobName();
}
public long getDelay(TimeUnit unit) {
return unit.convert(submissionTimeNanos - System.nanoTime(),
TimeUnit.NANOSECONDS);
}
@Override
public int compareTo(Delayed other) {
if (this == other) {
return 0;
}
if (other instanceof GridmixJob) {
final long otherNanos = ((GridmixJob)other).submissionTimeNanos;
if (otherNanos < submissionTimeNanos) {
return 1;
}
if (otherNanos > submissionTimeNanos) {
return -1;
}
return id() - ((GridmixJob)other).id();
}
final long diff =
getDelay(TimeUnit.NANOSECONDS) - other.getDelay(TimeUnit.NANOSECONDS);
return 0 == diff ? 0 : (diff > 0 ? 1 : -1);
}
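/*
 * Ordering sketch (an assumption about the caller, not shown in this file): since GridmixJob
 * implements Delayed and orders by submissionTimeNanos (ties broken by seq), jobs placed in a
 * java.util.concurrent.DelayQueue<GridmixJob> become available in submission-time order:
 *
 *   DelayQueue<GridmixJob> queue = new DelayQueue<GridmixJob>();
 *   queue.put(job);                  // getDelay() stays positive until the submission time
 *   GridmixJob next = queue.take();  // blocks until the earliest job is due
 */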
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
// not possible unless job is cloned; all jobs should be unique
return other instanceof GridmixJob && id() == ((GridmixJob)other).id();
}
@Override
public int hashCode() {
return id();
}
int id() {
return seq;
}
Job getJob() {
return job;
}
JobStory getJobDesc() {
return jobdesc;
}
static void pushDescription(int seq, List<InputSplit> splits) {
if (null != descCache.putIfAbsent(seq, splits)) {
throw new IllegalArgumentException("Description exists for id " + seq);
}
}
static List<InputSplit> pullDescription(JobContext jobCtxt) {
return pullDescription(GridmixJob.getJobSeqId(jobCtxt));
}
static List<InputSplit> pullDescription(int seq) {
return descCache.remove(seq);
}
static void clearAll() {
descCache.clear();
}
void buildSplits(FilePool inputDir) throws IOException {
}
static int getJobSeqId(JobContext job) {
return job.getConfiguration().getInt(GRIDMIX_JOB_SEQ,-1);
}
public static class DraftPartitioner<V> extends Partitioner<GridmixKey,V> {
public int getPartition(GridmixKey key, V value, int numReduceTasks) {
return key.getPartition();
}
}
public static class SpecGroupingComparator
implements RawComparator<GridmixKey> {
private final DataInputBuffer di = new DataInputBuffer();
private final byte[] reset = di.getData();
@Override
public int compare(GridmixKey g1, GridmixKey g2) {
final byte t1 = g1.getType();
final byte t2 = g2.getType();
if (t1 == GridmixKey.REDUCE_SPEC ||
t2 == GridmixKey.REDUCE_SPEC) {
return t1 - t2;
}
assert t1 == GridmixKey.DATA;
assert t2 == GridmixKey.DATA;
return g1.compareTo(g2);
}
@Override
public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
try {
final int ret;
di.reset(b1, s1, l1);
final int x1 = WritableUtils.readVInt(di);
di.reset(b2, s2, l2);
final int x2 = WritableUtils.readVInt(di);
final int t1 = b1[s1 + x1];
final int t2 = b2[s2 + x2];
if (t1 == GridmixKey.REDUCE_SPEC ||
t2 == GridmixKey.REDUCE_SPEC) {
ret = t1 - t2;
} else {
assert t1 == GridmixKey.DATA;
assert t2 == GridmixKey.DATA;
ret =
WritableComparator.compareBytes(b1, s1, x1, b2, s2, x2);
}
di.reset(reset, 0, 0);
return ret;
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
static class RawBytesOutputFormat<K>
extends FileOutputFormat<K,GridmixRecord> {
@Override
public RecordWriter<K,GridmixRecord> getRecordWriter(
TaskAttemptContext job) throws IOException {
Path file = getDefaultWorkFile(job, "");
FileSystem fs = file.getFileSystem(job.getConfiguration());
final FSDataOutputStream fileOut = fs.create(file, false);
return new RecordWriter<K,GridmixRecord>() {
@Override
public void write(K ignored, GridmixRecord value)
throws IOException {
value.writeRandom(fileOut, value.getSize());
}
@Override
public void close(TaskAttemptContext ctxt) throws IOException {
fileOut.close();
}
};
}
}
}
|
|
/**
*
* @author Austin
*
* This file describes the main functionality of the player and the enemies.
*/
import java.awt.*;
public class Player extends Rectangle
{
double velX, velY = 0.0;
int lastMoveX, lastMoveX2;
int lastMoveY, lastMoveY2;
boolean Left, Right, Jumping, Jumping2, Firing;
boolean isPlayer;
int AMOCAP = 6;
private double TotalHealth, CurrentHealth;
private boolean Alive;
Color color;
Bullet bullets[] = new Bullet[AMOCAP];
void setAlive(boolean X) { Alive = X; }
boolean isAlive() { return Alive; }
Player(int x, int y, double velX, int width, int height, double Health, Color c)
{
super(x, y, width, height);
this.velX = velX;
lastMoveX = x;
lastMoveY = y;
TotalHealth = CurrentHealth = Health;
Left = Right = Jumping = Jumping2 = false;
isPlayer = false;
Alive = false;
color = c;
loadBullets();
}
Player(int x, int y, int width, int height, double Health, Color c)
{
super(x, y, width, height);
velX = 0.0;
lastMoveX = x;
lastMoveY = y;
TotalHealth = CurrentHealth = Health;
Left = Right = Jumping = Jumping2 = false;
isPlayer = false;
Alive = false;
color = c;
loadBullets();
}
Player(int x, int y, double velX, int width, int height, double Health)
{
super(x, y, width, height);
this.velX = velX;
lastMoveX = x;
lastMoveY = y;
TotalHealth = CurrentHealth = Health;
Left = Right = Jumping = Jumping2 = false;
isPlayer = false;
Alive = false;
color = Color.GREEN;
loadBullets();
}
Player(int x, int y, int width, int height, double Health)
{
super(x, y, width, height);
velX = 0.0;
lastMoveX = x;
lastMoveY = y;
TotalHealth = CurrentHealth = Health;
Left = Right = Jumping = Jumping2 = false;
isPlayer = false;
Alive = false;
color = Color.GREEN;
loadBullets();
}
void moveTo(int x, int y)
{
lastMoveX = this.x;
lastMoveY = this.y;
this.x = x;
this.y = y;
}
void incX(double velX)
{
lastMoveX2 = lastMoveX;
lastMoveX = this.x;
this.x += velX;
}
void incY(double velY)
{
lastMoveY2 = lastMoveY;
lastMoveY = this.y;
this.y += velY;
}
void incHealth(int x)
{
CurrentHealth += x;
if(CurrentHealth >= TotalHealth)
CurrentHealth = TotalHealth;
}
void deincHealth(int x)
{
CurrentHealth -= x;
if(CurrentHealth <= 0)
reset();
}
void Jump()
{
// first jump: only allowed when not already airborne
if(!Jumping)
{
velY = -(height*0.45);
Jumping = true;
return;
}
// double jump: allowed once per airtime, and only near the apex of the first jump (velY > -1.0)
if(Jumping && !Jumping2 && (velY > -1.0))
{
velY = -height*0.4;
Jumping2 = true;
return;
}
}
boolean getHeading()
{
// true = moving left, false = moving right
return lastMoveX < lastMoveX2;
}
void changeDir()
{
if(!isPlayer)
{
Right = !Right;
Left = !Left;
}
}
void reset()
{
if(isPlayer)
{
velY = 0.0;
x = 240;
y = 20;
CurrentHealth = TotalHealth;
Alive = true;
}
else
Alive = false;
}
void loadBullets()
{
for(int i=0; i < bullets.length; i++)
{
bullets[i] = new Bullet(this.x, this.y, 4.0, 0.0, 3, 2);
}
}
public void drawBox(Graphics2D g2d)
{
//draw the player
if(!Alive)
return;
double healthbar = (width-2) * (CurrentHealth/TotalHealth);
g2d.setColor(color);
g2d.fillRect(x, y, width, height);
g2d.setColor(Color.GREEN);
g2d.fillRect(x+1, (y-7), (int)healthbar, 4);
g2d.setColor(Color.LIGHT_GRAY);
g2d.drawRect(x, (y-7), width-1, 4);
}
void checkBoxColisions(Player box2)
{
if(!Alive || !box2.Alive)
return;
if(intersects(box2.getBounds()))
{
//Check Left box side
if((getCenterX() < box2.getCenterX()) &&
((lastMoveY + height) > (box2.y)) &&
(lastMoveY < (box2.y + box2.height)))
{
x = (box2.x - width);
changeDir();
}
//Check Right box side
else if((getCenterX() > box2.getCenterX()) &&
((lastMoveY + height) > (box2.y)) &&
(lastMoveY < (box2.y + box2.height)))
{
x = (box2.x + box2.width);
changeDir();
}
//Check Top box side
if((getCenterY() < box2.getCenterY()) &&
((lastMoveX + width) > (box2.x)) &&
(lastMoveX < (box2.x + box2.width)))
{
y = (box2.y - height);
velY = 0.0;
Jumping = false;
Jumping2 = false;
}
//Check Bottom box side
else if((getCenterY() > box2.getCenterY()) &&
((lastMoveX + width) > (box2.x)) &&
(lastMoveX < (box2.x + box2.width)))
{
y = (box2.y + box2.height);
velY = 0.0;
}
}
}
}
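/*
 * A minimal usage sketch (illustrative only; GRAVITY, "platform" and the game loop are
 * assumptions, not part of this file):
 *
 *   Player player = new Player(240, 20, 20, 40, 100.0, Color.BLUE);
 *   player.isPlayer = true;
 *   player.setAlive(true);
 *   // each tick:
 *   player.velY += GRAVITY;              // hypothetical gravity constant
 *   player.incX(player.velX);
 *   player.incY(player.velY);
 *   player.checkBoxColisions(platform);  // resolve against another box
 *   player.drawBox(g2d);                 // g2d supplied by the component's paint method
 */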
|
|
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v9/services/ad_service.proto
package com.google.ads.googleads.v9.services;
/**
* <pre>
* The result for the ad mutate.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v9.services.MutateAdResult}
*/
public final class MutateAdResult extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v9.services.MutateAdResult)
MutateAdResultOrBuilder {
private static final long serialVersionUID = 0L;
// Use MutateAdResult.newBuilder() to construct.
private MutateAdResult(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private MutateAdResult() {
resourceName_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new MutateAdResult();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private MutateAdResult(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
java.lang.String s = input.readStringRequireUtf8();
resourceName_ = s;
break;
}
case 18: {
com.google.ads.googleads.v9.resources.Ad.Builder subBuilder = null;
if (ad_ != null) {
subBuilder = ad_.toBuilder();
}
ad_ = input.readMessage(com.google.ads.googleads.v9.resources.Ad.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(ad_);
ad_ = subBuilder.buildPartial();
}
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v9.services.AdServiceProto.internal_static_google_ads_googleads_v9_services_MutateAdResult_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v9.services.AdServiceProto.internal_static_google_ads_googleads_v9_services_MutateAdResult_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v9.services.MutateAdResult.class, com.google.ads.googleads.v9.services.MutateAdResult.Builder.class);
}
public static final int RESOURCE_NAME_FIELD_NUMBER = 1;
private volatile java.lang.Object resourceName_;
/**
* <pre>
* The resource name returned for successful operations.
* </pre>
*
* <code>string resource_name = 1;</code>
* @return The resourceName.
*/
@java.lang.Override
public java.lang.String getResourceName() {
java.lang.Object ref = resourceName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resourceName_ = s;
return s;
}
}
/**
* <pre>
* The resource name returned for successful operations.
* </pre>
*
* <code>string resource_name = 1;</code>
* @return The bytes for resourceName.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getResourceNameBytes() {
java.lang.Object ref = resourceName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
resourceName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int AD_FIELD_NUMBER = 2;
private com.google.ads.googleads.v9.resources.Ad ad_;
/**
* <pre>
* The mutated ad with only mutable fields after mutate. The field will only
* be returned when response_content_type is set to "MUTABLE_RESOURCE".
* </pre>
*
* <code>.google.ads.googleads.v9.resources.Ad ad = 2;</code>
* @return Whether the ad field is set.
*/
@java.lang.Override
public boolean hasAd() {
return ad_ != null;
}
/**
* <pre>
* The mutated ad with only mutable fields after mutate. The field will only
* be returned when response_content_type is set to "MUTABLE_RESOURCE".
* </pre>
*
* <code>.google.ads.googleads.v9.resources.Ad ad = 2;</code>
* @return The ad.
*/
@java.lang.Override
public com.google.ads.googleads.v9.resources.Ad getAd() {
return ad_ == null ? com.google.ads.googleads.v9.resources.Ad.getDefaultInstance() : ad_;
}
/**
* <pre>
* The mutated ad with only mutable fields after mutate. The field will only
* be returned when response_content_type is set to "MUTABLE_RESOURCE".
* </pre>
*
* <code>.google.ads.googleads.v9.resources.Ad ad = 2;</code>
*/
@java.lang.Override
public com.google.ads.googleads.v9.resources.AdOrBuilder getAdOrBuilder() {
return getAd();
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_);
}
if (ad_ != null) {
output.writeMessage(2, getAd());
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_);
}
if (ad_ != null) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, getAd());
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v9.services.MutateAdResult)) {
return super.equals(obj);
}
com.google.ads.googleads.v9.services.MutateAdResult other = (com.google.ads.googleads.v9.services.MutateAdResult) obj;
if (!getResourceName()
.equals(other.getResourceName())) return false;
if (hasAd() != other.hasAd()) return false;
if (hasAd()) {
if (!getAd()
.equals(other.getAd())) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER;
hash = (53 * hash) + getResourceName().hashCode();
if (hasAd()) {
hash = (37 * hash) + AD_FIELD_NUMBER;
hash = (53 * hash) + getAd().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v9.services.MutateAdResult parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v9.services.MutateAdResult parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v9.services.MutateAdResult parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v9.services.MutateAdResult parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v9.services.MutateAdResult parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v9.services.MutateAdResult parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v9.services.MutateAdResult parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v9.services.MutateAdResult parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v9.services.MutateAdResult parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v9.services.MutateAdResult parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v9.services.MutateAdResult parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v9.services.MutateAdResult parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v9.services.MutateAdResult prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* The result for the ad mutate.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v9.services.MutateAdResult}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v9.services.MutateAdResult)
com.google.ads.googleads.v9.services.MutateAdResultOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v9.services.AdServiceProto.internal_static_google_ads_googleads_v9_services_MutateAdResult_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v9.services.AdServiceProto.internal_static_google_ads_googleads_v9_services_MutateAdResult_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v9.services.MutateAdResult.class, com.google.ads.googleads.v9.services.MutateAdResult.Builder.class);
}
// Construct using com.google.ads.googleads.v9.services.MutateAdResult.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
resourceName_ = "";
if (adBuilder_ == null) {
ad_ = null;
} else {
ad_ = null;
adBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v9.services.AdServiceProto.internal_static_google_ads_googleads_v9_services_MutateAdResult_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v9.services.MutateAdResult getDefaultInstanceForType() {
return com.google.ads.googleads.v9.services.MutateAdResult.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v9.services.MutateAdResult build() {
com.google.ads.googleads.v9.services.MutateAdResult result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v9.services.MutateAdResult buildPartial() {
com.google.ads.googleads.v9.services.MutateAdResult result = new com.google.ads.googleads.v9.services.MutateAdResult(this);
result.resourceName_ = resourceName_;
if (adBuilder_ == null) {
result.ad_ = ad_;
} else {
result.ad_ = adBuilder_.build();
}
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v9.services.MutateAdResult) {
return mergeFrom((com.google.ads.googleads.v9.services.MutateAdResult)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v9.services.MutateAdResult other) {
if (other == com.google.ads.googleads.v9.services.MutateAdResult.getDefaultInstance()) return this;
if (!other.getResourceName().isEmpty()) {
resourceName_ = other.resourceName_;
onChanged();
}
if (other.hasAd()) {
mergeAd(other.getAd());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.ads.googleads.v9.services.MutateAdResult parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.ads.googleads.v9.services.MutateAdResult) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private java.lang.Object resourceName_ = "";
/**
* <pre>
* The resource name returned for successful operations.
* </pre>
*
* <code>string resource_name = 1;</code>
* @return The resourceName.
*/
public java.lang.String getResourceName() {
java.lang.Object ref = resourceName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resourceName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* The resource name returned for successful operations.
* </pre>
*
* <code>string resource_name = 1;</code>
* @return The bytes for resourceName.
*/
public com.google.protobuf.ByteString
getResourceNameBytes() {
java.lang.Object ref = resourceName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
resourceName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* The resource name returned for successful operations.
* </pre>
*
* <code>string resource_name = 1;</code>
* @param value The resourceName to set.
* @return This builder for chaining.
*/
public Builder setResourceName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
resourceName_ = value;
onChanged();
return this;
}
/**
* <pre>
* The resource name returned for successful operations.
* </pre>
*
* <code>string resource_name = 1;</code>
* @return This builder for chaining.
*/
public Builder clearResourceName() {
resourceName_ = getDefaultInstance().getResourceName();
onChanged();
return this;
}
/**
* <pre>
* The resource name returned for successful operations.
* </pre>
*
* <code>string resource_name = 1;</code>
* @param value The bytes for resourceName to set.
* @return This builder for chaining.
*/
public Builder setResourceNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
resourceName_ = value;
onChanged();
return this;
}
private com.google.ads.googleads.v9.resources.Ad ad_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v9.resources.Ad, com.google.ads.googleads.v9.resources.Ad.Builder, com.google.ads.googleads.v9.resources.AdOrBuilder> adBuilder_;
/**
* <pre>
* The mutated ad with only mutable fields after mutate. The field will only
* be returned when response_content_type is set to "MUTABLE_RESOURCE".
* </pre>
*
* <code>.google.ads.googleads.v9.resources.Ad ad = 2;</code>
* @return Whether the ad field is set.
*/
public boolean hasAd() {
return adBuilder_ != null || ad_ != null;
}
/**
* <pre>
* The mutated ad with only mutable fields after mutate. The field will only
* be returned when response_content_type is set to "MUTABLE_RESOURCE".
* </pre>
*
* <code>.google.ads.googleads.v9.resources.Ad ad = 2;</code>
* @return The ad.
*/
public com.google.ads.googleads.v9.resources.Ad getAd() {
if (adBuilder_ == null) {
return ad_ == null ? com.google.ads.googleads.v9.resources.Ad.getDefaultInstance() : ad_;
} else {
return adBuilder_.getMessage();
}
}
/**
* <pre>
* The mutated ad with only mutable fields after mutate. The field will only
* be returned when response_content_type is set to "MUTABLE_RESOURCE".
* </pre>
*
* <code>.google.ads.googleads.v9.resources.Ad ad = 2;</code>
*/
public Builder setAd(com.google.ads.googleads.v9.resources.Ad value) {
if (adBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ad_ = value;
onChanged();
} else {
adBuilder_.setMessage(value);
}
return this;
}
/**
* <pre>
* The mutated ad with only mutable fields after mutate. The field will only
* be returned when response_content_type is set to "MUTABLE_RESOURCE".
* </pre>
*
* <code>.google.ads.googleads.v9.resources.Ad ad = 2;</code>
*/
public Builder setAd(
com.google.ads.googleads.v9.resources.Ad.Builder builderForValue) {
if (adBuilder_ == null) {
ad_ = builderForValue.build();
onChanged();
} else {
adBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
* <pre>
* The mutated ad with only mutable fields after mutate. The field will only
* be returned when response_content_type is set to "MUTABLE_RESOURCE".
* </pre>
*
* <code>.google.ads.googleads.v9.resources.Ad ad = 2;</code>
*/
public Builder mergeAd(com.google.ads.googleads.v9.resources.Ad value) {
if (adBuilder_ == null) {
if (ad_ != null) {
ad_ =
com.google.ads.googleads.v9.resources.Ad.newBuilder(ad_).mergeFrom(value).buildPartial();
} else {
ad_ = value;
}
onChanged();
} else {
adBuilder_.mergeFrom(value);
}
return this;
}
/**
* <pre>
* The mutated ad with only mutable fields after mutate. The field will only
* be returned when response_content_type is set to "MUTABLE_RESOURCE".
* </pre>
*
* <code>.google.ads.googleads.v9.resources.Ad ad = 2;</code>
*/
public Builder clearAd() {
if (adBuilder_ == null) {
ad_ = null;
onChanged();
} else {
ad_ = null;
adBuilder_ = null;
}
return this;
}
/**
* <pre>
* The mutated ad with only mutable fields after mutate. The field will only
* be returned when response_content_type is set to "MUTABLE_RESOURCE".
* </pre>
*
* <code>.google.ads.googleads.v9.resources.Ad ad = 2;</code>
*/
public com.google.ads.googleads.v9.resources.Ad.Builder getAdBuilder() {
onChanged();
return getAdFieldBuilder().getBuilder();
}
/**
* <pre>
* The mutated ad with only mutable fields after mutate. The field will only
* be returned when response_content_type is set to "MUTABLE_RESOURCE".
* </pre>
*
* <code>.google.ads.googleads.v9.resources.Ad ad = 2;</code>
*/
public com.google.ads.googleads.v9.resources.AdOrBuilder getAdOrBuilder() {
if (adBuilder_ != null) {
return adBuilder_.getMessageOrBuilder();
} else {
return ad_ == null ?
com.google.ads.googleads.v9.resources.Ad.getDefaultInstance() : ad_;
}
}
/**
* <pre>
* The mutated ad with only mutable fields after mutate. The field will only
* be returned when response_content_type is set to "MUTABLE_RESOURCE".
* </pre>
*
* <code>.google.ads.googleads.v9.resources.Ad ad = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v9.resources.Ad, com.google.ads.googleads.v9.resources.Ad.Builder, com.google.ads.googleads.v9.resources.AdOrBuilder>
getAdFieldBuilder() {
if (adBuilder_ == null) {
adBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v9.resources.Ad, com.google.ads.googleads.v9.resources.Ad.Builder, com.google.ads.googleads.v9.resources.AdOrBuilder>(
getAd(),
getParentForChildren(),
isClean());
ad_ = null;
}
return adBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v9.services.MutateAdResult)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v9.services.MutateAdResult)
private static final com.google.ads.googleads.v9.services.MutateAdResult DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v9.services.MutateAdResult();
}
public static com.google.ads.googleads.v9.services.MutateAdResult getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<MutateAdResult>
PARSER = new com.google.protobuf.AbstractParser<MutateAdResult>() {
@java.lang.Override
public MutateAdResult parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new MutateAdResult(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<MutateAdResult> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<MutateAdResult> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v9.services.MutateAdResult getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
|
/*
* Copyright 2016-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.tetopology.management.impl;
import static org.onosproject.tetopology.management.api.TeConstants.NIL_LONG_VALUE;
import java.util.List;
import java.util.Map;
import org.apache.commons.collections.MapUtils;
import org.onosproject.tetopology.management.api.CommonTopologyData;
import org.onosproject.tetopology.management.api.TeTopology;
import org.onosproject.tetopology.management.api.link.TeLink;
import org.onosproject.tetopology.management.api.link.TeLinkTpGlobalKey;
import org.onosproject.tetopology.management.api.link.TeLinkTpKey;
import org.onosproject.tetopology.management.api.node.TeNode;
import org.onosproject.tetopology.management.api.node.TeNodeKey;
import com.google.common.base.MoreObjects;
import com.google.common.base.Objects;
import com.google.common.collect.Lists;
/**
* TE topology representation in store.
*/
public class InternalTeTopology {
private String teTopologyId;
private List<TeNodeKey> teNodeKeys;
private List<TeLinkTpGlobalKey> teLinkKeys;
private CommonTopologyData topologyData;
private long nextTeNodeId = NIL_LONG_VALUE;
private boolean childUpdate;
/**
* Creates an instance of InternalTeTopology.
*
* @param teTopology the TE Topology object
*/
public InternalTeTopology(TeTopology teTopology) {
this.teTopologyId = teTopology.teTopologyIdStringValue();
this.topologyData = new CommonTopologyData(teTopology);
// teNodeKeys
if (MapUtils.isNotEmpty(teTopology.teNodes())) {
this.teNodeKeys = Lists.newArrayList();
for (Map.Entry<Long, TeNode> entry : teTopology.teNodes().entrySet()) {
this.teNodeKeys.add(new TeNodeKey(teTopology.teTopologyId(), entry.getKey()));
}
}
// teLink Keys
if (MapUtils.isNotEmpty(teTopology.teLinks())) {
this.teLinkKeys = Lists.newArrayList();
for (Map.Entry<TeLinkTpKey, TeLink> entry : teTopology.teLinks().entrySet()) {
this.teLinkKeys.add(new TeLinkTpGlobalKey(teTopology.teTopologyId(), entry.getKey()));
}
}
}
/**
     * Creates a default instance of InternalTeTopology.
*
* @param teTopologyId string value of id
*/
public InternalTeTopology(String teTopologyId) {
this.teTopologyId = teTopologyId;
}
/**
* Returns the TE Topology Id string value.
*
* @return the teTopologyId
*/
public String teTopologyId() {
return teTopologyId;
}
/**
* Returns the list of TE node keys in the topology.
*
* @return the teNodeKeys
*/
public List<TeNodeKey> teNodeKeys() {
return teNodeKeys;
}
/**
* Sets the list of TE node keys.
*
* @param teNodeKeys the teNodeKeys to set
*/
public void setTeNodeKeys(List<TeNodeKey> teNodeKeys) {
this.teNodeKeys = teNodeKeys;
}
/**
* Returns the list of TE link keys in the topology.
*
* @return the teLinkKeys
*/
public List<TeLinkTpGlobalKey> teLinkKeys() {
return teLinkKeys;
}
/**
* Sets the list of TE link keys.
*
* @param teLinkKeys the teLinkKeys to set
*/
public void setTeLinkKeys(List<TeLinkTpGlobalKey> teLinkKeys) {
this.teLinkKeys = teLinkKeys;
}
/**
* Returns the common TE topology data.
*
* @return the topology data
*/
public CommonTopologyData topologyData() {
return topologyData;
}
/**
* Sets the common TE topology data.
*
* @param topologyData the topologyData to set
*/
public void setTopologydata(CommonTopologyData topologyData) {
this.topologyData = topologyData;
}
/**
* Returns the next available TE node Id.
*
* @return the next TE nodeId
*/
public long nextTeNodeId() {
return nextTeNodeId;
}
/**
* Sets the next available TE node Id.
*
* @param nextTeNodeId the nextTeNodeId to set
*/
public void setNextTeNodeId(long nextTeNodeId) {
this.nextTeNodeId = nextTeNodeId;
}
/**
     * Returns whether the data was updated by a child change.
*
* @return value of childUpdate
*/
public boolean childUpdate() {
return childUpdate;
}
/**
     * Sets the flag indicating whether the data was updated by a child change.
*
* @param childUpdate the childUpdate value to set
*/
public void setChildUpdate(boolean childUpdate) {
this.childUpdate = childUpdate;
}
@Override
public int hashCode() {
return Objects.hashCode(teTopologyId, teNodeKeys, teLinkKeys,
topologyData);
}
@Override
public boolean equals(Object object) {
if (this == object) {
return true;
}
if (object instanceof InternalTeTopology) {
InternalTeTopology that = (InternalTeTopology) object;
return Objects.equal(teTopologyId, that.teTopologyId)
&& Objects.equal(teNodeKeys, that.teNodeKeys)
&& Objects.equal(teLinkKeys, that.teLinkKeys)
&& Objects.equal(topologyData, that.topologyData);
}
return false;
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("teTopologyId", teTopologyId)
.add("teNodeKeys", teNodeKeys)
.add("teLinkKeys", teLinkKeys)
.add("topologyData", topologyData)
.add("nextTeNodeId", nextTeNodeId)
.add("childUpdate", childUpdate)
.toString();
}
}
|
|
/*
* Copyright 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.template.soy.jbcsrc.restricted;
import static com.google.common.base.Preconditions.checkArgument;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.html.types.SafeHtml;
import com.google.common.html.types.SafeHtmlProto;
import com.google.common.html.types.SafeUrl;
import com.google.common.html.types.SafeUrlProto;
import com.google.common.html.types.TrustedResourceUrl;
import com.google.common.html.types.TrustedResourceUrlProto;
import com.google.common.primitives.Ints;
import com.google.protobuf.Message;
import com.google.template.soy.base.internal.SanitizedContentKind;
import com.google.template.soy.data.Dir;
import com.google.template.soy.data.LoggingAdvisingAppendable;
import com.google.template.soy.data.SanitizedContent;
import com.google.template.soy.data.SanitizedContent.ContentKind;
import com.google.template.soy.data.SoyLegacyObjectMap;
import com.google.template.soy.data.SoyList;
import com.google.template.soy.data.SoyMap;
import com.google.template.soy.data.SoyProtoValue;
import com.google.template.soy.data.SoyRecord;
import com.google.template.soy.data.SoyValue;
import com.google.template.soy.data.SoyValueProvider;
import com.google.template.soy.data.SoyVisualElement;
import com.google.template.soy.data.SoyVisualElementData;
import com.google.template.soy.data.internal.Converters;
import com.google.template.soy.data.restricted.BooleanData;
import com.google.template.soy.data.restricted.FloatData;
import com.google.template.soy.data.restricted.IntegerData;
import com.google.template.soy.data.restricted.NumberData;
import com.google.template.soy.data.restricted.StringData;
import com.google.template.soy.internal.proto.JavaQualifiedNames;
import com.google.template.soy.jbcsrc.api.RenderResult;
import com.google.template.soy.jbcsrc.restricted.Expression.Feature;
import com.google.template.soy.jbcsrc.restricted.Expression.Features;
import com.google.template.soy.jbcsrc.shared.CompiledTemplate;
import com.google.template.soy.jbcsrc.shared.LargeStringConstantFactory;
import com.google.template.soy.jbcsrc.shared.Names;
import com.google.template.soy.jbcsrc.shared.RenderContext;
import com.google.template.soy.jbcsrc.shared.StackFrame;
import com.google.template.soy.logging.LoggableElementMetadata;
import com.google.template.soy.types.SoyProtoEnumType;
import com.google.template.soy.types.SoyProtoType;
import com.google.template.soy.types.SoyType;
import java.io.Closeable;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import javax.annotation.Nullable;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.Handle;
import org.objectweb.asm.Label;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import org.objectweb.asm.commons.Method;
import org.objectweb.asm.util.Printer;
/** A set of utilities for generating simple expressions in bytecode */
public final class BytecodeUtils {
// https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.11
private static final int MAX_CONSTANT_STRING_LENGTH = 65535;
public static final TypeInfo OBJECT = TypeInfo.create(Object.class);
private static final Type OBJECT_ARRAY_TYPE = Type.getType(Object[].class);
public static final Type LOGGING_ADVISING_APPENDABLE_TYPE =
Type.getType(LoggingAdvisingAppendable.class);
public static final Type LOGGING_ADVISING_BUILDER_TYPE =
Type.getType(LoggingAdvisingAppendable.BufferingAppendable.class);
public static final Type COMPILED_TEMPLATE_TYPE = Type.getType(CompiledTemplate.class);
public static final Type COMPILED_TEMPLATE_TEMPLATE_VALUE_TYPE =
Type.getType(CompiledTemplate.TemplateValue.class);
public static final Type CONTENT_KIND_TYPE = Type.getType(ContentKind.class);
public static final Type CLOSEABLE_TYPE = Type.getType(Closeable.class);
public static final Type DIR_TYPE = Type.getType(Dir.class);
public static final Type HASH_MAP_TYPE = Type.getType(HashMap.class);
public static final Type NUMBER_DATA_TYPE = Type.getType(NumberData.class);
public static final Type INTEGER_DATA_TYPE = Type.getType(IntegerData.class);
public static final Type FLOAT_DATA_TYPE = Type.getType(FloatData.class);
public static final Type BOOLEAN_DATA_TYPE = Type.getType(BooleanData.class);
public static final Type STRING_DATA_TYPE = Type.getType(StringData.class);
public static final Type LINKED_HASH_MAP_TYPE = Type.getType(LinkedHashMap.class);
public static final Type LIST_TYPE = Type.getType(List.class);
public static final Type IMMUTIBLE_LIST_TYPE = Type.getType(ImmutableList.class);
public static final Type IMMUTIBLE_MAP_TYPE = Type.getType(ImmutableMap.class);
public static final Type MAP_TYPE = Type.getType(Map.class);
public static final Type MAP_ENTRY_TYPE = Type.getType(Map.Entry.class);
public static final Type MESSAGE_TYPE = Type.getType(Message.class);
public static final Type NULL_POINTER_EXCEPTION_TYPE = Type.getType(NullPointerException.class);
public static final Type RENDER_CONTEXT_TYPE = Type.getType(RenderContext.class);
public static final Type RENDER_RESULT_TYPE = Type.getType(RenderResult.class);
public static final Type SANITIZED_CONTENT_TYPE = Type.getType(SanitizedContent.class);
public static final Type SOY_LIST_TYPE = Type.getType(SoyList.class);
public static final Type SOY_LEGACY_OBJECT_MAP_TYPE = Type.getType(SoyLegacyObjectMap.class);
public static final Type SOY_MAP_TYPE = Type.getType(SoyMap.class);
public static final Type SOY_PROTO_VALUE_TYPE = Type.getType(SoyProtoValue.class);
public static final Type SOY_RECORD_TYPE = Type.getType(SoyRecord.class);
public static final Type SOY_VALUE_TYPE = Type.getType(SoyValue.class);
public static final Type SOY_VALUE_PROVIDER_TYPE = Type.getType(SoyValueProvider.class);
public static final Type SOY_STRING_TYPE = Type.getType(StringData.class);
public static final Type STRING_TYPE = Type.getType(String.class);
public static final Type THROWABLE_TYPE = Type.getType(Throwable.class);
public static final Type ILLEGAL_STATE_EXCEPTION_TYPE = Type.getType(IllegalStateException.class);
public static final Type SOY_VISUAL_ELEMENT_TYPE = Type.getType(SoyVisualElement.class);
public static final Type SOY_VISUAL_ELEMENT_DATA_TYPE = Type.getType(SoyVisualElementData.class);
public static final Type CLASS_TYPE = Type.getType(Class.class);
public static final Type INTEGER_TYPE = Type.getType(Integer.class);
public static final Type BOXED_LONG_TYPE = Type.getType(Long.class);
public static final Type BOXED_BOOLEAN_TYPE = Type.getType(Boolean.class);
public static final Type BOXED_DOUBLE_TYPE = Type.getType(Double.class);
public static final Type BOXED_FLOAT_TYPE = Type.getType(Float.class);
public static final Type NUMBER_TYPE = Type.getType(Number.class);
public static final Type LOGGABLE_ELEMENT_METADATA_TYPE =
Type.getType(LoggableElementMetadata.class);
public static final Type STACK_FRAME_TYPE = Type.getType(StackFrame.class);
public static final Type SAFE_URL_TYPE = Type.getType(SafeUrl.class);
public static final Type SAFE_URL_PROTO_TYPE = Type.getType(SafeUrlProto.class);
public static final Type TRUSTED_RESOURCE_PROTO_TYPE =
Type.getType(TrustedResourceUrlProto.class);
public static final Type SAFE_HTML_PROTO_TYPE = Type.getType(SafeHtmlProto.class);
public static final Type SAFE_HTML_TYPE = Type.getType(SafeHtml.class);
public static final Type TRUSTED_RESOURCE_URL_TYPE = Type.getType(TrustedResourceUrl.class);
public static final Method CLASS_INIT = Method.getMethod("void <clinit>()");
public static final Method NULLARY_INIT = Method.getMethod("void <init>()");
private static final LoadingCache<Type, Optional<Class<?>>> objectTypeToClassCache =
CacheBuilder.newBuilder()
.build(
new CacheLoader<Type, Optional<Class<?>>>() {
@Override
public Optional<Class<?>> load(Type key) throws Exception {
switch (key.getSort()) {
case Type.ARRAY:
Optional<Class<?>> elementType =
objectTypeToClassCache.getUnchecked(key.getElementType());
if (elementType.isPresent()) {
// The easiest way to generically get an array class.
return Optional.of(Array.newInstance(elementType.get(), 0).getClass());
}
return Optional.empty();
case Type.VOID:
return Optional.of(void.class);
case Type.BOOLEAN:
return Optional.of(boolean.class);
case Type.BYTE:
return Optional.of(byte.class);
case Type.CHAR:
return Optional.of(char.class);
case Type.DOUBLE:
return Optional.of(double.class);
case Type.INT:
return Optional.of(int.class);
case Type.SHORT:
return Optional.of(short.class);
case Type.LONG:
return Optional.of(long.class);
case Type.FLOAT:
return Optional.of(float.class);
case Type.OBJECT:
try {
String className = key.getClassName();
if (className.startsWith(Names.CLASS_PREFIX)) {
// if the class is generated, don't try to look it up.
// It might actually succeed in a case where we have the class on our
// classpath already!
return Optional.empty();
}
return Optional.of(
Class.forName(
className,
/*initialize=*/ false,
BytecodeUtils.class.getClassLoader()));
} catch (ClassNotFoundException e) {
return Optional.empty();
}
default:
throw new IllegalArgumentException("unsupported type: " + key);
}
}
});
private BytecodeUtils() {}
/**
* Returns {@code true} if {@code left} is possibly assignable from {@code right}.
*
* <p>Analogous to {@code right instanceof left}.
*/
public static boolean isPossiblyAssignableFrom(Type left, Type right) {
return doIsAssignableFrom(left, right, true);
}
/**
* Returns {@code true} if {@code left} is definitely assignable from {@code right}.
*
* <p>Analogous to {@code right instanceof left}.
*/
public static boolean isDefinitelyAssignableFrom(Type left, Type right) {
return doIsAssignableFrom(left, right, false);
}
/**
* Checks if {@code left} is assignable from {@code right}, however if we don't have information
* about one of the types then this returns {@code failOpen}.
*/
private static boolean doIsAssignableFrom(Type left, Type right, boolean failOpen) {
if (left.equals(right)) {
return true;
}
if (left.getSort() != right.getSort()) {
return false;
}
if (left.getSort() != Type.OBJECT) {
return false; // all other sorts require exact equality (even arrays)
}
// for object types we really need to know type hierarchy information to test for whether
// right is assignable to left.
Optional<Class<?>> leftClass = objectTypeToClassCache.getUnchecked(left);
Optional<Class<?>> rightClass = objectTypeToClassCache.getUnchecked(right);
if (!leftClass.isPresent() || !rightClass.isPresent()) {
// This means one of the types being compared is a generated object. So we can't easily check
// it. Just delegate responsibility to the verifier.
return failOpen;
}
return leftClass.get().isAssignableFrom(rightClass.get());
}
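  // Illustrative outcomes of the failOpen behaviour above (a sketch; "generatedType" stands for
  // any Type whose class name starts with Names.CLASS_PREFIX, which the cache refuses to load):
  //   isPossiblyAssignableFrom(SOY_VALUE_TYPE, generatedType)    -> true   (fails open)
  //   isDefinitelyAssignableFrom(SOY_VALUE_TYPE, generatedType)  -> false  (fails closed)
  //   isDefinitelyAssignableFrom(LIST_TYPE, IMMUTIBLE_LIST_TYPE) -> true   (both classes load;
  //                                                                         ImmutableList implements List)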
/**
* Returns the runtime class represented by the given type.
*
* @throws IllegalArgumentException if the class cannot be found. It is expected that this method
* will only be called for types that have a runtime on the compilers classpath.
*/
public static Class<?> classFromAsmType(Type type) {
Optional<Class<?>> maybeClass = objectTypeToClassCache.getUnchecked(type);
if (!maybeClass.isPresent()) {
throw new IllegalArgumentException("Could not load: " + type);
}
return maybeClass.get();
}
private static final Expression FALSE =
new Expression(Type.BOOLEAN_TYPE, Feature.CHEAP) {
@Override
protected void doGen(CodeBuilder mv) {
mv.pushBoolean(false);
}
};
private static final Expression TRUE =
new Expression(Type.BOOLEAN_TYPE, Feature.CHEAP) {
@Override
protected void doGen(CodeBuilder mv) {
mv.pushBoolean(true);
}
};
/** Returns an {@link Expression} that can load the given boolean constant. */
public static Expression constant(boolean value) {
return value ? TRUE : FALSE;
}
/** Returns an {@link Expression} that can load the given int constant. */
public static Expression constant(final int value) {
return new Expression(Type.INT_TYPE, Feature.CHEAP) {
@Override
protected void doGen(CodeBuilder mv) {
mv.pushInt(value);
}
};
}
/** Returns an {@link Expression} that can load the given char constant. */
public static Expression constant(final char value) {
return new Expression(Type.CHAR_TYPE, Feature.CHEAP) {
@Override
protected void doGen(CodeBuilder mv) {
mv.pushInt(value);
}
};
}
/** Returns an {@link Expression} that can load the given long constant. */
public static Expression constant(final long value) {
return new Expression(Type.LONG_TYPE, Feature.CHEAP) {
@Override
protected void doGen(CodeBuilder mv) {
mv.pushLong(value);
}
};
}
/** Returns an {@link Expression} that can load the given double constant. */
public static Expression constant(final double value) {
return new Expression(Type.DOUBLE_TYPE, Feature.CHEAP) {
@Override
protected void doGen(CodeBuilder mv) {
mv.pushDouble(value);
}
};
}
/** Returns an {@link Expression} that can load the given String constant. */
public static Expression constant(String value) {
// string constants use a "modified UTF8" encoding
// https://en.wikipedia.org/wiki/UTF-8#Modified_UTF-8
// and are limited by the classfile format to contain no more than 65535 bytes
// https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.4.7
// In soy we often have large constants that can exceed these limits, which is annoying since
// it is difficult to predict whether a given string constant will exceed these limits (since it
// needs to be encoded first).
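    // Worked example of the per-char byte counts computed below (illustrative): 'a' (U+0061)
    // encodes as 1 byte, '\u00e9' as 2 bytes, '\u20ac' as 3 bytes, so a constant of roughly
    // 22,000 euro signs already exceeds MAX_CONSTANT_STRING_LENGTH and is split into chunks.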
int previousStart = 0;
List<String> stringConstants = new ArrayList<>();
int byteCount = 0;
int index = 0;
while (index < value.length()) {
char c = value.charAt(index);
int charBytes;
// This algorithm is described here
// https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.4.7
if (c >= '\001' && c <= '\177') {
charBytes = 1;
} else if (c > '\u07FF') {
charBytes = 3;
} else {
charBytes = 2;
}
// does this char push us over the limit?
if (byteCount + charBytes > MAX_CONSTANT_STRING_LENGTH) {
stringConstants.add(value.substring(previousStart, index));
byteCount = 0;
previousStart = index;
}
byteCount += charBytes;
index++;
}
stringConstants.add(value.substring(previousStart));
return new Expression(STRING_TYPE, Feature.CHEAP, Feature.NON_NULLABLE) {
@Override
protected void doGen(CodeBuilder cb) {
if (stringConstants.size() == 1) {
cb.pushString(stringConstants.get(0));
} else {
cb.visitInvokeDynamicInsn(
"constantString",
Type.getMethodDescriptor(STRING_TYPE),
LARGE_STRING_CONSTANT_HANDLE,
stringConstants.toArray());
}
}
};
}
/** Returns an {@link Expression} that evaluates to the given ContentKind, or null. */
public static Expression constant(@Nullable ContentKind kind) {
return (kind == null)
? BytecodeUtils.constantNull(CONTENT_KIND_TYPE)
: FieldRef.enumReference(kind).accessor();
}
/** Returns an {@link Expression} that evaluates to the given Dir, or null. */
public static Expression constant(@Nullable Dir dir) {
return (dir == null)
? BytecodeUtils.constantNull(DIR_TYPE)
: FieldRef.enumReference(dir).accessor();
}
public static Expression constant(Type type) {
return new Expression(CLASS_TYPE, Feature.CHEAP, Feature.NON_NULLABLE) {
@Override
protected void doGen(CodeBuilder mv) {
mv.pushType(type);
}
};
}
private static final Handle LARGE_STRING_CONSTANT_HANDLE =
MethodRef.create(
LargeStringConstantFactory.class,
"bootstrapLargeStringConstant",
MethodHandles.Lookup.class,
String.class,
MethodType.class,
String[].class)
.asHandle();
/**
* Returns an {@link Expression} that evaluates to the {@link ContentKind} value that is
* equivalent to the given {@link SanitizedContentKind}, or null.
*/
public static Expression constantSanitizedContentKindAsContentKind(SanitizedContentKind kind) {
return FieldRef.enumReference(Converters.contentKindfromSanitizedContentKind(kind)).accessor();
}
/** Returns an {@link Expression} with the given type that always returns null. */
public static Expression constantNull(Type type) {
checkArgument(
type.getSort() == Type.OBJECT || type.getSort() == Type.ARRAY,
"%s is not a reference type",
type);
return new Expression(type, Feature.CHEAP) {
@Override
protected void doGen(CodeBuilder mv) {
mv.visitInsn(Opcodes.ACONST_NULL);
}
};
}
/**
* Returns an expression that does a numeric conversion cast from the given expression to the
* given type.
*
* @throws IllegalArgumentException if either the expression or the target type is not a numeric
* primitive
*/
public static Expression numericConversion(Expression expr, Type to) {
if (to.equals(expr.resultType())) {
return expr;
}
if (!isNumericPrimitive(to) || !isNumericPrimitive(expr.resultType())) {
throw new IllegalArgumentException("Cannot convert from " + expr.resultType() + " to " + to);
}
return new Expression(to, expr.features()) {
@Override
protected void doGen(CodeBuilder adapter) {
expr.gen(adapter);
adapter.cast(expr.resultType(), to);
}
};
}
private static boolean isNumericPrimitive(Type type) {
int sort = type.getSort();
switch (sort) {
case Type.OBJECT:
case Type.ARRAY:
case Type.VOID:
case Type.METHOD:
case Type.BOOLEAN:
return false;
case Type.BYTE:
case Type.CHAR:
case Type.DOUBLE:
case Type.INT:
case Type.SHORT:
case Type.LONG:
case Type.FLOAT:
return true;
default:
throw new AssertionError("unexpected type " + type);
}
}
/** Returns {@code true} if {@link Type} is a primitive type. */
public static boolean isPrimitive(Type type) {
switch (type.getSort()) {
case Type.OBJECT:
case Type.ARRAY:
return false;
case Type.BOOLEAN:
case Type.BYTE:
case Type.CHAR:
case Type.DOUBLE:
case Type.INT:
case Type.SHORT:
case Type.LONG:
case Type.FLOAT:
return true;
case Type.VOID:
case Type.METHOD:
throw new IllegalArgumentException("Invalid type: " + type);
default:
throw new AssertionError("unexpected type " + type);
}
}
/**
* Generates a default nullary public constructor for the given type on the {@link ClassVisitor}.
*
* <p>For java classes this is normally generated by the compiler and looks like:
*
* <pre>{@code
* public Foo() {
* super();
* }
* }</pre>
*/
public static void defineDefaultConstructor(ClassVisitor cv, TypeInfo ownerType) {
CodeBuilder mg = new CodeBuilder(Opcodes.ACC_PUBLIC, NULLARY_INIT, null, cv);
mg.visitCode();
Label start = mg.mark();
Label end = mg.newLabel();
LocalVariable thisVar = LocalVariable.createThisVar(ownerType, start, end);
thisVar.gen(mg);
mg.invokeConstructor(OBJECT.type(), NULLARY_INIT);
mg.returnValue();
mg.mark(end);
thisVar.tableEntry(mg);
mg.endMethod();
}
// TODO(lukes): some of these branch operators are a little too branchy. For example, the
// expression a == b || a == c, could be implemented by
// logicalOr(compare(Opcodes.IFEQ, a, b), compare(Opcodes.IFEQ, a, c)), but that is not optimal
// instead we could allow compare to take an expression for what to do when the comparison fails
// that way we could save a branch. Maybe these operators are a failed abstraction?
/** Compares the two primitive valued expressions using the provided comparison operation. */
public static Expression compare(
final int comparisonOpcode, final Expression left, final Expression right) {
checkArgument(
left.resultType().equals(right.resultType()),
"left and right must have matching types, found %s and %s",
left.resultType(),
right.resultType());
checkIntComparisonOpcode(left.resultType(), comparisonOpcode);
Features features =
Expression.areAllCheap(left, right) ? Features.of(Feature.CHEAP) : Features.of();
return new Expression(Type.BOOLEAN_TYPE, features) {
@Override
protected void doGen(CodeBuilder mv) {
left.gen(mv);
right.gen(mv);
Label ifTrue = mv.newLabel();
Label end = mv.newLabel();
mv.ifCmp(left.resultType(), comparisonOpcode, ifTrue);
mv.pushBoolean(false);
mv.goTo(end);
mv.mark(ifTrue);
mv.pushBoolean(true);
mv.mark(end);
}
};
}
private static void checkIntComparisonOpcode(Type comparisonType, int opcode) {
switch (opcode) {
case Opcodes.IFEQ:
case Opcodes.IFNE:
return;
case Opcodes.IFGT:
case Opcodes.IFGE:
case Opcodes.IFLT:
case Opcodes.IFLE:
if (comparisonType.getSort() == Type.ARRAY || comparisonType.getSort() == Type.OBJECT) {
throw new IllegalArgumentException(
"Type: " + comparisonType + " cannot be compared via " + Printer.OPCODES[opcode]);
}
return;
default:
throw new IllegalArgumentException(
"Unsupported opcode for comparison operation: " + opcode);
}
}
/**
* Returns an expression that evaluates to the logical negation of the given boolean valued
* expression.
*/
public static Expression logicalNot(final Expression baseExpr) {
baseExpr.checkAssignableTo(Type.BOOLEAN_TYPE);
checkArgument(baseExpr.resultType().equals(Type.BOOLEAN_TYPE), "not a boolean expression");
return new Expression(Type.BOOLEAN_TYPE, baseExpr.features()) {
@Override
protected void doGen(CodeBuilder mv) {
baseExpr.gen(mv);
// Surprisingly, java bytecode uses a branch (instead of 'xor 1' or something) to implement
// this. This is most likely useful for allowing true to be represented by any non-zero
// number.
Label ifTrue = mv.newLabel();
Label end = mv.newLabel();
mv.ifZCmp(Opcodes.IFNE, ifTrue); // if not 0 goto ifTrue
mv.pushBoolean(true);
mv.goTo(end);
mv.mark(ifTrue);
mv.pushBoolean(false);
mv.mark(end);
}
};
}
/** Compares two {@link SoyExpression}s for equality using soy == semantics. */
public static Expression compareSoyEquals(final SoyExpression left, final SoyExpression right) {
// We can special case when we know the types.
// If either is a string, we run special logic so test for that first
// otherwise we special case primitives and eventually fall back to our runtime.
SoyRuntimeType leftRuntimeType = left.soyRuntimeType();
SoyRuntimeType rightRuntimeType = right.soyRuntimeType();
if (leftRuntimeType.isKnownString()) {
return doEqualsString(left.unboxAsString(), right);
}
if (rightRuntimeType.isKnownString()) {
// TODO(lukes): we are changing the order of evaluation here.
return doEqualsString(right.unboxAsString(), left);
}
if (leftRuntimeType.isKnownInt()
&& rightRuntimeType.isKnownInt()
&& left.isNonNullable()
&& right.isNonNullable()) {
return compare(Opcodes.IFEQ, left.unboxAsLong(), right.unboxAsLong());
}
if (leftRuntimeType.isKnownNumber()
&& rightRuntimeType.isKnownNumber()
&& left.isNonNullable()
&& right.isNonNullable()
&& (leftRuntimeType.isKnownFloat() || rightRuntimeType.isKnownFloat())) {
return compare(Opcodes.IFEQ, left.coerceToDouble(), right.coerceToDouble());
}
return MethodRef.RUNTIME_EQUAL.invoke(left.box(), right.box());
}
/**
* Compare a string valued expression to another expression using soy == semantics.
*
* @param stringExpr An expression that is known to be an unboxed string
* @param other An expression to compare it to.
*/
private static Expression doEqualsString(SoyExpression stringExpr, SoyExpression other) {
// This is compatible with SharedRuntime.compareString, which interestingly makes == break
// transitivity. See b/21461181
SoyRuntimeType otherRuntimeType = other.soyRuntimeType();
if (otherRuntimeType.isKnownStringOrSanitizedContent()) {
if (stringExpr.isNonNullable()) {
return stringExpr.invoke(MethodRef.EQUALS, other.unboxAsString());
} else {
return MethodRef.OBJECTS_EQUALS.invoke(stringExpr, other.unboxAsString());
}
}
if (otherRuntimeType.isKnownNumber() && other.isNonNullable()) {
// in this case, we actually try to convert stringExpr to a number
return MethodRef.RUNTIME_STRING_EQUALS_AS_NUMBER.invoke(stringExpr, other.coerceToDouble());
}
// We don't know what other is, assume the worst and call out to our boxed implementation for
// string comparisons.
return MethodRef.RUNTIME_COMPARE_NULLABLE_STRING.invoke(stringExpr, other.box());
}
/**
* Returns an expression that evaluates to {@code left} if left is non null, and evaluates to
* {@code right} otherwise.
*/
public static Expression firstNonNull(final Expression left, final Expression right) {
checkArgument(left.resultType().getSort() == Type.OBJECT);
checkArgument(right.resultType().getSort() == Type.OBJECT);
Features features = Features.of();
if (Expression.areAllCheap(left, right)) {
features = features.plus(Feature.CHEAP);
}
if (right.isNonNullable()) {
features = features.plus(Feature.NON_NULLABLE);
}
return new Expression(left.resultType(), features) {
@Override
protected void doGen(CodeBuilder cb) {
Label leftIsNonNull = new Label();
left.gen(cb); // Stack: L
cb.dup(); // Stack: L, L
cb.ifNonNull(leftIsNonNull); // Stack: L
// pop the extra copy of left
cb.pop(); // Stack:
right.gen(cb); // Stack: R
cb.mark(leftIsNonNull); // At this point the stack has an instance of L or R
}
};
}
/**
* Returns an expression that evaluates equivalently to a java ternary expression: {@code
   * condition ? trueBranch : falseBranch}
*/
public static Expression ternary(
Expression condition, Expression trueBranch, Expression falseBranch) {
// Choose the type of the ternary as the least specific of the two options.
    // In theory we should really choose the least common superclass which would cover more cases,
// but this should be fine for now. Mostly this is just turning (ImmutableList,List)->List. If
// this isn't possible, an error will be thrown and we can re-evaluate this approach.
Type ternaryType;
Type trueType = trueBranch.resultType();
Type falseType = falseBranch.resultType();
if (isDefinitelyAssignableFrom(trueType, falseType)) {
ternaryType = trueType;
} else if (isDefinitelyAssignableFrom(falseType, trueType)) {
ternaryType = falseType;
} else {
throw new IllegalArgumentException(
String.format(
"true (%s) and false (%s) branches must be compatible", trueType, falseType));
}
return ternary(condition, trueBranch, falseBranch, ternaryType);
}
/**
* Returns an expression that evaluates equivalently to a java ternary expression: {@code
   * condition ? trueBranch : falseBranch}.
*
* <p>This allows the caller to specify the result type of the ternary expression. By default the
* ternary expression is typed with the type of the true branch, but the caller can specify the
* result type if they know more about the types of the branches.
*/
public static Expression ternary(
final Expression condition,
final Expression trueBranch,
final Expression falseBranch,
Type resultType) {
checkArgument(
condition.resultType().equals(Type.BOOLEAN_TYPE),
"The condition must be a boolean, got %s",
condition.resultType());
checkArgument(
isPossiblyAssignableFrom(resultType, trueBranch.resultType()),
"expected %s to be assignable to %s",
trueBranch.resultType(),
resultType);
checkArgument(
isPossiblyAssignableFrom(resultType, falseBranch.resultType()),
"expected %s to be assignable to %s",
falseBranch.resultType(),
resultType);
Features features = Features.of();
if (Expression.areAllCheap(condition, trueBranch, falseBranch)) {
features = features.plus(Feature.CHEAP);
}
if (trueBranch.isNonNullable() && falseBranch.isNonNullable()) {
features = features.plus(Feature.NON_NULLABLE);
}
return new Expression(resultType, features) {
@Override
protected void doGen(CodeBuilder mv) {
condition.gen(mv);
Label ifFalse = new Label();
Label end = new Label();
mv.visitJumpInsn(Opcodes.IFEQ, ifFalse); // if 0 goto ifFalse
trueBranch.gen(mv); // eval true branch
mv.visitJumpInsn(Opcodes.GOTO, end); // jump to the end
mv.visitLabel(ifFalse);
falseBranch.gen(mv); // eval false branch
mv.visitLabel(end);
}
};
}
/**
* Implements the short circuiting logical or ({@code ||}) operator over the list of boolean
* expressions.
*/
public static Expression logicalOr(Expression... expressions) {
return logicalOr(ImmutableList.copyOf(expressions));
}
/**
* Implements the short circuiting logical or ({@code ||}) operator over the list of boolean
* expressions.
*/
public static Expression logicalOr(List<? extends Expression> expressions) {
return doShortCircuitingLogicalOperator(ImmutableList.copyOf(expressions), true);
}
/**
* Implements the short circuiting logical and ({@code &&}) operator over the list of boolean
* expressions.
*/
public static Expression logicalAnd(Expression... expressions) {
return logicalAnd(ImmutableList.copyOf(expressions));
}
/**
* Implements the short circuiting logical and ({@code &&}) operator over the list of boolean
* expressions.
*/
public static Expression logicalAnd(List<? extends Expression> expressions) {
return doShortCircuitingLogicalOperator(ImmutableList.copyOf(expressions), false);
}
private static Expression doShortCircuitingLogicalOperator(
final ImmutableList<? extends Expression> expressions, final boolean isOrOperator) {
checkArgument(!expressions.isEmpty());
for (Expression expr : expressions) {
expr.checkAssignableTo(Type.BOOLEAN_TYPE);
}
if (expressions.size() == 1) {
return expressions.get(0);
}
return new Expression(
Type.BOOLEAN_TYPE,
Expression.areAllCheap(expressions) ? Features.of(Feature.CHEAP) : Features.of()) {
@Override
protected void doGen(CodeBuilder adapter) {
Label end = new Label();
Label shortCircuit = new Label();
for (int i = 0; i < expressions.size(); i++) {
Expression expr = expressions.get(i);
expr.gen(adapter);
if (i == expressions.size() - 1) {
// if we are the last one, just goto end. Whatever the result of the last expression is
// determines the result of the whole expression (when all prior tests fail).
adapter.goTo(end);
} else {
adapter.ifZCmp(isOrOperator ? Opcodes.IFNE : Opcodes.IFEQ, shortCircuit);
}
}
adapter.mark(shortCircuit);
adapter.pushBoolean(isOrOperator); // default for || is true && is false
adapter.mark(end);
}
};
}
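  // Sketch of the control flow emitted for logicalOr(a, b, c) (illustrative only):
  //   gen(a); ifne  shortCircuit
  //   gen(b); ifne  shortCircuit
  //   gen(c); goto  end
  //   shortCircuit: push true      // (logicalAnd branches with ifeq and pushes false here)
  //   end: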
/**
* Returns an expression that returns a new {@code ImmutableList} containing the given items.
*
* <p>NOTE: {@code ImmutableList} rejects null elements.
*/
public static Expression asImmutableList(Iterable<? extends Expression> items) {
ImmutableList<Expression> copy = ImmutableList.copyOf(items);
if (copy.size() < MethodRef.IMMUTABLE_LIST_OF.size()) {
return MethodRef.IMMUTABLE_LIST_OF.get(copy.size()).invoke(copy);
}
ImmutableList<Expression> explicit = copy.subList(0, MethodRef.IMMUTABLE_LIST_OF.size());
Expression remainder =
asArray(OBJECT_ARRAY_TYPE, copy.subList(MethodRef.IMMUTABLE_LIST_OF.size(), copy.size()));
return MethodRef.IMMUTABLE_LIST_OF_ARRAY.invoke(
Iterables.concat(explicit, ImmutableList.of(remainder)));
}
private static Expression asArray(
final Type arrayType, final ImmutableList<? extends Expression> elements) {
final Type elementType = arrayType.getElementType();
return new Expression(arrayType, Feature.NON_NULLABLE) {
@Override
protected void doGen(CodeBuilder adapter) {
adapter.pushInt(elements.size());
adapter.newArray(elementType);
for (int i = 0; i < elements.size(); i++) {
adapter.dup(); // dup the array
adapter.pushInt(i); // the index to store into
elements.get(i).gen(adapter); // the element to store
adapter.arrayStore(elementType);
}
}
};
}
/** Returns an expression that returns a new {@link ArrayList} containing all the given items. */
public static Expression asList(Iterable<? extends Expression> items) {
final ImmutableList<Expression> copy = ImmutableList.copyOf(items);
if (copy.isEmpty()) {
return MethodRef.IMMUTABLE_LIST_OF.get(0).invoke();
}
// Note, we cannot necessarily use ImmutableList for anything besides the empty list because
// we may need to put a null in it.
final Expression construct = ConstructorRef.ARRAY_LIST_SIZE.construct(constant(copy.size()));
return new Expression(LIST_TYPE, Feature.NON_NULLABLE) {
@Override
protected void doGen(CodeBuilder mv) {
construct.gen(mv);
for (Expression child : copy) {
mv.dup();
child.gen(mv);
MethodRef.ARRAY_LIST_ADD.invokeUnchecked(mv);
mv.pop(); // pop the bool result of arraylist.add
}
}
};
}
/**
* Outputs bytecode that will test the item at the top of the stack for null, and branch to {@code
   * nullExit} if it is {@code null}. At {@code nullExit} there will be a null value at the top
* of the stack.
*/
public static void nullCoalesce(CodeBuilder builder, Label nullExit) {
builder.dup();
Label nonNull = new Label();
builder.ifNonNull(nonNull);
// See http://mail.ow2.org/wws/arc/asm/2016-02/msg00001.html for a discussion of this pattern
// but even though the value at the top of the stack here is null, its type isn't. So we need
// to pop and push. This is the idiomatic pattern.
builder.pop();
builder.pushNull();
builder.goTo(nullExit);
builder.mark(nonNull);
}
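  // Typical call pattern for nullCoalesce (an illustrative sketch; valueExpr and cb are
  // hypothetical locals of the caller):
  //   valueExpr.gen(cb);            // Stack: V, where V may be null
  //   nullCoalesce(cb, nullExit);   // if V == null: pop V, push a plain null, jump to nullExit
  //   ...use the non-null V...      // only reached when V was non-null
  //   cb.mark(nullExit);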
/**
* Outputs bytecode that unboxes the current top element of the stack as {@code asType}. Top of
* stack must not be null.
*
   * <p>Prefer using {@link SoyExpression#unboxAs} over this method whenever possible.
*
   * <p>Guarantees:
   *
   * <ul>
   *   <li>Bytecode output will not change stack height
   *   <li>Output will only change the top element, and nothing below that
   * </ul>
*
* @return the type of the result of the unbox operation
*/
public static Type unboxUnchecked(CodeBuilder cb, SoyRuntimeType soyType, Class<?> asType) {
checkArgument(soyType.isBoxed(), "Expected %s to be a boxed type", soyType);
Type fromType = soyType.runtimeType();
checkArgument(
!SoyValue.class.isAssignableFrom(asType),
"Can't use unboxUnchecked() to convert from %s to a SoyValue: %s.",
fromType,
asType);
// No-op conversion
if (isDefinitelyAssignableFrom(Type.getType(asType), fromType)) {
return fromType;
}
if (asType.equals(boolean.class)) {
MethodRef.SOY_VALUE_BOOLEAN_VALUE.invokeUnchecked(cb);
return Type.BOOLEAN_TYPE;
}
if (asType.equals(long.class)) {
MethodRef.SOY_VALUE_LONG_VALUE.invokeUnchecked(cb);
return Type.LONG_TYPE;
}
if (asType.equals(double.class)) {
MethodRef.SOY_VALUE_FLOAT_VALUE.invokeUnchecked(cb);
return Type.DOUBLE_TYPE;
}
if (asType.equals(String.class)) {
MethodRef.SOY_VALUE_STRING_VALUE.invokeUnchecked(cb);
return STRING_TYPE;
}
if (asType.equals(List.class)) {
cb.checkCast(SOY_LIST_TYPE);
MethodRef.SOY_LIST_AS_JAVA_LIST.invokeUnchecked(cb);
return LIST_TYPE;
}
if (asType.equals(Message.class)) {
if (!isDefinitelyAssignableFrom(SOY_PROTO_VALUE_TYPE, fromType)) {
cb.checkCast(SOY_PROTO_VALUE_TYPE);
}
MethodRef.SOY_PROTO_VALUE_GET_PROTO.invokeUnchecked(cb);
return MESSAGE_TYPE;
}
throw new UnsupportedOperationException(
"Can't unbox top of stack from " + fromType + " to " + asType);
}
/** Returns an expression that returns a new {@link HashMap} containing all the given entries. */
public static Expression newHashMap(
Iterable<? extends Expression> keys, Iterable<? extends Expression> values) {
return newMap(keys, values, ConstructorRef.HASH_MAP_CAPACITY, HASH_MAP_TYPE);
}
/**
* Returns an expression that returns a new {@link LinkedHashMap} containing all the given
* entries.
*/
public static Expression newLinkedHashMap(
Iterable<? extends Expression> keys, Iterable<? extends Expression> values) {
return newMap(keys, values, ConstructorRef.LINKED_HASH_MAP_CAPACITY, LINKED_HASH_MAP_TYPE);
}
private static Expression newMap(
Iterable<? extends Expression> keys,
Iterable<? extends Expression> values,
ConstructorRef constructorRef,
Type mapType) {
final ImmutableList<Expression> keysCopy = ImmutableList.copyOf(keys);
final ImmutableList<Expression> valuesCopy = ImmutableList.copyOf(values);
checkArgument(keysCopy.size() == valuesCopy.size());
for (int i = 0; i < keysCopy.size(); i++) {
checkArgument(keysCopy.get(i).resultType().getSort() == Type.OBJECT);
checkArgument(valuesCopy.get(i).resultType().getSort() == Type.OBJECT);
}
final Expression construct =
constructorRef.construct(constant(hashMapCapacity(keysCopy.size())));
return new Expression(mapType, Feature.NON_NULLABLE) {
@Override
protected void doGen(CodeBuilder mv) {
construct.gen(mv);
for (int i = 0; i < keysCopy.size(); i++) {
Expression key = keysCopy.get(i);
Expression value = valuesCopy.get(i);
mv.dup();
key.gen(mv);
value.gen(mv);
MethodRef.MAP_PUT.invokeUnchecked(mv);
mv.pop(); // pop the Object result of map.put
}
}
};
}
private static int hashMapCapacity(int expectedSize) {
if (expectedSize < 3) {
return expectedSize + 1;
}
if (expectedSize < Ints.MAX_POWER_OF_TWO) {
// This is the calculation used in JDK8 to resize when a putAll
// happens; it seems to be the most conservative calculation we
// can make. 0.75 is the default load factor.
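      // e.g. expectedSize = 12 gives (int) (12 / 0.75F + 1.0F) = 17, leaving headroom before a resize.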
return (int) (expectedSize / 0.75F + 1.0F);
}
return Integer.MAX_VALUE; // any large value
}
/**
* Returns a {@link SoyExpression} that evaluates to true if the expression evaluated to a
* non-null value.
*/
public static SoyExpression isNonNull(final Expression expr) {
if (BytecodeUtils.isPrimitive(expr.resultType())) {
// Reference the statement so that the SoyValueProvider detaches for resolve, and
// TemplateAnalysis will correctly cause subsequent accesses to resolve immediately.
return SoyExpression.forBool(expr.toStatement().then(BytecodeUtils.constant(true)));
}
return SoyExpression.forBool(
new Expression(Type.BOOLEAN_TYPE, expr.features()) {
@Override
protected void doGen(CodeBuilder adapter) {
expr.gen(adapter);
Label isNull = new Label();
adapter.ifNull(isNull);
// non-null
adapter.pushBoolean(true);
Label end = new Label();
adapter.goTo(end);
adapter.mark(isNull);
adapter.pushBoolean(false);
adapter.mark(end);
}
});
}
/** Returns a {@link SoyExpression} that evaluates to true if the expression evaluated to null. */
public static SoyExpression isNull(final Expression expr) {
if (BytecodeUtils.isPrimitive(expr.resultType())) {
// Reference the statement so that the SoyValueProvider detaches for resolve, and
// TemplateAnalysis will correctly cause subsequent accesses to resolve immediately.
return SoyExpression.forBool(expr.toStatement().then(BytecodeUtils.constant(false)));
}
// This is what javac generates for 'someObject == null'
return SoyExpression.forBool(
new Expression(Type.BOOLEAN_TYPE, expr.features()) {
@Override
protected void doGen(CodeBuilder adapter) {
expr.gen(adapter);
Label isNull = new Label();
adapter.ifNull(isNull);
// non-null
adapter.pushBoolean(false);
Label end = new Label();
adapter.goTo(end);
adapter.mark(isNull);
adapter.pushBoolean(true);
adapter.mark(end);
}
});
}
public static Type getTypeForClassName(String name) {
return Type.getType('L' + name.replace('.', '/') + ';');
}
public static Type getTypeForSoyType(SoyType type) {
switch (type.getKind()) {
case INT:
return BOXED_LONG_TYPE;
case FLOAT:
return BOXED_DOUBLE_TYPE;
case BOOL:
return BOXED_BOOLEAN_TYPE;
case STRING:
return STRING_TYPE;
case PROTO:
return getTypeForClassName(
JavaQualifiedNames.getClassName(((SoyProtoType) type).getDescriptor()));
case PROTO_ENUM:
return getTypeForClassName(
JavaQualifiedNames.getClassName(((SoyProtoEnumType) type).getDescriptor()));
default:
throw new IllegalArgumentException("unsupported type: " + type);
}
}
/** Converts int to Integer, long to Long, etc. Java "boxing", not Soy "boxing". */
public static Expression boxJavaPrimitive(SoyExpression actualParam) {
Type type = actualParam.soyRuntimeType().runtimeType();
switch (type.getSort()) {
case Type.INT:
return MethodRef.BOX_INTEGER.invoke(actualParam);
case Type.LONG:
return MethodRef.BOX_LONG.invoke(actualParam);
case Type.BOOLEAN:
return MethodRef.BOX_BOOLEAN.invoke(actualParam);
case Type.FLOAT:
return MethodRef.BOX_FLOAT.invoke(actualParam);
case Type.DOUBLE:
return MethodRef.BOX_DOUBLE.invoke(actualParam);
default:
throw new IllegalArgumentException(type.getClassName());
}
}
}
|
|
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.iot.model;
import java.io.Serializable;
/**
* <p>
* The output from the CreateCertificateFromCsr operation.
* </p>
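 * <p>
 * Example of the chained {@code with*} setters declared below (an illustrative sketch;
 * {@code arn}, {@code id} and {@code pem} are placeholder values, since this object is normally
 * populated by the service client rather than built by hand):
 * </p>
 *
 * <pre>
 * CreateCertificateFromCsrResult result = new CreateCertificateFromCsrResult()
 *         .withCertificateArn(arn)
 *         .withCertificateId(id)
 *         .withCertificatePem(pem);
 * </pre>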
*/
public class CreateCertificateFromCsrResult implements Serializable, Cloneable {
/**
* <p>
* The Amazon Resource Name (ARN) of the certificate. You can use the ARN as
* a principal for policy operations.
* </p>
*/
private String certificateArn;
/**
* <p>
* The ID of the certificate. Certificate management operations only take a
* certificateId.
* </p>
*/
private String certificateId;
/**
* <p>
* The certificate data, in PEM format.
* </p>
*/
private String certificatePem;
/**
* <p>
* The Amazon Resource Name (ARN) of the certificate. You can use the ARN as
* a principal for policy operations.
* </p>
*
* @param certificateArn
* The Amazon Resource Name (ARN) of the certificate. You can use the
* ARN as a principal for policy operations.
*/
public void setCertificateArn(String certificateArn) {
this.certificateArn = certificateArn;
}
/**
* <p>
* The Amazon Resource Name (ARN) of the certificate. You can use the ARN as
* a principal for policy operations.
* </p>
*
* @return The Amazon Resource Name (ARN) of the certificate. You can use
* the ARN as a principal for policy operations.
*/
public String getCertificateArn() {
return this.certificateArn;
}
/**
* <p>
* The Amazon Resource Name (ARN) of the certificate. You can use the ARN as
* a principal for policy operations.
* </p>
*
* @param certificateArn
* The Amazon Resource Name (ARN) of the certificate. You can use the
* ARN as a principal for policy operations.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public CreateCertificateFromCsrResult withCertificateArn(
String certificateArn) {
setCertificateArn(certificateArn);
return this;
}
/**
* <p>
* The ID of the certificate. Certificate management operations only take a
* certificateId.
* </p>
*
* @param certificateId
* The ID of the certificate. Certificate management operations only
* take a certificateId.
*/
public void setCertificateId(String certificateId) {
this.certificateId = certificateId;
}
/**
* <p>
* The ID of the certificate. Certificate management operations only take a
* certificateId.
* </p>
*
* @return The ID of the certificate. Certificate management operations only
* take a certificateId.
*/
public String getCertificateId() {
return this.certificateId;
}
/**
* <p>
* The ID of the certificate. Certificate management operations only take a
* certificateId.
* </p>
*
* @param certificateId
* The ID of the certificate. Certificate management operations only
* take a certificateId.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public CreateCertificateFromCsrResult withCertificateId(String certificateId) {
setCertificateId(certificateId);
return this;
}
/**
* <p>
* The certificate data, in PEM format.
* </p>
*
* @param certificatePem
* The certificate data, in PEM format.
*/
public void setCertificatePem(String certificatePem) {
this.certificatePem = certificatePem;
}
/**
* <p>
* The certificate data, in PEM format.
* </p>
*
* @return The certificate data, in PEM format.
*/
public String getCertificatePem() {
return this.certificatePem;
}
/**
* <p>
* The certificate data, in PEM format.
* </p>
*
* @param certificatePem
* The certificate data, in PEM format.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public CreateCertificateFromCsrResult withCertificatePem(
String certificatePem) {
setCertificatePem(certificatePem);
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getCertificateArn() != null)
sb.append("CertificateArn: " + getCertificateArn() + ",");
if (getCertificateId() != null)
sb.append("CertificateId: " + getCertificateId() + ",");
if (getCertificatePem() != null)
sb.append("CertificatePem: " + getCertificatePem());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof CreateCertificateFromCsrResult == false)
return false;
CreateCertificateFromCsrResult other = (CreateCertificateFromCsrResult) obj;
if (other.getCertificateArn() == null
^ this.getCertificateArn() == null)
return false;
if (other.getCertificateArn() != null
&& other.getCertificateArn().equals(this.getCertificateArn()) == false)
return false;
if (other.getCertificateId() == null ^ this.getCertificateId() == null)
return false;
if (other.getCertificateId() != null
&& other.getCertificateId().equals(this.getCertificateId()) == false)
return false;
if (other.getCertificatePem() == null
^ this.getCertificatePem() == null)
return false;
if (other.getCertificatePem() != null
&& other.getCertificatePem().equals(this.getCertificatePem()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime
* hashCode
+ ((getCertificateArn() == null) ? 0 : getCertificateArn()
.hashCode());
hashCode = prime
* hashCode
+ ((getCertificateId() == null) ? 0 : getCertificateId()
.hashCode());
hashCode = prime
* hashCode
+ ((getCertificatePem() == null) ? 0 : getCertificatePem()
.hashCode());
return hashCode;
}
@Override
public CreateCertificateFromCsrResult clone() {
try {
return (CreateCertificateFromCsrResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException(
"Got a CloneNotSupportedException from Object.clone() "
+ "even though we're Cloneable!", e);
}
}
}
|
|
package ualberta.cmput301w16t16.glamorousborrowingwhaleapp;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.drawable.BitmapDrawable;
import android.location.Location;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.net.Uri;
import android.os.Bundle;
import android.provider.MediaStore;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AppCompatActivity;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.Toast;
import java.io.ByteArrayOutputStream;
import java.sql.Time;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
/**
 * This activity allows the user to enter all the necessary information about a
 * piece of equipment and save it to their repository of equipment. It also lets
 * the user cancel creating the equipment.
* @author adam, andrew, erin, laura, martina
*/
public class NewListingActivity extends AppCompatActivity {
private EditText name;
private EditText sport;
private EditText size;
private EditText description;
private ImageView photo;
private BidList bids;
private User user;
private Item item;
private double latitude;
private double longitude;
private byte[] photoStream = new byte[65536];
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_new_listing);
final ActionBar actionBar = getSupportActionBar();
actionBar.setTitle("Create New Listing");
actionBar.setHomeButtonEnabled(true);
name = (EditText) findViewById(R.id.name);
sport = (EditText) findViewById(R.id.sport);
size = (EditText) findViewById(R.id.size);
description = (EditText) findViewById(R.id.description);
photo = (ImageView) findViewById(R.id.pictureView);
final ImageButton saveButton;
final ImageButton deleteButton;
user = UserController.getUser();
saveButton = (ImageButton) findViewById(R.id.save);
deleteButton = (ImageButton) findViewById(R.id.delete);
bids = new BidList();
saveButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
                // checking that something was typed into each text field
                if (name.getText().toString().isEmpty() ||
                        sport.getText().toString().isEmpty() ||
                        size.getText().toString().isEmpty() ||
                        description.getText().toString().isEmpty()) {
Toast.makeText(NewListingActivity.this, "Something must be entered in every field.", Toast.LENGTH_SHORT).show();
} else {
// picture management
if (photo == null) {
photo.setImageResource(R.drawable.glamorouswhale1);
}
Bitmap image = ((BitmapDrawable) photo.getDrawable()).getBitmap();
ByteArrayOutputStream photosNeedToBeCompressedToThis = new ByteArrayOutputStream();
image.compress(Bitmap.CompressFormat.JPEG, 100, photosNeedToBeCompressedToThis);
photoStream = photosNeedToBeCompressedToThis.toByteArray();
//Gathers all the variables used for EditText to apply to the Item object.
//toString for compatibility.
item = new Item();
item.setTitle(name.getText().toString());
item.setDescription(description.getText().toString());
item.setSize(size.getText().toString());
item.setAvailability(true);
item.setBids(bids);
item.setOwnerID(user.getID());
item.setRenterID("");
item.setPhoto(photoStream);
item.setSport(sport.getText().toString());
item.setLatitude(latitude);
item.setLongitude(longitude);
//setting controller to this item now for fun
ItemController.setItem(item);
// check whether we have connectivity
if (NetworkUtil.getConnectivityStatus(NewListingActivity.this) == 1) {
// network is available
//Adding the latestItem to the current user's (Controlled by UserController) ItemList
ItemController.addItemElasticSearch(item);
user.addMyItem(item.getID());
// update the user to include the new item in its list
UserController.updateUserElasticSearch(user);
Toast.makeText(NewListingActivity.this, "New Thing Saved!", Toast.LENGTH_SHORT).show();
finish();
} else {
// network is not available
// save the item to user.offlineItems
user.addOfflineItem(item);
// turn on the receiver to watch for network changes
// receiver will add the item when the network is back
NetworkUtil.startListeningForNetwork(v.getContext());
Toast.makeText(NewListingActivity.this, "Item will be pushed once network is connected", Toast.LENGTH_SHORT).show();
finish();
}
}
}
});
deleteButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
//All this does atm is clear the edittext boxes.
name.setText("");
sport.setText("");
size.setText("");
description.setText("");
Toast.makeText(NewListingActivity.this, "View Cleared!", Toast.LENGTH_SHORT).show();
}
});
// this is the gallery selection method for pictures
photo.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent bringTheGallery = new Intent(Intent.ACTION_PICK, MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
final int result = 0;
startActivityForResult(bringTheGallery, result);
}
});
}
//setting up the action bar icons
//taken from http://www.androidhive.info/2013/11/android-working-with-action-bar/
// Apr3/16
@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.main_actions, menu);
return super.onCreateOptionsMenu(menu);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.home:
goToHome();
return true;
default:
return super.onOptionsItemSelected(item);
}
}
public void goToHome() {
Intent i = new Intent(NewListingActivity.this, MyProfileViewActivity.class);
startActivity(i);
}
// this gets the returns from the photo and location Activities
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
switch (requestCode) {
case 0:
if (resultCode == RESULT_OK) {
Uri selectedImage = data.getData();
photo.setImageURI(selectedImage);
} else {
Toast.makeText(this, "Could not load image", Toast.LENGTH_SHORT).show();
}
break;
case 1:
if (resultCode == RESULT_OK) {
Location location = data.getParcelableExtra("location");
latitude = location.getLatitude();
longitude = location.getLongitude();
} else {
Toast.makeText(this, "Could not get location", Toast.LENGTH_SHORT).show();
}
break;
}
}
// @Override
// protected void onPause() {
// //this also clears on home button press. Issue for sure.
// super.onPause();
// name.setText("");
// size.setText("");
// description.setText("");
// }
public void launchGetLocation(View view) {
Intent intent = new Intent(view.getContext(), GetLocationActivity.class);
final int result = 1;
startActivityForResult(intent, result);
}
public void deletePhoto(View view) {
photo.setImageResource(R.drawable.glamorouswhale1);
Toast.makeText(this, "photo returned to default!", Toast.LENGTH_SHORT).show();
}
public void deleteLocation(View view) {
latitude = 0;
longitude = 0;
Toast.makeText(this, "location deleted!", Toast.LENGTH_SHORT).show();
}
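// Illustrative sketch only (not part of the original activity): the save handler above compresses the
// ImageView's Bitmap to a JPEG byte[] before storing it on the Item. Reading it back is the inverse
// operation; a hypothetical helper for that round trip might look like this:
private Bitmap decodePhoto(byte[] stored) {
// decodeByteArray returns null if the bytes are missing or not a valid image
if (stored == null || stored.length == 0) {
return null;
}
return android.graphics.BitmapFactory.decodeByteArray(stored, 0, stored.length);
}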
}
|
|
/*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline.stage.origin.logtail;
import com.codahale.metrics.Counter;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.streamsets.pipeline.api.BatchMaker;
import com.streamsets.pipeline.api.EventRecord;
import com.streamsets.pipeline.api.Field;
import com.streamsets.pipeline.api.Record;
import com.streamsets.pipeline.api.StageException;
import com.streamsets.pipeline.api.base.BaseSource;
import com.streamsets.pipeline.api.impl.Utils;
import com.streamsets.pipeline.config.DataFormat;
import com.streamsets.pipeline.config.FileRollMode;
import com.streamsets.pipeline.config.PostProcessingOptions;
import com.streamsets.pipeline.lib.io.FileEvent;
import com.streamsets.pipeline.lib.io.FileLine;
import com.streamsets.pipeline.lib.io.LiveFile;
import com.streamsets.pipeline.lib.io.LiveFileChunk;
import com.streamsets.pipeline.lib.io.MultiFileInfo;
import com.streamsets.pipeline.lib.io.MultiFileReader;
import com.streamsets.pipeline.lib.io.RollMode;
import com.streamsets.pipeline.lib.parser.DataParser;
import com.streamsets.pipeline.lib.parser.DataParserException;
import com.streamsets.pipeline.lib.parser.DataParserFactory;
import com.streamsets.pipeline.stage.common.DefaultErrorRecordHandler;
import com.streamsets.pipeline.stage.common.ErrorRecordHandler;
import com.streamsets.pipeline.stage.common.HeaderAttributeConstants;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
public class FileTailSource extends BaseSource {
private static final Logger LOG = LoggerFactory.getLogger(FileTailSource.class);
public static final String FILE_TAIL_CONF_PREFIX = "conf.";
public static final String FILE_TAIL_DATA_FORMAT_CONFIG_PREFIX = FILE_TAIL_CONF_PREFIX + "dataFormatConfig.";
private static final String OFFSETS_LAG = "offsets.lag";
private static final String PENDING_FILES = "pending.files";
private final FileTailConfigBean conf;
private final int scanIntervalSecs;
public FileTailSource(FileTailConfigBean conf) {
this(conf, 20);
}
FileTailSource(FileTailConfigBean conf, int scanIntervalSecs) {
this.conf = conf;
this.scanIntervalSecs = scanIntervalSecs;
}
private MultiFileReader multiDirReader;
private long maxWaitTimeMillis;
private ErrorRecordHandler errorRecordHandler;
private DataParserFactory parserFactory;
private String outputLane;
private String metadataLane;
private Map<String, Counter> offsetLagMetric;
private Map<String, Counter> pendingFilesMetric;
private boolean validateFileInfo(FileInfo fileInfo, List<ConfigIssue> issues) {
boolean ok = true;
String fileName = Paths.get(fileInfo.fileFullPath).getFileName().toString();
String token = fileInfo.fileRollMode.getTokenForPattern();
if (!validateFilePathNoNull(fileInfo, fileName, issues)) {
return false;
}
ok &= validateTokenConfiguration(fileInfo, issues, fileName, token);
return ok;
}
private boolean validateFilePathNoNull(FileInfo fileInfo, String fileName, List<ConfigIssue> issues) {
if (fileName == null || fileName.isEmpty()) {
issues.add(
getContext().createConfigIssue(
Groups.FILES.name(),
FILE_TAIL_CONF_PREFIX + "fileInfos",
Errors.TAIL_20,
fileInfo.fileFullPath
)
);
return false;
}
return true;
}
private boolean validateTokenConfiguration(FileInfo fileInfo, List<ConfigIssue> issues, String fileName, String token) {
boolean ok = true;
String fileParentDir = Paths.get(fileInfo.fileFullPath).getParent().toString();
if (!token.isEmpty()) {
// If the token exists, it can't be in the directory name
if(fileParentDir.contains(token)) {
issues.add(
getContext().createConfigIssue(
Groups.FILES.name(),
FILE_TAIL_CONF_PREFIX + "fileInfos",
Errors.TAIL_16,
fileInfo.fileFullPath,
fileInfo.fileRollMode.getTokenForPattern()
)
);
ok = false;
}
// The token has to be in the filename instead
if (!fileName.contains(token)) {
issues.add(
getContext().createConfigIssue(
Groups.FILES.name(),
FILE_TAIL_CONF_PREFIX + "fileInfos",
Errors.TAIL_08,
fileInfo.fileFullPath,
fileInfo.fileRollMode.getTokenForPattern(),
fileName
)
);
ok = false;
}
}
if (ok && fileInfo.fileRollMode == FileRollMode.PATTERN) {
// must provide a pattern if using this roll mode
if (fileInfo.patternForToken == null || fileInfo.patternForToken.isEmpty()) {
ok = false;
issues.add(
getContext().createConfigIssue(
Groups.FILES.name(),
FILE_TAIL_CONF_PREFIX + "fileInfos",
Errors.TAIL_08,
fileInfo.fileFullPath
)
);
} else {
// valid patternForTokens must be parseable regexes
ok &= validatePatternIsValidRegex(fileInfo, issues);
}
// if firstFile is provided, make sure it's possible to use it
if (ok && fileInfo.firstFile != null && !fileInfo.firstFile.isEmpty()) {
RollMode rollMode = fileInfo.fileRollMode.createRollMode(fileInfo.fileFullPath, fileInfo.patternForToken);
if (!rollMode.isFirstAcceptable(fileInfo.firstFile)) {
ok = false;
issues.add(
getContext().createConfigIssue(
Groups.FILES.name(),
FILE_TAIL_CONF_PREFIX + "fileInfos",
Errors.TAIL_19,
fileInfo.fileFullPath
)
);
}
}
}
return ok;
}
private boolean validatePatternIsValidRegex(FileInfo fileInfo, List<ConfigIssue> issues) {
try {
Pattern.compile(fileInfo.patternForToken);
} catch (PatternSyntaxException ex) {
issues.add(
getContext().createConfigIssue(
Groups.FILES.name(),
FILE_TAIL_CONF_PREFIX + "fileInfos",
Errors.TAIL_09,
fileInfo.fileFullPath,
fileInfo.patternForToken,
ex.toString()
)
);
return false;
}
return true;
}
@Override
protected List<ConfigIssue> init() {
List<ConfigIssue> issues = super.init();
errorRecordHandler = new DefaultErrorRecordHandler(getContext());
if (conf.postProcessing == PostProcessingOptions.ARCHIVE) {
if (conf.archiveDir == null || conf.archiveDir.isEmpty()) {
issues.add(
getContext().createConfigIssue(
Groups.POST_PROCESSING.name(),
FILE_TAIL_CONF_PREFIX + "archiveDir",
Errors.TAIL_05
)
);
} else {
File dir = new File(conf.archiveDir);
if (!dir.exists()) {
issues.add(
getContext().createConfigIssue(
Groups.POST_PROCESSING.name(),
FILE_TAIL_CONF_PREFIX + "archiveDir",
Errors.TAIL_06
)
);
}
if (!dir.isDirectory()) {
issues.add(
getContext().createConfigIssue(
Groups.POST_PROCESSING.name(),
FILE_TAIL_CONF_PREFIX + "archiveDir",
Errors.TAIL_07
)
);
}
}
}
if (conf.fileInfos.isEmpty()) {
issues.add(
getContext().createConfigIssue(
Groups.FILES.name(),
FILE_TAIL_CONF_PREFIX + "fileInfos",
Errors.TAIL_01
)
);
} else {
Set<String> fileKeys = new LinkedHashSet<>();
List<MultiFileInfo> dirInfos = new ArrayList<>();
for (FileInfo fileInfo : conf.fileInfos) {
if (validateFileInfo(fileInfo, issues)) {
MultiFileInfo directoryInfo = new MultiFileInfo(
fileInfo.tag,
fileInfo.fileFullPath,
fileInfo.fileRollMode,
fileInfo.patternForToken,
fileInfo.firstFile,
conf.multiLineMainPattern
);
dirInfos.add(directoryInfo);
if (fileKeys.contains(directoryInfo.getFileKey())) {
issues.add(getContext().createConfigIssue(
Groups.FILES.name(),
FILE_TAIL_CONF_PREFIX + "fileInfos",
Errors.TAIL_04,
fileInfo.fileFullPath
));
}
fileKeys.add(directoryInfo.getFileKey());
}
}
if (!dirInfos.isEmpty()) {
try {
int maxLineLength = Integer.MAX_VALUE;
if (conf.dataFormat == DataFormat.TEXT) {
maxLineLength = conf.dataFormatConfig.textMaxLineLen;
} else if (conf.dataFormat == DataFormat.JSON) {
maxLineLength = conf.dataFormatConfig.jsonMaxObjectLen;
} else if (conf.dataFormat == DataFormat.LOG) {
maxLineLength = conf.dataFormatConfig.logMaxObjectLen;
}
int scanIntervalSecs = (getContext().isPreview()) ? 0 : this.scanIntervalSecs;
multiDirReader = new MultiFileReader(
dirInfos,
Charset.forName(conf.dataFormatConfig.charset),
maxLineLength,
conf.postProcessing,
conf.archiveDir,
true,
scanIntervalSecs,
conf.allowLateDirectories,
getContext().isPreview()
);
} catch (IOException ex) {
issues.add(
getContext().createConfigIssue(
Groups.FILES.name(),
FILE_TAIL_CONF_PREFIX + "fileInfos",
Errors.TAIL_02,
ex.toString(),
ex
)
);
}
}
}
conf.dataFormatConfig.init(
getContext(),
conf.dataFormat,
Groups.FILES.name(),
FILE_TAIL_DATA_FORMAT_CONFIG_PREFIX,
!conf.multiLineMainPattern.isEmpty(),
issues
);
parserFactory = conf.dataFormatConfig.getParserFactory();
maxWaitTimeMillis = conf.maxWaitTimeSecs * 1000;
outputLane = getContext().getOutputLanes().get(0);
metadataLane = getContext().getOutputLanes().get(1);
offsetLagMetric = new HashMap<String, Counter>();
pendingFilesMetric = new HashMap<String, Counter>();
return issues;
}
@Override
public void destroy() {
IOUtils.closeQuietly(multiDirReader);
super.destroy();
}
private final static ObjectMapper OBJECT_MAPPER = new ObjectMapper();
@SuppressWarnings("unchecked")
private Map<String, String> deserializeOffsetMap(String lastSourceOffset) throws StageException {
Map<String, String> map;
if (lastSourceOffset == null) {
map = new HashMap<>();
} else {
try {
map = OBJECT_MAPPER.readValue(lastSourceOffset, Map.class);
} catch (IOException ex) {
throw new StageException(Errors.TAIL_10, ex.toString(), ex);
}
}
return map;
}
private String serializeOffsetMap(Map<String, String> map) throws StageException {
try {
return OBJECT_MAPPER.writeValueAsString(map);
} catch (IOException ex) {
throw new StageException(Errors.TAIL_13, ex.toString(), ex);
}
}
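// Illustrative sketch only (not part of the original stage): the two methods above round-trip the
// per-file offsets through a JSON string so they can be stored in lastSourceOffset. A minimal
// demonstration of that round trip (the file key and offset value below are hypothetical):
private Map<String, String> offsetRoundTripExample() throws StageException {
Map<String, String> offsets = new HashMap<>();
offsets.put("/var/log/app.log", "42"); // hypothetical file key and byte offset
String json = serializeOffsetMap(offsets); // e.g. {"/var/log/app.log":"42"}
return deserializeOffsetMap(json); // parses the JSON back into the same map
}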
// whether the maximum wait time has already elapsed
private boolean isTimeout(long startTime) {
return (System.currentTimeMillis() - startTime) > maxWaitTimeMillis;
}
// remaining time till timeout, return zero if already in timeout
private long getRemainingWaitTime(long startTime) {
long remaining = maxWaitTimeMillis - (System.currentTimeMillis() - startTime);
return (remaining > 0) ? remaining : 0;
}
/*
When we start with a file (empty or not) the file offset is zero.
If the file is a rolled file, the file will be EOF immediately triggering a close of the reader and setting the
offset to Long.MAX_VALUE (this happens in the MultiDirectoryReader class). This is the signal that in the next
read a directory scan should be triggered to get the next rolled file or the live file if we were scanning the last
rolled file.
If the file you are starting is the live file, we don't get an EOF as we expect data to be appended. We just return
null chunks while there is no data. If the file is rolled we'll detect that and then do what is described in the
previous paragraph.
When offset for file is "" it means we never processed things in the directory, at that point we start from the
first file (according to the defined order) in the directory, or if a 'first file' has been set in the configuration,
we start from that file.
We encode in lastSourceOffset the current file and offset from all directories in JSON.
*/
@Override
public String produce(String lastSourceOffset, int maxBatchSize, BatchMaker batchMaker) throws StageException {
int recordCounter = 0;
long startTime = System.currentTimeMillis();
maxBatchSize = Math.min(conf.batchSize, maxBatchSize);
// deserializing offsets of all directories
Map<String, String> offsetMap = deserializeOffsetMap(lastSourceOffset);
boolean offsetSet = false;
while (!offsetSet) {
try {
multiDirReader.setOffsets(offsetMap);
offsetSet = true;
} catch (IOException ex) {
LOG.warn("Error while creating reading previous offset: {}", ex.toString(), ex);
multiDirReader.purge();
}
}
while (recordCounter < maxBatchSize && !isTimeout(startTime)) {
LiveFileChunk chunk = multiDirReader.next(getRemainingWaitTime(startTime));
if (chunk != null) {
String tag = chunk.getTag();
tag = (tag != null && tag.isEmpty()) ? null : tag;
String liveFileStr = chunk.getFile().serialize();
List<FileLine> lines = chunk.getLines();
int truncatedLine = chunk.isTruncated() ? lines.size()-1 : -1;
for (int i = 0; i < lines.size(); i++) {
FileLine line = lines.get(i);
String sourceId = liveFileStr + "::" + line.getFileOffset();
try (DataParser parser = parserFactory.getParser(sourceId, line.getText())) {
if(i == truncatedLine) {
//set truncated
parser.setTruncated();
}
Record record = parser.parse();
if (record != null) {
if (tag != null) {
record.getHeader().setAttribute("tag", tag);
}
record.getHeader().setAttribute(HeaderAttributeConstants.FILE, chunk.getFile().getPath().toString());
record.getHeader().setAttribute(HeaderAttributeConstants.FILE_NAME, chunk.getFile().getPath().getFileName().toString());
record.getHeader().setAttribute(HeaderAttributeConstants.OFFSET, String.valueOf(line.getFileOffset()));
record.getHeader().setAttribute(
HeaderAttributeConstants.LAST_MODIFIED_TIME,
String.valueOf(Files.getLastModifiedTime(chunk.getFile().getPath()).toMillis())
);
batchMaker.addRecord(record, outputLane);
recordCounter++;
}
} catch (IOException | DataParserException ex) {
errorRecordHandler.onError(Errors.TAIL_12, sourceId, ex.toString(), ex);
}
}
}
}
boolean metadataGenerationFailure = false;
Date now = new Date(startTime);
for (FileEvent event : multiDirReader.getEvents()) {
try {
LiveFile file = event.getFile().refresh();
Record metadataRecord = getContext().createRecord("");
Map<String, Field> map = new HashMap<>();
map.put("fileName", Field.create(file.getPath().toString()));
map.put("inode", Field.create(file.getINode()));
map.put("time", Field.createDate(now));
map.put("event", Field.create((event.getAction().name())));
metadataRecord.set(Field.create(map));
batchMaker.addRecord(metadataRecord, metadataLane);
// We're also sending the same information on event lane
String eventRecordSourceId =
Utils.format("event:{}:{}:{}", event.getAction().name(), 1, file.getPath().toString());
EventRecord eventRecord = getContext().createEventRecord(event.getAction().name(), 1, eventRecordSourceId);
eventRecord.set(Field.create(map));
getContext().toEvent(eventRecord);
} catch (IOException ex) {
LOG.warn("Error while creating metadata records: {}", ex.toString(), ex);
metadataGenerationFailure = true;
}
}
if (metadataGenerationFailure) {
multiDirReader.purge();
}
boolean offsetExtracted = false;
while (!offsetExtracted) {
try {
offsetMap = multiDirReader.getOffsets();
offsetExtracted = true;
} catch (IOException ex) {
LOG.warn("Error while creating creating new offset: {}", ex.toString(), ex);
multiDirReader.purge();
}
}
//Calculate Offset lag Metric.
calculateOffsetLagMetric(offsetMap);
//Calculate Pending Files Metric
calculatePendingFilesMetric();
// serializing offsets of all directories
return serializeOffsetMap(offsetMap);
}
private void calibrateMetric(Map<String, Long> resultMap, Map<String, Counter> metricMap, String metricPrefix) {
for (Map.Entry<String, Long> mapEntry : resultMap.entrySet()) {
String fileKey = mapEntry.getKey();
Long currValue = mapEntry.getValue();
Counter counter = metricMap.get(fileKey);
if (counter == null) {
counter = getContext().createCounter(metricPrefix + "." + fileKey);
}
//Counter only supports inc/dec by a number from an existing count value.
counter.inc(currValue - counter.getCount());
metricMap.put(fileKey, counter);
}
}
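// Illustrative note (not part of the original stage): a Codahale Counter can only be moved by a
// delta, so calibrateMetric() above pins each counter to an absolute value v by incrementing it
// with (v - current count). A minimal sketch of that identity, for any Counter instance:
private static void setCounterToAbsoluteValue(Counter counter, long absoluteValue) {
// after this call, counter.getCount() == absoluteValue regardless of its previous value
counter.inc(absoluteValue - counter.getCount());
}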
private void calculateOffsetLagMetric(Map<String, String> offsetMap) {
try {
calibrateMetric(multiDirReader.getOffsetsLag(offsetMap), offsetLagMetric, OFFSETS_LAG);
} catch (IOException ex) {
LOG.warn("Error while Calculating Offset Lag {}", ex.toString(), ex);
}
}
private void calculatePendingFilesMetric() {
try {
calibrateMetric(multiDirReader.getPendingFiles(), pendingFilesMetric, PENDING_FILES);
} catch (IOException ex) {
LOG.warn("Error while Calculating Pending Files Metric {}", ex.toString(), ex);
}
}
}
|
|
/*******************************************************************************
* Copyright (c) 2006-2007 University of Toronto Database Group
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the Software
* is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
* OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*******************************************************************************/
package experiment;
import java.io.File;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Vector;
import java.util.Set;
import java.util.Random;
import java.util.Arrays;
import java.util.Map;
import java.util.TreeMap;
import dbdriver.MySqlDB;
import evaluation.AccuracyMeasure;
import simfunctions.EvaluateSignThread;
import simfunctions.Preprocess;
import simfunctions.WeightedJaccardBM25wPartEnum;
import utility.Config;
import utility.Util;
public class runExpsWSign {
public static HashMap<Integer, Integer> perm = new HashMap<Integer, Integer>();
public static int queryTokenLength = 2;
public static String getQuery(int tid, String tableName) {
String resultQuery = "";
String query = "";
Config config = new Config();
MySqlDB mysqlDB = new MySqlDB(config.returnURL(), config.user, config.passwd);
try {
query = "SELECT " + config.preprocessingColumn + ", id FROM " + config.dbName + "."
+ tableName + " T WHERE T.tid = " + tid;
//System.out.println("Executing " + query);
ResultSet rs = mysqlDB.executeQuery(query);
rs.next();
resultQuery = rs.getString(config.preprocessingColumn);
mysqlDB.close();
} catch (Exception e) {
System.err.println("Can't generate the query");
e.printStackTrace();
}
return resultQuery;
}
public static HashSet<Integer> getAllTidsHavingIdSameAs(int tid, String tableName) {
HashSet<Integer> tidsHavingThisID = new HashSet<Integer>();
Config config = new Config();
MySqlDB mysqlDB = new MySqlDB(config.returnURL(), config.user, config.passwd);
try {
String query = "SELECT tid FROM " + config.dbName + "." + tableName + " where id=" +
"(SELECT id FROM " + config.dbName + "." + tableName + " t where t.tid= " + tid +")";
//System.out.println("Executing " + query);
ResultSet rs = mysqlDB.executeQuery(query);
if (rs != null) {
while (rs.next()) {
tidsHavingThisID.add(rs.getInt("tid"));
}
}
mysqlDB.close();
} catch (Exception e) {
System.err.println("Can't run query");
e.printStackTrace();
}
return tidsHavingThisID;
}
// The sortOrder defines the ordering for the tuples having similar scores
public static int[] generateBooleanList(HashSet<Integer> actualResult, List<IdScore> scoreList, int sortOrder) {
int[] booleanList = new int[scoreList.size()];
int booleanListCounter = 0;
double oldScore = 0, newScore = 0;
ArrayList<Integer> tempBooleanList = new ArrayList<Integer>();
// For the first element
newScore = scoreList.get(0).score;
oldScore = scoreList.get(0).score;
if (actualResult.contains(scoreList.get(0).id + 1)) {
tempBooleanList.add(1);
Util.printlnDebug("Got match at position: "+1);
} else {
tempBooleanList.add(0);
}
for (int i = 1; i < scoreList.size(); i++) {
newScore = scoreList.get(i).score;
if (newScore != oldScore) {
// sort the old list and set the values in the actual
// booleanList
Collections.sort(tempBooleanList);
if (sortOrder != 0) {
Collections.reverse(tempBooleanList);
}
for (int k = 0; k < tempBooleanList.size(); k++) {
booleanList[booleanListCounter++] = tempBooleanList.get(k);
}
tempBooleanList = new ArrayList<Integer>();
oldScore = newScore;
if (actualResult.contains(scoreList.get(i).id + 1)) {
tempBooleanList.add(1);
Util.printlnDebug("Got match at position: "+ (i+1));
} else {
tempBooleanList.add(0);
}
} else {
if (actualResult.contains(scoreList.get(i).id + 1)) {
tempBooleanList.add(1);
Util.printlnDebug("Got match at position: "+ (i+1));
} else {
tempBooleanList.add(0);
}
}
}
Collections.sort(tempBooleanList);
if (sortOrder != 0) {
Collections.reverse(tempBooleanList);
}
for (int k = 0; k < tempBooleanList.size(); k++) {
booleanList[booleanListCounter++] = tempBooleanList.get(k);
}
// (the sort and copy above flush the last block of tempBooleanList)
return booleanList;
}
public int hash(String str){
int h = Integer.valueOf(str);
return h;
}
public static BitSet convertToBitSet(Set<String> stringSet){
BitSet output = new BitSet();
int c1,c2;
HashMap<Integer,Integer> pr = permutation((1<<7) -1, 9573);
for (String qgram : stringSet) {
c1 = qgram.charAt(1);
c2 = qgram.charAt(0);
c1 = pr.get(c1);
c2 = pr.get(c2);
output.set((c1 << 7) | c2);
}
return output;
}
public static Set<String> convertToStringSet(BitSet bitset){
Set<String> output = new HashSet<String>();
int i = bitset.nextSetBit(0);
while (i != -1){
char c1 = (char)(127 & i);
char c2 = (char)(((127 << 7) & i) >> 7);
String qgram = new String();
qgram = qgram + c1;
qgram = qgram + c2;
output.add(qgram);
i = bitset.nextSetBit(i+1);
}
return output;
}
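// Illustrative sketch only (not part of the original experiments): convertToBitSet above packs each
// two-character q-gram into one bit index, with the permuted second character in the high 7 bits and
// the permuted first character in the low 7 bits, so every index stays below 2^14. (Note that
// convertToStringSet reads the packed, already-permuted codes back without applying the inverse
// permutation.) A hypothetical helper showing just the packing, given the same permutation map:
public static int qgramBitIndex(String qgram, HashMap<Integer, Integer> permuted) {
int hi = permuted.get((int) qgram.charAt(1)); // second character -> high 7 bits
int lo = permuted.get((int) qgram.charAt(0)); // first character -> low 7 bits
return (hi << 7) | lo;
}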
public static HashMap<Integer, Integer> permutation(int n){
HashMap<Integer, Integer> output = new HashMap<Integer, Integer>();
BitSet allnums = new BitSet();
allnums.set(1, n+1, true);
Random rand = new Random(2353);
int k=1;
while (!allnums.isEmpty()){
int i = rand.nextInt(n);
//System.out.print(k + " " + i + " ");
while (!allnums.get(i)) {
if (i > n) i=0;
i++;
}
allnums.clear(i);
//System.out.println(i);
output.put(k, i);
//System.out.println(output.get(k));
k++;
}
return output;
}
public static HashMap<Integer, Integer> permutation(int n, int rnd){
HashMap<Integer, Integer> output = new HashMap<Integer, Integer>();
BitSet allnums = new BitSet();
allnums.set(1, n+1, true);
Random rand = new Random(rnd);
int k=1;
while (!allnums.isEmpty()){
int i = rand.nextInt(n);
//System.out.print(k + " " + i + " ");
while (!allnums.get(i)) {
if (i > n) i=0;
i++;
}
allnums.clear(i);
//System.out.println(i);
output.put(k, i);
//System.out.println(output.get(k));
k++;
}
return output;
}
public static int b(int i, int j, int n1, int n2, int N){
int n = N + ( (N % (n1*n2) == 0) ? 0 : ((n1*n2)-(N%(n1*n2))) ); // make N divisible by n1*n2
return n * ( n2*(i-1)+j-1 ) / (n1*n2) + 1; // ?? +1
}
public static int e(int i, int j, int n1, int n2, int N){
int n = N + ( (N % (n1*n2) == 0) ? 0 : ((n1*n2)-(N%(n1*n2))) ); // make N divisible by n1*n2
return n * ( n2*(i-1)+j ) / (n1*n2) + 1; // ?? +1
}
public static BitSet p(int i, int j, int n1, int n2, int N){
BitSet output = new BitSet();
//System.out.println("i:" + i + " j:" + j + " e:"+ e(i,j,n1,n2,N));
for (int t = b(i,j,n1,n2,N); t< e(i,j,n1,n2,N); t++){
output.set(perm.get(t));
}
return output;
}
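// Illustrative note (not part of the original experiments): b(i,j,...) and e(i,j,...) above split the
// (rounded-up) dimension range into n1*n2 equal blocks, where block (i,j) covers positions
// [b(i,j), e(i,j)). For example, with n1 = 2, n2 = 3 and N = 12 each of the six blocks spans two
// positions: block (1,1) is [1,3), block (1,2) is [3,5), ..., block (2,3) is [11,13).
// A minimal check of that layout, assuming the methods above:
public static void printBlockLayoutExample() {
int n1 = 2, n2 = 3, N = 12;
for (int i = 1; i <= n1; i++) {
for (int j = 1; j <= n2; j++) {
System.out.println("block (" + i + "," + j + ") = [" + b(i, j, n1, n2, N) + ", " + e(i, j, n1, n2, N) + ")");
}
}
}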
public static Vector<Vector<Integer>> subsets(Vector<Integer> set, int size){
Vector<Vector<Integer>> output = new Vector<Vector<Integer>>();
//System.out.println();
if (size == 1) {
for (Integer vi: set){
Vector<Integer> v = new Vector<Integer>();
v.add(vi);
output.add(v);
}
}
else {
for (int i = 1; i <= set.size()-(size-1); i++){
Vector<Integer> set2 = new Vector<Integer>();
for (int j = i+1; j <= set.size(); j++) set2.add(set.elementAt(j-1));
for (Vector<Integer> sset : subsets(set2, size-1)){
Vector<Integer> v = new Vector<Integer>();
v.add(set.elementAt(i-1));
for (Integer vi:sset) v.add(vi);
output.add(v);
}
}
}
return output;
}
public static void tst(int i, Integer j, Vector<String> vStr, HashMap<Integer, Double> hm){
i = 1;
j = 0;
vStr.add("ey baba!");
hm.put(1, 2.2);
}
class pair {
private float x;
private float y;
public pair(float x, float y) {
this.x = x;
this.y = y;
}
public float getX() { return x; }
public float getY() { return y; }
public void setX(float x) { this.x = x; }
public void setY(float y) { this.y = y; }
}
public static void main(String[] args) {
/*
Config config = new Config();
MySqlDB mysqlDB = new MySqlDB(config.returnURL(), config.user, config.passwd);
String resultTable = "signresults";
try {
//String query = "DROP TABLE IF EXISTS " + config.dbName + "." + resultTable;
//mysqlDB.executeUpdate(query);
String query = "CREATE TABLE IF NOT EXISTS " + config.dbName + "." + resultTable
+ " (tbl varchar(10), simfunc varchar(50), thr double, pr double, re double, f1 double, " +
" PRIMARY KEY (tbl, simfunc, thr) )";
System.out.println(query);
mysqlDB.executeUpdate(query);
mysqlDB.close();
} catch (Exception e) {
System.err.println("DB Error");
e.printStackTrace();
}
*/
/*
Preprocess bm25WeightedJaccardSimHash = new WeightedJaccardBM25wSimhash();
Preprocess measure = bm25WeightedJaccardSimHash;
*/
Preprocess bm25WeightedJaccardPartEnum = new WeightedJaccardBM25wPartEnum();
Preprocess measure = bm25WeightedJaccardPartEnum;
/*
Preprocess bm25WeightedJaccardMinHash = new WeightedJaccardBM25wMinhash();
Preprocess measure = bm25WeightedJaccardMinHash;
*/
Vector<String> tables = new Vector<String>();
tables.add("cu1");
/*
tables.add("cu2"); tables.add("cu3");
tables.add("cu4"); tables.add("cu5"); tables.add("cu6");
tables.add("cu7"); tables.add("cu8");
*/
Vector<Thread> threads = new Vector<Thread>();
for (String table : tables){
//threads.add(new SignatureGenThread(table,measure,2,2,0));
//threads.add(new SignatureGenThread(table,measure,2,5,0));
//threads.add(new SignatureGenThread(table,measure,4,5,0));
//threads.add(new SignatureGenThread(table,measure,3,7,0));
threads.add(new SignatureGenThread(table,measure,11,2,6));
}
//threads.add(new RunSimilarityJoinThread("cu8",measure,0.1));
for (Thread thread: threads){
thread.start();
}
for (Thread thread: threads){
try{
thread.join();
} catch (Exception e) {
System.out.println("Error");
}
}
threads = new Vector<Thread>();
/* threads.add(new RunClusteringThread("cu1",hmm));
//threads.add(new RunClusteringThread("cu1",measure,0.0));
threads.add(new RunClusteringThread("cu1",measure,0.2));
threads.add(new RunClusteringThread("cu1",measure,0.4));
threads.add(new RunClusteringThread("cu1",measure,0.6));
threads.add(new RunClusteringThread("cu1",measure,0.8));
/*
threads.add(new RunClusteringThread("cu2",measure,0.6));
threads.add(new RunClusteringThread("cu3",measure,0.6));
threads.add(new RunClusteringThread("cu4",measure,0.6));
threads.add(new RunClusteringThread("cu5",measure,0.6));
threads.add(new RunClusteringThread("cu6",measure,0.6));
threads.add(new RunClusteringThread("cu7",measure,0.6));
threads.add(new RunClusteringThread("cu8",measure,0.6));
* /
threads.add(new EvaluateSJoinThread("cu8",measure,0.01));
threads.add(new EvaluateSJoinThread("cu8",measure,0.15));
threads.add(new EvaluateSJoinThread("cu8",measure,0.20));
threads.add(new EvaluateSJoinThread("cu8",measure,0.25)); */
//threads.add(new EvaluateSJoinThread("cu8",measure,0.30));
//threads.add(new EvaluateSJoinThread("cu1",measure,0.4));
//threads.add(new EvaluateSJoinThread("cu1",measure,0.6));
//threads.add(new EvaluateSJoinThread("cu1",measure,0.8));
/*
*
*
*/
for (String table : tables){
/*
threads.add(new EvaluateSignThread(table,measure,2,2,0));
threads.add(new EvaluateSignThread(table,measure,2,5,0));
threads.add(new EvaluateSignThread(table,measure,4,5,0));
threads.add(new EvaluateSignThread(table,measure,3,7,0));
*/
threads.add(new EvaluateSignThread(table,measure,5,2,3));
// threads.add(new EvaluateSignThread(table,measure,11,2,6));
}
for (Thread thread: threads){
thread.start();
try{
thread.join();
} catch (Exception e) {
System.out.println("Error");
}
}
/*
Vector<Integer> v = new Vector<Integer>();
v.add(1);
v.add(2);
v.add(3);
v.add(4);
System.out.println(v);
boolean[][] correctOrder = new boolean[v.size()][v.size()];
for (int i=0; i<v.size(); i++)
for (int j=0; j<v.size(); j++)
correctOrder[i][j]=false;
int size = 0;
for (int i=0; i<v.size(); i++){
for (int j=i+1; j<v.size(); j++){
size++;
correctOrder[v.get(i)-1][v.get(j)-1] = true;
System.out.println(v.get(i) + ", " + v.get(j));
}
}
Vector<Integer> v2 = new Vector<Integer>();
v2.add(4);
v2.add(3);
v2.add(1);
v2.add(2);
//Collections.sort(v2);
//v2 = new Vector<Integer>();
System.out.println(v2);
/*
HashMap<Integer, Double> probs = new HashMap<Integer, Double>();
probs.put(1, 0.5);
probs.put(2, 0.2);
probs.put(3, 0.2);
probs.put(4, 0.6);
List mapValues = new ArrayList(probs.values());
Vector<Double> sortedProbs = new Vector<Double>();
for (Double prob:probs.values()){
sortedProbs.add(prob);
}
Collections.sort(sortedProbs);
for (Double prob:sortedProbs){
//System.out.println(prob);
int m = mapValues.indexOf(prob);
System.out.println(m+1);
mapValues.set(m, -1);
}
* /
int correct = 0;
for (int i=0; i<v2.size(); i++){
for (int j=i+1; j<v2.size(); j++){
if (correctOrder[v2.get(i)-1][v2.get(j)-1]) {
correct++;
System.out.println(v2.get(i) + ", " + v2.get(j));
}
}
}
System.out.println("Percentage: " + correct + "/" + size);
/*
int i = -1;
Integer j = -1;
Vector<String> vStr = new Vector<String>();
HashMap<Integer, Double> hm = new HashMap<Integer, Double>();
System.out.println(i + " " + j + " " + vStr + " " + hm);
tst(i,j,vStr,hm);
System.out.println(i + " " + j + " " + vStr + " " + hm);
/*
String str1 = "ab";
String str2 = "$$";
char[] chars = str1.toCharArray();
int h1 = str1.charAt (1) << 7 | str1.charAt (0);
int h2 = str2.charAt (0) << 7 | str2.charAt (1);
System.out.println("hash(" + str1 + ") = " + (h1) );
System.out.println("hash(" + str1 + ") = " + (h2) );
BitSet b = new BitSet();
long n = 327544767;
//for (int i=0; i<32; i++)
// System.out.println("bit(" + i + ") = " + ( ((n & (1 << i)) >>> i) == 1 ? 1 : 0 ) );
long one = 1;
long n2 = (one << 40);
System.out.println(n2);
for (int i=63; i>=0; i--)
System.out.print(( ((n2 & (one << i)) >>> i) == 1 ? 1 : 0 ) );
*/
/*
Vector<Integer> m1 = new Vector<Integer>(2);
m1.add(1);
m1.add(2);
m1.add(3);
Vector<Integer> m2 = new Vector<Integer>();
m2.add(1);
m2.add(2);
m2.add(3);
System.out.println(m1.equals(m2));
* /
Set<String> qgrams = new HashSet<String>();
qgrams.add("ab");
qgrams.add("df");
qgrams.add("et");
qgrams.add("se");
qgrams.add("df");
qgrams.add("gh");
System.out.println(qgrams.toString());
BitSet b = convertToBitSet(qgrams);
System.out.println(b.toString());
qgrams = convertToStringSet(b);
System.out.println(qgrams.toString());
/*
//System.out.println(1 << 14);
//HashMap<Integer, Integer> perm = permutation(1 << 14);
//System.out.println(perm.toString());
BitSet v = new BitSet();
int N = 6;
v.set(2); v.set(4); v.set(5);
int k=3, n1=2, n2=3;
int k2 = (k+1)/n1 - 1;
//HashMap<Integer, Integer> perm = permutation(6);
//for (int i=0; i<=N; i++) perm.put(i, i);
*
* /
perm = permutation(6);
System.out.println(perm);
Vector<Integer> v = new Vector<Integer>();
v.add(2); v.add(7); v.add(9);
System.out.println(subsets(v,2));
/*
//BitSet[] p = new BitSet[65];
for (int i=1; i<=n1; i++){
BitSet bb1 = p(i,1,n1,n2,N);
BitSet bb2 = p(i,2,n1,n2,N);
bb1.or(bb2);
System.out.println(bb1);
System.out.println(p(i,3,n1,n2,N));
}
* /
BitSet test = new BitSet();
test.set(1, 7, true);
//Vector<Integer> vv = new Vector<Integer>();
//vv.add(1); vv.add(2); vv.add(3); vv.add(4); vv.add(5); vv.add(6);
//System.out.println(subsets(vv, 2));
System.out.println(v.toString());
Vector<Integer> vv = new Vector<Integer>();
for (int i=1; i<=n2; i++) vv.add(i);
for (int i=1; i<=n1; i++){
for (Vector<Integer> subset:subsets(vv, n2-k2)){
BitSet P = new BitSet();
for (Integer j: subset){
P.or(p(i,j,n1,n2,N));
}
HashMap<BitSet, BitSet> sign = new HashMap<BitSet, BitSet>();
BitSet proj = new BitSet();
proj = (BitSet) v.clone();
proj.and(P);
System.out.println("<" + proj + "," + P + ">");
sign.put(proj, P);
Random rand = new Random();
Long hash= new Long(0);
int t = P.nextSetBit(0);
while (t!=-1){
rand = new Random(t);
hash += rand.nextLong();
t=P.nextSetBit(t+1);
}
t = proj.nextSetBit(0);
while (t!=-1){
rand = new Random(t);
hash += rand.nextLong();
t=proj.nextSetBit(t+1);
}
System.out.println(hash);
//System.out.println((int)(P.hashCode()+proj.hashCode()));
//System.out.println(P.hashCode());System.out.println(proj.hashCode());
//System.out.println(sign.hashCode());
}
}
*/
}
}
|
|
package wcb;
import java.awt.*;
import robocode.RobotDeathEvent;
import robocode.ScannedRobotEvent;
/*
* This robot extends the data collection/surveyor robot DataMiner.
* Circular targeting fits a circle to the target's recent positions and projects the target forward along that path.
* It uses the perceived change in position/velocity rather than the instantaneous measured change, so it captures net movement and disregards wide swings.
*/
public class WcbCircularTarget extends WcbDataMiner //extends AdvancedRobot
{ //these variables and methods are public
// Reasoning: trying to make sure that this is modular code, so that circular targeting could be used by any bot in the package, not just extensions
//variables set here, extends the list of DataMiner
public static double radius[][] = new double[maxNumRobots][maxNumTurns];
public static double centerX[][] = new double[maxNumRobots][maxNumTurns]; //center of circle as calc'd
public static double centerY[][] = new double[maxNumRobots][maxNumTurns]; //center of circle as calc'd
public static double projectX[][] = new double[maxNumRobots][maxNumTurns]; //projected X location
public static double projectY[][] = new double[maxNumRobots][maxNumTurns]; //projected Y location
public static double projectEightX[][] = new double[maxNumRobots][maxNumTurns]; //projects forward 1 full swing of radar, to test for accuracy of targeting
public static double projectEightY[][] = new double[maxNumRobots][maxNumTurns]; //ditto
public static double TrueGunBearing[][] = new double[maxNumRobots][maxNumTurns]; //most recent true gun bearing for robot to want to point at
public static String robotLockName = ""; //default lock name
public static int LorR = 1; //used to set whether the scanned, circular projected robot is turning left or right (1 = left, -1 = right)
public static double bulletPower = 1;
//method 2 circle finding variables
public static double mX1 = 0; //midpoint of line coordinate
public static double mY1 = 0; //midpoint of line coordinate
public static double s1 = 0; //slope
public static double mX2 = 0; //midpoint of line coordinate
public static double mY2 = 0; //midpoint of line coordinate
public static double s2 = 0; //slope
public static double intersectX = 0; //intersection of perpendicular bisectors of slope
public static double intersectY = 0; //intersection of perpendicular bisectors of slope
public static double radial = 0; //radius of circle, mostly for onPaint
public static double effectiveHeading; //projected net heading by circ-target
public static double eightHeading; //projected net heading by circ-target for next 8 ticks
public static double angle;
public static double counter = 1;
public void run()
{
while (true)
{
setTurnRadarRight(360);
execute();
}
//see Oscill/Oscar, run method implemented there
}
public void startCase()
{
super.startCase(); //call all prior start case
// i is the index of each robot, k is the index over the stored time stamps
for(int i = 0;i<maxNumRobots;i++)
{
for(int k = 0;k<maxNumTurns;k++)
{ //more array default values (see more in DataMiner)
radius[i][k] = 4*i;
centerX[i][k] = 5*i;
centerY[i][k] = 6*i;
}
}
}
public void onScannedRobot(ScannedRobotEvent vive)
{
super.onScannedRobot(vive); //store data scan from DataMiner
for (int i = 0; i<maxNumRobots; i++)
{
if (robotLockName.equals(vive.getName()) || robotLockName.equals(""))
{ // if there is no lock name or the robot is the chosen one, continue, so it doesn't try to endlessly switch between targets
if (vive.getName().equals(ScannedNameSet[i]))
{
fitCircle(X[i][0], Y[i][0], X[i][1], Y[i][1], X[i][2], Y[i][2], i); //calc and project circle
setGunBearing(this.getX(), this.getY(), projectX[i][0], projectY[i][0], i, vive); //aim the gun
if (Math.abs(this.getGunHeading()-TrueGunBearing[i][0])<=5 && this.getGunHeat()<=.01) //if the gun is close and ready to fire
{
//FIRE!!
fire(.5);
}
}
}
}
}
public double turnToHeading(double sH) //returns the number of degrees to turn right to a desired heading
{ //returns the change in heading needed for robot
//double suggestedHeading
double mH = this.getHeading(); //my Heading
double dH = mH - sH; //change in Heading needed
//filters, so that it never goes more than 360 to an angle
//the while loops were the infinite loop problem
if (Math.abs(dH)>360)
{
while (Math.abs(dH)>360)
{
if (dH >360)
{
dH = dH-360;
}
else if (dH<-360)
{
dH = dH+360;
}
else
{
break;
}
}
}
//filters, so that it never goes more than 180 to an angle
//the while loops were the infinite loop problem
if (Math.abs(dH)>180)
{
while (Math.abs(dH)>180)
{
if (dH >180)
{
dH = -(360-dH);
}
else if (dH<-180)
{
dH = -(360-dH);
}
else
{
break;
}
}
}
return dH;
}
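// Illustrative sketch only (not used by this bot): the filter loops above reduce a heading change
// into the -180..180 range and guard against the infinite-loop problem mentioned in the comments.
// A loop-free alternative is to reduce modulo 360 first and then fold once; kept separate here so
// the original behaviour is untouched:
public double normalizeRelativeHeading(double dH) {
dH = dH % 360; // now strictly between -360 and 360
if (dH > 180) dH = dH - 360; // fold down into -180..180
if (dH < -180) dH = dH + 360; // fold up into -180..180
return dH;
}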
public void fitCircle(double x1, double y1, double x2, double y2, double x3, double y3, int robotIndex)
{
double radium = 0; //radius
double centerH = 0; //X
double centerK = 0; //Y
advanceCircleArray(robotIndex); //prepare for data capture
if (linearTest(x1,y1,x2,y2,x3,y3)) //if the points are in a line, use linear projection
{
double bulletFlightTime = (distance[robotIndex][0])/(20-(3*1));
lineProjection(X[robotIndex][0], Y[robotIndex][0], direction[robotIndex][0], speed[robotIndex][0], bulletFlightTime, robotIndex);
}
else
{
mX1 = (x1+x2)/2; //find midpoint coordinates
mY1 = (y1+y2)/2;
s1 = -((x1-x2)/(y1-y2)); //find slope
mX2 = (x2+x3)/2; //repeat
mY2 = (y2+y3)/2;
s2 = -((x2-x3)/(y2-y3));
//center-point by intersecting perpendicular bisectors
intersectX = (s1*mX1-mY1-s2*mX2+mY2)/(s1-s2); // x=(-m1x1+y1+m2x2-y2)/(m2-m1)
intersectY = s1*(intersectX-mX1)+mY1; // y= m1*(x-x1)+y1
radial = Math.sqrt(Math.abs((intersectX-mX1)*(intersectX-mX1)+(intersectY-mY1)*(intersectY-mY1)));
radius[robotIndex][0] = radial;
centerX[robotIndex][0] = intersectX;
centerY[robotIndex][0] = intersectY;
//robot's circle found!!
double pi = Math.PI;
double avSpeed = 0;
if (getTime()<32)
{
avSpeed = (speed[robotIndex][0]); //if there isn't enough time for the robot to have lots of data to average, just use recent
}
else
{
avSpeed = (speed[robotIndex][0]+speed[robotIndex][1]+speed[robotIndex][2])/3; //take average speed
}
double sections = (2*pi*radial)/(avSpeed); //the number of sections of the circle the robot travels (total distance / covered distance)
LeftOrRight(robotIndex); //figure out clockwise or counterclockwise (the method name is misleading)
double projectTurnLeftperSection = (LorR*(360/sections)); //degrees per section
double bulletFlightTime = (distance[robotIndex][0])/(20-3*bulletPower); //time for bullet to reach target
double totalTurn = (projectTurnLeftperSection*bulletFlightTime); //degrees moved around circle in bullet flight time
//simplify total turn
if (totalTurn>=360) totalTurn = totalTurn-360;
if (totalTurn<0) totalTurn = totalTurn+360;
double eightTurn = (projectTurnLeftperSection*8);
if(LorR == -1)
{
effectiveHeading = LorR*(180-(totalTurn/2))+direction[robotIndex][0];
eightHeading = LorR*(180-(eightTurn/2))+direction[robotIndex][0];
}
else
{
effectiveHeading = LorR*((totalTurn/2))+direction[robotIndex][0];
eightHeading = LorR*((eightTurn/2))+direction[robotIndex][0];
}
double effectiveDistance = radial*Math.sqrt(3.0/4.0)*2*Math.sin(rad(totalTurn/2)); //note: the original Math.pow(3/4, 1/2) used integer division and always evaluated to 1
double eightDistance = radial*Math.sqrt(3.0/4.0)*2*Math.sin(rad(eightTurn/2));
double effectiveSpeed = effectiveDistance/bulletFlightTime;
double eightSpeed = eightDistance/8;
// end method 2
lineProjection(X[robotIndex][0], Y[robotIndex][0], effectiveHeading, effectiveSpeed, bulletFlightTime, robotIndex);
lineProjection(X[robotIndex][0], Y[robotIndex][0], eightHeading, eightSpeed, 8, robotIndex);
}
}
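// Illustrative sketch only (not called by the bot): fitCircle() above finds the circle through the
// target's last three positions by intersecting the perpendicular bisectors of two chords. A
// self-contained version of the same construction, with a worked check in the comment, might be:
// circleThroughThreePoints(0,0, 2,2, 4,0) -> center (2,0), radius 2.
public static double[] circleThroughThreePoints(double x1, double y1, double x2, double y2, double x3, double y3) {
//midpoints and perpendicular-bisector slopes (assumes y1 != y2 and y2 != y3, i.e. no horizontal chords)
double mX1 = (x1 + x2) / 2, mY1 = (y1 + y2) / 2, s1 = -((x1 - x2) / (y1 - y2));
double mX2 = (x2 + x3) / 2, mY2 = (y2 + y3) / 2, s2 = -((x2 - x3) / (y2 - y3));
double cx = (s1 * mX1 - mY1 - s2 * mX2 + mY2) / (s1 - s2);
double cy = s1 * (cx - mX1) + mY1;
double r = Math.sqrt((cx - x1) * (cx - x1) + (cy - y1) * (cy - y1));
return new double[]{cx, cy, r};
}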
public void onRobotDeath(RobotDeathEvent deadman)
{
robotLockName = "";
System.out.println("locked onto no robot where no robot has gone before");
}
public void lineProjection(double enemyX, double enemyY, double heading, double speed, double time, int robotIndex)
{
double eX = enemyX;
double eY = enemyY;
double deltaX = speed*time*Math.cos(rad(-(heading-90)));
double deltaY = speed*time*Math.sin(rad(-(heading-90)));
double projectedX = eX+deltaX;
double projectedY = eY+deltaY;
if (time == 8)
{
advanceProjectEightArray(robotIndex);
projectEightX[robotIndex][0] = projectedX;
projectEightY[robotIndex][0] = projectedY;
}
else
{
advanceProjectArray(robotIndex);
projectX[robotIndex][0] = projectedX;
projectY[robotIndex][0] = projectedY;
}
}
public void setGunBearing (double myX, double myY, double enemyX, double enemyY, int robotIndex, ScannedRobotEvent vive)
{
//calculates the angle to the other robot from self
double deltaX = projectX[robotIndex][0]-myX;
double deltaY = projectY[robotIndex][0]-myY;
if (deltaX<=0)
{
if (deltaY<=0)
{
angle = 180+(90-(deg(Math.atan(Math.abs(deltaY/deltaX)))));
}
else
{
angle = 180+(90+(deg(Math.atan(Math.abs(deltaY/deltaX)))));
}
}
else
{
if (deltaY<=0)
{
angle = 180-90+(deg(Math.atan(Math.abs(deltaY/deltaX))));
}
else
{
angle = 180-90-(deg(Math.atan(Math.abs(deltaY/deltaX))));
}
}
//System.out.println("angle! = "+angle);
double currentGun = this.getGunHeading();
if(currentGun>180) currentGun = currentGun-360;
double suggestedGunTurnLeft = currentGun-angle;
//reduction of the angle to within a full turn
int count =0;
while (Math.abs(suggestedGunTurnLeft)>360)
{
count = count+1;
if (suggestedGunTurnLeft>=360) suggestedGunTurnLeft = suggestedGunTurnLeft-360;
if (suggestedGunTurnLeft<=-360) suggestedGunTurnLeft = suggestedGunTurnLeft+360;
System.out.println("360count "+count);
if (count>4) break;
}
//reduction of the angle to no more than a half turn
count = 0;
while (Math.abs(suggestedGunTurnLeft)>180)
{
count = count+1;
if (suggestedGunTurnLeft>180) suggestedGunTurnLeft = suggestedGunTurnLeft-360; //subtract a full turn to get the equivalent angle
if (suggestedGunTurnLeft<-180) suggestedGunTurnLeft = 360+suggestedGunTurnLeft;
System.out.println("180count "+count);
if (count>4) break;
}
advanceGunBearingArray(robotIndex); //prepare to input data
TrueGunBearing[robotIndex][0] = suggestedGunTurnLeft+this.getGunHeading(); //set the desired heading
setTurnGunLeft(suggestedGunTurnLeft); //move gun to desired heading
}
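// Illustrative sketch only (not wired into the targeting code): the four quadrant cases above compute
// the clockwise bearing from north to the projected point. Math.atan2 collapses them into a single
// expression; a hypothetical equivalent, assuming Robocode's 0-degrees-is-north, clockwise convention:
public static double absoluteBearingTo(double fromX, double fromY, double toX, double toY) {
double bearing = Math.toDegrees(Math.atan2(toX - fromX, toY - fromY)); // -180..180, 0 = north
return (bearing < 0) ? bearing + 360 : bearing; // normalized into 0..360 like the angle variable above
}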
public void LeftOrRight(int robotIndex)
{ //determine clockwise or counterclockwise (the method name LeftOrRight is misleading)
if (X[robotIndex][0]!=X[robotIndex][1] && X[robotIndex][0]!=X[robotIndex][2])
{
//compares the difference in heading angle as time passes
double angle1 = direction[robotIndex][0];
double angle2 = direction[robotIndex][1];
double difference = (angle2-angle1);
if (difference<0) LorR = 1;
if (difference>0) LorR = -1;
if (difference>180) LorR = 1;
if (difference<-180) LorR = -1;
}
else
{
//no change: keep the previous turn direction
}
}
public boolean linearTest(double x1, double y1, double x2, double y2, double x3, double y3)
{
//false = non-linear
//check to see if a line is linear by same slope
if (y1!=y2 && y1!=y3)
{
double slope1 = ((x1-x2)/(y1-y2));
double slope2 = ((x1-x3)/(y1-y3));
double angle1 = deg(Math.atan(rad(slope1)));
double angle2 = deg(Math.atan(rad(slope2)));
double difference = Math.abs(angle1-angle2);
return (difference<.005);
}
else if (y1==y2 && y1==y3)
{
return true;
}
else
{
return false;
}
}
public void advanceCircleArray(int robotIndex)
{ //advance array in order to add more data
for (int k = maxNumTurns-1;k>0;k--)
{
radius[robotIndex][k]= radius[robotIndex][k-1];
centerX[robotIndex][k] = centerX[robotIndex][k-1];
centerY[robotIndex][k] = centerY[robotIndex][k-1];
}
}
public void advanceProjectArray(int robotIndex)
{ //advance array in order to add more data
for (int k = maxNumTurns-1;k>0;k--)
{
projectX[robotIndex][k]= projectX[robotIndex][k-1];
projectY[robotIndex][k] = projectY[robotIndex][k-1];
}
}
public void advanceProjectEightArray(int robotIndex)
{ //advance array in order to add more data
for (int k = maxNumTurns-1;k>0;k--)
{
projectEightX[robotIndex][k] = projectEightX[robotIndex][k-1];
projectEightY[robotIndex][k] = projectEightY[robotIndex][k-1];
}
}
public void advanceGunBearingArray(int robotIndex)
{ //advance array in order to add more data
for (int k = maxNumTurns-1;k>0;k--)
{
TrueGunBearing[robotIndex][k]= TrueGunBearing[robotIndex][k-1];
}
}
public void onPaint(Graphics2D painted)
{ //advance array in order to add more data
super.onPaint(painted);
int squareZise = 5;
for (int i = 0;i<maxNumRobots;i++)
{
if (!ScannedNameSet[i].equals("")) //accounts for blank space
{
//sets a unique color, hopefully
if (i<10)
{
painted.setColor(paintAssist[i]);
}
// draws a small box at last X,Y position
painted.fillRect((int) (X[i][0]), (int) (Y[i][0]), squareZise, squareZise);
// draws a circle at centerX,center Y with diameter 2*radius
painted.setColor(java.awt.Color.CYAN);
painted.drawOval((int)(intersectX-radial), (int)(intersectY-radial), (int) (2*radial), (int) (2*radial));
painted.setColor(java.awt.Color.ORANGE); //actual projection of location when bullet arrives
painted.fillRect((int) (projectX[i][0]), (int) (projectY[i][0]), squareZise, squareZise);
painted.drawLine((int) (X[i][0]), (int) (Y[i][0]), (int) (projectX[i][0]), (int) (projectY[i][0]));
painted.setColor(java.awt.Color.GREEN); //8 step projection (next radar, for accuracy of guess)
painted.fillRect((int) (projectEightX[i][0]), (int) (projectEightY[i][0]), squareZise, squareZise);
painted.drawLine((int) (X[i][0]), (int) (Y[i][0]), (int) (projectEightX[i][0]), (int) (projectEightY[i][0]));
painted.setColor((java.awt.Color.RED));
//draws a line to the projected target location
painted.drawLine((int) this.getX(), (int) this.getY(), (int) projectX[i][0], (int) projectY[i][0]); //draw my desired targetting line
}
}
}
}
|
|
/*
Derby - Class org.apache.derby.iapi.services.io.FileUtil
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.iapi.services.io;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.Array;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Field;
import org.apache.derby.io.StorageFactory;
import org.apache.derby.io.WritableStorageFactory;
import org.apache.derby.io.StorageFile;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.derby.iapi.reference.Property;
import org.apache.derby.iapi.services.info.JVMInfo;
import org.apache.derby.iapi.services.property.PropertyUtil;
import org.apache.derby.shared.common.sanity.SanityManager;
/**
A set of public static methods for dealing with File objects.
*/
public abstract class FileUtil {
private static final int BUFFER_SIZE = 4096*4;
/**
Remove a directory and all of its contents.
The results of executing File.delete() on a File object
that represents a directory seems to be platform
dependent. This method removes the directory
and all of its contents.
@return true if the complete directory was removed, false if it could not be.
If false is returned then some of the files in the directory may have been removed.
*/
public static boolean removeDirectory(File directory) {
// System.out.println("removeDirectory " + directory);
if (directory == null)
return false;
if (!directory.exists())
return true;
if (!directory.isDirectory())
return false;
String[] list = directory.list();
// Some JVMs return null for File.list() when the
// directory is empty.
if (list != null) {
for (int i = 0; i < list.length; i++) {
File entry = new File(directory, list[i]);
// System.out.println("\tremoving entry " + entry);
if (entry.isDirectory())
{
if (!removeDirectory(entry))
return false;
}
else
{
if (!entry.delete())
return false;
}
}
}
return directory.delete();
}
public static boolean removeDirectory(String directory)
{
return removeDirectory(new File(directory));
}
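// Illustrative usage sketch only (not part of Derby's API): removeDirectory() deletes a directory
// tree bottom-up and returns false as soon as any entry cannot be deleted, while copyDirectory()
// below refuses to copy onto an existing target. A hypothetical caller refreshing a scratch copy:
private static boolean refreshScratchCopy(File source, File scratch) {
if (!removeDirectory(scratch)) // clear any previous copy; returns true if it did not exist
return false;
return copyDirectory(source, scratch); // fails if scratch still exists or source is not a directory
}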
/**
Copy a directory and all of its contents.
*/
public static boolean copyDirectory(File from, File to)
{
return copyDirectory(from, to, (byte[])null, (String[])null);
}
public static boolean copyDirectory(String from, String to)
{
return copyDirectory(new File(from), new File(to));
}
/**
@param filter - array of names to not copy.
*/
public static boolean copyDirectory(File from, File to, byte[] buffer,
String[] filter)
{
//
// System.out.println("copyDirectory("+from+","+to+")");
if (from == null)
return false;
if (!from.exists())
return true;
if (!from.isDirectory())
return false;
if (to.exists())
{
// System.out.println(to + " exists");
return false;
}
if (!to.mkdirs())
{
// System.out.println("can't make" + to);
return false;
}
limitAccessToOwner(to);
String[] list = from.list();
// Some JVMs return null for File.list() when the
// directory is empty.
if (list != null) {
if (buffer == null)
buffer = new byte[BUFFER_SIZE]; // reuse this buffer to copy files
nextFile: for (int i = 0; i < list.length; i++) {
String fileName = list[i];
if (filter != null) {
for (int j = 0; j < filter.length; j++) {
if (fileName.equals(filter[j]))
continue nextFile;
}
}
File entry = new File(from, fileName);
// System.out.println("\tcopying entry " + entry);
if (entry.isDirectory())
{
if (!copyDirectory(entry,new File(to,fileName),buffer,filter))
return false;
}
else
{
if (!copyFile(entry,new File(to,fileName),buffer))
return false;
}
}
}
return true;
}
public static boolean copyFile(File from, File to, byte[] buf)
{
if (buf == null)
buf = new byte[BUFFER_SIZE];
//
// System.out.println("Copy file ("+from+","+to+")");
FileInputStream from_s = null;
FileOutputStream to_s = null;
try {
from_s = new FileInputStream(from);
to_s = new FileOutputStream(to);
limitAccessToOwner(to);
for (int bytesRead = from_s.read(buf);
bytesRead != -1;
bytesRead = from_s.read(buf))
to_s.write(buf,0,bytesRead);
from_s.close();
from_s = null;
to_s.getFD().sync(); // RESOLVE: sync or no sync?
to_s.close();
to_s = null;
}
catch (IOException ioe)
{
return false;
}
finally
{
if (from_s != null)
{
try { from_s.close(); }
catch (IOException ioe) {}
}
if (to_s != null)
{
try { to_s.close(); }
catch (IOException ioe) {}
}
}
return true;
}
public static boolean copyDirectory( StorageFactory storageFactory,
StorageFile from,
File to,
byte[] buffer,
String[] filter,
boolean copySubDirs)
{
if (from == null)
return false;
if (!from.exists())
return true;
if (!from.isDirectory())
return false;
if (to.exists())
{
// System.out.println(to + " exists");
return false;
}
if (!to.mkdirs())
{
// System.out.println("can't make" + to);
return false;
}
limitAccessToOwner(to);
String[] list = from.list();
// Some JVMs return null for File.list() when the
// directory is empty.
if (list != null)
{
if (buffer == null)
buffer = new byte[BUFFER_SIZE]; // reuse this buffer to copy files
nextFile:
for (int i = 0; i < list.length; i++)
{
String fileName = list[i];
if (filter != null) {
for (int j = 0; j < filter.length; j++) {
if (fileName.equals(filter[j]))
continue nextFile;
}
}
StorageFile entry = storageFactory.newStorageFile(from, fileName);
if (entry.isDirectory())
{
if(copySubDirs) {
if (!copyDirectory( storageFactory, entry,
new File(to,fileName), buffer,
filter, copySubDirs))
return false;
}
else {
// the request is to not copy the directories, continue
// to the next file in the list.
continue nextFile;
}
}
else
{
if (!copyFile( storageFactory, entry, new File(to,fileName), buffer))
return false;
}
}
}
return true;
} // end of copyDirectory( StorageFactory sf, StorageFile from, File to, byte[] buf, String[] filter, boolean copySubDirs)
public static boolean copyFile( StorageFactory storageFactory, StorageFile from, File to)
{
return copyFile( storageFactory, from, to, (byte[]) null);
}
public static boolean copyFile( StorageFactory storageFactory, StorageFile from, File to, byte[] buf)
{
InputStream from_s = null;
FileOutputStream to_s = null;
try {
from_s = from.getInputStream();
to_s = new FileOutputStream( to);
limitAccessToOwner(to);
if (buf == null)
buf = new byte[BUFFER_SIZE]; // reuse this buffer to copy files
for (int bytesRead = from_s.read(buf);
bytesRead != -1;
bytesRead = from_s.read(buf))
to_s.write(buf,0,bytesRead);
from_s.close();
from_s = null;
to_s.getFD().sync(); // RESOLVE: sync or no sync?
to_s.close();
to_s = null;
}
catch (IOException ioe)
{
return false;
}
finally
{
if (from_s != null)
{
try { from_s.close(); }
catch (IOException ioe) {}
}
if (to_s != null)
{
try { to_s.close(); }
catch (IOException ioe) {}
}
}
return true;
} // end of copyFile( StorageFactory storageFactory, StorageFile from, File to, byte[] buf)
public static boolean copyDirectory( WritableStorageFactory storageFactory,
File from,
StorageFile to)
{
return copyDirectory( storageFactory, from, to, null, null);
}
public static boolean copyDirectory( WritableStorageFactory storageFactory,
File from,
StorageFile to,
byte[] buffer,
String[] filter)
{
if (from == null)
return false;
if (!from.exists())
return true;
if (!from.isDirectory())
return false;
if (to.exists())
{
// System.out.println(to + " exists");
return false;
}
if (!to.mkdirs())
{
// System.out.println("can't make" + to);
return false;
}
to.limitAccessToOwner();
String[] list = from.list();
// Some JVMs return null for File.list() when the
// directory is empty.
if (list != null)
{
if (buffer == null)
buffer = new byte[BUFFER_SIZE]; // reuse this buffer to copy files
nextFile:
for (int i = 0; i < list.length; i++)
{
String fileName = list[i];
if (filter != null) {
for (int j = 0; j < filter.length; j++) {
if (fileName.equals(filter[j]))
continue nextFile;
}
}
File entry = new File(from, fileName);
if (entry.isDirectory())
{
if (!copyDirectory( storageFactory, entry, storageFactory.newStorageFile(to,fileName), buffer, filter))
return false;
}
else
{
if (!copyFile( storageFactory, entry, storageFactory.newStorageFile(to,fileName), buffer))
return false;
}
}
}
return true;
} // end of copyDirectory( WritableStorageFactory sf, File from, StorageFile to, byte[] buf, String[] filter)
public static boolean copyFile( WritableStorageFactory storageFactory, File from, StorageFile to)
{
return copyFile( storageFactory, from, to, (byte[]) null);
}
public static boolean copyFile( WritableStorageFactory storageFactory, File from, StorageFile to, byte[] buf)
{
InputStream from_s = null;
OutputStream to_s = null;
try {
from_s = new FileInputStream( from);
to_s = to.getOutputStream();
if (buf == null)
buf = new byte[BUFFER_SIZE]; // reuse this buffer to copy files
for (int bytesRead = from_s.read(buf);
bytesRead != -1;
bytesRead = from_s.read(buf))
to_s.write(buf,0,bytesRead);
from_s.close();
from_s = null;
storageFactory.sync( to_s, false); // RESOLVE: sync or no sync?
to_s.close();
to_s = null;
}
catch (IOException ioe)
{
return false;
}
finally
{
if (from_s != null)
{
try { from_s.close(); }
catch (IOException ioe) {}
}
if (to_s != null)
{
try { to_s.close(); }
catch (IOException ioe) {}
}
}
return true;
} // end of copyFile
public static boolean copyFile( WritableStorageFactory storageFactory,
StorageFile from, StorageFile to)
{
return copyFile( storageFactory, from, to, (byte[]) null);
}
public static boolean copyFile( WritableStorageFactory storageFactory,
StorageFile from, StorageFile to,
byte[] buf)
{
InputStream from_s = null;
OutputStream to_s = null;
try {
from_s = from.getInputStream();
to_s = to.getOutputStream();
if (buf == null)
buf = new byte[BUFFER_SIZE]; // reuse this buffer to copy files
for (int bytesRead = from_s.read(buf);
bytesRead != -1;
bytesRead = from_s.read(buf))
to_s.write(buf,0,bytesRead);
from_s.close();
from_s = null;
storageFactory.sync( to_s, false); // RESOLVE: sync or no sync?
to_s.close();
to_s = null;
}
catch (IOException ioe)
{
return false;
}
finally
{
if (from_s != null)
{
try { from_s.close(); }
catch (IOException ioe) {}
}
if (to_s != null)
{
try { to_s.close(); }
catch (IOException ioe) {}
}
}
return true;
} // end of copyFile
/**
Convert a file path into a File object with an absolute path
relative to a passed in root. If path is absolute then
a file object constructed from new File(path) is returned,
otherwise a file object is returned from new File(root, path)
if root is not null, otherwise null is returned.
*/
public static File getAbsoluteFile(File root, String path) {
File file = new File(path);
if (file.isAbsolute())
return file;
if (root == null)
return null;
return new File(root, path);
}
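/*
Behavior sketch (paths are illustrative): an absolute path is returned as-is,
a relative path is resolved against the root, and a relative path with a null
root yields null.

    File root = new File("/var/db");
    getAbsoluteFile(root, "/etc/hosts");    // -> /etc/hosts (already absolute)
    getAbsoluteFile(root, "seg0/c10.dat");  // -> /var/db/seg0/c10.dat
    getAbsoluteFile(null, "seg0/c10.dat");  // -> null
*/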
/**
A replacement for new File(File, String) that correctly implements
the case when the first argument is null. The documentation for java.io.File
says that new File((File) null, name) is the same as new File(name).
This is not the case in pre-1.1.8 VMs, where a NullPointerException is thrown instead.
*/
public static File newFile(File parent, String name) {
if (parent == null)
return new File(name);
else
return new File(parent, name);
}
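/*
Behavior sketch: with a null parent this is the same as new File(name) on all
VMs, sidestepping the pre-1.1.8 NullPointerException described above.

    newFile(null, "service.properties");                 // new File("service.properties")
    newFile(new File("/var/db"), "service.properties");  // /var/db/service.properties
*/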
/**
Remove the leading 'file://' protocol from a file name which has been
expressed as a URL. If the file name is not a URL, nothing is done.
Otherwise, a URL like 'file:///tmp/foo.txt' is transformed into the legal
file name '/tmp/foo.txt'.
*/
public static String stripProtocolFromFileName( String originalName )
{
String result = originalName;
try {
URL url = new URL(originalName);
result = url.getFile();
} catch (MalformedURLException ex) {}
return result;
}
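/*
Behavior sketch: a file: URL is reduced to its path component, while a name
that does not parse as a URL is returned unchanged (the MalformedURLException
is swallowed).

    stripProtocolFromFileName("file:///tmp/foo.txt");  // -> "/tmp/foo.txt"
    stripProtocolFromFileName("/tmp/foo.txt");         // -> "/tmp/foo.txt"
*/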
// Members used by limitAccessToOwner
private static final Object region = new Object();
private static boolean initialized = false;
// Reflection helper objects for calling into Java >= 6
private static Method setWrite = null;
private static Method setRead = null;
private static Method setExec = null;
// Reflection helper objects for calling into Java >= 7
private static Class fileClz = File.class;
private static Class filesClz;
private static Class pathClz;
private static Class pathsClz;
private static Class aclEntryClz;
private static Class aclFileAttributeViewClz;
private static Class posixFileAttributeViewClz;
private static Class userPrincipalClz;
private static Class linkOptionArrayClz;
private static Class linkOptionClz;
private static Class stringArrayClz;
private static Class aclEntryBuilderClz;
private static Class aclEntryTypeClz;
private static Class aclEntryPermissionClz;
private static Method get;
private static Method getFileAttributeView;
private static Method getOwner;
private static Method setAcl;
private static Method build;
private static Method newBuilder;
private static Method setPrincipal;
private static Method setType;
private static Method values;
private static Method setPermissions;
private static Field allow;
/**
* Use when creating new files. If running with Java 6 or higher on Unix,
* limit read and write permissions on {@code file} to owner if {@code
* derby.storage.useDefaultFilePermissions == false}.
* <p/>
* If the property is not specified, we use restrictive permissions anyway
* if running with the network server started from the command line.
* <p/>
* On Unix, this is equivalent to running with umask 0077.
* <p/>
* On Windows, with FAT/FAT32, we lose, since the fs does not support
* permissions, only a read-only flag.
* <p/>
* On Windows, with NTFS with ACLs, if running with Java 7 or higher, we
* limit access also for Windows using the new {@code
* java.nio.file.attribute} package.
*
* @param file assumed to be just created
*/
public static void limitAccessToOwner(File file) {
String value = PropertyUtil.getSystemProperty(
Property.STORAGE_USE_DEFAULT_FILE_PERMISSIONS);
if (value != null) {
if (Boolean.valueOf(value.trim()).booleanValue()) {
return;
}
} else {
// The property has not been specified. Only proceed if we are
// running with the network server started from the command line
// *and* at Java 7 or above
if (JVMInfo.JDK_ID >= JVMInfo.J2SE_17 &&
(PropertyUtil.getSystemBoolean(
Property.SERVER_STARTED_FROM_CMD_LINE, false)) ) {
// proceed
} else {
return;
}
}
// lazy initialization, needs to be called in security context
synchronized (region) {
if (!initialized) {
initialized = true;
// >= Java 6
try {
setWrite = fileClz.getMethod(
"setWritable",
new Class[]{Boolean.TYPE, Boolean.TYPE});
setRead = fileClz.getMethod(
"setReadable",
new Class[]{Boolean.TYPE, Boolean.TYPE});
setExec = fileClz.getMethod(
"setExecutable",
new Class[]{Boolean.TYPE, Boolean.TYPE});
} catch (NoSuchMethodException e) {
// not Java 6 or higher
}
// >= Java 7
try {
// If found, we have >= Java 7.
filesClz = Class.forName(
"java.nio.file.Files");
pathClz = Class.forName(
"java.nio.file.Path");
pathsClz = Class.forName(
"java.nio.file.Paths");
aclEntryClz = Class.forName(
"java.nio.file.attribute.AclEntry");
aclFileAttributeViewClz = Class.forName(
"java.nio.file.attribute.AclFileAttributeView");
posixFileAttributeViewClz = Class.forName(
"java.nio.file.attribute.PosixFileAttributeView");
userPrincipalClz = Class.forName(
"java.nio.file.attribute.UserPrincipal");
linkOptionArrayClz = Class.forName(
"[Ljava.nio.file.LinkOption;");
linkOptionClz = Class.forName(
"java.nio.file.LinkOption");
stringArrayClz = Class.forName(
"[Ljava.lang.String;");
aclEntryBuilderClz = Class.forName(
"java.nio.file.attribute.AclEntry$Builder");
aclEntryTypeClz = Class.forName(
"java.nio.file.attribute.AclEntryType");
aclEntryPermissionClz = Class.forName(
"java.nio.file.attribute.AclEntryPermission");
get = pathsClz.getMethod(
"get",
new Class[]{String.class, stringArrayClz});
getFileAttributeView = filesClz.getMethod(
"getFileAttributeView",
new Class[]{pathClz, Class.class, linkOptionArrayClz});
getOwner = filesClz.
getMethod("getOwner",
new Class[]{pathClz, linkOptionArrayClz});
setAcl = aclFileAttributeViewClz.
getMethod("setAcl", new Class[]{List.class});
build = aclEntryBuilderClz.
getMethod("build", new Class[]{});
newBuilder = aclEntryClz.
getMethod("newBuilder", new Class[]{});
setPrincipal = aclEntryBuilderClz.
getMethod("setPrincipal",
new Class[]{userPrincipalClz});
setType = aclEntryBuilderClz.
getMethod("setType", new Class[]{aclEntryTypeClz});
values = aclEntryPermissionClz.
getMethod("values", (Class[]) null);
setPermissions = aclEntryBuilderClz.
getMethod("setPermissions", new Class[] { Set.class });
allow = aclEntryTypeClz.getField("ALLOW");
} catch (NoSuchMethodException e) {
// not Java 7 or higher
} catch (ClassNotFoundException e) {
// not Java 7 or higher
} catch (NoSuchFieldException e) {
// not Java 7 or higher
}
}
}
if (setWrite == null) {
// JVM level too low
return;
}
if (limitAccessToOwnerViaACLs(file)) {
return;
}
try {
//
// First switch off all write access
//
Object r;
r = setWrite.invoke(
file,
new Object[]{Boolean.FALSE, Boolean.FALSE});
assertTrue(r);
//
// Next, switch on write again, but for owner only
//
r = setWrite.invoke(
file,
new Object[]{Boolean.TRUE, Boolean.TRUE});
assertTrue(r);
//
// First switch off all read access
//
r = setRead.invoke(
file,
new Object[]{Boolean.FALSE, Boolean.FALSE});
assertTrue(r);
//
// Next, switch on read access again, but for owner only
//
r = setRead.invoke(
file,
new Object[]{Boolean.TRUE, Boolean.TRUE});
assertTrue(r);
if (file.isDirectory()) {
//
// First switch off all exec access
//
r = setExec.invoke(
file,
new Object[]{Boolean.FALSE, Boolean.FALSE});
assertTrue(r);
//
// Next, switch on exec again, but for owner only
//
r = setExec.invoke(
file,
new Object[]{Boolean.TRUE, Boolean.TRUE});
assertTrue(r);
}
} catch (InvocationTargetException e) {
// setWritable/setReadable can throw SecurityException
throw (SecurityException)e.getCause();
} catch (IllegalAccessException e) {
// coding error
if (SanityManager.DEBUG) {
SanityManager.THROWASSERT(e);
}
}
}
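/*
For reference, the owner-only restriction performed reflectively above is
equivalent to the following direct java.io.File calls (Java 6+). This is a
sketch only; it is presumably not used directly here so the class still
compiles at an older source level.

    file.setWritable(false, false);   // clear write access for everyone
    file.setWritable(true, true);     // restore write access for the owner only
    file.setReadable(false, false);
    file.setReadable(true, true);
    if (file.isDirectory()) {
        file.setExecutable(false, false);
        file.setExecutable(true, true);
    }
*/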
private static void assertTrue(Object r){
// We should always have permission to modify the access since we have
// just created the file. On some file systems, some operations will
// not work, though, notably FAT/FAT32, as well as NTFS on Java < 7, so
// we ignore the failure.
if (SanityManager.DEBUG) {
Boolean b = (Boolean)r;
if (!b.booleanValue()) {
String os =
PropertyUtil.getSystemProperty("os.name").toLowerCase();
if (os.indexOf("windows") >= 0) {
// Expected to fail: Java 6 on Windows is known not to
// support owner-only permission changes.
} else {
SanityManager.THROWASSERT(
"File.set{RWX} failed on this file system");
}
}
}
}
private static boolean limitAccessToOwnerViaACLs(File file) {
// See if we are running on JDK 7 so we can deny access
// using the new java.nio.file.attribute package.
if (filesClz == null) {
// nope
return false;
}
// We have Java 7, so make the call. We need to call reflectively, since
// the source level isn't yet at Java 7.
try {
// Path fileP = Paths.get(file.getPath());
Object fileP = get.invoke(
null, new Object[]{file.getPath(), new String[]{}});
// AclFileAttributeView view =
// Files.getFileAttributeView(fileP,
// AclFileAttributeView.class);
Object view = getFileAttributeView.invoke(
null,
new Object[]{fileP,
aclFileAttributeViewClz,
Array.newInstance(linkOptionClz, 0)});
if (view == null) {
// ACLs not supported on this file system.
return false;
}
// If we have a posix view, just return and fall back on
// the JDK 6 approach.
Object posixView = getFileAttributeView.invoke(
null,
new Object[]{fileP,
posixFileAttributeViewClz,
Array.newInstance(linkOptionClz, 0)});
if (posixView != null) {
return false;
}
// Since we have an AclFileAttributeView which is not a
// PosixFileAttributeView, we probably have a NTFS file
// system.
// UserPrincipal owner = Files.getOwner(fileP);
Object owner = getOwner.invoke(
null,
new Object[]{fileP, Array.newInstance(linkOptionClz, 0)});
//
// Remove existing ACEs, build a new one which simply
// gives all possible permissions to current owner.
//
// List<AclEntry> newAcl = new ArrayList<>();
// AclEntryPermission[] perms = AclEntryPermission.values();
// AclEntry.Builder aceb = AclEntry.newBuilder();
//
// aceb.setPrincipal(owner);
// aceb.setType(AclEntryType.ALLOW);
// aceb.setPermissions(new HashSet(Arrays.asList(perms)));
// newAcl.add(aceb.build());
List newAcl = new ArrayList();
Object[] perms = (Object[]) values.invoke(null, (Object[]) null);
Object aceb = newBuilder.invoke(null, (Object[]) null);
Object allowValue = allow.get(aclEntryTypeClz);
aceb = setPrincipal.invoke(aceb, new Object[]{owner});
aceb = setType.invoke(aceb, new Object[]{allowValue});
aceb = setPermissions.invoke(
aceb,
new Object[] {new HashSet(Arrays.asList(perms))});
newAcl.add(build.invoke(aceb, (Object[]) null));
// view.setAcl(newAcl);
setAcl.invoke(view, new Object[]{newAcl});
} catch (IllegalAccessException e) {
// coding error
if (SanityManager.DEBUG) {
SanityManager.THROWASSERT(e);
}
} catch (IllegalArgumentException e) {
// coding error
if (SanityManager.DEBUG) {
SanityManager.THROWASSERT(e);
}
} catch (InvocationTargetException e) {
// java.security.AccessControlException: access denied
// ("java.lang.RuntimePermission" "accessUserInformation") can
// happen, so throw.
//
// Should we get an IOException from getOwner, the cast below
// would throw which is fine, since it should not happen.
throw (RuntimeException)e.getCause();
}
return true;
}
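/*
The reflective ACL handling above corresponds to this direct Java 7+ sketch
(illustrative only; assumes imports of java.nio.file.*, java.nio.file.attribute.*,
java.util.Collections and java.util.EnumSet, and omits exception handling):

    Path p = Paths.get(file.getPath());
    AclFileAttributeView view =
        Files.getFileAttributeView(p, AclFileAttributeView.class);
    if (view != null
            && Files.getFileAttributeView(p, PosixFileAttributeView.class) == null) {
        UserPrincipal owner = Files.getOwner(p);
        AclEntry ace = AclEntry.newBuilder()
            .setPrincipal(owner)
            .setType(AclEntryType.ALLOW)
            .setPermissions(EnumSet.allOf(AclEntryPermission.class))
            .build();
        view.setAcl(Collections.singletonList(ace));  // owner gets everything, everyone else nothing
    }
*/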
}
|
|
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.waf.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
* <p>
* The response to a <a>ListXssMatchSets</a> request.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/waf-regional-2016-11-28/ListXssMatchSets" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListXssMatchSetsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
/**
* <p>
* If you have more <a>XssMatchSet</a> objects than the number that you specified for <code>Limit</code> in the
* request, the response includes a <code>NextMarker</code> value. To list more <code>XssMatchSet</code> objects,
* submit another <code>ListXssMatchSets</code> request, and specify the <code>NextMarker</code> value from the
* response in the <code>NextMarker</code> value in the next request.
* </p>
*/
private String nextMarker;
/**
* <p>
* An array of <a>XssMatchSetSummary</a> objects.
* </p>
*/
private java.util.List<XssMatchSetSummary> xssMatchSets;
/**
* <p>
* If you have more <a>XssMatchSet</a> objects than the number that you specified for <code>Limit</code> in the
* request, the response includes a <code>NextMarker</code> value. To list more <code>XssMatchSet</code> objects,
* submit another <code>ListXssMatchSets</code> request, and specify the <code>NextMarker</code> value from the
* response in the <code>NextMarker</code> value in the next request.
* </p>
*
* @param nextMarker
* If you have more <a>XssMatchSet</a> objects than the number that you specified for <code>Limit</code> in
* the request, the response includes a <code>NextMarker</code> value. To list more <code>XssMatchSet</code>
* objects, submit another <code>ListXssMatchSets</code> request, and specify the <code>NextMarker</code>
* value from the response in the <code>NextMarker</code> value in the next request.
*/
public void setNextMarker(String nextMarker) {
this.nextMarker = nextMarker;
}
/**
* <p>
* If you have more <a>XssMatchSet</a> objects than the number that you specified for <code>Limit</code> in the
* request, the response includes a <code>NextMarker</code> value. To list more <code>XssMatchSet</code> objects,
* submit another <code>ListXssMatchSets</code> request, and specify the <code>NextMarker</code> value from the
* response in the <code>NextMarker</code> value in the next request.
* </p>
*
* @return If you have more <a>XssMatchSet</a> objects than the number that you specified for <code>Limit</code> in
* the request, the response includes a <code>NextMarker</code> value. To list more <code>XssMatchSet</code>
* objects, submit another <code>ListXssMatchSets</code> request, and specify the <code>NextMarker</code>
* value from the response in the <code>NextMarker</code> value in the next request.
*/
public String getNextMarker() {
return this.nextMarker;
}
/**
* <p>
* If you have more <a>XssMatchSet</a> objects than the number that you specified for <code>Limit</code> in the
* request, the response includes a <code>NextMarker</code> value. To list more <code>XssMatchSet</code> objects,
* submit another <code>ListXssMatchSets</code> request, and specify the <code>NextMarker</code> value from the
* response in the <code>NextMarker</code> value in the next request.
* </p>
*
* @param nextMarker
* If you have more <a>XssMatchSet</a> objects than the number that you specified for <code>Limit</code> in
* the request, the response includes a <code>NextMarker</code> value. To list more <code>XssMatchSet</code>
* objects, submit another <code>ListXssMatchSets</code> request, and specify the <code>NextMarker</code>
* value from the response in the <code>NextMarker</code> value in the next request.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListXssMatchSetsResult withNextMarker(String nextMarker) {
setNextMarker(nextMarker);
return this;
}
/**
* <p>
* An array of <a>XssMatchSetSummary</a> objects.
* </p>
*
* @return An array of <a>XssMatchSetSummary</a> objects.
*/
public java.util.List<XssMatchSetSummary> getXssMatchSets() {
return xssMatchSets;
}
/**
* <p>
* An array of <a>XssMatchSetSummary</a> objects.
* </p>
*
* @param xssMatchSets
* An array of <a>XssMatchSetSummary</a> objects.
*/
public void setXssMatchSets(java.util.Collection<XssMatchSetSummary> xssMatchSets) {
if (xssMatchSets == null) {
this.xssMatchSets = null;
return;
}
this.xssMatchSets = new java.util.ArrayList<XssMatchSetSummary>(xssMatchSets);
}
/**
* <p>
* An array of <a>XssMatchSetSummary</a> objects.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setXssMatchSets(java.util.Collection)} or {@link #withXssMatchSets(java.util.Collection)} if you want to
* override the existing values.
* </p>
*
* @param xssMatchSets
* An array of <a>XssMatchSetSummary</a> objects.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListXssMatchSetsResult withXssMatchSets(XssMatchSetSummary... xssMatchSets) {
if (this.xssMatchSets == null) {
setXssMatchSets(new java.util.ArrayList<XssMatchSetSummary>(xssMatchSets.length));
}
for (XssMatchSetSummary ele : xssMatchSets) {
this.xssMatchSets.add(ele);
}
return this;
}
/**
* <p>
* An array of <a>XssMatchSetSummary</a> objects.
* </p>
*
* @param xssMatchSets
* An array of <a>XssMatchSetSummary</a> objects.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListXssMatchSetsResult withXssMatchSets(java.util.Collection<XssMatchSetSummary> xssMatchSets) {
setXssMatchSets(xssMatchSets);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getNextMarker() != null)
sb.append("NextMarker: ").append(getNextMarker()).append(",");
if (getXssMatchSets() != null)
sb.append("XssMatchSets: ").append(getXssMatchSets());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof ListXssMatchSetsResult == false)
return false;
ListXssMatchSetsResult other = (ListXssMatchSetsResult) obj;
if (other.getNextMarker() == null ^ this.getNextMarker() == null)
return false;
if (other.getNextMarker() != null && other.getNextMarker().equals(this.getNextMarker()) == false)
return false;
if (other.getXssMatchSets() == null ^ this.getXssMatchSets() == null)
return false;
if (other.getXssMatchSets() != null && other.getXssMatchSets().equals(this.getXssMatchSets()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getNextMarker() == null) ? 0 : getNextMarker().hashCode());
hashCode = prime * hashCode + ((getXssMatchSets() == null) ? 0 : getXssMatchSets().hashCode());
return hashCode;
}
@Override
public ListXssMatchSetsResult clone() {
try {
return (ListXssMatchSetsResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
}
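/*
Pagination sketch based on the NextMarker contract documented above. The
AWSWAFRegional client and ListXssMatchSetsRequest used below are assumptions
drawn from the surrounding SDK, not defined in this file.

    ListXssMatchSetsRequest request = new ListXssMatchSetsRequest().withLimit(100);
    ListXssMatchSetsResult result;
    do {
        result = wafRegionalClient.listXssMatchSets(request);
        if (result.getXssMatchSets() != null) {
            for (XssMatchSetSummary summary : result.getXssMatchSets()) {
                // process each summary
            }
        }
        request.setNextMarker(result.getNextMarker());
    } while (result.getNextMarker() != null);
*/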
|
|
package org.simpleflatmapper.map.mapper;
import org.simpleflatmapper.map.FieldKey;
import org.simpleflatmapper.map.FieldMapper;
import org.simpleflatmapper.map.MapperBuilderErrorHandler;
import org.simpleflatmapper.map.MapperBuildingException;
import org.simpleflatmapper.map.MapperConfig;
import org.simpleflatmapper.map.MappingContext;
import org.simpleflatmapper.map.SourceFieldMapper;
import org.simpleflatmapper.map.context.MappingContextFactory;
import org.simpleflatmapper.map.context.MappingContextFactoryBuilder;
import org.simpleflatmapper.map.impl.DiscriminatorPropertyFinder;
import org.simpleflatmapper.map.property.OptionalProperty;
import org.simpleflatmapper.reflect.BiInstantiator;
import org.simpleflatmapper.reflect.Getter;
import org.simpleflatmapper.reflect.meta.ClassMeta;
import org.simpleflatmapper.reflect.meta.PropertyFinder;
import org.simpleflatmapper.reflect.meta.PropertyMeta;
import org.simpleflatmapper.util.*;
import java.lang.reflect.Type;
import java.util.*;
import static org.simpleflatmapper.map.MapperConfig.sameDiscriminatorId;
public class DiscriminatorConstantSourceMapperBuilder<S, T, K extends FieldKey<K>> extends ConstantSourceMapperBuilder<S, T, K> {
private final DiscriminatedBuilder<S, T, K>[][] builders;
private final MapperConfig.Discriminator<? super S, K, T>[] discriminators;
private final MappingContextFactoryBuilder<S, K> mappingContextFactoryBuilder;
private final CaptureError mapperBuilderErrorHandler;
private final MapperConfig<K, ? extends S> mapperConfig;
public DiscriminatorConstantSourceMapperBuilder(
MapperConfig.Discriminator<? super S, K, T>[] discriminators,
final MapperSource<? super S, K> mapperSource,
final ClassMeta<T> classMeta,
final MapperConfig<K, ? extends S> mapperConfig,
MappingContextFactoryBuilder<S, K> mappingContextFactoryBuilder,
KeyFactory<K> keyFactory,
PropertyFinder<T> propertyFinder) throws MapperBuildingException {
this(discriminators, mapperSource, null, classMeta, mapperConfig, mappingContextFactoryBuilder, keyFactory, propertyFinder);
}
@SuppressWarnings("unchecked")
public DiscriminatorConstantSourceMapperBuilder(
MapperConfig.Discriminator<? super S, K, T>[] discriminators,
final MapperSource<? super S, K> mapperSource,
final PropertyMeta<?, T> owner,
final ClassMeta<T> classMeta,
final MapperConfig<K, ? extends S> mapperConfig,
MappingContextFactoryBuilder<S, K> mappingContextFactoryBuilder,
KeyFactory<K> keyFactory,
PropertyFinder<T> propertyFinder) throws MapperBuildingException {
this.mappingContextFactoryBuilder = mappingContextFactoryBuilder;
this.mapperConfig = mapperConfig;
builders = new DiscriminatedBuilder[discriminators.length][];
this.discriminators = discriminators;
mapperBuilderErrorHandler = new CaptureError(mapperConfig.mapperBuilderErrorHandler(), numberOfBuilders(discriminators));
MapperConfig<K, ? extends S> kMapperConfig = mapperConfig.mapperBuilderErrorHandler(mapperBuilderErrorHandler);
for(int i = 0; i < discriminators.length; i++) {
MapperConfig.Discriminator<? super S, K, T> discriminator = discriminators[i];
builders[i] = new DiscriminatedBuilder[discriminator.cases.length];
for (int ci = 0; ci < discriminator.cases.length; ci++) {
MapperConfig.DiscriminatorCase<? super S, K, ? extends T> discriminatorCase = discriminator.cases[ci];
PropertyFinder<T> subPropertyFinder = propertyFinder;
if (propertyFinder instanceof DiscriminatorPropertyFinder) {
subPropertyFinder = (PropertyFinder<T>) ((DiscriminatorPropertyFinder<T>) subPropertyFinder).getImplementationPropertyFinder(discriminatorCase.classMeta.getType(), discriminator.discriminatorId);
}
builders[i][ci] = getDiscriminatedBuilder(mapperSource, mappingContextFactoryBuilder, keyFactory, subPropertyFinder, kMapperConfig, discriminatorCase, classMeta, discriminator.discriminatorId);
}
}
}
private int numberOfBuilders(MapperConfig.Discriminator<? super S, K, T>[] discriminators) {
int i = 0;
for (MapperConfig.Discriminator<? super S, K, T> d : discriminators) {
i += d.cases.length;
}
return i;
}
private <T> DiscriminatedBuilder<S, T, K> getDiscriminatedBuilder(MapperSource<? super S, K> mapperSource,
MappingContextFactoryBuilder<S, K> mappingContextFactoryBuilder,
KeyFactory<K> keyFactory, PropertyFinder<T> propertyFinder,
MapperConfig<K, ? extends S> kMapperConfig,
MapperConfig.DiscriminatorCase<? super S, K, ? extends T> discrimnatorCase,
ClassMeta<T> commonClassMeta,
Object discriminatorId) {
return new DiscriminatedBuilder<S, T, K>((MapperConfig.DiscriminatorCase<? super S, K, T>) discrimnatorCase,
new DefaultConstantSourceMapperBuilder<S, T, K>(mapperSource, (ClassMeta<T>) discrimnatorCase.classMeta.withReflectionService(commonClassMeta.getReflectionService()), kMapperConfig, mappingContextFactoryBuilder, keyFactory, propertyFinder),
discriminatorId);
}
@Override
public ConstantSourceMapperBuilder<S, T, K> addMapping(K key, ColumnDefinition<K, ?> columnDefinition) {
for(int i = 0; i < builders.length; i++) {
for (DiscriminatedBuilder<S, T, K> builder : builders[i]) {
builder.builder.addMapping(key, columnDefinition);
}
}
final ColumnDefinition<K, ?> composedDefinition = columnDefinition.compose(mapperConfig.columnDefinitions().getColumnDefinition(key));
mapperBuilderErrorHandler.successfullyMapAtLeastToOne(composedDefinition);
return this;
}
@Override
protected <P> void addMapping(final K columnKey, final ColumnDefinition<K, ?> columnDefinition, final PropertyMeta<T, P> prop) {
if (prop instanceof DiscriminatorPropertyFinder.DiscriminatorPropertyMeta) {
DiscriminatorPropertyFinder.DiscriminatorPropertyMeta pm = (DiscriminatorPropertyFinder.DiscriminatorPropertyMeta) prop;
pm.forEachProperty(new Consumer<DiscriminatorPropertyFinder.DiscriminatorMatch>() {
@Override
public void accept(DiscriminatorPropertyFinder.DiscriminatorMatch dm) {
final Type type = dm.type;
final PropertyMeta<?, ?> propertyMeta = dm.matchedProperty.getPropertyMeta();
forBuilderOfType(type, dm.discriminatorId, new Consumer<ConstantSourceMapperBuilder>() {
@Override
public void accept(ConstantSourceMapperBuilder constantSourceMapperBuilder) {
constantSourceMapperBuilder.addMapping(columnKey, columnDefinition, propertyMeta);
}
});
}
});
} else {
for(int i = 0; i < builders.length; i++) {
for (DiscriminatedBuilder<S, T, K> builder : builders[i]) {
builder.builder.addMapping(columnKey, columnDefinition, prop);
}
}
}
}
private void forBuilderOfType(Type type, Object discriminatorId, Consumer<ConstantSourceMapperBuilder> consumer) {
for(int i = 0; i < builders.length; i++) {
for (DiscriminatedBuilder<S, T, K> builder : builders[i]) {
if (TypeHelper.areEquals(builder.builder.getTargetType(), type) && sameDiscriminatorId(builder.discriminatorId, discriminatorId)) {
consumer.accept(builder.builder);
}
}
}
}
@Override
public List<K> getKeys() {
HashSet<K> keys = new HashSet<K>();
for(int i = 0; i < builders.length; i++) {
for (DiscriminatedBuilder<S, T, K> builder : builders[i]) {
keys.addAll(builder.builder.getKeys());
}
}
return new ArrayList<K>(keys);
}
@Override
public <H extends ForEachCallBack<PropertyMapping<T, ?, K>>> H forEachProperties(H handler) {
for(int i = 0; i < builders.length; i++) {
for (DiscriminatedBuilder<S, T, K> builder : builders[i]) {
builder.builder.forEachProperties(handler);
}
}
return handler;
}
@Override
public ContextualSourceFieldMapperImpl<S, T> mapper() {
SourceFieldMapper<S, T> mapper = sourceFieldMapper();
return new ContextualSourceFieldMapperImpl<S, T>(mappingContextFactoryBuilder.build(), mapper);
}
@SuppressWarnings("unchecked")
@Override
public SourceFieldMapper<S, T> sourceFieldMapper() {
List<FieldMapper<S, T>> fieldMappers = new ArrayList<FieldMapper<S, T>>();
DiscriminatedBuilder<S, T, K>[] discBuilder = selectActiveBuilders();
PredicatedInstantiator<S, T>[] predicatedInstantiator = new PredicatedInstantiator[discBuilder.length];
for(int i = 0; i < discBuilder.length; i++) {
DiscriminatedBuilder<S, T, K> builder = discBuilder[i];
final Predicate<? super S> predicate = builder.discrimnatorCase.predicateFactory.apply(builder.findAllDiscriminatoryKeys());
DefaultConstantSourceMapperBuilder.GenericBuilderMapping genericBuilderMapping = builder.builder.getGenericBuilderMapping();
predicatedInstantiator[i] = new PredicatedInstantiator<S, T>(predicate, genericBuilderMapping.genericBuilderInstantiator);
final FieldMapper[] targetFieldMappers = genericBuilderMapping.targetFieldMappers;
fieldMappers.add(new FieldMapper<S, T>() {
@Override
public void mapTo(S source, T target, MappingContext<? super S> context) throws Exception {
if (predicate.test(source)) {
for (FieldMapper fm : targetFieldMappers) {
fm.mapTo(source, target, context);
}
}
}
});
}
boolean oneColumn = isOneColumn(predicatedInstantiator);
BiInstantiator<S, MappingContext<? super S>, GenericBuilder<S, T>> gbi =
oneColumn ?
new OneColumnBuildBiInstantiator<S, T>(predicatedInstantiator) :
new GenericBuildBiInstantiator<S, T>(predicatedInstantiator);
DiscriminatorGenericBuilderMapper<S, T> mapper = new DiscriminatorGenericBuilderMapper<S, T>(gbi);
FieldMapper<S, T>[] targetFieldMappers = fieldMappers.toArray(new FieldMapper[0]);
//
return new TransformSourceFieldMapper<S, GenericBuilder<S, T>, T>(mapper, targetFieldMappers, GenericBuilder.<S, T>buildFunction());
}
private DiscriminatedBuilder<S, T, K>[] selectActiveBuilders() {
List<DiscriminatedBuilder<S, T, K>> activeBuilders = new ArrayList<DiscriminatedBuilder<S, T, K>>();
for(int i = 0; i < builders.length; i++) {
DiscriminatedBuilder<S, T, K>[] pBuilders = builders[i];
MapperConfig.Discriminator<? super S, K, T> d = discriminators[i];
List<K> allDiscriminatoryKeys = pBuilders[0].findAllDiscriminatoryKeys();
if (d.isCompatibleWithKeys(allDiscriminatoryKeys)) {
activeBuilders.addAll(Arrays.asList(pBuilders));
}
}
if (activeBuilders.isEmpty())
throw new IllegalStateException("No Active builders");
return activeBuilders.toArray(new DiscriminatedBuilder[0]);
}
private boolean isOneColumn(PredicatedInstantiator<S, T>[] predicatedInstantiator) {
Getter getter = null;
for(PredicatedInstantiator<S, T> pi : predicatedInstantiator) {
if (!(pi.predicate instanceof AbstractMapperFactory.DiscriminatorConditionBuilder.SourcePredicate)) {
return false;
}
AbstractMapperFactory.DiscriminatorConditionBuilder.SourcePredicate sp = (AbstractMapperFactory.DiscriminatorConditionBuilder.SourcePredicate) pi.predicate;
Getter lg = sp.getter;
if (getter == null) {
getter = lg;
} else if (getter != lg) return false;
if (!(sp.predicate instanceof EqualsPredicate)) return false;
}
return true;
}
@Override
public boolean isRootAggregate() {
return builders[0][0].builder.isRootAggregate();
}
@Override
public MappingContextFactory<? super S> contextFactory() {
return builders[0][0].builder.contextFactory();
}
@Override
public void addMapper(FieldMapper<S, T> mapper) {
for(int i = 0; i < builders.length; i++) {
for (DiscriminatedBuilder<S, T, K> builder : builders[i]) {
builder.builder.addMapper(mapper);
}
}
}
private static class DiscriminatedBuilder<S, T, K extends FieldKey<K>> {
private final MapperConfig.DiscriminatorCase<? super S, K, T> discrimnatorCase;
private final DefaultConstantSourceMapperBuilder<S, T, K> builder;
private final Object discriminatorId;
private DiscriminatedBuilder(MapperConfig.DiscriminatorCase<? super S, K, T> discrimnatorCase, DefaultConstantSourceMapperBuilder<S, T, K> builder, Object discriminatorId) {
this.discrimnatorCase = discrimnatorCase;
this.builder = builder;
this.discriminatorId = discriminatorId;
}
public List<K> findAllDiscriminatoryKeys() {
return builder.findAllDiscriminatorKeys(discriminatorId);
}
}
private static class GenericBuildBiInstantiator<S, T> implements BiInstantiator<S, MappingContext<? super S>, GenericBuilder<S, T>> {
private final PredicatedInstantiator<S, T>[] predicatedInstantiators;
public GenericBuildBiInstantiator(PredicatedInstantiator<S, T>[] predicatedInstantiators) {
this.predicatedInstantiators = predicatedInstantiators;
}
@SuppressWarnings("unchecked")
@Override
public GenericBuilder<S, T> newInstance(S o, MappingContext<? super S> o2) throws Exception {
for(PredicatedInstantiator<S, T> pi : predicatedInstantiators) {
//noinspection unchecked
if (pi.predicate.test(o)) {
return pi.instantiator.newInstance(o, o2);
}
}
throw new IllegalArgumentException("No discrimator matched " + o);
}
}
private static class OneColumnBuildBiInstantiator<S, T> implements BiInstantiator<S, MappingContext<? super S>, GenericBuilder<S, T>> {
private final Getter<S, ?> getter;
private final Map<Object, BiInstantiator<S, MappingContext<? super S>, GenericBuilder<S, T>>> instantiators;
public OneColumnBuildBiInstantiator(PredicatedInstantiator<S, T>[] predicatedInstantiators) {
if (predicatedInstantiators == null || predicatedInstantiators.length == 0) throw new IllegalArgumentException("predicatedInstantiators is null or empty");
getter = ((AbstractMapperFactory.DiscriminatorConditionBuilder.SourcePredicate)predicatedInstantiators[0].predicate).getter;
instantiators = new HashMap<Object, BiInstantiator<S, MappingContext<? super S>, GenericBuilder<S, T>>>();
for(PredicatedInstantiator<S, T> pi : predicatedInstantiators) {
EqualsPredicate ep = (EqualsPredicate) ((AbstractMapperFactory.DiscriminatorConditionBuilder.SourcePredicate)pi.predicate).predicate;
instantiators.put(ep.expected, pi.instantiator);
}
}
@SuppressWarnings("unchecked")
@Override
public GenericBuilder<S, T> newInstance(S o, MappingContext<? super S> o2) throws Exception {
Object value = getter.get(o);
BiInstantiator<S, MappingContext<? super S>, GenericBuilder<S, T>> instantiator = instantiators.get(value);
if (instantiator == null)
throw new IllegalArgumentException("No discrimator matched " + value);
return instantiator.newInstance(o, o2);
}
}
private static class PredicatedInstantiator<S, T> {
private final Predicate predicate;
private final BiInstantiator<S, MappingContext<? super S>, GenericBuilder<S, T>> instantiator;
private PredicatedInstantiator(Predicate predicate, BiInstantiator<S, MappingContext<? super S>, GenericBuilder<S, T>> instantiator) {
this.predicate = predicate;
this.instantiator = instantiator;
}
}
private class DiscriminatorGenericBuilderMapper<S, T> extends AbstractMapper<S, GenericBuilder<S, T>> {
public DiscriminatorGenericBuilderMapper(BiInstantiator<? super S, MappingContext<? super S>, GenericBuilder<S, T>> gbi) {
super(gbi);
}
@Override
protected void mapFields(S source, GenericBuilder<S, T> target, MappingContext<? super S> mappingContext) throws Exception {
target.mapFrom(source, mappingContext);
}
@Override
protected void mapToFields(S source, GenericBuilder<S, T> target, MappingContext<? super S> mappingContext) throws Exception {
target.mapFrom(source, mappingContext);
}
}
private static class CaptureError implements MapperBuilderErrorHandler {
private final MapperBuilderErrorHandler delegate;
private final List<PropertyNotFound> errorCollector;
private final int nbBuilders;
private CaptureError(MapperBuilderErrorHandler delegate, int nbBuilders) {
this.delegate = delegate;
this.nbBuilders = nbBuilders;
errorCollector = new ArrayList<PropertyNotFound>();
}
@Override
public void accessorNotFound(String msg) {
delegate.accessorNotFound(msg);
}
@Override
public void propertyNotFound(Type target, String property) {
errorCollector.add(new PropertyNotFound(target, property));
}
@Override
public void customFieldError(FieldKey<?> key, String message) {
delegate.customFieldError(key, message);
}
public void successfullyMapAtLeastToOne(ColumnDefinition<?, ?> columnDefinition) {
try {
if (errorCollector.size() == nbBuilders && ! columnDefinition.has(OptionalProperty.class)) {
PropertyNotFound propertyNotFound = errorCollector.get(0);
delegate.propertyNotFound(propertyNotFound.target, propertyNotFound.property);
}
} finally {
errorCollector.clear();
}
}
private static class PropertyNotFound {
final Type target;
final String property;
private PropertyNotFound(Type target, String property) {
this.target = target;
this.property = property;
}
}
}
}
|
|
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/domains/v1alpha2/domains.proto
package com.google.cloud.domains.v1alpha2;
/**
*
*
* <pre>
* Request for the `RetrieveAuthorizationCode` method.
* </pre>
*
* Protobuf type {@code google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest}
*/
public final class RetrieveAuthorizationCodeRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest)
RetrieveAuthorizationCodeRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use RetrieveAuthorizationCodeRequest.newBuilder() to construct.
private RetrieveAuthorizationCodeRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RetrieveAuthorizationCodeRequest() {
registration_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new RetrieveAuthorizationCodeRequest();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private RetrieveAuthorizationCodeRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
java.lang.String s = input.readStringRequireUtf8();
registration_ = s;
break;
}
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.domains.v1alpha2.DomainsProto
.internal_static_google_cloud_domains_v1alpha2_RetrieveAuthorizationCodeRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.domains.v1alpha2.DomainsProto
.internal_static_google_cloud_domains_v1alpha2_RetrieveAuthorizationCodeRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest.class,
com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest.Builder.class);
}
public static final int REGISTRATION_FIELD_NUMBER = 1;
private volatile java.lang.Object registration_;
/**
*
*
* <pre>
* Required. The name of the `Registration` whose authorization code is being retrieved,
* in the format `projects/*/locations/*/registrations/*`.
* </pre>
*
* <code>
* string registration = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The registration.
*/
@java.lang.Override
public java.lang.String getRegistration() {
java.lang.Object ref = registration_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
registration_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The name of the `Registration` whose authorization code is being retrieved,
* in the format `projects/*/locations/*/registrations/*`.
* </pre>
*
* <code>
* string registration = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for registration.
*/
@java.lang.Override
public com.google.protobuf.ByteString getRegistrationBytes() {
java.lang.Object ref = registration_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
registration_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
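/*
Builder usage sketch (the resource name below is an illustrative placeholder):

    RetrieveAuthorizationCodeRequest request =
        RetrieveAuthorizationCodeRequest.newBuilder()
            .setRegistration(
                "projects/my-project/locations/global/registrations/example.com")
            .build();
*/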
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(registration_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, registration_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(registration_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, registration_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest)) {
return super.equals(obj);
}
com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest other =
(com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest) obj;
if (!getRegistration().equals(other.getRegistration())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + REGISTRATION_FIELD_NUMBER;
hash = (53 * hash) + getRegistration().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request for the `RetrieveAuthorizationCode` method.
* </pre>
*
* Protobuf type {@code google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest)
com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.domains.v1alpha2.DomainsProto
.internal_static_google_cloud_domains_v1alpha2_RetrieveAuthorizationCodeRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.domains.v1alpha2.DomainsProto
.internal_static_google_cloud_domains_v1alpha2_RetrieveAuthorizationCodeRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest.class,
com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest.Builder.class);
}
// Construct using
// com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
registration_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.domains.v1alpha2.DomainsProto
.internal_static_google_cloud_domains_v1alpha2_RetrieveAuthorizationCodeRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest
getDefaultInstanceForType() {
return com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest build() {
com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest buildPartial() {
com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest result =
new com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest(this);
result.registration_ = registration_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest) {
return mergeFrom(
(com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest other) {
if (other
== com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest
.getDefaultInstance()) return this;
if (!other.getRegistration().isEmpty()) {
registration_ = other.registration_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage =
(com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest)
e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private java.lang.Object registration_ = "";
/**
*
*
* <pre>
* Required. The name of the `Registration` whose authorization code is being retrieved,
* in the format `projects/*/locations/*/registrations/*`.
* </pre>
*
* <code>
* string registration = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The registration.
*/
public java.lang.String getRegistration() {
java.lang.Object ref = registration_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
registration_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the `Registration` whose authorization code is being retrieved,
* in the format `projects/*/locations/*/registrations/*`.
* </pre>
*
* <code>
* string registration = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for registration.
*/
public com.google.protobuf.ByteString getRegistrationBytes() {
java.lang.Object ref = registration_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
registration_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the `Registration` whose authorization code is being retrieved,
* in the format `projects/*/locations/*/registrations/*`.
* </pre>
*
* <code>
* string registration = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The registration to set.
* @return This builder for chaining.
*/
public Builder setRegistration(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
registration_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the `Registration` whose authorization code is being retrieved,
* in the format `projects/*/locations/*/registrations/*`.
* </pre>
*
* <code>
* string registration = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearRegistration() {
registration_ = getDefaultInstance().getRegistration();
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the `Registration` whose authorization code is being retrieved,
* in the format `projects/*/locations/*/registrations/*`.
* </pre>
*
* <code>
* string registration = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for registration to set.
* @return This builder for chaining.
*/
public Builder setRegistrationBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
registration_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest)
private static final com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest();
}
public static com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<RetrieveAuthorizationCodeRequest> PARSER =
new com.google.protobuf.AbstractParser<RetrieveAuthorizationCodeRequest>() {
@java.lang.Override
public RetrieveAuthorizationCodeRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new RetrieveAuthorizationCodeRequest(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<RetrieveAuthorizationCodeRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<RetrieveAuthorizationCodeRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
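// A minimal usage sketch for the generated message above, assuming the standard
// protoc-generated newBuilder()/build() entry points; the registration resource name is a
// hypothetical placeholder, not a value taken from the original source.
class RetrieveAuthorizationCodeRequestExample {
  public static void main(String[] args) {
    com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest request =
        com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest.newBuilder()
            .setRegistration("projects/my-project/locations/global/registrations/example-domain")
            .build();
    // getRegistration() returns the resource name that was set on the builder.
    System.out.println(request.getRegistration());
  }
}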
|
|
/*
* Copyright 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.plugins.antlr.internal;
import com.google.common.collect.Lists;
import org.gradle.api.GradleException;
import org.gradle.api.plugins.antlr.internal.antlr2.GenerationPlan;
import org.gradle.api.plugins.antlr.internal.antlr2.GenerationPlanBuilder;
import org.gradle.api.plugins.antlr.internal.antlr2.MetadataExtracter;
import org.gradle.api.plugins.antlr.internal.antlr2.XRef;
import org.gradle.internal.os.OperatingSystem;
import org.gradle.internal.reflect.JavaReflectionUtil;
import org.gradle.util.RelativePathUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.List;
public class AntlrExecuter implements AntlrWorker {
private static final Logger LOGGER = LoggerFactory.getLogger(AntlrExecuter.class);
@Override
public AntlrResult runAntlr(AntlrSpec spec) {
AntlrTool antlrTool = new Antlr4Tool();
if (antlrTool.available()) {
LOGGER.info("Processing with ANTLR 4");
return antlrTool.process(spec);
}
antlrTool = new Antlr3Tool();
if (antlrTool.available()) {
LOGGER.info("Processing with ANTLR 3");
return antlrTool.process(spec);
}
antlrTool = new Antlr2Tool();
if (antlrTool.available()) {
LOGGER.info("Processing with ANTLR 2");
return antlrTool.process(spec);
}
throw new IllegalStateException("No Antlr implementation available");
}
private static class Antlr3Tool extends AntlrTool {
@Override
int invoke(List<String> arguments, File inputDirectory) throws ClassNotFoundException {
final Object backedObject = loadTool("org.antlr.Tool", null);
String[] argArray = arguments.toArray(new String[0]);
if (inputDirectory != null) {
JavaReflectionUtil.method(backedObject, Void.class, "setInputDirectory", String.class).invoke(backedObject, inputDirectory.getAbsolutePath());
JavaReflectionUtil.method(backedObject, Void.class, "setForceRelativeOutput", boolean.class).invoke(backedObject, true);
}
JavaReflectionUtil.method(backedObject, Void.class, "processArgs", String[].class).invoke(backedObject, new Object[]{argArray});
JavaReflectionUtil.method(backedObject, Void.class, "process").invoke(backedObject);
return JavaReflectionUtil.method(backedObject, Integer.class, "getNumErrors").invoke(backedObject);
}
@Override
public boolean available() {
try {
loadTool("org.antlr.Tool", null);
} catch (ClassNotFoundException cnf) {
return false;
}
return true;
}
}
private abstract static class AntlrTool {
/**
* Utility method to create an instance of the Tool class.
*
* @throws ClassNotFoundException if class was not on the runtime classpath.
*/
static Object loadTool(String className, String[] args) throws ClassNotFoundException {
try {
Class<?> toolClass = Class.forName(className); // ok to use caller classloader
if (args == null) {
return toolClass.newInstance();
} else {
Constructor<?> constructor = toolClass.getConstructor(String[].class);
return constructor.newInstance(new Object[]{args});
}
      } catch (ClassNotFoundException cnf) {
throw cnf;
} catch (InvocationTargetException e) {
throw new GradleException("Failed to load ANTLR", e.getCause());
} catch (Exception e) {
throw new GradleException("Failed to load ANTLR", e);
}
}
public final AntlrResult process(AntlrSpec spec) {
try {
return doProcess(spec);
} catch (ClassNotFoundException e) {
                //this shouldn't happen if availability was checked with #available first
throw new GradleException("Cannot process antlr sources", e);
}
}
/**
         * Processing used for ANTLR 3 and ANTLR 4.
*/
public AntlrResult doProcess(AntlrSpec spec) throws ClassNotFoundException {
int numErrors = 0;
if (spec.getInputDirectories().size() == 0) {
                // we have no root source folder information for the grammar files,
                // so we don't force relativeOutput because we can't calculate it.
                // This results in flat generated sources in the output directory.
numErrors += invoke(spec.asArgumentsWithFiles(), null);
} else {
boolean onWindows = OperatingSystem.current().isWindows();
for (File inputDirectory : spec.getInputDirectories()) {
final List<String> arguments = spec.getArguments();
arguments.add("-o");
arguments.add(spec.getOutputDirectory().getAbsolutePath());
for (File grammarFile : spec.getGrammarFiles()) {
String relativeGrammarFilePath = RelativePathUtil.relativePath(inputDirectory, grammarFile);
if (onWindows) {
relativeGrammarFilePath = relativeGrammarFilePath.replace('/', File.separatorChar);
}
arguments.add(relativeGrammarFilePath);
}
numErrors += invoke(arguments, inputDirectory);
}
}
return new AntlrResult(numErrors);
}
abstract int invoke(List<String> arguments, File inputDirectory) throws ClassNotFoundException;
public abstract boolean available();
protected static String[] toArray(List<String> strings) {
return strings.toArray(new String[0]);
}
}
static class Antlr4Tool extends AntlrTool {
@Override
int invoke(List<String> arguments, File inputDirectory) throws ClassNotFoundException {
final Object backedObject = loadTool("org.antlr.v4.Tool", toArray(arguments));
if (inputDirectory != null) {
JavaReflectionUtil.writeableField(backedObject.getClass(), "inputDirectory").setValue(backedObject, inputDirectory);
}
JavaReflectionUtil.method(backedObject, Void.class, "processGrammarsOnCommandLine").invoke(backedObject);
return JavaReflectionUtil.method(backedObject, Integer.class, "getNumErrors").invoke(backedObject);
}
@Override
public boolean available() {
try {
loadTool("org.antlr.v4.Tool", null);
} catch (ClassNotFoundException cnf) {
return false;
}
return true;
}
}
private static class Antlr2Tool extends AntlrTool {
public AntlrResult doProcess(AntlrSpec spec) throws ClassNotFoundException {
XRef xref = new MetadataExtracter().extractMetadata(spec.getGrammarFiles());
List<GenerationPlan> generationPlans = new GenerationPlanBuilder(spec.getOutputDirectory()).buildGenerationPlans(xref);
for (GenerationPlan generationPlan : generationPlans) {
List<String> generationPlanArguments = Lists.newArrayList(spec.getArguments());
generationPlanArguments.add("-o");
generationPlanArguments.add(generationPlan.getGenerationDirectory().getAbsolutePath());
generationPlanArguments.add(generationPlan.getSource().getAbsolutePath());
try {
invoke(generationPlanArguments, null);
} catch (RuntimeException e) {
if (e.getMessage().equals("ANTLR Panic: Exiting due to errors.")) {
return new AntlrResult(-1, e);
}
throw e;
}
}
            return new AntlrResult(0); // ANTLR 2 always returns 0
}
/**
         * inputDirectory is not used by ANTLR 2.
         */
@Override
int invoke(List<String> arguments, File inputDirectory) throws ClassNotFoundException {
final Object backedAntlrTool = loadTool("antlr.Tool", null);
JavaReflectionUtil.method(backedAntlrTool, Integer.class, "doEverything", String[].class).invoke(backedAntlrTool, new Object[]{toArray(arguments)});
return 0;
}
@Override
public boolean available() {
try {
loadTool("antlr.Tool", null);
} catch (ClassNotFoundException cnf) {
return false;
}
return true;
}
}
}
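// A minimal standalone sketch of the availability probe used above: each AntlrTool reports
// available() by attempting Class.forName on its tool class, and runAntlr falls back from
// ANTLR 4 to 3 to 2 in that order. The class below only mirrors the probing idea and is not
// part of the Gradle plugin.
class AntlrToolProbe {
    static boolean onClasspath(String className) {
        try {
            Class.forName(className);
            return true;
        } catch (ClassNotFoundException e) {
            return false;
        }
    }
    public static void main(String[] args) {
        String[] toolClasses = {"org.antlr.v4.Tool", "org.antlr.Tool", "antlr.Tool"};
        for (String toolClass : toolClasses) {
            System.out.println(toolClass + " available: " + onClasspath(toolClass));
        }
    }
}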
|
|
/**
*/
package com.rockwellcollins.atc.agree.agree.impl;
import com.rockwellcollins.atc.agree.agree.AgreePackage;
import com.rockwellcollins.atc.agree.agree.Expr;
import com.rockwellcollins.atc.agree.agree.GetPropertyExpr;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.osate.aadl2.NamedElement;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Get Property Expr</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link com.rockwellcollins.atc.agree.agree.impl.GetPropertyExprImpl#getComponent <em>Component</em>}</li>
* <li>{@link com.rockwellcollins.atc.agree.agree.impl.GetPropertyExprImpl#getProp <em>Prop</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class GetPropertyExprImpl extends ExprImpl implements GetPropertyExpr
{
/**
* The cached value of the '{@link #getComponent() <em>Component</em>}' containment reference.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getComponent()
* @generated
* @ordered
*/
protected Expr component;
/**
* The cached value of the '{@link #getProp() <em>Prop</em>}' reference.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getProp()
* @generated
* @ordered
*/
protected NamedElement prop;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected GetPropertyExprImpl()
{
super();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
protected EClass eStaticClass()
{
return AgreePackage.Literals.GET_PROPERTY_EXPR;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public Expr getComponent()
{
return component;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public NotificationChain basicSetComponent(Expr newComponent, NotificationChain msgs)
{
Expr oldComponent = component;
component = newComponent;
if (eNotificationRequired())
{
ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, AgreePackage.GET_PROPERTY_EXPR__COMPONENT, oldComponent, newComponent);
if (msgs == null) msgs = notification; else msgs.add(notification);
}
return msgs;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setComponent(Expr newComponent)
{
if (newComponent != component)
{
NotificationChain msgs = null;
if (component != null)
msgs = ((InternalEObject)component).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - AgreePackage.GET_PROPERTY_EXPR__COMPONENT, null, msgs);
if (newComponent != null)
msgs = ((InternalEObject)newComponent).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - AgreePackage.GET_PROPERTY_EXPR__COMPONENT, null, msgs);
msgs = basicSetComponent(newComponent, msgs);
if (msgs != null) msgs.dispatch();
}
else if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, AgreePackage.GET_PROPERTY_EXPR__COMPONENT, newComponent, newComponent));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public NamedElement getProp()
{
if (prop != null && ((EObject)prop).eIsProxy())
{
InternalEObject oldProp = (InternalEObject)prop;
prop = (NamedElement)eResolveProxy(oldProp);
if (prop != oldProp)
{
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.RESOLVE, AgreePackage.GET_PROPERTY_EXPR__PROP, oldProp, prop));
}
}
return prop;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public NamedElement basicGetProp()
{
return prop;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setProp(NamedElement newProp)
{
NamedElement oldProp = prop;
prop = newProp;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, AgreePackage.GET_PROPERTY_EXPR__PROP, oldProp, prop));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs)
{
switch (featureID)
{
case AgreePackage.GET_PROPERTY_EXPR__COMPONENT:
return basicSetComponent(null, msgs);
}
return super.eInverseRemove(otherEnd, featureID, msgs);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType)
{
switch (featureID)
{
case AgreePackage.GET_PROPERTY_EXPR__COMPONENT:
return getComponent();
case AgreePackage.GET_PROPERTY_EXPR__PROP:
if (resolve) return getProp();
return basicGetProp();
}
return super.eGet(featureID, resolve, coreType);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void eSet(int featureID, Object newValue)
{
switch (featureID)
{
case AgreePackage.GET_PROPERTY_EXPR__COMPONENT:
setComponent((Expr)newValue);
return;
case AgreePackage.GET_PROPERTY_EXPR__PROP:
setProp((NamedElement)newValue);
return;
}
super.eSet(featureID, newValue);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void eUnset(int featureID)
{
switch (featureID)
{
case AgreePackage.GET_PROPERTY_EXPR__COMPONENT:
setComponent((Expr)null);
return;
case AgreePackage.GET_PROPERTY_EXPR__PROP:
setProp((NamedElement)null);
return;
}
super.eUnset(featureID);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public boolean eIsSet(int featureID)
{
switch (featureID)
{
case AgreePackage.GET_PROPERTY_EXPR__COMPONENT:
return component != null;
case AgreePackage.GET_PROPERTY_EXPR__PROP:
return prop != null;
}
return super.eIsSet(featureID);
}
} //GetPropertyExprImpl
|
|
import java.util.ArrayList;
import com.google.gson.reflect.TypeToken;
import com.google.gson.Gson;
/**
 * Class containing all actions the server performs. Sends messages to and
 * receives messages from the clients.
*
* @author Frederik Emil
*
*/
public class ServerActions {
static Gson gson;
static Position rStartPos;
static int expectingRoad;
static int startIndex;
static boolean isInit;
static int initRoundNumber;
/**
     * An object of this class is never instantiated, so this method must be
     * called once to initialize the static fields.
*/
static void initActions() {
gson = new Gson();
expectingRoad = -1;
startIndex = 0;
isInit = true;
initRoundNumber = 0;
}
/**
     * Called when the game is started. Generates the map and sends it to all clients.
*/
static void generateMap() {
Hexagon[] hexagons = Hexagon.generateMap();
String message;
for (Hexagon hexagon : hexagons) {
message = "Hexagon " + gson.toJson(hexagon);
NetworkServer.sendToAll(message);
}
}
public static void sendPlayerAmount() {
NetworkServer.sendToAll("PlayerNums " + GameData.players.size());
}
static void collectResources(int number) {
for (int i = 0; i < GameData.players.size(); i++) {
for (int j = 0; j < GameData.buildings.size(); j++) {
if (GameData.buildings.get(j).PLAYER == GameData.players.get(i).NUMBER) {
Hexagon[] nearbyHexagons = GameData.buildings.get(j).POSITION.getNearbyHexagons();
for (int k = 0; k < nearbyHexagons.length; k++) {
if (nearbyHexagons[k].NUMBER == number && !nearbyHexagons[k].isRobbed()) {
if (nearbyHexagons[k].TYPE != ResourceType.DESERT && GameData.buildings.get(j).isUpgraded() && !nearbyHexagons[k].isRobbed()) {
GameData.players.get(i).resources[nearbyHexagons[k].TYPE.toInt()] += 2;
GameData.players.get(i).resourceAmount += 2;
}
else if (nearbyHexagons[k].TYPE != ResourceType.DESERT && !nearbyHexagons[k].isRobbed()) {
GameData.players.get(i).resources[nearbyHexagons[k].TYPE.toInt()]++;
GameData.players.get(i).resourceAmount++;
}
}
}
}
}
GameData.players.get(i).updateResAmount();
}
updatePlayerResources();
}
static void collectResources() {
System.out.println("Collecting resources");
int dieRoll = Dice.dice1 + Dice.dice2;
for (int i = 0; i < GameData.players.size(); i++) {
for (int j = 0; j < GameData.buildings.size(); j++) {
if (GameData.buildings.get(j).PLAYER == GameData.players.get(i).NUMBER) {
Hexagon[] nearbyHexagons = GameData.buildings.get(j).POSITION.getNearbyHexagons();
for (int k = 0; k < nearbyHexagons.length; k++) {
if (nearbyHexagons[k].NUMBER == dieRoll && !nearbyHexagons[k].isRobbed()) {
if (nearbyHexagons[k].TYPE != ResourceType.DESERT && GameData.buildings.get(j).isUpgraded() && !nearbyHexagons[k].isRobbed()) {
GameData.players.get(i).resources[nearbyHexagons[k].TYPE.toInt()] += 2;
GameData.players.get(i).resourceAmount += 2;
}
else if (nearbyHexagons[k].TYPE != ResourceType.DESERT && !nearbyHexagons[k].isRobbed()) {
GameData.players.get(i).resources[nearbyHexagons[k].TYPE.toInt()]++;
GameData.players.get(i).resourceAmount++;
}
}
}
}
}
GameData.players.get(i).updateResAmount();
}
updatePlayerResources();
}
private static void updatePlayerResources() {
for (int i = 0; i < GameData.players.size(); i++) {
ArrayList<Integer> resources = new ArrayList<Integer>();
for (int j = 0; j < GameData.players.get(i).resources.length; j++)
resources.add(GameData.players.get(i).resources[j]);
String message = gson.toJson(resources, new TypeToken<ArrayList<Integer>>(){}.getType());
NetworkServer.sendToAll("Resource " + i + " " + message);
}
}
static void addDevelopmentCard(int ID) {
GameData.players.get(ID).devCard[DevelopmentCardDeck.BuyCard(ID).toInt()]++;
}
public static CardType buyCard(int ID) {
if (DevelopmentCardDeck.cards.size() != 0 && GameData.players.get(ID).resources[ResourceType.CORN.toInt()] >= 1
&& GameData.players.get(ID).resources[ResourceType.ROCK.toInt()] >= 1
&& GameData.players.get(ID).resources[ResourceType.SHEEP.toInt()] >= 1) {
CardType returnCard = DevelopmentCardDeck.cards.get(0);
DevelopmentCardDeck.cards.remove(0);
return returnCard;
}
System.out.println("No card");
return null;
}
static void playDevelopmentCard(CardType type, int ID) {
switch(type) {
case KNIGHT:
//Move Robber
//Take 1 resourceCard
break;
case VICTORYPOINT:
GameData.players.get(ID).points++;
break;
case YEAROFPLENTY:
//Missing graphical representation
//GameData.players.get(ID).addResource();
break;
case ROADBUILD:
//Missing graphical representation
//Road.buildRoad(Position startPos, Position endPos, ID);
break;
case MONOPOLY:
//Select a resource which the other players must hand over.
break;
}
}
/**
* Method called when a message is received from a client
*
* @param clientId
     *            Index of the player that has sent the message
* @param message
* The message received
*/
public synchronized static void received(int clientId, String message) {
System.out.println(message);
if (message.equals("Collect")) {
collectResources();
String outMessage = gson.toJson(GameData.players);
NetworkServer.sendToAll("Collect " + outMessage);
}
else if (message.equals("addDevelop")) {
addDevelopmentCard(clientId);
buyCard(clientId);
String outMessage = gson.toJson(GameData.players);
NetworkServer.sendToAll("Collect " + outMessage);
}
else if (message.equals("PlayDevelop")){
//Missing Graphical Representation
//playDevelopmentCard();
}
else if (message.equals("Roll")) {
NetworkServer.sendToAll("Roll1 " + Dice.RollDice(1));
NetworkServer.sendToAll("Roll2 " + Dice.RollDice(2));
collectResources();
}
else if (expectingRoad == clientId) {
System.out.println("Road");
Position rEndPos = gson.fromJson(message, Position.class);
if (Road.buildRoad(Position.assignPosition(rStartPos.DIVISION, rStartPos.INDEX), Position.assignPosition(rEndPos.DIVISION, rEndPos.INDEX), clientId) != null) {
NetworkServer.sendToAll("Road " + clientId + " " + gson.toJson(rStartPos));
NetworkServer.sendToAll(message);
if (!isInit) {
GameData.players.get(clientId).resources[ResourceType.BRICK.toInt()]--;
GameData.players.get(clientId).resources[ResourceType.TREE.toInt()]--;
updatePlayerResources();
}
}
expectingRoad = -1;
} else if (clientId == GameData.turn && message.equals("rollDice")) {
} else if (message.equals("End turn")) {
GameData.turn = (GameData.turn + 1) % GameData.players.size();
if (GameData.turn == 0 && isInit)
initRoundNumber++;
if (initRoundNumber > 1 && isInit) {
isInit = false;
for (int i = 2; i <= 12; i++) {
collectResources(i);
}
NetworkServer.sendToAll("InitDone");
}
NetworkServer.sendToAll("Turn " + GameData.turn);
} else if (message.equals("TradeAccept")) {
//GameData.tObject = gson.fromJson(message, TradeObject.class);
for (int i = 0; i < GameData.tObject.has.length; i++) {
GameData.players.get(GameData.tObject.initPlayer).resources[i] -= GameData.tObject.has[i];
GameData.players.get(GameData.tObject.initPlayer).resources[i] += GameData.tObject.wants[i];
GameData.players.get(GameData.tObject.acceptPlayer).resources[i] -= GameData.tObject.wants[i];
GameData.players.get(GameData.tObject.acceptPlayer).resources[i] += GameData.tObject.has[i];
}
System.out.println("Accepting trade");
NetworkServer.sendToAll("TradeAccept");
updatePlayerResources();
} else if (message.equals("TradeDecline")) {
NetworkServer.sendToAll("TradeDecline");
}
else {
String objectType = "";
int jsonIndex = 0;
for (int i = 0; i < message.length() && !Character.isSpaceChar(message.charAt(i)); i++) {
objectType += message.charAt(i);
jsonIndex = i + 2;
}
if (jsonIndex >= message.length()) {
System.out.println("ERROR ON LINE: " + message);
return;
}
message = message.substring(jsonIndex);
if (objectType.equals("Building")) {
Position inPos = gson.fromJson(message, Position.class);
if (Building.build(inPos, clientId) != null) {
NetworkServer.sendToAll("Building " + clientId + " " + message);
if (!isInit) {
GameData.players.get(clientId).resources[ResourceType.CORN.toInt()]--;
GameData.players.get(clientId).resources[ResourceType.BRICK.toInt()]--;
GameData.players.get(clientId).resources[ResourceType.SHEEP.toInt()]--;
GameData.players.get(clientId).resources[ResourceType.TREE.toInt()]--;
updatePlayerResources();
}
}
} else if (objectType.equals("Upgrade")) {
Position inPos = gson.fromJson(message, Position.class);
if (Building.getByPosition(inPos).upgrade()) {
NetworkServer.sendToAll("Upgrade " + message);
GameData.players.get(clientId).resources[ResourceType.CORN.toInt()]-=2;
GameData.players.get(clientId).resources[ResourceType.ROCK.toInt()]-=3;
updatePlayerResources();
}
} else if (objectType.equals("Road")) {
rStartPos = gson.fromJson(message, Position.class);
expectingRoad = clientId;
} else if (objectType.equals("Chat")) {
NetworkServer.sendToAll("Chat " + clientId + " " + message);
} else if (objectType.equals("Name")){
System.out.println("Name received");
GameData.players.set(clientId, new Player(message, clientId));
startIndex++;
if (startIndex == GameData.players.size()) {
String outMessage = gson.toJson(GameData.players);
NetworkServer.sendToAll("Players " + outMessage);
}
} else if (objectType.equals("Robber")) {
System.out.println("Received robber");
Hexagon.getHexagons()[Integer.parseInt(message)].rob();
NetworkServer.sendToAll("Robber " + message);
} else if (objectType.equals("Trade")) {
//message = message.substring(jsonIndex);
System.out.println(message);
GameData.tObject = gson.fromJson(message, TradeObject.class);
if (GameData.tObject.acceptPlayer == -1) {
for (int i = 0; i < GameData.tObject.wants.length; i++) {
GameData.players.get(clientId).resources[i] -= GameData.tObject.has[i];
GameData.players.get(clientId).resources[i] += GameData.tObject.wants[i];
}
updatePlayerResources();
} else
NetworkServer.send(GameData.tObject.acceptPlayer ,"Trade " + message);
}
/*
else {
int playerID = 0;
for (int i = 0; !Character.isSpaceChar(message.charAt(i)); i++) {
playerID = Integer.parseInt(String.valueOf(message.charAt(i)));
jsonIndex = i + 2;
}
if (objectType.equals("Trade")) {
message = message.substring(jsonIndex);
GameData.tObject = gson.fromJson(message, TradeObject.class);
NetworkServer.send(playerID ,"Trade " + message);
}
}*/
}
}
public static void nameRequest() {
NetworkServer.sendToAll("SendName");
}
public static void sendId(int i) {
NetworkServer.send(i, "ID " + i);
}
}
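// A minimal standalone sketch of the "<ObjectType> <payload>" framing handled by
// ServerActions.received(): the token before the first space selects the handler and the
// remainder (typically JSON) is the payload. The class below is illustrative only; the
// message value is hypothetical and it is not part of the server.
class MessageFrameExample {
    public static void main(String[] args) {
        String message = "Building {\"DIVISION\":1,\"INDEX\":4}"; // hypothetical payload
        int space = message.indexOf(' ');
        String objectType = space < 0 ? message : message.substring(0, space);
        String payload = space < 0 ? "" : message.substring(space + 1);
        System.out.println("type=" + objectType + " payload=" + payload);
    }
}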
|
|
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.directory.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
* Container for the parameters to the {@link com.amazonaws.services.directory.AWSDirectoryService#createDirectory(CreateDirectoryRequest) CreateDirectory operation}.
* <p>
* Creates a Simple AD directory.
* </p>
*
* @see com.amazonaws.services.directory.AWSDirectoryService#createDirectory(CreateDirectoryRequest)
*/
public class CreateDirectoryRequest extends AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* The fully qualified name for the directory, such as
* <code>corp.example.com</code>.
* <p>
* <b>Constraints:</b><br/>
* <b>Pattern: </b>^([a-zA-Z0-9]+[\\.-])+([a-zA-Z0-9])+$<br/>
*/
private String name;
/**
* The short name of the directory, such as <code>CORP</code>.
* <p>
* <b>Constraints:</b><br/>
* <b>Pattern: </b>^[^\\/:*?\"\<\>|.]+[^\\/:*?\"<>|]*$<br/>
*/
private String shortName;
/**
* The password for the directory administrator. The directory creation
* process creates a directory administrator account with the username
* <code>Administrator</code> and this password.
* <p>
* <b>Constraints:</b><br/>
* <b>Pattern: </b>(?=^.{8,64}$)((?=.*\d)(?=.*[A-Z])(?=.*[a-z])|(?=.*\d)(?=.*[^A-Za-z0-9])(?=.*[a-z])|(?=.*[^A-Za-z0-9])(?=.*[A-Z])(?=.*[a-z])|(?=.*\d)(?=.*[A-Z])(?=.*[^A-Za-z0-9]))^.*<br/>
*/
private String password;
/**
* A textual description for the directory.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>0 - 128<br/>
* <b>Pattern: </b>^([a-zA-Z0-9_])[\\a-zA-Z0-9_@#%*+=:?./!\s-]*$<br/>
*/
private String description;
/**
* The size of the directory.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>Small, Large
*/
private String size;
/**
* A <a>DirectoryVpcSettings</a> object that contains additional
* information for the operation.
*/
private DirectoryVpcSettings vpcSettings;
/**
* The fully qualified name for the directory, such as
* <code>corp.example.com</code>.
* <p>
* <b>Constraints:</b><br/>
* <b>Pattern: </b>^([a-zA-Z0-9]+[\\.-])+([a-zA-Z0-9])+$<br/>
*
* @return The fully qualified name for the directory, such as
* <code>corp.example.com</code>.
*/
public String getName() {
return name;
}
/**
* The fully qualified name for the directory, such as
* <code>corp.example.com</code>.
* <p>
* <b>Constraints:</b><br/>
* <b>Pattern: </b>^([a-zA-Z0-9]+[\\.-])+([a-zA-Z0-9])+$<br/>
*
* @param name The fully qualified name for the directory, such as
* <code>corp.example.com</code>.
*/
public void setName(String name) {
this.name = name;
}
/**
* The fully qualified name for the directory, such as
* <code>corp.example.com</code>.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Pattern: </b>^([a-zA-Z0-9]+[\\.-])+([a-zA-Z0-9])+$<br/>
*
* @param name The fully qualified name for the directory, such as
* <code>corp.example.com</code>.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public CreateDirectoryRequest withName(String name) {
this.name = name;
return this;
}
/**
* The short name of the directory, such as <code>CORP</code>.
* <p>
* <b>Constraints:</b><br/>
* <b>Pattern: </b>^[^\\/:*?\"\<\>|.]+[^\\/:*?\"<>|]*$<br/>
*
* @return The short name of the directory, such as <code>CORP</code>.
*/
public String getShortName() {
return shortName;
}
/**
* The short name of the directory, such as <code>CORP</code>.
* <p>
* <b>Constraints:</b><br/>
* <b>Pattern: </b>^[^\\/:*?\"\<\>|.]+[^\\/:*?\"<>|]*$<br/>
*
* @param shortName The short name of the directory, such as <code>CORP</code>.
*/
public void setShortName(String shortName) {
this.shortName = shortName;
}
/**
* The short name of the directory, such as <code>CORP</code>.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Pattern: </b>^[^\\/:*?\"\<\>|.]+[^\\/:*?\"<>|]*$<br/>
*
* @param shortName The short name of the directory, such as <code>CORP</code>.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public CreateDirectoryRequest withShortName(String shortName) {
this.shortName = shortName;
return this;
}
/**
* The password for the directory administrator. The directory creation
* process creates a directory administrator account with the username
* <code>Administrator</code> and this password.
* <p>
* <b>Constraints:</b><br/>
* <b>Pattern: </b>(?=^.{8,64}$)((?=.*\d)(?=.*[A-Z])(?=.*[a-z])|(?=.*\d)(?=.*[^A-Za-z0-9])(?=.*[a-z])|(?=.*[^A-Za-z0-9])(?=.*[A-Z])(?=.*[a-z])|(?=.*\d)(?=.*[A-Z])(?=.*[^A-Za-z0-9]))^.*<br/>
*
* @return The password for the directory administrator. The directory creation
* process creates a directory administrator account with the username
* <code>Administrator</code> and this password.
*/
public String getPassword() {
return password;
}
/**
* The password for the directory administrator. The directory creation
* process creates a directory administrator account with the username
* <code>Administrator</code> and this password.
* <p>
* <b>Constraints:</b><br/>
* <b>Pattern: </b>(?=^.{8,64}$)((?=.*\d)(?=.*[A-Z])(?=.*[a-z])|(?=.*\d)(?=.*[^A-Za-z0-9])(?=.*[a-z])|(?=.*[^A-Za-z0-9])(?=.*[A-Z])(?=.*[a-z])|(?=.*\d)(?=.*[A-Z])(?=.*[^A-Za-z0-9]))^.*<br/>
*
* @param password The password for the directory administrator. The directory creation
* process creates a directory administrator account with the username
* <code>Administrator</code> and this password.
*/
public void setPassword(String password) {
this.password = password;
}
/**
* The password for the directory administrator. The directory creation
* process creates a directory administrator account with the username
* <code>Administrator</code> and this password.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Pattern: </b>(?=^.{8,64}$)((?=.*\d)(?=.*[A-Z])(?=.*[a-z])|(?=.*\d)(?=.*[^A-Za-z0-9])(?=.*[a-z])|(?=.*[^A-Za-z0-9])(?=.*[A-Z])(?=.*[a-z])|(?=.*\d)(?=.*[A-Z])(?=.*[^A-Za-z0-9]))^.*<br/>
*
* @param password The password for the directory administrator. The directory creation
* process creates a directory administrator account with the username
* <code>Administrator</code> and this password.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public CreateDirectoryRequest withPassword(String password) {
this.password = password;
return this;
}
/**
* A textual description for the directory.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>0 - 128<br/>
* <b>Pattern: </b>^([a-zA-Z0-9_])[\\a-zA-Z0-9_@#%*+=:?./!\s-]*$<br/>
*
* @return A textual description for the directory.
*/
public String getDescription() {
return description;
}
/**
* A textual description for the directory.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>0 - 128<br/>
* <b>Pattern: </b>^([a-zA-Z0-9_])[\\a-zA-Z0-9_@#%*+=:?./!\s-]*$<br/>
*
* @param description A textual description for the directory.
*/
public void setDescription(String description) {
this.description = description;
}
/**
* A textual description for the directory.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>0 - 128<br/>
* <b>Pattern: </b>^([a-zA-Z0-9_])[\\a-zA-Z0-9_@#%*+=:?./!\s-]*$<br/>
*
* @param description A textual description for the directory.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public CreateDirectoryRequest withDescription(String description) {
this.description = description;
return this;
}
/**
* The size of the directory.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>Small, Large
*
* @return The size of the directory.
*
* @see DirectorySize
*/
public String getSize() {
return size;
}
/**
* The size of the directory.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>Small, Large
*
* @param size The size of the directory.
*
* @see DirectorySize
*/
public void setSize(String size) {
this.size = size;
}
/**
* The size of the directory.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>Small, Large
*
* @param size The size of the directory.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*
* @see DirectorySize
*/
public CreateDirectoryRequest withSize(String size) {
this.size = size;
return this;
}
/**
* The size of the directory.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>Small, Large
*
* @param size The size of the directory.
*
* @see DirectorySize
*/
public void setSize(DirectorySize size) {
this.size = size.toString();
}
/**
* The size of the directory.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>Small, Large
*
* @param size The size of the directory.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*
* @see DirectorySize
*/
public CreateDirectoryRequest withSize(DirectorySize size) {
this.size = size.toString();
return this;
}
/**
* A <a>DirectoryVpcSettings</a> object that contains additional
* information for the operation.
*
* @return A <a>DirectoryVpcSettings</a> object that contains additional
* information for the operation.
*/
public DirectoryVpcSettings getVpcSettings() {
return vpcSettings;
}
/**
* A <a>DirectoryVpcSettings</a> object that contains additional
* information for the operation.
*
* @param vpcSettings A <a>DirectoryVpcSettings</a> object that contains additional
* information for the operation.
*/
public void setVpcSettings(DirectoryVpcSettings vpcSettings) {
this.vpcSettings = vpcSettings;
}
/**
* A <a>DirectoryVpcSettings</a> object that contains additional
* information for the operation.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param vpcSettings A <a>DirectoryVpcSettings</a> object that contains additional
* information for the operation.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public CreateDirectoryRequest withVpcSettings(DirectoryVpcSettings vpcSettings) {
this.vpcSettings = vpcSettings;
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getName() != null) sb.append("Name: " + getName() + ",");
if (getShortName() != null) sb.append("ShortName: " + getShortName() + ",");
if (getPassword() != null) sb.append("Password: " + getPassword() + ",");
if (getDescription() != null) sb.append("Description: " + getDescription() + ",");
if (getSize() != null) sb.append("Size: " + getSize() + ",");
if (getVpcSettings() != null) sb.append("VpcSettings: " + getVpcSettings() );
sb.append("}");
return sb.toString();
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode());
hashCode = prime * hashCode + ((getShortName() == null) ? 0 : getShortName().hashCode());
hashCode = prime * hashCode + ((getPassword() == null) ? 0 : getPassword().hashCode());
hashCode = prime * hashCode + ((getDescription() == null) ? 0 : getDescription().hashCode());
hashCode = prime * hashCode + ((getSize() == null) ? 0 : getSize().hashCode());
hashCode = prime * hashCode + ((getVpcSettings() == null) ? 0 : getVpcSettings().hashCode());
return hashCode;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (obj instanceof CreateDirectoryRequest == false) return false;
CreateDirectoryRequest other = (CreateDirectoryRequest)obj;
if (other.getName() == null ^ this.getName() == null) return false;
if (other.getName() != null && other.getName().equals(this.getName()) == false) return false;
if (other.getShortName() == null ^ this.getShortName() == null) return false;
if (other.getShortName() != null && other.getShortName().equals(this.getShortName()) == false) return false;
if (other.getPassword() == null ^ this.getPassword() == null) return false;
if (other.getPassword() != null && other.getPassword().equals(this.getPassword()) == false) return false;
if (other.getDescription() == null ^ this.getDescription() == null) return false;
if (other.getDescription() != null && other.getDescription().equals(this.getDescription()) == false) return false;
if (other.getSize() == null ^ this.getSize() == null) return false;
if (other.getSize() != null && other.getSize().equals(this.getSize()) == false) return false;
if (other.getVpcSettings() == null ^ this.getVpcSettings() == null) return false;
if (other.getVpcSettings() != null && other.getVpcSettings().equals(this.getVpcSettings()) == false) return false;
return true;
}
@Override
public CreateDirectoryRequest clone() {
return (CreateDirectoryRequest) super.clone();
}
}
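// A minimal usage sketch of the fluent "with*" setters defined above; the concrete values
// are hypothetical, and a DirectoryVpcSettings object would normally be attached via
// withVpcSettings(...) before invoking the CreateDirectory operation.
class CreateDirectoryRequestExample {
    public static void main(String[] args) {
        com.amazonaws.services.directory.model.CreateDirectoryRequest request =
                new com.amazonaws.services.directory.model.CreateDirectoryRequest()
                        .withName("corp.example.com")
                        .withShortName("CORP")
                        .withPassword("Str0ng!Passw0rd")
                        .withDescription("Example Simple AD directory")
                        .withSize("Small"); // String overload; "Small" or "Large" per the allowed values
        System.out.println(request);
    }
}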
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import org.apache.commons.logging.*;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.FSConstants;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.net.NetworkTopology;
import org.apache.hadoop.net.Node;
import org.apache.hadoop.net.NodeBase;
import java.util.*;
/** The class is responsible for choosing the desired number of targets
* for placing block replicas.
* The replica placement strategy is that if the writer is on a datanode,
* the 1st replica is placed on the local machine,
* otherwise a random datanode. The 2nd replica is placed on a datanode
* that is on a different rack. The 3rd replica is placed on a datanode
* which is on the same rack as the first replica.
*/
class ReplicationTargetChooser {
private final boolean considerLoad;
private NetworkTopology clusterMap;
private FSNamesystem fs;
ReplicationTargetChooser(boolean considerLoad, FSNamesystem fs,
NetworkTopology clusterMap) {
this.considerLoad = considerLoad;
this.fs = fs;
this.clusterMap = clusterMap;
}
private static class NotEnoughReplicasException extends Exception {
NotEnoughReplicasException(String msg) {
super(msg);
}
}
/**
   * choose <i>numOfReplicas</i> data nodes for <i>writer</i> to replicate
   * a block with size <i>blocksize</i>.
   * If not enough nodes are available, return as many as we can.
*
* @param numOfReplicas: number of replicas wanted.
* @param writer: the writer's machine, null if not in the cluster.
* @param excludedNodes: datanodes that should not be considered targets.
* @param blocksize: size of the data to be written.
* @return array of DatanodeDescriptor instances chosen as targets
* and sorted as a pipeline.
*/
DatanodeDescriptor[] chooseTarget(int numOfReplicas,
DatanodeDescriptor writer,
HashMap<Node, Node> excludedNodes,
long blocksize) {
if (excludedNodes == null) {
excludedNodes = new HashMap<Node, Node>();
}
return chooseTarget(numOfReplicas, writer,
new ArrayList<DatanodeDescriptor>(), excludedNodes, blocksize);
}
/**
   * choose <i>numOfReplicas</i> data nodes for <i>writer</i>
   * to re-replicate a block with size <i>blocksize</i>.
   * If not enough nodes are available, return as many as we can.
*
* @param numOfReplicas: additional number of replicas wanted.
* @param writer: the writer's machine, null if not in the cluster.
* @param choosenNodes: datanodes that have been chosen as targets.
* @param excludedNodes: datanodes that should not be considered targets.
* @param blocksize: size of the data to be written.
* @return array of DatanodeDescriptor instances chosen as target
* and sorted as a pipeline.
*/
DatanodeDescriptor[] chooseTarget(int numOfReplicas,
DatanodeDescriptor writer,
List<DatanodeDescriptor> choosenNodes,
HashMap<Node, Node> excludedNodes,
long blocksize) {
if (numOfReplicas == 0 || clusterMap.getNumOfLeaves()==0) {
return new DatanodeDescriptor[0];
}
if (excludedNodes == null) {
excludedNodes = new HashMap<Node, Node>();
}
int clusterSize = clusterMap.getNumOfLeaves();
int totalNumOfReplicas = choosenNodes.size()+numOfReplicas;
if (totalNumOfReplicas > clusterSize) {
numOfReplicas -= (totalNumOfReplicas-clusterSize);
totalNumOfReplicas = clusterSize;
}
int maxNodesPerRack =
(totalNumOfReplicas-1)/clusterMap.getNumOfRacks()+2;
List<DatanodeDescriptor> results =
new ArrayList<DatanodeDescriptor>(choosenNodes);
for (Node node:choosenNodes) {
excludedNodes.put(node, node);
}
if (!clusterMap.contains(writer)) {
writer=null;
}
DatanodeDescriptor localNode = chooseTarget(numOfReplicas, writer,
excludedNodes, blocksize, maxNodesPerRack, results);
results.removeAll(choosenNodes);
// sorting nodes to form a pipeline
return getPipeline((writer==null)?localNode:writer,
results.toArray(new DatanodeDescriptor[results.size()]));
}
/* choose <i>numOfReplicas</i> from all data nodes */
private DatanodeDescriptor chooseTarget(int numOfReplicas,
DatanodeDescriptor writer,
HashMap<Node, Node> excludedNodes,
long blocksize,
int maxNodesPerRack,
List<DatanodeDescriptor> results) {
if (numOfReplicas == 0 || clusterMap.getNumOfLeaves()==0) {
return writer;
}
int numOfResults = results.size();
boolean newBlock = (numOfResults==0);
if (writer == null && !newBlock) {
writer = (DatanodeDescriptor)results.get(0);
}
try {
switch(numOfResults) {
case 0:
writer = chooseLocalNode(writer, excludedNodes,
blocksize, maxNodesPerRack, results);
if (--numOfReplicas == 0) {
break;
}
case 1:
chooseRemoteRack(1, results.get(0), excludedNodes,
blocksize, maxNodesPerRack, results);
if (--numOfReplicas == 0) {
break;
}
case 2:
if (clusterMap.isOnSameRack(results.get(0), results.get(1))) {
chooseRemoteRack(1, results.get(0), excludedNodes,
blocksize, maxNodesPerRack, results);
} else if (newBlock){
chooseLocalRack(results.get(1), excludedNodes, blocksize,
maxNodesPerRack, results);
} else {
chooseLocalRack(writer, excludedNodes, blocksize,
maxNodesPerRack, results);
}
if (--numOfReplicas == 0) {
break;
}
default:
chooseRandom(numOfReplicas, NodeBase.ROOT, excludedNodes,
blocksize, maxNodesPerRack, results);
}
} catch (NotEnoughReplicasException e) {
FSNamesystem.LOG.warn("Not able to place enough replicas, still in need of "
+ numOfReplicas);
}
return writer;
}
/* choose <i>localMachine</i> as the target.
* if <i>localMachine</i> is not available,
* choose a node on the same rack
* @return the chosen node
*/
private DatanodeDescriptor chooseLocalNode(
DatanodeDescriptor localMachine,
HashMap<Node, Node> excludedNodes,
long blocksize,
int maxNodesPerRack,
List<DatanodeDescriptor> results)
throws NotEnoughReplicasException {
// if no local machine, randomly choose one node
if (localMachine == null)
return chooseRandom(NodeBase.ROOT, excludedNodes,
blocksize, maxNodesPerRack, results);
// otherwise try local machine first
Node oldNode = excludedNodes.put(localMachine, localMachine);
if (oldNode == null) { // was not in the excluded list
if (isGoodTarget(localMachine, blocksize,
maxNodesPerRack, false, results)) {
results.add(localMachine);
return localMachine;
}
}
// try a node on local rack
return chooseLocalRack(localMachine, excludedNodes,
blocksize, maxNodesPerRack, results);
}
/* choose one node from the rack that <i>localMachine</i> is on.
* if no such node is available, choose one node from the rack where
* a second replica is on.
* if still no such node is available, choose a random node
* in the cluster.
* @return the chosen node
*/
private DatanodeDescriptor chooseLocalRack(
DatanodeDescriptor localMachine,
HashMap<Node, Node> excludedNodes,
long blocksize,
int maxNodesPerRack,
List<DatanodeDescriptor> results)
throws NotEnoughReplicasException {
// no local machine, so choose a random machine
if (localMachine == null) {
return chooseRandom(NodeBase.ROOT, excludedNodes,
blocksize, maxNodesPerRack, results);
}
// choose one from the local rack
try {
return chooseRandom(
localMachine.getNetworkLocation(),
excludedNodes, blocksize, maxNodesPerRack, results);
} catch (NotEnoughReplicasException e1) {
// find the second replica
DatanodeDescriptor newLocal=null;
for(Iterator<DatanodeDescriptor> iter=results.iterator();
iter.hasNext();) {
DatanodeDescriptor nextNode = iter.next();
if (nextNode != localMachine) {
newLocal = nextNode;
break;
}
}
if (newLocal != null) {
try {
return chooseRandom(
newLocal.getNetworkLocation(),
excludedNodes, blocksize, maxNodesPerRack, results);
} catch(NotEnoughReplicasException e2) {
//otherwise randomly choose one from the network
return chooseRandom(NodeBase.ROOT, excludedNodes,
blocksize, maxNodesPerRack, results);
}
} else {
//otherwise randomly choose one from the network
return chooseRandom(NodeBase.ROOT, excludedNodes,
blocksize, maxNodesPerRack, results);
}
}
}
/* choose <i>numOfReplicas</i> nodes from the racks
* that <i>localMachine</i> is NOT on.
* if not enough nodes are available, choose the remaining ones
* from the local rack
*/
private void chooseRemoteRack(int numOfReplicas,
DatanodeDescriptor localMachine,
HashMap<Node, Node> excludedNodes,
long blocksize,
int maxReplicasPerRack,
List<DatanodeDescriptor> results)
throws NotEnoughReplicasException {
int oldNumOfReplicas = results.size();
// randomly choose one node from remote racks
try {
chooseRandom(numOfReplicas, "~"+localMachine.getNetworkLocation(),
excludedNodes, blocksize, maxReplicasPerRack, results);
} catch (NotEnoughReplicasException e) {
chooseRandom(numOfReplicas-(results.size()-oldNumOfReplicas),
localMachine.getNetworkLocation(), excludedNodes, blocksize,
maxReplicasPerRack, results);
}
}
/* Randomly choose one target from <i>nodes</i>.
* @return the chosen node
*/
private DatanodeDescriptor chooseRandom(
String nodes,
HashMap<Node, Node> excludedNodes,
long blocksize,
int maxNodesPerRack,
List<DatanodeDescriptor> results)
throws NotEnoughReplicasException {
int numOfAvailableNodes =
clusterMap.countNumOfAvailableNodes(nodes, excludedNodes.keySet());
while(numOfAvailableNodes > 0) {
DatanodeDescriptor choosenNode =
(DatanodeDescriptor)(clusterMap.chooseRandom(nodes));
Node oldNode = excludedNodes.put(choosenNode, choosenNode);
      if (oldNode == null) { // choosenNode was not in the excluded list
numOfAvailableNodes--;
if (isGoodTarget(choosenNode, blocksize, maxNodesPerRack, results)) {
results.add(choosenNode);
return choosenNode;
}
}
}
throw new NotEnoughReplicasException(
"Not able to place enough replicas");
}
/* Randomly choose <i>numOfReplicas</i> targets from <i>nodes</i>.
*/
private void chooseRandom(int numOfReplicas,
String nodes,
HashMap<Node, Node> excludedNodes,
long blocksize,
int maxNodesPerRack,
List<DatanodeDescriptor> results)
throws NotEnoughReplicasException {
int numOfAvailableNodes =
clusterMap.countNumOfAvailableNodes(nodes, excludedNodes.keySet());
while(numOfReplicas > 0 && numOfAvailableNodes > 0) {
DatanodeDescriptor choosenNode =
(DatanodeDescriptor)(clusterMap.chooseRandom(nodes));
Node oldNode = excludedNodes.put(choosenNode, choosenNode);
if (oldNode == null) {
numOfAvailableNodes--;
if (isGoodTarget(choosenNode, blocksize, maxNodesPerRack, results)) {
numOfReplicas--;
results.add(choosenNode);
}
}
}
if (numOfReplicas>0) {
throw new NotEnoughReplicasException(
"Not able to place enough replicas");
}
}
/* judge if a node is a good target.
* return true if <i>node</i> has enough space,
* does not have too much load, and the rack does not have too many nodes
*/
private boolean isGoodTarget(DatanodeDescriptor node,
long blockSize, int maxTargetPerLoc,
List<DatanodeDescriptor> results) {
return isGoodTarget(node, blockSize, maxTargetPerLoc,
this.considerLoad, results);
}
private boolean isGoodTarget(DatanodeDescriptor node,
long blockSize, int maxTargetPerLoc,
boolean considerLoad,
List<DatanodeDescriptor> results) {
Log logr = FSNamesystem.LOG;
    // check if the node is (being) decommissioned
if (node.isDecommissionInProgress() || node.isDecommissioned()) {
logr.debug("Node "+NodeBase.getPath(node)+
" is not chosen because the node is (being) decommissioned");
return false;
}
long remaining = node.getRemaining() -
(node.getBlocksScheduled() * blockSize);
// check the remaining capacity of the target machine
if (blockSize* FSConstants.MIN_BLOCKS_FOR_WRITE>remaining) {
logr.debug("Node "+NodeBase.getPath(node)+
" is not chosen because the node does not have enough space");
return false;
}
// check the communication traffic of the target machine
if (considerLoad) {
double avgLoad = 0;
int size = clusterMap.getNumOfLeaves();
if (size != 0) {
avgLoad = (double)fs.getTotalLoad()/size;
}
if (node.getXceiverCount() > (2.0 * avgLoad)) {
logr.debug("Node "+NodeBase.getPath(node)+
" is not chosen because the node is too busy");
return false;
}
}
// check if the target rack has chosen too many nodes
String rackname = node.getNetworkLocation();
int counter=1;
for(Iterator<DatanodeDescriptor> iter = results.iterator();
iter.hasNext();) {
Node result = iter.next();
if (rackname.equals(result.getNetworkLocation())) {
counter++;
}
}
if (counter>maxTargetPerLoc) {
logr.debug("Node "+NodeBase.getPath(node)+
" is not chosen because the rack has too many chosen nodes");
return false;
}
return true;
}
/* Return a pipeline of nodes.
   * The pipeline is formed by finding a shortest path that
   * starts from the writer and traverses all <i>nodes</i>.
   * This is basically a traveling salesman problem.
*/
private DatanodeDescriptor[] getPipeline(
DatanodeDescriptor writer,
DatanodeDescriptor[] nodes) {
if (nodes.length==0) return nodes;
synchronized(clusterMap) {
int index=0;
if (writer == null || !clusterMap.contains(writer)) {
writer = nodes[0];
}
for(;index<nodes.length; index++) {
DatanodeDescriptor shortestNode = nodes[index];
int shortestDistance = clusterMap.getDistance(writer, shortestNode);
int shortestIndex = index;
for(int i=index+1; i<nodes.length; i++) {
DatanodeDescriptor currentNode = nodes[i];
int currentDistance = clusterMap.getDistance(writer, currentNode);
if (shortestDistance>currentDistance) {
shortestDistance = currentDistance;
shortestNode = currentNode;
shortestIndex = i;
}
}
//switch position index & shortestIndex
if (index != shortestIndex) {
nodes[shortestIndex] = nodes[index];
nodes[index] = shortestNode;
}
writer = shortestNode;
}
}
return nodes;
}
/**
* Verify that the block is replicated on at least 2 different racks
* if there is more than one rack in the system.
*
* @param lBlk block with locations
* @param cluster
   * @return 1 if the block must be replicated on an additional rack,
* or 0 if the number of racks is sufficient.
*/
public static int verifyBlockPlacement(LocatedBlock lBlk,
short replication,
NetworkTopology cluster) {
int numRacks = verifyBlockPlacement(lBlk, Math.min(2,replication), cluster);
return numRacks < 0 ? 0 : numRacks;
}
/**
* Verify that the block is replicated on at least minRacks different racks
* if there is more than minRacks rack in the system.
*
* @param lBlk block with locations
* @param minRacks number of racks the block should be replicated to
* @param cluster
* @return the difference between the required and the actual number of racks
* the block is replicated to.
*/
public static int verifyBlockPlacement(LocatedBlock lBlk,
int minRacks,
NetworkTopology cluster) {
DatanodeInfo[] locs = lBlk.getLocations();
if (locs == null)
locs = new DatanodeInfo[0];
int numRacks = cluster.getNumOfRacks();
if(numRacks <= 1) // only one rack
return 0;
minRacks = Math.min(minRacks, numRacks);
// 1. Check that all locations are different.
// 2. Count locations on different racks.
Set<String> racks = new TreeSet<String>();
for (DatanodeInfo dn : locs)
racks.add(dn.getNetworkLocation());
return minRacks - racks.size();
}
} //end of Replicator
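// A minimal standalone sketch of the greedy nearest-neighbor ordering used by getPipeline()
// above: starting from the writer, repeatedly pick the closest remaining node and make it the
// next hop. The node indices and distance table below are hypothetical; the real code uses
// NetworkTopology.getDistance() on DatanodeDescriptors.
class PipelineOrderExample {
    public static void main(String[] args) {
        int writer = 0;                      // index of the writer in the distance matrix
        int[] nodes = {1, 2, 3};             // chosen targets, initially unordered
        int[][] dist = {                     // symmetric distances, writer is row/column 0
                {0, 4, 2, 6},
                {4, 0, 3, 5},
                {2, 3, 0, 4},
                {6, 5, 4, 0}};
        int current = writer;
        for (int index = 0; index < nodes.length; index++) {
            int shortestIndex = index;
            for (int i = index + 1; i < nodes.length; i++) {
                if (dist[current][nodes[i]] < dist[current][nodes[shortestIndex]]) {
                    shortestIndex = i;
                }
            }
            // swap the closest remaining node into position 'index' and advance from it
            int tmp = nodes[index];
            nodes[index] = nodes[shortestIndex];
            nodes[shortestIndex] = tmp;
            current = nodes[index];
        }
        System.out.println(java.util.Arrays.toString(nodes)); // pipeline order from the writer
    }
}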
|
|
/*
* Copyright (c) 2015, The Dattack team (http://www.dattack.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dattack.dbtools.drules.engine;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.Callable;
import org.apache.commons.configuration.AbstractConfiguration;
import org.apache.commons.configuration.CompositeConfiguration;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.dattack.dbtools.drules.beans.ForEachBean;
import com.dattack.dbtools.drules.beans.Identifier;
import com.dattack.dbtools.drules.beans.Identifier.IdentifierBuilder;
import com.dattack.dbtools.drules.beans.SourceBean;
import com.dattack.dbtools.drules.beans.SourceCommandBean;
import com.dattack.dbtools.drules.beans.SourceCommandBeanVisitor;
import com.dattack.dbtools.drules.beans.SqlQueryBean;
import com.dattack.dbtools.drules.exceptions.DrulesNestableException;
import com.dattack.jtoolbox.commons.configuration.ConfigurationUtil;
import com.dattack.jtoolbox.jdbc.JNDIDataSource;
/**
* @author cvarela
* @since 0.1
*/
final class SourceExecutor implements Callable<SourceResult> {
private static final Logger LOGGER = LoggerFactory.getLogger(SourceExecutor.class);
private final SourceBean sourceBean;
private final Configuration initialConfiguration;
private class DefaultSourceCommandBeanVisitor implements SourceCommandBeanVisitor {
private final AbstractConfiguration configuration;
private final Connection connection;
private final Map<Identifier, ResultSet> resultSetMap;
private ResultSet lastResultSet;
DefaultSourceCommandBeanVisitor(final Connection connection) {
configuration = new CompositeConfiguration(ThreadContext.getInstance().getConfiguration());
this.connection = connection;
this.resultSetMap = new HashMap<>();
}
private void executeForEachLoop(final ForEachBean bean) {
for (final SourceCommandBean child : bean.getCommandList()) {
child.accept(this);
}
}
private void forEachRef(final ForEachBean bean) {
if (StringUtils.isBlank(bean.getRef())) {
throw new NullPointerException("Invalid foreach loop (missing 'ref' value)");
}
final Identifier identifier = new IdentifierBuilder().withValue(bean.getRef()).build();
try {
final ResultSet resultSet = resultSetMap.get(identifier);
if (resultSet == null) {
throw new NullPointerException(String.format("Missing ResultSet named '%s'", bean.getRef()));
}
do {
populateConfigurationFromResultSet(identifier, resultSet);
executeForEachLoop(bean);
} while (resultSet.next());
} catch (final SQLException e) {
LOGGER.error(e.getMessage(), e);
}
}
private void forEachValue(final ForEachBean bean) {
for (final String value : bean.getValuesList()) {
configuration.setProperty(bean.getKey(), value);
executeForEachLoop(bean);
}
}
private ResultSet getLastResultSet() {
return lastResultSet;
}
private void populateConfigurationFromFirstRows() throws SQLException {
for (final Entry<Identifier, ResultSet> entry : resultSetMap.entrySet()) {
populateConfigurationFromFirstRows(entry.getKey(), entry.getValue());
}
}
private void populateConfigurationFromFirstRows(final Identifier identifier, final ResultSet resultSet)
throws SQLException {
if (resultSet.isBeforeFirst() && resultSet.next() && identifier != null) {
for (int columnIndex = 1; columnIndex <= resultSet.getMetaData().getColumnCount(); columnIndex++) {
final String columnName = resultSet.getMetaData().getColumnLabel(columnIndex);
final Object value = resultSet.getObject(columnIndex);
final String key = identifier.append(columnName).getValue();
configuration.setProperty(key, value);
}
}
}
private void populateConfigurationFromResultSet(final Identifier identifier, final ResultSet resultSet)
throws SQLException {
if (resultSet.isBeforeFirst()) {
resultSet.next();
}
if (identifier != null) {
for (int columnIndex = 1; columnIndex <= resultSet.getMetaData().getColumnCount(); columnIndex++) {
final String columnName = resultSet.getMetaData().getColumnLabel(columnIndex);
final Object value = resultSet.getObject(columnIndex);
final String key = identifier.append(columnName).getValue();
configuration.setProperty(key, value);
}
}
}
private void setLastValues(final SqlQueryBean sqlQueryBean, final ResultSet resultSet) {
this.lastResultSet = resultSet;
if (resultSet != null) {
resultSetMap.put(sqlQueryBean.getId(), resultSet);
}
}
@Override
public void visit(final ForEachBean bean) {
if (StringUtils.isNotBlank(bean.getRef())) {
// check REF construction
forEachRef(bean);
} else {
forEachValue(bean);
}
}
@Override
public void visit(final SqlQueryBean bean) {
Statement statement = null;
try {
populateConfigurationFromFirstRows();
final String interpolatedSql = ConfigurationUtil.interpolate(bean.getSql(), configuration);
statement = connection.createStatement();
final ResultSet resultSet = executeStatement(statement, interpolatedSql);
setLastValues(bean, resultSet);
} catch (final SQLException e) {
throw new RuntimeException(e);
}
}
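/*
* Hedged illustration (method not present in the original class): shows how a property
* placed in this visitor's configuration (for example by a values-based foreach loop)
* is expanded into the SQL text before execution, mirroring the interpolation step in
* visit(SqlQueryBean) above. The template and the 'country' key are assumptions of the
* sketch, and it assumes the usual commons-configuration ${...} placeholder syntax.
*/
private String interpolationExample() {
configuration.setProperty("country", "ES");
// Expected to yield: SELECT * FROM customers WHERE country = 'ES'
return ConfigurationUtil.interpolate("SELECT * FROM customers WHERE country = '${country}'", configuration);
}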
}
private static Connection getConnection(final String jndiName) throws SQLException {
return new JNDIDataSource(jndiName).getConnection();
}
SourceExecutor(final SourceBean sourceBean, final Configuration initialConfiguration) {
this.sourceBean = sourceBean;
this.initialConfiguration = initialConfiguration;
}
@Override
public SourceResult call() throws DrulesNestableException {
try {
ThreadContext.getInstance().setInitialConfiguration(initialConfiguration);
final String jndiName = getInterpolatedJndiName();
LOGGER.info("Configuring datasource with JNDI name: '{}'", jndiName);
final Connection connection = getConnection(jndiName);
final DefaultSourceCommandBeanVisitor visitor = new DefaultSourceCommandBeanVisitor(connection);
for (final Iterator<SourceCommandBean> it = sourceBean.getCommandList().iterator(); it.hasNext();) {
final SourceCommandBean command = it.next();
command.accept(visitor);
}
return new SourceResult(sourceBean.getId(), connection, visitor.getLastResultSet());
} catch (final SQLException e) {
throw new DrulesNestableException(e);
}
}
private ResultSet executeStatement(final Statement statement, final String sql) throws SQLException {
LOGGER.info("Executing SQL sentence [{}@{}]: {}", Thread.currentThread().getName(), sourceBean.getId(), sql);
final boolean isResultSet = statement.execute(sql);
if (isResultSet) {
return statement.getResultSet();
}
return null;
}
private String getInterpolatedJndiName() {
return ThreadContext.getInstance().interpolate(sourceBean.getJndi());
}
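/*
* Hedged usage sketch (not part of the original class): the drules engine is assumed to run
* each source on its own thread; this shows one way to submit a SourceExecutor to a
* single-thread executor and wait for its SourceResult. The method name is an assumption.
*/
static SourceResult callExample(final SourceBean bean, final Configuration configuration) throws Exception {
final java.util.concurrent.ExecutorService executor = java.util.concurrent.Executors.newSingleThreadExecutor();
try {
return executor.submit(new SourceExecutor(bean, configuration)).get();
} finally {
executor.shutdown();
}
}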
}
|
|
/*
* Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.xml.internal.ws.message.stream;
import com.sun.istack.internal.NotNull;
import com.sun.istack.internal.Nullable;
import com.sun.istack.internal.XMLStreamReaderToContentHandler;
import com.sun.xml.internal.bind.api.Bridge;
import com.sun.xml.internal.stream.buffer.MutableXMLStreamBuffer;
import com.sun.xml.internal.stream.buffer.XMLStreamBuffer;
import com.sun.xml.internal.stream.buffer.XMLStreamBufferMark;
import com.sun.xml.internal.stream.buffer.stax.StreamReaderBufferCreator;
import com.sun.xml.internal.ws.api.SOAPVersion;
import com.sun.xml.internal.ws.api.message.AttachmentSet;
import com.sun.xml.internal.ws.api.message.Header;
import com.sun.xml.internal.ws.api.message.HeaderList;
import com.sun.xml.internal.ws.api.message.Message;
import com.sun.xml.internal.ws.api.message.MessageHeaders;
import com.sun.xml.internal.ws.api.message.StreamingSOAP;
import com.sun.xml.internal.ws.api.streaming.XMLStreamReaderFactory;
import com.sun.xml.internal.ws.encoding.TagInfoset;
import com.sun.xml.internal.ws.message.AbstractMessageImpl;
import com.sun.xml.internal.ws.message.AttachmentUnmarshallerImpl;
import com.sun.xml.internal.ws.protocol.soap.VersionMismatchException;
import com.sun.xml.internal.ws.spi.db.XMLBridge;
import com.sun.xml.internal.ws.streaming.XMLStreamReaderUtil;
import com.sun.xml.internal.ws.util.xml.DummyLocation;
import com.sun.xml.internal.ws.util.xml.StAXSource;
import com.sun.xml.internal.ws.util.xml.XMLReaderComposite;
import com.sun.xml.internal.ws.util.xml.XMLStreamReaderToXMLStreamWriter;
import com.sun.xml.internal.ws.util.xml.XMLReaderComposite.ElemInfo;
import org.xml.sax.ContentHandler;
import org.xml.sax.ErrorHandler;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.xml.sax.helpers.NamespaceSupport;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import javax.xml.stream.*;
import static javax.xml.stream.XMLStreamConstants.START_DOCUMENT;
import static javax.xml.stream.XMLStreamConstants.START_ELEMENT;
import static javax.xml.stream.XMLStreamConstants.END_ELEMENT;
import javax.xml.transform.Source;
import javax.xml.ws.WebServiceException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* {@link Message} implementation backed by {@link XMLStreamReader}.
*
* TODO: we need another message class that keeps {@link XMLStreamReader} that points
* at the start of the envelope element.
*/
public class StreamMessage extends AbstractMessageImpl implements StreamingSOAP {
/**
* The reader will be positioned at
* the first child of the SOAP body
*/
private @NotNull XMLStreamReader reader;
// lazily created
private @Nullable MessageHeaders headers;
/**
* Because the StreamMessage leaves out the whitespace around the payload
* when it is instantiated, the space characters between the soap:Body opening tag and
* the payload are stored in this field to be reused later (necessary for message security).
* Populated after StreamMessage creation.
*/
private String bodyPrologue = null;
/**
* Instantiated after writing the message to an XMLStreamWriter.
*/
private String bodyEpilogue = null;
private String payloadLocalName;
private String payloadNamespaceURI;
/**
* Used only for debugging. This records where the message was consumed.
*/
private Throwable consumedAt;
private XMLStreamReader envelopeReader;
public StreamMessage(SOAPVersion v) {
super(v);
payloadLocalName = null;
payloadNamespaceURI = null;
}
public StreamMessage(SOAPVersion v, @NotNull XMLStreamReader envelope, @NotNull AttachmentSet attachments) {
super(v);
envelopeReader = envelope;
attachmentSet = attachments;
}
public XMLStreamReader readEnvelope() {
if (envelopeReader == null) {
List<XMLStreamReader> hReaders = new java.util.ArrayList<XMLStreamReader>();
ElemInfo envElem = new ElemInfo(envelopeTag, null);
ElemInfo hdrElem = (headerTag != null) ? new ElemInfo(headerTag, envElem) : null;
ElemInfo bdyElem = new ElemInfo(bodyTag, envElem);
for (Header h : getHeaders().asList()) {
try {
hReaders.add(h.readHeader());
} catch (XMLStreamException e) {
throw new RuntimeException(e);
}
}
XMLStreamReader soapHeader = (hdrElem != null) ? new XMLReaderComposite(hdrElem, hReaders.toArray(new XMLStreamReader[hReaders.size()])) : null;
XMLStreamReader[] payload = {readPayload()};
XMLStreamReader soapBody = new XMLReaderComposite(bdyElem, payload);
XMLStreamReader[] soapContent = (soapHeader != null) ? new XMLStreamReader[]{soapHeader, soapBody} : new XMLStreamReader[]{soapBody};
return new XMLReaderComposite(envElem, soapContent);
}
return envelopeReader;
}
/**
* Creates a {@link StreamMessage} from a {@link XMLStreamReader}
* that points at the start element of the payload, and headers.
*
* <p>
* This method creates a {@link Message} from a payload.
*
* @param headers
* If null, it means no headers. If non-null,
* it will be owned by this message.
* @param reader
* points at the start element/document of the payload (or the end element of the <s:Body>
* if there's no payload)
*/
public StreamMessage(@Nullable MessageHeaders headers, @NotNull AttachmentSet attachmentSet, @NotNull XMLStreamReader reader, @NotNull SOAPVersion soapVersion) {
super(soapVersion);
init(headers, attachmentSet, reader, soapVersion);
}
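/**
* Hedged usage sketch (helper not present in the original class): builds a StreamMessage
* from a plain payload XML string using the standard StAX XMLInputFactory. The reader is
* handed over at START_DOCUMENT, which init(...) below handles by skipping ahead to the
* payload's start element. A null MessageHeaders means "no headers"; SOAP 1.1 and the
* supplied AttachmentSet are assumptions of this sketch.
*/
private static StreamMessage fromPayloadExample(String payloadXml, AttachmentSet attachments) throws XMLStreamException {
XMLStreamReader payloadReader = XMLInputFactory.newInstance()
.createXMLStreamReader(new java.io.StringReader(payloadXml));
return new StreamMessage((MessageHeaders) null, attachments, payloadReader, SOAPVersion.SOAP_11);
}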
private void init(@Nullable MessageHeaders headers, @NotNull AttachmentSet attachmentSet, @NotNull XMLStreamReader reader, @NotNull SOAPVersion soapVersion) {
this.headers = headers;
this.attachmentSet = attachmentSet;
this.reader = reader;
if(reader.getEventType()== START_DOCUMENT)
XMLStreamReaderUtil.nextElementContent(reader);
//if the reader is pointing to the end element </soapenv:Body> then it is an empty message
// or has no payload
if(reader.getEventType() == XMLStreamConstants.END_ELEMENT){
String body = reader.getLocalName();
String nsUri = reader.getNamespaceURI();
assert body != null;
assert nsUri != null;
//if it is not soapenv:Body then throw an exception: we received a malformed stream
if(body.equals("Body") && nsUri.equals(soapVersion.nsUri)){
this.payloadLocalName = null;
this.payloadNamespaceURI = null;
}else{ //TODO: i18n and also we should be throwing a better message than this
throw new WebServiceException("Malformed stream: {"+nsUri+"}"+body);
}
}else{
this.payloadLocalName = reader.getLocalName();
this.payloadNamespaceURI = reader.getNamespaceURI();
}
// use the default infoset representation for headers
int base = soapVersion.ordinal()*3;
this.envelopeTag = DEFAULT_TAGS.get(base);
this.headerTag = DEFAULT_TAGS.get(base+1);
this.bodyTag = DEFAULT_TAGS.get(base+2);
}
/**
* Creates a {@link StreamMessage} from a {@link XMLStreamReader}
* and the complete infoset of the SOAP envelope.
*
* <p>
* See {@link #StreamMessage(MessageHeaders, AttachmentSet, XMLStreamReader, SOAPVersion)} for
* the description of the basic parameters.
*
* @param headerTag
* Null if the message didn't have a header tag.
*
*/
public StreamMessage(@NotNull TagInfoset envelopeTag, @Nullable TagInfoset headerTag, @NotNull AttachmentSet attachmentSet, @Nullable MessageHeaders headers, @NotNull TagInfoset bodyTag, @NotNull XMLStreamReader reader, @NotNull SOAPVersion soapVersion) {
this(envelopeTag, headerTag, attachmentSet, headers, null, bodyTag, null, reader, soapVersion);
}
public StreamMessage(@NotNull TagInfoset envelopeTag, @Nullable TagInfoset headerTag, @NotNull AttachmentSet attachmentSet, @Nullable MessageHeaders headers, @Nullable String bodyPrologue, @NotNull TagInfoset bodyTag, @Nullable String bodyEpilogue, @NotNull XMLStreamReader reader, @NotNull SOAPVersion soapVersion) {
super(soapVersion);
init(envelopeTag, headerTag, attachmentSet, headers, bodyPrologue, bodyTag, bodyEpilogue, reader, soapVersion);
}
private void init(@NotNull TagInfoset envelopeTag, @Nullable TagInfoset headerTag, @NotNull AttachmentSet attachmentSet, @Nullable MessageHeaders headers, @Nullable String bodyPrologue, @NotNull TagInfoset bodyTag, @Nullable String bodyEpilogue, @NotNull XMLStreamReader reader, @NotNull SOAPVersion soapVersion) {
init(headers,attachmentSet,reader,soapVersion);
if(envelopeTag == null ) {
throw new IllegalArgumentException("EnvelopeTag TagInfoset cannot be null");
}
if(bodyTag == null ) {
throw new IllegalArgumentException("BodyTag TagInfoset cannot be null");
}
this.envelopeTag = envelopeTag;
this.headerTag = headerTag;
this.bodyTag = bodyTag;
this.bodyPrologue = bodyPrologue;
this.bodyEpilogue = bodyEpilogue;
}
public boolean hasHeaders() {
if ( envelopeReader != null ) readEnvelope(this);
return headers!=null && headers.hasHeaders();
}
public MessageHeaders getHeaders() {
if ( envelopeReader != null ) readEnvelope(this);
if (headers == null) {
headers = new HeaderList(getSOAPVersion());
}
return headers;
}
public String getPayloadLocalPart() {
if ( envelopeReader != null ) readEnvelope(this);
return payloadLocalName;
}
public String getPayloadNamespaceURI() {
if ( envelopeReader != null ) readEnvelope(this);
return payloadNamespaceURI;
}
public boolean hasPayload() {
if ( envelopeReader != null ) readEnvelope(this);
return payloadLocalName!=null;
}
public Source readPayloadAsSource() {
if(hasPayload()) {
assert unconsumed();
return new StAXSource(reader, true, getInscopeNamespaces());
} else
return null;
}
/**
* There is no way to enumerate in-scope namespaces for an XMLStreamReader. That means
* namespaces declared on the envelope and body tags need to be computed using their
* {@link TagInfoset}s.
*
* @return array of the even length of the form { prefix0, uri0, prefix1, uri1, ... }
*/
private String[] getInscopeNamespaces() {
NamespaceSupport nss = new NamespaceSupport();
nss.pushContext();
for(int i=0; i < envelopeTag.ns.length; i+=2) {
nss.declarePrefix(envelopeTag.ns[i], envelopeTag.ns[i+1]);
}
nss.pushContext();
for(int i=0; i < bodyTag.ns.length; i+=2) {
nss.declarePrefix(bodyTag.ns[i], bodyTag.ns[i+1]);
}
List<String> inscope = new ArrayList<String>();
for( Enumeration en = nss.getPrefixes(); en.hasMoreElements(); ) {
String prefix = (String)en.nextElement();
inscope.add(prefix);
inscope.add(nss.getURI(prefix));
}
return inscope.toArray(new String[inscope.size()]);
}
public Object readPayloadAsJAXB(Unmarshaller unmarshaller) throws JAXBException {
if(!hasPayload())
return null;
assert unconsumed();
// TODO: How can the unmarshaller process this as a fragment?
if(hasAttachments())
unmarshaller.setAttachmentUnmarshaller(new AttachmentUnmarshallerImpl(getAttachments()));
try {
return unmarshaller.unmarshal(reader);
} finally{
unmarshaller.setAttachmentUnmarshaller(null);
XMLStreamReaderUtil.readRest(reader);
XMLStreamReaderUtil.close(reader);
XMLStreamReaderFactory.recycle(reader);
}
}
/** @deprecated */
public <T> T readPayloadAsJAXB(Bridge<T> bridge) throws JAXBException {
if(!hasPayload())
return null;
assert unconsumed();
T r = bridge.unmarshal(reader,
hasAttachments() ? new AttachmentUnmarshallerImpl(getAttachments()) : null);
XMLStreamReaderUtil.readRest(reader);
XMLStreamReaderUtil.close(reader);
XMLStreamReaderFactory.recycle(reader);
return r;
}
public <T> T readPayloadAsJAXB(XMLBridge<T> bridge) throws JAXBException {
if(!hasPayload())
return null;
assert unconsumed();
T r = bridge.unmarshal(reader,
hasAttachments() ? new AttachmentUnmarshallerImpl(getAttachments()) : null);
XMLStreamReaderUtil.readRest(reader);
XMLStreamReaderUtil.close(reader);
XMLStreamReaderFactory.recycle(reader);
return r;
}
@Override
public void consume() {
assert unconsumed();
XMLStreamReaderUtil.readRest(reader);
XMLStreamReaderUtil.close(reader);
XMLStreamReaderFactory.recycle(reader);
}
public XMLStreamReader readPayload() {
if(!hasPayload())
return null;
// TODO: What about access at and beyond </soap:Body>
assert unconsumed();
return this.reader;
}
public void writePayloadTo(XMLStreamWriter writer)throws XMLStreamException {
if ( envelopeReader != null ) readEnvelope(this);
assert unconsumed();
if(payloadLocalName==null) {
return; // no body
}
if (bodyPrologue != null) {
writer.writeCharacters(bodyPrologue);
}
XMLStreamReaderToXMLStreamWriter conv = new XMLStreamReaderToXMLStreamWriter();
while(reader.getEventType() != XMLStreamConstants.END_DOCUMENT){
String name = reader.getLocalName();
String nsUri = reader.getNamespaceURI();
// After the previous conv.bridge() call the cursor will be at END_ELEMENT.
// If it is not soapenv:Body, move to the next element
if(reader.getEventType() == XMLStreamConstants.END_ELEMENT){
if (!isBodyElement(name, nsUri)){
// closing payload element: store the epilogue for later signing, if applicable;
// if more than one payload exists, only the last epilogue is stored
String whiteSpaces = XMLStreamReaderUtil.nextWhiteSpaceContent(reader);
if (whiteSpaces != null) {
this.bodyEpilogue = whiteSpaces;
// write it to the message too
writer.writeCharacters(whiteSpaces);
}
} else {
// body closed > exit
break;
}
} else {
// payload opening element: copy payload to writer
conv.bridge(reader,writer);
}
}
XMLStreamReaderUtil.readRest(reader);
XMLStreamReaderUtil.close(reader);
XMLStreamReaderFactory.recycle(reader);
}
private boolean isBodyElement(String name, String nsUri) {
return name.equals("Body") && nsUri.equals(soapVersion.nsUri);
}
public void writeTo(XMLStreamWriter sw) throws XMLStreamException{
if ( envelopeReader != null ) readEnvelope(this);
writeEnvelope(sw);
}
/**
* This method should be called when the StreamMessage is created with a payload
* @param writer
*/
private void writeEnvelope(XMLStreamWriter writer) throws XMLStreamException {
if ( envelopeReader != null ) readEnvelope(this);
writer.writeStartDocument();
envelopeTag.writeStart(writer);
//write headers
MessageHeaders hl = getHeaders();
if (hl.hasHeaders() && headerTag == null) headerTag = new TagInfoset(envelopeTag.nsUri,"Header",envelopeTag.prefix,EMPTY_ATTS);
if (headerTag != null) {
headerTag.writeStart(writer);
if (hl.hasHeaders()){
for(Header h : hl.asList()){
h.writeTo(writer);
}
}
writer.writeEndElement();
}
bodyTag.writeStart(writer);
if(hasPayload())
writePayloadTo(writer);
writer.writeEndElement();
writer.writeEndElement();
writer.writeEndDocument();
}
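/**
* Hedged usage sketch (helper not present in the original class): serializes this message
* to a UTF-8 stream with the standard StAX XMLOutputFactory, which drives writeTo(XMLStreamWriter)
* and therefore writeEnvelope(...) above. The output stream and encoding are assumptions of
* the sketch; note that writing consumes the payload reader.
*/
private void writeToStreamExample(java.io.OutputStream out) throws XMLStreamException {
XMLStreamWriter writer = XMLOutputFactory.newInstance().createXMLStreamWriter(out, "UTF-8");
writeTo(writer);
writer.flush();
}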
public void writePayloadTo(ContentHandler contentHandler, ErrorHandler errorHandler, boolean fragment) throws SAXException {
if ( envelopeReader != null ) readEnvelope(this);
assert unconsumed();
try {
if(payloadLocalName==null)
return; // no body
if (bodyPrologue != null) {
char[] chars = bodyPrologue.toCharArray();
contentHandler.characters(chars, 0, chars.length);
}
XMLStreamReaderToContentHandler conv = new XMLStreamReaderToContentHandler(reader,contentHandler,true,fragment,getInscopeNamespaces());
while(reader.getEventType() != XMLStreamConstants.END_DOCUMENT){
String name = reader.getLocalName();
String nsUri = reader.getNamespaceURI();
// After the previous conv.bridge() call the cursor will be at END_ELEMENT.
// If it is not soapenv:Body, move to the next element
if(reader.getEventType() == XMLStreamConstants.END_ELEMENT){
if (!isBodyElement(name, nsUri)){
// closing payload element: store the epilogue for later signing, if applicable;
// if more than one payload exists, only the last epilogue is stored
String whiteSpaces = XMLStreamReaderUtil.nextWhiteSpaceContent(reader);
if (whiteSpaces != null) {
this.bodyEpilogue = whiteSpaces;
// write it to the message too
char[] chars = whiteSpaces.toCharArray();
contentHandler.characters(chars, 0, chars.length);
}
} else {
// body closed > exit
break;
}
} else {
// payload opening element: copy payload to writer
conv.bridge();
}
}
XMLStreamReaderUtil.readRest(reader);
XMLStreamReaderUtil.close(reader);
XMLStreamReaderFactory.recycle(reader);
} catch (XMLStreamException e) {
Location loc = e.getLocation();
if(loc==null) loc = DummyLocation.INSTANCE;
SAXParseException x = new SAXParseException(
e.getMessage(),loc.getPublicId(),loc.getSystemId(),loc.getLineNumber(),loc.getColumnNumber(),e);
errorHandler.error(x);
}
}
// TODO: this method should probably be rewritten to respect spaces between elements; is it used at all?
@Override
public Message copy() {
if ( envelopeReader != null ) readEnvelope(this);
try {
assert unconsumed();
consumedAt = null; // but we don't want to mark it as consumed
MutableXMLStreamBuffer xsb = new MutableXMLStreamBuffer();
StreamReaderBufferCreator c = new StreamReaderBufferCreator(xsb);
// preserving in-scope namespaces from the envelope and body. Another option
// would be to create a filtering XMLStreamReader from reader+envelopeTag+bodyTag
c.storeElement(envelopeTag.nsUri, envelopeTag.localName, envelopeTag.prefix, envelopeTag.ns);
c.storeElement(bodyTag.nsUri, bodyTag.localName, bodyTag.prefix, bodyTag.ns);
if (hasPayload()) {
// Loop all the way for the multi-payload case
while(reader.getEventType() != XMLStreamConstants.END_DOCUMENT){
String name = reader.getLocalName();
String nsUri = reader.getNamespaceURI();
if(isBodyElement(name, nsUri) || (reader.getEventType() == XMLStreamConstants.END_DOCUMENT))
break;
c.create(reader);
// Skip whitespace between the payload and </Body> or between elements;
// it won't be in the message itself, but we store it in the bodyEpilogue field
if (reader.isWhiteSpace()) {
bodyEpilogue = XMLStreamReaderUtil.currentWhiteSpaceContent(reader);
} else {
// clear it in case the existing value was not the last one
// (we are interested only in the last one)
bodyEpilogue = null;
}
}
}
c.storeEndElement(); // create structure element for </Body>
c.storeEndElement(); // create structure element for </Envelope>
c.storeEndElement(); // create structure element for END_DOCUMENT
XMLStreamReaderUtil.readRest(reader);
XMLStreamReaderUtil.close(reader);
XMLStreamReaderFactory.recycle(reader);
reader = xsb.readAsXMLStreamReader();
XMLStreamReader clone = xsb.readAsXMLStreamReader();
// advance to the start tag of the <Body> first child element
proceedToRootElement(reader);
proceedToRootElement(clone);
return new StreamMessage(envelopeTag, headerTag, attachmentSet, HeaderList.copy(headers), bodyPrologue, bodyTag, bodyEpilogue, clone, soapVersion);
} catch (XMLStreamException e) {
throw new WebServiceException("Failed to copy a message",e);
}
}
private void proceedToRootElement(XMLStreamReader xsr) throws XMLStreamException {
assert xsr.getEventType()==START_DOCUMENT;
xsr.nextTag();
xsr.nextTag();
xsr.nextTag();
assert xsr.getEventType()==START_ELEMENT || xsr.getEventType()==END_ELEMENT;
}
public void writeTo(ContentHandler contentHandler, ErrorHandler errorHandler ) throws SAXException {
if ( envelopeReader != null ) readEnvelope(this);
contentHandler.setDocumentLocator(NULL_LOCATOR);
contentHandler.startDocument();
envelopeTag.writeStart(contentHandler);
if (hasHeaders() && headerTag == null) headerTag = new TagInfoset(envelopeTag.nsUri,"Header",envelopeTag.prefix,EMPTY_ATTS);
if (headerTag != null) {
headerTag.writeStart(contentHandler);
if (hasHeaders()) {
MessageHeaders headers = getHeaders();
for (Header h : headers.asList()) {
// shouldn't JDK be smart enough to use array-style indexing for this foreach!?
h.writeTo(contentHandler,errorHandler);
}
}
headerTag.writeEnd(contentHandler);
}
bodyTag.writeStart(contentHandler);
writePayloadTo(contentHandler,errorHandler, true);
bodyTag.writeEnd(contentHandler);
envelopeTag.writeEnd(contentHandler);
contentHandler.endDocument();
}
/**
* Used for an assertion. Returns true when the message is unconsumed,
* otherwise throws an exception.
*
* <p>
* Calling this method also marks the stream as 'consumed'
*/
private boolean unconsumed() {
if(payloadLocalName==null)
return true; // no payload. can be consumed multiple times.
if(reader.getEventType()!=XMLStreamReader.START_ELEMENT) {
AssertionError error = new AssertionError("StreamMessage has been already consumed. See the nested exception for where it's consumed");
error.initCause(consumedAt);
throw error;
}
consumedAt = new Exception().fillInStackTrace();
return true;
}
public String getBodyPrologue() {
if ( envelopeReader != null ) readEnvelope(this);
return bodyPrologue;
}
public String getBodyEpilogue() {
if ( envelopeReader != null ) readEnvelope(this);
return bodyEpilogue;
}
public XMLStreamReader getReader() {
if ( envelopeReader != null ) readEnvelope(this);
assert unconsumed();
return reader;
}
private static final String SOAP_ENVELOPE = "Envelope";
private static final String SOAP_HEADER = "Header";
private static final String SOAP_BODY = "Body";
protected interface StreamHeaderDecoder {
public Header decodeHeader(XMLStreamReader reader, XMLStreamBuffer mark);
}
static final StreamHeaderDecoder SOAP12StreamHeaderDecoder = new StreamHeaderDecoder() {
@Override
public Header decodeHeader(XMLStreamReader reader, XMLStreamBuffer mark) {
return new StreamHeader12(reader, mark);
}
};
static final StreamHeaderDecoder SOAP11StreamHeaderDecoder = new StreamHeaderDecoder() {
@Override
public Header decodeHeader(XMLStreamReader reader, XMLStreamBuffer mark) {
return new StreamHeader11(reader, mark);
}
};
static private void readEnvelope(StreamMessage message) {
if ( message.envelopeReader == null ) return;
XMLStreamReader reader = message.envelopeReader;
message.envelopeReader = null;
SOAPVersion soapVersion = message.soapVersion;
// Move to soap:Envelope and verify
if(reader.getEventType()!=XMLStreamConstants.START_ELEMENT)
XMLStreamReaderUtil.nextElementContent(reader);
XMLStreamReaderUtil.verifyReaderState(reader,XMLStreamConstants.START_ELEMENT);
if (SOAP_ENVELOPE.equals(reader.getLocalName()) && !soapVersion.nsUri.equals(reader.getNamespaceURI())) {
throw new VersionMismatchException(soapVersion, soapVersion.nsUri, reader.getNamespaceURI());
}
XMLStreamReaderUtil.verifyTag(reader, soapVersion.nsUri, SOAP_ENVELOPE);
TagInfoset envelopeTag = new TagInfoset(reader);
// Collect namespaces on soap:Envelope
Map<String,String> namespaces = new HashMap<String,String>();
for(int i=0; i< reader.getNamespaceCount();i++){
namespaces.put(reader.getNamespacePrefix(i), reader.getNamespaceURI(i));
}
// Move to next element
XMLStreamReaderUtil.nextElementContent(reader);
XMLStreamReaderUtil.verifyReaderState(reader,
javax.xml.stream.XMLStreamConstants.START_ELEMENT);
HeaderList headers = null;
TagInfoset headerTag = null;
if (reader.getLocalName().equals(SOAP_HEADER)
&& reader.getNamespaceURI().equals(soapVersion.nsUri)) {
headerTag = new TagInfoset(reader);
// Collect namespaces on soap:Header
for(int i=0; i< reader.getNamespaceCount();i++){
namespaces.put(reader.getNamespacePrefix(i), reader.getNamespaceURI(i));
}
// skip <soap:Header>
XMLStreamReaderUtil.nextElementContent(reader);
// If SOAP header blocks are present (i.e. not <soap:Header/>)
if (reader.getEventType() == XMLStreamConstants.START_ELEMENT) {
headers = new HeaderList(soapVersion);
try {
// Cache SOAP header blocks
StreamHeaderDecoder headerDecoder = SOAPVersion.SOAP_11.equals(soapVersion) ? SOAP11StreamHeaderDecoder : SOAP12StreamHeaderDecoder;
cacheHeaders(reader, namespaces, headers, headerDecoder);
} catch (XMLStreamException e) {
// TODO need to throw more meaningful exception
throw new WebServiceException(e);
}
}
// Move to soap:Body
XMLStreamReaderUtil.nextElementContent(reader);
}
// Verify that <soap:Body> is present
XMLStreamReaderUtil.verifyTag(reader, soapVersion.nsUri, SOAP_BODY);
TagInfoset bodyTag = new TagInfoset(reader);
String bodyPrologue = XMLStreamReaderUtil.nextWhiteSpaceContent(reader);
message.init(envelopeTag,headerTag,message.attachmentSet,headers,bodyPrologue,bodyTag,null,reader,soapVersion);
// when there's no payload,
// it's tempting to use EmptyMessageImpl, but it doesn't preserve the infoset
// of <envelope>,<header>, and <body>, so we need to stick to StreamMessage.
}
private static XMLStreamBuffer cacheHeaders(XMLStreamReader reader,
Map<String, String> namespaces, HeaderList headers,
StreamHeaderDecoder headerDecoder) throws XMLStreamException {
MutableXMLStreamBuffer buffer = createXMLStreamBuffer();
StreamReaderBufferCreator creator = new StreamReaderBufferCreator();
creator.setXMLStreamBuffer(buffer);
// Reader is positioned at the first header block
while(reader.getEventType() == javax.xml.stream.XMLStreamConstants.START_ELEMENT) {
Map<String,String> headerBlockNamespaces = namespaces;
// Collect namespaces on SOAP header block
if (reader.getNamespaceCount() > 0) {
headerBlockNamespaces = new HashMap<String,String>(namespaces);
for (int i = 0; i < reader.getNamespaceCount(); i++) {
headerBlockNamespaces.put(reader.getNamespacePrefix(i), reader.getNamespaceURI(i));
}
}
// Mark
XMLStreamBuffer mark = new XMLStreamBufferMark(headerBlockNamespaces, creator);
// Create Header
headers.add(headerDecoder.decodeHeader(reader, mark));
// Cache the header block
// After caching, the reader will be positioned at the next header block or
// at the closing </soap:Header> element
creator.createElementFragment(reader, false);
if (reader.getEventType() != XMLStreamConstants.START_ELEMENT &&
reader.getEventType() != XMLStreamConstants.END_ELEMENT) {
XMLStreamReaderUtil.nextElementContent(reader);
}
}
return buffer;
}
private static MutableXMLStreamBuffer createXMLStreamBuffer() {
// TODO: the decoder should own one MutableXMLStreamBuffer for reuse
// since that is more efficient. ISSUE: possible issue with the
// lifetime of information in the buffer if it is accessed beyond
// the pipeline.
return new MutableXMLStreamBuffer();
}
}
|
|
/**
* Copyright (c) 2000-present Liferay, Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*/
package org.oep.core.processmgt.model.impl;
import com.liferay.portal.kernel.bean.AutoEscapeBeanHandler;
import com.liferay.portal.kernel.exception.SystemException;
import com.liferay.portal.kernel.json.JSON;
import com.liferay.portal.kernel.lar.StagedModelType;
import com.liferay.portal.kernel.util.GetterUtil;
import com.liferay.portal.kernel.util.ProxyUtil;
import com.liferay.portal.kernel.util.StringBundler;
import com.liferay.portal.kernel.util.StringPool;
import com.liferay.portal.model.CacheModel;
import com.liferay.portal.model.impl.BaseModelImpl;
import com.liferay.portal.service.ServiceContext;
import com.liferay.portal.util.PortalUtil;
import com.liferay.portlet.expando.model.ExpandoBridge;
import com.liferay.portlet.expando.util.ExpandoBridgeFactoryUtil;
import org.oep.core.processmgt.model.ProcessOrder;
import org.oep.core.processmgt.model.ProcessOrderModel;
import org.oep.core.processmgt.model.ProcessOrderSoap;
import java.io.Serializable;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* The base model implementation for the ProcessOrder service. Represents a row in the "oep_processmgt_processorder" database table, with each column mapped to a property of this class.
*
* <p>
* This implementation and its corresponding interface {@link org.oep.core.processmgt.model.ProcessOrderModel} exist only as a container for the default property accessors generated by ServiceBuilder. Helper methods and all application logic should be put in {@link ProcessOrderImpl}.
* </p>
*
* @author trungdk
* @see ProcessOrderImpl
* @see org.oep.core.processmgt.model.ProcessOrder
* @see org.oep.core.processmgt.model.ProcessOrderModel
* @generated
*/
@JSON(strict = true)
public class ProcessOrderModelImpl extends BaseModelImpl<ProcessOrder>
implements ProcessOrderModel {
/*
* NOTE FOR DEVELOPERS:
*
* Never modify or reference this class directly. All methods that expect a process order model instance should use the {@link org.oep.core.processmgt.model.ProcessOrder} interface instead.
*/
public static final String TABLE_NAME = "oep_processmgt_processorder";
public static final Object[][] TABLE_COLUMNS = {
{ "uuid_", Types.VARCHAR },
{ "processOrderId", Types.BIGINT },
{ "userId", Types.BIGINT },
{ "groupId", Types.BIGINT },
{ "companyId", Types.BIGINT },
{ "createDate", Types.TIMESTAMP },
{ "modifiedDate", Types.TIMESTAMP },
{ "organizationId", Types.BIGINT },
{ "parentProcessOrderId", Types.BIGINT },
{ "dossierId", Types.BIGINT },
{ "dossierProcessId", Types.BIGINT },
{ "dossierStepId", Types.BIGINT },
{ "orderStatus", Types.VARCHAR },
{ "orderResume", Types.VARCHAR },
{ "stepDate", Types.TIMESTAMP },
{ "stepNote", Types.VARCHAR },
{ "assignToUserId", Types.BIGINT },
{ "currentCondition", Types.VARCHAR },
{ "lastStepTransitionId", Types.BIGINT },
{ "stopRollback", Types.INTEGER },
{ "ebPartnerShipId", Types.BIGINT }
};
public static final String TABLE_SQL_CREATE = "create table oep_processmgt_processorder (uuid_ VARCHAR(75) null,processOrderId LONG not null primary key,userId LONG,groupId LONG,companyId LONG,createDate DATE null,modifiedDate DATE null,organizationId LONG,parentProcessOrderId LONG,dossierId LONG,dossierProcessId LONG,dossierStepId LONG,orderStatus VARCHAR(30) null,orderResume VARCHAR(200) null,stepDate DATE null,stepNote VARCHAR(200) null,assignToUserId LONG,currentCondition VARCHAR(30) null,lastStepTransitionId LONG,stopRollback INTEGER,ebPartnerShipId LONG)";
public static final String TABLE_SQL_DROP = "drop table oep_processmgt_processorder";
public static final String ORDER_BY_JPQL = " ORDER BY processOrder.processOrderId ASC";
public static final String ORDER_BY_SQL = " ORDER BY oep_processmgt_processorder.processOrderId ASC";
public static final String DATA_SOURCE = "liferayDataSource";
public static final String SESSION_FACTORY = "liferaySessionFactory";
public static final String TX_MANAGER = "liferayTransactionManager";
public static final boolean ENTITY_CACHE_ENABLED = GetterUtil.getBoolean(com.liferay.util.service.ServiceProps.get(
"value.object.entity.cache.enabled.org.oep.core.processmgt.model.ProcessOrder"),
true);
public static final boolean FINDER_CACHE_ENABLED = GetterUtil.getBoolean(com.liferay.util.service.ServiceProps.get(
"value.object.finder.cache.enabled.org.oep.core.processmgt.model.ProcessOrder"),
true);
public static final boolean COLUMN_BITMASK_ENABLED = GetterUtil.getBoolean(com.liferay.util.service.ServiceProps.get(
"value.object.column.bitmask.enabled.org.oep.core.processmgt.model.ProcessOrder"),
true);
public static long COMPANYID_COLUMN_BITMASK = 1L;
public static long GROUPID_COLUMN_BITMASK = 2L;
public static long UUID_COLUMN_BITMASK = 4L;
public static long PROCESSORDERID_COLUMN_BITMASK = 8L;
/**
* Converts the soap model instance into a normal model instance.
*
* @param soapModel the soap model instance to convert
* @return the normal model instance
*/
public static ProcessOrder toModel(ProcessOrderSoap soapModel) {
if (soapModel == null) {
return null;
}
ProcessOrder model = new ProcessOrderImpl();
model.setUuid(soapModel.getUuid());
model.setProcessOrderId(soapModel.getProcessOrderId());
model.setUserId(soapModel.getUserId());
model.setGroupId(soapModel.getGroupId());
model.setCompanyId(soapModel.getCompanyId());
model.setCreateDate(soapModel.getCreateDate());
model.setModifiedDate(soapModel.getModifiedDate());
model.setOrganizationId(soapModel.getOrganizationId());
model.setParentProcessOrderId(soapModel.getParentProcessOrderId());
model.setDossierId(soapModel.getDossierId());
model.setDossierProcessId(soapModel.getDossierProcessId());
model.setDossierStepId(soapModel.getDossierStepId());
model.setOrderStatus(soapModel.getOrderStatus());
model.setOrderResume(soapModel.getOrderResume());
model.setStepDate(soapModel.getStepDate());
model.setStepNote(soapModel.getStepNote());
model.setAssignToUserId(soapModel.getAssignToUserId());
model.setCurrentCondition(soapModel.getCurrentCondition());
model.setLastStepTransitionId(soapModel.getLastStepTransitionId());
model.setStopRollback(soapModel.getStopRollback());
model.setEbPartnerShipId(soapModel.getEbPartnerShipId());
return model;
}
/**
* Converts the soap model instances into normal model instances.
*
* @param soapModels the soap model instances to convert
* @return the normal model instances
*/
public static List<ProcessOrder> toModels(ProcessOrderSoap[] soapModels) {
if (soapModels == null) {
return null;
}
List<ProcessOrder> models = new ArrayList<ProcessOrder>(soapModels.length);
for (ProcessOrderSoap soapModel : soapModels) {
models.add(toModel(soapModel));
}
return models;
}
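/**
* Hedged usage sketch (not part of the generated ServiceBuilder code): converts an array of
* SOAP models received over the remote API into service-layer models using toModels(...)
* above, guarding against the null array case. The method name is an assumption of this sketch.
*/
private static List<ProcessOrder> toModelsOrEmpty(ProcessOrderSoap[] soapModels) {
List<ProcessOrder> models = toModels(soapModels);
// toModels(null) returns null rather than an empty list
return (models != null) ? models : new ArrayList<ProcessOrder>();
}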
public static final long LOCK_EXPIRATION_TIME = GetterUtil.getLong(com.liferay.util.service.ServiceProps.get(
"lock.expiration.time.org.oep.core.processmgt.model.ProcessOrder"));
public ProcessOrderModelImpl() {
}
@Override
public long getPrimaryKey() {
return _processOrderId;
}
@Override
public void setPrimaryKey(long primaryKey) {
setProcessOrderId(primaryKey);
}
@Override
public Serializable getPrimaryKeyObj() {
return _processOrderId;
}
@Override
public void setPrimaryKeyObj(Serializable primaryKeyObj) {
setPrimaryKey(((Long)primaryKeyObj).longValue());
}
@Override
public Class<?> getModelClass() {
return ProcessOrder.class;
}
@Override
public String getModelClassName() {
return ProcessOrder.class.getName();
}
@Override
public Map<String, Object> getModelAttributes() {
Map<String, Object> attributes = new HashMap<String, Object>();
attributes.put("uuid", getUuid());
attributes.put("processOrderId", getProcessOrderId());
attributes.put("userId", getUserId());
attributes.put("groupId", getGroupId());
attributes.put("companyId", getCompanyId());
attributes.put("createDate", getCreateDate());
attributes.put("modifiedDate", getModifiedDate());
attributes.put("organizationId", getOrganizationId());
attributes.put("parentProcessOrderId", getParentProcessOrderId());
attributes.put("dossierId", getDossierId());
attributes.put("dossierProcessId", getDossierProcessId());
attributes.put("dossierStepId", getDossierStepId());
attributes.put("orderStatus", getOrderStatus());
attributes.put("orderResume", getOrderResume());
attributes.put("stepDate", getStepDate());
attributes.put("stepNote", getStepNote());
attributes.put("assignToUserId", getAssignToUserId());
attributes.put("currentCondition", getCurrentCondition());
attributes.put("lastStepTransitionId", getLastStepTransitionId());
attributes.put("stopRollback", getStopRollback());
attributes.put("ebPartnerShipId", getEbPartnerShipId());
return attributes;
}
@Override
public void setModelAttributes(Map<String, Object> attributes) {
String uuid = (String)attributes.get("uuid");
if (uuid != null) {
setUuid(uuid);
}
Long processOrderId = (Long)attributes.get("processOrderId");
if (processOrderId != null) {
setProcessOrderId(processOrderId);
}
Long userId = (Long)attributes.get("userId");
if (userId != null) {
setUserId(userId);
}
Long groupId = (Long)attributes.get("groupId");
if (groupId != null) {
setGroupId(groupId);
}
Long companyId = (Long)attributes.get("companyId");
if (companyId != null) {
setCompanyId(companyId);
}
Date createDate = (Date)attributes.get("createDate");
if (createDate != null) {
setCreateDate(createDate);
}
Date modifiedDate = (Date)attributes.get("modifiedDate");
if (modifiedDate != null) {
setModifiedDate(modifiedDate);
}
Long organizationId = (Long)attributes.get("organizationId");
if (organizationId != null) {
setOrganizationId(organizationId);
}
Long parentProcessOrderId = (Long)attributes.get("parentProcessOrderId");
if (parentProcessOrderId != null) {
setParentProcessOrderId(parentProcessOrderId);
}
Long dossierId = (Long)attributes.get("dossierId");
if (dossierId != null) {
setDossierId(dossierId);
}
Long dossierProcessId = (Long)attributes.get("dossierProcessId");
if (dossierProcessId != null) {
setDossierProcessId(dossierProcessId);
}
Long dossierStepId = (Long)attributes.get("dossierStepId");
if (dossierStepId != null) {
setDossierStepId(dossierStepId);
}
String orderStatus = (String)attributes.get("orderStatus");
if (orderStatus != null) {
setOrderStatus(orderStatus);
}
String orderResume = (String)attributes.get("orderResume");
if (orderResume != null) {
setOrderResume(orderResume);
}
Date stepDate = (Date)attributes.get("stepDate");
if (stepDate != null) {
setStepDate(stepDate);
}
String stepNote = (String)attributes.get("stepNote");
if (stepNote != null) {
setStepNote(stepNote);
}
Long assignToUserId = (Long)attributes.get("assignToUserId");
if (assignToUserId != null) {
setAssignToUserId(assignToUserId);
}
String currentCondition = (String)attributes.get("currentCondition");
if (currentCondition != null) {
setCurrentCondition(currentCondition);
}
Long lastStepTransitionId = (Long)attributes.get("lastStepTransitionId");
if (lastStepTransitionId != null) {
setLastStepTransitionId(lastStepTransitionId);
}
Integer stopRollback = (Integer)attributes.get("stopRollback");
if (stopRollback != null) {
setStopRollback(stopRollback);
}
Long ebPartnerShipId = (Long)attributes.get("ebPartnerShipId");
if (ebPartnerShipId != null) {
setEbPartnerShipId(ebPartnerShipId);
}
}
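/**
* Hedged illustration (not part of the generated ServiceBuilder code): shows how the
* attribute-map accessors above can copy column values between two model instances.
* setModelAttributes(...) skips keys whose values are null, so only populated columns
* are written onto the target. Both parameter names are assumptions of this sketch.
*/
private static void copyModelAttributes(ProcessOrderModelImpl source, ProcessOrderModelImpl target) {
Map<String, Object> attributes = source.getModelAttributes();
target.setModelAttributes(attributes);
}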
@JSON
@Override
public String getUuid() {
if (_uuid == null) {
return StringPool.BLANK;
}
else {
return _uuid;
}
}
@Override
public void setUuid(String uuid) {
if (_originalUuid == null) {
_originalUuid = _uuid;
}
_uuid = uuid;
}
public String getOriginalUuid() {
return GetterUtil.getString(_originalUuid);
}
@JSON
@Override
public long getProcessOrderId() {
return _processOrderId;
}
@Override
public void setProcessOrderId(long processOrderId) {
_processOrderId = processOrderId;
}
@JSON
@Override
public long getUserId() {
return _userId;
}
@Override
public void setUserId(long userId) {
_userId = userId;
}
@Override
public String getUserUuid() throws SystemException {
return PortalUtil.getUserValue(getUserId(), "uuid", _userUuid);
}
@Override
public void setUserUuid(String userUuid) {
_userUuid = userUuid;
}
@JSON
@Override
public long getGroupId() {
return _groupId;
}
@Override
public void setGroupId(long groupId) {
_columnBitmask |= GROUPID_COLUMN_BITMASK;
if (!_setOriginalGroupId) {
_setOriginalGroupId = true;
_originalGroupId = _groupId;
}
_groupId = groupId;
}
public long getOriginalGroupId() {
return _originalGroupId;
}
@JSON
@Override
public long getCompanyId() {
return _companyId;
}
@Override
public void setCompanyId(long companyId) {
_columnBitmask |= COMPANYID_COLUMN_BITMASK;
if (!_setOriginalCompanyId) {
_setOriginalCompanyId = true;
_originalCompanyId = _companyId;
}
_companyId = companyId;
}
public long getOriginalCompanyId() {
return _originalCompanyId;
}
@JSON
@Override
public Date getCreateDate() {
return _createDate;
}
@Override
public void setCreateDate(Date createDate) {
_createDate = createDate;
}
@JSON
@Override
public Date getModifiedDate() {
return _modifiedDate;
}
@Override
public void setModifiedDate(Date modifiedDate) {
_modifiedDate = modifiedDate;
}
@JSON
@Override
public long getOrganizationId() {
return _organizationId;
}
@Override
public void setOrganizationId(long organizationId) {
_organizationId = organizationId;
}
@JSON
@Override
public long getParentProcessOrderId() {
return _parentProcessOrderId;
}
@Override
public void setParentProcessOrderId(long parentProcessOrderId) {
_parentProcessOrderId = parentProcessOrderId;
}
@JSON
@Override
public long getDossierId() {
return _dossierId;
}
@Override
public void setDossierId(long dossierId) {
_dossierId = dossierId;
}
@JSON
@Override
public long getDossierProcessId() {
return _dossierProcessId;
}
@Override
public void setDossierProcessId(long dossierProcessId) {
_dossierProcessId = dossierProcessId;
}
@JSON
@Override
public long getDossierStepId() {
return _dossierStepId;
}
@Override
public void setDossierStepId(long dossierStepId) {
_dossierStepId = dossierStepId;
}
@JSON
@Override
public String getOrderStatus() {
if (_orderStatus == null) {
return StringPool.BLANK;
}
else {
return _orderStatus;
}
}
@Override
public void setOrderStatus(String orderStatus) {
_orderStatus = orderStatus;
}
@JSON
@Override
public String getOrderResume() {
if (_orderResume == null) {
return StringPool.BLANK;
}
else {
return _orderResume;
}
}
@Override
public void setOrderResume(String orderResume) {
_orderResume = orderResume;
}
@JSON
@Override
public Date getStepDate() {
return _stepDate;
}
@Override
public void setStepDate(Date stepDate) {
_stepDate = stepDate;
}
@JSON
@Override
public String getStepNote() {
if (_stepNote == null) {
return StringPool.BLANK;
}
else {
return _stepNote;
}
}
@Override
public void setStepNote(String stepNote) {
_stepNote = stepNote;
}
@JSON
@Override
public long getAssignToUserId() {
return _assignToUserId;
}
@Override
public void setAssignToUserId(long assignToUserId) {
_assignToUserId = assignToUserId;
}
@Override
public String getAssignToUserUuid() throws SystemException {
return PortalUtil.getUserValue(getAssignToUserId(), "uuid",
_assignToUserUuid);
}
@Override
public void setAssignToUserUuid(String assignToUserUuid) {
_assignToUserUuid = assignToUserUuid;
}
@JSON
@Override
public String getCurrentCondition() {
if (_currentCondition == null) {
return StringPool.BLANK;
}
else {
return _currentCondition;
}
}
@Override
public void setCurrentCondition(String currentCondition) {
_currentCondition = currentCondition;
}
@JSON
@Override
public long getLastStepTransitionId() {
return _lastStepTransitionId;
}
@Override
public void setLastStepTransitionId(long lastStepTransitionId) {
_lastStepTransitionId = lastStepTransitionId;
}
@JSON
@Override
public int getStopRollback() {
return _stopRollback;
}
@Override
public void setStopRollback(int stopRollback) {
_stopRollback = stopRollback;
}
@JSON
@Override
public long getEbPartnerShipId() {
return _ebPartnerShipId;
}
@Override
public void setEbPartnerShipId(long ebPartnerShipId) {
_ebPartnerShipId = ebPartnerShipId;
}
@Override
public StagedModelType getStagedModelType() {
return new StagedModelType(PortalUtil.getClassNameId(
ProcessOrder.class.getName()));
}
public long getColumnBitmask() {
return _columnBitmask;
}
@Override
public ExpandoBridge getExpandoBridge() {
return ExpandoBridgeFactoryUtil.getExpandoBridge(getCompanyId(),
ProcessOrder.class.getName(), getPrimaryKey());
}
@Override
public void setExpandoBridgeAttributes(ServiceContext serviceContext) {
ExpandoBridge expandoBridge = getExpandoBridge();
expandoBridge.setAttributes(serviceContext);
}
@Override
public ProcessOrder toEscapedModel() {
if (_escapedModel == null) {
_escapedModel = (ProcessOrder)ProxyUtil.newProxyInstance(_classLoader,
_escapedModelInterfaces, new AutoEscapeBeanHandler(this));
}
return _escapedModel;
}
@Override
public Object clone() {
ProcessOrderImpl processOrderImpl = new ProcessOrderImpl();
processOrderImpl.setUuid(getUuid());
processOrderImpl.setProcessOrderId(getProcessOrderId());
processOrderImpl.setUserId(getUserId());
processOrderImpl.setGroupId(getGroupId());
processOrderImpl.setCompanyId(getCompanyId());
processOrderImpl.setCreateDate(getCreateDate());
processOrderImpl.setModifiedDate(getModifiedDate());
processOrderImpl.setOrganizationId(getOrganizationId());
processOrderImpl.setParentProcessOrderId(getParentProcessOrderId());
processOrderImpl.setDossierId(getDossierId());
processOrderImpl.setDossierProcessId(getDossierProcessId());
processOrderImpl.setDossierStepId(getDossierStepId());
processOrderImpl.setOrderStatus(getOrderStatus());
processOrderImpl.setOrderResume(getOrderResume());
processOrderImpl.setStepDate(getStepDate());
processOrderImpl.setStepNote(getStepNote());
processOrderImpl.setAssignToUserId(getAssignToUserId());
processOrderImpl.setCurrentCondition(getCurrentCondition());
processOrderImpl.setLastStepTransitionId(getLastStepTransitionId());
processOrderImpl.setStopRollback(getStopRollback());
processOrderImpl.setEbPartnerShipId(getEbPartnerShipId());
processOrderImpl.resetOriginalValues();
return processOrderImpl;
}
@Override
public int compareTo(ProcessOrder processOrder) {
long primaryKey = processOrder.getPrimaryKey();
if (getPrimaryKey() < primaryKey) {
return -1;
}
else if (getPrimaryKey() > primaryKey) {
return 1;
}
else {
return 0;
}
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof ProcessOrder)) {
return false;
}
ProcessOrder processOrder = (ProcessOrder)obj;
long primaryKey = processOrder.getPrimaryKey();
if (getPrimaryKey() == primaryKey) {
return true;
}
else {
return false;
}
}
@Override
public int hashCode() {
return (int)getPrimaryKey();
}
@Override
public void resetOriginalValues() {
ProcessOrderModelImpl processOrderModelImpl = this;
processOrderModelImpl._originalUuid = processOrderModelImpl._uuid;
processOrderModelImpl._originalGroupId = processOrderModelImpl._groupId;
processOrderModelImpl._setOriginalGroupId = false;
processOrderModelImpl._originalCompanyId = processOrderModelImpl._companyId;
processOrderModelImpl._setOriginalCompanyId = false;
processOrderModelImpl._columnBitmask = 0;
}
@Override
public CacheModel<ProcessOrder> toCacheModel() {
ProcessOrderCacheModel processOrderCacheModel = new ProcessOrderCacheModel();
processOrderCacheModel.uuid = getUuid();
String uuid = processOrderCacheModel.uuid;
if ((uuid != null) && (uuid.length() == 0)) {
processOrderCacheModel.uuid = null;
}
processOrderCacheModel.processOrderId = getProcessOrderId();
processOrderCacheModel.userId = getUserId();
processOrderCacheModel.groupId = getGroupId();
processOrderCacheModel.companyId = getCompanyId();
Date createDate = getCreateDate();
if (createDate != null) {
processOrderCacheModel.createDate = createDate.getTime();
}
else {
processOrderCacheModel.createDate = Long.MIN_VALUE;
}
Date modifiedDate = getModifiedDate();
if (modifiedDate != null) {
processOrderCacheModel.modifiedDate = modifiedDate.getTime();
}
else {
processOrderCacheModel.modifiedDate = Long.MIN_VALUE;
}
processOrderCacheModel.organizationId = getOrganizationId();
processOrderCacheModel.parentProcessOrderId = getParentProcessOrderId();
processOrderCacheModel.dossierId = getDossierId();
processOrderCacheModel.dossierProcessId = getDossierProcessId();
processOrderCacheModel.dossierStepId = getDossierStepId();
processOrderCacheModel.orderStatus = getOrderStatus();
String orderStatus = processOrderCacheModel.orderStatus;
if ((orderStatus != null) && (orderStatus.length() == 0)) {
processOrderCacheModel.orderStatus = null;
}
processOrderCacheModel.orderResume = getOrderResume();
String orderResume = processOrderCacheModel.orderResume;
if ((orderResume != null) && (orderResume.length() == 0)) {
processOrderCacheModel.orderResume = null;
}
Date stepDate = getStepDate();
if (stepDate != null) {
processOrderCacheModel.stepDate = stepDate.getTime();
}
else {
processOrderCacheModel.stepDate = Long.MIN_VALUE;
}
processOrderCacheModel.stepNote = getStepNote();
String stepNote = processOrderCacheModel.stepNote;
if ((stepNote != null) && (stepNote.length() == 0)) {
processOrderCacheModel.stepNote = null;
}
processOrderCacheModel.assignToUserId = getAssignToUserId();
processOrderCacheModel.currentCondition = getCurrentCondition();
String currentCondition = processOrderCacheModel.currentCondition;
if ((currentCondition != null) && (currentCondition.length() == 0)) {
processOrderCacheModel.currentCondition = null;
}
processOrderCacheModel.lastStepTransitionId = getLastStepTransitionId();
processOrderCacheModel.stopRollback = getStopRollback();
processOrderCacheModel.ebPartnerShipId = getEbPartnerShipId();
return processOrderCacheModel;
}
@Override
public String toString() {
StringBundler sb = new StringBundler(43);
sb.append("{uuid=");
sb.append(getUuid());
sb.append(", processOrderId=");
sb.append(getProcessOrderId());
sb.append(", userId=");
sb.append(getUserId());
sb.append(", groupId=");
sb.append(getGroupId());
sb.append(", companyId=");
sb.append(getCompanyId());
sb.append(", createDate=");
sb.append(getCreateDate());
sb.append(", modifiedDate=");
sb.append(getModifiedDate());
sb.append(", organizationId=");
sb.append(getOrganizationId());
sb.append(", parentProcessOrderId=");
sb.append(getParentProcessOrderId());
sb.append(", dossierId=");
sb.append(getDossierId());
sb.append(", dossierProcessId=");
sb.append(getDossierProcessId());
sb.append(", dossierStepId=");
sb.append(getDossierStepId());
sb.append(", orderStatus=");
sb.append(getOrderStatus());
sb.append(", orderResume=");
sb.append(getOrderResume());
sb.append(", stepDate=");
sb.append(getStepDate());
sb.append(", stepNote=");
sb.append(getStepNote());
sb.append(", assignToUserId=");
sb.append(getAssignToUserId());
sb.append(", currentCondition=");
sb.append(getCurrentCondition());
sb.append(", lastStepTransitionId=");
sb.append(getLastStepTransitionId());
sb.append(", stopRollback=");
sb.append(getStopRollback());
sb.append(", ebPartnerShipId=");
sb.append(getEbPartnerShipId());
sb.append("}");
return sb.toString();
}
@Override
public String toXmlString() {
StringBundler sb = new StringBundler(67);
sb.append("<model><model-name>");
sb.append("org.oep.core.processmgt.model.ProcessOrder");
sb.append("</model-name>");
sb.append(
"<column><column-name>uuid</column-name><column-value><![CDATA[");
sb.append(getUuid());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>processOrderId</column-name><column-value><![CDATA[");
sb.append(getProcessOrderId());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>userId</column-name><column-value><![CDATA[");
sb.append(getUserId());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>groupId</column-name><column-value><![CDATA[");
sb.append(getGroupId());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>companyId</column-name><column-value><![CDATA[");
sb.append(getCompanyId());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>createDate</column-name><column-value><![CDATA[");
sb.append(getCreateDate());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>modifiedDate</column-name><column-value><![CDATA[");
sb.append(getModifiedDate());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>organizationId</column-name><column-value><![CDATA[");
sb.append(getOrganizationId());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>parentProcessOrderId</column-name><column-value><![CDATA[");
sb.append(getParentProcessOrderId());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>dossierId</column-name><column-value><![CDATA[");
sb.append(getDossierId());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>dossierProcessId</column-name><column-value><![CDATA[");
sb.append(getDossierProcessId());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>dossierStepId</column-name><column-value><![CDATA[");
sb.append(getDossierStepId());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>orderStatus</column-name><column-value><![CDATA[");
sb.append(getOrderStatus());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>orderResume</column-name><column-value><![CDATA[");
sb.append(getOrderResume());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>stepDate</column-name><column-value><![CDATA[");
sb.append(getStepDate());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>stepNote</column-name><column-value><![CDATA[");
sb.append(getStepNote());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>assignToUserId</column-name><column-value><![CDATA[");
sb.append(getAssignToUserId());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>currentCondition</column-name><column-value><![CDATA[");
sb.append(getCurrentCondition());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>lastStepTransitionId</column-name><column-value><![CDATA[");
sb.append(getLastStepTransitionId());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>stopRollback</column-name><column-value><![CDATA[");
sb.append(getStopRollback());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>ebPartnerShipId</column-name><column-value><![CDATA[");
sb.append(getEbPartnerShipId());
sb.append("]]></column-value></column>");
sb.append("</model>");
return sb.toString();
}
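// Illustrative shape of the XML produced above (the actual output is a single unindented
// string; values elided here):
//   <model><model-name>org.oep.core.processmgt.model.ProcessOrder</model-name>
//     <column><column-name>uuid</column-name><column-value><![CDATA[...]]></column-value></column>
//     ... one <column> element per attribute, in the order appended above ...
//   </model>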
private static ClassLoader _classLoader = ProcessOrder.class.getClassLoader();
private static Class<?>[] _escapedModelInterfaces = new Class[] {
ProcessOrder.class
};
private String _uuid;
private String _originalUuid;
private long _processOrderId;
private long _userId;
private String _userUuid;
private long _groupId;
private long _originalGroupId;
private boolean _setOriginalGroupId;
private long _companyId;
private long _originalCompanyId;
private boolean _setOriginalCompanyId;
private Date _createDate;
private Date _modifiedDate;
private long _organizationId;
private long _parentProcessOrderId;
private long _dossierId;
private long _dossierProcessId;
private long _dossierStepId;
private String _orderStatus;
private String _orderResume;
private Date _stepDate;
private String _stepNote;
private long _assignToUserId;
private String _assignToUserUuid;
private String _currentCondition;
private long _lastStepTransitionId;
private int _stopRollback;
private long _ebPartnerShipId;
private long _columnBitmask;
private ProcessOrder _escapedModel;
}
|
|
/*
***************************************************************************
* Mica - the Java(tm) Graphics Framework *
***************************************************************************
* NOTICE: Permission to use, copy, and modify this software and its *
* documentation is hereby granted provided that this notice appears in *
* all copies. *
* *
* Permission to distribute un-modified copies of this software and its *
* documentation is hereby granted provided that no fee is charged and *
* that this notice appears in all copies. *
* *
* SOFTWARE FARM MAKES NO REPRESENTATIONS OR WARRANTIES ABOUT THE *
* SUITABILITY OF THE SOFTWARE, EITHER EXPRESS OR IMPLIED, INCLUDING, BUT *
* NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR *
* A PARTICULAR PURPOSE, OR NON-INFRINGEMENT. SOFTWARE FARM SHALL NOT BE *
* LIABLE TO ANY PARTY FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR *
* CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE, MODIFICATION OR *
* DISTRIBUTION OF THIS SOFTWARE OR ITS DERIVATIVES. *
* *
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, AND THE AUTHORS AND *
* DISTRIBUTORS HAVE NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, *
* UPDATES, ENHANCEMENTS, OR MODIFICATIONS. *
* *
***************************************************************************
* Copyright (c) 1997-2004 Software Farm, Inc. All Rights Reserved. *
***************************************************************************
*/
package com.swfm.mica;
import com.swfm.mica.util.Utility;
/**----------------------------------------------------------------------------------------------
* A MiGauge is a vertically or horizontally oriented thermometer-like
* widget that is usually used to display a percentage of some quantity.
* <p>
* The 'fluid' filling the gauge can either be solid or separate 'LED'-
* like rectangles. The spacing between LEDs is controlled by the alley
* spacing (see MiLayout#setAlleySpacing).
* <p>
* The size of the LED rectangles when there are multiple LEDs is determined
* by the length of this gauge, the number of LEDs, and the alley spacing.
*
* @version %I% %G%
* @author Michael L. Davis
* @release 1.4.1
* @module %M%
* @language Java (JDK 1.4)
*----------------------------------------------------------------------------------------------*/
public class MiGauge extends MiSlider
{
public static final String Mi_GAUGE_PROTOTYPE_CLASS_NAME = "Mi_GAUGE_PROTOTYPE_CLASS_NAME";
private static MiGauge prototype;
private int numberOfLEDs = 1;
private MiPart LED = new MiRectangle();
private MiPart label;
private boolean isPercentage = true;
private boolean partialLEDsAreOK= true;
private MiBounds tmpBounds = new MiBounds();
/**------------------------------------------------------
* Constructs a new MiGauge.
*------------------------------------------------------*/
public MiGauge()
{
this(Mi_HORIZONTAL);
}
/**------------------------------------------------------
* Constructs a new MiGauge with the given orientation.
* @param orientation Mi_HORIZONTAL or Mi_VERTICAL
*------------------------------------------------------*/
public MiGauge(int orientation)
{
super(orientation);
boolean isThisClass = MiDebug.getMicaClassName(this).equals("MiGauge");
if ((isThisClass) && (prototype != null))
{
copy(prototype);
refreshLookAndFeel();
applyCustomLookAndFeel();
return;
}
setVisibleContainerAutomaticLayoutEnabled(false);
setBorderLook(Mi_INDENTED_BORDER_LOOK);
LED.setBorderLook(Mi_RAISED_BORDER_LOOK);
setCellMargins(1);
appendEventHandler(new MiAdjusterEventHandler());
appendEventHandler(new MiIDragger());
setAlleySpacing(4);
thumb.setVisible(false);
setNormalizedLengthOfThumb(0.0);
refreshLookAndFeel();
applyCustomLookAndFeel();
}
/**------------------------------------------------------
* Creates a new widget from the prototype. This is the
* factory pattern implementation for this widget. If the
* prototype is null, then the default constructor is used.
* @return the new widget
* @see #setPrototype
*------------------------------------------------------*/
public static MiWidget create()
{
if (prototype == null)
return(new MiGauge());
return((MiGauge )prototype.deepCopy());
}
/**------------------------------------------------------
* Sets the label. If the label is textual or a widget,
* its value is automatically updated to display the current
* percentage value, if isPercentage() is true, or else the
* current value.
* @param label the label to display (null removes the current label)
* @see #setIsPercentage
* @see MiAdjuster#setMinimumValue
* @see MiAdjuster#setMaximumValue
*------------------------------------------------------*/
public void setLabel(MiPart label)
{
if (this.label != null)
{
removeAttachment(this.label);
}
this.label = label;
if (label != null)
{
appendAttachment(label, Mi_CENTER_LOCATION, null, null);
}
}
/**------------------------------------------------------
* Gets the label.
* @return the label.
*------------------------------------------------------*/
public MiPart getLabel()
{
return(label);
}
/**------------------------------------------------------
* Sets the number of 'LEDs' along the length of this gauge.
* The default is equal to 1, which is a solid bar.
* @param number the number of LEDS (must be >= 1).
* @exception IllegalArgumentException if number < 1
*------------------------------------------------------*/
public void setNumberOfLEDs(int number)
{
if (number < 1)
throw new IllegalArgumentException(this + ": Invalid number of LEDs: " + number);
numberOfLEDs = number;
}
/**------------------------------------------------------
* Gets the number of 'LEDs' along the length of this gauge.
* The default is equal to 1, which is a solid bar.
* @return the number of LEDS
*------------------------------------------------------*/
public int getNumberOfLEDs()
{
return(numberOfLEDs);
}
/**------------------------------------------------------
* Sets whether the label, if any, displays the percentage
* or current values.
* @param flag true if to display the percentage
*------------------------------------------------------*/
public void setIsPercentage(boolean flag)
{
isPercentage = flag;
updateLabel();
}
/**------------------------------------------------------
* Gets whether the label, if any, displays the percentage
* or current values.
* @return true if to display the percentage
*------------------------------------------------------*/
public boolean isPercentage()
{
return(isPercentage);
}
/**------------------------------------------------------
* Gets the MiPart that is used to draw the 'LEDs' (i.e.
* the mercury filler).
* @return the LED
*------------------------------------------------------*/
public MiPart getLED()
{
return(LED);
}
/**------------------------------------------------------
* Sets the MiPart that is used to draw the 'LEDs' (i.e.
* mercury filler).
* @param part the LED
*------------------------------------------------------*/
public void setLED(MiPart part)
{
LED = part;
}
/**------------------------------------------------------
* Sets whether partial LEDs can be drawn or just full size
* LEDs. This is for the case where there are multiple LEDs
* and the current normalized value is not a multiple of
* 1/numberOfLEDs. This is not used unless the number of
* LEDs is greater than 1.
* @param flag true if partially drawn LEDs are OK
*------------------------------------------------------*/
public void setPartialLEDsAreOK(boolean flag)
{
partialLEDsAreOK = flag;
}
/**------------------------------------------------------
* Gets whether partial LEDs can be drawn or just full size
* LEDs.
* @return true if partially drawn LEDs are OK
*------------------------------------------------------*/
public boolean getPartialLEDsAreOK()
{
return(partialLEDsAreOK);
}
/**------------------------------------------------------
* Sets the prototype that is to be copied when the #create
* method is called and to have its attributes and handlers
* copied whenever any widget of this type is created.
* @param p the new prototype
* @see #getPrototype
* @see #create
* @see MiPart#copy
*------------------------------------------------------*/
public static void setPrototype(MiGauge p)
{
prototype = p;
}
/**------------------------------------------------------
* Gets the prototype that is to be copied when the #create
* method is called and to have its attributes and handlers
* copied whenever any widget of this type is created.
* @return the prototype
* @see #setPrototype
* @see #create
* @see MiPart#copy
*------------------------------------------------------*/
public static MiWidget getPrototype()
{
return(prototype);
}
/**------------------------------------------------------
* Creates a prototype from the class named by the
* Mi_GAUGE_PROTOTYPE_CLASS_NAME system property,
* if specified.
*------------------------------------------------------*/
static {
String prototypeClassName = MiSystem.getProperty(Mi_GAUGE_PROTOTYPE_CLASS_NAME);
if (prototypeClassName != null)
{
prototype = (MiGauge )Utility.makeInstanceOfClass(prototypeClassName);
}
}
/**------------------------------------------------------
* Sets the value of this MiGauge. Note: ensure the value
* specified is not NaN or the LEDs will render incorrectly.
* @param value the value (between 0.0 and 1.0)
* @overrides MiSlider.setNormalizedValue
*------------------------------------------------------*/
public void setNormalizedValue(double value)
{
if (value != getNormalizedValue())
{
super.setNormalizedValue(value);
updateLabel();
invalidateArea();
}
}
/**------------------------------------------------------
* Renders this gauge, drawing the 'LEDs' (i.e. the
* mercury filler) to reflect the current normalized value.
* @param renderer the rendering context
* @overrides MiPart.render
*------------------------------------------------------*/
protected void render(MiRenderer renderer)
{
super.render(renderer);
int orientation = getOrientation();
double normalizedValue = getNormalizedValue();
MiBounds innerBounds = getInnerBounds(tmpBounds).subtractMargins(getCellMargins());
MiBounds LEDBounds = new MiBounds(innerBounds);
if (orientation == Mi_HORIZONTAL)
{
MiCoord currentValueX = LEDBounds.getXmin() + normalizedValue *innerBounds.getWidth();
MiDistance minLEDSpacing = getAlleyHSpacing();
if (numberOfLEDs == 1)
{
LEDBounds.setXmax(currentValueX);
LED.setBounds(LEDBounds);
LED.render(renderer);
return;
}
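// Multiple LEDs: size each LED as if one alley fits alongside every LED within the
// inner width, then recompute the actual inter-LED spacing so the row spans that width.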
LEDBounds.setXmax(LEDBounds.getXmin() + (innerBounds.getWidth()
- minLEDSpacing * numberOfLEDs)/numberOfLEDs);
MiDistance LEDSpacing
= (innerBounds.getWidth() - numberOfLEDs * LEDBounds.getWidth())
/(numberOfLEDs - 1);
for (int i = 0; i < numberOfLEDs; ++i)
{
if (LEDBounds.xmax > currentValueX)
{
if (partialLEDsAreOK)
LEDBounds.xmax = currentValueX;
else if ((LEDBounds.xmax - currentValueX)/LEDBounds.getWidth() < 0.5)
return;
}
LED.setBounds(LEDBounds);
LED.render(renderer);
LEDBounds.translate(LEDBounds.getWidth() + LEDSpacing, 0);
if (LEDBounds.xmin > currentValueX)
return;
}
}
else
{
MiCoord currentValueY = LEDBounds.getYmin() + normalizedValue *innerBounds.getHeight();
MiDistance minLEDSpacing = getAlleyVSpacing();
if (numberOfLEDs == 1)
{
LEDBounds.setYmax(
LEDBounds.getYmin() + normalizedValue *innerBounds.getHeight());
LED.setBounds(LEDBounds);
LED.render(renderer);
return;
}
LEDBounds.setYmax(LEDBounds.getYmin() + (innerBounds.getHeight()
- minLEDSpacing * numberOfLEDs)/numberOfLEDs);
MiDistance LEDSpacing
= (innerBounds.getHeight() - numberOfLEDs * LEDBounds.getHeight())
/(numberOfLEDs - 1);
for (int i = 0; i < numberOfLEDs; ++i)
{
if (LEDBounds.ymax > currentValueY)
{
if (partialLEDsAreOK)
LEDBounds.ymax = currentValueY;
else if ((LEDBounds.ymax - currentValueY)/LEDBounds.getHeight() < 0.5)
return;
}
LED.setBounds(LEDBounds);
LED.render(renderer);
LEDBounds.translate(0, LEDBounds.getHeight() + LEDSpacing);
if (LEDBounds.ymin > currentValueY)
return;
}
}
}
/**------------------------------------------------------
* Handles the user 'dragging' on the gauge when changing
* the value.
* @param event the drag event
* @return consumes the event
* @implements MiiDraggable
* @overrides MiAdjuster.drag
*------------------------------------------------------*/
public int drag(MiEvent event)
{
setValueFromLocation(event.worldPt);
return(Mi_CONSUME_EVENT);
}
/**------------------------------------------------------
* Updates the label, if the label is text or widget based,
* to display either the current percentage (if isPercentage()
* is true) or the actual current value.
*------------------------------------------------------*/
protected void updateLabel()
{
if (label != null)
{
String value;
if (isPercentage)
value = (int )(getNormalizedValue() * 100) + "%";
else
value = getCurrentValue() + "";
if (label instanceof MiText)
((MiText )label).setText(value);
else if (label instanceof MiWidget)
((MiWidget )label).setValue(value);
}
}
}
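/*
 * Usage illustration (not part of the original Mica sources): a minimal sketch of
 * configuring a MiGauge as described in the class comment above. Only methods defined
 * in MiGauge itself are called; the MiText(String) constructor is assumed to exist, as
 * suggested by updateLabel(), and the sketch class name is hypothetical.
 */
class MiGaugeUsageSketch
{
public static void main(String[] args)
{
MiGauge gauge = new MiGauge();          // horizontal by default
gauge.setNumberOfLEDs(10);              // ten discrete 'LED' rectangles
gauge.setPartialLEDsAreOK(false);       // draw whole LEDs only
gauge.setLabel(new MiText("0%"));       // the label tracks the value...
gauge.setIsPercentage(true);            // ...displayed as a percentage
gauge.setNormalizedValue(0.42);         // lights roughly four of the ten LEDs
}
}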
|
|
/*-
* See the file LICENSE for redistribution information.
*
* Copyright (c) 2002, 2014 Oracle and/or its affiliates. All rights reserved.
*
*/
package com.sleepycat.je.evictor;
import static com.sleepycat.je.evictor.EvictorStatDefinition.AVG_BATCH_DESC;
import static com.sleepycat.je.evictor.EvictorStatDefinition.BIN_EVICTION_TYPE_DESC;
import static com.sleepycat.je.evictor.EvictorStatDefinition.BIN_FETCH;
import static com.sleepycat.je.evictor.EvictorStatDefinition.BIN_FETCH_MISS;
import static com.sleepycat.je.evictor.EvictorStatDefinition.CACHED_IN_COMPACT_KEY;
import static com.sleepycat.je.evictor.EvictorStatDefinition.CACHED_IN_NO_TARGET;
import static com.sleepycat.je.evictor.EvictorStatDefinition.CACHED_IN_SPARSE_TARGET;
import static com.sleepycat.je.evictor.EvictorStatDefinition.EVICTOR_BINS_STRIPPED;
import static com.sleepycat.je.evictor.EvictorStatDefinition.EVICTOR_EVICT_PASSES;
import static com.sleepycat.je.evictor.EvictorStatDefinition.EVICTOR_NODES_EVICTED;
import static com.sleepycat.je.evictor.EvictorStatDefinition.EVICTOR_NODES_SCANNED;
import static com.sleepycat.je.evictor.EvictorStatDefinition.EVICTOR_ROOT_NODES_EVICTED;
import static com.sleepycat.je.evictor.EvictorStatDefinition.GROUP_DESC;
import static com.sleepycat.je.evictor.EvictorStatDefinition.GROUP_NAME;
import static com.sleepycat.je.evictor.EvictorStatDefinition.LN_FETCH;
import static com.sleepycat.je.evictor.EvictorStatDefinition.LN_FETCH_MISS;
import static com.sleepycat.je.evictor.EvictorStatDefinition.NUM_BATCHES_DESC;
import static com.sleepycat.je.evictor.EvictorStatDefinition.THREAD_UNAVAILABLE;
import static com.sleepycat.je.evictor.EvictorStatDefinition.UPPER_IN_EVICTION_TYPE_DESC;
import static com.sleepycat.je.evictor.EvictorStatDefinition.UPPER_IN_FETCH;
import static com.sleepycat.je.evictor.EvictorStatDefinition.UPPER_IN_FETCH_MISS;
import java.util.EnumSet;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.sleepycat.je.CacheMode;
import com.sleepycat.je.DatabaseException;
import com.sleepycat.je.EnvironmentFailureException;
import com.sleepycat.je.EnvironmentMutableConfig;
import com.sleepycat.je.StatsConfig;
import com.sleepycat.je.config.EnvironmentParams;
import com.sleepycat.je.dbi.DatabaseId;
import com.sleepycat.je.dbi.DatabaseImpl;
import com.sleepycat.je.dbi.DbConfigManager;
import com.sleepycat.je.dbi.EnvConfigObserver;
import com.sleepycat.je.dbi.EnvironmentImpl;
import com.sleepycat.je.dbi.INList;
import com.sleepycat.je.evictor.TargetSelector.ScanInfo;
import com.sleepycat.je.evictor.TargetSelector.SetupInfo;
import com.sleepycat.je.recovery.Checkpointer;
import com.sleepycat.je.tree.BIN;
import com.sleepycat.je.tree.ChildReference;
import com.sleepycat.je.tree.IN;
import com.sleepycat.je.tree.Node;
import com.sleepycat.je.tree.SearchResult;
import com.sleepycat.je.tree.Tree;
import com.sleepycat.je.tree.WithRootLatched;
import com.sleepycat.je.utilint.AtomicLongStat;
import com.sleepycat.je.utilint.DbLsn;
import com.sleepycat.je.utilint.IntegralLongAvgStat;
import com.sleepycat.je.utilint.LoggerUtils;
import com.sleepycat.je.utilint.LongStat;
import com.sleepycat.je.utilint.StatDefinition;
import com.sleepycat.je.utilint.StatGroup;
import com.sleepycat.je.utilint.StoppableThreadFactory;
import com.sleepycat.je.utilint.TestHook;
import com.sleepycat.je.utilint.TestHookExecute;
/**
* The Evictor is responsible for maintaining the JE cache. Since object sizes
* are not directly manipulated in a Java application, the cache is actually a
* collection of in-memory btree nodes, implemented by
* com.sleepycat.je.dbi.INList. Nodes are selected from the INList for removal,
* which is done by detaching them from the in-memory tree, and by removing
* them from the INList. Once all references to them are removed, they can be
* GC'd by the JVM.
*
* There are three main components.
*
* Arbiter: queries the memory budget to decide whether eviction is needed
* TargetSelector : chooses a target node
* Evictor: does the work of detaching the node.
*
* The TargetSelector and Evictor classes are subclassed to provide
* private/shared cache implementations. A shared cache is used by multiple
* environments within a single JVM, and is seen logically as a single INList
* collection, although it is implemented by an umbrella over multiple INLists.
*
* The Evictor owns a thread pool which is available to handle eviction tasks.
* Eviction is carried out by three types of threads:
* 1. The application thread, in the course of doing critical eviction
* 2. Daemon threads, such as the cleaner or INCompressor, in the course of
* doing their respective duties
* 3. Eviction pool threads
*
* We prefer that the eviction pool threads do as much of the eviction as
* possible, and that the application threads do as little, because eviction
* adds latency to the perceived application response time. To date, it has
* been impossible to completely remove eviction responsibilities from the
* application threads, because the process doesn't have sufficient feedback,
* and can incur an OutOfMemoryException.
*
* The eviction pool is a standard java.util.concurrent thread pool, and can
* be mutably configured in terms of core threads, max threads, and keepalive
* times.
*
* Since three types of threads can concurrently do eviction, it's important
* that eviction is both thread safe and as parallel as possible. Memory
* thresholds are generally accounted for in an unsynchronized fashion, and are
* seen as advisory. The only point of true synchronization is around the
* selection of a node for eviction. The act of eviction itself can be done
* concurrently.
*
* The eviction method is not reentrant, and a simple concurrent hash map
* of threads is used to prevent recursive calls.
*/
public abstract class Evictor implements EnvConfigObserver {
/*
* If new eviction source enums are added, a new stat is created, and
* EnvironmentStats must be updated to add a getter method.
*
* CRITICAL eviction is called by operations executed by app or daemon
* threads which detect that the cache has reached its limits.
* CACHE_MODE eviction is called by operations that use a specific
* Cursor.
* EVICTORTHREAD eviction is performed by the eviction pool threads.
* MANUAL is the call to Environment.evictMemory, called by recovery or
* application code.
*/
public enum EvictionSource {
/* Using ordinal for array values! */
EVICTORTHREAD, MANUAL, CRITICAL, CACHEMODE, DAEMON;
public StatDefinition getBINStatDef() {
return new StatDefinition("nBINsEvicted" + toString(),
BIN_EVICTION_TYPE_DESC);
}
public StatDefinition getUpperINStatDef() {
return new StatDefinition("nUpperINsEvicted" + toString(),
UPPER_IN_EVICTION_TYPE_DESC);
}
public StatDefinition getNumBatchesStatDef() {
return new StatDefinition("nBatches" + toString(),
NUM_BATCHES_DESC);
}
public StatDefinition getAvgBatchStatDef() {
return new StatDefinition("avgBatch" + toString(),
AVG_BATCH_DESC);
}
}
final EnvironmentImpl envImpl;
private final TargetSelector selector;
private final Arbiter arbiter;
/* The thread pool used to manage the background evictor threads. */
private final ThreadPoolExecutor evictionPool;
private int terminateMillis;
/* Access count after which we clear the DatabaseImpl cache. */
int dbCacheClearCount;
/*
* runEvictor is needed as a distinct flag, rather than setting maxThreads
* to 0, because the ThreadPoolExecutor does not permit maxThreads to be 0.
*/
private boolean runEvictor;
/*
* Whether to allow deltas when logging a BIN.
*/
private final boolean allowBinDeltas;
/* Prevent endless eviction loops under extreme resource constraints. */
private static final int MAX_BATCHES_PER_RUN = 100;
/*
* Stats
*/
private final StatGroup stats;
/* Number of passes made to the evictor. */
private final LongStat nEvictPasses;
/* Number of nodes scanned in order to select the eviction set */
private final LongStat nNodesScanned;
/*
* Number of nodes evicted on this run. This could be understated, as a
* whole subtree may have gone out with a single node.
*/
private final LongStat nNodesEvicted;
/* Number of closed database root nodes evicted on this run. */
private final LongStat nRootNodesEvicted;
/* Number of BINs stripped. */
private final LongStat nBINsStripped;
/*
* Tree related cache hit/miss stats. A subset of the cache misses recorded
* by the log manager, in that these only record tree node hits and misses.
* Recorded by IN.fetchTarget, but grouped with evictor stats. Use
* AtomicLongStat for multithreading safety.
*/
private final AtomicLongStat nLNFetch;
private final AtomicLongStat nBINFetch;
private final AtomicLongStat nUpperINFetch;
private final AtomicLongStat nLNFetchMiss;
private final AtomicLongStat nBINFetchMiss;
private final AtomicLongStat nUpperINFetchMiss;
private final AtomicLongStat nThreadUnavailable;
/* Stats for IN compact array representations currently in cache. */
private final AtomicLong nINSparseTarget;
private final AtomicLong nINNoTarget;
private final AtomicLong nINCompactKey;
/*
* Array of stats is indexed into by the EvictionSource ordinal value.
* An EnumMap could have been an alternative, but would be heavier weight.
*/
private final AtomicLongStat[] binEvictSources;
private final AtomicLongStat[] inEvictSources;
private final AtomicLong[] numBatchTargets;
private final AtomicLongStat[] numBatches;
/* Debugging and unit test support. */
private TestHook<Object> preEvictINHook;
private TestHook<IN> evictProfile;
/* Eviction calls cannot be recursive. */
private final ReentrancyGuard reentrancyGuard;
/* Flag to help shutdown launched eviction tasks. */
private final AtomicBoolean shutdownRequested;
private final Logger logger;
Evictor(EnvironmentImpl envImpl)
throws DatabaseException {
this.envImpl = envImpl;
/* Do the stats definitions. */
stats = new StatGroup(GROUP_NAME, GROUP_DESC);
nEvictPasses = new LongStat(stats, EVICTOR_EVICT_PASSES);
nNodesScanned = new LongStat(stats, EVICTOR_NODES_SCANNED);
nNodesEvicted = new LongStat(stats, EVICTOR_NODES_EVICTED);
nRootNodesEvicted = new LongStat(stats, EVICTOR_ROOT_NODES_EVICTED);
nBINsStripped = new LongStat(stats, EVICTOR_BINS_STRIPPED);
nLNFetch = new AtomicLongStat(stats, LN_FETCH);
nBINFetch = new AtomicLongStat(stats, BIN_FETCH);
nUpperINFetch = new AtomicLongStat(stats, UPPER_IN_FETCH);
nLNFetchMiss = new AtomicLongStat(stats, LN_FETCH_MISS);
nBINFetchMiss = new AtomicLongStat(stats, BIN_FETCH_MISS);
nUpperINFetchMiss = new AtomicLongStat(stats, UPPER_IN_FETCH_MISS);
nThreadUnavailable = new AtomicLongStat(stats, THREAD_UNAVAILABLE);
nINSparseTarget = new AtomicLong(0);
nINNoTarget = new AtomicLong(0);
nINCompactKey = new AtomicLong(0);
EnumSet<EvictionSource> allSources =
EnumSet.allOf(EvictionSource.class);
int numSources = allSources.size();
binEvictSources = new AtomicLongStat[numSources];
inEvictSources = new AtomicLongStat[numSources];
numBatches = new AtomicLongStat[numSources];
numBatchTargets = new AtomicLong[numSources];
for (EvictionSource source : allSources) {
int index = source.ordinal();
binEvictSources[index] =
new AtomicLongStat(stats, source.getBINStatDef());
inEvictSources[index] =
new AtomicLongStat(stats, source.getUpperINStatDef());
numBatchTargets[index] = new AtomicLong();
numBatches[index] =
new AtomicLongStat(stats, source.getNumBatchesStatDef());
}
selector = makeSelector();
arbiter = new Arbiter(envImpl);
logger = LoggerUtils.getLogger(getClass());
reentrancyGuard = new ReentrancyGuard(envImpl, logger);
shutdownRequested = new AtomicBoolean(false);
DbConfigManager configManager = envImpl.getConfigManager();
int corePoolSize =
configManager.getInt(EnvironmentParams.EVICTOR_CORE_THREADS);
int maxPoolSize =
configManager.getInt(EnvironmentParams.EVICTOR_MAX_THREADS);
long keepAliveTime =
configManager.getDuration(EnvironmentParams.EVICTOR_KEEP_ALIVE);
terminateMillis = configManager.getDuration
(EnvironmentParams.EVICTOR_TERMINATE_TIMEOUT);
dbCacheClearCount =
configManager.getInt(EnvironmentParams.ENV_DB_CACHE_CLEAR_COUNT);
RejectedExecutionHandler rejectHandler =
new RejectEvictHandler(nThreadUnavailable);
evictionPool =
new ThreadPoolExecutor(corePoolSize,
maxPoolSize,
keepAliveTime,
TimeUnit.MILLISECONDS,
new ArrayBlockingQueue<Runnable>(1),
new StoppableThreadFactory(envImpl,
"JEEvictor",
logger),
rejectHandler);
runEvictor =
configManager.getBoolean(EnvironmentParams.ENV_RUN_EVICTOR);
allowBinDeltas = configManager.getBoolean
(EnvironmentParams.EVICTOR_ALLOW_BIN_DELTAS);
/*
* Request notification of mutable property changes. Do this after all
* fields in the evictor have been initialized, in case this is called
* quite soon.
*/
envImpl.addConfigObserver(this);
}
/**
* Respond to config updates.
*/
public void envConfigUpdate(DbConfigManager configManager,
EnvironmentMutableConfig ignore)
throws DatabaseException {
int corePoolSize =
configManager.getInt(EnvironmentParams.EVICTOR_CORE_THREADS);
int maxPoolSize =
configManager.getInt(EnvironmentParams.EVICTOR_MAX_THREADS);
long keepAliveTime =
configManager.getDuration(EnvironmentParams.EVICTOR_KEEP_ALIVE);
terminateMillis = configManager.getDuration
(EnvironmentParams.EVICTOR_TERMINATE_TIMEOUT);
dbCacheClearCount =
configManager.getInt(EnvironmentParams.ENV_DB_CACHE_CLEAR_COUNT);
evictionPool.setCorePoolSize(corePoolSize);
evictionPool.setMaximumPoolSize(maxPoolSize);
evictionPool.setKeepAliveTime(keepAliveTime, TimeUnit.MILLISECONDS);
runEvictor =
configManager.getBoolean(EnvironmentParams.ENV_RUN_EVICTOR);
}
/*
* Node selection varies based on whether this is a private or shared
* cache.
*/
abstract TargetSelector makeSelector();
/**
* Load stats.
*/
public StatGroup loadStats(StatsConfig config) {
StatGroup copy = stats.cloneGroup(config.getClear());
/*
* These stats are not cleared. They represent the current state of
* the cache.
*/
new LongStat(copy, CACHED_IN_SPARSE_TARGET, nINSparseTarget.get());
new LongStat(copy, CACHED_IN_NO_TARGET, nINNoTarget.get());
new LongStat(copy, CACHED_IN_COMPACT_KEY, nINCompactKey.get());
copy.addAll(selector.loadStats(config));
copy.addAll(arbiter.loadStats(config));
/*
* The number and average size of batches, by type of caller, is
* calculated each time we collect stats.
*/
EnumSet<EvictionSource> allSources =
EnumSet.allOf(EvictionSource.class);
for (EvictionSource source : allSources) {
int index = source.ordinal();
new IntegralLongAvgStat(copy,
source.getAvgBatchStatDef(),
numBatchTargets[index].get(),
copy.getAtomicLong
(source.getNumBatchesStatDef()));
if (config.getClear()) {
numBatchTargets[index].set(0);
}
}
return copy;
}
/**
* Do some eviction before proceeding on with another operation.
*
* Note that this method is intentionally not synchronized in order to
* minimize overhead when checking for critical eviction. This method is
* called from application threads for every cursor operation, and by many
* daemon threads.
*/
public void doCriticalEviction(boolean backgroundIO) {
if (arbiter.isOverBudget()) {
/*
* Any time there's excessive cache usage, let the thread pool know
* there's work to do.
*/
alert();
/*
* If this is an application thread, only do eviction if the
* memory budget overage fulfills the critical eviction
* requirements. We want to avoid having application threads do
* eviction.
*/
if (arbiter.needCriticalEviction()) {
doEvict(EvictionSource.CRITICAL, backgroundIO);
}
}
}
/**
* Do a check on whether synchronous eviction is needed.
*
* Note that this method is intentionally not synchronized in order to
* minimize overhead when checking for critical eviction. This method is
* called from application threads for every cursor operation.
*/
public void doDaemonEviction(boolean backgroundIO) {
if (arbiter.isOverBudget()) {
/*
* Any time there's excessive cache usage, let the thread pool know
* there's work to do.
*/
alert();
/*
* JE daemon threads should do synchronous eviction any time
* the memory budget is over.
*/
doEvict(EvictionSource.DAEMON, backgroundIO);
}
}
/*
* Eviction invoked by the API
*/
public void doManualEvict()
throws DatabaseException {
doEvict(EvictionSource.MANUAL, true); // backgroundIO
}
/**
* Evict a specific IN, used by cache modes.
*/
public void doEvictOneIN(IN target, EvictionSource source) {
if (!reentrancyGuard.enter()) {
return;
}
try {
evictIN(target, false /* backgroundIO */, source);
} finally {
reentrancyGuard.leave();
}
}
/**
* Let the eviction pool know there's work to do.
*/
public void alert() {
if (!runEvictor) {
return;
}
evictionPool.execute
(new BackgroundEvictTask(this, true /* backgroundIO */));
}
/**
* @hidden
* Return the ThreadPool, used by unit testing only.
*/
public ThreadPoolExecutor getThreadPool() {
return evictionPool;
}
/**
* Can execute concurrently; called by app threads or by the background evictor.
*/
void doEvict(EvictionSource source, boolean backgroundIO)
throws DatabaseException {
if (!reentrancyGuard.enter()) {
return;
}
try {
/*
* Repeat as necessary to keep up with allocations. Stop if no
* progress is made, to prevent an infinite loop.
*/
boolean progress = true;
int nBatches = 0;
long bytesEvicted = 0;
numBatches[source.ordinal()].increment();
while (progress && (nBatches < MAX_BATCHES_PER_RUN) &&
!shutdownRequested.get()) {
/* Get some work from the arbiter. */
long maxEvictBytes = arbiter.getEvictionPledge();
/* Nothing to do. */
if (maxEvictBytes == 0) {
break;
}
bytesEvicted = evictBatch(source, backgroundIO, maxEvictBytes);
if (bytesEvicted == 0) {
progress = false;
}
nBatches += 1;
}
/* Really for debugging. */
if (source == EvictionSource.EVICTORTHREAD) {
if (logger.isLoggable(Level.FINEST)) {
LoggerUtils.finest(logger, envImpl,
"Thread evicted " + bytesEvicted +
" bytes in " + nBatches + " batches");
}
}
} finally {
reentrancyGuard.leave();
}
}
/**
* Each iteration will attempt to evict maxEvictBytes, but will give up
* after a complete pass over the INList, or if there is nothing more to
* evict, due to actions by concurrently executing threads. This method is
* thread safe and may be called concurrently.
*
* @return the number of bytes evicted, or zero if no progress was made.
* Note that if the INList is completely empty, it's still possible to
* return a non-zero number of bytes due to special eviction of items such
* as utilization info, even though no IN eviction happened.
*/
long evictBatch(Evictor.EvictionSource source,
boolean backgroundIO,
long maxEvictBytes)
throws DatabaseException {
int numNodesScannedThisBatch = 0;
nEvictPasses.increment();
assert TestHookExecute.doHookSetupIfSet(evictProfile);
/*
* Perform class-specific per-batch processing, in advance of getting a
* batch. This is done under the TargetSelector mutex.
*
* TODO: special eviction is done serially. We may want to absolve
* application threads of that responsibility, to avoid blocking, and
* only have evictor threads do special eviction.
*/
final SetupInfo setupInfo =
selector.startBatch(true /*doSpecialEviction*/);
long evictBytes = setupInfo.specialEvictionBytes;
final int maxINsPerBatch = setupInfo.maxINsPerBatch;
if (maxINsPerBatch == 0) {
return evictBytes; // The INList(s) are empty.
}
/* Use local caching to reduce DbTree.getDb overhead. [#21330] */
final DbCache dbCache = createDbCache();
try {
/*
* Keep evicting until we've freed enough memory or we've visited
* the maximum number of nodes allowed. Each iteration of the while
* loop is called an eviction batch.
*
* In order to prevent endless evicting, limit this run to one pass
* over the IN list(s).
*/
while ((evictBytes < maxEvictBytes) &&
(numNodesScannedThisBatch <= maxINsPerBatch) &&
arbiter.stillNeedsEviction()) {
final ScanInfo scanInfo = selector.selectIN(maxINsPerBatch);
final IN target = scanInfo.target;
numNodesScannedThisBatch += scanInfo.numNodesScanned;
if (target == null) {
break;
}
numBatchTargets[source.ordinal()].incrementAndGet();
assert TestHookExecute.doHookIfSet(evictProfile, target);
/*
* Check to make sure the DB was not deleted after selecting
* it, and prevent the DB from being deleted while we're
* working with it.
*
* Also check that the refreshedDb is the same instance as the
* targetDb. If not, then targetDb was recently evicted; it
* and its IN are orphaned and cannot be processed. [#21686]
*/
final DatabaseImpl targetDb = target.getDatabase();
final DatabaseImpl refreshedDb = dbCache.getDb
(targetDb.getDbEnvironment(), targetDb.getId());
if (refreshedDb != null &&
refreshedDb == targetDb &&
!refreshedDb.isDeleted()) {
if (target.isDbRoot()) {
evictBytes += evictRoot(target, backgroundIO);
} else {
evictBytes +=
evictIN(target, backgroundIO, source);
}
} else {
/*
* We don't expect to see an IN that is resident on the
* INList with a database that has finished delete
* processing, because it should have been removed from the
* INList during post-delete cleanup. It may have been
* returned by the INList iterator after being removed from
* the INList (because we're using ConcurrentHashMap), but
* then IN.getInListResident should return false.
*/
if (targetDb.isDeleteFinished() &&
target.getInListResident()) {
final String inInfo =
" IN type=" + target.getLogType() + " id=" +
target.getNodeId() + " not expected on INList";
final String errMsg = (refreshedDb == null) ?
inInfo :
("Database " + refreshedDb.getDebugName() +
" id=" + refreshedDb.getId() + " rootLsn=" +
DbLsn.getNoFormatString
(refreshedDb.getTree().getRootLsn()) +
' ' + inInfo);
throw EnvironmentFailureException.
unexpectedState(errMsg);
}
}
}
} finally {
nNodesScanned.add(numNodesScannedThisBatch);
dbCache.releaseDbs();
}
return evictBytes;
}
/**
* Evict this DB root node. [#13415] Must be thread safe, executes
* concurrently.
*
* @return number of bytes evicted.
*/
private long evictRoot(final IN target,
final boolean backgroundIO)
throws DatabaseException {
final DatabaseImpl db = target.getDatabase();
/* SharedEvictor uses multiple envs, do not use superclass envImpl. */
final EnvironmentImpl useEnvImpl = db.getDbEnvironment();
final INList inList = useEnvImpl.getInMemoryINs();
class RootEvictor implements WithRootLatched {
boolean flushed = false;
long evictBytes = 0;
public IN doWork(ChildReference root)
throws DatabaseException {
/*
* Do not call fetchTarget since this root or DB should be
* resident already if it is to be the target of eviction. If
* it is not present, it has been evicted by another thread and
* should not be fetched for two reasons: 1) this would be
* counterproductive, 2) to guard against bringing in a root
* for an evicted DB. The check for getInListResident below
* also guards against this later possibility. [#21686]
*/
IN rootIN = (IN) root.getTarget();
if (rootIN == null) {
return null;
}
rootIN.latch(CacheMode.UNCHANGED);
try {
/* Re-check that all conditions still hold. */
boolean isDirty = rootIN.getDirty();
if (rootIN == target &&
rootIN.getInListResident() &&
rootIN.isDbRoot() &&
rootIN.isEvictable() &&
!(useEnvImpl.isReadOnly() && isDirty)) {
boolean logProvisional =
coordinateWithCheckpoint(rootIN, null /*parent*/);
/* Flush if dirty. */
if (isDirty) {
long newLsn = rootIN.log
(useEnvImpl.getLogManager(),
false, // allowDeltas
false, // allowCompress
logProvisional,
backgroundIO,
null); // parent
root.setLsn(newLsn);
flushed = true;
}
/* Take off the INList and adjust memory budget. */
inList.remove(rootIN);
evictBytes = rootIN.getBudgetedMemorySize();
/* Evict IN. */
root.clearTarget();
/* Stats */
nRootNodesEvicted.increment();
}
} finally {
rootIN.releaseLatch();
}
return null;
}
}
/* Attempt to evict the DB root IN. */
RootEvictor evictor = new RootEvictor();
db.getTree().withRootLatchedExclusive(evictor);
/* If the root IN was flushed, write the dirtied MapLN. */
if (evictor.flushed) {
useEnvImpl.getDbTree().modifyDbRoot(db);
}
return evictor.evictBytes;
}
/**
* Strip or evict this node. Must be thread safe, executes concurrently.
*
* @param source is EvictSource.CRITICAL or EVICTORTHREAD when this
* operation is invoked by the evictor (either critical eviction or the
* evictor background thread), and is EvictSource.CACHEMODE if invoked by a
* user operation using CacheMode.EVICT_BIN. If CACHEMODE, we will perform
* the eviction regardless of whether:
* 1) we have to wait for a latch, or
* 2) the IN generation changes, or
* 3) we are able to strip LNs.
*
* If not CACHEMODE, any of the above conditions will prevent eviction.
*
* @return number of bytes evicted.
*/
private long evictIN(IN target, boolean backgroundIO, EvictionSource source)
throws DatabaseException {
DatabaseImpl db = target.getDatabase();
/* SharedEvictor uses multiple envs, do not use superclass envImpl. */
EnvironmentImpl useEnvImpl = db.getDbEnvironment();
long evictedBytes = 0;
/*
* Non-BIN INs are evicted by detaching them from their parent. For
* BINS, the first step is to remove deleted entries by compressing
* the BIN. The evictor indicates that we shouldn't fault in
* non-resident children during compression. After compression,
* LN logging and LN stripping may be performed.
*
* If LN stripping is used, first we strip the BIN by logging any dirty
* LN children and detaching all its resident LN targets. If we make
* progress doing that, we stop and will not evict the BIN itself until
* possibly later. If it has no resident LNs then we evict the BIN
* itself using the "regular" detach-from-parent routine.
*
* If the cleaner is doing clustering, we don't do BIN stripping if we
* can write out the BIN. Specifically LN stripping is not performed
* if the BIN is dirty AND the BIN is evictable AND cleaner
* clustering is enabled. In this case the BIN is going to be written
* out soon, and with clustering we want to be sure to write out the
* LNs with the BIN; therefore we don't do stripping.
*/
/*
* Use latchNoWait because if it's latched we don't want the cleaner
* to hold up eviction while it migrates an entire BIN. Latched INs
* have a high generation value, so not evicting makes sense. Pass
* false because we don't want to change the generation during the
* eviction process.
*/
boolean inline = (source == EvictionSource.CACHEMODE);
if (inline) {
target.latch(CacheMode.UNCHANGED);
} else {
if (!target.latchNoWait(CacheMode.UNCHANGED)) {
return evictedBytes;
}
}
boolean targetIsLatched = true;
boolean success = false;
try {
/*
* After latching it, ensure that this node was not evicted by
* another thread. Do this now, before the Btree lookup, since we
* should not compress or evict LNs for an orphaned IN. [#21686]
*/
if (!target.getInListResident()) {
return evictedBytes;
}
if (target instanceof BIN) {
/*
* Strip any resident LN targets right now. This may dirty
* the BIN if dirty LNs were written out. Note that
* migrated BIN entries cannot be stripped.
*/
evictedBytes = ((BIN) target).evictLNs();
if (evictedBytes > 0) {
nBINsStripped.increment();
}
}
/*
* If we were able to free any memory by LN stripping above,
* then we postpone eviction of the BIN until a later pass.
* The presence of migrated entries would have inhibited LN
* stripping. In that case, the BIN can still be evicted,
* but the marked entries will have to be migrated. That would
* happen when the target is logged in evictIN.
*/
if (!inline && evictedBytes != 0) {
success = true;
return evictedBytes;
}
if (!target.isEvictable()) {
success = true;
return evictedBytes;
}
/* Regular eviction. */
Tree tree = db.getTree();
/*
* Unit testing. The target is latched and we are about to release
* that latch and search for the parent. Make sure that other
* operations, such as dirtying an LN in the target BIN, can occur
* safely in this window. [#18227]
*/
assert TestHookExecute.doHookIfSet(preEvictINHook);
/* getParentINForChildIN unlatches target. */
targetIsLatched = false;
SearchResult result = tree.getParentINForChildIN
(target, true /*requireExactMatch*/, CacheMode.UNCHANGED);
if (result.exactParentFound) {
evictedBytes = evictIN(target, result.parent,
result.index, backgroundIO, source);
}
success = true;
return evictedBytes;
} finally {
if (targetIsLatched) {
target.releaseLatch();
}
}
}
/**
* Evict an IN. Dirty nodes are logged before they're evicted.
*/
private long evictIN(IN child,
IN parent,
int index,
boolean backgroundIO,
EvictionSource source)
throws DatabaseException {
long evictBytes = 0;
try {
assert parent.isLatchOwnerForWrite();
long oldGenerationCount = child.getGeneration();
/*
* Get a new reference to the child, in case the reference
* saved in the selection list became out of date because of
* changes to that parent.
*/
IN renewedChild = (IN) parent.getTarget(index);
if (renewedChild == null) {
return evictBytes;
}
boolean inline = (source == EvictionSource.CACHEMODE);
if (!inline && renewedChild.getGeneration() > oldGenerationCount) {
return evictBytes;
}
/*
* See the evictIN() method in this class for an explanation for
* calling latchNoWait().
*/
if (inline) {
renewedChild.latch(CacheMode.UNCHANGED);
} else {
if (!renewedChild.latchNoWait(CacheMode.UNCHANGED)) {
return evictBytes;
}
}
try {
if (!renewedChild.isEvictable()) {
return evictBytes;
}
DatabaseImpl db = renewedChild.getDatabase();
/* Do not use superclass envImpl. */
EnvironmentImpl useEnvImpl = db.getDbEnvironment();
/*
* Log the child if dirty and env is not r/o. Remove
* from IN list.
*/
long renewedChildLsn = DbLsn.NULL_LSN;
boolean newChildLsn = false;
if (renewedChild.getDirty()) {
if (!useEnvImpl.isReadOnly()) {
boolean logProvisional =
coordinateWithCheckpoint(renewedChild, parent);
/*
* Log a full version (no deltas) and with cleaner
* migration allowed. Allow compression of deleted
* slots in full version BINs.
*/
renewedChildLsn = renewedChild.log
(useEnvImpl.getLogManager(),
allowBinDeltas,
true /*allowCompress*/,
logProvisional,
backgroundIO,
parent);
newChildLsn = true;
}
} else {
renewedChildLsn = parent.getLsn(index);
}
if (renewedChildLsn != DbLsn.NULL_LSN) {
/* Take this off the inlist. */
useEnvImpl.getInMemoryINs().remove(renewedChild);
evictBytes = renewedChild.getBudgetedMemorySize();
if (newChildLsn) {
/*
* Update the parent so its reference is
* null and it has the proper LSN.
*/
parent.updateNode
(index, null /*node*/, renewedChildLsn,
null /*lnSlotKey*/);
} else {
/*
* Null out the reference, but don't dirty
* the node since only the reference
* changed.
*/
parent.updateNode
(index, (Node) null /*node*/,
null /*lnSlotKey*/);
}
/* Stats */
nNodesEvicted.increment();
renewedChild.incEvictStats(source);
}
} finally {
renewedChild.releaseLatch();
}
} finally {
parent.releaseLatch();
}
return evictBytes;
}
public void incBINEvictStats(EvictionSource source) {
binEvictSources[source.ordinal()].increment();
}
public void incINEvictStats(EvictionSource source) {
inEvictSources[source.ordinal()].increment();
}
/**
* Update the appropriate fetch stat, based on node type.
*/
public void incLNFetchStats(boolean isMiss) {
nLNFetch.increment();
if (isMiss) {
nLNFetchMiss.increment();
}
}
public void incBINFetchStats(boolean isMiss) {
nBINFetch.increment();
if (isMiss) {
nBINFetchMiss.increment();
}
}
public void incINFetchStats(boolean isMiss) {
nUpperINFetch.increment();
if (isMiss) {
nUpperINFetchMiss.increment();
}
}
public AtomicLong getNINSparseTarget() {
return nINSparseTarget;
}
public AtomicLong getNINNoTarget() {
return nINNoTarget;
}
public AtomicLong getNINCompactKey() {
return nINCompactKey;
}
/**
* Coordinates an eviction with an in-progress checkpoint and returns
* whether provisional logging is needed.
*
* @return true if the target must be logged provisionally.
*/
private boolean coordinateWithCheckpoint(IN target, IN parent) {
/* SharedEvictor uses multiple envs, do not use superclass envImpl. */
EnvironmentImpl useEnvImpl = target.getDatabase().getDbEnvironment();
/*
* The checkpointer could be null if it was shutdown or never
* started.
*/
Checkpointer ckpter = useEnvImpl.getCheckpointer();
if (ckpter == null) {
return false;
}
return ckpter.coordinateEvictionWithCheckpoint(target, parent);
}
public void addEnvironment(EnvironmentImpl additionalEnvImpl) {
selector.addEnvironment(additionalEnvImpl);
}
public void removeEnvironment(EnvironmentImpl targetEnvImpl) {
selector.removeEnvironment(targetEnvImpl);
}
/* For unit testing only. */
public void setPreEvictINHook(TestHook<Object> hook) {
preEvictINHook = hook;
}
/* For unit testing only. */
public void setEvictProfileHook(TestHook<IN> hook) {
evictProfile = hook;
}
/**
* Called whenever INs are added to, or removed from, the INList.
*/
public void noteINListChange(int nINs) {
selector.noteINListChange(nINs);
}
/**
* Only supported by SharedEvictor.
*/
public boolean checkEnv(EnvironmentImpl env) {
return selector.checkEnv(env);
}
public StatGroup getStatsGroup() {
return stats;
}
/* For unit testing only. */
public void setRunnableHook(TestHook<Boolean> hook) {
arbiter.setRunnableHook(hook);
}
public boolean isCacheFull() {
return arbiter.isCacheFull();
}
public boolean wasCacheEverFull() {
return arbiter.wasCacheEverFull();
}
/* For unit test only */
TargetSelector getSelector() {
return selector;
}
/**
* Request and wait for a shutdown of all running eviction tasks.
*/
public void shutdown() {
/*
* Set the shutdown flag so that outstanding eviction tasks end
* early. The call to evictionPool.shutdown is a ThreadPoolExecutor
* call, and is an orderly shutdown that waits for any in-flight tasks
* to end.
*/
shutdownRequested.set(true);
evictionPool.shutdown();
/*
* AwaitTermination will wait for the timeout period, or will be
* interrupted, but we don't really care which it is. The evictor
* shouldn't be interrupted, but if it is, something urgent is
* happening.
*/
boolean shutdownFinished = false;
try {
shutdownFinished =
evictionPool.awaitTermination(terminateMillis,
TimeUnit.MILLISECONDS);
} catch (InterruptedException e) {
/* We've been interrupted, just give up and end. */
} finally {
if (!shutdownFinished) {
evictionPool.shutdownNow();
}
}
}
public void requestShutdownPool() {
shutdownRequested.set(true);
evictionPool.shutdown();
}
private static class ReentrancyGuard {
private final ConcurrentHashMap<Thread, Thread> activeThreads;
private final EnvironmentImpl envImpl;
private final Logger logger;
ReentrancyGuard(EnvironmentImpl envImpl, Logger logger) {
this.envImpl = envImpl;
this.logger = logger;
activeThreads = new ConcurrentHashMap<Thread, Thread>();
}
boolean enter() {
Thread thisThread = Thread.currentThread();
if (activeThreads.containsKey(thisThread)) {
/* We don't really expect a reentrant call. */
LoggerUtils.severe(logger, envImpl,
"reentrant call to eviction from " +
LoggerUtils.getStackTrace());
/* If running w/assertions, in testing mode, assert here. */
assert false: "reentrant call to eviction from " +
LoggerUtils.getStackTrace();
return false;
}
activeThreads.put(thisThread, thisThread);
return true;
}
void leave() {
assert activeThreads.containsKey(Thread.currentThread());
activeThreads.remove(Thread.currentThread());
}
}
static class BackgroundEvictTask implements Runnable {
private final Evictor evictor;
private final boolean backgroundIO;
BackgroundEvictTask(Evictor evictor,
boolean backgroundIO) {
this.evictor = evictor;
this.backgroundIO = backgroundIO;
}
public void run() {
evictor.doEvict(EvictionSource.EVICTORTHREAD, backgroundIO);
}
}
static class RejectEvictHandler implements RejectedExecutionHandler {
private final AtomicLongStat threadUnavailableStat;
RejectEvictHandler(AtomicLongStat threadUnavailableStat) {
this.threadUnavailableStat = threadUnavailableStat;
}
public void rejectedExecution(Runnable r,
ThreadPoolExecutor executor) {
threadUnavailableStat.increment();
}
}
/**
* Caches DatabaseImpls to reduce DbTree.getDb overhead. Implemented
* differently by PrivateEvictor and SharedEvictor. [#21330]
*/
interface DbCache {
/**
* Calls DbTree.getDb for the given environment and database ID, and
* caches the result to optimize multiple calls for the same DB.
*
* @param envImpl identifies which environment the dbId parameter
* belongs to. For PrivateEvictor, it is the same as the
* Evictor.envImpl field.
*
* @param dbId is the DB to get.
*/
DatabaseImpl getDb(EnvironmentImpl envImpl, DatabaseId dbId);
/**
* Calls DbTree.releaseDb for cached DBs, and clears the cache.
*/
void releaseDbs();
}
abstract DbCache createDbCache();
}
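/*
 * Illustration only (not part of the JE sources): a self-contained sketch of the pool
 * configuration pattern used by the Evictor above -- a bounded core/max pool, a one-slot
 * work queue, and a rejection handler that simply counts rejected submissions instead of
 * blocking the caller. All names below are hypothetical; only standard java.util.concurrent
 * classes are used.
 */
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

class BoundedEvictionPoolSketch {
    public static void main(String[] args) throws InterruptedException {
        final AtomicLong nRejected = new AtomicLong();
        RejectedExecutionHandler countingHandler = new RejectedExecutionHandler() {
            public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
                nRejected.incrementAndGet();   // analogous to nThreadUnavailable above
            }
        };
        ThreadPoolExecutor pool = new ThreadPoolExecutor(
            1,                                    // core threads
            2,                                    // max threads
            10, TimeUnit.SECONDS,                 // keep-alive for non-core threads
            new ArrayBlockingQueue<Runnable>(1),  // at most one queued task
            countingHandler);
        for (int i = 0; i < 10; i++) {
            pool.execute(new Runnable() {
                public void run() {
                    try { Thread.sleep(100); } catch (InterruptedException ignored) { }
                }
            });
        }
        pool.shutdown();                          // orderly shutdown, as in shutdown() above
        pool.awaitTermination(5, TimeUnit.SECONDS);
        System.out.println("rejected submissions: " + nRejected.get());
    }
}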
|
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package jbrush.Core;
import java.awt.Color;
import java.util.ArrayList;
import java.util.Vector;
/**
*
* @author Shirofuji
*/
public class Polygon implements Renderable{
public ArrayList<Vertex> vertices;
public ArrayList<Edge> edges;
public ArrayList<Pixel_3D> pixelBuffer;
public Surface s;
public Vector3d plane;
private int default_color = 0xD3D3D3;
private Integer color = null;
public Polygon(Vertex A, Vertex B, Vertex C, Vertex D){
this.vertices = new ArrayList<>();
this.edges = new ArrayList<>();
this.vertices.add(A);
this.vertices.add(B);
this.vertices.add(C);
this.vertices.add(D);
this.edges.add(new Edge(this.vertices.get(0), this.vertices.get(1)));
this.edges.add(new Edge(this.vertices.get(1), this.vertices.get(2)));
this.edges.add(new Edge(this.vertices.get(2), this.vertices.get(3)));
this.edges.add(new Edge(this.vertices.get(3), this.vertices.get(0)));
}
public Vector3d getNormalVector(){
Vector3d a;
Vector3d b;
Vector3d c;
a = new Vector3d(this.vertices.get(0));
b = new Vector3d(this.vertices.get(1));
c = new Vector3d(this.vertices.get(2));
Vector3d ab = a.copy();
ab.sub(b);
Vector3d bc = c.copy();
bc.sub(b);
Vector3d cross = ab.cross(bc);
return cross;
}
public Vector3d getCenter(){
// Centroid of the quad: the average of its four corner vertices.
Vertex a = this.vertices.get(0);
Vertex b = this.vertices.get(1);
Vertex c = this.vertices.get(2);
Vertex d = this.vertices.get(3);
return new Vector3d(new Vertex(
(a.x + b.x + c.x + d.x) / 4.0,
(a.y + b.y + c.y + d.y) / 4.0,
(a.z + b.z + c.z + d.z) / 4.0));
}
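// scanlineFill walks the polygon's axis-aligned bounding box in unit steps, toggling the
// fill flag whenever an edge reports an intersection and buffering the sample points at
// which the flag switches on.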
public void scanlineFill(){
pixelBuffer = new ArrayList<>();
boolean fill = false;
for(double x = this.getXMin(); x <= this.getXMax(); x++){
for(double y = this.getYMin(); y<= this.getYMax(); y++){
for(double z = this.getZMin(); z<=this.getZMax(); z++){
Vertex v = new Vertex(x,y,z);
for(Edge e: this.edges){
if(e.intersects(v)){
fill = !fill;
if(fill){
pixelBuffer.add(new Pixel_3D(v));
}
}
}
}
}
}
}
public double getYMax(){
int count = this.vertices.size();
double yMax = 0;
for(int i = 0;i<count;i++){
if(i == 0){
yMax = this.vertices.get(i).y;
}else{
double current_y = this.vertices.get(i).y;
if(current_y > yMax){
yMax = current_y;
}
}
}
return yMax;
}
public double getYMin(){
int count = this.vertices.size();
double yMin = 0;
for(int i = 0;i<count;i++){
if(i == 0){
yMin = this.vertices.get(i).y;
}else{
double current_y = this.vertices.get(i).y;
if(current_y < yMin){
yMin = current_y;
}
}
}
return yMin;
}
public double getXMax(){
int count = this.vertices.size();
double xMax = 0;
for(int i = 0;i<count;i++){
if(i == 0){
xMax = this.vertices.get(i).x;
}else{
double current_x = this.vertices.get(i).x;
if(current_x > xMax){
xMax = current_x;
}
}
}
return xMax;
}
public double getXMin(){
int count = this.vertices.size();
double xMin = 0;
for(int i = 0;i<count;i++){
if(i == 0){
xMin = this.vertices.get(i).x;
}else{
double current_x = this.vertices.get(i).x;
if(current_x < xMin){
xMin = current_x;
}
}
}
return xMin;
}
public double getZMax(){
int count = this.vertices.size();
double zMax = 0;
for(int i = 0;i<count;i++){
if(i == 0){
zMax = this.vertices.get(i).z;
}else{
double current_z = this.vertices.get(i).z;
if(current_z > zMax){
zMax = current_z;
}
}
}
return zMax;
}
public double getZMin(){
int count = this.vertices.size();
double zMin = 0;
for(int i = 0;i<count;i++){
if(i == 0){
zMin = this.vertices.get(i).z;
}else{
double current_z = this.vertices.get(i).z;
if(current_z < zMin){
zMin = current_z;
}
}
}
return zMin;
}
@Override
public boolean intersect(Ray r) {
Vector3d normal = this.getNormalVector();
Vector3d center = this.getCenter();
float denom = normal.dot(r.direction);
if(Math.abs(denom) > 0.0001f){
// Vector from the ray origin to a point on the polygon's plane (its center).
center.sub(new Vector3d(r.origin.x, r.origin.y, r.origin.z));
// Parametric distance along the ray to the plane; only a forward hit counts.
float t = center.dot(normal) / denom;
if(t >= 0f) return true;
}
return false;
}
@Override
public Color Shade(Ray r, Vector lights, Vector objects, Color bgnd) {
// Hit point along the ray.
float px = r.origin.x + r.t*r.direction.x;
float py = r.origin.y + r.t*r.direction.y;
float pz = r.origin.z + r.t*r.direction.z;
Vector3d center= this.getCenter();
Vector3d p = new Vector3d(px,py,pz);
// View vector, pointing back towards the ray origin.
Vector3d v = new Vector3d(-r.direction.x,-r.direction.y,-r.direction.z);
// Outward normal estimate at the hit point (hit point minus polygon center).
Vector3d n = new Vector3d(px - center.x,py - center.y,pz-center.z);
// Pass the normal to the surface shader instead of the view vector twice.
return s.Shade(p, n, v, lights, objects, bgnd);
}
}
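// Editor's usage sketch (not part of the original jbrush source): a minimal check of the
// Polygon helpers above. It only assumes the Vertex(double, double, double) constructor
// already used by scanlineFill() and the x/y/z fields on Vector3d that Shade() reads.
class PolygonUsageSketch {
public static void main(String[] args) {
// Unit square in the XY plane.
Polygon quad = new Polygon(new Vertex(0, 0, 0), new Vertex(1, 0, 0),
new Vertex(1, 1, 0), new Vertex(0, 1, 0));
Vector3d normal = quad.getNormalVector();
// The normal should be perpendicular to the XY plane (x = y = 0, z != 0).
System.out.println("normal = (" + normal.x + ", " + normal.y + ", " + normal.z + ")");
// Bounding box of the square: width and height are both 1.0.
System.out.println("width = " + (quad.getXMax() - quad.getXMin()));
System.out.println("height = " + (quad.getYMax() - quad.getYMin()));
}
}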
|
|
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.gradle;
import com.intellij.execution.ExecutionException;
import com.intellij.execution.Executor;
import com.intellij.execution.configurations.RunConfiguration;
import com.intellij.execution.configurations.SearchScopeProvider;
import com.intellij.execution.configurations.SimpleJavaParameters;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.externalSystem.ExternalSystemAutoImportAware;
import com.intellij.openapi.externalSystem.ExternalSystemConfigurableAware;
import com.intellij.openapi.externalSystem.ExternalSystemManager;
import com.intellij.openapi.externalSystem.ExternalSystemUiAware;
import com.intellij.openapi.externalSystem.model.DataNode;
import com.intellij.openapi.externalSystem.model.ExternalProjectInfo;
import com.intellij.openapi.externalSystem.model.ProjectKeys;
import com.intellij.openapi.externalSystem.model.ProjectSystemId;
import com.intellij.openapi.externalSystem.model.execution.ExternalSystemTaskExecutionSettings;
import com.intellij.openapi.externalSystem.model.execution.ExternalTaskExecutionInfo;
import com.intellij.openapi.externalSystem.model.execution.ExternalTaskPojo;
import com.intellij.openapi.externalSystem.model.project.ExternalProjectPojo;
import com.intellij.openapi.externalSystem.model.project.ModuleData;
import com.intellij.openapi.externalSystem.service.execution.ExternalSystemRunConfiguration;
import com.intellij.openapi.externalSystem.service.project.ExternalSystemProjectResolver;
import com.intellij.openapi.externalSystem.service.project.ProjectDataManager;
import com.intellij.openapi.externalSystem.service.project.autoimport.CachingExternalSystemAutoImportAware;
import com.intellij.openapi.externalSystem.service.project.manage.ExternalProjectsManager;
import com.intellij.openapi.externalSystem.service.ui.DefaultExternalSystemUiAware;
import com.intellij.openapi.externalSystem.task.ExternalSystemTaskManager;
import com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil;
import com.intellij.openapi.externalSystem.util.ExternalSystemConstants;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.startup.StartupActivity;
import com.intellij.openapi.util.AtomicNotNullLazyValue;
import com.intellij.openapi.util.NotNullLazyValue;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.util.Function;
import com.intellij.util.PathUtil;
import com.intellij.util.PathsList;
import com.intellij.util.containers.ContainerUtilRt;
import com.intellij.util.messages.MessageBusConnection;
import icons.GradleIcons;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.gradle.config.GradleSettingsListenerAdapter;
import org.jetbrains.plugins.gradle.execution.test.runner.GradleConsoleProperties;
import org.jetbrains.plugins.gradle.model.data.BuildParticipant;
import org.jetbrains.plugins.gradle.model.data.GradleSourceSetData;
import org.jetbrains.plugins.gradle.service.GradleInstallationManager;
import org.jetbrains.plugins.gradle.service.project.GradleAutoImportAware;
import org.jetbrains.plugins.gradle.service.project.GradleProjectResolver;
import org.jetbrains.plugins.gradle.service.project.GradleProjectResolverExtension;
import org.jetbrains.plugins.gradle.service.settings.GradleConfigurable;
import org.jetbrains.plugins.gradle.service.task.GradleTaskManager;
import org.jetbrains.plugins.gradle.settings.*;
import org.jetbrains.plugins.gradle.util.GradleConstants;
import org.jetbrains.plugins.gradle.util.GradleUtil;
import javax.swing.*;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.*;
import static com.intellij.openapi.util.io.FileUtil.pathsEqual;
/**
* @author Denis Zhdanov
* @since 4/10/13 1:19 PM
*/
public class GradleManager
implements ExternalSystemConfigurableAware, ExternalSystemUiAware, ExternalSystemAutoImportAware, StartupActivity, ExternalSystemManager<
GradleProjectSettings,
GradleSettingsListener,
GradleSettings,
GradleLocalSettings,
GradleExecutionSettings> {
private static final Logger LOG = Logger.getInstance(GradleManager.class);
@NotNull private final ExternalSystemAutoImportAware myAutoImportDelegate =
new CachingExternalSystemAutoImportAware(new GradleAutoImportAware());
@NotNull
private final GradleInstallationManager myInstallationManager;
@NotNull private static final NotNullLazyValue<List<GradleProjectResolverExtension>> RESOLVER_EXTENSIONS =
new AtomicNotNullLazyValue<List<GradleProjectResolverExtension>>() {
@NotNull
@Override
protected List<GradleProjectResolverExtension> compute() {
List<GradleProjectResolverExtension> result = ContainerUtilRt.newArrayList();
Collections.addAll(result, GradleProjectResolverExtension.EP_NAME.getExtensions());
ExternalSystemApiUtil.orderAwareSort(result);
return result;
}
};
public GradleManager(@NotNull GradleInstallationManager manager) {
myInstallationManager = manager;
}
@NotNull
@Override
public ProjectSystemId getSystemId() {
return GradleConstants.SYSTEM_ID;
}
@NotNull
@Override
public Function<Project, GradleSettings> getSettingsProvider() {
return project -> GradleSettings.getInstance(project);
}
@NotNull
@Override
public Function<Project, GradleLocalSettings> getLocalSettingsProvider() {
return project -> GradleLocalSettings.getInstance(project);
}
@NotNull
@Override
public Function<Pair<Project, String>, GradleExecutionSettings> getExecutionSettingsProvider() {
return pair -> {
final Project project = pair.first;
GradleSettings settings = GradleSettings.getInstance(project);
File gradleHome = myInstallationManager.getGradleHome(project, pair.second);
String localGradlePath = null;
if (gradleHome != null) {
try {
// Try to resolve symbolic links as there were problems with them at the gradle side.
localGradlePath = gradleHome.getCanonicalPath();
}
catch (IOException e) {
localGradlePath = gradleHome.getAbsolutePath();
}
}
GradleProjectSettings projectLevelSettings = settings.getLinkedProjectSettings(pair.second);
final DistributionType distributionType;
if (projectLevelSettings == null) {
distributionType =
GradleUtil.isGradleDefaultWrapperFilesExist(pair.second) ? DistributionType.DEFAULT_WRAPPED : DistributionType.BUNDLED;
}
else {
distributionType =
projectLevelSettings.getDistributionType() == null ? DistributionType.LOCAL : projectLevelSettings.getDistributionType();
}
GradleExecutionSettings result = new GradleExecutionSettings(localGradlePath,
settings.getServiceDirectoryPath(),
distributionType,
settings.getGradleVmOptions(),
settings.isOfflineWork());
for (GradleProjectResolverExtension extension : RESOLVER_EXTENSIONS.getValue()) {
result.addResolverExtensionClass(ClassHolder.from(extension.getClass()));
}
final String rootProjectPath = projectLevelSettings != null ? projectLevelSettings.getExternalProjectPath() : pair.second;
final Sdk gradleJdk = myInstallationManager.getGradleJdk(project, rootProjectPath);
final String javaHome = gradleJdk != null ? gradleJdk.getHomePath() : null;
if (!StringUtil.isEmpty(javaHome)) {
LOG.info("Instructing gradle to use java from " + javaHome);
}
result.setJavaHome(javaHome);
String ideProjectPath;
if (project.getBasePath() == null ||
(project.getProjectFilePath() != null && StringUtil.endsWith(project.getProjectFilePath(), ".ipr"))) {
ideProjectPath = rootProjectPath;
}
else {
ideProjectPath = project.getBasePath() + "/.idea/modules";
}
result.setIdeProjectPath(ideProjectPath);
if (projectLevelSettings != null) {
result.setResolveModulePerSourceSet(projectLevelSettings.isResolveModulePerSourceSet());
result.setUseQualifiedModuleNames(projectLevelSettings.isUseQualifiedModuleNames());
}
configureExecutionWorkspace(projectLevelSettings, settings, result, project, pair.second);
return result;
};
}
/**
* Adds composite build participants to the execution workspace of the given Gradle project.
*/
private static void configureExecutionWorkspace(@Nullable GradleProjectSettings compositeRootSettings,
GradleSettings settings,
GradleExecutionSettings result,
Project project,
String projectPath) {
if (compositeRootSettings == null || compositeRootSettings.getCompositeBuild() == null) return;
GradleProjectSettings.CompositeBuild compositeBuild = compositeRootSettings.getCompositeBuild();
if (compositeBuild.getCompositeDefinitionSource() == CompositeDefinitionSource.SCRIPT) {
if (pathsEqual(compositeRootSettings.getExternalProjectPath(), projectPath)) return;
for (BuildParticipant buildParticipant : compositeBuild.getCompositeParticipants()) {
if (pathsEqual(buildParticipant.getRootPath(), projectPath)) continue;
if (buildParticipant.getProjects().stream().anyMatch(path -> pathsEqual(path, projectPath))) {
continue;
}
result.getExecutionWorkspace().addBuildParticipant(new GradleBuildParticipant(buildParticipant.getRootPath()));
}
return;
}
for (GradleProjectSettings projectSettings : settings.getLinkedProjectsSettings()) {
if (projectSettings == compositeRootSettings) continue;
if (compositeBuild.getCompositeParticipants()
.stream()
.noneMatch(participant -> pathsEqual(participant.getRootPath(), projectSettings.getExternalProjectPath()))) {
continue;
}
GradleBuildParticipant buildParticipant = new GradleBuildParticipant(projectSettings.getExternalProjectPath());
ExternalProjectInfo projectData = ProjectDataManager.getInstance()
.getExternalProjectData(project, GradleConstants.SYSTEM_ID, projectSettings.getExternalProjectPath());
if (projectData == null || projectData.getExternalProjectStructure() == null) continue;
Collection<DataNode<ModuleData>> moduleNodes =
ExternalSystemApiUtil.findAll(projectData.getExternalProjectStructure(), ProjectKeys.MODULE);
for (DataNode<ModuleData> moduleNode : moduleNodes) {
ModuleData moduleData = moduleNode.getData();
if (moduleData.getArtifacts().isEmpty()) {
Collection<DataNode<GradleSourceSetData>> sourceSetNodes = ExternalSystemApiUtil.findAll(moduleNode, GradleSourceSetData.KEY);
for (DataNode<GradleSourceSetData> sourceSetNode : sourceSetNodes) {
buildParticipant.addModule(sourceSetNode.getData());
}
}
else {
buildParticipant.addModule(moduleData);
}
}
result.getExecutionWorkspace().addBuildParticipant(buildParticipant);
}
}
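// Puts the jar of every registered Gradle resolver extension (and the jars of its extra
// project-model classes) on the classpath of the external system process, and tags that
// process with the Gradle system id.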
@Override
public void enhanceRemoteProcessing(@NotNull SimpleJavaParameters parameters) throws ExecutionException {
final Set<String> additionalEntries = ContainerUtilRt.newHashSet();
for (GradleProjectResolverExtension extension : RESOLVER_EXTENSIONS.getValue()) {
ContainerUtilRt.addIfNotNull(additionalEntries, PathUtil.getJarPathForClass(extension.getClass()));
for (Class aClass : extension.getExtraProjectModelClasses()) {
ContainerUtilRt.addIfNotNull(additionalEntries, PathUtil.getJarPathForClass(aClass));
}
extension.enhanceRemoteProcessing(parameters);
}
final PathsList classPath = parameters.getClassPath();
for (String entry : additionalEntries) {
classPath.add(entry);
}
parameters.getVMParametersList().addProperty(
ExternalSystemConstants.EXTERNAL_SYSTEM_ID_KEY, GradleConstants.SYSTEM_ID.getId());
}
@Override
public void enhanceLocalProcessing(@NotNull List<URL> urls) {
}
@NotNull
@Override
public Class<? extends ExternalSystemProjectResolver<GradleExecutionSettings>> getProjectResolverClass() {
return GradleProjectResolver.class;
}
@Override
public Class<? extends ExternalSystemTaskManager<GradleExecutionSettings>> getTaskManagerClass() {
return GradleTaskManager.class;
}
@NotNull
@Override
public Configurable getConfigurable(@NotNull Project project) {
return new GradleConfigurable(project);
}
@Nullable
@Override
public FileChooserDescriptor getExternalProjectConfigDescriptor() {
// the project *.gradle script can be absent for a gradle subproject
return FileChooserDescriptorFactory.createSingleFolderDescriptor();
}
@Nullable
@Override
public Icon getProjectIcon() {
return GradleIcons.Gradle;
}
@Nullable
@Override
public Icon getTaskIcon() {
return DefaultExternalSystemUiAware.INSTANCE.getTaskIcon();
}
@NotNull
@Override
public String getProjectRepresentationName(@NotNull String targetProjectPath, @Nullable String rootProjectPath) {
return ExternalSystemApiUtil.getProjectRepresentationName(targetProjectPath, rootProjectPath);
}
@NotNull
@Override
public String getProjectRepresentationName(@NotNull Project project,
@NotNull String targetProjectPath,
@Nullable String rootProjectPath) {
GradleProjectSettings projectSettings = GradleSettings.getInstance(project).getLinkedProjectSettings(targetProjectPath);
if (projectSettings != null && projectSettings.getCompositeBuild() != null) {
for (BuildParticipant buildParticipant : projectSettings.getCompositeBuild().getCompositeParticipants()) {
if (buildParticipant.getProjects().contains(targetProjectPath)) {
return ExternalSystemApiUtil.getProjectRepresentationName(targetProjectPath, buildParticipant.getRootPath());
}
}
}
return ExternalSystemApiUtil.getProjectRepresentationName(targetProjectPath, rootProjectPath);
}
@Nullable
@Override
public String getAffectedExternalProjectPath(@NotNull String changedFileOrDirPath, @NotNull Project project) {
return myAutoImportDelegate.getAffectedExternalProjectPath(changedFileOrDirPath, project);
}
@Override
public List<File> getAffectedExternalProjectFiles(String projectPath, @NotNull Project project) {
return myAutoImportDelegate.getAffectedExternalProjectFiles(projectPath, project);
}
@NotNull
@Override
public FileChooserDescriptor getExternalProjectDescriptor() {
return GradleUtil.getGradleProjectFileChooserDescriptor();
}
@Nullable
@Override
public GlobalSearchScope getSearchScope(@NotNull Project project, @NotNull ExternalSystemTaskExecutionSettings taskExecutionSettings) {
String projectPath = taskExecutionSettings.getExternalProjectPath();
if (StringUtil.isEmpty(projectPath)) return null;
GradleProjectSettings projectSettings = getSettingsProvider().fun(project).getLinkedProjectSettings(projectPath);
if (projectSettings == null) return null;
if (!projectSettings.isResolveModulePerSourceSet()) {
// use default implementation which will find target module using projectPathFile
return null;
}
else {
Module[] modules = Arrays.stream(ModuleManager.getInstance(project).getModules())
.filter(module -> StringUtil.equals(projectPath, ExternalSystemApiUtil.getExternalProjectPath(module)))
.toArray(Module[]::new);
return modules.length > 0 ? SearchScopeProvider.createSearchScope(modules) : null;
}
}
@Nullable
@Override
public Object createTestConsoleProperties(@NotNull Project project,
@NotNull Executor executor,
@NotNull RunConfiguration runConfiguration) {
if (runConfiguration instanceof ExternalSystemRunConfiguration) {
return new GradleConsoleProperties((ExternalSystemRunConfiguration)runConfiguration, executor);
}
return null;
}
@Override
public void runActivity(@NotNull final Project project) {
// We want to automatically refresh linked projects on gradle service directory change.
MessageBusConnection connection = project.getMessageBus().connect(project);
connection.subscribe(GradleSettings.getInstance(project).getChangesTopic(), new GradleSettingsListenerAdapter() {
@Override
public void onServiceDirectoryPathChange(@Nullable String oldPath, @Nullable String newPath) {
for (GradleProjectSettings projectSettings : GradleSettings.getInstance(project).getLinkedProjectsSettings()) {
ExternalProjectsManager.getInstance(project).getExternalProjectsWatcher().markDirty(projectSettings.getExternalProjectPath());
}
}
@Override
public void onGradleHomeChange(@Nullable String oldPath, @Nullable String newPath, @NotNull String linkedProjectPath) {
ExternalProjectsManager.getInstance(project).getExternalProjectsWatcher().markDirty(linkedProjectPath);
}
@Override
public void onGradleDistributionTypeChange(DistributionType currentValue, @NotNull String linkedProjectPath) {
ExternalProjectsManager.getInstance(project).getExternalProjectsWatcher().markDirty(linkedProjectPath);
}
});
// We used to assume that gradle scripts are always named 'build.gradle' and kept the path to that build.gradle file in the ide settings.
// However, that turned out to be an incorrect assumption (IDEA-109064). Now we keep paths to the gradle scripts' directories
// instead. However, we don't want to force existing users to re-import their gradle projects because of that. That's why we check the gradle
// config and re-point it from build.gradle to the parent dir if necessary.
Map<String, String> adjustedPaths = patchLinkedProjects(project);
if (adjustedPaths == null) {
return;
}
GradleLocalSettings localSettings = GradleLocalSettings.getInstance(project);
patchRecentTasks(adjustedPaths, localSettings);
patchAvailableProjects(adjustedPaths, localSettings);
patchAvailableTasks(adjustedPaths, localSettings);
}
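// For example, a linked project path stored as '<project>/build.gradle' is re-pointed
// below to its parent directory '<project>'.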
@Nullable
private static Map<String, String> patchLinkedProjects(@NotNull Project project) {
GradleSettings settings = GradleSettings.getInstance(project);
Collection<GradleProjectSettings> correctedSettings = ContainerUtilRt.newArrayList();
Map<String/* old path */, String/* new path */> adjustedPaths = ContainerUtilRt.newHashMap();
for (GradleProjectSettings projectSettings : settings.getLinkedProjectsSettings()) {
String oldPath = projectSettings.getExternalProjectPath();
if (oldPath != null && new File(oldPath).isFile() && FileUtilRt.extensionEquals(oldPath, GradleConstants.EXTENSION)) {
try {
String newPath = new File(oldPath).getParentFile().getCanonicalPath();
projectSettings.setExternalProjectPath(newPath);
adjustedPaths.put(oldPath, newPath);
}
catch (IOException e) {
LOG.warn(String.format(
"Unexpected exception occurred on attempt to re-point linked gradle project path from build.gradle to its parent dir. Path: %s",
oldPath
), e);
}
}
correctedSettings.add(projectSettings);
}
if (adjustedPaths.isEmpty()) {
return null;
}
settings.setLinkedProjectsSettings(correctedSettings);
return adjustedPaths;
}
private static void patchAvailableTasks(@NotNull Map<String, String> adjustedPaths, @NotNull GradleLocalSettings localSettings) {
Map<String, Collection<ExternalTaskPojo>> adjustedAvailableTasks = ContainerUtilRt.newHashMap();
for (Map.Entry<String, Collection<ExternalTaskPojo>> entry : localSettings.getAvailableTasks().entrySet()) {
String newPath = adjustedPaths.get(entry.getKey());
if (newPath == null) {
adjustedAvailableTasks.put(entry.getKey(), entry.getValue());
}
else {
for (ExternalTaskPojo task : entry.getValue()) {
String newTaskPath = adjustedPaths.get(task.getLinkedExternalProjectPath());
if (newTaskPath != null) {
task.setLinkedExternalProjectPath(newTaskPath);
}
}
adjustedAvailableTasks.put(newPath, entry.getValue());
}
}
localSettings.setAvailableTasks(adjustedAvailableTasks);
}
private static void patchAvailableProjects(@NotNull Map<String, String> adjustedPaths, @NotNull GradleLocalSettings localSettings) {
Map<ExternalProjectPojo, Collection<ExternalProjectPojo>> adjustedAvailableProjects = ContainerUtilRt.newHashMap();
for (Map.Entry<ExternalProjectPojo, Collection<ExternalProjectPojo>> entry : localSettings.getAvailableProjects().entrySet()) {
String newPath = adjustedPaths.get(entry.getKey().getPath());
if (newPath == null) {
adjustedAvailableProjects.put(entry.getKey(), entry.getValue());
}
else {
adjustedAvailableProjects.put(new ExternalProjectPojo(entry.getKey().getName(), newPath), entry.getValue());
}
}
localSettings.setAvailableProjects(adjustedAvailableProjects);
}
private static void patchRecentTasks(@NotNull Map<String, String> adjustedPaths, @NotNull GradleLocalSettings localSettings) {
for (ExternalTaskExecutionInfo taskInfo : localSettings.getRecentTasks()) {
ExternalSystemTaskExecutionSettings s = taskInfo.getSettings();
String newPath = adjustedPaths.get(s.getExternalProjectPath());
if (newPath != null) {
s.setExternalProjectPath(newPath);
}
}
}
}
|
|
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.groovy.findUsages;
import com.intellij.openapi.application.QueryExecutorBase;
import com.intellij.openapi.application.ReadActionProcessor;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.*;
import com.intellij.psi.impl.light.LightMemberReference;
import com.intellij.psi.search.SearchRequestCollector;
import com.intellij.psi.search.SearchScope;
import com.intellij.psi.search.searches.DirectClassInheritorsSearch;
import com.intellij.psi.search.searches.MethodReferencesSearch;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.util.PairProcessor;
import com.intellij.util.Processor;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.codeInspection.utils.ControlFlowUtils;
import org.jetbrains.plugins.groovy.lang.psi.GroovyPsiElement;
import org.jetbrains.plugins.groovy.lang.psi.api.auxiliary.GrListOrMap;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrConstructorInvocation;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrStatement;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrVariable;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrVariableDeclaration;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.blocks.GrOpenBlock;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrNewExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrSafeCastExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrTypeCastExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.params.GrParameter;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrAnonymousClassDefinition;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrTypeDefinition;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrEnumConstant;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrMethod;
import org.jetbrains.plugins.groovy.lang.psi.api.types.GrCodeReferenceElement;
import org.jetbrains.plugins.groovy.lang.psi.api.types.GrTypeElement;
import org.jetbrains.plugins.groovy.lang.psi.controlFlow.Instruction;
import java.util.Set;
/**
* @author Maxim.Medvedev
*/
public class GroovyConstructorUsagesSearcher extends QueryExecutorBase<PsiReference, MethodReferencesSearch.SearchParameters> {
public GroovyConstructorUsagesSearcher() {
super(true);
}
@Override
public void processQuery(@NotNull MethodReferencesSearch.SearchParameters p, @NotNull Processor<PsiReference> consumer) {
processConstructorUsages(p.getMethod(), p.getEffectiveSearchScope(), consumer, p.getOptimizer(), !p.isStrictSignatureSearch());
}
public static final Key<Set<PsiClass>> LITERALLY_CONSTRUCTED_CLASSES = Key.create("LITERALLY_CONSTRUCTED_CLASSES");
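// Collects Groovy-specific constructor usages: enum constant declarations, 'new' expressions,
// list/map literals coerced to the class, and explicit this()/super() constructor invocations
// in the class itself and in its direct inheritors.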
static void processConstructorUsages(final PsiMethod constructor,
final SearchScope searchScope,
final Processor<PsiReference> consumer,
final SearchRequestCollector collector,
final boolean includeOverloads) {
if (!constructor.isConstructor()) return;
final PsiClass clazz = constructor.getContainingClass();
if (clazz == null) return;
SearchScope onlyGroovy = GroovyScopeUtil.restrictScopeToGroovyFiles(searchScope, GroovyScopeUtil.getEffectiveScope(constructor));
Set<PsiClass> processed = collector.getSearchSession().getUserData(LITERALLY_CONSTRUCTED_CLASSES);
if (processed == null) {
collector.getSearchSession().putUserData(LITERALLY_CONSTRUCTED_CLASSES, processed = ContainerUtil.newConcurrentSet());
}
if (!processed.add(clazz)) return;
if (clazz.isEnum() && clazz instanceof GroovyPsiElement) {
for (PsiField field : clazz.getFields()) {
if (field instanceof GrEnumConstant) {
final PsiReference ref = field.getReference();
if (ref != null && ref.isReferenceTo(constructor)) {
if (!consumer.process(ref)) return;
}
}
}
}
final LiteralConstructorSearcher literalProcessor = new LiteralConstructorSearcher(constructor, consumer, includeOverloads);
final Processor<GrNewExpression> newExpressionProcessor = new Processor<GrNewExpression>() {
@Override
public boolean process(GrNewExpression grNewExpression) {
final PsiMethod resolvedConstructor = grNewExpression.resolveMethod();
if (includeOverloads || constructor.getManager().areElementsEquivalent(resolvedConstructor, constructor)) {
return consumer.process(grNewExpression.getReferenceElement());
}
return true;
}
};
processGroovyClassUsages(clazz, searchScope, collector, newExpressionProcessor, literalProcessor);
//this()
if (clazz instanceof GrTypeDefinition) {
if (!processConstructors(constructor, consumer, clazz, true)) {
return;
}
}
//super()
DirectClassInheritorsSearch.search(clazz, onlyGroovy).forEach(new ReadActionProcessor<PsiClass>() {
@Override
public boolean processInReadAction(PsiClass inheritor) {
if (inheritor instanceof GrTypeDefinition) {
if (!processConstructors(constructor, consumer, inheritor, false)) return false;
}
return true;
}
});
}
public static void processGroovyClassUsages(final PsiClass clazz,
final SearchScope scope,
SearchRequestCollector collector,
final Processor<GrNewExpression> newExpressionProcessor,
final LiteralConstructorSearcher literalProcessor) {
ReferencesSearch.searchOptimized(clazz, scope, false, collector, true, new PairProcessor<PsiReference, SearchRequestCollector>() {
@Override
public boolean process(PsiReference ref, SearchRequestCollector collector) {
final PsiElement element = ref.getElement();
if (element instanceof GrCodeReferenceElement) {
if (!processGroovyConstructorUsages((GrCodeReferenceElement)element, newExpressionProcessor, literalProcessor)) {
return false;
}
}
return true;
}
});
}
private static boolean processGroovyConstructorUsages(GrCodeReferenceElement element,
final Processor<GrNewExpression> newExpressionProcessor,
final LiteralConstructorSearcher literalProcessor) {
PsiElement parent = element.getParent();
if (parent instanceof GrAnonymousClassDefinition) {
parent = parent.getParent();
}
if (parent instanceof GrNewExpression) {
return newExpressionProcessor.process((GrNewExpression)parent);
}
if (parent instanceof GrTypeElement) {
final GrTypeElement typeElement = (GrTypeElement)parent;
final PsiElement grandpa = typeElement.getParent();
if (grandpa instanceof GrVariableDeclaration) {
final GrVariable[] vars = ((GrVariableDeclaration)grandpa).getVariables();
if (vars.length == 1) {
final GrVariable variable = vars[0];
if (!checkLiteralInstantiation(variable.getInitializerGroovy(), literalProcessor)) {
return false;
}
}
}
else if (grandpa instanceof GrMethod) {
final GrMethod method = (GrMethod)grandpa;
if (typeElement == method.getReturnTypeElementGroovy()) {
ControlFlowUtils.visitAllExitPoints(method.getBlock(), new ControlFlowUtils.ExitPointVisitor() {
@Override
public boolean visitExitPoint(Instruction instruction, @Nullable GrExpression returnValue) {
if (!checkLiteralInstantiation(returnValue, literalProcessor)) {
return false;
}
return true;
}
});
}
}
else if (grandpa instanceof GrTypeCastExpression) {
final GrTypeCastExpression cast = (GrTypeCastExpression)grandpa;
if (cast.getCastTypeElement() == typeElement &&
!checkLiteralInstantiation(cast.getOperand(), literalProcessor)) {
return false;
}
}
else if (grandpa instanceof GrSafeCastExpression) {
final GrSafeCastExpression cast = (GrSafeCastExpression)grandpa;
if (cast.getCastTypeElement() == typeElement &&
!checkLiteralInstantiation(cast.getOperand(), literalProcessor)) {
return false;
}
}
}
return true;
}
private static boolean checkLiteralInstantiation(GrExpression expression,
final LiteralConstructorSearcher literalProcessor) {
if (expression instanceof GrListOrMap) {
return literalProcessor.processLiteral((GrListOrMap)expression);
}
return true;
}
private static boolean processConstructors(final PsiMethod searchedConstructor, final Processor<PsiReference> consumer, final PsiClass clazz,
final boolean processThisRefs) {
final PsiMethod[] constructors = clazz.getConstructors();
if (constructors.length == 0) {
processImplicitConstructorCall(clazz, consumer, searchedConstructor);
}
for (PsiMethod constructor : constructors) {
if (!(constructor instanceof GrMethod)) continue;
final GrOpenBlock block = ((GrMethod)constructor).getBlock();
if (block != null) {
final GrStatement[] statements = block.getStatements();
if (statements.length > 0 && statements[0] instanceof GrConstructorInvocation) {
final GrConstructorInvocation invocation = (GrConstructorInvocation)statements[0];
if (invocation.isThisCall() == processThisRefs &&
invocation.getManager().areElementsEquivalent(invocation.resolveMethod(), searchedConstructor) &&
!consumer.process(invocation.getInvokedExpression())) {
return false;
}
}
else {
processImplicitConstructorCall(constructor, consumer, searchedConstructor);
}
}
}
return true;
}
private static void processImplicitConstructorCall(@NotNull final PsiMember usage,
final Processor<PsiReference> processor,
final PsiMethod constructor) {
if (constructor instanceof GrMethod) {
GrParameter[] grParameters = (GrParameter[])constructor.getParameterList().getParameters();
if (grParameters.length > 0 && !grParameters[0].isOptional()) return;
}
else if (constructor.getParameterList().getParameters().length > 0) return;
PsiManager manager = constructor.getManager();
if (manager.areElementsEquivalent(usage, constructor) || manager.areElementsEquivalent(constructor.getContainingClass(), usage.getContainingClass())) return;
processor.process(new LightMemberReference(manager, usage, PsiSubstitutor.EMPTY) {
@Override
public PsiElement getElement() {
return usage;
}
@Override
public TextRange getRangeInElement() {
if (usage instanceof PsiClass) {
PsiIdentifier identifier = ((PsiClass)usage).getNameIdentifier();
if (identifier != null) return TextRange.from(identifier.getStartOffsetInParent(), identifier.getTextLength());
}
else if (usage instanceof PsiMethod) {
PsiIdentifier identifier = ((PsiMethod)usage).getNameIdentifier();
if (identifier != null) return TextRange.from(identifier.getStartOffsetInParent(), identifier.getTextLength());
}
return super.getRangeInElement();
}
});
}
}
|
|
/*
* The MIT License (MIT)
*
* Copyright (c) 2015 COLLOMB-GRISET
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*
*/
package fr.upem.ir1.curvysnake.view;
import fr.umlv.zen5.Application;
import fr.umlv.zen5.Event;
import fr.umlv.zen5.Event.Action;
import fr.umlv.zen5.KeyboardKey;
import fr.umlv.zen5.ScreenInfo;
import fr.upem.ir1.curvysnake.controller.BonusListInGame;
import fr.upem.ir1.curvysnake.controller.MoveTo;
import fr.upem.ir1.curvysnake.controller.Snake;
import fr.upem.ir1.curvysnake.controller.exception.CollisionException;
import java.awt.*;
import java.awt.geom.Rectangle2D;
import java.awt.geom.RectangularShape;
import java.util.ArrayList;
import java.util.List;
/**
* @author collombj
* @project EelZen
* @package fr.upem.ir1.curvysnake.view
* @date 05/06/2015
*/
public class MultiPlayer {
/**
* The game area; its width and height are updated from the screen size at startup.
*/
private static Rectangle gameSize = new Rectangle(0, 0, 500, 500);
/**
* The bonus list for the game.
*/
private static BonusListInGame bonusListInGame = new BonusListInGame();
/**
* Runs a two-player game session (player 1 steers with Q/D, player 2 with the arrow keys; P exits).
*/
public static void run() {
// Set environment
Snake.setGameSize(gameSize);
// Set Bonus List
Snake.setBonusListInGame(bonusListInGame);
Player player1 = new Player(new Snake(new Point(
(int) gameSize.getCenterX(), (int) gameSize.getCenterY()), 0),
Color.GREEN);
List<RectangularShape> add1 = new ArrayList<>();
Player player2 = new Player(new Snake(new Point(
(int) gameSize.getCenterX(), (int) gameSize.getCenterY() + 50),
0), Color.RED);
List<RectangularShape> add2 = new ArrayList<>();
List<RectangularShape> erase = new ArrayList<>();
/*
* Graphical interface initialization
*/
Application.run(Color.WHITE, context -> {
Draw.context = context;
// get the size of the screen
ScreenInfo screenInfo = context.getScreenInfo();
float width = screenInfo.getWidth();
float height = screenInfo.getHeight();
System.out.println("size of the screen (" + width + " x "
+ height + ")");
gameSize.height = (int) height;
gameSize.width = (int) width;
context.renderFrame(graphics -> {
graphics.setColor(Color.WHITE);
graphics.fill(new Rectangle2D.Float(0, 0, width, height));
});
int time = 0;
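// Main loop: poll an event with a 1 ms timeout, steer the snakes on key presses,
// and advance/redraw the game roughly every 25 iterations.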
while(true) {
Event event = context.pollOrWaitEvent(1);
time++;
if(event != null) { // an event was polled
Action action = event.getAction();
if(action == Action.KEY_PRESSED) {
KeyboardKey key = event.getKey();
try {
// Player 1
if(key == KeyboardKey.D) {
player1.getPlayer().changeDirection(MoveTo.RIGHT, false);
}
if(key == KeyboardKey.Q) {
player1.getPlayer().changeDirection(MoveTo.LEFT, false);
}
// Player 2
if(key == KeyboardKey.RIGHT) {
player2.getPlayer().changeDirection(MoveTo.RIGHT, false);
} else if(key == KeyboardKey.LEFT) {
player2.getPlayer().changeDirection(MoveTo.LEFT, false);
}
} catch(IllegalAccessException e) {
e.printStackTrace();
context.exit(-1);
return;
}
// Exit
if(key == KeyboardKey.P) {
context.exit(0);
return;
}
}
}
if(time >= 25) {
try {
if(player1.isAlive())
player1.getPlayer().move(add1, erase);
} catch(CollisionException e) {
player1.kill();
} catch(Exception e) {
e.printStackTrace();
context.exit(-1);
return;
}
try {
if(player2.isAlive())
player2.getPlayer().move(add2, erase);
} catch(CollisionException e) {
player2.kill();
} catch(Exception e) {
e.printStackTrace();
context.exit(-1);
return;
}
time = 0;
}
erase.forEach(Draw::undraw);
erase.clear();
add1.forEach(rectangularShape -> Draw.draw(rectangularShape, player1.getColor()));
add1.clear();
add2.forEach(rectangularShape -> Draw.draw(rectangularShape, player2.getColor()));
add2.clear();
if(player1.isAlive())
Draw.draw(player1.getPlayer().getQueue(), player1.getColor());
if(player2.isAlive())
Draw.draw(player2.getPlayer().getQueue(), player2.getColor());
Draw.drawBonus(bonusListInGame.random());
Snake.decrementAll();
if(!player1.isAlive() && !player2.isAlive()) {
context.exit(0);
return;
}
}
});
/*
* End of the graphical interface
*/
}
}
|
|
package net.graphical.model.causality.learning.gies;
import net.graphical.model.causality.graph.algorithm.graphTypeChecking.ChainGraphChecker;
import net.graphical.model.causality.graph.model.AdjImpl.ChainGraph;
import net.graphical.model.causality.graph.model.AdjImpl.ConnectedUndirectedGraph;
import net.graphical.model.causality.graph.model.AdjImpl.Dag;
import net.graphical.model.causality.graph.model.Edge;
import net.graphical.model.causality.graph.model.EdgeType;
import net.graphical.model.causality.graph.model.Node;
import net.graphical.model.causality.graph.model.intervention.Intervention;
import net.graphical.model.causality.graph.model.intervention.InterventionFamily;
import net.graphical.model.causality.interventionData.DataSet;
import net.graphical.model.causality.interventionData.dataGenerator.CausalModel;
import net.graphical.model.causality.interventionData.dataGenerator.OneCpd;
import net.graphical.model.causality.learning.gies.operation.Operation;
import net.graphical.model.causality.learning.gies.operation.TurningEssential;
import net.graphical.model.causality.scoreFunction.BicScoreFunction;
import net.graphical.model.causality.scoreFunction.ScoreFunction;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.assertEquals;
/**
* Created by sli on 12/1/15.
*/
public class StepTest {
@Test
public void testLearner_3Variables_vstructure_inv0() throws Exception {
Intervention intervention = new Intervention(Arrays.asList());
Intervention intervention1 = new Intervention(Arrays.asList(new Node(1)));
List<Intervention> interventions = Arrays.asList(intervention, intervention1);
int noOfNodes = 3;
List<Node> nodes = new ArrayList<>();
for(int i = 1; i <= noOfNodes; i++){
Node node = new Node(i);
node.addLevel("0","1");
nodes.add(node);
}
List<Edge> edges = Arrays.asList(
new Edge(1,2, EdgeType.UNDIRECTED), new Edge(2,3, EdgeType.UNDIRECTED)
);
Dag dag = new Dag(nodes, edges);
CausalModel causalModel = new CausalModel(dag, new OneCpd());
DataSet dataSet = causalModel.generateDataSet(8000, interventions);
BicScoreFunction localScore = new BicScoreFunction(dataSet);
StepTurning stepTurning = new StepTurning(dag, localScore, dataSet.getInterventionFamily());
stepTurning.perturbEssentialGraph();
}
//REF_HB
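// Uses synthetic score functions: +5 per parent rewards edge insertions in the forward
// phase and -5 per parent rewards deletions in the backward phase; each phase must leave
// a valid chain graph with the expected number of chain components.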
@Test
public void testStepForward_Backward() throws Exception {
ScoreFunction score_Function_insert = new ScoreFunction() {
@Override
public double getLocalScore(Node node, List<Node> parentNodes) {
return 5 * parentNodes.size();
}
@Override
public double score(ChainGraph dag) throws Exception {
return 0;
}
};
ChainGraph ed = getEssentialD();
Intervention inv0 = new Intervention(Arrays.asList());
Intervention inv1 = new Intervention( Arrays.asList(new Node(4)));
InterventionFamily interventionFamily = new InterventionFamily(Arrays.asList(inv0, inv1));
Step stepForward = new StepForward(ed, score_Function_insert, interventionFamily);
stepForward.execute();
ChainGraph snapShot = ed.deepCopy();
List<ConnectedUndirectedGraph> chainComponents = snapShot.getChainComponents();
assertEquals(chainComponents.size(), 3);
ChainGraphChecker checker = new ChainGraphChecker(snapShot);
assertEquals(checker.isTrue(), true);
ScoreFunction score_Function_delete = new ScoreFunction() {
@Override
public double getLocalScore(Node node, List<Node> parentNodes) {
return -5 * parentNodes.size();
}
@Override
public double score(ChainGraph dag) throws Exception {
return 0;
}
};
Step stepBackward = new StepBackward(ed, score_Function_delete, interventionFamily);
stepBackward.execute();
ChainGraph snapShot2 = ed.deepCopy();
ChainGraphChecker checker2 = new ChainGraphChecker(snapShot2);
assertEquals(checker2.isTrue(), true);
List<ConnectedUndirectedGraph> chainComponents2 = snapShot2.getChainComponents();
assertEquals(chainComponents2.size(), 7);
}
@Test
public void testStepTurning_Turning_4_simplified() throws Exception {// 3 nodes will pass.
List<Node> nodes = Arrays.asList(new Node(1), new Node(2), new Node(3), new Node(4)
);
List<Edge> edges = Arrays.asList(
new Edge(1,2, EdgeType.UNDIRECTED),new Edge(1,3, EdgeType.UNDIRECTED),new Edge(1,4, EdgeType.DIRECTED_PLUS)
,new Edge(2,3, EdgeType.UNDIRECTED),new Edge(2,4, EdgeType.DIRECTED_PLUS)
,new Edge(3,4, EdgeType.DIRECTED_PLUS)
);
ChainGraph ed = new ChainGraph(nodes, edges);
Intervention inv0 = new Intervention(Arrays.asList());
Intervention inv1 = new Intervention( Arrays.asList(new Node(4)));
InterventionFamily interventionFamily = new InterventionFamily(Arrays.asList(inv0, inv1));
ScoreFunction score_Function_turning = new ScoreFunction() {
@Override
public double getLocalScore(Node node, List<Node> parentNodes) {
if(parentNodes.size() > 0){
return parentNodes.get(0).getNumber() - node.getNumber();
}
return -node.getNumber();
}
@Override
public double score(ChainGraph dag) throws Exception {
return 0;
}
};
Operation turningEssential = new TurningEssential(interventionFamily, ed, new Node(4), new Node(1), score_Function_turning).propose();
turningEssential.commit();
ChainGraphChecker checker = new ChainGraphChecker(ed);
assertEquals(checker.isTrue(), true);
List<ConnectedUndirectedGraph> chainComponents2 = ed.getChainComponents();
assertEquals(chainComponents2.size(), 3);
}
@Test
public void testStepTurning_Turning_4_simplified_1() throws Exception {// 3 nodes will pass.
List<Node> nodes = Arrays.asList(new Node(1), new Node(2), new Node(3), new Node(4)
);
List<Edge> edges = Arrays.asList(
new Edge(1,2, EdgeType.UNDIRECTED),new Edge(1,3, EdgeType.DIRECTED_MINUS),new Edge(1,4, EdgeType.DIRECTED_MINUS)
,new Edge(2,3, EdgeType.DIRECTED_MINUS),new Edge(2,4, EdgeType.DIRECTED_MINUS)
,new Edge(3,4, EdgeType.DIRECTED_PLUS)
);
ChainGraph ed = new ChainGraph(nodes, edges);
Intervention inv0 = new Intervention(Arrays.asList());
Intervention inv1 = new Intervention( Arrays.asList(new Node(4)));
InterventionFamily interventionFamily = new InterventionFamily(Arrays.asList(inv0, inv1));
ed.makeEssential(interventionFamily);
ScoreFunction score_Function_turning = new ScoreFunction() {
@Override
public double getLocalScore(Node node, List<Node> parentNodes) {
if(parentNodes.size() > 0){
return parentNodes.get(0).getNumber() - node.getNumber();
}
return -node.getNumber();
}
@Override
public double score(ChainGraph dag) throws Exception {
return 0;
}
};
Operation turningEssential = new TurningEssential(interventionFamily, ed, new Node(4), new Node(1), score_Function_turning).propose();
turningEssential.commit();
ChainGraphChecker checker = new ChainGraphChecker(ed);
assertEquals(checker.isTrue(), true);
List<ConnectedUndirectedGraph> chainComponents2 = ed.getChainComponents();
assertEquals(chainComponents2.size(), 3);
}
@Test
public void testStepTurning_Turning_4_simplified_failed() throws Exception {// 3 nodes will pass.
List<Node> nodes = Arrays.asList(new Node(1), new Node(2), new Node(3), new Node(4)
);
List<Edge> edges = Arrays.asList(
new Edge(1,2, EdgeType.UNDIRECTED),new Edge(1,3, EdgeType.DIRECTED_MINUS),new Edge(1,4, EdgeType.DIRECTED_MINUS)
,new Edge(2,3, EdgeType.DIRECTED_MINUS),new Edge(2,4, EdgeType.DIRECTED_MINUS)
,new Edge(3,4, EdgeType.DIRECTED_PLUS)
);
ChainGraph ed = new ChainGraph(nodes, edges);
Intervention inv0 = new Intervention(Arrays.asList());
Intervention inv1 = new Intervention( Arrays.asList(new Node(4)));
InterventionFamily interventionFamily = new InterventionFamily(Arrays.asList(inv0, inv1));
ed.makeEssential(interventionFamily);
ScoreFunction score_Function_turning = new ScoreFunction() {
@Override
public double getLocalScore(Node node, List<Node> parentNodes) {
if(parentNodes.size() > 0){
return parentNodes.get(0).getNumber() - node.getNumber();
}
return -node.getNumber();
}
@Override
public double score(ChainGraph dag) throws Exception {
return 0;
}
};
Operation turningEssential = new TurningEssential(interventionFamily, ed, new Node(2), new Node(3), score_Function_turning).propose();
turningEssential.commit();
ChainGraphChecker checker = new ChainGraphChecker(ed);
assertEquals(checker.isTrue(), true);
List<ConnectedUndirectedGraph> chainComponents2 = ed.getChainComponents();
assertEquals(chainComponents2.size(), 3);
}
@Test
public void testStepForward_Turning_4() throws Exception {// 3 nodes will pass.
ScoreFunction score_Function_insert = new ScoreFunction() {
@Override
public double getLocalScore(Node node, List<Node> parentNodes) {
return 5 * parentNodes.size();
}
@Override
public double score(ChainGraph dag) throws Exception {
return 0;
}
};
List<Node> nodes = Arrays.asList(new Node(1), new Node(2), new Node(3), new Node(4)
);
ChainGraph ed = new ChainGraph(nodes, Arrays.asList());
Intervention inv0 = new Intervention(Arrays.asList());
Intervention inv1 = new Intervention( Arrays.asList(new Node(4)));
InterventionFamily interventionFamily = new InterventionFamily(Arrays.asList(inv0, inv1));
Step stepForward = new StepForward(ed, score_Function_insert, interventionFamily);
stepForward.execute();
ChainGraph snapShot = ed.deepCopy();
List<ConnectedUndirectedGraph> chainComponents = snapShot.getChainComponents();
assertEquals(chainComponents.size(), 2);
ChainGraphChecker checker = new ChainGraphChecker(snapShot);
assertEquals(checker.isTrue(), true);
ScoreFunction score_Function_turning = new ScoreFunction() {
@Override
public double getLocalScore(Node node, List<Node> parentNodes) {
if(parentNodes.size() > 0){
return parentNodes.get(0).getNumber() - node.getNumber();
}
return -node.getNumber();
}
@Override
public double score(ChainGraph dag) throws Exception {
return 0;
}
};
Step stepTurning = new StepTurning(ed, score_Function_turning, interventionFamily);
stepTurning.execute();
ChainGraph snapShot2 = ed.deepCopy();
List<ConnectedUndirectedGraph> chainComponents2 = snapShot2.getChainComponents();
assertEquals(chainComponents2.size(), 2);
ChainGraphChecker checker2 = new ChainGraphChecker(snapShot2);
assertEquals(checker2.isTrue(), true);
}
//REF_HB
@Test
public void testStepForward_Turning_5() throws Exception {
ScoreFunction score_Function_insert = new ScoreFunction() {
@Override
public double getLocalScore(Node node, List<Node> parentNodes) {
return 5 * parentNodes.size();
}
@Override
public double score(ChainGraph dag) throws Exception {
return 0;
}
};
List<Node> nodes = Arrays.asList(new Node(1), new Node(2), new Node(3), new Node(4), new Node(5)
);
ChainGraph ed = new ChainGraph(nodes, Arrays.asList());
Intervention inv0 = new Intervention(Arrays.asList());
Intervention inv1 = new Intervention( Arrays.asList(new Node(4)));
InterventionFamily interventionFamily = new InterventionFamily(Arrays.asList(inv0, inv1));
Step stepForward = new StepForward(ed, score_Function_insert, interventionFamily);
stepForward.execute();
ChainGraph snapShot = ed.deepCopy();
List<ConnectedUndirectedGraph> chainComponents = snapShot.getChainComponents();
assertEquals(chainComponents.size(), 3);
ChainGraphChecker checker = new ChainGraphChecker(snapShot);
assertEquals(checker.isTrue(), true);
ScoreFunction score_Function_turning = new ScoreFunction() {
@Override
public double getLocalScore(Node node, List<Node> parentNodes) {
if(parentNodes.size() > 0){
return parentNodes.get(0).getNumber() - node.getNumber();
}
return -node.getNumber();
}
@Override
public double score(ChainGraph dag) throws Exception {
return 0;
}
};
Step stepTurning = new StepTurning(ed, score_Function_turning, interventionFamily);
stepTurning.execute();
ChainGraph snapShot2 = ed.deepCopy();
ChainGraphChecker checker2 = new ChainGraphChecker(snapShot2);
assertEquals(checker2.isTrue(), true);
List<ConnectedUndirectedGraph> chainComponents2 = snapShot2.getChainComponents();
assertEquals(chainComponents2.size(), 3);
}
//REF_HB
@Test
public void testStepForward_Turning_7() throws Exception {
ScoreFunction score_Function_insert = new ScoreFunction() {
@Override
public double getLocalScore(Node node, List<Node> parentNodes) {
return 5 * parentNodes.size();
}
@Override
public double score(ChainGraph dag) throws Exception {
return 0;
}
};
List<Node> nodes = Arrays.asList(new Node(1), new Node(2), new Node(3), new Node(4), new Node(5), new Node(6), new Node(7)
);
ChainGraph ed = new ChainGraph(nodes, Arrays.asList());
Intervention inv0 = new Intervention(Arrays.asList());
Intervention inv1 = new Intervention( Arrays.asList(new Node(4)));
InterventionFamily interventionFamily = new InterventionFamily(Arrays.asList(inv0, inv1));
Step stepForward = new StepForward(ed, score_Function_insert, interventionFamily);
stepForward.execute();
ChainGraph snapshot = ed.deepCopy();
List<ConnectedUndirectedGraph> chainComponents = snapshot.getChainComponents();
assertEquals(chainComponents.size(), 3);
ChainGraphChecker checker = new ChainGraphChecker(snapshot);
assertEquals(checker.isTrue(), true);
ScoreFunction score_Function_turning = new ScoreFunction() {
@Override
public double getLocalScore(Node node, List<Node> parentNodes) {
if(parentNodes.size() > 0){
return parentNodes.get(0).getNumber() - node.getNumber();
}
return -node.getNumber();
}
@Override
public double score(ChainGraph dag) throws Exception {
return 0;
}
};
Step stepTurning = new StepTurning(ed, score_Function_turning, interventionFamily);
stepTurning.execute();
ChainGraph snapshot2 = ed.deepCopy();
ChainGraphChecker checker2 = new ChainGraphChecker(snapshot2);
assertEquals(checker2.isTrue(), true);
List<ConnectedUndirectedGraph> chainComponents2 = snapshot2.getChainComponents();
assertEquals(chainComponents2.size(), 3);
}
private ChainGraph getEssentialD(){
List<Node> nodes = Arrays.asList(new Node(1), new Node(2), new Node(3), new Node(4), new Node(5), new Node(6), new Node(7)
);
List<Edge> edges = Arrays.asList();
return new ChainGraph(nodes, edges);
}
}
|
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.analyzer;
import com.facebook.presto.Session;
import com.facebook.presto.security.AccessControl;
import com.facebook.presto.security.AllowAllAccessControl;
import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.metadata.MetadataUtil;
import com.facebook.presto.metadata.QualifiedTableName;
import com.facebook.presto.metadata.SessionPropertyManager.SessionPropertyValue;
import com.facebook.presto.metadata.TableHandle;
import com.facebook.presto.spi.ColumnMetadata;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.sql.parser.SqlParser;
import com.facebook.presto.sql.tree.AllColumns;
import com.facebook.presto.sql.tree.Cast;
import com.facebook.presto.sql.tree.CreateTableAsSelect;
import com.facebook.presto.sql.tree.CreateView;
import com.facebook.presto.sql.tree.DefaultTraversalVisitor;
import com.facebook.presto.sql.tree.Delete;
import com.facebook.presto.sql.tree.Explain;
import com.facebook.presto.sql.tree.ExplainFormat;
import com.facebook.presto.sql.tree.ExplainOption;
import com.facebook.presto.sql.tree.ExplainType;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.Insert;
import com.facebook.presto.sql.tree.LikePredicate;
import com.facebook.presto.sql.tree.LongLiteral;
import com.facebook.presto.sql.tree.QualifiedName;
import com.facebook.presto.sql.tree.Query;
import com.facebook.presto.sql.tree.Relation;
import com.facebook.presto.sql.tree.SelectItem;
import com.facebook.presto.sql.tree.ShowCatalogs;
import com.facebook.presto.sql.tree.ShowColumns;
import com.facebook.presto.sql.tree.ShowFunctions;
import com.facebook.presto.sql.tree.ShowPartitions;
import com.facebook.presto.sql.tree.ShowSchemas;
import com.facebook.presto.sql.tree.ShowSession;
import com.facebook.presto.sql.tree.ShowTables;
import com.facebook.presto.sql.tree.SingleColumn;
import com.facebook.presto.sql.tree.SortItem;
import com.facebook.presto.sql.tree.Statement;
import com.facebook.presto.sql.tree.StringLiteral;
import com.facebook.presto.sql.tree.Use;
import com.facebook.presto.sql.tree.Values;
import com.facebook.presto.sql.tree.With;
import com.facebook.presto.sql.tree.WithQuery;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.primitives.Ints;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import static com.facebook.presto.connector.informationSchema.InformationSchemaMetadata.TABLE_COLUMNS;
import static com.facebook.presto.connector.informationSchema.InformationSchemaMetadata.TABLE_INTERNAL_FUNCTIONS;
import static com.facebook.presto.connector.informationSchema.InformationSchemaMetadata.TABLE_INTERNAL_PARTITIONS;
import static com.facebook.presto.connector.informationSchema.InformationSchemaMetadata.TABLE_SCHEMATA;
import static com.facebook.presto.connector.informationSchema.InformationSchemaMetadata.TABLE_TABLES;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.sql.QueryUtil.aliased;
import static com.facebook.presto.sql.QueryUtil.aliasedName;
import static com.facebook.presto.sql.QueryUtil.aliasedNullToEmpty;
import static com.facebook.presto.sql.QueryUtil.aliasedYesNoToBoolean;
import static com.facebook.presto.sql.QueryUtil.ascending;
import static com.facebook.presto.sql.QueryUtil.caseWhen;
import static com.facebook.presto.sql.QueryUtil.equal;
import static com.facebook.presto.sql.QueryUtil.functionCall;
import static com.facebook.presto.sql.QueryUtil.logicalAnd;
import static com.facebook.presto.sql.QueryUtil.nameReference;
import static com.facebook.presto.sql.QueryUtil.ordering;
import static com.facebook.presto.sql.QueryUtil.row;
import static com.facebook.presto.sql.QueryUtil.selectAll;
import static com.facebook.presto.sql.QueryUtil.selectList;
import static com.facebook.presto.sql.QueryUtil.simpleQuery;
import static com.facebook.presto.sql.QueryUtil.subquery;
import static com.facebook.presto.sql.QueryUtil.table;
import static com.facebook.presto.sql.QueryUtil.unaliasedName;
import static com.facebook.presto.sql.QueryUtil.values;
import static com.facebook.presto.sql.analyzer.ExpressionAnalyzer.createConstantAnalyzer;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.COLUMN_NAME_NOT_SPECIFIED;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.DUPLICATE_COLUMN_NAME;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.DUPLICATE_RELATION;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.INVALID_ORDINAL;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.INVALID_SCHEMA_NAME;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISMATCHED_SET_COLUMN_TYPES;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISSING_SCHEMA;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISSING_TABLE;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.NOT_SUPPORTED;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.TABLE_ALREADY_EXISTS;
import static com.facebook.presto.sql.tree.BooleanLiteral.FALSE_LITERAL;
import static com.facebook.presto.sql.tree.BooleanLiteral.TRUE_LITERAL;
import static com.facebook.presto.sql.tree.ExplainFormat.Type.TEXT;
import static com.facebook.presto.sql.tree.ExplainType.Type.LOGICAL;
import static com.facebook.presto.util.ImmutableCollectors.toImmutableList;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Strings.nullToEmpty;
import static com.google.common.collect.Iterables.elementsEqual;
import static com.google.common.collect.Iterables.transform;
import static java.util.stream.Collectors.toList;
class StatementAnalyzer
extends DefaultTraversalVisitor<TupleDescriptor, AnalysisContext>
{
private final Analysis analysis;
private final Metadata metadata;
private final Session session;
private final Optional<QueryExplainer> queryExplainer;
private final boolean experimentalSyntaxEnabled;
private final SqlParser sqlParser;
private final AccessControl accessControl;
public StatementAnalyzer(
Analysis analysis,
Metadata metadata,
SqlParser sqlParser,
AccessControl accessControl, Session session,
boolean experimentalSyntaxEnabled,
Optional<QueryExplainer> queryExplainer)
{
this.analysis = checkNotNull(analysis, "analysis is null");
this.metadata = checkNotNull(metadata, "metadata is null");
this.sqlParser = checkNotNull(sqlParser, "sqlParser is null");
this.accessControl = checkNotNull(accessControl, "accessControl is null");
this.session = checkNotNull(session, "session is null");
this.experimentalSyntaxEnabled = experimentalSyntaxEnabled;
this.queryExplainer = checkNotNull(queryExplainer, "queryExplainer is null");
}
@Override
protected TupleDescriptor visitShowTables(ShowTables showTables, AnalysisContext context)
{
String catalogName = session.getCatalog();
String schemaName = session.getSchema();
Optional<QualifiedName> schema = showTables.getSchema();
if (schema.isPresent()) {
List<String> parts = schema.get().getParts();
if (parts.size() > 2) {
throw new SemanticException(INVALID_SCHEMA_NAME, showTables, "too many parts in schema name: %s", schema);
}
if (parts.size() == 2) {
catalogName = parts.get(0);
}
schemaName = schema.get().getSuffix();
}
if (!metadata.listSchemaNames(session, catalogName).contains(schemaName)) {
throw new SemanticException(MISSING_SCHEMA, showTables, "Schema '%s' does not exist", schemaName);
}
Expression predicate = equal(nameReference("table_schema"), new StringLiteral(schemaName));
Optional<String> likePattern = showTables.getLikePattern();
if (likePattern.isPresent()) {
Expression likePredicate = new LikePredicate(nameReference("table_name"), new StringLiteral(likePattern.get()), null);
predicate = logicalAnd(predicate, likePredicate);
}
Query query = simpleQuery(
selectList(aliasedName("table_name", "Table")),
from(catalogName, TABLE_TABLES),
predicate,
ordering(ascending("table_name")));
return process(query, context);
}
@Override
protected TupleDescriptor visitShowSchemas(ShowSchemas node, AnalysisContext context)
{
Query query = simpleQuery(
selectList(aliasedName("schema_name", "Schema")),
from(node.getCatalog().orElse(session.getCatalog()), TABLE_SCHEMATA),
ordering(ascending("schema_name")));
return process(query, context);
}
@Override
protected TupleDescriptor visitShowCatalogs(ShowCatalogs node, AnalysisContext context)
{
List<Expression> rows = metadata.getCatalogNames().keySet().stream()
.map(name -> row(new StringLiteral(name)))
.collect(toList());
Query query = simpleQuery(
selectList(new AllColumns()),
aliased(new Values(rows), "catalogs", ImmutableList.of("Catalog")));
return process(query, context);
}
@Override
protected TupleDescriptor visitShowColumns(ShowColumns showColumns, AnalysisContext context)
{
QualifiedTableName tableName = MetadataUtil.createQualifiedTableName(session, showColumns.getTable());
if (!metadata.getView(session, tableName).isPresent() &&
!metadata.getTableHandle(session, tableName).isPresent()) {
throw new SemanticException(MISSING_TABLE, showColumns, "Table '%s' does not exist", tableName);
}
Query query = simpleQuery(
selectList(
aliasedName("column_name", "Column"),
aliasedName("data_type", "Type"),
aliasedYesNoToBoolean("is_nullable", "Null"),
aliasedYesNoToBoolean("is_partition_key", "Partition Key"),
aliasedNullToEmpty("comment", "Comment")),
from(tableName.getCatalogName(), TABLE_COLUMNS),
logicalAnd(
equal(nameReference("table_schema"), new StringLiteral(tableName.getSchemaName())),
equal(nameReference("table_name"), new StringLiteral(tableName.getTableName()))),
ordering(ascending("ordinal_position")));
return process(query, context);
}
@Override
protected TupleDescriptor visitUse(Use node, AnalysisContext context)
{
analysis.setUpdateType("USE");
throw new SemanticException(NOT_SUPPORTED, node, "USE statement is not supported");
}
@Override
protected TupleDescriptor visitShowPartitions(ShowPartitions showPartitions, AnalysisContext context)
{
QualifiedTableName table = MetadataUtil.createQualifiedTableName(session, showPartitions.getTable());
Optional<TableHandle> tableHandle = metadata.getTableHandle(session, table);
if (!tableHandle.isPresent()) {
throw new SemanticException(MISSING_TABLE, showPartitions, "Table '%s' does not exist", table);
}
/*
Generate a dynamic pivot to output one column per partition key.
For example, a table with two partition keys (ds, cluster_name)
would generate the following query:
SELECT
partition_number
, max(CASE WHEN partition_key = 'ds' THEN partition_value END) ds
, max(CASE WHEN partition_key = 'cluster_name' THEN partition_value END) cluster_name
FROM ...
GROUP BY partition_number
The values are also cast to the type of the partition column.
The query is then wrapped to allow custom filtering and ordering.
*/
ImmutableList.Builder<SelectItem> selectList = ImmutableList.builder();
ImmutableList.Builder<SelectItem> wrappedList = ImmutableList.builder();
selectList.add(unaliasedName("partition_number"));
for (ColumnMetadata column : metadata.getTableMetadata(session, tableHandle.get()).getColumns()) {
if (!column.isPartitionKey()) {
continue;
}
Expression key = equal(nameReference("partition_key"), new StringLiteral(column.getName()));
Expression value = caseWhen(key, nameReference("partition_value"));
value = new Cast(value, column.getType().getTypeSignature().toString());
Expression function = functionCall("max", value);
selectList.add(new SingleColumn(function, column.getName()));
wrappedList.add(unaliasedName(column.getName()));
}
Query query = simpleQuery(
selectAll(selectList.build()),
from(table.getCatalogName(), TABLE_INTERNAL_PARTITIONS),
Optional.of(logicalAnd(
equal(nameReference("table_schema"), new StringLiteral(table.getSchemaName())),
equal(nameReference("table_name"), new StringLiteral(table.getTableName())))),
ImmutableList.of(nameReference("partition_number")),
Optional.empty(),
ImmutableList.of(),
Optional.empty());
query = simpleQuery(
selectAll(wrappedList.build()),
subquery(query),
showPartitions.getWhere(),
ImmutableList.of(),
Optional.empty(),
ImmutableList.<SortItem>builder()
.addAll(showPartitions.getOrderBy())
.add(ascending("partition_number"))
.build(),
showPartitions.getLimit());
return process(query, context);
}
@Override
protected TupleDescriptor visitShowFunctions(ShowFunctions node, AnalysisContext context)
{
Query query = simpleQuery(selectList(
aliasedName("function_name", "Function"),
aliasedName("return_type", "Return Type"),
aliasedName("argument_types", "Argument Types"),
aliasedName("function_type", "Function Type"),
aliasedName("deterministic", "Deterministic"),
aliasedName("description", "Description")),
from(session.getCatalog(), TABLE_INTERNAL_FUNCTIONS),
ordering(
ascending("function_name"),
ascending("return_type"),
ascending("argument_types"),
ascending("function_type")));
return process(query, context);
}
@Override
protected TupleDescriptor visitShowSession(ShowSession node, AnalysisContext context)
{
ImmutableList.Builder<Expression> rows = ImmutableList.builder();
List<SessionPropertyValue> sessionProperties = metadata.getSessionPropertyManager().getAllSessionProperties(session);
for (SessionPropertyValue sessionProperty : sessionProperties) {
String value = sessionProperty.getValue();
String defaultValue = sessionProperty.getDefaultValue();
rows.add(row(
new StringLiteral(sessionProperty.getFullyQualifiedName()),
new StringLiteral(nullToEmpty(value)),
new StringLiteral(nullToEmpty(defaultValue)),
new StringLiteral(sessionProperty.getType()),
new StringLiteral(sessionProperty.getDescription()),
TRUE_LITERAL));
}
// add bogus row so we can support empty sessions
StringLiteral empty = new StringLiteral("");
rows.add(row(empty, empty, empty, empty, empty, FALSE_LITERAL));
Query query = simpleQuery(
selectList(
aliasedName("name", "Name"),
aliasedName("value", "Value"),
aliasedName("default", "Default"),
aliasedName("type", "Type"),
aliasedName("description", "Description")),
aliased(
new Values(rows.build()),
"session",
ImmutableList.of("name", "value", "default", "type", "description", "include")),
nameReference("include"));
return process(query, context);
}
@Override
protected TupleDescriptor visitInsert(Insert insert, AnalysisContext context)
{
QualifiedTableName targetTable = MetadataUtil.createQualifiedTableName(session, insert.getTarget());
if (metadata.getView(session, targetTable).isPresent()) {
throw new SemanticException(NOT_SUPPORTED, insert, "Inserting into views is not supported");
}
analysis.setUpdateType("INSERT");
// analyze the query that creates the data
TupleDescriptor descriptor = process(insert.getQuery(), context);
// verify the insert destination columns match the query
Optional<TableHandle> targetTableHandle = metadata.getTableHandle(session, targetTable);
if (!targetTableHandle.isPresent()) {
throw new SemanticException(MISSING_TABLE, insert, "Table '%s' does not exist", targetTable);
}
accessControl.checkCanInsertIntoTable(session.getIdentity(), targetTable);
analysis.setInsertTarget(targetTableHandle.get());
List<ColumnMetadata> columns = metadata.getTableMetadata(session, targetTableHandle.get()).getColumns();
Iterable<Type> tableTypes = columns.stream()
.filter(column -> !column.isHidden())
.map(ColumnMetadata::getType)
.collect(toImmutableList());
Iterable<Type> queryTypes = transform(descriptor.getVisibleFields(), Field::getType);
if (!elementsEqual(tableTypes, queryTypes)) {
throw new SemanticException(MISMATCHED_SET_COLUMN_TYPES, insert, "Insert query has mismatched column types: " +
"Table: (" + Joiner.on(", ").join(tableTypes) + "), " +
"Query: (" + Joiner.on(", ").join(queryTypes) + ")");
}
return new TupleDescriptor(Field.newUnqualified("rows", BIGINT));
}
@Override
protected TupleDescriptor visitDelete(Delete node, AnalysisContext context)
{
QualifiedTableName tableName = MetadataUtil.createQualifiedTableName(session, node.getTable().getName());
if (metadata.getView(session, tableName).isPresent()) {
throw new SemanticException(NOT_SUPPORTED, node, "Deleting from views is not supported");
}
analysis.setUpdateType("DELETE");
analysis.setDelete(node);
// Tuple analyzer checks for select permissions but DELETE has a separate permission, so disable access checks
TupleAnalyzer analyzer = new TupleAnalyzer(analysis, session, metadata, sqlParser, new AllowAllAccessControl(), experimentalSyntaxEnabled);
TupleDescriptor descriptor = analyzer.process(node.getTable(), context);
node.getWhere().ifPresent(where -> analyzer.analyzeWhere(node, descriptor, context, where));
accessControl.checkCanDeleteFromTable(session.getIdentity(), tableName);
return new TupleDescriptor(Field.newUnqualified("rows", BIGINT));
}
@Override
protected TupleDescriptor visitCreateTableAsSelect(CreateTableAsSelect node, AnalysisContext context)
{
analysis.setUpdateType("CREATE TABLE");
// turn this into a query that has a new table writer node on top.
QualifiedTableName targetTable = MetadataUtil.createQualifiedTableName(session, node.getName());
analysis.setCreateTableDestination(targetTable);
for (Expression expression : node.getProperties().values()) {
// analyze table property value expressions which must be constant
createConstantAnalyzer(metadata, session)
.analyze(expression, new TupleDescriptor(), context);
}
analysis.setCreateTableProperties(node.getProperties());
Optional<TableHandle> targetTableHandle = metadata.getTableHandle(session, targetTable);
if (targetTableHandle.isPresent()) {
throw new SemanticException(TABLE_ALREADY_EXISTS, node, "Destination table '%s' already exists", targetTable);
}
accessControl.checkCanCreateTable(session.getIdentity(), targetTable);
// analyze the query that creates the table
TupleDescriptor descriptor = process(node.getQuery(), context);
validateColumnNames(node, descriptor);
return new TupleDescriptor(Field.newUnqualified("rows", BIGINT));
}
@Override
protected TupleDescriptor visitCreateView(CreateView node, AnalysisContext context)
{
analysis.setUpdateType("CREATE VIEW");
// analyze the query that creates the view
TupleDescriptor descriptor = process(node.getQuery(), context);
QualifiedTableName viewName = MetadataUtil.createQualifiedTableName(session, node.getName());
accessControl.checkCanCreateView(session.getIdentity(), viewName);
validateColumnNames(node, descriptor);
return descriptor;
}
private static void validateColumnNames(Statement node, TupleDescriptor descriptor)
{
// verify that all column names are specified and unique
// TODO: collect errors and return them all at once
Set<String> names = new HashSet<>();
for (Field field : descriptor.getVisibleFields()) {
Optional<String> fieldName = field.getName();
if (!fieldName.isPresent()) {
throw new SemanticException(COLUMN_NAME_NOT_SPECIFIED, node, "Column name not specified at position %s", descriptor.indexOf(field) + 1);
}
if (!names.add(fieldName.get())) {
throw new SemanticException(DUPLICATE_COLUMN_NAME, node, "Column name '%s' specified more than once", fieldName.get());
}
}
}
@Override
protected TupleDescriptor visitExplain(Explain node, AnalysisContext context)
throws SemanticException
{
checkState(queryExplainer.isPresent(), "query explainer not available");
ExplainType.Type planType = LOGICAL;
ExplainFormat.Type planFormat = TEXT;
List<ExplainOption> options = node.getOptions();
for (ExplainOption option : options) {
if (option instanceof ExplainType) {
planType = ((ExplainType) option).getType();
break;
}
}
for (ExplainOption option : options) {
if (option instanceof ExplainFormat) {
planFormat = ((ExplainFormat) option).getType();
break;
}
}
String queryPlan = getQueryPlan(node, planType, planFormat);
Query query = simpleQuery(
selectList(new AllColumns()),
aliased(
values(row(new StringLiteral(queryPlan))),
"plan",
ImmutableList.of("Query Plan")));
return process(query, context);
}
private String getQueryPlan(Explain node, ExplainType.Type planType, ExplainFormat.Type planFormat)
throws IllegalArgumentException
{
switch (planFormat) {
case GRAPHVIZ:
return queryExplainer.get().getGraphvizPlan(session, node.getStatement(), planType);
case TEXT:
return queryExplainer.get().getPlan(session, node.getStatement(), planType);
case JSON:
// ignore planType if planFormat is JSON
return queryExplainer.get().getJsonPlan(session, node.getStatement());
}
throw new IllegalArgumentException("Invalid Explain Format: " + planFormat.toString());
}
@Override
protected TupleDescriptor visitQuery(Query node, AnalysisContext parentContext)
{
AnalysisContext context = new AnalysisContext(parentContext);
if (node.getApproximate().isPresent()) {
if (!experimentalSyntaxEnabled) {
throw new SemanticException(NOT_SUPPORTED, node, "approximate queries are not enabled");
}
context.setApproximate(true);
}
analyzeWith(node, context);
TupleAnalyzer analyzer = new TupleAnalyzer(analysis, session, metadata, sqlParser, accessControl, experimentalSyntaxEnabled);
TupleDescriptor descriptor = analyzer.process(node.getQueryBody(), context);
analyzeOrderBy(node, descriptor, context);
// Input fields == Output fields
analysis.setOutputDescriptor(node, descriptor);
analysis.setOutputExpressions(node, descriptorToFields(descriptor));
analysis.setQuery(node);
return descriptor;
}
private static List<FieldOrExpression> descriptorToFields(TupleDescriptor tupleDescriptor)
{
ImmutableList.Builder<FieldOrExpression> builder = ImmutableList.builder();
for (int fieldIndex = 0; fieldIndex < tupleDescriptor.getAllFieldCount(); fieldIndex++) {
builder.add(new FieldOrExpression(fieldIndex));
}
return builder.build();
}
private void analyzeWith(Query node, AnalysisContext context)
{
// analyze WITH clause
if (!node.getWith().isPresent()) {
return;
}
With with = node.getWith().get();
if (with.isRecursive()) {
throw new SemanticException(NOT_SUPPORTED, with, "Recursive WITH queries are not supported");
}
for (WithQuery withQuery : with.getQueries()) {
if (withQuery.getColumnNames() != null && !withQuery.getColumnNames().isEmpty()) {
throw new SemanticException(NOT_SUPPORTED, withQuery, "Column alias not supported in WITH queries");
}
Query query = withQuery.getQuery();
process(query, context);
String name = withQuery.getName();
if (context.isNamedQueryDeclared(name)) {
throw new SemanticException(DUPLICATE_RELATION, withQuery, "WITH query name '%s' specified more than once", name);
}
context.addNamedQuery(name, query);
}
}
private void analyzeOrderBy(Query node, TupleDescriptor tupleDescriptor, AnalysisContext context)
{
List<SortItem> items = node.getOrderBy();
ImmutableList.Builder<FieldOrExpression> orderByFieldsBuilder = ImmutableList.builder();
if (!items.isEmpty()) {
for (SortItem item : items) {
Expression expression = item.getSortKey();
FieldOrExpression orderByField;
if (expression instanceof LongLiteral) {
// this is an ordinal in the output tuple
long ordinal = ((LongLiteral) expression).getValue();
if (ordinal < 1 || ordinal > tupleDescriptor.getVisibleFieldCount()) {
throw new SemanticException(INVALID_ORDINAL, expression, "ORDER BY position %s is not in select list", ordinal);
}
orderByField = new FieldOrExpression(Ints.checkedCast(ordinal - 1));
}
else {
// otherwise, just use the expression as is
orderByField = new FieldOrExpression(expression);
ExpressionAnalysis expressionAnalysis = ExpressionAnalyzer.analyzeExpression(session,
metadata,
accessControl, sqlParser,
tupleDescriptor,
analysis,
experimentalSyntaxEnabled,
context,
orderByField.getExpression());
analysis.addInPredicates(node, expressionAnalysis.getSubqueryInPredicates());
}
orderByFieldsBuilder.add(orderByField);
}
}
analysis.setOrderByExpressions(node, orderByFieldsBuilder.build());
}
private static Relation from(String catalog, SchemaTableName table)
{
return table(QualifiedName.of(catalog, table.getSchemaName(), table.getTableName()));
}
}
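// Illustrative sketch only (not part of the Presto sources above): it renders, as plain SQL text, the
// kind of query that visitShowTables assembles with the QueryUtil helpers, assuming TABLE_TABLES resolves
// to the information_schema.tables table. The class name, method name, and sample catalog/schema values
// below are hypothetical and exist purely for illustration.
final class ShowTablesRewriteSketch {
    static String buildShowTablesSql(String catalog, String schema, String likePattern) {
        StringBuilder sql = new StringBuilder()
                .append("SELECT table_name AS \"Table\" ")
                .append("FROM ").append(catalog).append(".information_schema.tables ")
                .append("WHERE table_schema = '").append(schema).append('\'');
        if (likePattern != null) {
            // mirrors the optional LikePredicate added for SHOW TABLES ... LIKE '<pattern>'
            sql.append(" AND table_name LIKE '").append(likePattern).append('\'');
        }
        return sql.append(" ORDER BY table_name").toString();
    }

    public static void main(String[] args) {
        // prints the rewritten form of: SHOW TABLES FROM hive.default LIKE 'orders%'
        System.out.println(buildShowTablesSql("hive", "default", "orders%"));
    }
}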
|
|
/*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.spi.impl.operationexecutor.impl;
import com.hazelcast.instance.NodeExtension;
import com.hazelcast.internal.metrics.MetricsProvider;
import com.hazelcast.internal.metrics.MetricsRegistry;
import com.hazelcast.internal.metrics.Probe;
import com.hazelcast.internal.util.RuntimeAvailableProcessors;
import com.hazelcast.internal.util.concurrent.MPSCQueue;
import com.hazelcast.logging.ILogger;
import com.hazelcast.logging.LoggingService;
import com.hazelcast.nio.Address;
import com.hazelcast.nio.Packet;
import com.hazelcast.spi.LiveOperations;
import com.hazelcast.spi.Operation;
import com.hazelcast.spi.UrgentSystemOperation;
import com.hazelcast.spi.impl.PartitionSpecificRunnable;
import com.hazelcast.spi.impl.operationexecutor.OperationExecutor;
import com.hazelcast.spi.impl.operationexecutor.OperationHostileThread;
import com.hazelcast.spi.impl.operationexecutor.OperationRunner;
import com.hazelcast.spi.impl.operationexecutor.OperationRunnerFactory;
import com.hazelcast.spi.impl.operationservice.impl.operations.Backup;
import com.hazelcast.spi.properties.HazelcastProperties;
import com.hazelcast.spi.properties.HazelcastProperty;
import com.hazelcast.util.concurrent.IdleStrategy;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.LinkedBlockingQueue;
import static com.hazelcast.internal.metrics.ProbeLevel.MANDATORY;
import static com.hazelcast.spi.impl.operationservice.impl.AsyncInboundResponseHandler.getIdleStrategy;
import static com.hazelcast.spi.properties.GroupProperty.GENERIC_OPERATION_THREAD_COUNT;
import static com.hazelcast.spi.properties.GroupProperty.PARTITION_COUNT;
import static com.hazelcast.spi.properties.GroupProperty.PARTITION_OPERATION_THREAD_COUNT;
import static com.hazelcast.spi.properties.GroupProperty.PRIORITY_GENERIC_OPERATION_THREAD_COUNT;
import static com.hazelcast.util.Preconditions.checkNotNull;
import static com.hazelcast.util.ThreadUtil.createThreadPoolName;
import static java.util.concurrent.TimeUnit.SECONDS;
/**
* A {@link com.hazelcast.spi.impl.operationexecutor.OperationExecutor} that schedules:
* <ol>
* <li>partition-specific operations to a specific partition-operation-thread (using a mod on the partition ID)</li>
* <li>non-partition-specific operations to generic-operation-threads</li>
* </ol>
* The {@link #execute(Object, int, boolean)} method accepts an Object instead of a Runnable to avoid
* creating wrapper runnables around tasks. This reduces the amount of object litter and therefore
* reduces pressure on the GC.
* <p>
* There are two categories of operation threads:
* <ol>
* <li>partition-specific operation threads: these threads are responsible for executing e.g. a map.put.
* Operations for the same partition always end up in the same thread.
* </li>
* <li>
* generic operation threads: these threads are responsible for executing operations that are not
* specific to a partition, e.g. a heartbeat.
* </li>
* </ol>
*/
@SuppressWarnings("checkstyle:methodcount")
public final class OperationExecutorImpl implements OperationExecutor, MetricsProvider {
public static final HazelcastProperty IDLE_STRATEGY
= new HazelcastProperty("hazelcast.operation.partitionthread.idlestrategy", "block");
private static final int TERMINATION_TIMEOUT_SECONDS = 3;
private final ILogger logger;
// all operations for specific partitions will be executed on these threads, e.g. map.put(key, value)
private final PartitionOperationThread[] partitionThreads;
private final OperationRunner[] partitionOperationRunners;
private final OperationQueue genericQueue
= new DefaultOperationQueue(new LinkedBlockingQueue<Object>(), new LinkedBlockingQueue<Object>());
// all operations that are not specific for a partition will be executed here, e.g. heartbeat or map.size()
private final GenericOperationThread[] genericThreads;
private final OperationRunner[] genericOperationRunners;
private final Address thisAddress;
private final OperationRunner adHocOperationRunner;
private final int priorityThreadCount;
public OperationExecutorImpl(HazelcastProperties properties,
LoggingService loggerService,
Address thisAddress,
OperationRunnerFactory runnerFactory,
NodeExtension nodeExtension,
String hzName,
ClassLoader configClassLoader) {
this.thisAddress = thisAddress;
this.logger = loggerService.getLogger(OperationExecutorImpl.class);
this.adHocOperationRunner = runnerFactory.createAdHocRunner();
this.partitionOperationRunners = initPartitionOperationRunners(properties, runnerFactory);
this.partitionThreads = initPartitionThreads(properties, hzName, nodeExtension, configClassLoader);
this.priorityThreadCount = properties.getInteger(PRIORITY_GENERIC_OPERATION_THREAD_COUNT);
this.genericOperationRunners = initGenericOperationRunners(properties, runnerFactory);
this.genericThreads = initGenericThreads(hzName, nodeExtension, configClassLoader);
}
private OperationRunner[] initPartitionOperationRunners(HazelcastProperties properties,
OperationRunnerFactory runnerFactory) {
OperationRunner[] operationRunners = new OperationRunner[properties.getInteger(PARTITION_COUNT)];
for (int partitionId = 0; partitionId < operationRunners.length; partitionId++) {
operationRunners[partitionId] = runnerFactory.createPartitionRunner(partitionId);
}
return operationRunners;
}
private OperationRunner[] initGenericOperationRunners(HazelcastProperties properties, OperationRunnerFactory runnerFactory) {
int threadCount = properties.getInteger(GENERIC_OPERATION_THREAD_COUNT);
if (threadCount <= 0) {
// default generic operation thread count
int coreSize = RuntimeAvailableProcessors.get();
threadCount = Math.max(2, coreSize / 2);
}
OperationRunner[] operationRunners = new OperationRunner[threadCount + priorityThreadCount];
for (int partitionId = 0; partitionId < operationRunners.length; partitionId++) {
operationRunners[partitionId] = runnerFactory.createGenericRunner();
}
return operationRunners;
}
private PartitionOperationThread[] initPartitionThreads(HazelcastProperties properties, String hzName,
NodeExtension nodeExtension, ClassLoader configClassLoader) {
int threadCount = properties.getInteger(PARTITION_OPERATION_THREAD_COUNT);
if (threadCount <= 0) {
// default partition operation thread count
int coreSize = RuntimeAvailableProcessors.get();
threadCount = Math.max(2, coreSize);
}
IdleStrategy idleStrategy = getIdleStrategy(properties, IDLE_STRATEGY);
PartitionOperationThread[] threads = new PartitionOperationThread[threadCount];
for (int threadId = 0; threadId < threads.length; threadId++) {
String threadName = createThreadPoolName(hzName, "partition-operation") + threadId;
// the normalQueue will be a blocking queue. We don't want to idle, because there are many operation threads.
MPSCQueue<Object> normalQueue = new MPSCQueue<Object>(idleStrategy);
OperationQueue operationQueue = new DefaultOperationQueue(normalQueue, new ConcurrentLinkedQueue<Object>());
PartitionOperationThread partitionThread = new PartitionOperationThread(threadName, threadId, operationQueue, logger,
nodeExtension, partitionOperationRunners, configClassLoader);
threads[threadId] = partitionThread;
normalQueue.setConsumerThread(partitionThread);
}
// we need to assign the PartitionOperationThreads to all OperationRunners they own
for (int partitionId = 0; partitionId < partitionOperationRunners.length; partitionId++) {
int threadId = getPartitionThreadId(partitionId, threadCount);
Thread thread = threads[threadId];
OperationRunner runner = partitionOperationRunners[partitionId];
runner.setCurrentThread(thread);
}
return threads;
}
private static int getPartitionThreadId(int partitionId, int partitionThreadCount) {
return partitionId % partitionThreadCount;
}
private GenericOperationThread[] initGenericThreads(String hzName, NodeExtension nodeExtension,
ClassLoader configClassLoader) {
// we create as many generic operation runners as there are generic threads
int threadCount = genericOperationRunners.length;
GenericOperationThread[] threads = new GenericOperationThread[threadCount];
int threadId = 0;
for (int threadIndex = 0; threadIndex < threads.length; threadIndex++) {
boolean priority = threadIndex < priorityThreadCount;
String baseName = priority ? "priority-generic-operation" : "generic-operation";
String threadName = createThreadPoolName(hzName, baseName) + threadId;
OperationRunner operationRunner = genericOperationRunners[threadIndex];
GenericOperationThread operationThread = new GenericOperationThread(
threadName, threadIndex, genericQueue, logger, nodeExtension, operationRunner, priority, configClassLoader);
threads[threadIndex] = operationThread;
operationRunner.setCurrentThread(operationThread);
if (threadIndex == priorityThreadCount - 1) {
threadId = 0;
} else {
threadId++;
}
}
return threads;
}
@Override
public void provideMetrics(MetricsRegistry registry) {
registry.scanAndRegister(this, "operation");
registry.collectMetrics((Object[]) genericThreads);
registry.collectMetrics((Object[]) partitionThreads);
registry.collectMetrics(adHocOperationRunner);
registry.collectMetrics((Object[]) genericOperationRunners);
registry.collectMetrics((Object[]) partitionOperationRunners);
}
@SuppressFBWarnings("EI_EXPOSE_REP")
@Override
public OperationRunner[] getPartitionOperationRunners() {
return partitionOperationRunners;
}
@SuppressFBWarnings("EI_EXPOSE_REP")
@Override
public OperationRunner[] getGenericOperationRunners() {
return genericOperationRunners;
}
@Override
public void populate(LiveOperations liveOperations) {
scan(partitionOperationRunners, liveOperations);
scan(genericOperationRunners, liveOperations);
}
private void scan(OperationRunner[] runners, LiveOperations result) {
for (OperationRunner runner : runners) {
Object task = runner.currentTask();
if (!(task instanceof Operation) || task.getClass() == Backup.class) {
continue;
}
Operation operation = (Operation) task;
result.add(operation.getCallerAddress(), operation.getCallId());
}
}
@Probe(name = "runningCount")
@Override
public int getRunningOperationCount() {
return getRunningPartitionOperationCount() + getRunningGenericOperationCount();
}
@Probe(name = "runningPartitionCount")
private int getRunningPartitionOperationCount() {
return getRunningOperationCount(partitionOperationRunners);
}
@Probe(name = "runningGenericCount")
private int getRunningGenericOperationCount() {
return getRunningOperationCount(genericOperationRunners);
}
private static int getRunningOperationCount(OperationRunner[] runners) {
int result = 0;
for (OperationRunner runner : runners) {
if (runner.currentTask() != null) {
result++;
}
}
return result;
}
@Override
@Probe(name = "queueSize", level = MANDATORY)
public int getQueueSize() {
int size = 0;
for (PartitionOperationThread partitionThread : partitionThreads) {
size += partitionThread.queue.normalSize();
}
size += genericQueue.normalSize();
return size;
}
@Override
@Probe(name = "priorityQueueSize", level = MANDATORY)
public int getPriorityQueueSize() {
int size = 0;
for (PartitionOperationThread partitionThread : partitionThreads) {
size += partitionThread.queue.prioritySize();
}
size += genericQueue.prioritySize();
return size;
}
@Probe
private int getGenericQueueSize() {
return genericQueue.normalSize();
}
@Probe
private int getGenericPriorityQueueSize() {
return genericQueue.prioritySize();
}
@Probe(name = "completedCount", level = MANDATORY)
public long getExecutedOperationCount() {
long result = adHocOperationRunner.executedOperationsCount();
for (OperationRunner runner : genericOperationRunners) {
result += runner.executedOperationsCount();
}
for (OperationRunner runner : partitionOperationRunners) {
result += runner.executedOperationsCount();
}
return result;
}
@Override
@Probe
public int getPartitionThreadCount() {
return partitionThreads.length;
}
@Override
@Probe
public int getGenericThreadCount() {
return genericThreads.length;
}
@Override
public boolean isOperationThread() {
return Thread.currentThread() instanceof OperationThread;
}
@Override
public int getPartitionThreadId(int partitionId) {
return getPartitionThreadId(partitionId, partitionThreads.length);
}
@Override
public void execute(Operation op) {
checkNotNull(op, "op can't be null");
execute(op, op.getPartitionId(), op.isUrgent());
}
@Override
public void execute(PartitionSpecificRunnable task) {
checkNotNull(task, "task can't be null");
execute(task, task.getPartitionId(), task instanceof UrgentSystemOperation);
}
@Override
public void handle(Packet packet) {
execute(packet, packet.getPartitionId(), packet.isUrgent());
}
private void execute(Object task, int partitionId, boolean priority) {
if (partitionId < 0) {
genericQueue.add(task, priority);
} else {
OperationThread partitionThread = partitionThreads[toPartitionThreadIndex(partitionId)];
partitionThread.queue.add(task, priority);
}
}
@Override
public void executeOnPartitionThreads(Runnable task) {
checkNotNull(task, "task can't be null");
for (OperationThread partitionThread : partitionThreads) {
partitionThread.queue.add(task, true);
}
}
@Override
public void interruptPartitionThreads() {
for (PartitionOperationThread partitionThread : partitionThreads) {
partitionThread.interrupt();
}
}
@Override
public void run(Operation operation) {
checkNotNull(operation, "operation can't be null");
if (!isRunAllowed(operation)) {
throw new IllegalThreadStateException("Operation '" + operation + "' cannot be run in current thread: "
+ Thread.currentThread());
}
OperationRunner operationRunner = getOperationRunner(operation);
operationRunner.run(operation);
}
OperationRunner getOperationRunner(Operation operation) {
checkNotNull(operation, "operation can't be null");
if (operation.getPartitionId() >= 0) {
// retrieving an OperationRunner for a partition specific operation is easy; we can just use the partition ID.
return partitionOperationRunners[operation.getPartitionId()];
}
Thread currentThread = Thread.currentThread();
if (!(currentThread instanceof OperationThread)) {
// if the current thread is not an operation thread, we return the adHocOperationRunner
return adHocOperationRunner;
}
// It is a generic operation and we are running on an operation-thread. So we can just return the operation-runner
// for that thread. There won't be any partition-conflict since generic operations are allowed to be executed by
// a partition-specific operation-runner.
OperationThread operationThread = (OperationThread) currentThread;
return operationThread.currentRunner;
}
@Override
public void runOrExecute(Operation op) {
if (isRunAllowed(op)) {
run(op);
} else {
execute(op);
}
}
@Override
public boolean isRunAllowed(Operation op) {
checkNotNull(op, "op can't be null");
Thread currentThread = Thread.currentThread();
// IO threads are not allowed to run any operation
if (currentThread instanceof OperationHostileThread) {
return false;
}
int partitionId = op.getPartitionId();
// TODO: do we want to allow non partition specific tasks to be run on a partitionSpecific operation thread?
if (partitionId < 0) {
return true;
}
// we are only allowed to execute partition aware actions on an OperationThread
if (currentThread.getClass() != PartitionOperationThread.class) {
return false;
}
PartitionOperationThread partitionThread = (PartitionOperationThread) currentThread;
// so it's a partition operation thread, now we need to make sure that this operation thread is allowed
// to execute operations for this particular partitionId
return toPartitionThreadIndex(partitionId) == partitionThread.threadId;
}
@Override
public boolean isInvocationAllowed(Operation op, boolean isAsync) {
checkNotNull(op, "op can't be null");
Thread currentThread = Thread.currentThread();
// IO threads are not allowed to run any operation
if (currentThread instanceof OperationHostileThread) {
return false;
}
// if it is async we don't need to check if it is PartitionOperationThread or not
if (isAsync) {
return true;
}
// allowed to invoke a non-partition-specific task
if (op.getPartitionId() < 0) {
return true;
}
// allowed to invoke from non PartitionOperationThreads (including GenericOperationThread)
if (currentThread.getClass() != PartitionOperationThread.class) {
return true;
}
PartitionOperationThread partitionThread = (PartitionOperationThread) currentThread;
OperationRunner runner = partitionThread.currentRunner;
if (runner != null) {
// a non-null runner means this is a nested call;
// in that case the partition IDs of the inner and outer operations have to match
return runner.getPartitionId() == op.getPartitionId();
}
return toPartitionThreadIndex(op.getPartitionId()) == partitionThread.threadId;
}
// public for testing purposes
public int toPartitionThreadIndex(int partitionId) {
return partitionId % partitionThreads.length;
}
@Override
public void start() {
logger.info("Starting " + partitionThreads.length + " partition threads and "
+ genericThreads.length + " generic threads (" + priorityThreadCount + " dedicated for priority tasks)");
startAll(partitionThreads);
startAll(genericThreads);
}
private static void startAll(OperationThread[] operationThreads) {
for (OperationThread thread : operationThreads) {
thread.start();
}
}
@Override
public void shutdown() {
shutdownAll(partitionThreads);
shutdownAll(genericThreads);
awaitTermination(partitionThreads);
awaitTermination(genericThreads);
}
private static void shutdownAll(OperationThread[] operationThreads) {
for (OperationThread thread : operationThreads) {
thread.shutdown();
}
}
private static void awaitTermination(OperationThread[] operationThreads) {
for (OperationThread thread : operationThreads) {
try {
thread.awaitTermination(TERMINATION_TIMEOUT_SECONDS, SECONDS);
} catch (InterruptedException ignored) {
Thread.currentThread().interrupt();
}
}
}
@Override
public String toString() {
return "OperationExecutorImpl{node=" + thisAddress + '}';
}
}
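// Illustrative sketch only (not part of the Hazelcast class above): it demonstrates the routing rule used
// by OperationExecutorImpl#execute(Object, int, boolean) together with getPartitionThreadId — a negative
// partition ID goes to the generic queue, otherwise the thread index is partitionId modulo the
// partition-thread count. The class and method names below are hypothetical.
final class PartitionRoutingSketch {
    static String describeRouting(int partitionId, int partitionThreadCount) {
        if (partitionId < 0) {
            return "generic queue";
        }
        return "partition-operation thread #" + (partitionId % partitionThreadCount);
    }

    public static void main(String[] args) {
        int partitionThreads = 4;
        // e.g. with 4 partition threads, partitions 0..7 map to threads 0,1,2,3,0,1,2,3
        for (int partitionId = -1; partitionId < 8; partitionId++) {
            System.out.println("partition " + partitionId + " -> " + describeRouting(partitionId, partitionThreads));
        }
    }
}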
|
|
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util.containers;
import com.intellij.util.SmartList;
import gnu.trove.THashMap;
import gnu.trove.TObjectHashingStrategy;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.Serializable;
import java.util.*;
/**
* Consider using the factory methods {@link #createLinked()}, {@link #createSet()}, {@link #createSmart()}, {@link #create(gnu.trove.TObjectHashingStrategy)} instead of overriding this class.
* @see com.intellij.util.containers.BidirectionalMultiMap
* @see com.intellij.util.containers.ConcurrentMultiMap
* @author Dmitry Avdeev
*/
public class MultiMap<K, V> implements Serializable {
public static final MultiMap EMPTY = new EmptyMap();
private static final long serialVersionUID = -2632269270151455493L;
protected final Map<K, Collection<V>> myMap;
private Collection<V> values;
public MultiMap() {
myMap = createMap();
}
public MultiMap(@NotNull MultiMap<? extends K, ? extends V> toCopy) {
this();
putAllValues(toCopy);
}
public MultiMap(int i, float v) {
myMap = createMap(i, v);
}
@NotNull
protected Map<K, Collection<V>> createMap() {
return new HashMap<K, Collection<V>>();
}
@NotNull
protected Map<K, Collection<V>> createMap(int initialCapacity, float loadFactor) {
return new HashMap<K, Collection<V>>(initialCapacity, loadFactor);
}
@NotNull
protected Collection<V> createCollection() {
return new SmartList<V>();
}
@NotNull
protected Collection<V> createEmptyCollection() {
return Collections.emptyList();
}
public <Kk extends K, Vv extends V> void putAllValues(@NotNull MultiMap<Kk, Vv> from) {
for (Map.Entry<Kk, Collection<Vv>> entry : from.entrySet()) {
putValues(entry.getKey(), entry.getValue());
}
}
public void putValues(K key, @NotNull Collection<? extends V> values) {
Collection<V> list = myMap.get(key);
if (list == null) {
list = createCollection();
myMap.put(key, list);
}
list.addAll(values);
}
public void putValue(@Nullable K key, V value) {
Collection<V> list = myMap.get(key);
if (list == null) {
list = createCollection();
myMap.put(key, list);
}
list.add(value);
}
@NotNull
public Set<Map.Entry<K, Collection<V>>> entrySet() {
return myMap.entrySet();
}
public boolean isEmpty() {
if (myMap.isEmpty()) return true;
for(Collection<V> valueList: myMap.values()) {
if (!valueList.isEmpty()) {
return false;
}
}
return true;
}
public boolean containsKey(K key) {
return myMap.containsKey(key);
}
public boolean containsScalarValue(V value) {
for(Collection<V> valueList: myMap.values()) {
if (valueList.contains(value)) {
return true;
}
}
return false;
}
@NotNull
public Collection<V> get(final K key) {
final Collection<V> collection = myMap.get(key);
return collection == null ? createEmptyCollection() : collection;
}
@NotNull
public Collection<V> getModifiable(final K key) {
Collection<V> collection = myMap.get(key);
if (collection == null) {
myMap.put(key, collection = createCollection());
}
return collection;
}
@NotNull
public Set<K> keySet() {
return myMap.keySet();
}
public int size() {
return myMap.size();
}
public void put(final K key, Collection<V> values) {
myMap.put(key, values);
}
/**
* @deprecated use {@link #remove(Object, Object)} instead
*/
@Deprecated
public void removeValue(K key, V value) {
remove(key, value);
}
public boolean remove(final K key, final V value) {
final Collection<V> values = myMap.get(key);
if (values != null) {
boolean removed = values.remove(value);
if (values.isEmpty()) {
myMap.remove(key);
}
return removed;
}
return false;
}
@NotNull
public Collection<? extends V> values() {
if (values == null) {
values = new AbstractCollection<V>() {
@NotNull
@Override
public Iterator<V> iterator() {
return new Iterator<V>() {
private final Iterator<Collection<V>> mapIterator = myMap.values().iterator();
private Iterator<V> itr = EmptyIterator.getInstance();
@Override
public boolean hasNext() {
do {
if (itr.hasNext()) return true;
if (!mapIterator.hasNext()) return false;
itr = mapIterator.next().iterator();
} while (true);
}
@Override
public V next() {
do {
if (itr.hasNext()) return itr.next();
if (!mapIterator.hasNext()) throw new NoSuchElementException();
itr = mapIterator.next().iterator();
} while (true);
}
@Override
public void remove() {
itr.remove();
}
};
}
@Override
public int size() {
int res = 0;
for (Collection<V> vs : myMap.values()) {
res += vs.size();
}
return res;
}
// Don't remove this method!!!
@Override
public boolean contains(Object o) {
for (Collection<V> vs : myMap.values()) {
if (vs.contains(o)) return true;
}
return false;
}
};
}
return values;
}
public void clear() {
myMap.clear();
}
public Collection<V> remove(K key) {
return myMap.remove(key);
}
@NotNull
public static <K, V> MultiMap<K, V> emptyInstance() {
@SuppressWarnings({"unchecked"}) final MultiMap<K, V> empty = EMPTY;
return empty;
}
/**
* Null keys supported.
*/
@NotNull
public static <K, V> MultiMap<K, V> create() {
return new MultiMap<K, V>();
}
@NotNull
public static <K, V> MultiMap<K, V> create(@NotNull final TObjectHashingStrategy<K> strategy) {
return new MultiMap<K, V>() {
@NotNull
@Override
protected Map<K, Collection<V>> createMap() {
return new THashMap<K, Collection<V>>(strategy);
}
};
}
@NotNull
public static <K, V> MultiMap<K, V> createLinked() {
return new LinkedMultiMap<K, V>();
}
@NotNull
public static <K, V> MultiMap<K, V> createLinkedSet() {
return new LinkedMultiMap<K, V>() {
@NotNull
@Override
protected Collection<V> createCollection() {
return ContainerUtil.newLinkedHashSet();
}
};
}
/**
 * @deprecated Use {@link #createSmart()}
 */
@Deprecated
@SuppressWarnings("unused")
@NotNull
public static <K, V> MultiMap<K, V> createSmartList() {
return createSmart();
}
@NotNull
public static <K, V> MultiMap<K, V> createSmart() {
return new MultiMap<K, V>() {
@NotNull
@Override
protected Map<K, Collection<V>> createMap() {
return new THashMap<K, Collection<V>>();
}
};
}
@NotNull
public static <K, V> MultiMap<K, V> createConcurrentSet() {
return new MultiMap<K, V>() {
@NotNull
@Override
protected Collection<V> createCollection() {
return ContainerUtil.newConcurrentSet();
}
@NotNull
@Override
protected Collection<V> createEmptyCollection() {
return Collections.emptySet();
}
@NotNull
@Override
protected Map<K, Collection<V>> createMap() {
return ContainerUtil.newConcurrentMap();
}
};
}
@NotNull
public static <K, V> MultiMap<K, V> createSet() {
return new MultiMap<K, V>() {
@NotNull
@Override
protected Collection<V> createCollection() {
return new SmartHashSet<V>();
}
@NotNull
@Override
protected Collection<V> createEmptyCollection() {
return Collections.emptySet();
}
@NotNull
@Override
protected Map<K, Collection<V>> createMap() {
return new THashMap<K, Collection<V>>();
}
};
}
@NotNull
public static <K, V> MultiMap<K, V> create(int i, float v) {
return new MultiMap<K, V>(i, v);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof MultiMap)) return false;
return myMap.equals(((MultiMap)o).myMap);
}
@Override
public int hashCode() {
return myMap.hashCode();
}
@Override
public String toString() {
return myMap.toString();
}
private static class EmptyMap extends MultiMap {
@NotNull
@Override
protected Map createMap() {
return Collections.emptyMap();
}
}
}
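// Illustrative usage sketch only (not part of the IntelliJ sources above): it exercises the MultiMap API
// defined in this file — putValue appends to a per-key collection, get returns an empty collection for
// missing keys, and remove(key, value) drops the key once its collection becomes empty. The class name
// below is hypothetical.
final class MultiMapUsageSketch {
    public static void main(String[] args) {
        MultiMap<String, Integer> map = MultiMap.create();
        map.putValue("even", 2);
        map.putValue("even", 4);
        map.putValue("odd", 3);

        System.out.println(map.get("even"));            // [2, 4]
        System.out.println(map.get("missing"));         // [] — createEmptyCollection(), never null
        System.out.println(map.containsScalarValue(3)); // true

        map.remove("odd", 3);
        System.out.println(map.containsKey("odd"));     // false — key removed once its collection is empty
    }
}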
|