Dataset schema (column name, type, and observed value range):

  method                string, length 13 to 441k
  clean_method          string, length 7 to 313k
  doc                   string, length 17 to 17.3k
  comment               string, length 3 to 1.42k
  method_name           string, length 1 to 273
  extra                 dict
  imports               sequence
  imports_info          string, length 19 to 34.8k
  cluster_imports_info  string, length 15 to 3.66k
  libraries             sequence
  libraries_info        string, length 6 to 661
  id                    int64, range 0 to 2.92M
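To make the schema easier to read alongside the rows that follow, here is a minimal sketch of a single record as a plain Java class (Java being the language of the sampled methods). The class name and the concrete container types (List<String>, Map<String, Object>) are assumptions chosen for illustration; the field names, value kinds, and length ranges are taken directly from the schema above and from the row contents below.

```java
import java.util.List;
import java.util.Map;

/** Hypothetical POJO mirroring one row of the schema above (names and container types assumed). */
public class MethodRecord {
    public String method;              // raw method source, 13 to 441k chars
    public String cleanMethod;         // normalized source with identifiers/literals masked, 7 to 313k chars
    public String doc;                 // original Javadoc block, 17 to 17.3k chars
    public String comment;             // short natural-language summary, 3 to 1.42k chars
    public String methodName;          // declared method name, 1 to 273 chars
    public Map<String, Object> extra;  // repo_name, path, license, size
    public List<String> imports;       // fully qualified imported types
    public String importsInfo;         // import statements as written in the source file
    public String clusterImportsInfo;  // wildcard (package-level) import statements
    public List<String> libraries;     // top-level package groups
    public String librariesInfo;       // package groups as a flat string
    public long id;                    // record id, 0 to 2.92M
}
```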
public byte[] getXMLMetadata() { COSReference refMetadata = dRoot.getReference(COSName.METADATA); if (refMetadata == null) return null; COSStream dMetadata = context.objectCache.getStream(refMetadata); if (dMetadata == null) return null; return dMetadata.getData(); }
byte[] function() { COSReference refMetadata = dRoot.getReference(COSName.METADATA); if (refMetadata == null) return null; COSStream dMetadata = context.objectCache.getStream(refMetadata); if (dMetadata == null) return null; return dMetadata.getData(); }
/** * Get the metadata that is part of the document catalog. This will * return null if there is no meta data for this object. * * @return The metadata for this object. */
Get the metadata that is part of the document catalog. This will return null if there is no meta data for this object
getXMLMetadata
{ "repo_name": "aishwaryakumar/PdfParse-android", "path": "src/org/pdfparse/PDFDocCatalog.java", "license": "lgpl-2.1", "size": 4357 }
[ "org.pdfparse.cos.COSName", "org.pdfparse.cos.COSReference", "org.pdfparse.cos.COSStream" ]
import org.pdfparse.cos.COSName; import org.pdfparse.cos.COSReference; import org.pdfparse.cos.COSStream;
import org.pdfparse.cos.*;
[ "org.pdfparse.cos" ]
org.pdfparse.cos;
2,346,309
@Override public String getRegistryID() { String IDRegistry = XMLConfig.get(configConf, "@id-registry", ""); logger.debug("IDRegistry = " + IDRegistry); return IDRegistry; }
String function() { String IDRegistry = XMLConfig.get(configConf, STR, STRIDRegistry = " + IDRegistry); return IDRegistry; }
/** * This method return service details. * * @see it.greenvulcano.gvesb.j2ee.xmlRegistry.Registry#getRegistryID() */
This method return service details
getRegistryID
{ "repo_name": "green-vulcano/gv-engine", "path": "gvengine/gvbase/src/main/java/it/greenvulcano/gvesb/j2ee/xmlRegistry/impl/RegistryImpl.java", "license": "lgpl-3.0", "size": 23946 }
[ "it.greenvulcano.configuration.XMLConfig" ]
import it.greenvulcano.configuration.XMLConfig;
import it.greenvulcano.configuration.*;
[ "it.greenvulcano.configuration" ]
it.greenvulcano.configuration;
1,119,460
public void removeDatatransferProgressListener( OnDatatransferProgressListener listener, Account account, OCFile file ) { if (account == null || file == null || listener == null) return; String targetKey = buildRemoteName(account.name, file.getRemotePath()); if (mBoundListeners.get(targetKey) == listener) { mBoundListeners.remove(targetKey); } }
void function( OnDatatransferProgressListener listener, Account account, OCFile file ) { if (account == null || file == null || listener == null) return; String targetKey = buildRemoteName(account.name, file.getRemotePath()); if (mBoundListeners.get(targetKey) == listener) { mBoundListeners.remove(targetKey); } }
/** * Removes a listener interested in the progress of the upload for a concrete file. * * @param listener Object to notify about progress of transfer. * @param account ownCloud account holding the file of interest. * @param file {@link OCFile} of interest for listener. */
Removes a listener interested in the progress of the upload for a concrete file
removeDatatransferProgressListener
{ "repo_name": "gdieleman/android", "path": "src/com/owncloud/android/files/services/FileUploader.java", "license": "gpl-2.0", "size": 47234 }
[ "android.accounts.Account", "com.owncloud.android.datamodel.OCFile", "com.owncloud.android.lib.common.network.OnDatatransferProgressListener" ]
import android.accounts.Account; import com.owncloud.android.datamodel.OCFile; import com.owncloud.android.lib.common.network.OnDatatransferProgressListener;
import android.accounts.*; import com.owncloud.android.datamodel.*; import com.owncloud.android.lib.common.network.*;
[ "android.accounts", "com.owncloud.android" ]
android.accounts; com.owncloud.android;
581,708
public void getMassData (MassData massData) { m_shape.computeMass(massData, m_density); }
void function (MassData massData) { m_shape.computeMass(massData, m_density); }
/** Get the mass data for this fixture. The mass data is based on the density and the shape. The rotational inertia is about the * shape's origin. * * @return */
Get the mass data for this fixture. The mass data is based on the density and the shape. The rotational inertia is about the shape's origin
getMassData
{ "repo_name": "domix/libgdx", "path": "backends/gdx-backends-gwt/src/com/badlogic/gdx/backends/gwt/emu/org/jbox2d/dynamics/Fixture.java", "license": "apache-2.0", "size": 11348 }
[ "org.jbox2d.collision.shapes.MassData" ]
import org.jbox2d.collision.shapes.MassData;
import org.jbox2d.collision.shapes.*;
[ "org.jbox2d.collision" ]
org.jbox2d.collision;
2,000,229
protected void readNonEmptyExistingRow(UCon pUCon) { //Check the change number String lCurrentChangeNum = XFUtil.nvl(mDOMAccessor.readChangeNumber(pUCon), ""); if(!lCurrentChangeNum.equals(mDOMChangeNumber)) { Track.info("ChangeNumberMismatch", "Change number mismatch; reloading document " + (XFUtil.isNull(mDOMChangeNumber) ? "(cached change number was null)" : "(cached=" + mDOMChangeNumber + ", database=" + lCurrentChangeNum + ")")); Track.pushDebug("RetrieveDOM"); try { //Important: update the DOM reference to a new object, don't just update the existing DOM contents. If another thread //is using the DOM it retrieved from this WorkDoc we could get concurrency issues if we didn't do this. mDOM = mDOMAccessor.retrieveDOM(pUCon); } finally { Track.pop("RetrieveDOM"); } mDOMChangeNumber = mDOM.getAttr(CHANGE_NUMBER_ATTR_NAME); Track.info("RetrievedDOMChangeNumber", mDOMChangeNumber); } else { //We have the latest version of the DOM Track.info("ChangeNumberMatch", "Change numbers consistent; no reload required (" + mDOMChangeNumber + ")"); } }
void function(UCon pUCon) { String lCurrentChangeNum = XFUtil.nvl(mDOMAccessor.readChangeNumber(pUCon), STRChangeNumberMismatchSTRChange number mismatch; reloading document STR(cached change number was null)STR(cached=STR, database=STR)STRRetrieveDOMSTRRetrieveDOMSTRRetrievedDOMChangeNumberSTRChangeNumberMatchSTRChange numbers consistent; no reload required (STR)"); } }
/** * Optionally reads the DOM from the current accessor if the change number on the database is different to the cached * change number. If the change numbers match, no action is taken. * @param pUCon */
Optionally reads the DOM from the current accessor if the change number on the database is different to the cached change number. If the change numbers match, no action is taken
readNonEmptyExistingRow
{ "repo_name": "Fivium/FOXopen", "path": "src/main/java/net/foxopen/fox/database/storage/dom/XMLWorkDoc.java", "license": "gpl-3.0", "size": 12688 }
[ "net.foxopen.fox.XFUtil", "net.foxopen.fox.database.UCon" ]
import net.foxopen.fox.XFUtil; import net.foxopen.fox.database.UCon;
import net.foxopen.fox.*; import net.foxopen.fox.database.*;
[ "net.foxopen.fox" ]
net.foxopen.fox;
1,313,383
void storeProjectSettings(String sessionId, long projectId, String settings) throws InvalidSessionException;
void storeProjectSettings(String sessionId, long projectId, String settings) throws InvalidSessionException;
/** * Stores a string with the project settings. * @param sessionId current session id * @param projectId project ID * @param settings project settings */
Stores a string with the project settings
storeProjectSettings
{ "repo_name": "AlanRosenthal/appinventor-sources", "path": "appinventor/appengine/src/com/google/appinventor/shared/rpc/project/ProjectService.java", "license": "mit", "size": 8719 }
[ "com.google.appinventor.shared.rpc.InvalidSessionException" ]
import com.google.appinventor.shared.rpc.InvalidSessionException;
import com.google.appinventor.shared.rpc.*;
[ "com.google.appinventor" ]
com.google.appinventor;
1,765,559
private static Way makeCycleWay(Way way) { Way cycleWay = new Way(way.getId(), way.getPoints()); cycleWay.copyTags(way); cycleWay.addTag("access", "no"); cycleWay.addTag("bicycle", "yes"); cycleWay.addTag("mkgmap:synthesised", "yes"); cycleWay.addTag(onewayTagKey, "no"); // remove explicit access tags cycleWay.deleteTag("foot"); cycleWay.deleteTag("motorcar"); cycleWay.deleteTag("goods"); cycleWay.deleteTag("hgv"); cycleWay.deleteTag("bus"); cycleWay.deleteTag("taxi"); cycleWay.deleteTag("emergency"); cycleWay.deleteTag("vehicle"); cycleWay.deleteTag("motor_vehicle"); cycleWay.deleteTag("carpool"); cycleWay.deleteTag("motorcycle"); cycleWay.deleteTag("psv"); cycleWay.deleteTag("truck"); return cycleWay; }
static Way function(Way way) { Way cycleWay = new Way(way.getId(), way.getPoints()); cycleWay.copyTags(way); cycleWay.addTag(STR, "no"); cycleWay.addTag(STR, "yes"); cycleWay.addTag(STR, "yes"); cycleWay.addTag(onewayTagKey, "no"); cycleWay.deleteTag("foot"); cycleWay.deleteTag(STR); cycleWay.deleteTag("goods"); cycleWay.deleteTag("hgv"); cycleWay.deleteTag("bus"); cycleWay.deleteTag("taxi"); cycleWay.deleteTag(STR); cycleWay.deleteTag(STR); cycleWay.deleteTag(STR); cycleWay.deleteTag(STR); cycleWay.deleteTag(STR); cycleWay.deleteTag("psv"); cycleWay.deleteTag("truck"); return cycleWay; }
/** * Construct a cycleway that has the same points as an existing way. Used for separate * cycle lanes. * @param way The original way. * @return The new way, which will have the same points and have suitable cycle tags. */
Construct a cycleway that has the same points as an existing way. Used for separate cycle lanes
makeCycleWay
{ "repo_name": "openstreetmap/mkgmap", "path": "src/uk/me/parabola/mkgmap/osmstyle/StyledConverter.java", "license": "gpl-2.0", "size": 76098 }
[ "uk.me.parabola.mkgmap.reader.osm.Way" ]
import uk.me.parabola.mkgmap.reader.osm.Way;
import uk.me.parabola.mkgmap.reader.osm.*;
[ "uk.me.parabola" ]
uk.me.parabola;
2,752,409
private Map<Integer, List<InstanceId>> getFFDAllocation() { Map<String, Integer> parallelismMap = TopologyUtils.getComponentParallelism(topology); HashMap<Integer, List<InstanceId>> allocation = new HashMap<>(); assignInstancesToContainers(new ArrayList<Container>(), allocation, parallelismMap, 1, maxContainerResources); return allocation; }
Map<Integer, List<InstanceId>> function() { Map<String, Integer> parallelismMap = TopologyUtils.getComponentParallelism(topology); HashMap<Integer, List<InstanceId>> allocation = new HashMap<>(); assignInstancesToContainers(new ArrayList<Container>(), allocation, parallelismMap, 1, maxContainerResources); return allocation; }
/** * Get the instances' allocation based on the First Fit Decreasing algorithm * * @return Map &lt; containerId, list of InstanceId belonging to this container &gt; */
Get the instances' allocation based on the First Fit Decreasing algorithm
getFFDAllocation
{ "repo_name": "wangli1426/heron", "path": "heron/packing/src/java/com/twitter/heron/packing/binpacking/FirstFitDecreasingPacking.java", "license": "apache-2.0", "size": 18464 }
[ "com.twitter.heron.packing.Container", "com.twitter.heron.spi.packing.InstanceId", "com.twitter.heron.spi.utils.TopologyUtils", "java.util.ArrayList", "java.util.HashMap", "java.util.List", "java.util.Map" ]
import com.twitter.heron.packing.Container; import com.twitter.heron.spi.packing.InstanceId; import com.twitter.heron.spi.utils.TopologyUtils; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map;
import com.twitter.heron.packing.*; import com.twitter.heron.spi.packing.*; import com.twitter.heron.spi.utils.*; import java.util.*;
[ "com.twitter.heron", "java.util" ]
com.twitter.heron; java.util;
205,177
public final void initiate(AjaxRequestTarget target) { target.appendJavaScript(createDownloadJsStatement(false).render()); }
final void function(AjaxRequestTarget target) { target.appendJavaScript(createDownloadJsStatement(false).render()); }
/** * Call this method to initiate the download after an ajax refresh. */
Call this method to initiate the download after an ajax refresh
initiate
{ "repo_name": "openwide-java/owsi-core-parent", "path": "owsi-core/owsi-core-components/owsi-core-component-wicket-more/src/main/java/fr/openwide/core/wicket/more/export/AbstractDeferredDownloadBehavior.java", "license": "apache-2.0", "size": 5847 }
[ "org.apache.wicket.ajax.AjaxRequestTarget" ]
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.*;
[ "org.apache.wicket" ]
org.apache.wicket;
1,785,888
public CompletableFuture<Optional<CliReply>> deleteConfigRemoteLogServer() { RemoteLogServer logServer = RemoteLogServer.newBuilder().build(); GrpcResponseObserver<CliReply> observer = new GrpcResponseObserver<>(address, DELETE_CONFIG_REMOTE_LOG_SERVER); log.info("Delete remote log server for switch {}", address); stub.delConfigRemoteLogServer(logServer, observer); return observer.future .thenApply(responses -> responses.stream().findFirst()); }
CompletableFuture<Optional<CliReply>> function() { RemoteLogServer logServer = RemoteLogServer.newBuilder().build(); GrpcResponseObserver<CliReply> observer = new GrpcResponseObserver<>(address, DELETE_CONFIG_REMOTE_LOG_SERVER); log.info(STR, address); stub.delConfigRemoteLogServer(logServer, observer); return observer.future .thenApply(responses -> responses.stream().findFirst()); }
/** * Deletes a remote log server configuration. * * @return {@link CompletableFuture} with operation result. */
Deletes a remote log server configuration
deleteConfigRemoteLogServer
{ "repo_name": "jonvestal/open-kilda", "path": "src-java/grpc-speaker/grpc-service/src/main/java/org/openkilda/grpc/speaker/client/GrpcSession.java", "license": "apache-2.0", "size": 17314 }
[ "io.grpc.noviflow.CliReply", "io.grpc.noviflow.RemoteLogServer", "java.util.Optional", "java.util.concurrent.CompletableFuture" ]
import io.grpc.noviflow.CliReply; import io.grpc.noviflow.RemoteLogServer; import java.util.Optional; import java.util.concurrent.CompletableFuture;
import io.grpc.noviflow.*; import java.util.*; import java.util.concurrent.*;
[ "io.grpc.noviflow", "java.util" ]
io.grpc.noviflow; java.util;
2,055,246
public com.mozu.api.contracts.productruntime.ProductCollection getProducts(String filter, Integer startIndex, Integer pageSize, String sortBy, String responseFields) throws Exception { MozuClient<com.mozu.api.contracts.productruntime.ProductCollection> client = com.mozu.api.clients.commerce.catalog.storefront.ProductClient.getProductsClient(_dataViewMode, filter, startIndex, pageSize, sortBy, responseFields); client.setContext(_apiContext); client.executeRequest(); return client.getResult(); }
com.mozu.api.contracts.productruntime.ProductCollection function(String filter, Integer startIndex, Integer pageSize, String sortBy, String responseFields) throws Exception { MozuClient<com.mozu.api.contracts.productruntime.ProductCollection> client = com.mozu.api.clients.commerce.catalog.storefront.ProductClient.getProductsClient(_dataViewMode, filter, startIndex, pageSize, sortBy, responseFields); client.setContext(_apiContext); client.executeRequest(); return client.getResult(); }
/** * Retrieves a list of products that appear on the web storefront according to any specified filter criteria and sort options. * <p><pre><code> * Product product = new Product(); * ProductCollection productCollection = product.getProducts( filter, startIndex, pageSize, sortBy, responseFields); * </code></pre></p> * @param filter A set of expressions that consist of a field, operator, and value and represent search parameter syntax when filtering results of a query. Valid operators include equals (eq), does not equal (ne), greater than (gt), less than (lt), greater than or equal to (ge), less than or equal to (le), starts with (sw), or contains (cont). For example - "filter=IsDisplayed+eq+true" * @param pageSize The number of results to display on each page when creating paged results from a query. The maximum value is 200. * @param responseFields A list or array of fields returned for a call. These fields may be customized and may be used for various types of data calls in Mozu. For example, responseFields are returned for retrieving or updating attributes, carts, and messages in Mozu. * @param sortBy * @param startIndex * @return com.mozu.api.contracts.productruntime.ProductCollection * @see com.mozu.api.contracts.productruntime.ProductCollection */
Retrieves a list of products that appear on the web storefront according to any specified filter criteria and sort options. <code><code> Product product = new Product(); ProductCollection productCollection = product.getProducts( filter, startIndex, pageSize, sortBy, responseFields); </code></code>
getProducts
{ "repo_name": "johngatti/mozu-java", "path": "mozu-java-core/src/main/java/com/mozu/api/resources/commerce/catalog/storefront/ProductResource.java", "license": "mit", "size": 21082 }
[ "com.mozu.api.MozuClient" ]
import com.mozu.api.MozuClient;
import com.mozu.api.*;
[ "com.mozu.api" ]
com.mozu.api;
1,308,248
private final void sendChunkTo(ByteBuffer in, SocketChannel sc, ByteBuffer out) throws IOException { int bytesSent = in.remaining(); if (in.isDirect()) { flushBuffer(sc, out); while (in.remaining() > 0) { sc.write(in); } } else { // copy in to out. If out fills flush it int OUT_MAX = out.remaining(); if (bytesSent <= OUT_MAX) { out.put(in); } else { final byte[] bytes = in.array(); int off = in.arrayOffset() + in.position(); int len = bytesSent; while (len > 0) { int bytesThisTime = len; if (bytesThisTime > OUT_MAX) { bytesThisTime = OUT_MAX; } out.put(bytes, off, bytesThisTime); off += bytesThisTime; len -= bytesThisTime; flushBuffer(sc, out); OUT_MAX = out.remaining(); } in.position(in.limit()); } } this.size -= bytesSent; }
final void function(ByteBuffer in, SocketChannel sc, ByteBuffer out) throws IOException { int bytesSent = in.remaining(); if (in.isDirect()) { flushBuffer(sc, out); while (in.remaining() > 0) { sc.write(in); } } else { int OUT_MAX = out.remaining(); if (bytesSent <= OUT_MAX) { out.put(in); } else { final byte[] bytes = in.array(); int off = in.arrayOffset() + in.position(); int len = bytesSent; while (len > 0) { int bytesThisTime = len; if (bytesThisTime > OUT_MAX) { bytesThisTime = OUT_MAX; } out.put(bytes, off, bytesThisTime); off += bytesThisTime; len -= bytesThisTime; flushBuffer(sc, out); OUT_MAX = out.remaining(); } in.position(in.limit()); } } this.size -= bytesSent; }
/** * sends the data from "in" by writing it to "sc" through "out" (out is used * to chunk to data and is probably a direct memory buffer). */
sends the data from "in" by writing it to "sc" through "out" (out is used to chunk to data and is probably a direct memory buffer)
sendChunkTo
{ "repo_name": "sshcherbakov/incubator-geode", "path": "gemfire-core/src/main/java/com/gemstone/gemfire/internal/HeapDataOutputStream.java", "license": "apache-2.0", "size": 43910 }
[ "java.io.IOException", "java.nio.ByteBuffer", "java.nio.channels.SocketChannel" ]
import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.SocketChannel;
import java.io.*; import java.nio.*; import java.nio.channels.*;
[ "java.io", "java.nio" ]
java.io; java.nio;
1,761,181
public static CutsInfo calculateXCuts(ExporterNature nature, List pages, int startPageIndex, int endPageIndex, int width, int offsetX) { CutsInfo xCuts = new CutsInfo(); for (int pageIndex = startPageIndex; pageIndex <= endPageIndex; pageIndex++) { JRPrintPage page = (JRPrintPage) pages.get(pageIndex); addXCuts(nature, page.getElements(), offsetX, xCuts); } // add a cut at the page width if no element goes beyond the width int lastCut = xCuts.getLastCut(); if (lastCut < width) { xCuts.addCut(width); } return xCuts; }
static CutsInfo function(ExporterNature nature, List pages, int startPageIndex, int endPageIndex, int width, int offsetX) { CutsInfo xCuts = new CutsInfo(); for (int pageIndex = startPageIndex; pageIndex <= endPageIndex; pageIndex++) { JRPrintPage page = (JRPrintPage) pages.get(pageIndex); addXCuts(nature, page.getElements(), offsetX, xCuts); } int lastCut = xCuts.getLastCut(); if (lastCut < width) { xCuts.addCut(width); } return xCuts; }
/** * This static method calculates all the X cuts for a list of pages. * * @param pages * The list of pages. * @param startPageIndex * The first page to consider. * @param endPageIndex * The last page to consider. * @param width * The page width * @param offsetX * horizontal element position offset */
This static method calculates all the X cuts for a list of pages
calculateXCuts
{ "repo_name": "delafer/j7project", "path": "jasper352/csb-jasperreport-dep/src/net/sf/jasperreports/engine/export/JRGridLayout.java", "license": "gpl-2.0", "size": 23064 }
[ "java.util.List", "net.sf.jasperreports.engine.JRPrintPage" ]
import java.util.List; import net.sf.jasperreports.engine.JRPrintPage;
import java.util.*; import net.sf.jasperreports.engine.*;
[ "java.util", "net.sf.jasperreports" ]
java.util; net.sf.jasperreports;
605,945
private void untagGivenTask(Task taskToUntag, String tag) throws TaskTagException { if (tag.isEmpty()) { untagTaskAll(taskToUntag); } else { taskToUntag.deleteTag(tag); if (tags.get(tag.toLowerCase()).size() == 1) { tags.remove(tag.toLowerCase()); } else { List<Task> tagTaskList = tags.remove(tag.toLowerCase()); tagTaskList.remove(taskToUntag); tags.put(tag.toLowerCase(), tagTaskList); } } }
void function(Task taskToUntag, String tag) throws TaskTagException { if (tag.isEmpty()) { untagTaskAll(taskToUntag); } else { taskToUntag.deleteTag(tag); if (tags.get(tag.toLowerCase()).size() == 1) { tags.remove(tag.toLowerCase()); } else { List<Task> tagTaskList = tags.remove(tag.toLowerCase()); tagTaskList.remove(taskToUntag); tags.put(tag.toLowerCase(), tagTaskList); } } }
/** * This method removes a tag from the task. * If no tag is given, all tags from the given task are removed. * * @param taskToUntag The task that is to be untagged. * @param tag The tag to be removed from the task. * @throws TaskTagException if the task does not have the tag to remove. */
This method removes a tag from the task. If no tag is given, all tags from the given task are removed
untagGivenTask
{ "repo_name": "CS2103TAug2014-W15-4J/main", "path": "src/model/TaskList.java", "license": "mit", "size": 62475 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
758,624
public void test0084() throws JavaScriptModelException { IJavaScriptUnit sourceUnit = getCompilationUnit("Converter" , "src", "test0084", "Test.js"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$ char[] source = sourceUnit.getSource().toCharArray(); ASTNode result = runConversion(sourceUnit, false); ASTNode node = getASTNode((JavaScriptUnit) result, 0, 0, 0); assertNotNull("Expression should not be null", node); //$NON-NLS-1$ ForStatement forStatement = this.ast.newForStatement(); VariableDeclarationFragment variableDeclarationFragment = this.ast.newVariableDeclarationFragment(); variableDeclarationFragment.setName(this.ast.newSimpleName("i")); //$NON-NLS-1$ variableDeclarationFragment.setInitializer(this.ast.newNumberLiteral("0"));//$NON-NLS-1$ VariableDeclarationExpression variableDeclarationExpression = this.ast.newVariableDeclarationExpression(variableDeclarationFragment); variableDeclarationExpression.setModifiers(Modifier.NONE); // variableDeclarationExpression.setType(this.ast.newPrimitiveType(PrimitiveType.INT)); forStatement.initializers().add(variableDeclarationExpression); PostfixExpression postfixExpression = this.ast.newPostfixExpression(); postfixExpression.setOperand(this.ast.newSimpleName("i"));//$NON-NLS-1$ postfixExpression.setOperator(PostfixExpression.Operator.INCREMENT); forStatement.updaters().add(postfixExpression); InfixExpression infixExpression = this.ast.newInfixExpression(); infixExpression.setLeftOperand(this.ast.newSimpleName("i")); //$NON-NLS-1$ infixExpression.setOperator(InfixExpression.Operator.LESS); infixExpression.setRightOperand(this.ast.newNumberLiteral("10")); //$NON-NLS-1$ forStatement.setExpression(infixExpression); forStatement.setBody(this.ast.newEmptyStatement()); assertTrue("Both AST trees should be identical", forStatement.subtreeMatch(new ASTMatcher(), node)); //$NON-NLS-1$ checkSourceRange(node, "for (var i = 0; i < 10; i++);", source); //$NON-NLS-1$ }
void function() throws JavaScriptModelException { IJavaScriptUnit sourceUnit = getCompilationUnit(STR , "src", STR, STR); char[] source = sourceUnit.getSource().toCharArray(); ASTNode result = runConversion(sourceUnit, false); ASTNode node = getASTNode((JavaScriptUnit) result, 0, 0, 0); assertNotNull(STR, node); ForStatement forStatement = this.ast.newForStatement(); VariableDeclarationFragment variableDeclarationFragment = this.ast.newVariableDeclarationFragment(); variableDeclarationFragment.setName(this.ast.newSimpleName("i")); variableDeclarationFragment.setInitializer(this.ast.newNumberLiteral("0")); VariableDeclarationExpression variableDeclarationExpression = this.ast.newVariableDeclarationExpression(variableDeclarationFragment); variableDeclarationExpression.setModifiers(Modifier.NONE); forStatement.initializers().add(variableDeclarationExpression); PostfixExpression postfixExpression = this.ast.newPostfixExpression(); postfixExpression.setOperand(this.ast.newSimpleName("i")); postfixExpression.setOperator(PostfixExpression.Operator.INCREMENT); forStatement.updaters().add(postfixExpression); InfixExpression infixExpression = this.ast.newInfixExpression(); infixExpression.setLeftOperand(this.ast.newSimpleName("i")); infixExpression.setOperator(InfixExpression.Operator.LESS); infixExpression.setRightOperand(this.ast.newNumberLiteral("10")); forStatement.setExpression(infixExpression); forStatement.setBody(this.ast.newEmptyStatement()); assertTrue(STR, forStatement.subtreeMatch(new ASTMatcher(), node)); checkSourceRange(node, STR, source); }
/** * ForStatement ==> ForStatement * @deprecated using deprecated code */
ForStatement ==> ForStatement
test0084
{ "repo_name": "echoes-tech/eclipse.jsdt.core", "path": "org.eclipse.wst.jsdt.core.tests.model/src/org/eclipse/wst/jsdt/core/tests/dom/ASTConverterTest.java", "license": "epl-1.0", "size": 521652 }
[ "org.eclipse.wst.jsdt.core.IJavaScriptUnit", "org.eclipse.wst.jsdt.core.JavaScriptModelException", "org.eclipse.wst.jsdt.core.dom.ASTMatcher", "org.eclipse.wst.jsdt.core.dom.ASTNode", "org.eclipse.wst.jsdt.core.dom.ForStatement", "org.eclipse.wst.jsdt.core.dom.InfixExpression", "org.eclipse.wst.jsdt.core.dom.JavaScriptUnit", "org.eclipse.wst.jsdt.core.dom.Modifier", "org.eclipse.wst.jsdt.core.dom.PostfixExpression", "org.eclipse.wst.jsdt.core.dom.VariableDeclarationExpression", "org.eclipse.wst.jsdt.core.dom.VariableDeclarationFragment" ]
import org.eclipse.wst.jsdt.core.IJavaScriptUnit; import org.eclipse.wst.jsdt.core.JavaScriptModelException; import org.eclipse.wst.jsdt.core.dom.ASTMatcher; import org.eclipse.wst.jsdt.core.dom.ASTNode; import org.eclipse.wst.jsdt.core.dom.ForStatement; import org.eclipse.wst.jsdt.core.dom.InfixExpression; import org.eclipse.wst.jsdt.core.dom.JavaScriptUnit; import org.eclipse.wst.jsdt.core.dom.Modifier; import org.eclipse.wst.jsdt.core.dom.PostfixExpression; import org.eclipse.wst.jsdt.core.dom.VariableDeclarationExpression; import org.eclipse.wst.jsdt.core.dom.VariableDeclarationFragment;
import org.eclipse.wst.jsdt.core.*; import org.eclipse.wst.jsdt.core.dom.*;
[ "org.eclipse.wst" ]
org.eclipse.wst;
2,129,309
// <editor-fold defaultstate="collapsed" desc="HttpServlet methods. Click on the + sign on the left to edit the code."> @Override protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { processRequest(request, response); }
void function(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { processRequest(request, response); }
/** * Handles the HTTP <code>GET</code> method. * * @param request servlet request * @param response servlet response * @throws ServletException if a servlet-specific error occurs * @throws IOException if an I/O error occurs */
Handles the HTTP <code>GET</code> method
doGet
{ "repo_name": "bragex/the-vigilantes", "path": "Is-202/Sprint_1/StudentProsjektV2/src/java/Servlet/SendEmail.java", "license": "mit", "size": 2583 }
[ "java.io.IOException", "javax.servlet.ServletException", "javax.servlet.http.HttpServletRequest", "javax.servlet.http.HttpServletResponse" ]
import java.io.IOException; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse;
import java.io.*; import javax.servlet.*; import javax.servlet.http.*;
[ "java.io", "javax.servlet" ]
java.io; javax.servlet;
2,629,950
@Test public void testGetHeadPacket_InPacket() throws Exception { PacketQueue queue = new InPacketQueue(); queue.enqueuePacket(new InPacket()); Response result = Whitebox.invokeMethod(target, "getHeadPacket", queue); assertThat(result.statusCode, is(Response.OK)); }
void function() throws Exception { PacketQueue queue = new InPacketQueue(); queue.enqueuePacket(new InPacket()); Response result = Whitebox.invokeMethod(target, STR, queue); assertThat(result.statusCode, is(Response.OK)); }
/** * Test method for {@link org.o3project.odenos.core.component.network.Network#getHeadPacket(PacketQueue)}. * * @throws Exception */
Test method for <code>org.o3project.odenos.core.component.network.Network#getHeadPacket(PacketQueue)</code>
testGetHeadPacket_InPacket
{ "repo_name": "y-higuchi/odenos", "path": "src/test/java/org/o3project/odenos/core/component/network/NetworkTest.java", "license": "apache-2.0", "size": 116642 }
[ "org.hamcrest.CoreMatchers", "org.junit.Assert", "org.o3project.odenos.core.component.network.packet.InPacket", "org.o3project.odenos.core.component.network.packet.InPacketQueue", "org.o3project.odenos.core.component.network.packet.PacketQueue", "org.o3project.odenos.remoteobject.message.Response", "org.powermock.reflect.Whitebox" ]
import org.hamcrest.CoreMatchers; import org.junit.Assert; import org.o3project.odenos.core.component.network.packet.InPacket; import org.o3project.odenos.core.component.network.packet.InPacketQueue; import org.o3project.odenos.core.component.network.packet.PacketQueue; import org.o3project.odenos.remoteobject.message.Response; import org.powermock.reflect.Whitebox;
import org.hamcrest.*; import org.junit.*; import org.o3project.odenos.core.component.network.packet.*; import org.o3project.odenos.remoteobject.message.*; import org.powermock.reflect.*;
[ "org.hamcrest", "org.junit", "org.o3project.odenos", "org.powermock.reflect" ]
org.hamcrest; org.junit; org.o3project.odenos; org.powermock.reflect;
1,632,220
@PreAuthorize ("isAuthenticated ()") @Transactional (readOnly=false, propagation=Propagation.REQUIRED) @Caching (evict = { @CacheEvict(value = "user", key = "#user.getUUID ()"), @CacheEvict(value = "userByName", key = "#user.username.toLowerCase()"), @CacheEvict(value = "json_user", key = "#user")}) public void selfUpdateUser (User user) throws RootNotModifiableException, RequiredFieldMissingException, EmailNotSentException { User u = userDao.read (user.getUUID ()); checkRoot (u); u.setEmail (user.getEmail ()); u.setFirstname (user.getFirstname ()); u.setLastname (user.getLastname ()); u.setAddress (user.getAddress ()); u.setPhone (user.getPhone ()); u.setCountry (user.getCountry ()); u.setUsage (user.getUsage ()); u.setSubUsage (user.getSubUsage ()); u.setDomain (user.getDomain ()); u.setSubDomain (user.getSubDomain ()); checkRequiredFields (u); userDao.update (u); }
@PreAuthorize (STR) @Transactional (readOnly=false, propagation=Propagation.REQUIRED) @Caching (evict = { @CacheEvict(value = "user", key = STR), @CacheEvict(value = STR, key = STR), @CacheEvict(value = STR, key = "#user")}) void function (User user) throws RootNotModifiableException, RequiredFieldMissingException, EmailNotSentException { User u = userDao.read (user.getUUID ()); checkRoot (u); u.setEmail (user.getEmail ()); u.setFirstname (user.getFirstname ()); u.setLastname (user.getLastname ()); u.setAddress (user.getAddress ()); u.setPhone (user.getPhone ()); u.setCountry (user.getCountry ()); u.setUsage (user.getUsage ()); u.setSubUsage (user.getSubUsage ()); u.setDomain (user.getDomain ()); u.setSubDomain (user.getSubDomain ()); checkRequiredFields (u); userDao.update (u); }
/** * Update given User, after checking required fields. * * @param user * @throws RootNotModifiableException * @throws RequiredFieldMissingException */
Update given User, after checking required fields
selfUpdateUser
{ "repo_name": "SentinelDataHub/DataHubSystem", "path": "core/src/main/java/fr/gael/dhus/service/UserService.java", "license": "agpl-3.0", "size": 35460 }
[ "fr.gael.dhus.database.object.User", "fr.gael.dhus.service.exception.EmailNotSentException", "fr.gael.dhus.service.exception.RequiredFieldMissingException", "fr.gael.dhus.service.exception.RootNotModifiableException", "org.springframework.cache.annotation.CacheEvict", "org.springframework.cache.annotation.Caching", "org.springframework.security.access.prepost.PreAuthorize", "org.springframework.transaction.annotation.Propagation", "org.springframework.transaction.annotation.Transactional" ]
import fr.gael.dhus.database.object.User; import fr.gael.dhus.service.exception.EmailNotSentException; import fr.gael.dhus.service.exception.RequiredFieldMissingException; import fr.gael.dhus.service.exception.RootNotModifiableException; import org.springframework.cache.annotation.CacheEvict; import org.springframework.cache.annotation.Caching; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional;
import fr.gael.dhus.database.object.*; import fr.gael.dhus.service.exception.*; import org.springframework.cache.annotation.*; import org.springframework.security.access.prepost.*; import org.springframework.transaction.annotation.*;
[ "fr.gael.dhus", "org.springframework.cache", "org.springframework.security", "org.springframework.transaction" ]
fr.gael.dhus; org.springframework.cache; org.springframework.security; org.springframework.transaction;
2,909,818
public int getHierarchicalColumn() { return convertColumnIndexToView(((TreeTableModel) renderer.getModel()).getHierarchicalColumn()); } /** * {@inheritDoc}
int function() { return convertColumnIndexToView(((TreeTableModel) renderer.getModel()).getHierarchicalColumn()); } /** * {@inheritDoc}
/** * Returns the index of the hierarchical column. This is the column that is * displayed as the tree. * * @return the index of the hierarchical column, -1 if there is * no hierarchical column * */
Returns the index of the hierarchical column. This is the column that is displayed as the tree
getHierarchicalColumn
{ "repo_name": "syncer/swingx", "path": "swingx-core/src/main/java/org/jdesktop/swingx/JXTreeTable.java", "license": "lgpl-2.1", "size": 132592 }
[ "org.jdesktop.swingx.treetable.TreeTableModel" ]
import org.jdesktop.swingx.treetable.TreeTableModel;
import org.jdesktop.swingx.treetable.*;
[ "org.jdesktop.swingx" ]
org.jdesktop.swingx;
2,617,240
public void drawResultBitmap(Bitmap barcode) { resultBitmap = barcode; invalidate(); }
void function(Bitmap barcode) { resultBitmap = barcode; invalidate(); }
/** * Draw a bitmap with the result points highlighted instead of the live * scanning display. * * @param barcode An image of the decoded barcode. */
Draw a bitmap with the result points highlighted instead of the live scanning display
drawResultBitmap
{ "repo_name": "beanu/smart-farmer-android", "path": "l2_zxing/src/main/java/com/beanu/l2_zxing/view/ViewfinderView.java", "license": "apache-2.0", "size": 6798 }
[ "android.graphics.Bitmap" ]
import android.graphics.Bitmap;
import android.graphics.*;
[ "android.graphics" ]
android.graphics;
1,955,090
public void testEqualConditionInRangeJunction_AND() { LogWriter logger = CacheUtils.getLogger(); try { CompiledComparison cv[] = null; ExecutionContext context = new QueryExecutionContext(null, CacheUtils .getCache()); this.bindIteratorsAndCreateIndex(context); // Case 1 : a = 7 and a !=4 and a != 5 and a != 8 cv = new CompiledComparison[4]; cv[0] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(7)), OQLLexerTokenTypes.TOK_EQ); cv[1] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(4)), OQLLexerTokenTypes.TOK_NE); cv[2] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(5)), OQLLexerTokenTypes.TOK_NE); cv[3] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(8)), OQLLexerTokenTypes.TOK_NE); OrganizedOperands oo = this.oganizedOperandsSingleRangeJunctionCreation( OQLLexerTokenTypes.LITERAL_and, cv, context); assertTrue( "Filter Openad of OrganizedOperand is not of type RangeJunction", oo.filterOperand instanceof RangeJunction); RangeJunction rj = (RangeJunction)oo.filterOperand; OrganizedOperands oo1 = rj.organizeOperands(context); assertEquals(oo1.filterOperand, cv[0]); // Case 2 : a > 7 and a !=4 and a != 5 and a = 8 cv = new CompiledComparison[4]; cv[0] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(7)), OQLLexerTokenTypes.TOK_GT); cv[1] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(4)), OQLLexerTokenTypes.TOK_NE); cv[2] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(5)), OQLLexerTokenTypes.TOK_NE); cv[3] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(8)), OQLLexerTokenTypes.TOK_EQ); oo = this.oganizedOperandsSingleRangeJunctionCreation( OQLLexerTokenTypes.LITERAL_and, cv, context); assertTrue( "Filter Openad of OrganizedOperand is not of type RangeJunction", oo.filterOperand instanceof RangeJunction); rj = (RangeJunction)oo.filterOperand; oo1 = rj.organizeOperands(context); assertEquals(oo1.filterOperand, cv[3]); // Case3 : a < 7 and a !=4 and a =8 and a != 5 cv = new CompiledComparison[4]; cv[0] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(7)), OQLLexerTokenTypes.TOK_LT); cv[1] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(4)), OQLLexerTokenTypes.TOK_NE); cv[2] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(8)), OQLLexerTokenTypes.TOK_EQ); cv[3] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(5)), OQLLexerTokenTypes.TOK_NE); oo = this.oganizedOperandsSingleRangeJunctionCreation( OQLLexerTokenTypes.LITERAL_and, cv, context); assertTrue( "Filter Openad of OrganizedOperand is not of type RangeJunction", oo.filterOperand instanceof RangeJunction); rj = (RangeJunction)oo.filterOperand; oo1 = rj.organizeOperands(context); assertTrue(oo1.filterOperand instanceof CompiledLiteral); assertFalse(((Boolean)((CompiledLiteral)oo1.filterOperand) .evaluate(context)).booleanValue()); // Case4 : a > 7 and a !=4 and a !=8 and a = 14 cv = new CompiledComparison[4]; cv[0] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), 
new CompiledLiteral( new Integer(7)), OQLLexerTokenTypes.TOK_GT); cv[1] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(4)), OQLLexerTokenTypes.TOK_NE); cv[2] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(8)), OQLLexerTokenTypes.TOK_NE); cv[3] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(14)), OQLLexerTokenTypes.TOK_EQ); oo = this.oganizedOperandsSingleRangeJunctionCreation( OQLLexerTokenTypes.LITERAL_and, cv, context); assertTrue( "Filter Openad of OrganizedOperand is not of type RangeJunction", oo.filterOperand instanceof RangeJunction); rj = (RangeJunction)oo.filterOperand; oo1 = rj.organizeOperands(context); assertTrue(oo1.filterOperand == cv[3]); // Case5 : a <= 14 and a !=4 and a !=8 and a = 14 cv = new CompiledComparison[4]; cv[0] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(14)), OQLLexerTokenTypes.TOK_LE); cv[1] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(4)), OQLLexerTokenTypes.TOK_NE); cv[2] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(8)), OQLLexerTokenTypes.TOK_NE); cv[3] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(14)), OQLLexerTokenTypes.TOK_EQ); oo = this.oganizedOperandsSingleRangeJunctionCreation( OQLLexerTokenTypes.LITERAL_and, cv, context); assertTrue( "Filter Openad of OrganizedOperand is not of type RangeJunction", oo.filterOperand instanceof RangeJunction); rj = (RangeJunction)oo.filterOperand; oo1 = rj.organizeOperands(context); assertTrue(oo1.filterOperand == cv[3]); // Case6 : a >= 14 and a !=4 and a !=8 and a = 14 cv = new CompiledComparison[4]; cv[0] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(14)), OQLLexerTokenTypes.TOK_GE); cv[1] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(4)), OQLLexerTokenTypes.TOK_NE); cv[2] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(8)), OQLLexerTokenTypes.TOK_NE); cv[3] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(14)), OQLLexerTokenTypes.TOK_EQ); oo = this.oganizedOperandsSingleRangeJunctionCreation( OQLLexerTokenTypes.LITERAL_and, cv, context); assertTrue( "Filter Openad of OrganizedOperand is not of type RangeJunction", oo.filterOperand instanceof RangeJunction); rj = (RangeJunction)oo.filterOperand; oo1 = rj.organizeOperands(context); assertTrue(oo1.filterOperand == cv[3]); // Case7 : a >= 14 and a !=4 and a =9 and a = 14 cv = new CompiledComparison[4]; cv[0] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(14)), OQLLexerTokenTypes.TOK_GE); cv[1] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(4)), OQLLexerTokenTypes.TOK_NE); cv[2] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(9)), OQLLexerTokenTypes.TOK_EQ); cv[3] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(14)), OQLLexerTokenTypes.TOK_EQ); oo = this.oganizedOperandsSingleRangeJunctionCreation( 
OQLLexerTokenTypes.LITERAL_and, cv, context); assertTrue( "Filter Openad of OrganizedOperand is not of type RangeJunction", oo.filterOperand instanceof RangeJunction); rj = (RangeJunction)oo.filterOperand; oo1 = rj.organizeOperands(context); assertTrue(oo1.filterOperand instanceof CompiledLiteral); assertFalse(((Boolean)((CompiledLiteral)oo1.filterOperand) .evaluate(context)).booleanValue()); // Case8 : a > 7 and a !=4 and a !=8 and a = 14 and a <18 cv = new CompiledComparison[5]; cv[0] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(7)), OQLLexerTokenTypes.TOK_GT); cv[1] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(4)), OQLLexerTokenTypes.TOK_NE); cv[2] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(8)), OQLLexerTokenTypes.TOK_NE); cv[3] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(14)), OQLLexerTokenTypes.TOK_EQ); cv[4] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(18)), OQLLexerTokenTypes.TOK_LT); oo = this.oganizedOperandsSingleRangeJunctionCreation( OQLLexerTokenTypes.LITERAL_and, cv, context); assertTrue( "Filter Openad of OrganizedOperand is not of type RangeJunction", oo.filterOperand instanceof RangeJunction); rj = (RangeJunction)oo.filterOperand; oo1 = rj.organizeOperands(context); assertTrue(oo1.filterOperand == cv[3]); // case9:a > 7 and a !=4 and a !=8 and a = 14 and a <14 cv = new CompiledComparison[5]; cv[0] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(14)), OQLLexerTokenTypes.TOK_GT); cv[1] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(4)), OQLLexerTokenTypes.TOK_NE); cv[2] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(9)), OQLLexerTokenTypes.TOK_EQ); cv[3] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(14)), OQLLexerTokenTypes.TOK_EQ); cv[4] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(14)), OQLLexerTokenTypes.TOK_LT); oo = this.oganizedOperandsSingleRangeJunctionCreation( OQLLexerTokenTypes.LITERAL_and, cv, context); assertTrue( "Filter Openad of OrganizedOperand is not of type RangeJunction", oo.filterOperand instanceof RangeJunction); rj = (RangeJunction)oo.filterOperand; oo1 = rj.organizeOperands(context); assertTrue(oo1.filterOperand instanceof CompiledLiteral); assertFalse(((Boolean)((CompiledLiteral)oo1.filterOperand) .evaluate(context)).booleanValue()); } catch (Exception e) { logger.error(e.toString()); } }
void function() { LogWriter logger = CacheUtils.getLogger(); try { CompiledComparison cv[] = null; ExecutionContext context = new QueryExecutionContext(null, CacheUtils .getCache()); this.bindIteratorsAndCreateIndex(context); cv = new CompiledComparison[4]; cv[0] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(7)), OQLLexerTokenTypes.TOK_EQ); cv[1] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(4)), OQLLexerTokenTypes.TOK_NE); cv[2] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(5)), OQLLexerTokenTypes.TOK_NE); cv[3] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(8)), OQLLexerTokenTypes.TOK_NE); OrganizedOperands oo = this.oganizedOperandsSingleRangeJunctionCreation( OQLLexerTokenTypes.LITERAL_and, cv, context); assertTrue( STR, oo.filterOperand instanceof RangeJunction); RangeJunction rj = (RangeJunction)oo.filterOperand; OrganizedOperands oo1 = rj.organizeOperands(context); assertEquals(oo1.filterOperand, cv[0]); cv = new CompiledComparison[4]; cv[0] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(7)), OQLLexerTokenTypes.TOK_GT); cv[1] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(4)), OQLLexerTokenTypes.TOK_NE); cv[2] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(5)), OQLLexerTokenTypes.TOK_NE); cv[3] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(8)), OQLLexerTokenTypes.TOK_EQ); oo = this.oganizedOperandsSingleRangeJunctionCreation( OQLLexerTokenTypes.LITERAL_and, cv, context); assertTrue( STR, oo.filterOperand instanceof RangeJunction); rj = (RangeJunction)oo.filterOperand; oo1 = rj.organizeOperands(context); assertEquals(oo1.filterOperand, cv[3]); cv = new CompiledComparison[4]; cv[0] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(7)), OQLLexerTokenTypes.TOK_LT); cv[1] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(4)), OQLLexerTokenTypes.TOK_NE); cv[2] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(8)), OQLLexerTokenTypes.TOK_EQ); cv[3] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(5)), OQLLexerTokenTypes.TOK_NE); oo = this.oganizedOperandsSingleRangeJunctionCreation( OQLLexerTokenTypes.LITERAL_and, cv, context); assertTrue( STR, oo.filterOperand instanceof RangeJunction); rj = (RangeJunction)oo.filterOperand; oo1 = rj.organizeOperands(context); assertTrue(oo1.filterOperand instanceof CompiledLiteral); assertFalse(((Boolean)((CompiledLiteral)oo1.filterOperand) .evaluate(context)).booleanValue()); cv = new CompiledComparison[4]; cv[0] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(7)), OQLLexerTokenTypes.TOK_GT); cv[1] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(4)), OQLLexerTokenTypes.TOK_NE); cv[2] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(8)), OQLLexerTokenTypes.TOK_NE); cv[3] = new CompiledComparison( new CompiledPath(new 
CompiledID("p"), "ID"), new CompiledLiteral( new Integer(14)), OQLLexerTokenTypes.TOK_EQ); oo = this.oganizedOperandsSingleRangeJunctionCreation( OQLLexerTokenTypes.LITERAL_and, cv, context); assertTrue( STR, oo.filterOperand instanceof RangeJunction); rj = (RangeJunction)oo.filterOperand; oo1 = rj.organizeOperands(context); assertTrue(oo1.filterOperand == cv[3]); cv = new CompiledComparison[4]; cv[0] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(14)), OQLLexerTokenTypes.TOK_LE); cv[1] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(4)), OQLLexerTokenTypes.TOK_NE); cv[2] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(8)), OQLLexerTokenTypes.TOK_NE); cv[3] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(14)), OQLLexerTokenTypes.TOK_EQ); oo = this.oganizedOperandsSingleRangeJunctionCreation( OQLLexerTokenTypes.LITERAL_and, cv, context); assertTrue( STR, oo.filterOperand instanceof RangeJunction); rj = (RangeJunction)oo.filterOperand; oo1 = rj.organizeOperands(context); assertTrue(oo1.filterOperand == cv[3]); cv = new CompiledComparison[4]; cv[0] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(14)), OQLLexerTokenTypes.TOK_GE); cv[1] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(4)), OQLLexerTokenTypes.TOK_NE); cv[2] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(8)), OQLLexerTokenTypes.TOK_NE); cv[3] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(14)), OQLLexerTokenTypes.TOK_EQ); oo = this.oganizedOperandsSingleRangeJunctionCreation( OQLLexerTokenTypes.LITERAL_and, cv, context); assertTrue( STR, oo.filterOperand instanceof RangeJunction); rj = (RangeJunction)oo.filterOperand; oo1 = rj.organizeOperands(context); assertTrue(oo1.filterOperand == cv[3]); cv = new CompiledComparison[4]; cv[0] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(14)), OQLLexerTokenTypes.TOK_GE); cv[1] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(4)), OQLLexerTokenTypes.TOK_NE); cv[2] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(9)), OQLLexerTokenTypes.TOK_EQ); cv[3] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(14)), OQLLexerTokenTypes.TOK_EQ); oo = this.oganizedOperandsSingleRangeJunctionCreation( OQLLexerTokenTypes.LITERAL_and, cv, context); assertTrue( STR, oo.filterOperand instanceof RangeJunction); rj = (RangeJunction)oo.filterOperand; oo1 = rj.organizeOperands(context); assertTrue(oo1.filterOperand instanceof CompiledLiteral); assertFalse(((Boolean)((CompiledLiteral)oo1.filterOperand) .evaluate(context)).booleanValue()); cv = new CompiledComparison[5]; cv[0] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(7)), OQLLexerTokenTypes.TOK_GT); cv[1] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(4)), OQLLexerTokenTypes.TOK_NE); cv[2] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new 
Integer(8)), OQLLexerTokenTypes.TOK_NE); cv[3] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(14)), OQLLexerTokenTypes.TOK_EQ); cv[4] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(18)), OQLLexerTokenTypes.TOK_LT); oo = this.oganizedOperandsSingleRangeJunctionCreation( OQLLexerTokenTypes.LITERAL_and, cv, context); assertTrue( STR, oo.filterOperand instanceof RangeJunction); rj = (RangeJunction)oo.filterOperand; oo1 = rj.organizeOperands(context); assertTrue(oo1.filterOperand == cv[3]); cv = new CompiledComparison[5]; cv[0] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(14)), OQLLexerTokenTypes.TOK_GT); cv[1] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(4)), OQLLexerTokenTypes.TOK_NE); cv[2] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(9)), OQLLexerTokenTypes.TOK_EQ); cv[3] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(14)), OQLLexerTokenTypes.TOK_EQ); cv[4] = new CompiledComparison( new CompiledPath(new CompiledID("p"), "ID"), new CompiledLiteral( new Integer(14)), OQLLexerTokenTypes.TOK_LT); oo = this.oganizedOperandsSingleRangeJunctionCreation( OQLLexerTokenTypes.LITERAL_and, cv, context); assertTrue( STR, oo.filterOperand instanceof RangeJunction); rj = (RangeJunction)oo.filterOperand; oo1 = rj.organizeOperands(context); assertTrue(oo1.filterOperand instanceof CompiledLiteral); assertFalse(((Boolean)((CompiledLiteral)oo1.filterOperand) .evaluate(context)).booleanValue()); } catch (Exception e) { logger.error(e.toString()); } }
/** * Test presence of equal condition in a AND RangeJunction An equal condition * if accompanied by any other condition in a RangeJunction ( except not null) * should always return the equal condition or a boolean false indicating * empty resultset */
Test presence of equal condition in a AND RangeJunction An equal condition if accompanied by any other condition in a RangeJunction ( except not null) should always return the equal condition or a boolean false indicating empty resultset
testEqualConditionInRangeJunction_AND
{ "repo_name": "gemxd/gemfirexd-oss", "path": "tests/core/src/main/java/com/gemstone/gemfire/cache/query/internal/CompiledJunctionInternalsTest.java", "license": "apache-2.0", "size": 169431 }
[ "com.gemstone.gemfire.LogWriter", "com.gemstone.gemfire.cache.query.CacheUtils", "com.gemstone.gemfire.cache.query.internal.parse.OQLLexerTokenTypes" ]
import com.gemstone.gemfire.LogWriter; import com.gemstone.gemfire.cache.query.CacheUtils; import com.gemstone.gemfire.cache.query.internal.parse.OQLLexerTokenTypes;
import com.gemstone.gemfire.*; import com.gemstone.gemfire.cache.query.*; import com.gemstone.gemfire.cache.query.internal.parse.*;
[ "com.gemstone.gemfire" ]
com.gemstone.gemfire;
834,764
@Override public void setFree() { changeState(NodeState.FREE); this.owner = null; } /** * Changes the state of this node to {@link NodeState#CONFIGURING}
void function() { changeState(NodeState.FREE); this.owner = null; } /** * Changes the state of this node to {@link NodeState#CONFIGURING}
/** * Changes the state of this node to {@link NodeState#FREE}. */
Changes the state of this node to <code>NodeState#FREE</code>
setFree
{ "repo_name": "zeineb/scheduling", "path": "rm/rm-server/src/main/java/org/ow2/proactive/resourcemanager/rmnode/RMNodeImpl.java", "license": "agpl-3.0", "size": 14631 }
[ "org.ow2.proactive.resourcemanager.common.NodeState" ]
import org.ow2.proactive.resourcemanager.common.NodeState;
import org.ow2.proactive.resourcemanager.common.*;
[ "org.ow2.proactive" ]
org.ow2.proactive;
58,556
void addConnectionFactories(ConnectionFactoryConfigurer connectionFactoryConfigurer, Environment environment);
void addConnectionFactories(ConnectionFactoryConfigurer connectionFactoryConfigurer, Environment environment);
/** * Callback method to allow configuration of {@link ConnectionFactory}s. * @param connectionFactoryConfigurer A configurer for adding {@link ConnectionFactory} instances. * @param environment The Spring environment, useful for fetching application credentials needed to create a {@link ConnectionFactory} instance. */
Callback method to allow configuration of <code>ConnectionFactory</code>s
addConnectionFactories
{ "repo_name": "domix/spring-social", "path": "spring-social-config/src/main/java/org/springframework/social/config/annotation/SocialConfigurer.java", "license": "apache-2.0", "size": 2490 }
[ "org.springframework.core.env.Environment" ]
import org.springframework.core.env.Environment;
import org.springframework.core.env.*;
[ "org.springframework.core" ]
org.springframework.core;
995,140
public List<ITerm> getAttributes() { return Collections .unmodifiableList(_attributes); } //---------------------------------------- // Class Functions //---------------------------------------- public SQLProjection() { super(SQLType.Projection); }
List<ITerm> function() { return Collections .unmodifiableList(_attributes); } public SQLProjection() { super(SQLType.Projection); }
/** * Gets the projected Attributes. */
Gets the projected Attributes
getAttributes
{ "repo_name": "Project-PROPhEcy/PROPhEcy-Framework", "path": "src/main/java/com/prophecy/processing/input/sql/SQLProjection.java", "license": "mit", "size": 1575 }
[ "com.prophecy.processing.input.term.ITerm", "java.util.Collections", "java.util.List" ]
import com.prophecy.processing.input.term.ITerm; import java.util.Collections; import java.util.List;
import com.prophecy.processing.input.term.*; import java.util.*;
[ "com.prophecy.processing", "java.util" ]
com.prophecy.processing; java.util;
231,704
private void populateRMCore() { when(mockedCaller.checkPermission(any(Permission.class), any(String.class))).thenReturn(true); when(mockedSelectionManager.selectNodes(any(Criteria.class), any(Client.class))).thenReturn(new NodeSet()); nodeSources = new HashMap<String, NodeSource>(1); configureNodeSource(mockedNodeSource, "NODESOURCE-test"); nodeSources.put(mockedNodeSource.getName(), mockedNodeSource); // MockedRMNodeParameters(String url, boolean isFree, boolean isDown, boolean isLocked, NodeSource nodeSource, RMNode rmNode) configureRMNode(new MockedRMNodeParameters("mockedRemovableNode", true, true, false, mockedNodeSource, "NODESOURCE-test", mockedRemovableNode)); configureRMNode(new MockedRMNodeParameters("mockedUnremovableNode", false, true, false, mockedNodeSource, "NODESOURCE-test", mockedUnremovableNode)); configureRMNode(new MockedRMNodeParameters(RMDeployingNode.PROTOCOL_ID + "://removableNode", true, true, false, mockedNodeSource, "NODESOURCE-test", mockedRemovableNodeInDeploy)); configureRMNode(new MockedRMNodeParameters(RMDeployingNode.PROTOCOL_ID + "://unRemovableNode", false, false, false, mockedNodeSource, "NODESOURCE-test", mockedUnremovableNodeInDeploy)); configureRMNode(new MockedRMNodeParameters("mockedBusyNode", false, false, true, mockedNodeSource, "NODESOURCE-test", mockedBusyNode)); configureRMNode(new MockedRMNodeParameters("mockedFreeButLockedNode", true, false, true, mockedNodeSource, "NODESOURCE-test", mockedFreeButLockedNode)); HashMap<String, RMNode> nodes = new HashMap<>(6); nodes.put(mockedRemovableNodeInDeploy.getNodeName(), mockedRemovableNodeInDeploy); nodes.put(mockedUnremovableNodeInDeploy.getNodeName(), mockedUnremovableNodeInDeploy); nodes.put(mockedRemovableNode.getNodeName(), mockedRemovableNode); nodes.put(mockedUnremovableNode.getNodeName(), mockedUnremovableNode); nodes.put(mockedBusyNode.getNodeName(), mockedBusyNode); nodes.put(mockedFreeButLockedNode.getNodeName(), mockedFreeButLockedNode); ArrayList<RMNode> freeNodes = new ArrayList<>(3); freeNodes.add(mockedRemovableNodeInDeploy); freeNodes.add(mockedRemovableNode); freeNodes.add(mockedFreeButLockedNode); rmCore = new RMCore(nodeSources, new ArrayList<String>(), nodes, mockedCaller, mockedMonitoring, mockedSelectionManager, freeNodes, dbManager); rmCore = spy(rmCore); nodesLockRestorationManager = null;
void function() { when(mockedCaller.checkPermission(any(Permission.class), any(String.class))).thenReturn(true); when(mockedSelectionManager.selectNodes(any(Criteria.class), any(Client.class))).thenReturn(new NodeSet()); nodeSources = new HashMap<String, NodeSource>(1); configureNodeSource(mockedNodeSource, STR); nodeSources.put(mockedNodeSource.getName(), mockedNodeSource); configureRMNode(new MockedRMNodeParameters(STR, true, true, false, mockedNodeSource, STR, mockedRemovableNode)); configureRMNode(new MockedRMNodeParameters(STR, false, true, false, mockedNodeSource, STR, mockedUnremovableNode)); configureRMNode(new MockedRMNodeParameters(RMDeployingNode.PROTOCOL_ID + STR, true, true, false, mockedNodeSource, STR, mockedRemovableNodeInDeploy)); configureRMNode(new MockedRMNodeParameters(RMDeployingNode.PROTOCOL_ID + STR, false, false, false, mockedNodeSource, STR, mockedUnremovableNodeInDeploy)); configureRMNode(new MockedRMNodeParameters(STR, false, false, true, mockedNodeSource, STR, mockedBusyNode)); configureRMNode(new MockedRMNodeParameters(STR, true, false, true, mockedNodeSource, STR, mockedFreeButLockedNode)); HashMap<String, RMNode> nodes = new HashMap<>(6); nodes.put(mockedRemovableNodeInDeploy.getNodeName(), mockedRemovableNodeInDeploy); nodes.put(mockedUnremovableNodeInDeploy.getNodeName(), mockedUnremovableNodeInDeploy); nodes.put(mockedRemovableNode.getNodeName(), mockedRemovableNode); nodes.put(mockedUnremovableNode.getNodeName(), mockedUnremovableNode); nodes.put(mockedBusyNode.getNodeName(), mockedBusyNode); nodes.put(mockedFreeButLockedNode.getNodeName(), mockedFreeButLockedNode); ArrayList<RMNode> freeNodes = new ArrayList<>(3); freeNodes.add(mockedRemovableNodeInDeploy); freeNodes.add(mockedRemovableNode); freeNodes.add(mockedFreeButLockedNode); rmCore = new RMCore(nodeSources, new ArrayList<String>(), nodes, mockedCaller, mockedMonitoring, mockedSelectionManager, freeNodes, dbManager); rmCore = spy(rmCore); nodesLockRestorationManager = null;
/** * 6 nodes (same nodesource). */
6 nodes (same nodesource)
populateRMCore
{ "repo_name": "laurianed/scheduling", "path": "rm/rm-server/src/test/java/org/ow2/proactive/resourcemanager/core/RMCoreTest.java", "license": "agpl-3.0", "size": 38990 }
[ "java.security.Permission", "java.util.ArrayList", "java.util.HashMap", "org.mockito.Matchers", "org.mockito.Mockito", "org.ow2.proactive.resourcemanager.authentication.Client", "org.ow2.proactive.resourcemanager.nodesource.NodeSource", "org.ow2.proactive.resourcemanager.rmnode.RMDeployingNode", "org.ow2.proactive.resourcemanager.rmnode.RMNode", "org.ow2.proactive.utils.Criteria", "org.ow2.proactive.utils.NodeSet" ]
import java.security.Permission; import java.util.ArrayList; import java.util.HashMap; import org.mockito.Matchers; import org.mockito.Mockito; import org.ow2.proactive.resourcemanager.authentication.Client; import org.ow2.proactive.resourcemanager.nodesource.NodeSource; import org.ow2.proactive.resourcemanager.rmnode.RMDeployingNode; import org.ow2.proactive.resourcemanager.rmnode.RMNode; import org.ow2.proactive.utils.Criteria; import org.ow2.proactive.utils.NodeSet;
import java.security.*; import java.util.*; import org.mockito.*; import org.ow2.proactive.resourcemanager.authentication.*; import org.ow2.proactive.resourcemanager.nodesource.*; import org.ow2.proactive.resourcemanager.rmnode.*; import org.ow2.proactive.utils.*;
[ "java.security", "java.util", "org.mockito", "org.ow2.proactive" ]
java.security; java.util; org.mockito; org.ow2.proactive;
2,253,494
public EnrichClause<ProcessorDefinition<Type>> pollEnrichWith(@AsEndpointUri String resourceUri, long timeout, boolean aggregateOnException) { EnrichClause<ProcessorDefinition<Type>> clause = new EnrichClause<>(this); pollEnrich(resourceUri, timeout, clause, aggregateOnException); return clause; }
EnrichClause<ProcessorDefinition<Type>> function(@AsEndpointUri String resourceUri, long timeout, boolean aggregateOnException) { EnrichClause<ProcessorDefinition<Type>> clause = new EnrichClause<>(this); pollEnrich(resourceUri, timeout, clause, aggregateOnException); return clause; }
/** * The <a href="http://camel.apache.org/content-enricher.html">Content Enricher EIP</a> * enriches an exchange with additional data obtained from a <code>resourceUri</code> * and with an aggregation strategy created using a fluent builder using * a {@link org.apache.camel.PollingConsumer} to poll the endpoint. */
The Content Enricher EIP enriches an exchange with additional data obtained from a <code>resourceUri</code> and with an aggregation strategy created using a fluent builder using a <code>org.apache.camel.PollingConsumer</code> to poll the endpoint
pollEnrichWith
{ "repo_name": "davidkarlsen/camel", "path": "core/camel-core/src/main/java/org/apache/camel/model/ProcessorDefinition.java", "license": "apache-2.0", "size": 149529 }
[ "org.apache.camel.builder.EnrichClause", "org.apache.camel.spi.AsEndpointUri" ]
import org.apache.camel.builder.EnrichClause; import org.apache.camel.spi.AsEndpointUri;
import org.apache.camel.builder.*; import org.apache.camel.spi.*;
[ "org.apache.camel" ]
org.apache.camel;
1,640,845
public SonarMetric calculateTestCoverage(final Map<String, SonarMetric> mapOfMetrics) { SonarMetric linesToCover = mapOfMetrics.get(SonarMetricConverterService.LINES_TO_COVER_KEY); SonarMetric uncoveredLines = mapOfMetrics.get(SonarMetricConverterService.UNCOVERED_LINES_KEY); BigDecimal linesToCoverValue = new BigDecimal(linesToCover.getRawValue()); BigDecimal uncoveredLinesValue = new BigDecimal(uncoveredLines.getRawValue()); BigDecimal linesCoveredValue = linesToCoverValue.subtract(uncoveredLinesValue); BigDecimal linesCoveredDecimal; if(linesToCoverValue.equals(ZERO) || linesCoveredValue.equals(ZERO)) { linesCoveredDecimal = ZERO; } else { linesCoveredDecimal = linesCoveredValue.setScale(4) .divide(linesToCoverValue.setScale(4), RoundingMode.HALF_UP).setScale(4); } BigDecimal linesCoveredPercentage = linesCoveredDecimal.multiply(ONE_HUNDRED).setScale(2); return SonarMetric.builder().key(TEST_COVERAGE_KEY).name(TEST_COVERAGE_NAME) .value(linesCoveredPercentage).rawValue(linesCoveredPercentage.toString()).build(); }
SonarMetric function(final Map<String, SonarMetric> mapOfMetrics) { SonarMetric linesToCover = mapOfMetrics.get(SonarMetricConverterService.LINES_TO_COVER_KEY); SonarMetric uncoveredLines = mapOfMetrics.get(SonarMetricConverterService.UNCOVERED_LINES_KEY); BigDecimal linesToCoverValue = new BigDecimal(linesToCover.getRawValue()); BigDecimal uncoveredLinesValue = new BigDecimal(uncoveredLines.getRawValue()); BigDecimal linesCoveredValue = linesToCoverValue.subtract(uncoveredLinesValue); BigDecimal linesCoveredDecimal; if(linesToCoverValue.equals(ZERO) || linesCoveredValue.equals(ZERO)) { linesCoveredDecimal = ZERO; } else { linesCoveredDecimal = linesCoveredValue.setScale(4) .divide(linesToCoverValue.setScale(4), RoundingMode.HALF_UP).setScale(4); } BigDecimal linesCoveredPercentage = linesCoveredDecimal.multiply(ONE_HUNDRED).setScale(2); return SonarMetric.builder().key(TEST_COVERAGE_KEY).name(TEST_COVERAGE_NAME) .value(linesCoveredPercentage).rawValue(linesCoveredPercentage.toString()).build(); }
/** * Calculate the test coverage based on the passed in metrics. * * @param mapOfMetrics * @return */
Calculate the test coverage based on the passed in metrics
calculateTestCoverage
{ "repo_name": "JohnCannon87/Hammerhead-StatsCollector", "path": "StatsCollector/src/main/java/com/statscollector/neo/sonar/service/DerivedSonarMetricService.java", "license": "mit", "size": 7601 }
[ "com.statscollector.neo.sonar.model.SonarMetric", "com.statscollector.neo.sonar.service.metrics.SonarMetricConverterService", "java.math.BigDecimal", "java.math.RoundingMode", "java.util.Map" ]
import com.statscollector.neo.sonar.model.SonarMetric; import com.statscollector.neo.sonar.service.metrics.SonarMetricConverterService; import java.math.BigDecimal; import java.math.RoundingMode; import java.util.Map;
import com.statscollector.neo.sonar.model.*; import com.statscollector.neo.sonar.service.metrics.*; import java.math.*; import java.util.*;
[ "com.statscollector.neo", "java.math", "java.util" ]
com.statscollector.neo; java.math; java.util;
270,050
@Override public ByteBuffer getRoutingKey(ProtocolVersion protocolVersion, CodecRegistry codecRegistry) { for (RegularStatement statement : statements) { ByteBuffer routingKey = statement.getRoutingKey(protocolVersion, codecRegistry); if (routingKey != null) { return routingKey; } } return null; }
ByteBuffer function(ProtocolVersion protocolVersion, CodecRegistry codecRegistry) { for (RegularStatement statement : statements) { ByteBuffer routingKey = statement.getRoutingKey(protocolVersion, codecRegistry); if (routingKey != null) { return routingKey; } } return null; }
/** * Returns the first non-null routing key of the statements in this batch * or null otherwise. * * @return the routing key for this batch statement. */
Returns the first non-null routing key of the statements in this batch or null otherwise
getRoutingKey
{ "repo_name": "mebigfatguy/java-driver", "path": "driver-core/src/main/java/com/datastax/driver/core/querybuilder/Batch.java", "license": "apache-2.0", "size": 7536 }
[ "com.datastax.driver.core.CodecRegistry", "com.datastax.driver.core.ProtocolVersion", "com.datastax.driver.core.RegularStatement", "java.nio.ByteBuffer" ]
import com.datastax.driver.core.CodecRegistry; import com.datastax.driver.core.ProtocolVersion; import com.datastax.driver.core.RegularStatement; import java.nio.ByteBuffer;
import com.datastax.driver.core.*; import java.nio.*;
[ "com.datastax.driver", "java.nio" ]
com.datastax.driver; java.nio;
2,502,025
boolean removeMember(@Nonnull Tree groupTree, @Nonnull Tree memberTree) { if (writer.removeMember(groupTree, getContentID(memberTree))) { return true; } else { log.debug("Authorizable {} was not member of {}", memberTree.getName(), groupTree.getName()); return false; } }
boolean removeMember(@Nonnull Tree groupTree, @Nonnull Tree memberTree) { if (writer.removeMember(groupTree, getContentID(memberTree))) { return true; } else { log.debug(STR, memberTree.getName(), groupTree.getName()); return false; } }
/** * Removes the member from the given group. * * @param groupTree group to remove the member from * @param memberTree member to remove * @return {@code true} if the member was removed. */
Removes the member from the given group
removeMember
{ "repo_name": "meggermo/jackrabbit-oak", "path": "oak-core/src/main/java/org/apache/jackrabbit/oak/security/user/MembershipProvider.java", "license": "apache-2.0", "size": 20530 }
[ "javax.annotation.Nonnull", "org.apache.jackrabbit.oak.api.Tree" ]
import javax.annotation.Nonnull; import org.apache.jackrabbit.oak.api.Tree;
import javax.annotation.*; import org.apache.jackrabbit.oak.api.*;
[ "javax.annotation", "org.apache.jackrabbit" ]
javax.annotation; org.apache.jackrabbit;
1,535,317
@Test public void testCustomReplicationEndpoint() throws IOException, ReplicationException, InterruptedException, MQClientException, RemotingException, MQBrokerException { final DefaultMQPullConsumer consumer = new DefaultMQPullConsumer(CONSUMER_GROUP_NAME); try { createTestTable(); final Map<TableName, List<String>> tableCfs = new HashMap<>(); List<String> cfs = new ArrayList<>(); cfs.add(COLUMN_FAMILY); tableCfs.put(TABLE_NAME, cfs); addPeer(utility.getConfiguration(), PEER_NAME, tableCfs); // wait for new peer to be added Thread.sleep(500); final int numberOfRecords = 10; final Transaction inTransaction = insertData(numberOfRecords); // wait for data to be replicated Thread.sleep(500); consumer.setNamesrvAddr(NAMESERVER); consumer.setMessageModel(MessageModel.valueOf("BROADCASTING")); consumer.registerMessageQueueListener(ROCKETMQ_TOPIC, null); consumer.start(); int receiveNum = 0; String receiveMsg = null; Set<MessageQueue> queues = consumer.fetchSubscribeMessageQueues(ROCKETMQ_TOPIC); for (MessageQueue queue : queues) { long offset = getMessageQueueOffset(consumer, queue); PullResult pullResult = consumer.pull(queue, null, offset, batchSize); if (pullResult.getPullStatus() == PullStatus.FOUND) { for (MessageExt message : pullResult.getMsgFoundList()) { byte[] body = message.getBody(); receiveMsg = new String(body, "UTF-8"); // String[] receiveMsgKv = receiveMsg.split(","); // msgs.remove(receiveMsgKv[1]); logger.info("receive message : {}", receiveMsg); receiveNum++; } long nextBeginOffset = pullResult.getNextBeginOffset(); consumer.updateConsumeOffset(queue, offset); } } logger.info("receive message num={}", receiveNum); // wait for processQueueTable init Thread.sleep(1000); assertEquals(inTransaction.toJson(), receiveMsg); } finally { removePeer(); consumer.shutdown(); } }
void function() throws IOException, ReplicationException, InterruptedException, MQClientException, RemotingException, MQBrokerException { final DefaultMQPullConsumer consumer = new DefaultMQPullConsumer(CONSUMER_GROUP_NAME); try { createTestTable(); final Map<TableName, List<String>> tableCfs = new HashMap<>(); List<String> cfs = new ArrayList<>(); cfs.add(COLUMN_FAMILY); tableCfs.put(TABLE_NAME, cfs); addPeer(utility.getConfiguration(), PEER_NAME, tableCfs); Thread.sleep(500); final int numberOfRecords = 10; final Transaction inTransaction = insertData(numberOfRecords); Thread.sleep(500); consumer.setNamesrvAddr(NAMESERVER); consumer.setMessageModel(MessageModel.valueOf(STR)); consumer.registerMessageQueueListener(ROCKETMQ_TOPIC, null); consumer.start(); int receiveNum = 0; String receiveMsg = null; Set<MessageQueue> queues = consumer.fetchSubscribeMessageQueues(ROCKETMQ_TOPIC); for (MessageQueue queue : queues) { long offset = getMessageQueueOffset(consumer, queue); PullResult pullResult = consumer.pull(queue, null, offset, batchSize); if (pullResult.getPullStatus() == PullStatus.FOUND) { for (MessageExt message : pullResult.getMsgFoundList()) { byte[] body = message.getBody(); receiveMsg = new String(body, "UTF-8"); logger.info(STR, receiveMsg); receiveNum++; } long nextBeginOffset = pullResult.getNextBeginOffset(); consumer.updateConsumeOffset(queue, offset); } } logger.info(STR, receiveNum); Thread.sleep(1000); assertEquals(inTransaction.toJson(), receiveMsg); } finally { removePeer(); consumer.shutdown(); } }
/** * This method tests the replicator by writing data from hbase to rocketmq and reading it back. * * @throws IOException * @throws ReplicationException * @throws InterruptedException * @throws MQClientException * @throws RemotingException * @throws MQBrokerException */
This method tests the replicator by writing data from hbase to rocketmq and reading it back
testCustomReplicationEndpoint
{ "repo_name": "StyleTang/incubator-rocketmq-externals", "path": "rocketmq-hbase/rocketmq-hbase-sink/src/test/java/org/apache/rocketmq/sink/ReplicatorTest.java", "license": "apache-2.0", "size": 13903 }
[ "java.io.IOException", "java.util.ArrayList", "java.util.HashMap", "java.util.List", "java.util.Map", "java.util.Set", "org.apache.hadoop.hbase.TableName", "org.apache.hadoop.hbase.replication.ReplicationException", "org.apache.rocketmq.client.consumer.DefaultMQPullConsumer", "org.apache.rocketmq.client.consumer.PullResult", "org.apache.rocketmq.client.consumer.PullStatus", "org.apache.rocketmq.client.exception.MQBrokerException", "org.apache.rocketmq.client.exception.MQClientException", "org.apache.rocketmq.common.message.MessageExt", "org.apache.rocketmq.common.message.MessageQueue", "org.apache.rocketmq.common.protocol.heartbeat.MessageModel", "org.apache.rocketmq.hbase.sink.Transaction", "org.apache.rocketmq.remoting.exception.RemotingException", "org.junit.Assert" ]
import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.replication.ReplicationException; import org.apache.rocketmq.client.consumer.DefaultMQPullConsumer; import org.apache.rocketmq.client.consumer.PullResult; import org.apache.rocketmq.client.consumer.PullStatus; import org.apache.rocketmq.client.exception.MQBrokerException; import org.apache.rocketmq.client.exception.MQClientException; import org.apache.rocketmq.common.message.MessageExt; import org.apache.rocketmq.common.message.MessageQueue; import org.apache.rocketmq.common.protocol.heartbeat.MessageModel; import org.apache.rocketmq.hbase.sink.Transaction; import org.apache.rocketmq.remoting.exception.RemotingException; import org.junit.Assert;
import java.io.*; import java.util.*; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.replication.*; import org.apache.rocketmq.client.consumer.*; import org.apache.rocketmq.client.exception.*; import org.apache.rocketmq.common.message.*; import org.apache.rocketmq.common.protocol.heartbeat.*; import org.apache.rocketmq.hbase.sink.*; import org.apache.rocketmq.remoting.exception.*; import org.junit.*;
[ "java.io", "java.util", "org.apache.hadoop", "org.apache.rocketmq", "org.junit" ]
java.io; java.util; org.apache.hadoop; org.apache.rocketmq; org.junit;
1,161,329
protected Response newResponse() { return new StringResponse(); }
Response function() { return new StringResponse(); }
/** * Create a new response object which is used to store the markup generated by the child * objects. * * @return Response object. Must not be null */
Create a new response object which is used to store the markup generated by the child objects
newResponse
{ "repo_name": "astubbs/wicket.get-portals2", "path": "wicket/src/main/java/org/apache/wicket/markup/transformer/AbstractTransformerBehavior.java", "license": "apache-2.0", "size": 4228 }
[ "org.apache.wicket.Response", "org.apache.wicket.response.StringResponse" ]
import org.apache.wicket.Response; import org.apache.wicket.response.StringResponse;
import org.apache.wicket.*; import org.apache.wicket.response.*;
[ "org.apache.wicket" ]
org.apache.wicket;
2,143,144
public void initCaptureCollection(final CaptureCollection[] captureCollection) { this.captureCollection = captureCollection; } /** * Initialize the controller by using the definitions inside the {@link Properties}
void function(final CaptureCollection[] captureCollection) { this.captureCollection = captureCollection; } /** * Initialize the controller by using the definitions inside the {@link Properties}
/** * Initialize the given {@link CaptureCollection}. The default implementation just stores it inside the current * instance * * @param captureCollection */
Initialize the given <code>CaptureCollection</code>. The default implementation just stores it inside the current instance
initCaptureCollection
{ "repo_name": "BraintagsGmbH/netrelay", "path": "src/main/java/de/braintags/netrelay/controller/AbstractController.java", "license": "epl-1.0", "size": 11447 }
[ "de.braintags.netrelay.routing.CaptureCollection", "java.util.Properties" ]
import de.braintags.netrelay.routing.CaptureCollection; import java.util.Properties;
import de.braintags.netrelay.routing.*; import java.util.*;
[ "de.braintags.netrelay", "java.util" ]
de.braintags.netrelay; java.util;
2,675,008
@SuppressWarnings("unused") public void customize(Session session) throws Exception { JNDIConnector connector = null; Context context = null; try { context = new InitialContext(); if (null != context) { connector = (JNDIConnector) session.getLogin().getConnector(); // possible // CCE // Change from COMPOSITE_NAME_LOOKUP to STRING_LOOKUP // Note: if both jta and non-jta elements exist this will only change // the first one - and may still result in // the COMPOSITE_NAME_LOOKUP being set // Make sure only jta-data-source is in persistence.xml with no // non-jta-data-source property set connector.setLookupType(JNDIConnector.STRING_LOOKUP); // Or, if you are specifying both JTA and non-JTA in your // persistence.xml then set both connectors to be safe JNDIConnector writeConnector = (JNDIConnector) session.getLogin().getConnector(); writeConnector.setLookupType(JNDIConnector.STRING_LOOKUP); JNDIConnector readConnector = (JNDIConnector) ((DatabaseLogin) ((ServerSession) session) .getReadConnectionPool().getLogin()).getConnector();// ((DatabaseLogin) // ((ServerSession) // session).getReadConnectionPool().getLogin()).getConnector(); readConnector.setLookupType(JNDIConnector.STRING_LOOKUP); System.out.println("_JPAEclipseLinkSessionCustomizer: configured " + connector.getName()); } else { throw new Exception("_JPAEclipseLinkSessionCustomizer: Context is null"); } } catch (Exception e) { e.printStackTrace(); } }
@SuppressWarnings(STR) void function(Session session) throws Exception { JNDIConnector connector = null; Context context = null; try { context = new InitialContext(); if (null != context) { connector = (JNDIConnector) session.getLogin().getConnector(); connector.setLookupType(JNDIConnector.STRING_LOOKUP); JNDIConnector writeConnector = (JNDIConnector) session.getLogin().getConnector(); writeConnector.setLookupType(JNDIConnector.STRING_LOOKUP); JNDIConnector readConnector = (JNDIConnector) ((DatabaseLogin) ((ServerSession) session) .getReadConnectionPool().getLogin()).getConnector(); readConnector.setLookupType(JNDIConnector.STRING_LOOKUP); System.out.println(STR + connector.getName()); } else { throw new Exception(STR); } } catch (Exception e) { e.printStackTrace(); } }
/** * Get a dataSource connection and set it on the session with * lookupType=STRING_LOOKUP */
Get a dataSource connection and set it on the session with lookupType=STRING_LOOKUP
customize
{ "repo_name": "danielbchapman/production-management", "path": "ProductionEJB/src/main/java/com/danielbchapman/jboss/haxx/JPAEclipseLinkSessionCustomizer.java", "license": "gpl-3.0", "size": 2616 }
[ "javax.naming.Context", "javax.naming.InitialContext", "org.eclipse.persistence.sessions.DatabaseLogin", "org.eclipse.persistence.sessions.JNDIConnector", "org.eclipse.persistence.sessions.Session", "org.eclipse.persistence.sessions.server.ServerSession" ]
import javax.naming.Context; import javax.naming.InitialContext; import org.eclipse.persistence.sessions.DatabaseLogin; import org.eclipse.persistence.sessions.JNDIConnector; import org.eclipse.persistence.sessions.Session; import org.eclipse.persistence.sessions.server.ServerSession;
import javax.naming.*; import org.eclipse.persistence.sessions.*; import org.eclipse.persistence.sessions.server.*;
[ "javax.naming", "org.eclipse.persistence" ]
javax.naming; org.eclipse.persistence;
2,826,083
// TODO Move this to a crypto utility class private Key getPublicKey(KeyStore keystore, String alias, String password) throws Exception { Key key = keystore.getKey(alias, password.toCharArray()); if (key instanceof PublicKey) { return key; } else { java.security.cert.Certificate cert = keystore.getCertificate(alias); // Get public key PublicKey publicKey = cert.getPublicKey(); return publicKey; } }
Key function(KeyStore keystore, String alias, String password) throws Exception { Key key = keystore.getKey(alias, password.toCharArray()); if (key instanceof PublicKey) { return key; } else { java.security.cert.Certificate cert = keystore.getCertificate(alias); PublicKey publicKey = cert.getPublicKey(); return publicKey; } }
/** * Returns the public key for the specified alias, or null if the alias or private key is not found. */
Returns the public key for the specified alias, or null if the alias or private key is not found
getPublicKey
{ "repo_name": "cexbrayat/camel", "path": "components/camel-xmlsecurity/src/main/java/org/apache/camel/dataformat/xmlsecurity/XMLSecurityDataFormat.java", "license": "apache-2.0", "size": 31776 }
[ "java.security.Key", "java.security.KeyStore", "java.security.PublicKey" ]
import java.security.Key; import java.security.KeyStore; import java.security.PublicKey;
import java.security.*;
[ "java.security" ]
java.security;
654,845
public void testInvalidMoveOfNavalUnitToALandTile() { // For this test we need a different map Map map = getCoastTestMap(plainsType); game.setMap(map); Tile unitTile = map.getTile(10, 9); assertFalse("Unit tile should be ocean", unitTile.isLand()); Unit unit = new ServerUnit(game, unitTile, game.getCurrentPlayer(), galleonType); Tile landTile = map.getTile(9, 9); assertTrue("Tile should be land", landTile.isLand()); // Execute final CostDecider decider = CostDeciders.avoidSettlements(); int cost = decider.getCost(unit, unitTile, landTile, 4); assertTrue("Move should be invalid", cost == CostDecider.ILLEGAL_MOVE); }
void function() { Map map = getCoastTestMap(plainsType); game.setMap(map); Tile unitTile = map.getTile(10, 9); assertFalse(STR, unitTile.isLand()); Unit unit = new ServerUnit(game, unitTile, game.getCurrentPlayer(), galleonType); Tile landTile = map.getTile(9, 9); assertTrue(STR, landTile.isLand()); final CostDecider decider = CostDeciders.avoidSettlements(); int cost = decider.getCost(unit, unitTile, landTile, 4); assertTrue(STR, cost == CostDecider.ILLEGAL_MOVE); }
/** * Checks possible move of a naval unit to a land tile without settlement * Verifies that is invalid */
Checks possible move of a naval unit to a land tile without settlement. Verifies that it is invalid
testInvalidMoveOfNavalUnitToALandTile
{ "repo_name": "edijman/SOEN_6431_Colonization_Game", "path": "test/src/net/sf/freecol/common/model/BaseCostDeciderTest.java", "license": "gpl-2.0", "size": 10258 }
[ "net.sf.freecol.common.model.Map", "net.sf.freecol.common.model.pathfinding.CostDecider", "net.sf.freecol.common.model.pathfinding.CostDeciders", "net.sf.freecol.server.model.ServerUnit" ]
import net.sf.freecol.common.model.Map; import net.sf.freecol.common.model.pathfinding.CostDecider; import net.sf.freecol.common.model.pathfinding.CostDeciders; import net.sf.freecol.server.model.ServerUnit;
import net.sf.freecol.common.model.*; import net.sf.freecol.common.model.pathfinding.*; import net.sf.freecol.server.model.*;
[ "net.sf.freecol" ]
net.sf.freecol;
953,484
private static void ensureCoLocatedVerticesInSameRegion( List<DefaultSchedulingPipelinedRegion> pipelinedRegions, ExecutionGraph executionGraph) { final Map<CoLocationConstraint, DefaultSchedulingPipelinedRegion> constraintToRegion = new IdentityHashMap<>(); for (DefaultSchedulingPipelinedRegion region : pipelinedRegions) { for (DefaultExecutionVertex vertex : region.getVertices()) { final CoLocationConstraint constraint = getCoLocationConstraint(vertex.getId(), executionGraph); if (constraint != null) { final DefaultSchedulingPipelinedRegion regionOfConstraint = constraintToRegion.get(constraint); checkState( regionOfConstraint == null || regionOfConstraint == region, "co-located tasks must be in the same pipelined region"); constraintToRegion.putIfAbsent(constraint, region); } } } }
static void function( List<DefaultSchedulingPipelinedRegion> pipelinedRegions, ExecutionGraph executionGraph) { final Map<CoLocationConstraint, DefaultSchedulingPipelinedRegion> constraintToRegion = new IdentityHashMap<>(); for (DefaultSchedulingPipelinedRegion region : pipelinedRegions) { for (DefaultExecutionVertex vertex : region.getVertices()) { final CoLocationConstraint constraint = getCoLocationConstraint(vertex.getId(), executionGraph); if (constraint != null) { final DefaultSchedulingPipelinedRegion regionOfConstraint = constraintToRegion.get(constraint); checkState( regionOfConstraint == null || regionOfConstraint == region, STR); constraintToRegion.putIfAbsent(constraint, region); } } } }
/** * Co-location constraints are only used for iteration head and tail. A paired head and tail * needs to be in the same pipelined region so that they can be restarted together. */
Co-location constraints are only used for iteration head and tail. A paired head and tail needs to be in the same pipelined region so that they can be restarted together
ensureCoLocatedVerticesInSameRegion
{ "repo_name": "aljoscha/flink", "path": "flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adapter/DefaultExecutionTopology.java", "license": "apache-2.0", "size": 15101 }
[ "java.util.IdentityHashMap", "java.util.List", "java.util.Map", "org.apache.flink.runtime.executiongraph.ExecutionGraph", "org.apache.flink.runtime.jobmanager.scheduler.CoLocationConstraint", "org.apache.flink.util.Preconditions" ]
import java.util.IdentityHashMap; import java.util.List; import java.util.Map; import org.apache.flink.runtime.executiongraph.ExecutionGraph; import org.apache.flink.runtime.jobmanager.scheduler.CoLocationConstraint; import org.apache.flink.util.Preconditions;
import java.util.*; import org.apache.flink.runtime.executiongraph.*; import org.apache.flink.runtime.jobmanager.scheduler.*; import org.apache.flink.util.*;
[ "java.util", "org.apache.flink" ]
java.util; org.apache.flink;
673,673
@Test public void testToStringASTVariable() { String in = "vAr"; Expression out = Expression.parse(in); Expression desired = new Variable("vAr"); assertTrue(out.toStringAST().equals(desired.toStringAST())); }
void function() { String in = "vAr"; Expression out = Expression.parse(in); Expression desired = new Variable("vAr"); assertTrue(out.toStringAST().equals(desired.toStringAST())); }
/** * Tests toStringAST of a variable */
Tests toStringAST of a variable
testToStringASTVariable
{ "repo_name": "hcgatewood/main", "path": "6.005 Expression Final Project/src/test/ExpressionTest.java", "license": "mit", "size": 26294 }
[ "org.junit.Assert" ]
import org.junit.Assert;
import org.junit.*;
[ "org.junit" ]
org.junit;
2,683,575
private static void addFrameOutput(FramingContext state, Object parent, String property, Object output) { if (parent instanceof Map) { List<Object> prop = (List<Object>) ((Map<String, Object>) parent).get(property); if (prop == null) { prop = new ArrayList<Object>(); ((Map<String, Object>) parent).put(property, prop); } prop.add(output); } else { ((List) parent).add(output); } }
static void function(FramingContext state, Object parent, String property, Object output) { if (parent instanceof Map) { List<Object> prop = (List<Object>) ((Map<String, Object>) parent).get(property); if (prop == null) { prop = new ArrayList<Object>(); ((Map<String, Object>) parent).put(property, prop); } prop.add(output); } else { ((List) parent).add(output); } }
/** * Adds framing output to the given parent. * * @param state the current framing state. * @param parent the parent to add to. * @param property the parent property. * @param output the output to add. */
Adds framing output to the given parent
addFrameOutput
{ "repo_name": "boumba100/JsonldAndroid", "path": "src/main/java/com/nbouma/jsonldjava/core/JsonLdApi.java", "license": "bsd-3-clause", "size": 90876 }
[ "java.util.ArrayList", "java.util.List", "java.util.Map" ]
import java.util.ArrayList; import java.util.List; import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
158,280
public @NonNull PlotCommentContainer getPlotCommentContainer() { return this.plotCommentContainer; }
@NonNull PlotCommentContainer function() { return this.plotCommentContainer; }
/** * Get the plot comment container. This can be used to manage * and access plot comments * * @return Plot comment container */
Get the plot comment container. This can be used to manage and access plot comments
getPlotCommentContainer
{ "repo_name": "IntellectualSites/PlotSquared", "path": "Core/src/main/java/com/plotsquared/core/plot/Plot.java", "license": "gpl-3.0", "size": 117872 }
[ "org.checkerframework.checker.nullness.qual.NonNull" ]
import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.checker.nullness.qual.*;
[ "org.checkerframework.checker" ]
org.checkerframework.checker;
1,659,202
public Observable<ServiceResponse<DataBoxEdgeDeviceInner>> getByResourceGroupWithServiceResponseAsync(String deviceName, String resourceGroupName) { if (deviceName == null) { throw new IllegalArgumentException("Parameter deviceName is required and cannot be null."); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null."); }
Observable<ServiceResponse<DataBoxEdgeDeviceInner>> function(String deviceName, String resourceGroupName) { if (deviceName == null) { throw new IllegalArgumentException(STR); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException(STR); } if (resourceGroupName == null) { throw new IllegalArgumentException(STR); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException(STR); }
/** * Gets the properties of the data box edge/gateway device. * * @param deviceName The device name. * @param resourceGroupName The resource group name. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the DataBoxEdgeDeviceInner object */
Gets the properties of the data box edge/gateway device
getByResourceGroupWithServiceResponseAsync
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/edgegateway/mgmt-v2019_03_01/src/main/java/com/microsoft/azure/management/edgegateway/v2019_03_01/implementation/DevicesInner.java", "license": "mit", "size": 143759 }
[ "com.microsoft.rest.ServiceResponse" ]
import com.microsoft.rest.ServiceResponse;
import com.microsoft.rest.*;
[ "com.microsoft.rest" ]
com.microsoft.rest;
2,483,662
public double priceFromNetBasis(final BondFuturesSecurity futures, final IssuerProviderInterface issuerMulticurves, final double netBasis) { ArgumentChecker.notNull(futures, "Future"); ArgumentChecker.notNull(issuerMulticurves, "Issuer and multi-curves provider"); final double[] priceFromBond = new double[futures.getDeliveryBasketAtDeliveryDate().length]; for (int loopbasket = 0; loopbasket < futures.getDeliveryBasketAtDeliveryDate().length; loopbasket++) { priceFromBond[loopbasket] = (BOND_METHOD.cleanPriceFromCurves(futures.getDeliveryBasketAtDeliveryDate()[loopbasket], issuerMulticurves) - netBasis) / futures.getConversionFactor()[loopbasket]; } final double priceFuture = MIN_FUNCTION.evaluate(priceFromBond); return priceFuture; }
double function(final BondFuturesSecurity futures, final IssuerProviderInterface issuerMulticurves, final double netBasis) { ArgumentChecker.notNull(futures, STR); ArgumentChecker.notNull(issuerMulticurves, STR); final double[] priceFromBond = new double[futures.getDeliveryBasketAtDeliveryDate().length]; for (int loopbasket = 0; loopbasket < futures.getDeliveryBasketAtDeliveryDate().length; loopbasket++) { priceFromBond[loopbasket] = (BOND_METHOD.cleanPriceFromCurves(futures.getDeliveryBasketAtDeliveryDate()[loopbasket], issuerMulticurves) - netBasis) / futures.getConversionFactor()[loopbasket]; } final double priceFuture = MIN_FUNCTION.evaluate(priceFromBond); return priceFuture; }
/** * Computes the futures price from the curves used to price the underlying bonds and the net basis. * @param futures The future security. * @param issuerMulticurves The issuer and multi-curves provider. * @param netBasis The net basis associated to the future. * @return The future price. */
Computes the futures price from the curves used to price the underlying bonds and the net basis
priceFromNetBasis
{ "repo_name": "jeorme/OG-Platform", "path": "projects/OG-Analytics/src/main/java/com/opengamma/analytics/financial/interestrate/future/provider/BondFuturesSecurityDiscountingMethod.java", "license": "apache-2.0", "size": 6977 }
[ "com.opengamma.analytics.financial.interestrate.future.derivative.BondFuturesSecurity", "com.opengamma.analytics.financial.provider.description.interestrate.IssuerProviderInterface", "com.opengamma.util.ArgumentChecker" ]
import com.opengamma.analytics.financial.interestrate.future.derivative.BondFuturesSecurity; import com.opengamma.analytics.financial.provider.description.interestrate.IssuerProviderInterface; import com.opengamma.util.ArgumentChecker;
import com.opengamma.analytics.financial.interestrate.future.derivative.*; import com.opengamma.analytics.financial.provider.description.interestrate.*; import com.opengamma.util.*;
[ "com.opengamma.analytics", "com.opengamma.util" ]
com.opengamma.analytics; com.opengamma.util;
2,855,802
@ServiceMethod(returns = ReturnType.SINGLE) private Mono<Void> deleteAsync(String resourceGroupName, String firewallPolicyName, Context context) { return beginDeleteAsync(resourceGroupName, firewallPolicyName, context) .last() .flatMap(this.client::getLroFinalResultOrError); }
@ServiceMethod(returns = ReturnType.SINGLE) Mono<Void> function(String resourceGroupName, String firewallPolicyName, Context context) { return beginDeleteAsync(resourceGroupName, firewallPolicyName, context) .last() .flatMap(this.client::getLroFinalResultOrError); }
/** * Deletes the specified Firewall Policy. * * @param resourceGroupName The name of the resource group. * @param firewallPolicyName The name of the Firewall Policy. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the completion. */
Deletes the specified Firewall Policy
deleteAsync
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/resourcemanager/azure-resourcemanager-network/src/main/java/com/azure/resourcemanager/network/implementation/FirewallPoliciesClientImpl.java", "license": "mit", "size": 59146 }
[ "com.azure.core.annotation.ReturnType", "com.azure.core.annotation.ServiceMethod", "com.azure.core.util.Context" ]
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.util.Context;
import com.azure.core.annotation.*; import com.azure.core.util.*;
[ "com.azure.core" ]
com.azure.core;
135,112
@Override public void setUnknownTokenBoundaries() { Deque<HiveParserASTNode> stack1 = new ArrayDeque<HiveParserASTNode>(); Deque<HiveParserASTNode> stack2 = new ArrayDeque<HiveParserASTNode>(); stack1.push(this); while (!stack1.isEmpty()) { HiveParserASTNode next = stack1.pop(); stack2.push(next); if (next.children != null) { for (int i = next.children.size() - 1; i >= 0; i--) { stack1.push((HiveParserASTNode) next.children.get(i)); } } } while (!stack2.isEmpty()) { HiveParserASTNode next = stack2.pop(); if (next.children == null) { if (next.startIndex < 0 || next.stopIndex < 0) { next.startIndex = next.stopIndex = next.token.getTokenIndex(); } } else if (next.startIndex >= 0 && next.stopIndex >= 0) { continue; } else if (next.children.size() > 0) { HiveParserASTNode firstChild = (HiveParserASTNode) next.children.get(0); HiveParserASTNode lastChild = (HiveParserASTNode) next.children.get(next.children.size() - 1); next.startIndex = firstChild.getTokenStartIndex(); next.stopIndex = lastChild.getTokenStopIndex(); } } }
void function() { Deque<HiveParserASTNode> stack1 = new ArrayDeque<HiveParserASTNode>(); Deque<HiveParserASTNode> stack2 = new ArrayDeque<HiveParserASTNode>(); stack1.push(this); while (!stack1.isEmpty()) { HiveParserASTNode next = stack1.pop(); stack2.push(next); if (next.children != null) { for (int i = next.children.size() - 1; i >= 0; i--) { stack1.push((HiveParserASTNode) next.children.get(i)); } } } while (!stack2.isEmpty()) { HiveParserASTNode next = stack2.pop(); if (next.children == null) { if (next.startIndex < 0 || next.stopIndex < 0) { next.startIndex = next.stopIndex = next.token.getTokenIndex(); } } else if (next.startIndex >= 0 && next.stopIndex >= 0) { continue; } else if (next.children.size() > 0) { HiveParserASTNode firstChild = (HiveParserASTNode) next.children.get(0); HiveParserASTNode lastChild = (HiveParserASTNode) next.children.get(next.children.size() - 1); next.startIndex = firstChild.getTokenStartIndex(); next.stopIndex = lastChild.getTokenStopIndex(); } } }
/** * For every node in this subtree, make sure it's start/stop token's are set. Walk depth first, * visit bottom up. Only updates nodes with at least one token index < 0. * * <p>In contrast to the method in the parent class, this method is iterative. */
For every node in this subtree, make sure it's start/stop token's are set. Walk depth first, visit bottom up. Only updates nodes with at least one token index < 0. In contrast to the method in the parent class, this method is iterative
setUnknownTokenBoundaries
{ "repo_name": "lincoln-lil/flink", "path": "flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/planner/delegation/hive/copy/HiveParserASTNode.java", "license": "apache-2.0", "size": 11240 }
[ "java.util.ArrayDeque", "java.util.Deque" ]
import java.util.ArrayDeque; import java.util.Deque;
import java.util.*;
[ "java.util" ]
java.util;
1,159,297
@SuppressWarnings("unchecked") public T findByPk(ID id) throws DatabaseException { log.debug("findByPk({})", id); Session session = null; try { session = HibernateUtil.getSessionFactory().openSession(); T ret = (T) session.load(persistentClass, id); Hibernate.initialize(ret); log.debug("findByPk: {}", ret); return ret; } catch (HibernateException e) { throw new DatabaseException(e.getMessage(), e); } finally { HibernateUtil.close(session); } }
@SuppressWarnings(STR) T function(ID id) throws DatabaseException { log.debug(STR, id); Session session = null; try { session = HibernateUtil.getSessionFactory().openSession(); T ret = (T) session.load(persistentClass, id); Hibernate.initialize(ret); log.debug(STR, ret); return ret; } catch (HibernateException e) { throw new DatabaseException(e.getMessage(), e); } finally { HibernateUtil.close(session); } }
/** * Find by primary key */
Find by primary key
findByPk
{ "repo_name": "codelibs/n2dms", "path": "src/main/java/com/openkm/dao/GenericDAO.java", "license": "gpl-2.0", "size": 5603 }
[ "com.openkm.core.DatabaseException", "org.hibernate.Hibernate", "org.hibernate.HibernateException", "org.hibernate.Session" ]
import com.openkm.core.DatabaseException; import org.hibernate.Hibernate; import org.hibernate.HibernateException; import org.hibernate.Session;
import com.openkm.core.*; import org.hibernate.*;
[ "com.openkm.core", "org.hibernate" ]
com.openkm.core; org.hibernate;
89,833
@Test public void testFullProject() { MockEarlySchemaReader reader = new MockEarlySchemaReader(); reader.batchLimit = 1; // Select table and implicit columns. FileScanFixtureBuilder builder = new FileScanFixtureBuilder(); builder.setProjection("dir0", "b", "filename", "c", "suffix"); builder.addReader(reader); ScanFixture scanFixture = builder.build(); ScanOperatorExec scan = scanFixture.scanOp; // Expect data and implicit columns SchemaBuilder schemaBuilder = new SchemaBuilder() .addNullable("dir0", MinorType.VARCHAR) .addNullable("b", MinorType.VARCHAR, 10) .add("filename", MinorType.VARCHAR) .addNullable("c", MinorType.INT) .add("suffix", MinorType.VARCHAR); BatchSchema expectedSchema = new BatchSchemaBuilder() .withSchemaBuilder(schemaBuilder) .build(); SingleRowSet expected = fixture.rowSetBuilder(expectedSchema) .addRow(MOCK_DIR0, "fred", MOCK_FILE_NAME, null, MOCK_SUFFIX) .addRow(MOCK_DIR0, "wilma", MOCK_FILE_NAME, null, MOCK_SUFFIX) .build(); // Schema should include implicit columns. assertTrue(scan.buildSchema()); assertEquals(expectedSchema, scan.batchAccessor().schema()); scan.batchAccessor().release(); // Read one batch, should contain implicit columns assertTrue(scan.next()); RowSetUtilities.verify(expected, fixture.wrap(scan.batchAccessor().container())); // EOF assertFalse(scan.next()); assertEquals(0, scan.batchAccessor().rowCount()); scanFixture.close(); }
void function() { MockEarlySchemaReader reader = new MockEarlySchemaReader(); reader.batchLimit = 1; FileScanFixtureBuilder builder = new FileScanFixtureBuilder(); builder.setProjection("dir0", "b", STR, "c", STR); builder.addReader(reader); ScanFixture scanFixture = builder.build(); ScanOperatorExec scan = scanFixture.scanOp; SchemaBuilder schemaBuilder = new SchemaBuilder() .addNullable("dir0", MinorType.VARCHAR) .addNullable("b", MinorType.VARCHAR, 10) .add(STR, MinorType.VARCHAR) .addNullable("c", MinorType.INT) .add(STR, MinorType.VARCHAR); BatchSchema expectedSchema = new BatchSchemaBuilder() .withSchemaBuilder(schemaBuilder) .build(); SingleRowSet expected = fixture.rowSetBuilder(expectedSchema) .addRow(MOCK_DIR0, "fred", MOCK_FILE_NAME, null, MOCK_SUFFIX) .addRow(MOCK_DIR0, "wilma", MOCK_FILE_NAME, null, MOCK_SUFFIX) .build(); assertTrue(scan.buildSchema()); assertEquals(expectedSchema, scan.batchAccessor().schema()); scan.batchAccessor().release(); assertTrue(scan.next()); RowSetUtilities.verify(expected, fixture.wrap(scan.batchAccessor().container())); assertFalse(scan.next()); assertEquals(0, scan.batchAccessor().rowCount()); scanFixture.close(); }
/** * Exercise the major project operations: subset of table * columns, implicit, partition, missing columns, and output * order (and positions) different than table. These cases * are more fully test on lower level components; here we verify * that the components are wired up correctly. */
Exercise the major project operations: subset of table columns, implicit, partition, missing columns, and output order (and positions) different than table. These cases are more fully tested on lower level components; here we verify that the components are wired up correctly
testFullProject
{ "repo_name": "apache/drill", "path": "exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestFileScanFramework.java", "license": "apache-2.0", "size": 19356 }
[ "org.apache.drill.common.types.TypeProtos", "org.apache.drill.exec.physical.impl.scan.ScanTestUtils", "org.apache.drill.exec.physical.rowSet.RowSet", "org.apache.drill.exec.record.BatchSchema", "org.apache.drill.exec.record.BatchSchemaBuilder", "org.apache.drill.exec.record.metadata.SchemaBuilder", "org.apache.drill.test.rowSet.RowSetUtilities", "org.junit.Assert" ]
import org.apache.drill.common.types.TypeProtos; import org.apache.drill.exec.physical.impl.scan.ScanTestUtils; import org.apache.drill.exec.physical.rowSet.RowSet; import org.apache.drill.exec.record.BatchSchema; import org.apache.drill.exec.record.BatchSchemaBuilder; import org.apache.drill.exec.record.metadata.SchemaBuilder; import org.apache.drill.test.rowSet.RowSetUtilities; import org.junit.Assert;
import org.apache.drill.common.types.*; import org.apache.drill.exec.physical.*; import org.apache.drill.exec.physical.impl.scan.*; import org.apache.drill.exec.record.*; import org.apache.drill.exec.record.metadata.*; import org.apache.drill.test.*; import org.junit.*;
[ "org.apache.drill", "org.junit" ]
org.apache.drill; org.junit;
795,372
synchronized void addDelta(ByteStringMessage<ProtocolWaveletDelta> delta, CertificateManager.SignatureResultListener resultListener) { Preconditions.checkState(acceptMoreDeltas.get()); deltasToSign.put(delta, resultListener); if (deltasToSign.size() >= maximumDeltaBundleSize) { acceptMoreDeltas.set(false); // If there is a task scheduled, attempt to cancel, but do not interrupt if it's // already executing. If cancel() returns false the task has already been run. if (scheduledFuture == null || scheduledFuture.cancel(false)) { executorService.execute(signingTask); } } else if (scheduledFuture == null){ scheduledFuture = executorService.schedule(signingTask, bundlingAccumulationDelayMs, TimeUnit.MILLISECONDS); } } } BundlingDeltaSigner(ScheduledExecutorService executorService, WaveSigner signer, int maximumDeltaBundleSize, int bundlingAccumulationDelayMs) { this.executorService = executorService; this.signer = signer; this.currentBundle = null; Preconditions.checkArgument(maximumDeltaBundleSize > 1); this.maximumDeltaBundleSize = maximumDeltaBundleSize; Preconditions.checkArgument(bundlingAccumulationDelayMs > 0); this.bundlingAccumulationDelayMs = bundlingAccumulationDelayMs; }
synchronized void addDelta(ByteStringMessage<ProtocolWaveletDelta> delta, CertificateManager.SignatureResultListener resultListener) { Preconditions.checkState(acceptMoreDeltas.get()); deltasToSign.put(delta, resultListener); if (deltasToSign.size() >= maximumDeltaBundleSize) { acceptMoreDeltas.set(false); if (scheduledFuture == null || scheduledFuture.cancel(false)) { executorService.execute(signingTask); } } else if (scheduledFuture == null){ scheduledFuture = executorService.schedule(signingTask, bundlingAccumulationDelayMs, TimeUnit.MILLISECONDS); } } } BundlingDeltaSigner(ScheduledExecutorService executorService, WaveSigner signer, int maximumDeltaBundleSize, int bundlingAccumulationDelayMs) { this.executorService = executorService; this.signer = signer; this.currentBundle = null; Preconditions.checkArgument(maximumDeltaBundleSize > 1); this.maximumDeltaBundleSize = maximumDeltaBundleSize; Preconditions.checkArgument(bundlingAccumulationDelayMs > 0); this.bundlingAccumulationDelayMs = bundlingAccumulationDelayMs; }
/** * Add the delta to the bundle. This may trigger signing now or later. The * resultListener will be called once the delta has been signed. May only * be called when canAcceptMoreDeltas is true. */
Add the delta to the bundle. This may trigger signing now or later. The resultListener will be called once the delta has been signed. May only be called when canAcceptMoreDeltas is true
addDelta
{ "repo_name": "scrosby/fedone", "path": "src/org/waveprotocol/wave/examples/fedone/waveserver/BundlingDeltaSigner.java", "license": "apache-2.0", "size": 5990 }
[ "com.google.common.base.Preconditions", "java.util.concurrent.ScheduledExecutorService", "java.util.concurrent.TimeUnit", "org.waveprotocol.wave.crypto.WaveSigner", "org.waveprotocol.wave.examples.fedone.waveserver.CertificateManager", "org.waveprotocol.wave.federation.Proto" ]
import com.google.common.base.Preconditions; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import org.waveprotocol.wave.crypto.WaveSigner; import org.waveprotocol.wave.examples.fedone.waveserver.CertificateManager; import org.waveprotocol.wave.federation.Proto;
import com.google.common.base.*; import java.util.concurrent.*; import org.waveprotocol.wave.crypto.*; import org.waveprotocol.wave.examples.fedone.waveserver.*; import org.waveprotocol.wave.federation.*;
[ "com.google.common", "java.util", "org.waveprotocol.wave" ]
com.google.common; java.util; org.waveprotocol.wave;
204,541
static Type canonicalize(Type type) { if (type instanceof Class) { Class<?> c = (Class<?>) type; return c.isArray() ? new GenericArrayTypeImpl(canonicalize(c.getComponentType())) : c; } else if (type instanceof ParameterizedType) { if (type instanceof ParameterizedTypeImpl) return type; ParameterizedType p = (ParameterizedType) type; return new ParameterizedTypeImpl(p.getOwnerType(), p.getRawType(), p.getActualTypeArguments()); } else if (type instanceof GenericArrayType) { if (type instanceof GenericArrayTypeImpl) return type; GenericArrayType g = (GenericArrayType) type; return new GenericArrayTypeImpl(g.getGenericComponentType()); } else if (type instanceof WildcardType) { if (type instanceof WildcardTypeImpl) return type; WildcardType w = (WildcardType) type; return new WildcardTypeImpl(w.getUpperBounds(), w.getLowerBounds()); } else { return type; // This type is unsupported! } }
static Type canonicalize(Type type) { if (type instanceof Class) { Class<?> c = (Class<?>) type; return c.isArray() ? new GenericArrayTypeImpl(canonicalize(c.getComponentType())) : c; } else if (type instanceof ParameterizedType) { if (type instanceof ParameterizedTypeImpl) return type; ParameterizedType p = (ParameterizedType) type; return new ParameterizedTypeImpl(p.getOwnerType(), p.getRawType(), p.getActualTypeArguments()); } else if (type instanceof GenericArrayType) { if (type instanceof GenericArrayTypeImpl) return type; GenericArrayType g = (GenericArrayType) type; return new GenericArrayTypeImpl(g.getGenericComponentType()); } else if (type instanceof WildcardType) { if (type instanceof WildcardTypeImpl) return type; WildcardType w = (WildcardType) type; return new WildcardTypeImpl(w.getUpperBounds(), w.getLowerBounds()); } else { return type; } }
/** * Returns a type that is functionally equal but not necessarily equal according to {@link * Object#equals(Object) Object.equals()}. */
Returns a type that is functionally equal but not necessarily equal according to <code>Object#equals(Object) Object.equals()</code>
canonicalize
{ "repo_name": "serj-lotutovici/moshi", "path": "moshi/src/main/java/com/squareup/moshi/Types.java", "license": "apache-2.0", "size": 24324 }
[ "java.lang.reflect.GenericArrayType", "java.lang.reflect.ParameterizedType", "java.lang.reflect.Type", "java.lang.reflect.WildcardType" ]
import java.lang.reflect.GenericArrayType; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.lang.reflect.WildcardType;
import java.lang.reflect.*;
[ "java.lang" ]
java.lang;
282,614
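Moshi's GenericArrayTypeImpl and the other *Impl classes used by canonicalize are package-private, so the self-contained sketch below rebuilds only the Class-array branch with its own stand-in GenericArrayType; the remaining branches are omitted and the class name is made up.

```java
import java.lang.reflect.GenericArrayType;
import java.lang.reflect.Type;

public class CanonicalizeSketch {
    // Stand-in for the package-private GenericArrayTypeImpl used above.
    static GenericArrayType genericArrayType(final Type component) {
        return new GenericArrayType() {
            @Override public Type getGenericComponentType() { return component; }
        };
    }

    // Only the Class branch: an array Class is normalised to a GenericArrayType
    // wrapping its (recursively canonicalized) component type.
    static Type canonicalize(Type type) {
        if (type instanceof Class) {
            Class<?> c = (Class<?>) type;
            return c.isArray() ? genericArrayType(canonicalize(c.getComponentType())) : c;
        }
        return type; // ParameterizedType, GenericArrayType and WildcardType cases omitted
    }

    public static void main(String[] args) {
        Type canonical = canonicalize(String[].class);
        System.out.println(canonical instanceof GenericArrayType);                    // true
        System.out.println(((GenericArrayType) canonical).getGenericComponentType()); // class java.lang.String
    }
}
```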
private Composite createStatusAndButtons(Composite parent) { Composite panel= new Composite(parent, SWT.NULL); GridLayout layout= new GridLayout(); layout.numColumns= 3; layout.marginWidth= 0; layout.marginHeight= 0; panel.setLayout(layout); statusLabel= new Label(panel, SWT.LEFT); setGridData(statusLabel, SWT.FILL, true, SWT.CENTER, false); applyButton= createButton(panel, BUTTON_ID_APPLY, TEXT_APPLY_LABEL, false); setGridData(applyButton, SWT.RIGHT, false, SWT.CENTER, false); closeButton= createButton(panel, BUTTON_ID_CANCEL, TEXT_CLOSE_LABEL, false); setGridData(closeButton, SWT.RIGHT, false, SWT.CENTER, false); return panel; }
Composite function(Composite parent) { Composite panel= new Composite(parent, SWT.NULL); GridLayout layout= new GridLayout(); layout.numColumns= 3; layout.marginWidth= 0; layout.marginHeight= 0; panel.setLayout(layout); statusLabel= new Label(panel, SWT.LEFT); setGridData(statusLabel, SWT.FILL, true, SWT.CENTER, false); applyButton= createButton(panel, BUTTON_ID_APPLY, TEXT_APPLY_LABEL, false); setGridData(applyButton, SWT.RIGHT, false, SWT.CENTER, false); closeButton= createButton(panel, BUTTON_ID_CANCEL, TEXT_CLOSE_LABEL, false); setGridData(closeButton, SWT.RIGHT, false, SWT.CENTER, false); return panel; }
/** * Creates the status label and buttons section of the dialog. * * @param parent the parent composite * @return the status/button panel */
Creates the status label and buttons section of the dialog
createStatusAndButtons
{ "repo_name": "chrisGerken/grauthor", "path": "org.gramar.eclipse.ui/src/org/gramar/eclipse/ui/dialog/ChooseGramarDialog.java", "license": "apache-2.0", "size": 8305 }
[ "org.eclipse.swt.layout.GridLayout", "org.eclipse.swt.widgets.Composite", "org.eclipse.swt.widgets.Label" ]
import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.layout.*; import org.eclipse.swt.widgets.*;
[ "org.eclipse.swt" ]
org.eclipse.swt;
1,258,398
public Date getGeneralizedTime(int len) throws IOException { if (len > available()) throw new IOException("short read of DER Generalized Time"); if (len < 13 || len > 23) throw new IOException("DER Generalized Time length error"); return getTime(len, true); }
Date function(int len) throws IOException { if (len > available()) throw new IOException(STR); if (len < 13 len > 23) throw new IOException(STR); return getTime(len, true); }
/** * Returns the Generalized Time value that takes up the specified * number of bytes in this buffer. * @param len the number of bytes to use */
Returns the Generalized Time value that takes up the specified number of bytes in this buffer
getGeneralizedTime
{ "repo_name": "Taichi-SHINDO/jdk9-jdk", "path": "src/java.base/share/classes/sun/security/util/DerInputBuffer.java", "license": "gpl-2.0", "size": 14587 }
[ "java.io.IOException", "java.util.Date" ]
import java.io.IOException; import java.util.Date;
import java.io.*; import java.util.*;
[ "java.io", "java.util" ]
java.io; java.util;
2,319,982
public void read(DataInput dataInput) throws IOException { this.classification = MessageHelper.readString(dataInput); this.interpretation = MessageHelper.readSerializable(dataInput); this.confidence = dataInput.readDouble(); this.toStringOverride = MessageHelper.readString(dataInput); final int numC2Vs = dataInput.readInt(); if (numC2Vs > 0) { this.category2Value = new HashMap<String, Serializable>(); for (int c2vNum = 0; c2vNum < numC2Vs; ++c2vNum) { category2Value.put(MessageHelper.readString(dataInput), MessageHelper.readSerializable(dataInput)); } } this._interpXml = MessageHelper.readString(dataInput); this._parse = (Parse)MessageHelper.readPublishable(dataInput); }
void function(DataInput dataInput) throws IOException { this.classification = MessageHelper.readString(dataInput); this.interpretation = MessageHelper.readSerializable(dataInput); this.confidence = dataInput.readDouble(); this.toStringOverride = MessageHelper.readString(dataInput); final int numC2Vs = dataInput.readInt(); if (numC2Vs > 0) { this.category2Value = new HashMap<String, Serializable>(); for (int c2vNum = 0; c2vNum < numC2Vs; ++c2vNum) { category2Value.put(MessageHelper.readString(dataInput), MessageHelper.readSerializable(dataInput)); } } this._interpXml = MessageHelper.readString(dataInput); this._parse = (Parse)MessageHelper.readPublishable(dataInput); }
/** * Read this message's contents from the dataInput stream that was written by * this.write(dataOutput). * <p> * NOTE: this requires all implementing classes to have a default constructor * with no args. * * @param dataInput the data input to read from. */
Read this message's contents from the dataInput stream that was written by this.write(dataOutput). NOTE: this requires all implementing classes to have a default constructor with no args
read
{ "repo_name": "KoehlerSB747/sd-tools", "path": "src/main/java/org/sd/atn/ParseInterpretation.java", "license": "apache-2.0", "size": 13886 }
[ "java.io.DataInput", "java.io.IOException", "java.io.Serializable", "java.util.HashMap", "org.sd.cio.MessageHelper" ]
import java.io.DataInput; import java.io.IOException; import java.io.Serializable; import java.util.HashMap; import org.sd.cio.MessageHelper;
import java.io.*; import java.util.*; import org.sd.cio.*;
[ "java.io", "java.util", "org.sd.cio" ]
java.io; java.util; org.sd.cio;
1,978,819
public String toString() { final StringBuilder buffer = new StringBuilder("[ "); final Iterator<T> elems = iterator(); if (elems.hasNext()) buffer.append(elems.next()); while(elems.hasNext()) { buffer.append(", "); buffer.append(elems.next()); } buffer.append(" ]"); return buffer.toString(); }
String function() { final StringBuilder buffer = new StringBuilder(STR); final Iterator<T> elems = iterator(); if (elems.hasNext()) buffer.append(elems.next()); while(elems.hasNext()) { buffer.append(STR); buffer.append(elems.next()); } buffer.append(STR); return buffer.toString(); }
/** * Returns a string representation of this stack. */
Returns a string representation of this stack
toString
{ "repo_name": "ModelWriter/Tarski", "path": "Source/eu.modelwriter.alloyanalyzer/src/kodkod/util/collections/Stack.java", "license": "epl-1.0", "size": 4968 }
[ "java.util.Iterator" ]
import java.util.Iterator;
import java.util.*;
[ "java.util" ]
java.util;
2,623,592
public boolean supportsTransactions() throws SQLException { if (JdbcDebugCfg.entryActive) debug[methodId_supportsTransactions].methodEntry(); try { return true; } finally { if (JdbcDebugCfg.entryActive) debug[methodId_supportsTransactions].methodExit(); } }
boolean function() throws SQLException { if (JdbcDebugCfg.entryActive) debug[methodId_supportsTransactions].methodEntry(); try { return true; } finally { if (JdbcDebugCfg.entryActive) debug[methodId_supportsTransactions].methodExit(); } }
/** * Retrieves whether this database supports transactions. If not, invoking * the method commit is a noop, and the isolation level is TRANSACTION_NONE. * * @return true * @throws SQLException * - if a database access error occurs **/
Retrieves whether this database supports transactions. If not, invoking the method commit is a noop, and the isolation level is TRANSACTION_NONE
supportsTransactions
{ "repo_name": "mashengchen/incubator-trafodion", "path": "core/conn/jdbc_type2/src/main/java/org/apache/trafodion/jdbc/t2/SQLMXDatabaseMetaData.java", "license": "apache-2.0", "size": 191582 }
[ "java.sql.SQLException" ]
import java.sql.SQLException;
import java.sql.*;
[ "java.sql" ]
java.sql;
2,236,288
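Because supportsTransactions() belongs to the standard java.sql.DatabaseMetaData contract that SQLMXDatabaseMetaData implements, callers reach it through the connection's metadata. The sketch below is generic JDBC; the URL is a placeholder, not a driver-specific one.

```java
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;

public class TransactionsCheck {
    public static void main(String[] args) throws Exception {
        // Placeholder JDBC URL; any driver that exposes DatabaseMetaData will do.
        try (Connection conn = DriverManager.getConnection("jdbc:example://localhost/db")) {
            DatabaseMetaData meta = conn.getMetaData();
            System.out.println("supports transactions: " + meta.supportsTransactions());
        }
    }
}
```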
if (object != null) { Object first = object.getFirst(); if (first != null) { TextWriterWriterInterface<Object> tw = (TextWriterWriterInterface<Object>) out.getWriterFor(first); if (tw == null) { throw new IOException("No handler for database object itself: " + first.getClass().getSimpleName()); } tw.write(out, label, first); } Object second = object.getSecond(); if (second != null) { TextWriterWriterInterface<Object> tw = (TextWriterWriterInterface<Object>) out.getWriterFor(second); if (tw == null) { throw new IOException("No handler for database object itself: " + second.getClass().getSimpleName()); } tw.write(out, label, second); } } }
if (object != null) { Object first = object.getFirst(); if (first != null) { TextWriterWriterInterface<Object> tw = (TextWriterWriterInterface<Object>) out.getWriterFor(first); if (tw == null) { throw new IOException(STR + first.getClass().getSimpleName()); } tw.write(out, label, first); } Object second = object.getSecond(); if (second != null) { TextWriterWriterInterface<Object> tw = (TextWriterWriterInterface<Object>) out.getWriterFor(second); if (tw == null) { throw new IOException(STR + second.getClass().getSimpleName()); } tw.write(out, label, second); } } }
/** * Serialize a pair, component-wise */
Serialize a pair, component-wise
write
{ "repo_name": "elki-project/elki", "path": "elki/src/main/java/elki/result/textwriter/writers/TextWriterPair.java", "license": "agpl-3.0", "size": 2152 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
1,032,843
List<StreamEntry> xrevrange(String key, StreamEntryID end, StreamEntryID start, int count);
List<StreamEntry> xrevrange(String key, StreamEntryID end, StreamEntryID start, int count);
/** * XREVRANGE key end start [COUNT <n>] * * @param key * @param start minimum {@link StreamEntryID} for the retrieved range, passing <code>null</code> will indicate minimum ID possible in the stream * @param end maximum {@link StreamEntryID} for the retrieved range, passing <code>null</code> will indicate maximum ID possible in the stream * @param count The entries with IDs matching the specified range. * @return the entries with IDs matching the specified range, from the higher ID to the lower ID matching. */
XREVRANGE key end start [COUNT <n>]
xrevrange
{ "repo_name": "sohutv/cachecloud", "path": "cachecloud-client/cachecloud-jedis/src/main/java/redis/clients/jedis/commands/JedisCommands.java", "license": "apache-2.0", "size": 14759 }
[ "java.util.List", "redis.clients.jedis.StreamEntry", "redis.clients.jedis.StreamEntryID" ]
import java.util.List; import redis.clients.jedis.StreamEntry; import redis.clients.jedis.StreamEntryID;
import java.util.*; import redis.clients.jedis.*;
[ "java.util", "redis.clients.jedis" ]
java.util; redis.clients.jedis;
1,402,805
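A hedged usage sketch of xrevrange against a local Redis 5+ instance, assuming a Jedis client of the generation this interface comes from; the stream key and field name are made up. Passing null for end and start stands for the maximum and minimum possible IDs, as the Javadoc above states, so this fetches the two most recent entries.

```java
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.StreamEntry;
import redis.clients.jedis.StreamEntryID;

public class XRevRangeExample {
    public static void main(String[] args) {
        try (Jedis jedis = new Jedis("localhost", 6379)) {   // assumed local Redis instance
            Map<String, String> fields = new HashMap<>();
            fields.put("sensor", "42");
            jedis.xadd("mystream", StreamEntryID.NEW_ENTRY, fields);

            // Highest IDs first; null end/start mean "+" and "-" respectively.
            List<StreamEntry> latestTwo = jedis.xrevrange("mystream", null, null, 2);
            for (StreamEntry entry : latestTwo) {
                System.out.println(entry);   // ID followed by the field map
            }
        }
    }
}
```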
@Override public Component.BaselineResizeBehavior getBaselineResizeBehavior( JComponent c) { super.getBaselineResizeBehavior(c); return Component.BaselineResizeBehavior.OTHER; } /** * Transposes the view rectangles as appropriate for a vertical view * before invoking the super method and copies them after they have been * altered by {@link SwingUtilities#layoutCompoundLabel(FontMetrics, String, * Icon, int, int, int, int, Rectangle, Rectangle, Rectangle, int)}
Component.BaselineResizeBehavior function( JComponent c) { super.getBaselineResizeBehavior(c); return Component.BaselineResizeBehavior.OTHER; } /** * Transposes the view rectangles as appropriate for a vertical view * before invoking the super method and copies them after they have been * altered by {@link SwingUtilities#layoutCompoundLabel(FontMetrics, String, * Icon, int, int, int, int, Rectangle, Rectangle, Rectangle, int)}
/** * Overridden to always return Component.BaselineResizeBehavior.OTHER, * since a vertical label does not have a meaningful baseline * * @see ComponentUI#getBaselineResizeBehavior(javax.swing.JComponent) */
Overridden to always return Component.BaselineResizeBehavior.OTHER, since a vertical label does not have a meaningful baseline
getBaselineResizeBehavior
{ "repo_name": "ambro2/jabref", "path": "src/main/java/net/sf/jabref/gui/util/component/VerticalLabelUI.java", "license": "gpl-2.0", "size": 5278 }
[ "java.awt.Component", "java.awt.FontMetrics", "java.awt.Rectangle", "javax.swing.Icon", "javax.swing.JComponent" ]
import java.awt.Component; import java.awt.FontMetrics; import java.awt.Rectangle; import javax.swing.Icon; import javax.swing.JComponent;
import java.awt.*; import javax.swing.*;
[ "java.awt", "javax.swing" ]
java.awt; javax.swing;
1,071,111
public java.util.List<fr.lip6.move.pnml.hlpn.strings.hlapi.StringConstantHLAPI> getSubterm_strings_StringConstantHLAPI(){ java.util.List<fr.lip6.move.pnml.hlpn.strings.hlapi.StringConstantHLAPI> retour = new ArrayList<fr.lip6.move.pnml.hlpn.strings.hlapi.StringConstantHLAPI>(); for (Term elemnt : getSubterm()) { if(elemnt.getClass().equals(fr.lip6.move.pnml.hlpn.strings.impl.StringConstantImpl.class)){ retour.add(new fr.lip6.move.pnml.hlpn.strings.hlapi.StringConstantHLAPI( (fr.lip6.move.pnml.hlpn.strings.StringConstant)elemnt )); } } return retour; }
java.util.List<fr.lip6.move.pnml.hlpn.strings.hlapi.StringConstantHLAPI> function(){ java.util.List<fr.lip6.move.pnml.hlpn.strings.hlapi.StringConstantHLAPI> retour = new ArrayList<fr.lip6.move.pnml.hlpn.strings.hlapi.StringConstantHLAPI>(); for (Term elemnt : getSubterm()) { if(elemnt.getClass().equals(fr.lip6.move.pnml.hlpn.strings.impl.StringConstantImpl.class)){ retour.add(new fr.lip6.move.pnml.hlpn.strings.hlapi.StringConstantHLAPI( (fr.lip6.move.pnml.hlpn.strings.StringConstant)elemnt )); } } return retour; }
/** * This accessor returns a list of encapsulated subelements, only of StringConstantHLAPI kind. * WARNING: this method can create a lot of new objects in memory. */
This accessor returns a list of encapsulated subelements, only of StringConstantHLAPI kind. WARNING: this method can create a lot of new objects in memory
getSubterm_strings_StringConstantHLAPI
{ "repo_name": "lhillah/pnmlframework", "path": "pnmlFw-HLPN/src/fr/lip6/move/pnml/hlpn/lists/hlapi/MakeListHLAPI.java", "license": "epl-1.0", "size": 113889 }
[ "fr.lip6.move.pnml.hlpn.terms.Term", "java.util.ArrayList", "java.util.List" ]
import fr.lip6.move.pnml.hlpn.terms.Term; import java.util.ArrayList; import java.util.List;
import fr.lip6.move.pnml.hlpn.terms.*; import java.util.*;
[ "fr.lip6.move", "java.util" ]
fr.lip6.move; java.util;
123,477
public static <K, V> DefaultedMap<K, V> defaultedMap(final Map<K, V> map, final Factory<? extends V> factory) { if (factory == null) { throw new IllegalArgumentException("Factory must not be null"); } return new DefaultedMap<K, V>(map, FactoryTransformer.factoryTransformer(factory)); }
static <K, V> DefaultedMap<K, V> function(final Map<K, V> map, final Factory<? extends V> factory) { if (factory == null) { throw new IllegalArgumentException(STR); } return new DefaultedMap<K, V>(map, FactoryTransformer.factoryTransformer(factory)); }
/** * Factory method to create a defaulting map. * <p> * The factory specified is called when a missing key is found. * The result will be returned as the result of the map get(key) method. * * @param <K> the key type * @param <V> the value type * @param map the map to decorate, must not be null * @param factory the factory to use to create entries, must not be null * @return a new defaulting map * @throws IllegalArgumentException if map or factory is null * @since 4.0 */
Factory method to create a defaulting map. The factory specified is called when a missing key is found. The result will be returned as the result of the map get(key) method
defaultedMap
{ "repo_name": "gonmarques/commons-collections", "path": "src/main/java/org/apache/commons/collections4/map/DefaultedMap.java", "license": "apache-2.0", "size": 8215 }
[ "java.util.Map", "org.apache.commons.collections4.Factory", "org.apache.commons.collections4.functors.FactoryTransformer" ]
import java.util.Map; import org.apache.commons.collections4.Factory; import org.apache.commons.collections4.functors.FactoryTransformer;
import java.util.*; import org.apache.commons.collections4.*; import org.apache.commons.collections4.functors.*;
[ "java.util", "org.apache.commons" ]
java.util; org.apache.commons;
2,550,648
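A small usage sketch of the factory-backed defaultedMap. To my reading of this decorator, the factory result is returned for a missing key but not inserted into the map (storing the created value is LazyMap's job), which the last line makes visible.

```java
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.collections4.Factory;
import org.apache.commons.collections4.map.DefaultedMap;

public class DefaultedMapExample {
    public static void main(String[] args) {
        Factory<String> factory = () -> "N/A";   // invoked whenever get() misses
        Map<String, String> labels =
                DefaultedMap.defaultedMap(new HashMap<String, String>(), factory);

        labels.put("en", "Hello");
        System.out.println(labels.get("en"));          // Hello
        System.out.println(labels.get("fr"));          // N/A  (factory result)
        System.out.println(labels.containsKey("fr"));  // false: the default was not stored
    }
}
```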
public Collection<Table> getDescendants(Table catalog_tbl) { Database catalog_db = (Database) catalog_tbl.getParent(); Set<Table> ret = new HashSet<Table>(); String key = CatalogKey.createKey(catalog_tbl); boolean contains = this.table_descendants.containsKey(key); if (contains == false) { LOG.warn("Missing " + key + "???"); LOG.warn(this.debug()); // System.out.println(this.table_descendants.keySet()); LOG.warn(CatalogUtil.debug(catalog_db.getTables())); } assert (contains) : "No table descendants for " + key + " (" + contains + ")"; for (String dependent_key : this.table_descendants.get(key)) { Table dependent_tbl = CatalogKey.getFromKey(catalog_db, dependent_key, Table.class); // If the table is missing, that's ok... if (dependent_tbl != null) ret.add(dependent_tbl); } // FOR return (ret); }
Collection<Table> function(Table catalog_tbl) { Database catalog_db = (Database) catalog_tbl.getParent(); Set<Table> ret = new HashSet<Table>(); String key = CatalogKey.createKey(catalog_tbl); boolean contains = this.table_descendants.containsKey(key); if (contains == false) { LOG.warn(STR + key + "???"); LOG.warn(this.debug()); LOG.warn(CatalogUtil.debug(catalog_db.getTables())); } assert (contains) : STR + key + STR + contains + ")"; for (String dependent_key : this.table_descendants.get(key)) { Table dependent_tbl = CatalogKey.getFromKey(catalog_db, dependent_key, Table.class); if (dependent_tbl != null) ret.add(dependent_tbl); } return (ret); }
/** * Return an unordered set of foreign key descendant tables for the given table * @param catalog_tbl * @return */
Return an unordered set of foreign key descendant tables for the given table
getDescendants
{ "repo_name": "apavlo/h-store", "path": "src/frontend/edu/brown/catalog/DependencyUtil.java", "license": "gpl-3.0", "size": 11335 }
[ "java.util.Collection", "java.util.HashSet", "java.util.Set", "org.voltdb.catalog.Database", "org.voltdb.catalog.Table" ]
import java.util.Collection; import java.util.HashSet; import java.util.Set; import org.voltdb.catalog.Database; import org.voltdb.catalog.Table;
import java.util.*; import org.voltdb.catalog.*;
[ "java.util", "org.voltdb.catalog" ]
java.util; org.voltdb.catalog;
350,665
public void testNPEinTriggerFire() throws SQLException { Statement s = createStatement(); String sql = " CREATE TABLE TRADE(ID INT PRIMARY KEY GENERATED "+ "BY DEFAULT AS IDENTITY (START WITH 1000), BUYID INT NOT NULL," + "QTY FLOAT(2) NOT NULL)"; s.executeUpdate(sql); sql = "CREATE TABLE TOTAL(BUYID INT NOT NULL, TOTALQTY FLOAT(2) NOT NULL)"; s.executeUpdate(sql); sql = "CREATE TRIGGER TRADE_INSERT AFTER INSERT ON TRADE REFERENCING "+ "NEW AS NEWROW FOR EACH ROW MODE DB2SQL UPDATE TOTAL SET TOTALQTY "+ "= NEWROW.QTY WHERE BUYID = NEWROW.BUYID"; s.executeUpdate(sql); s.executeUpdate("INSERT INTO TOTAL VALUES (1, 0)"); //Before DERBY-3718 was fixed, following would cause NPE in 10.4 and //trunk. This happened because starting 10.4, rather than saving the //TypeId of the DataTypeDescriptor (in writeExternal method), we rely //on reconstructing TypeId (in readExternal) by using the Types.xxx //information(DERBY-2917 revision r619995). This approach does not //work for internal datatype REF, because we use Types.OTHER for REF //datatypes. Types.OTHER is not enough to know that the type to be //constructed is REF. //To get around the problem, for reconstructing TypeId, we will //use the type name rather than Types.xxx. Since we have the correct //type name for internal datatype REF, we can successfully reconstruct //REF datatype. s.executeUpdate("INSERT INTO TRADE VALUES(1, 1, 10)"); commit(); }
void function() throws SQLException { Statement s = createStatement(); String sql = STR+ STR + STR; s.executeUpdate(sql); sql = STR; s.executeUpdate(sql); sql = STR+ STR+ STR; s.executeUpdate(sql); s.executeUpdate(STR); s.executeUpdate(STR); commit(); }
/** * Test for DERBY-3718 NPE when a trigger is fired * * @throws SQLException */
Test for DERBY-3718 NPE when a trigger is fired
testNPEinTriggerFire
{ "repo_name": "apache/derby", "path": "java/org.apache.derby.tests/org/apache/derbyTesting/functionTests/tests/lang/TriggerTest.java", "license": "apache-2.0", "size": 118077 }
[ "java.sql.SQLException", "java.sql.Statement" ]
import java.sql.SQLException; import java.sql.Statement;
import java.sql.*;
[ "java.sql" ]
java.sql;
877,895
public CompletableFuture<Void> closePoolLedger( ) throws IndyException { return closePoolLedger(this); }
CompletableFuture<Void> function( ) throws IndyException { return closePoolLedger(this); }
/** * Closes opened pool ledger, opened nodes connections and frees allocated resources. * * @return A future that does not resolve a value. * @throws IndyException Thrown if an error occurs when calling the underlying SDK. */
Closes opened pool ledger, opened nodes connections and frees allocated resources
closePoolLedger
{ "repo_name": "peacekeeper/indy-sdk", "path": "wrappers/java/src/main/java/org/hyperledger/indy/sdk/pool/Pool.java", "license": "apache-2.0", "size": 7891 }
[ "java.util.concurrent.CompletableFuture", "org.hyperledger.indy.sdk.IndyException" ]
import java.util.concurrent.CompletableFuture; import org.hyperledger.indy.sdk.IndyException;
import java.util.concurrent.*; import org.hyperledger.indy.sdk.*;
[ "java.util", "org.hyperledger.indy" ]
java.util; org.hyperledger.indy;
1,214,328
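A lifecycle sketch around closePoolLedger, assuming the companion openPoolLedger call from the same wrapper and an already-created pool configuration named "myPool" (both are assumptions here, as is whatever protocol-version setup a given ledger needs).

```java
import java.util.concurrent.CompletableFuture;
import org.hyperledger.indy.sdk.pool.Pool;

public class PoolLifecycleSketch {
    public static void main(String[] args) throws Exception {
        // "myPool" is a placeholder config name assumed to exist already.
        Pool pool = Pool.openPoolLedger("myPool", null).get();
        try {
            // ... submit ledger requests here ...
        } finally {
            CompletableFuture<Void> closing = pool.closePoolLedger();
            closing.get();   // resolves with no value once connections and resources are released
        }
    }
}
```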
public int getQtiVersion() { if (!QTIVersion.isValid(qtiVersion)) { qtiVersion = QTIVersion.VERSION_1_2; // default } log.debug("xml controller getQtiVersion()=" + qtiVersion); return qtiVersion; }
int function() { if (!QTIVersion.isValid(qtiVersion)) { qtiVersion = QTIVersion.VERSION_1_2; } log.debug(STR + qtiVersion); return qtiVersion; }
/** * Always returns a valid QTI version. * @return */
Always returns a valid QTI version
getQtiVersion
{ "repo_name": "bzhouduke123/sakai", "path": "samigo/samigo-app/src/java/org/sakaiproject/tool/assessment/ui/bean/qti/XMLController.java", "license": "apache-2.0", "size": 10352 }
[ "org.sakaiproject.tool.assessment.qti.constants.QTIVersion" ]
import org.sakaiproject.tool.assessment.qti.constants.QTIVersion;
import org.sakaiproject.tool.assessment.qti.constants.*;
[ "org.sakaiproject.tool" ]
org.sakaiproject.tool;
504,243
public void upgradeToLiteral() { RegisterSpecList oldSources = insn.getSources(); insn = insn.withSourceLiteral(); getBlock().getParent().onSourcesChanged(this, oldSources); }
void function() { RegisterSpecList oldSources = insn.getSources(); insn = insn.withSourceLiteral(); getBlock().getParent().onSourcesChanged(this, oldSources); }
/** * Upgrades this insn to a version that represents the constant source * literally. If the upgrade is not possible, this does nothing. * * @see Insn#withSourceLiteral */
Upgrades this insn to a version that represents the constant source literally. If the upgrade is not possible, this does nothing
upgradeToLiteral
{ "repo_name": "alibaba/atlas", "path": "atlas-gradle-plugin/dexpatch/src/main/java/com/taobao/android/dx/ssa/NormalSsaInsn.java", "license": "apache-2.0", "size": 6666 }
[ "com.taobao.android.dx.rop.code.RegisterSpecList" ]
import com.taobao.android.dx.rop.code.RegisterSpecList;
import com.taobao.android.dx.rop.code.*;
[ "com.taobao.android" ]
com.taobao.android;
2,725,210
public DataNode setEnergy_error(IDataset energy_error);
DataNode function(IDataset energy_error);
/** * energy standard deviation * <p> * <b>Type:</b> NX_FLOAT * <b>Units:</b> NX_ENERGY * </p> * * @param energy_error the energy_error */
energy standard deviation Type: NX_FLOAT Units: NX_ENERGY
setEnergy_error
{ "repo_name": "jamesmudd/dawnsci", "path": "org.eclipse.dawnsci.nexus/autogen/org/eclipse/dawnsci/nexus/NXmonochromator.java", "license": "epl-1.0", "size": 8669 }
[ "org.eclipse.dawnsci.analysis.api.tree.DataNode", "org.eclipse.january.dataset.IDataset" ]
import org.eclipse.dawnsci.analysis.api.tree.DataNode; import org.eclipse.january.dataset.IDataset;
import org.eclipse.dawnsci.analysis.api.tree.*; import org.eclipse.january.dataset.*;
[ "org.eclipse.dawnsci", "org.eclipse.january" ]
org.eclipse.dawnsci; org.eclipse.january;
334,016
@Override public Query updateQuery(Query query, Map<String, Object> parameters) { throw new UnsupportedOperationException(); }
Query function(Query query, Map<String, Object> parameters) { throw new UnsupportedOperationException(); }
/** * This method implements the update operation of the resource's query. * * @param query Instance of the resource's query that is going to be updated. * This instance must have an id to identify the updatable data. * @param parameters Some parameters that could be necessary * in order to update an instance of the resource's query. * @return Updated instance of the resource's query. */
This method implements the update operation of the resource's query
updateQuery
{ "repo_name": "kevchuk/HolandaCatalinaFw", "path": "src/main/java/org/hcjf/layers/crud/CrudLayer.java", "license": "apache-2.0", "size": 11070 }
[ "java.util.Map", "org.hcjf.layers.query.Query" ]
import java.util.Map; import org.hcjf.layers.query.Query;
import java.util.*; import org.hcjf.layers.query.*;
[ "java.util", "org.hcjf.layers" ]
java.util; org.hcjf.layers;
426,726
@SuppressWarnings("unchecked") private void gatherData(SplashScreen splash) { // if it's there but a file, delete it if (Files.exists(workingDir) && !Files.isDirectory(workingDir)) { try { Files.delete(workingDir); } catch (IOException e) { e.printStackTrace(); JOptionPane.showMessageDialog(null, workingDir.toString() + " exists but is not a directory and can't be deleted, please delete it manually and restart FurryCrossposter", "FurryCrossposter", JOptionPane.ERROR_MESSAGE); System.exit(-1); } } // if it's not there or was a file and therefore deleted, make it if (!Files.exists(workingDir)) { try { Files.createDirectory(workingDir); } catch (IOException e) { e.printStackTrace(); JOptionPane.showMessageDialog(null, "Could not create " + workingDir.toString(), "FurryCrossposter", JOptionPane.ERROR_MESSAGE); System.exit(-1); } } // now that we have a working dir, download latest driver System.setProperty("wdm.targetPath", workingDir.toString()); WebDriverManager.chromedriver().setup(); // ChromeDriverManager.getInstance().setup(Architecture // .valueOf("x" + System.getProperty("sun.arch.data.model")), "2.38"); // if it doesn't contain the tag file, first start wizard did not run if (!Files.exists(workingDir.resolve("tags"))) { // data dir does not exist or is empty, first start wizard splash.stop(); new FirstStart(workingDir); } TreeSet<Tag> tagsSet = new TreeSet<>(); try (FileInputStream fis = new FileInputStream(workingDir.resolve("tags").toString())) { try (ObjectInputStream ois = new ObjectInputStream(fis)) { tagsSet = (TreeSet<Tag>)ois.readObject(); } } catch (Exception e) { e.printStackTrace(); JOptionPane.showMessageDialog(null, "Could not open " + workingDir.resolve("tags") + ", " + e.getMessage(), "FurryCrossposter", JOptionPane.ERROR_MESSAGE); System.exit(-1); } // reform to map for easier searching FurryCrossposter.tags = new TreeMap<>(); for (Tag tag : tagsSet) { FurryCrossposter.tags.put(tag.getName(), tag); } System.out.println("loaded " + FurryCrossposter.tags.size() + " tags"); Properties probs = new Properties(); try (InputStream in = new FileInputStream(workingDir.resolve("FurryCrossposter.properties").toString())) { probs.load(in); String ffprofile = probs.getProperty("ProfileFolder"); if (ffprofile.equals("generic")) { FurryCrossposter.chromeProfile = null; } else { FurryCrossposter.chromeProfile = Paths.get(probs.getProperty("ProfileFolder")); } } catch (IOException e) { e.printStackTrace(); JOptionPane.showMessageDialog(null, "Could not open " + workingDir.resolve("FurryCrossposter.properties") + ", " + e.getMessage(), "FurryCrossposter", JOptionPane.ERROR_MESSAGE); System.exit(-1); } if (FurryCrossposter.chromeProfile != null && (!Files.exists(FurryCrossposter.chromeProfile) || !Files.isDirectory(FurryCrossposter.chromeProfile))) { JOptionPane.showMessageDialog(null, "Chrome profile folder " + FurryCrossposter.chromeProfile + " not found, please delete content of " + workingDir + " to run first start wizard again", "FurryCrossposter", JOptionPane.ERROR_MESSAGE); System.exit(-1); } if (FurryCrossposter.chromeProfile == null) { System.out.println("using generic chrome profile"); } else { System.out.println("loaded chrome profile path " + FurryCrossposter.chromeProfile); } }
@SuppressWarnings(STR) void function(SplashScreen splash) { if (Files.exists(workingDir) && !Files.isDirectory(workingDir)) { try { Files.delete(workingDir); } catch (IOException e) { e.printStackTrace(); JOptionPane.showMessageDialog(null, workingDir.toString() + STR, STR, JOptionPane.ERROR_MESSAGE); System.exit(-1); } } if (!Files.exists(workingDir)) { try { Files.createDirectory(workingDir); } catch (IOException e) { e.printStackTrace(); JOptionPane.showMessageDialog(null, STR + workingDir.toString(), STR, JOptionPane.ERROR_MESSAGE); System.exit(-1); } } System.setProperty(STR, workingDir.toString()); WebDriverManager.chromedriver().setup(); if (!Files.exists(workingDir.resolve("tags"))) { splash.stop(); new FirstStart(workingDir); } TreeSet<Tag> tagsSet = new TreeSet<>(); try (FileInputStream fis = new FileInputStream(workingDir.resolve("tags").toString())) { try (ObjectInputStream ois = new ObjectInputStream(fis)) { tagsSet = (TreeSet<Tag>)ois.readObject(); } } catch (Exception e) { e.printStackTrace(); JOptionPane.showMessageDialog(null, STR + workingDir.resolve("tags") + STR + e.getMessage(), STR, JOptionPane.ERROR_MESSAGE); System.exit(-1); } FurryCrossposter.tags = new TreeMap<>(); for (Tag tag : tagsSet) { FurryCrossposter.tags.put(tag.getName(), tag); } System.out.println(STR + FurryCrossposter.tags.size() + STR); Properties probs = new Properties(); try (InputStream in = new FileInputStream(workingDir.resolve(STR).toString())) { probs.load(in); String ffprofile = probs.getProperty(STR); if (ffprofile.equals(STR)) { FurryCrossposter.chromeProfile = null; } else { FurryCrossposter.chromeProfile = Paths.get(probs.getProperty(STR)); } } catch (IOException e) { e.printStackTrace(); JOptionPane.showMessageDialog(null, STR + workingDir.resolve(STR) + STR + e.getMessage(), STR, JOptionPane.ERROR_MESSAGE); System.exit(-1); } if (FurryCrossposter.chromeProfile != null && (!Files.exists(FurryCrossposter.chromeProfile) !Files.isDirectory(FurryCrossposter.chromeProfile))) { JOptionPane.showMessageDialog(null, STR + FurryCrossposter.chromeProfile + STR + workingDir + STR, STR, JOptionPane.ERROR_MESSAGE); System.exit(-1); } if (FurryCrossposter.chromeProfile == null) { System.out.println(STR); } else { System.out.println(STR + FurryCrossposter.chromeProfile); } }
/** * Tries to gather the data from the user directory, or starts the first-run wizard if none is found. */
Tries to gather the data from the user directory, or starts the first-run wizard if none is found
gatherData
{ "repo_name": "Akhlys/Furry-Crossposter", "path": "src/klaue/furrycrossposter/FurryCrossposter.java", "license": "mit", "size": 4692 }
[ "io.github.bonigarcia.wdm.WebDriverManager", "java.io.FileInputStream", "java.io.IOException", "java.io.InputStream", "java.io.ObjectInputStream", "java.nio.file.Files", "java.nio.file.Paths", "java.util.Properties", "java.util.TreeMap", "java.util.TreeSet", "javax.swing.JOptionPane" ]
import io.github.bonigarcia.wdm.WebDriverManager; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.ObjectInputStream; import java.nio.file.Files; import java.nio.file.Paths; import java.util.Properties; import java.util.TreeMap; import java.util.TreeSet; import javax.swing.JOptionPane;
import io.github.bonigarcia.wdm.*; import java.io.*; import java.nio.file.*; import java.util.*; import javax.swing.*;
[ "io.github.bonigarcia", "java.io", "java.nio", "java.util", "javax.swing" ]
io.github.bonigarcia; java.io; java.nio; java.util; javax.swing;
708,650
public void setPullRequestTitle(RequestData title) { mPullRequest.setVisibility(VISIBLE); mPullRequest.setText(title.getPullRequestTitle()); }
void function(RequestData title) { mPullRequest.setVisibility(VISIBLE); mPullRequest.setText(title.getPullRequestTitle()); }
/** * Sets the title for pull request * * @param title Title for Pull Request */
Sets the title for pull request
setPullRequestTitle
{ "repo_name": "dkhmelenko/Varis-Android", "path": "app/src/main/java/com/khmelenko/lab/varis/widget/BuildView.java", "license": "apache-2.0", "size": 5236 }
[ "com.khmelenko.lab.varis.network.response.RequestData" ]
import com.khmelenko.lab.varis.network.response.RequestData;
import com.khmelenko.lab.varis.network.response.*;
[ "com.khmelenko.lab" ]
com.khmelenko.lab;
2,739,627
private static String makeFulldataRefIds(ReferenceList refs) { return StringUtil.join(refs.getRefIds(), " "); }
static String function(ReferenceList refs) { return StringUtil.join(refs.getRefIds(), " "); }
/** * Get the item ids from a ReferenceList and make a String for the fulldata ReferenceList. */
Get the item ids from a ReferenceList and make a String for the fulldata ReferenceList
makeFulldataRefIds
{ "repo_name": "joshkh/intermine", "path": "intermine/integrate/main/src/org/intermine/xml/full/ItemHelper.java", "license": "lgpl-2.1", "size": 5180 }
[ "org.intermine.metadata.StringUtil" ]
import org.intermine.metadata.StringUtil;
import org.intermine.metadata.*;
[ "org.intermine.metadata" ]
org.intermine.metadata;
2,114,554
public Concept getFalseConcept();
Concept function();
/** * Returns the FALSE concept * * @return false concept * @should return the false concept */
Returns the FALSE concept
getFalseConcept
{ "repo_name": "preethi29/openmrs-core", "path": "api/src/main/java/org/openmrs/api/ConceptService.java", "license": "mpl-2.0", "size": 72694 }
[ "org.openmrs.Concept" ]
import org.openmrs.Concept;
import org.openmrs.*;
[ "org.openmrs" ]
org.openmrs;
806,024
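A hedged usage sketch: inside a running OpenMRS instance, the service implementing getFalseConcept() is usually obtained through Context.getConceptService(), which presupposes an initialised context and open session (those prerequisites are assumed, not shown).

```java
import org.openmrs.Concept;
import org.openmrs.api.ConceptService;
import org.openmrs.api.context.Context;

public class BooleanConceptExample {
    public static void main(String[] args) {
        // Assumes an initialised OpenMRS context with an open session.
        ConceptService conceptService = Context.getConceptService();
        Concept falseConcept = conceptService.getFalseConcept();
        System.out.println("FALSE concept id: " + falseConcept.getConceptId());
    }
}
```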
public BindingSet convert(Document bindingSet) throws BindingSetConversionException;
BindingSet function(Document bindingSet) throws BindingSetConversionException;
/** * Converts a MongoDB model into a {@link BindingSet}. * * @param bindingSet - The bson that will be converted. (not null) * @return The BindingSet created from a Mongo Bson object. * @throws BindingSetConversionException The Bson was unable to be * converted. This will happen if one of the values could not be * converted into a BindingSet. */
Converts a MongoDB model into a <code>BindingSet</code>
convert
{ "repo_name": "kchilton2/incubator-rya", "path": "extras/rya.indexing.pcj/src/main/java/org/apache/rya/indexing/pcj/storage/mongo/MongoBindingSetConverter.java", "license": "apache-2.0", "size": 2364 }
[ "org.bson.Document", "org.eclipse.rdf4j.query.BindingSet" ]
import org.bson.Document; import org.eclipse.rdf4j.query.BindingSet;
import org.bson.*; import org.eclipse.rdf4j.query.*;
[ "org.bson", "org.eclipse.rdf4j" ]
org.bson; org.eclipse.rdf4j;
2,804,765
@Override public InputStream getInputStream() throws IOException, IllegalStateException { Preconditions.checkState(!this.read, "InputStream has already been read - " + "do not use InputStreamResource if a stream needs to be read multiple times"); this.read = true; return this.inputStream; }
InputStream function() throws IOException, IllegalStateException { Preconditions.checkState(!this.read, STR + STR); this.read = true; return this.inputStream; }
/** * This implementation throws IllegalStateException if attempting to * read the underlying stream multiple times. */
This implementation throws IllegalStateException if attempting to read the underlying stream multiple times
getInputStream
{ "repo_name": "proliming/commons", "path": "commons-io/src/main/java/com/proliming/commons/io/InputStreamResource.java", "license": "apache-2.0", "size": 3922 }
[ "com.google.common.base.Preconditions", "java.io.IOException", "java.io.InputStream" ]
import com.google.common.base.Preconditions; import java.io.IOException; import java.io.InputStream;
import com.google.common.base.*; import java.io.*;
[ "com.google.common", "java.io" ]
com.google.common; java.io;
559,886
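A usage sketch assuming the class wraps an InputStream handed to its constructor, as its Spring namesake does; that constructor is an assumption, not confirmed by the snippet. The guard allows exactly one call to getInputStream() and rejects any further attempt.

```java
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import com.proliming.commons.io.InputStreamResource;

public class SingleReadDemo {
    public static void main(String[] args) throws Exception {
        InputStream in = new ByteArrayInputStream("payload".getBytes(StandardCharsets.UTF_8));
        InputStreamResource resource = new InputStreamResource(in);   // assumed wrapping constructor

        resource.getInputStream().close();   // first (and only permitted) read

        try {
            resource.getInputStream();        // second attempt
        } catch (IllegalStateException expected) {
            System.out.println("rejected: " + expected.getMessage());
        }
    }
}
```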
private void handleIllegalMove(String move) { Alert dialog = new ErrorAlert(stage, "Illegal engine move.", "The engine \"" + searchEngine.getName() + "\" proposed an illegal move (" + move + ")."); dialog.showAndWait(); }
void function(String move) { Alert dialog = new ErrorAlert(stage, STR, STRSTR\STR + move + ")."); dialog.showAndWait(); }
/** * Displays an error alert with the details. * * @param move The illegal move. */
Displays an error alert with the details
handleIllegalMove
{ "repo_name": "ViktorC/DETROID", "path": "src/main/java/net/viktorc/detroid/framework/gui/controllers/MainController.java", "license": "gpl-2.0", "size": 38769 }
[ "net.viktorc.detroid.framework.gui.dialogs.ErrorAlert" ]
import net.viktorc.detroid.framework.gui.dialogs.ErrorAlert;
import net.viktorc.detroid.framework.gui.dialogs.*;
[ "net.viktorc.detroid" ]
net.viktorc.detroid;
1,822,380
protected void tryHandleCallbacks(Callback... callbacks) throws SaslException, UnsupportedCallbackException { try { callbackHandler.handle(callbacks); } catch (SaslException | UnsupportedCallbackException e) { throw e; } catch (Throwable t) { throw log.saslCallbackHandlerFailedForUnknownReason(getMechanismName(), t); } } public void init() {}
void function(Callback... callbacks) throws SaslException, UnsupportedCallbackException { try { callbackHandler.handle(callbacks); } catch (SaslException UnsupportedCallbackException e) { throw e; } catch (Throwable t) { throw log.saslCallbackHandlerFailedForUnknownReason(getMechanismName(), t); } } public void init() {}
/** * Handle callbacks, wrapping exceptions as needed. * * @param callbacks the callbacks to handle * @throws SaslException if a callback failed * @throws UnsupportedCallbackException if a callback isn't supported */
Handle callbacks, wrapping exceptions as needed
tryHandleCallbacks
{ "repo_name": "girirajsharma/wildfly-elytron", "path": "src/main/java/org/wildfly/security/sasl/util/AbstractSaslParticipant.java", "license": "apache-2.0", "size": 9582 }
[ "javax.security.auth.callback.Callback", "javax.security.auth.callback.UnsupportedCallbackException", "javax.security.sasl.SaslException" ]
import javax.security.auth.callback.Callback; import javax.security.auth.callback.UnsupportedCallbackException; import javax.security.sasl.SaslException;
import javax.security.auth.callback.*; import javax.security.sasl.*;
[ "javax.security" ]
javax.security;
289,614
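tryHandleCallbacks delegates to a standard javax.security.auth.callback.CallbackHandler. A toy handler of the kind it would invoke is sketched below; a real deployment would resolve the name and password from configuration or a credential store rather than hard-coding them, and the class name is made up.

```java
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.PasswordCallback;
import javax.security.auth.callback.UnsupportedCallbackException;

public class FixedCredentialsHandler implements CallbackHandler {
    @Override
    public void handle(Callback[] callbacks) throws UnsupportedCallbackException {
        for (Callback callback : callbacks) {
            if (callback instanceof NameCallback) {
                ((NameCallback) callback).setName("alice");
            } else if (callback instanceof PasswordCallback) {
                ((PasswordCallback) callback).setPassword("secret".toCharArray());
            } else {
                // Anything the handler cannot satisfy is reported back to the mechanism.
                throw new UnsupportedCallbackException(callback);
            }
        }
    }
}
```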
public void testCombinedEvents003_01() { logWriter.println("==> testCombinedEvents003_01 started"); byte[] EXPECTED_EVENTS_ARRAY = { JDWPConstants.EventKind.METHOD_ENTRY, JDWPConstants.EventKind.BREAKPOINT, JDWPConstants.EventKind.SINGLE_STEP, JDWPConstants.EventKind.METHOD_EXIT}; String debuggeeMainThreadName = synchronizer.receiveMessage(); long debuggeeClassID = debuggeeWrapper.vmMirror .getClassID(debuggeeSignature); logWriter.println("=> debuggeeClassID = " + debuggeeClassID); long threadID = debuggeeWrapper.vmMirror.getThreadID(debuggeeMainThreadName); logWriter.println("=> threadID = " + threadID); long runMethodID = debuggeeWrapper.vmMirror.getMethodID(debuggeeClassID, "run"); logWriter.println("=> runMethodID = " + runMethodID); long dummyMethodID = debuggeeWrapper.vmMirror.getMethodID(debuggeeClassID, "dummyMethod"); logWriter.println("=> dummyMethodID = " + dummyMethodID); logWriter.println(""); logWriter.println("=> Info for tested method '" + methodForEvents + "':"); long testedMethodID = debuggeeWrapper.vmMirror.getMethodID(debuggeeClassID, methodForEvents); if (testedMethodID == -1 ) { String failureMessage = "## FAILURE: Can NOT get MethodID for class '" + getDebuggeeClassName() + "'; Method name = " + methodForEvents; printErrorAndFail(failureMessage); } logWriter.println("=> testedMethodID = " + testedMethodID); printMethodLineTable(debuggeeClassID, null, methodForEvents); // set requests for events that will be checked logWriter.println(""); logWriter.println("=> Set request for BREAKPOINT event in debuggee: " + getDebuggeeClassName() + ", beginning of method: " + methodForEvents); Location combinedEventsLocation = getMethodEntryLocation(debuggeeClassID, methodForEvents); if ( combinedEventsLocation == null ) { String failureMessage = "## FAILURE: Can NOT get MethodEntryLocation for method '" + methodForEvents + "'"; printErrorAndFail(failureMessage); } ReplyPacket reply = debuggeeWrapper.vmMirror.setBreakpoint(combinedEventsLocation); int breakpointRequestID = reply.getNextValueAsInt(); logWriter.println("=> Breakpoint requestID = " + breakpointRequestID); logWriter.println("=> Set request for METHOD_ENTRY event in debuggee: " + getDebuggeeClassName()); reply = debuggeeWrapper.vmMirror .setMethodEntry(methodEntryClassNameRegexp); checkReplyPacket(reply, "Set METHOD_ENTRY event"); int methodEntryRequestID = reply.getNextValueAsInt(); logWriter.println("=> METHOD_ENTRY requestID = " + methodEntryRequestID); logWriter.println("=> Set request for METHOD_EXIT event in debuggee: " + getDebuggeeClassName()); reply = debuggeeWrapper.vmMirror .setMethodExit(methodEntryClassNameRegexp); checkReplyPacket(reply, "Set METHOD_EXIT event"); int methodExitRequestID = reply.getNextValueAsInt(); logWriter.println("=> METHOD_EXIT requestID = " + methodExitRequestID); logWriter.println("=> Set request for SINGLE_STEP event in class " + debuggeeSignature); CommandPacket setRequestCommand = new CommandPacket( JDWPCommands.EventRequestCommandSet.CommandSetID, JDWPCommands.EventRequestCommandSet.SetCommand); setRequestCommand .setNextValueAsByte(JDWPConstants.EventKind.SINGLE_STEP); setRequestCommand.setNextValueAsByte(JDWPConstants.SuspendPolicy.ALL); setRequestCommand.setNextValueAsInt(2); setRequestCommand.setNextValueAsByte(EventMod.ModKind.Step); setRequestCommand.setNextValueAsThreadID(threadID); setRequestCommand.setNextValueAsInt(JDWPConstants.StepSize.MIN); setRequestCommand.setNextValueAsInt(JDWPConstants.StepDepth.INTO); 
setRequestCommand.setNextValueAsByte(EventMod.ModKind.ClassOnly); setRequestCommand.setNextValueAsReferenceTypeID(debuggeeClassID); ReplyPacket setRequestReply = debuggeeWrapper.vmMirror .performCommand(setRequestCommand); checkReplyPacket(setRequestReply, "EventRequest::Set command"); int stepRequestID = setRequestReply.getNextValueAsInt(); logWriter.println("=> SINGLE_STEP requestID = " + stepRequestID); logWriter.println(""); logWriter.println("=> Send SGNL_CONTINUE signal to debuggee..."); synchronizer.sendMessage(JPDADebuggeeSynchronizer.SGNL_CONTINUE); logWriter.println("=> Try to receive and check combined events: " + " METHOD_ENTRY, SINGLE_STEP, BREAKPOINT, METHOD_EXIT events; ignore single SINGLE_STEP event"); receiveAndCheckEvents(EXPECTED_EVENTS_ARRAY, combinedEventsLocation); if ( eventVmDeathReceived ) { logWriter.println("==> testCombinedEvents001 is FINISHing as VM_DEATH is received!"); return; } logWriter.println(""); logWriter.println("=> Clean request for METHOD_ENTRY event..."); ReplyPacket clearReply = debuggeeWrapper.vmMirror.clearEvent( JDWPConstants.EventKind.METHOD_ENTRY, methodEntryRequestID); checkReplyPacket(clearReply, "EventRequest::Clear"); logWriter.println(""); logWriter.println("=> Clean request for SINGLE_STEP event..."); clearReply = debuggeeWrapper.vmMirror.clearEvent( JDWPConstants.EventKind.SINGLE_STEP, stepRequestID); checkReplyPacket(clearReply, "EventRequest::Clear"); logWriter.println("=> Resume debuggee"); debuggeeWrapper.vmMirror.resume(); // check that no other events, except VM_DEATH, will be received checkVMDeathEvent(); logWriter.println(""); logWriter.println("==> testCombinedEvents003_01 PASSED"); }
void function() { logWriter.println(STR); byte[] EXPECTED_EVENTS_ARRAY = { JDWPConstants.EventKind.METHOD_ENTRY, JDWPConstants.EventKind.BREAKPOINT, JDWPConstants.EventKind.SINGLE_STEP, JDWPConstants.EventKind.METHOD_EXIT}; String debuggeeMainThreadName = synchronizer.receiveMessage(); long debuggeeClassID = debuggeeWrapper.vmMirror .getClassID(debuggeeSignature); logWriter.println(STR + debuggeeClassID); long threadID = debuggeeWrapper.vmMirror.getThreadID(debuggeeMainThreadName); logWriter.println(STR + threadID); long runMethodID = debuggeeWrapper.vmMirror.getMethodID(debuggeeClassID, "run"); logWriter.println(STR + runMethodID); long dummyMethodID = debuggeeWrapper.vmMirror.getMethodID(debuggeeClassID, STR); logWriter.println(STR + dummyMethodID); logWriter.println(STR=> Info for tested method 'STR':STR## FAILURE: Can NOT get MethodID for class 'STR'; Method name = STR=> testedMethodID = " + testedMethodID); printMethodLineTable(debuggeeClassID, null, methodForEvents); logWriter.println(STR=> Set request for BREAKPOINT event in debuggee: STR, beginning of method: STR## FAILURE: Can NOT get MethodEntryLocation for method 'STR'STR=> Breakpoint requestID = STR=> Set request for METHOD_ENTRY event in debuggee: STRSet METHOD_ENTRY eventSTR=> METHOD_ENTRY requestID = STR=> Set request for METHOD_EXIT event in debuggee: STRSet METHOD_EXIT eventSTR=> METHOD_EXIT requestID = STR=> Set request for SINGLE_STEP event in class STREventRequest::Set commandSTR=> SINGLE_STEP requestID = " + stepRequestID); logWriter.println(STR=> Send SGNL_CONTINUE signal to debuggee...STR=> Try to receive and check combined events: STR METHOD_ENTRY, SINGLE_STEP, BREAKPOINT, METHOD_EXIT events; ignore single SINGLE_STEP eventSTR==> testCombinedEvents001 is FINISHing as VM_DEATH is received!"); return; } logWriter.println(STR=> Clean request for METHOD_ENTRY event...STREventRequest::Clear"); logWriter.println(STR=> Clean request for SINGLE_STEP event...STREventRequest::ClearSTR=> Resume debuggee"); debuggeeWrapper.vmMirror.resume(); checkVMDeathEvent(); logWriter.println(STR==> testCombinedEvents003_01 PASSED"); }
/** * This test case checks events: * METHOD_ENTRY, SINGLE_STEP, BREAKPOINT, METHOD_EXIT * for empty method. */
This test case checks events: METHOD_ENTRY, SINGLE_STEP, BREAKPOINT, METHOD_EXIT for empty method
testCombinedEvents003_01
{ "repo_name": "s20121035/rk3288_android5.1_repo", "path": "external/apache-harmony/jdwp/src/test/java/org/apache/harmony/jpda/tests/jdwp/Events/CombinedEvents003Test.java", "license": "gpl-3.0", "size": 19190 }
[ "org.apache.harmony.jpda.tests.framework.jdwp.JDWPConstants" ]
import org.apache.harmony.jpda.tests.framework.jdwp.JDWPConstants;
import org.apache.harmony.jpda.tests.framework.jdwp.*;
[ "org.apache.harmony" ]
org.apache.harmony;
1,029,942
public AuthorizationResponse authorize() { return authorize(new AuthorizationRequest()); }
AuthorizationResponse function() { return authorize(new AuthorizationRequest()); }
/** * Query the server for all permissions. * * @return an {@link AuthorizationResponse} with a RPT holding all granted permissions * @throws AuthorizationDeniedException in case the request was denied by the server */
Query the server for all permissions
authorize
{ "repo_name": "keycloak/keycloak", "path": "authz/client/src/main/java/org/keycloak/authorization/client/resource/AuthorizationResource.java", "license": "apache-2.0", "size": 4042 }
[ "org.keycloak.representations.idm.authorization.AuthorizationRequest" ]
import org.keycloak.representations.idm.authorization.AuthorizationRequest;
import org.keycloak.representations.idm.authorization.*;
[ "org.keycloak.representations" ]
org.keycloak.representations;
1,610,506
private static void placePhiFunctions (SsaMethod ssaMeth, LocalVariableInfo localInfo, int threshold) { ArrayList<SsaBasicBlock> ssaBlocks; int regCount; int blockCount; ssaBlocks = ssaMeth.getBlocks(); blockCount = ssaBlocks.size(); regCount = ssaMeth.getRegCount() - threshold; DomFront df = new DomFront(ssaMeth); DomFront.DomInfo[] domInfos = df.run(); // Bit set of registers vs block index "definition sites" BitSet[] defsites = new BitSet[regCount]; // Bit set of registers vs block index "phi placement sites" BitSet[] phisites = new BitSet[regCount]; for (int i = 0; i < regCount; i++) { defsites[i] = new BitSet(blockCount); phisites[i] = new BitSet(blockCount); } for (int bi = 0, s = ssaBlocks.size(); bi < s; bi++) { SsaBasicBlock b = ssaBlocks.get(bi); for (SsaInsn insn : b.getInsns()) { RegisterSpec rs = insn.getResult(); if (rs != null && rs.getReg() - threshold >= 0) { defsites[rs.getReg() - threshold].set(bi); } } } if (DEBUG) { System.out.println("defsites"); for (int i = 0; i < regCount; i++) { StringBuilder sb = new StringBuilder(); sb.append('v').append(i).append(": "); sb.append(defsites[i].toString()); System.out.println(sb); } } BitSet worklist; for (int reg = 0, s = regCount; reg < s; reg++) { int workBlockIndex; worklist = (BitSet) (defsites[reg].clone()); while (0 <= (workBlockIndex = worklist.nextSetBit(0))) { worklist.clear(workBlockIndex); IntIterator dfIterator = domInfos[workBlockIndex].dominanceFrontiers.iterator(); while (dfIterator.hasNext()) { int dfBlockIndex = dfIterator.next(); if (!phisites[reg].get(dfBlockIndex)) { phisites[reg].set(dfBlockIndex); int tReg = reg + threshold; RegisterSpec rs = localInfo.getStarts(dfBlockIndex).get(tReg); if (rs == null) { ssaBlocks.get(dfBlockIndex).addPhiInsnForReg(tReg); } else { ssaBlocks.get(dfBlockIndex).addPhiInsnForReg(rs); } if (!defsites[reg].get(dfBlockIndex)) { worklist.set(dfBlockIndex); } } } } } if (DEBUG) { System.out.println("phisites"); for (int i = 0; i < regCount; i++) { StringBuilder sb = new StringBuilder(); sb.append('v').append(i).append(": "); sb.append(phisites[i].toString()); System.out.println(sb); } } }
static void function (SsaMethod ssaMeth, LocalVariableInfo localInfo, int threshold) { ArrayList<SsaBasicBlock> ssaBlocks; int regCount; int blockCount; ssaBlocks = ssaMeth.getBlocks(); blockCount = ssaBlocks.size(); regCount = ssaMeth.getRegCount() - threshold; DomFront df = new DomFront(ssaMeth); DomFront.DomInfo[] domInfos = df.run(); BitSet[] defsites = new BitSet[regCount]; BitSet[] phisites = new BitSet[regCount]; for (int i = 0; i < regCount; i++) { defsites[i] = new BitSet(blockCount); phisites[i] = new BitSet(blockCount); } for (int bi = 0, s = ssaBlocks.size(); bi < s; bi++) { SsaBasicBlock b = ssaBlocks.get(bi); for (SsaInsn insn : b.getInsns()) { RegisterSpec rs = insn.getResult(); if (rs != null && rs.getReg() - threshold >= 0) { defsites[rs.getReg() - threshold].set(bi); } } } if (DEBUG) { System.out.println(STR); for (int i = 0; i < regCount; i++) { StringBuilder sb = new StringBuilder(); sb.append('v').append(i).append(STR); sb.append(defsites[i].toString()); System.out.println(sb); } } BitSet worklist; for (int reg = 0, s = regCount; reg < s; reg++) { int workBlockIndex; worklist = (BitSet) (defsites[reg].clone()); while (0 <= (workBlockIndex = worklist.nextSetBit(0))) { worklist.clear(workBlockIndex); IntIterator dfIterator = domInfos[workBlockIndex].dominanceFrontiers.iterator(); while (dfIterator.hasNext()) { int dfBlockIndex = dfIterator.next(); if (!phisites[reg].get(dfBlockIndex)) { phisites[reg].set(dfBlockIndex); int tReg = reg + threshold; RegisterSpec rs = localInfo.getStarts(dfBlockIndex).get(tReg); if (rs == null) { ssaBlocks.get(dfBlockIndex).addPhiInsnForReg(tReg); } else { ssaBlocks.get(dfBlockIndex).addPhiInsnForReg(rs); } if (!defsites[reg].get(dfBlockIndex)) { worklist.set(dfBlockIndex); } } } } } if (DEBUG) { System.out.println(STR); for (int i = 0; i < regCount; i++) { StringBuilder sb = new StringBuilder(); sb.append('v').append(i).append(STR); sb.append(phisites[i].toString()); System.out.println(sb); } } }
/** * See Appel algorithm 19.6: * * Place Phi functions in appropriate locations. * * @param ssaMeth {@code non-null;} method to process. * Modifications are made in-place. * @param localInfo {@code non-null;} local variable info, used * when placing phis * @param threshold registers below this number are ignored */
See Appel algorithm 19.6: Place Phi functions in appropriate locations
placePhiFunctions
{ "repo_name": "marcinkwiatkowski/buck", "path": "third-party/java/dx/src/com/android/dx/ssa/SsaConverter.java", "license": "apache-2.0", "size": 13444 }
[ "com.android.dx.rop.code.RegisterSpec", "com.android.dx.util.IntIterator", "java.util.ArrayList", "java.util.BitSet" ]
import com.android.dx.rop.code.RegisterSpec; import com.android.dx.util.IntIterator; import java.util.ArrayList; import java.util.BitSet;
import com.android.dx.rop.code.*; import com.android.dx.util.*; import java.util.*;
[ "com.android.dx", "java.util" ]
com.android.dx; java.util;
2,657,025
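A self-contained sketch of the same Appel 19.6 worklist, with dominance frontiers supplied directly as BitSets instead of computed by DomFront, and without the register threshold or local-variable bookkeeping of the method above; placePhis and the diamond example are made up for illustration.

```java
import java.util.BitSet;

public class PhiPlacementSketch {
    static BitSet[] placePhis(BitSet[] defsites, BitSet[] domFrontier, int blockCount) {
        BitSet[] phisites = new BitSet[defsites.length];
        for (int reg = 0; reg < defsites.length; reg++) {
            phisites[reg] = new BitSet(blockCount);
            BitSet worklist = (BitSet) defsites[reg].clone();
            int block;
            while ((block = worklist.nextSetBit(0)) >= 0) {
                worklist.clear(block);
                for (int df = domFrontier[block].nextSetBit(0); df >= 0;
                        df = domFrontier[block].nextSetBit(df + 1)) {
                    if (!phisites[reg].get(df)) {
                        phisites[reg].set(df);        // this block needs a phi for the register
                        if (!defsites[reg].get(df)) {
                            worklist.set(df);         // the phi is itself a new definition
                        }
                    }
                }
            }
        }
        return phisites;
    }

    public static void main(String[] args) {
        // Diamond CFG: 0 -> {1,2} -> 3, so DF(1) = DF(2) = {3} and DF(0) = DF(3) = {}.
        int blocks = 4;
        BitSet[] df = {new BitSet(blocks), new BitSet(blocks), new BitSet(blocks), new BitSet(blocks)};
        df[1].set(3);
        df[2].set(3);

        BitSet[] defsites = {new BitSet(blocks)};   // one register, defined in blocks 1 and 2
        defsites[0].set(1);
        defsites[0].set(2);

        System.out.println(placePhis(defsites, df, blocks)[0]);   // {3}: the join block gets the phi
    }
}
```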
public void actionPerformed(final ActionEvent _event) { // Show / Hide console if (_event.getSource().equals(getPaintObjects() .getI1b_console().getActionCause())) { view.forms.Console.getInstance().setVisible( !view.forms.Console.getInstance().isVisible()); } else if ( //generate view diagram (analyse.png) _event.getSource().equals(getPaintObjects() .getI1b_diagramView().getActionCause())) { DebugUtil.performCheckViewComponents( cp.getView(), DebugUtil.CHECK_OP_IMAGE); } else if ( //generate actionLog (text file) _event.getSource().equals(getPaintObjects() .getI1b_generateLog().getActionCause())) { String actions = ActionManager.externalizeAction(); // JOptionPane.showMessageDialog(cp.getView(), "The following actions were fetched:\n" // + actions); MessageDialog.showMessage("The following actions were fetched:\n" + actions, cp.getView()); } else if ( //Report bug _event.getSource().equals(getPaintObjects() .getI1b_reportBug().getActionCause())) { State.getLogger().warning("not implemented yet."); } else { //action for elements that represent paintObjects. Component[] c = getPaintObjects().getJpnl_items().getComponents(); for (int i = 0; i < c.length; i++) { if (c[i] instanceof Item1Button && ((Item1Button) c[i]).getAdditionalInformation() instanceof PaintObject) { Item1Button i1b = (Item1Button) c[i]; final PaintObject po_cu = (PaintObject) i1b .getAdditionalInformation(); if (_event.getSource().equals(i1b.getActionCause())) { i1b.setActivated(false); showPaintObjectInformation(po_cu); cp.getPicture().releaseSelected( cp.getControlPaintSelection(), cp.getcTabSelection(), cp.getView().getTabs().getTab_debug(), cp.getView().getPage().getJlbl_painting() .getLocation().x, cp.getView().getPage().getJlbl_painting() .getLocation().y); getControlPicture().releaseSelected(); State.setIndexOperation( Constants.CONTROL_PAINTING_INDEX_MOVE); //decativate other menuitems and activate the //current one //(move) cp.getPicture().createSelected(); getPaintObjects().deactivate(); cp.getPicture().insertIntoSelected(po_cu, cp.getView().getTabs().getTab_debug()); new PictureOverview(getPaintObjects()).remove( po_cu); cp.getPicture().getLs_po_sortedByY().remove( SecureList.ID_NO_PREDECESSOR); } } else { State.getLogger().severe("Error in ActionListener: " + "wrong kind of element. " + "This error should never occure"); } } //finish insertion into selected. cp.getPicture().finishSelection(cp.getcTabSelection()); cp.getPicture().paintSelected(getPage(), cp.getControlPic(), cp.getControlPaintSelection()); getControlPicture().refreshPaint(); getPaintObjects().repaint(); getPage().getJlbl_backgroundStructure().repaint(); } }
void function(final ActionEvent _event) { if (_event.getSource().equals(getPaintObjects() .getI1b_console().getActionCause())) { view.forms.Console.getInstance().setVisible( !view.forms.Console.getInstance().isVisible()); } else if ( _event.getSource().equals(getPaintObjects() .getI1b_diagramView().getActionCause())) { DebugUtil.performCheckViewComponents( cp.getView(), DebugUtil.CHECK_OP_IMAGE); } else if ( _event.getSource().equals(getPaintObjects() .getI1b_generateLog().getActionCause())) { String actions = ActionManager.externalizeAction(); MessageDialog.showMessage(STR + actions, cp.getView()); } else if ( _event.getSource().equals(getPaintObjects() .getI1b_reportBug().getActionCause())) { State.getLogger().warning(STR); } else { Component[] c = getPaintObjects().getJpnl_items().getComponents(); for (int i = 0; i < c.length; i++) { if (c[i] instanceof Item1Button && ((Item1Button) c[i]).getAdditionalInformation() instanceof PaintObject) { Item1Button i1b = (Item1Button) c[i]; final PaintObject po_cu = (PaintObject) i1b .getAdditionalInformation(); if (_event.getSource().equals(i1b.getActionCause())) { i1b.setActivated(false); showPaintObjectInformation(po_cu); cp.getPicture().releaseSelected( cp.getControlPaintSelection(), cp.getcTabSelection(), cp.getView().getTabs().getTab_debug(), cp.getView().getPage().getJlbl_painting() .getLocation().x, cp.getView().getPage().getJlbl_painting() .getLocation().y); getControlPicture().releaseSelected(); State.setIndexOperation( Constants.CONTROL_PAINTING_INDEX_MOVE); cp.getPicture().createSelected(); getPaintObjects().deactivate(); cp.getPicture().insertIntoSelected(po_cu, cp.getView().getTabs().getTab_debug()); new PictureOverview(getPaintObjects()).remove( po_cu); cp.getPicture().getLs_po_sortedByY().remove( SecureList.ID_NO_PREDECESSOR); } } else { State.getLogger().severe(STR + STR + STR); } } cp.getPicture().finishSelection(cp.getcTabSelection()); cp.getPicture().paintSelected(getPage(), cp.getControlPic(), cp.getControlPaintSelection()); getControlPicture().refreshPaint(); getPaintObjects().repaint(); getPage().getJlbl_backgroundStructure().repaint(); } }
/** * ActionListener deals with the action performed by the buttons containing * a PaintObject of the PaintObjects' view. * @param _event the event that is thrown. */
ActionListener deals with the action performed by the buttons containing a PaintObject of the PaintObjects' view
actionPerformed
{ "repo_name": "juliusHuelsmann/paint", "path": "PaintNotes/src/main/java/control/forms/tabs/CTabDebug.java", "license": "apache-2.0", "size": 19758 }
[ "java.awt.Component", "java.awt.event.ActionEvent" ]
import java.awt.Component; import java.awt.event.ActionEvent;
import java.awt.*; import java.awt.event.*;
[ "java.awt" ]
java.awt;
2,417,697
@Test public void testGetTrackedNodeAfterClear() { ImmutableNode node = NodeStructureHelper.nodeForKey(model, "tables/table(1)"); NodeKeyResolver<ImmutableNode> resolver = createResolver(); model.trackNode(selector, resolver); model.clear(resolver); assertSame("Wrong node", node, model.getTrackedNode(selector)); }
void function() { ImmutableNode node = NodeStructureHelper.nodeForKey(model, STR); NodeKeyResolver<ImmutableNode> resolver = createResolver(); model.trackNode(selector, resolver); model.clear(resolver); assertSame(STR, node, model.getTrackedNode(selector)); }
/** * Tests whether a tracked node can be queried even after the model was * cleared. */
Tests whether a tracked node can be queried even after the model was cleared
testGetTrackedNodeAfterClear
{ "repo_name": "mohanaraosv/commons-configuration", "path": "src/test/java/org/apache/commons/configuration2/tree/TestInMemoryNodeModelTrackedNodes.java", "license": "apache-2.0", "size": 36379 }
[ "org.junit.Assert" ]
import org.junit.Assert;
import org.junit.*;
[ "org.junit" ]
org.junit;
1,439,237
public static String readString(InputStream in) throws IOException { final StringBuilder bldr = new StringBuilder(); byte b; while ((b = (byte) in.read()) != 0) { bldr.append((char) b); } return bldr.toString(); }
static String function(InputStream in) throws IOException { final StringBuilder bldr = new StringBuilder(); byte b; while ((b = (byte) in.read()) != 0) { bldr.append((char) b); } return bldr.toString(); }
/** * Reads a null-terminated string from the specified * <code>InputStream</code>. * * @param in * The input stream. * @return The string. * @throws IOException * if an I/O error occurs, such as the stream closing. */
Reads a null-terminated string from the specified <code>InputStream</code>
readString
{ "repo_name": "lightstorm/lightstorm-server", "path": "src/org/hyperion/util/Streams.java", "license": "mit", "size": 2667 }
[ "java.io.IOException", "java.io.InputStream" ]
import java.io.IOException; import java.io.InputStream;
import java.io.*;
[ "java.io" ]
java.io;
2,310,416
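A minimal usage sketch for the readString helper in the record above. The enclosing class is assumed to be org.hyperion.util.Streams (taken from the record's path), and the byte values are purely illustrative.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import org.hyperion.util.Streams;   // package assumed from the record's path

public class ReadStringSketch {
    public static void main(String[] args) throws IOException {
        // "hi" followed by the NUL byte that readString treats as the terminator
        byte[] data = {'h', 'i', 0, 'x'};
        ByteArrayInputStream in = new ByteArrayInputStream(data);
        String s = Streams.readString(in);
        System.out.println(s);   // prints "hi"; the trailing 'x' is left unread in the stream
    }
}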
@Test public void nextObjective() { TrafficTreatment treatment = DefaultTrafficTreatment.emptyTreatment(); NextObjective next = DefaultNextObjective.builder() .withId(manager.allocateNextId()) .addTreatment(treatment) .withType(NextObjective.Type.BROADCAST) .fromApp(NetTestTools.APP_ID) .makePermanent() .add(); manager.next(id1, next); TestTools.assertAfter(RETRY_MS, () -> assertThat(nextObjectives, hasSize(1))); assertThat(forwardingObjectives, hasSize(0)); assertThat(filteringObjectives, hasSize(0)); assertThat(nextObjectives, hasItem("of:d1")); }
void function() { TrafficTreatment treatment = DefaultTrafficTreatment.emptyTreatment(); NextObjective next = DefaultNextObjective.builder() .withId(manager.allocateNextId()) .addTreatment(treatment) .withType(NextObjective.Type.BROADCAST) .fromApp(NetTestTools.APP_ID) .makePermanent() .add(); manager.next(id1, next); TestTools.assertAfter(RETRY_MS, () -> assertThat(nextObjectives, hasSize(1))); assertThat(forwardingObjectives, hasSize(0)); assertThat(filteringObjectives, hasSize(0)); assertThat(nextObjectives, hasItem("of:d1")); }
/** * Tests adding a next objective. */
Tests adding a next objective
nextObjective
{ "repo_name": "LorenzReinhart/ONOSnew", "path": "core/net/src/test/java/org/onosproject/net/flowobjective/impl/FlowObjectiveManagerTest.java", "license": "apache-2.0", "size": 13517 }
[ "org.hamcrest.CoreMatchers", "org.hamcrest.MatcherAssert", "org.hamcrest.Matchers", "org.onosproject.net.NetTestTools", "org.onosproject.net.flow.DefaultTrafficTreatment", "org.onosproject.net.flow.TrafficTreatment", "org.onosproject.net.flowobjective.DefaultNextObjective", "org.onosproject.net.flowobjective.NextObjective", "org.onosproject.net.intent.TestTools" ]
import org.hamcrest.CoreMatchers; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; import org.onosproject.net.NetTestTools; import org.onosproject.net.flow.DefaultTrafficTreatment; import org.onosproject.net.flow.TrafficTreatment; import org.onosproject.net.flowobjective.DefaultNextObjective; import org.onosproject.net.flowobjective.NextObjective; import org.onosproject.net.intent.TestTools;
import org.hamcrest.*; import org.onosproject.net.*; import org.onosproject.net.flow.*; import org.onosproject.net.flowobjective.*; import org.onosproject.net.intent.*;
[ "org.hamcrest", "org.onosproject.net" ]
org.hamcrest; org.onosproject.net;
275,773
private void setTagMarkerColor(Category tag) { String colorsPref = prefs.getString("settings_colors_app", Constants.PREF_COLORS_APP_DEFAULT); // Checking preference if (!colorsPref.equals("disabled")){ // Choosing target view depending on another preference ArrayList<View> target = new ArrayList<View>(); if (colorsPref.equals("complete")){ target.add(getView().findViewById(R.id.title_wrapper)); target.add(getView().findViewById(R.id.detail_content_card)); } else { target.add(getView().findViewById(R.id.tag_marker)); } // Coloring the target if (tag != null && tag.getColor() != null) { for (View view : target) { view.setBackgroundColor(Integer.parseInt(tag.getColor())); } } else { for (View view : target) { view.setBackgroundColor(Color.parseColor("#00000000")); } } } }
void function(Category tag) { String colorsPref = prefs.getString(STR, Constants.PREF_COLORS_APP_DEFAULT); if (!colorsPref.equals(STR)){ ArrayList<View> target = new ArrayList<View>(); if (colorsPref.equals(STR)){ target.add(getView().findViewById(R.id.title_wrapper)); target.add(getView().findViewById(R.id.detail_content_card)); } else { target.add(getView().findViewById(R.id.tag_marker)); } if (tag != null && tag.getColor() != null) { for (View view : target) { view.setBackgroundColor(Integer.parseInt(tag.getColor())); } } else { for (View view : target) { view.setBackgroundColor(Color.parseColor(STR)); } } } }
/** * Colors tag marker in note title TextView */
Colors tag marker in note title TextView
setTagMarkerColor
{ "repo_name": "kanpol/omni-note", "path": "src/it/feio/android/omninotes/DetailFragment.java", "license": "apache-2.0", "size": 80198 }
[ "android.graphics.Color", "android.view.View", "it.feio.android.omninotes.models.Category", "it.feio.android.omninotes.utils.Constants", "java.util.ArrayList" ]
import android.graphics.Color; import android.view.View; import it.feio.android.omninotes.models.Category; import it.feio.android.omninotes.utils.Constants; import java.util.ArrayList;
import android.graphics.*; import android.view.*; import it.feio.android.omninotes.models.*; import it.feio.android.omninotes.utils.*; import java.util.*;
[ "android.graphics", "android.view", "it.feio.android", "java.util" ]
android.graphics; android.view; it.feio.android; java.util;
1,894,858
private static OIndexSearchResult createIndexedProperty(final OSQLFilterCondition iCondition, final Object iItem) { if (iItem == null || !(iItem instanceof OSQLFilterItemField)) return null; if (iCondition.getLeft() instanceof OSQLFilterItemField && iCondition.getRight() instanceof OSQLFilterItemField) return null; final OSQLFilterItemField item = (OSQLFilterItemField) iItem; if (item.hasChainOperators() && !item.isFieldChain()) return null; final Object origValue = iCondition.getLeft() == iItem ? iCondition.getRight() : iCondition.getLeft(); if (iCondition.getOperator() instanceof OQueryOperatorBetween || iCondition.getOperator() instanceof OQueryOperatorIn) { return new OIndexSearchResult(iCondition.getOperator(), item.getFieldChain(), origValue); } final Object value = OSQLHelper.getValue(origValue); if (value == null) return null; return new OIndexSearchResult(iCondition.getOperator(), item.getFieldChain(), value); }
static OIndexSearchResult function(final OSQLFilterCondition iCondition, final Object iItem) { if (iItem == null || !(iItem instanceof OSQLFilterItemField)) return null; if (iCondition.getLeft() instanceof OSQLFilterItemField && iCondition.getRight() instanceof OSQLFilterItemField) return null; final OSQLFilterItemField item = (OSQLFilterItemField) iItem; if (item.hasChainOperators() && !item.isFieldChain()) return null; final Object origValue = iCondition.getLeft() == iItem ? iCondition.getRight() : iCondition.getLeft(); if (iCondition.getOperator() instanceof OQueryOperatorBetween || iCondition.getOperator() instanceof OQueryOperatorIn) { return new OIndexSearchResult(iCondition.getOperator(), item.getFieldChain(), origValue); } final Object value = OSQLHelper.getValue(origValue); if (value == null) return null; return new OIndexSearchResult(iCondition.getOperator(), item.getFieldChain(), value); }

/** * Add SQL filter field to the search candidate list. * * @param iCondition * Condition item * @param iItem * Value to search * @return true if the property was indexed and found, otherwise false */
Add SQL filter field to the search candidate list
createIndexedProperty
{ "repo_name": "nengxu/OrientDB", "path": "core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLSelect.java", "license": "apache-2.0", "size": 51055 }
[ "com.orientechnologies.orient.core.sql.filter.OSQLFilterCondition", "com.orientechnologies.orient.core.sql.filter.OSQLFilterItemField", "com.orientechnologies.orient.core.sql.operator.OQueryOperatorBetween", "com.orientechnologies.orient.core.sql.operator.OQueryOperatorIn" ]
import com.orientechnologies.orient.core.sql.filter.OSQLFilterCondition; import com.orientechnologies.orient.core.sql.filter.OSQLFilterItemField; import com.orientechnologies.orient.core.sql.operator.OQueryOperatorBetween; import com.orientechnologies.orient.core.sql.operator.OQueryOperatorIn;
import com.orientechnologies.orient.core.sql.filter.*; import com.orientechnologies.orient.core.sql.operator.*;
[ "com.orientechnologies.orient" ]
com.orientechnologies.orient;
992,728
protected void initKey(String providerId, String providerUserId) { if (providerUserId == null) { providerUserId = setValues().providerUserId; } key = new ConnectionKey(providerId, providerUserId); }
void function(String providerId, String providerUserId) { if (providerUserId == null) { providerUserId = setValues().providerUserId; } key = new ConnectionKey(providerId, providerUserId); }
/** * Hook that should be called by subclasses to initialize the key property when establishing a new connection. * @param providerId the providerId * @param providerUserId the providerUserId */
Hook that should be called by subclasses to initialize the key property when establishing a new connection
initKey
{ "repo_name": "okohub/spring-social", "path": "spring-social-core/src/main/java/org/springframework/social/connect/support/AbstractConnection.java", "license": "apache-2.0", "size": 5014 }
[ "org.springframework.social.connect.ConnectionKey" ]
import org.springframework.social.connect.ConnectionKey;
import org.springframework.social.connect.*;
[ "org.springframework.social" ]
org.springframework.social;
1,339,844
boolean isRunfileLinksEnabled(PathFragment runfilesDir);
boolean isRunfileLinksEnabled(PathFragment runfilesDir);
/** * Returns whether it's allowed to create runfile symlinks in the {@code runfilesDir}. Also * returns {@code false} if the runfiles supplier doesn't know about the directory. * * @param runfilesDir runfiles directory relative to the exec root */
Returns whether it's allowed to create runfile symlinks in the runfilesDir. Also returns false if the runfiles supplier doesn't know about the directory
isRunfileLinksEnabled
{ "repo_name": "bazelbuild/bazel", "path": "src/main/java/com/google/devtools/build/lib/actions/RunfilesSupplier.java", "license": "apache-2.0", "size": 2583 }
[ "com.google.devtools.build.lib.vfs.PathFragment" ]
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.vfs.*;
[ "com.google.devtools" ]
com.google.devtools;
1,593,885
public static UserId splitUserId(final String userId) { if (!TextUtils.isEmpty(userId)) { final Matcher matcher = USER_ID_PATTERN.matcher(userId); if (matcher.matches()) { return new UserId(matcher.group(1), matcher.group(3), matcher.group(2)); } } return new UserId(null, null, null); }
static UserId function(final String userId) { if (!TextUtils.isEmpty(userId)) { final Matcher matcher = USER_ID_PATTERN.matcher(userId); if (matcher.matches()) { return new UserId(matcher.group(1), matcher.group(3), matcher.group(2)); } } return new UserId(null, null, null); }
/** * Splits userId string into naming part, email part, and comment part * <p/> * User ID matching: * http://fiddle.re/t4p6f * * @param userId * @return theParsedUserInfo */
Splits userId string into naming part, email part, and comment part User ID matching: HREF
splitUserId
{ "repo_name": "cketti/k-9", "path": "plugins/openpgp-api-lib/openpgp-api/src/main/java/org/openintents/openpgp/util/OpenPgpUtils.java", "license": "apache-2.0", "size": 5157 }
[ "android.text.TextUtils", "java.util.regex.Matcher" ]
import android.text.TextUtils; import java.util.regex.Matcher;
import android.text.*; import java.util.regex.*;
[ "android.text", "java.util" ]
android.text; java.util;
855,455
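A short usage sketch for splitUserId above. The user-id string is invented; only the static call on OpenPgpUtils and the null-fallback behaviour come from the record.

import org.openintents.openpgp.util.OpenPgpUtils;   // package taken from the record's path

public class SplitUserIdSketch {
    public static void main(String[] args) {
        // The user id below is invented; its "Name (comment) <email>" shape matches what the
        // record's regex groups (1 = name, 2 = comment, 3 = email) appear to expect.
        System.out.println(
                OpenPgpUtils.splitUserId("Alice Example (work) <[email protected]>"));
        // Empty or non-matching input falls through to new UserId(null, null, null).
    }
}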
public boolean equals(Object that) { if (!super.equals(that)) return false; CType t = (CType)that; if (!Compare.equals(m_descriptionList, t.m_descriptionList)) return false; if (!Compare.equals(m_commentList, t.m_commentList)) return false; if (!Compare.equals(m_third, t.m_third)) return false; return true; }
boolean function(Object that) { if (!super.equals(that)) return false; CType t = (CType)that; if (!Compare.equals(m_descriptionList, t.m_descriptionList)) return false; if (!Compare.equals(m_commentList, t.m_commentList)) return false; if (!Compare.equals(m_third, t.m_third)) return false; return true; }
/** * This method compares this and that. * @return true if this and that are the same, false otherwise. */
This method compares this and that
equals
{ "repo_name": "lolkedijkstra/xml2j-gen", "path": "tutorial/group/src/main/java/com/xml2j/tutorial/group/CType.java", "license": "mit", "size": 4477 }
[ "com.xml2j.util.Compare" ]
import com.xml2j.util.Compare;
import com.xml2j.util.*;
[ "com.xml2j.util" ]
com.xml2j.util;
1,183,745
public void testEtranCodeIncomeOrExpense_False() { EndowmentTransactionLine endowmentSourceTransactionLine = EndowmentTransactionLineFixture.ENDOWMENT_TRANSACTIONAL_LINE_POSITIVE_AMT.createEndowmentTransactionLine(true); EndowmentTransactionCode endowmentTransactionCode = EndowmentTransactionCodeFixture.ASSET_TRANSACTION_CODE.createEndowmentTransactionCode(); endowmentSourceTransactionLine.setEtranCode(endowmentTransactionCode.getCode()); endowmentSourceTransactionLine.setEtranCodeObj(endowmentTransactionCode); assertFalse(rule.validateEndowmentTransactionTypeCode(document, endowmentSourceTransactionLine, rule.getErrorPrefix(endowmentSourceTransactionLine, -1))); } // The ETRAN Code used must have an appropriately identified general ledger object code record; one that matches the Chart for // the KEMID associated general ledger account.
void function() { EndowmentTransactionLine endowmentSourceTransactionLine = EndowmentTransactionLineFixture.ENDOWMENT_TRANSACTIONAL_LINE_POSITIVE_AMT.createEndowmentTransactionLine(true); EndowmentTransactionCode endowmentTransactionCode = EndowmentTransactionCodeFixture.ASSET_TRANSACTION_CODE.createEndowmentTransactionCode(); endowmentSourceTransactionLine.setEtranCode(endowmentTransactionCode.getCode()); endowmentSourceTransactionLine.setEtranCodeObj(endowmentTransactionCode); assertFalse(rule.validateEndowmentTransactionTypeCode(document, endowmentSourceTransactionLine, rule.getErrorPrefix(endowmentSourceTransactionLine, -1))); }
/** * Validates that validateEndowmentTransactionTypeCode returns false when the etran code type is not income or expense. */
Validates that validateEndowmentTransactionTypeCode returns false when the etran code type is not income or expense
testEtranCodeIncomeOrExpense_False
{ "repo_name": "ua-eas/ua-kfs-5.3", "path": "test/unit/src/org/kuali/kfs/module/endow/document/validation/impl/SecurityTransferDocumentRulesTest.java", "license": "agpl-3.0", "size": 34081 }
[ "org.kuali.kfs.module.endow.businessobject.EndowmentTransactionCode", "org.kuali.kfs.module.endow.businessobject.EndowmentTransactionLine", "org.kuali.kfs.module.endow.fixture.EndowmentTransactionCodeFixture", "org.kuali.kfs.module.endow.fixture.EndowmentTransactionLineFixture" ]
import org.kuali.kfs.module.endow.businessobject.EndowmentTransactionCode; import org.kuali.kfs.module.endow.businessobject.EndowmentTransactionLine; import org.kuali.kfs.module.endow.fixture.EndowmentTransactionCodeFixture; import org.kuali.kfs.module.endow.fixture.EndowmentTransactionLineFixture;
import org.kuali.kfs.module.endow.businessobject.*; import org.kuali.kfs.module.endow.fixture.*;
[ "org.kuali.kfs" ]
org.kuali.kfs;
2,358,936
public void setImageParameters( COSDictionary params) { imageParameters = params; }
void function( COSDictionary params) { imageParameters = params; }
/** * This will set the image parameters, this is only valid for BI operators. * * @param params The image parameters. */
This will set the image parameters, this is only valid for BI operators
setImageParameters
{ "repo_name": "mdamt/PdfBox-Android", "path": "library/src/main/java/org/apache/pdfbox/contentstream/operator/Operator.java", "license": "apache-2.0", "size": 3458 }
[ "org.apache.pdfbox.cos.COSDictionary" ]
import org.apache.pdfbox.cos.COSDictionary;
import org.apache.pdfbox.cos.*;
[ "org.apache.pdfbox" ]
org.apache.pdfbox;
2,262,792
void smoothScrollTo(int x, int y, int velocity) { if (getChildCount() == 0) { // Nothing to do. setScrollingCacheEnabled(false); return; } int sx = getScrollX(); int sy = getScrollY(); int dx = x - sx; int dy = y - sy; if (dx == 0 && dy == 0) { completeScroll(false); populate(); setScrollState(SCROLL_STATE_IDLE); return; } setScrollingCacheEnabled(true); setScrollState(SCROLL_STATE_SETTLING); final int width = getClientWidth(); final int halfWidth = width / 2; final float distanceRatio = Math.min(1f, 1.0f * Math.abs(dx) / width); final float distance = halfWidth + halfWidth * distanceInfluenceForSnapDuration(distanceRatio); int duration = 0; velocity = Math.abs(velocity); if (velocity > 0) { duration = 4 * Math.round(1000 * Math.abs(distance / velocity)); } else { final float pageWidth = width * mAdapter.getPageWidth(mCurItem); final float pageDelta = (float) Math.abs(dx) / (pageWidth + mPageMargin); duration = (int) ((pageDelta + 1) * 100); } duration = Math.min(duration, MAX_SETTLE_DURATION); mScroller.startScroll(sx, sy, dx, dy, duration); ViewCompat.postInvalidateOnAnimation(this); }
void smoothScrollTo(int x, int y, int velocity) { if (getChildCount() == 0) { setScrollingCacheEnabled(false); return; } int sx = getScrollX(); int sy = getScrollY(); int dx = x - sx; int dy = y - sy; if (dx == 0 && dy == 0) { completeScroll(false); populate(); setScrollState(SCROLL_STATE_IDLE); return; } setScrollingCacheEnabled(true); setScrollState(SCROLL_STATE_SETTLING); final int width = getClientWidth(); final int halfWidth = width / 2; final float distanceRatio = Math.min(1f, 1.0f * Math.abs(dx) / width); final float distance = halfWidth + halfWidth * distanceInfluenceForSnapDuration(distanceRatio); int duration = 0; velocity = Math.abs(velocity); if (velocity > 0) { duration = 4 * Math.round(1000 * Math.abs(distance / velocity)); } else { final float pageWidth = width * mAdapter.getPageWidth(mCurItem); final float pageDelta = (float) Math.abs(dx) / (pageWidth + mPageMargin); duration = (int) ((pageDelta + 1) * 100); } duration = Math.min(duration, MAX_SETTLE_DURATION); mScroller.startScroll(sx, sy, dx, dy, duration); ViewCompat.postInvalidateOnAnimation(this); }
/** * Like {@link View#scrollBy}, but scroll smoothly instead of immediately. * * @param x the number of pixels to scroll by on the X axis * @param y the number of pixels to scroll by on the Y axis * @param velocity the velocity associated with a fling, if applicable. (0 otherwise) */
Like <code>View#scrollBy</code>, but scroll smoothly instead of immediately
smoothScrollTo
{ "repo_name": "jjhesk/LoyalNativeSlider", "path": "library/src/main/java/com/hkm/slider/Tricks/ViewPagerEx.java", "license": "mit", "size": 115752 }
[ "android.support.v4.view.ViewCompat" ]
import android.support.v4.view.ViewCompat;
import android.support.v4.view.*;
[ "android.support" ]
android.support;
712,837
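The duration arithmetic in smoothScrollTo above can be checked in isolation. The sketch below reproduces the velocity > 0 branch with made-up inputs; the 0.45 influence value and the 600 ms cap are assumptions standing in for distanceInfluenceForSnapDuration(...) and MAX_SETTLE_DURATION, which live elsewhere in ViewPagerEx.

public class SettleDurationSketch {
    public static void main(String[] args) {
        int width = 1080, dx = 1080, velocity = 2000;        // illustrative values only
        int halfWidth = width / 2;
        float distanceRatio = Math.min(1f, 1.0f * Math.abs(dx) / width);
        float influence = 0.45f;   // stand-in for distanceInfluenceForSnapDuration(distanceRatio)
        float distance = halfWidth + halfWidth * influence;
        int duration = 4 * Math.round(1000 * Math.abs(distance / velocity));
        duration = Math.min(duration, 600);                  // assumed MAX_SETTLE_DURATION value
        System.out.println(duration + " ms");                // the cap wins for this strong fling
    }
}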
public static void writeHtml(String templateFile, OutputStream output, Map<String, String> replacements) throws IOException { final InputStream is = Resources.getResourceAsStream(templateFile); try (BufferedReader br = new BufferedReader(new InputStreamReader(is))) { try (final LineWriter pw = new LineWriter(new OutputStreamWriter(output))) { fillTemplate(replacements, br, pw); } } }
static void function(String templateFile, OutputStream output, Map<String, String> replacements) throws IOException { final InputStream is = Resources.getResourceAsStream(templateFile); try (BufferedReader br = new BufferedReader(new InputStreamReader(is))) { try (final LineWriter pw = new LineWriter(new OutputStreamWriter(output))) { fillTemplate(replacements, br, pw); } } }
/** * writes given template resource to given output stream replacing things in <code>replacements</code> when found * @param templateFile name of resource containing template * @param output output stream * @param replacements mapping between placeholder and final text * @throws IOException if an IO error occurs */
writes given template resource to given output stream replacing things in <code>replacements</code> when found
writeHtml
{ "repo_name": "RealTimeGenomics/rtg-tools", "path": "src/com/rtg/report/ReportUtils.java", "license": "bsd-2-clause", "size": 4525 }
[ "com.rtg.util.Resources", "com.rtg.util.io.LineWriter", "java.io.BufferedReader", "java.io.IOException", "java.io.InputStream", "java.io.InputStreamReader", "java.io.OutputStream", "java.io.OutputStreamWriter", "java.util.Map" ]
import com.rtg.util.Resources; import com.rtg.util.io.LineWriter; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.util.Map;
import com.rtg.util.*; import com.rtg.util.io.*; import java.io.*; import java.util.*;
[ "com.rtg.util", "java.io", "java.util" ]
com.rtg.util; java.io; java.util;
1,770,311
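A hedged usage sketch for writeHtml above. The template resource name and the placeholder keys are invented; only the method signature and the classpath-resource lookup come from the record.

import com.rtg.report.ReportUtils;   // package assumed from the record's path
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.Map;

public class WriteHtmlSketch {
    public static void main(String[] args) throws IOException {
        Map<String, String> replacements = new HashMap<>();
        replacements.put("__TITLE__", "Example report");        // invented placeholder key
        replacements.put("__BODY__", "Hello from the sketch");  // invented placeholder key
        try (OutputStream out = new FileOutputStream("report.html")) {
            // The resource name is made up; it must exist on the classpath for
            // Resources.getResourceAsStream to find it.
            ReportUtils.writeHtml("com/rtg/report/example_template.html", out, replacements);
        }
    }
}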
protected void processFrame() { if (getFrame()) { for (int i = 0; i < fftSize; i++) { reBuffer[i] = window[i] * circBuffer[cbIndex]; if (++cbIndex == fftSize) cbIndex = 0; } Arrays.fill(imBuffer, 0); FFT.magnitudePhaseFFT(reBuffer, imBuffer); Arrays.fill(newFrame, 0); double flux = 0; for (int i = 0; i <= fftSize/2; i++) { if (reBuffer[i] > prevFrame[i]) flux += reBuffer[i] - prevFrame[i]; newFrame[freqMap[i]] += reBuffer[i]; } spectralFlux[frameCount] = flux; for (int i = 0; i < freqMapSize; i++) frames[frameCount][i] = newFrame[i]; int index = cbIndex - (fftSize - hopSize); if (index < 0) index += fftSize; int sz = (fftSize - hopSize) / energyOversampleFactor; for (int j = 0; j < energyOversampleFactor; j++) { double newEnergy = 0; for (int i = 0; i < sz; i++) { newEnergy += circBuffer[index] * circBuffer[index]; if (++index == fftSize) index = 0; } energy[frameCount * energyOversampleFactor + j] = newEnergy / sz <= 1e-6? 0: Math.log(newEnergy / sz) + 13.816; } double decay = frameCount >= 200? 0.99: (frameCount < 100? 0: (frameCount - 100) / 100.0); if (ltAverage == 0) ltAverage = frameRMS; else ltAverage = ltAverage * decay + frameRMS * (1.0 - decay); if (frameRMS <= silenceThreshold) for (int i = 0; i < freqMapSize; i++) frames[frameCount][i] = 0; else { if (normaliseMode == 1) for (int i = 0; i < freqMapSize; i++) frames[frameCount][i] /= frameRMS; else if (normaliseMode == 2) for (int i = 0; i < freqMapSize; i++) frames[frameCount][i] /= ltAverage; for (int i = 0; i < freqMapSize; i++) { frames[frameCount][i] = Math.log(frames[frameCount][i]) + rangeThreshold; if (frames[frameCount][i] < 0) frames[frameCount][i] = 0; } } // weightedPhaseDeviation(); // if (debug) // System.err.printf("PhaseDev: t=%7.3f phDev=%7.3f RMS=%7.3f\n", // frameCount * hopTime, // phaseDeviation[frameCount], // frameRMS); double[] tmp = prevFrame; prevFrame = reBuffer; reBuffer = tmp; frameCount++; // if ((frameCount % 100) == 0) { // if (!silent) { // System.err.printf("Progress: %1d %5.3f %5.3f\n", // frameCount, frameRMS, ltAverage); // Profile.report(); // } // if ((progressCallback != null) && (totalFrames > 0)) // progressCallback.setFraction((double)frameCount/totalFrames); // } } } // processFrame()
void function() { if (getFrame()) { for (int i = 0; i < fftSize; i++) { reBuffer[i] = window[i] * circBuffer[cbIndex]; if (++cbIndex == fftSize) cbIndex = 0; } Arrays.fill(imBuffer, 0); FFT.magnitudePhaseFFT(reBuffer, imBuffer); Arrays.fill(newFrame, 0); double flux = 0; for (int i = 0; i <= fftSize/2; i++) { if (reBuffer[i] > prevFrame[i]) flux += reBuffer[i] - prevFrame[i]; newFrame[freqMap[i]] += reBuffer[i]; } spectralFlux[frameCount] = flux; for (int i = 0; i < freqMapSize; i++) frames[frameCount][i] = newFrame[i]; int index = cbIndex - (fftSize - hopSize); if (index < 0) index += fftSize; int sz = (fftSize - hopSize) / energyOversampleFactor; for (int j = 0; j < energyOversampleFactor; j++) { double newEnergy = 0; for (int i = 0; i < sz; i++) { newEnergy += circBuffer[index] * circBuffer[index]; if (++index == fftSize) index = 0; } energy[frameCount * energyOversampleFactor + j] = newEnergy / sz <= 1e-6? 0: Math.log(newEnergy / sz) + 13.816; } double decay = frameCount >= 200? 0.99: (frameCount < 100? 0: (frameCount - 100) / 100.0); if (ltAverage == 0) ltAverage = frameRMS; else ltAverage = ltAverage * decay + frameRMS * (1.0 - decay); if (frameRMS <= silenceThreshold) for (int i = 0; i < freqMapSize; i++) frames[frameCount][i] = 0; else { if (normaliseMode == 1) for (int i = 0; i < freqMapSize; i++) frames[frameCount][i] /= frameRMS; else if (normaliseMode == 2) for (int i = 0; i < freqMapSize; i++) frames[frameCount][i] /= ltAverage; for (int i = 0; i < freqMapSize; i++) { frames[frameCount][i] = Math.log(frames[frameCount][i]) + rangeThreshold; if (frames[frameCount][i] < 0) frames[frameCount][i] = 0; } } double[] tmp = prevFrame; prevFrame = reBuffer; reBuffer = tmp; frameCount++; } }
/** Processes a frame of audio data by first computing the STFT with a * Hamming window, then mapping the frequency bins into a part-linear * part-logarithmic array, then computing the spectral flux * then (optionally) normalising and calculating onsets. */
Processes a frame of audio data by first computing the STFT with a Hamming window, then mapping the frequency bins into a part-linear part-logarithmic array, then computing the spectral flux then (optionally) normalising and calculating onsets
processFrame
{ "repo_name": "nwaldispuehl/interval-music-compositor", "path": "intervalmusiccompositor.beatroot/src/main/java/at/ofai/music/beatroot/AudioProcessor.java", "license": "gpl-3.0", "size": 29459 }
[ "at.ofai.music.audio.FFT", "java.util.Arrays" ]
import at.ofai.music.audio.FFT; import java.util.Arrays;
import at.ofai.music.audio.*; import java.util.*;
[ "at.ofai.music", "java.util" ]
at.ofai.music; java.util;
1,015,995
private void partitionTracks() { List<MediaTrack> allTracks = mMediaInfo.getMediaTracks(); mAudioTracks.clear(); mTextTracks.clear(); mVideoTracks.clear(); mTextTracks.add(buildNoneTrack()); mSelectedTextPosition = 0; mSelectedAudioPosition = -1; if (allTracks != null) { int textPosition = 1; int audioPosition = 0; for (MediaTrack track : allTracks) { switch (track.getType()) { case MediaTrack.TYPE_TEXT: mTextTracks.add(track); if (mActiveTracks != null) { for (long mActiveTrack : mActiveTracks) { if (mActiveTrack == track.getId()) { mSelectedTextPosition = textPosition; } } } textPosition++; break; case MediaTrack.TYPE_AUDIO: mAudioTracks.add(track); if (mActiveTracks != null) { for (long mActiveTrack : mActiveTracks) { if (mActiveTrack == track.getId()) { mSelectedAudioPosition = audioPosition; } } } audioPosition++; break; case MediaTrack.TYPE_VIDEO: mVideoTracks.add(track); } } } }
void function() { List<MediaTrack> allTracks = mMediaInfo.getMediaTracks(); mAudioTracks.clear(); mTextTracks.clear(); mVideoTracks.clear(); mTextTracks.add(buildNoneTrack()); mSelectedTextPosition = 0; mSelectedAudioPosition = -1; if (allTracks != null) { int textPosition = 1; int audioPosition = 0; for (MediaTrack track : allTracks) { switch (track.getType()) { case MediaTrack.TYPE_TEXT: mTextTracks.add(track); if (mActiveTracks != null) { for (long mActiveTrack : mActiveTracks) { if (mActiveTrack == track.getId()) { mSelectedTextPosition = textPosition; } } } textPosition++; break; case MediaTrack.TYPE_AUDIO: mAudioTracks.add(track); if (mActiveTracks != null) { for (long mActiveTrack : mActiveTracks) { if (mActiveTrack == track.getId()) { mSelectedAudioPosition = audioPosition; } } } audioPosition++; break; case MediaTrack.TYPE_VIDEO: mVideoTracks.add(track); } } } }
/** * This method loops through the tracks and partitions them into a group of Text tracks and a * group of Audio tracks, and skips over the Video tracks. */
This method loops through the tracks and partitions them into a group of Text tracks and a group of Audio tracks, and skips over the Video tracks
partitionTracks
{ "repo_name": "geecko86/Shuttle", "path": "libraries/ccl/src/com/google/android/libraries/cast/companionlibrary/cast/tracks/ui/TracksChooserDialog.java", "license": "gpl-3.0", "size": 11297 }
[ "com.google.android.gms.cast.MediaTrack", "java.util.List" ]
import com.google.android.gms.cast.MediaTrack; import java.util.List;
import com.google.android.gms.cast.*; import java.util.*;
[ "com.google.android", "java.util" ]
com.google.android; java.util;
939,234
public void responsePhase3Handler(Response resp, RoRequest req) throws IOException, ModuleException { if (req.getMethod().equals("HEAD")) return; String md5_digest = resp.getHeader("Content-MD5"); String trailer = resp.getHeader("Trailer"); boolean md5_tok = false; try { if (trailer != null) md5_tok = Util.hasToken(trailer, "Content-MD5"); } catch (ParseException pe) { throw new ModuleException(pe.toString()); } if ((md5_digest == null && !md5_tok) || resp.getHeader("Transfer-Encoding") != null) return; if (md5_digest != null) Log.write(Log.MODS, "CMD5M: Received digest: " + md5_digest + " - pushing md5-check-stream"); else Log.write(Log.MODS, "CMD5M: Expecting digest in trailer " + " - pushing md5-check-stream"); resp.inp_stream = new MD5InputStream(resp.inp_stream, new VerifyMD5(resp)); }
void function(Response resp, RoRequest req) throws IOException, ModuleException { if (req.getMethod().equals("HEAD")) return; String md5_digest = resp.getHeader(STR); String trailer = resp.getHeader(STR); boolean md5_tok = false; try { if (trailer != null) md5_tok = Util.hasToken(trailer, STR); } catch (ParseException pe) { throw new ModuleException(pe.toString()); } if ((md5_digest == null && !md5_tok) || resp.getHeader(STR) != null) return; if (md5_digest != null) Log.write(Log.MODS, STR + md5_digest + STR); else Log.write(Log.MODS, STR + STR); resp.inp_stream = new MD5InputStream(resp.inp_stream, new VerifyMD5(resp)); }
/** * Invoked by the HTTPClient. */
Invoked by the HTTPClient
responsePhase3Handler
{ "repo_name": "slantview/DrupalLoadTest", "path": "lib/grinder/grinder-httpclient/src/main/java/HTTPClient/ContentMD5Module.java", "license": "gpl-2.0", "size": 4525 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
147,964
static javax.sql.DataSource getDataSource(TestConfiguration config, HashMap beanProperties) { return (javax.sql.DataSource) getDataSource(config, beanProperties, config.getJDBCClient().getDataSourceClassName()); }
static javax.sql.DataSource getDataSource(TestConfiguration config, HashMap beanProperties) { return (javax.sql.DataSource) getDataSource(config, beanProperties, config.getJDBCClient().getDataSourceClassName()); }
/** * Create a new DataSource object setup from the passed in TestConfiguration. * The getConnection() method will return a connection identical to * TestConfiguration.openDefaultConnection(). */
Create a new DataSource object setup from the passed in TestConfiguration. The getConnection() method will return a connection identical to TestConfiguration.openDefaultConnection()
getDataSource
{ "repo_name": "splicemachine/spliceengine", "path": "db-tools-testing/src/main/java/com/splicemachine/dbTesting/junit/JDBCDataSource.java", "license": "agpl-3.0", "size": 10810 }
[ "java.util.HashMap" ]
import java.util.HashMap;
import java.util.*;
[ "java.util" ]
java.util;
2,773,711
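A usage sketch for the getDataSource variant above. Because the method has default (package) visibility, the sketch is assumed to live in the same com.splicemachine.dbTesting.junit package; the empty raw HashMap simply means no extra bean properties are set.

import java.sql.Connection;
import java.sql.SQLException;
import java.util.HashMap;

// Assumed to sit in com.splicemachine.dbTesting.junit so the package-private method is visible.
public class JDBCDataSourceSketch {
    static Connection openViaDataSource(TestConfiguration config) throws SQLException {
        HashMap beanProperties = new HashMap();   // raw type, matching the record's signature
        javax.sql.DataSource ds = JDBCDataSource.getDataSource(config, beanProperties);
        // Per the record's doc, this connection matches TestConfiguration.openDefaultConnection().
        return ds.getConnection();
    }
}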
public SampleDto byId(String sampleId) throws HttpResponseException { return getResource(resourceDir + sampleId); }
SampleDto function(String sampleId) throws HttpResponseException { return getResource(resourceDir + sampleId); }
/** * Retrieves a single sample by ID * * @param sampleId LIMS ID of the sample to retrieve * @return the sample * @throws HttpResponseException on any HTTP Status other than 200 OK */
Retrieves a single sample by ID
byId
{ "repo_name": "oicr-gsi/pinery", "path": "pinery-client/src/main/java/ca/on/oicr/pinery/client/SampleClient.java", "license": "gpl-3.0", "size": 4175 }
[ "ca.on.oicr.ws.dto.SampleDto" ]
import ca.on.oicr.ws.dto.SampleDto;
import ca.on.oicr.ws.dto.*;
[ "ca.on.oicr" ]
ca.on.oicr;
2,836,090
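A small usage sketch for byId above. The sample ID is made up and the client is passed in because the record does not show how SampleClient is constructed; the HttpResponseException package is an assumption.

import ca.on.oicr.pinery.client.HttpResponseException;   // assumed to share the client's package
import ca.on.oicr.pinery.client.SampleClient;            // package taken from the record's path
import ca.on.oicr.ws.dto.SampleDto;                      // DTO package taken from the record's imports

public class SampleClientSketch {
    static SampleDto fetchOne(SampleClient samples) throws HttpResponseException {
        return samples.byId("SAM12345");   // "SAM12345" is a made-up LIMS ID
    }
}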
public void notifyChanged(Notification notification) { updateChildren(notification); switch (notification.getFeatureID(XSDResult.class)) { case XsdRulesPackage.XSD_RESULT__ERROR_MESSAGE: fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), true, false)); return; } super.notifyChanged(notification); }
void function(Notification notification) { updateChildren(notification); switch (notification.getFeatureID(XSDResult.class)) { case XsdRulesPackage.XSD_RESULT__ERROR_MESSAGE: fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), true, false)); return; } super.notifyChanged(notification); }
/** * This handles model notifications by calling {@link #updateChildren} to update any cached * children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */
This handles model notifications by calling <code>#updateChildren</code> to update any cached children and by creating a viewer notification, which it passes to <code>#fireNotifyChanged</code>.
notifyChanged
{ "repo_name": "TristanFAURE/oclCheckTool", "path": "plugins/org.topcased.checktool.xsdrules.edit/src/org/topcased/checktool/xsdrules/xsdRules/provider/XSDResultItemProvider.java", "license": "epl-1.0", "size": 6298 }
[ "org.eclipse.emf.common.notify.Notification", "org.eclipse.emf.edit.provider.ViewerNotification", "org.topcased.checktool.xsdrules.xsdRules.XSDResult", "org.topcased.checktool.xsdrules.xsdRules.XsdRulesPackage" ]
import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.edit.provider.ViewerNotification; import org.topcased.checktool.xsdrules.xsdRules.XSDResult; import org.topcased.checktool.xsdrules.xsdRules.XsdRulesPackage;
import org.eclipse.emf.common.notify.*; import org.eclipse.emf.edit.provider.*; import org.topcased.checktool.xsdrules.*;
[ "org.eclipse.emf", "org.topcased.checktool" ]
org.eclipse.emf; org.topcased.checktool;
272,702
public void remove(VarSymbol sym) { hidden.add(sym.getSimpleName()); } }
void function(VarSymbol sym) { hidden.add(sym.getSimpleName()); } }
/** * Records a non-constant variable declaration that hides any previously declared constants of * the same name. */
Records a non-constant variable declaration that hides any previously declared constants of the same name
remove
{ "repo_name": "google/error-prone", "path": "core/src/main/java/com/google/errorprone/bugpatterns/DeduplicateConstants.java", "license": "apache-2.0", "size": 6643 }
[ "com.sun.tools.javac.code.Symbol" ]
import com.sun.tools.javac.code.Symbol;
import com.sun.tools.javac.code.*;
[ "com.sun.tools" ]
com.sun.tools;
2,216,329
private void registerReceiver() { // Create an Intent filter that handles Intents from the // UploadVideoService. IntentFilter intentFilter = new IntentFilter(UploadVideoService.ACTION_UPLOAD_SERVICE_RESPONSE); intentFilter.addCategory(Intent.CATEGORY_DEFAULT); // Register the BroadcastReceiver. LocalBroadcastManager.getInstance(this) .registerReceiver(mUploadResultReceiver, intentFilter); }
void function() { IntentFilter intentFilter = new IntentFilter(UploadVideoService.ACTION_UPLOAD_SERVICE_RESPONSE); intentFilter.addCategory(Intent.CATEGORY_DEFAULT); LocalBroadcastManager.getInstance(this) .registerReceiver(mUploadResultReceiver, intentFilter); }
/** * Register a BroadcastReceiver that receives a result from the * UploadVideoService when a video upload completes. */
Register a BroadcastReceiver that receives a result from the UploadVideoService when a video upload completes
registerReceiver
{ "repo_name": "Volant/mobilecloud-15", "path": "VideoClient/Client/app/src/main/java/vandy/mooc/view/VideoListActivity.java", "license": "apache-2.0", "size": 9798 }
[ "android.content.Intent", "android.content.IntentFilter", "android.support.v4.content.LocalBroadcastManager" ]
import android.content.Intent; import android.content.IntentFilter; import android.support.v4.content.LocalBroadcastManager;
import android.content.*; import android.support.v4.content.*;
[ "android.content", "android.support" ]
android.content; android.support;
2,076,505
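The registration above normally has a matching unregistration in the activity lifecycle. The fragment below is an assumed counterpart (the record does not show where, or whether, VideoListActivity unregisters); it relies only on the standard LocalBroadcastManager API.

// Assumed counterpart inside the same activity (fragment, not a standalone class):
@Override
protected void onDestroy() {
    // Stop listening for UploadVideoService results once the activity is going away.
    LocalBroadcastManager.getInstance(this)
                         .unregisterReceiver(mUploadResultReceiver);
    super.onDestroy();
}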
EClass getResources();
EClass getResources();
/** * Returns the meta object for class '{@link io.opensemantics.semiotics.model.assessment.Resources <em>Resources</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for class '<em>Resources</em>'. * @see io.opensemantics.semiotics.model.assessment.Resources * @generated */
Returns the meta object for class '<code>io.opensemantics.semiotics.model.assessment.Resources Resources</code>'.
getResources
{ "repo_name": "CoastalHacking/semiotics-main", "path": "bundles/io.opensemantics.semiotics.model.assessment/src-gen/io/opensemantics/semiotics/model/assessment/AssessmentPackage.java", "license": "apache-2.0", "size": 151116 }
[ "org.eclipse.emf.ecore.EClass" ]
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
1,795,341
void writeToNBT(NBTTagCompound data);
void writeToNBT(NBTTagCompound data);
/** * write nbt data * * @param data to be written data */
write nbt data
writeToNBT
{ "repo_name": "rolandoislas/PeripheralsPlusPlus", "path": "src/api/java/appeng/api/parts/IFacadeContainer.java", "license": "gpl-2.0", "size": 2522 }
[ "net.minecraft.nbt.NBTTagCompound" ]
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.*;
[ "net.minecraft.nbt" ]
net.minecraft.nbt;
2,629,056
@Override public Response toResponse(IOException exception) { if (exception instanceof FileNotFoundException) { logger.error( "FileNotFoundException when calling " + request.getRequestURL().toString() + " ! Message: " + exception.getMessage() ); return Response.status(HttpURLConnection.HTTP_NOT_FOUND). entity(exception.getMessage()). type(MediaType.TEXT_PLAIN). build(); } logger.error( "IOException when calling " + request.getRequestURL().toString() + " ! Message: " + exception.getMessage() ); return Response.serverError(). entity(exception.getMessage()). type(MediaType.TEXT_PLAIN). build(); }
Response function(IOException exception) { if (exception instanceof FileNotFoundException) { logger.error( STR + request.getRequestURL().toString() + STR + exception.getMessage() ); return Response.status(HttpURLConnection.HTTP_NOT_FOUND). entity(exception.getMessage()). type(MediaType.TEXT_PLAIN). build(); } logger.error( STR + request.getRequestURL().toString() + STR + exception.getMessage() ); return Response.serverError(). entity(exception.getMessage()). type(MediaType.TEXT_PLAIN). build(); }
/** * Maps an IOException to a Response with the body * containing the exception's message. The status code * is 404 NOT FOUND if the exception is FileNotFoundException or * 500 SERVER ERROR otherwise. */
Maps an IOException to a Response with the body containing the exception's message. The status code is 404 NOT FOUND if the exception is FileNotFoundException or 500 SERVER ERROR otherwise
toResponse
{ "repo_name": "eBay/oink", "path": "service/src/main/java/org/pig/oink/rest/IOExceptionMapper.java", "license": "apache-2.0", "size": 2329 }
[ "java.io.FileNotFoundException", "java.io.IOException", "java.net.HttpURLConnection", "javax.ws.rs.core.MediaType", "javax.ws.rs.core.Response" ]
import java.io.FileNotFoundException; import java.io.IOException; import java.net.HttpURLConnection; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response;
import java.io.*; import java.net.*; import javax.ws.rs.core.*;
[ "java.io", "java.net", "javax.ws" ]
java.io; java.net; javax.ws;
1,965,876
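For the mapper above to take effect it has to be registered with the JAX-RS runtime. The sketch below shows one common style, listing the class in an Application subclass; whether Oink actually wires it this way, via @Provider scanning, or through web.xml is not shown in the record, so treat it as an assumption.

import java.util.HashSet;
import java.util.Set;
import javax.ws.rs.core.Application;
import org.pig.oink.rest.IOExceptionMapper;   // package taken from the record's path

// One possible registration style for the exception mapper (assumption, not Oink's confirmed setup).
public class OinkApplicationSketch extends Application {
    @Override
    public Set<Class<?>> getClasses() {
        Set<Class<?>> classes = new HashSet<>();
        classes.add(IOExceptionMapper.class);   // the mapper from the record above
        return classes;
    }
}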