method
stringlengths
13
441k
clean_method
stringlengths
7
313k
doc
stringlengths
17
17.3k
comment
stringlengths
3
1.42k
method_name
stringlengths
1
273
extra
dict
imports
sequence
imports_info
stringlengths
19
34.8k
cluster_imports_info
stringlengths
15
3.66k
libraries
sequence
libraries_info
stringlengths
6
661
id
int64
0
2.92M
private static void checksumDir(Checksum chk, String path) throws IOException { File dir = new File(path); if (!dir.exists()) { throw new FileNotFoundException( "FileUtil.checksumDir: The directory " + path + " does not exist"); } if (dir.isDirectory()) { String[] names = dir.list(); // Needed because list() method may sort different the files // depending on implementation, and CRC32 is non-commutative. Arrays.sort(names, String.CASE_INSENSITIVE_ORDER); for (int i = 0; i < names.length; i++) { if (!names[i].endsWith(CHECKSUM_FILENAME)) { String filespec = path + File.separatorChar + names[i]; File file = new File(filespec); if (file.isDirectory()) { checksumDir(chk, filespec); } else { checksumFile(chk, filespec); } } } } }
static void function(Checksum chk, String path) throws IOException { File dir = new File(path); if (!dir.exists()) { throw new FileNotFoundException( STR + path + STR); } if (dir.isDirectory()) { String[] names = dir.list(); Arrays.sort(names, String.CASE_INSENSITIVE_ORDER); for (int i = 0; i < names.length; i++) { if (!names[i].endsWith(CHECKSUM_FILENAME)) { String filespec = path + File.separatorChar + names[i]; File file = new File(filespec); if (file.isDirectory()) { checksumDir(chk, filespec); } else { checksumFile(chk, filespec); } } } } }
/** * Recursively computes the checksum of a given directory tree and adds it * to the provided Checksum object. * @throws java.io.FileNotFoundException if the given directory does not exist * @throws java.io.IOException if an I/O error occurs while reading a file */
Recursively computes the checksum of a given directory tree and adds it to the provided Checksum object
checksumDir
{ "repo_name": "linkedin/indextank-engine", "path": "flaptor-util/com/flaptor/util/FileUtil.java", "license": "apache-2.0", "size": 22899 }
[ "java.io.File", "java.io.FileNotFoundException", "java.io.IOException", "java.util.Arrays", "java.util.zip.Checksum" ]
import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.util.Arrays; import java.util.zip.Checksum;
import java.io.*; import java.util.*; import java.util.zip.*;
[ "java.io", "java.util" ]
java.io; java.util;
2,328,302
/**
 * Creates a new shard routing entry in state {@code UNASSIGNED}: no current
 * or relocating node, no allocation id, and an unknown expected shard size
 * ({@code UNAVAILABLE_EXPECTED_SHARD_SIZE}).
 *
 * @param shardId        id of the shard being routed
 * @param primary        whether this routing is for the primary copy
 * @param recoverySource how the shard is expected to recover
 * @param unassignedInfo the reason the shard is unassigned
 */
public static ShardRouting newUnassigned(ShardId shardId, boolean primary, RecoverySource recoverySource, UnassignedInfo unassignedInfo) { return new ShardRouting(shardId, null, null, primary, ShardRoutingState.UNASSIGNED, recoverySource, unassignedInfo, null, UNAVAILABLE_EXPECTED_SHARD_SIZE); }
static ShardRouting function(ShardId shardId, boolean primary, RecoverySource recoverySource, UnassignedInfo unassignedInfo) { return new ShardRouting(shardId, null, null, primary, ShardRoutingState.UNASSIGNED, recoverySource, unassignedInfo, null, UNAVAILABLE_EXPECTED_SHARD_SIZE); }
/** * Creates a new unassigned shard. */
Creates a new unassigned shard
newUnassigned
{ "repo_name": "ern/elasticsearch", "path": "server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java", "license": "apache-2.0", "size": 26215 }
[ "org.elasticsearch.index.shard.ShardId" ]
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.*;
[ "org.elasticsearch.index" ]
org.elasticsearch.index;
2,503,330
/**
 * Retracts the given right tuple from this node's memory and re-evaluates
 * every left tuple it was blocking: each blocked left tuple is detached,
 * matched against the remaining right tuples for a replacement blocker,
 * and — if none is found — re-added to left tuple memory with a retraction
 * propagated to the sink.
 *
 * @param rightTuple    the right tuple being retracted
 * @param context       the propagation context
 * @param workingMemory the working memory session
 */
public void retractRightTuple(final RightTuple rightTuple, final PropagationContext context, final InternalWorkingMemory workingMemory) {
    final BetaMemory memory = (BetaMemory) workingMemory.getNodeMemory( this );
    if ( isUnlinkingEnabled() ) {
        // Unlinking mode uses the staged-delete path instead of eager retraction.
        doDeleteRightTuple( rightTuple, workingMemory, memory );
        return;
    }
    RightTupleMemory rightTupleMemory = memory.getRightTupleMemory();
    boolean useComparisonIndex = rightTupleMemory.getIndexType().isComparison();
    FastIterator rightIt = rightTupleMemory.fastIterator();
    // For a comparison index the search root must be recomputed per left
    // tuple (below); otherwise the successor of the retracted tuple is used.
    RightTuple rootBlocker = useComparisonIndex ? null : (RightTuple) rightIt.next(rightTuple);
    rightTupleMemory.remove( rightTuple );
    rightTuple.setMemory( null );
    if ( rightTuple.getBlocked() == null ) {
        return;
    }
    for ( LeftTuple leftTuple = (LeftTuple) rightTuple.getBlocked(); leftTuple != null; ) {
        LeftTuple temp = leftTuple.getBlockedNext();
        leftTuple.setBlocker( null );
        leftTuple.setBlockedPrevious( null );
        leftTuple.setBlockedNext( null );
        this.constraints.updateFromTuple( memory.getContext(), workingMemory, leftTuple );
        if (useComparisonIndex) {
            rootBlocker = getFirstRightTuple( leftTuple, rightTupleMemory, context, rightIt );
        }
        // we know that older tuples have been checked so continue previously
        for ( RightTuple newBlocker = rootBlocker; newBlocker != null; newBlocker = (RightTuple) rightIt.next(newBlocker ) ) {
            if ( this.constraints.isAllowedCachedLeft( memory.getContext(), newBlocker.getFactHandle() ) ) {
                leftTuple.setBlocker( newBlocker );
                newBlocker.addBlocked( leftTuple );
                break;
            }
        }
        if ( leftTuple.getBlocker() == null ) {
            // was previous blocked and not in memory, so add
            memory.getLeftTupleMemory().add( leftTuple );
            this.sink.propagateRetractLeftTuple( leftTuple, context, workingMemory );
        }
        leftTuple = temp;
    }
    rightTuple.nullBlocked();
    this.constraints.resetTuple( memory.getContext() );
}
void function(final RightTuple rightTuple, final PropagationContext context, final InternalWorkingMemory workingMemory) { final BetaMemory memory = (BetaMemory) workingMemory.getNodeMemory( this ); if ( isUnlinkingEnabled() ) { doDeleteRightTuple( rightTuple, workingMemory, memory ); return; } RightTupleMemory rightTupleMemory = memory.getRightTupleMemory(); boolean useComparisonIndex = rightTupleMemory.getIndexType().isComparison(); FastIterator rightIt = rightTupleMemory.fastIterator(); RightTuple rootBlocker = useComparisonIndex ? null : (RightTuple) rightIt.next(rightTuple); rightTupleMemory.remove( rightTuple ); rightTuple.setMemory( null ); if ( rightTuple.getBlocked() == null ) { return; } for ( LeftTuple leftTuple = (LeftTuple) rightTuple.getBlocked(); leftTuple != null; ) { LeftTuple temp = leftTuple.getBlockedNext(); leftTuple.setBlocker( null ); leftTuple.setBlockedPrevious( null ); leftTuple.setBlockedNext( null ); this.constraints.updateFromTuple( memory.getContext(), workingMemory, leftTuple ); if (useComparisonIndex) { rootBlocker = getFirstRightTuple( leftTuple, rightTupleMemory, context, rightIt ); } for ( RightTuple newBlocker = rootBlocker; newBlocker != null; newBlocker = (RightTuple) rightIt.next(newBlocker ) ) { if ( this.constraints.isAllowedCachedLeft( memory.getContext(), newBlocker.getFactHandle() ) ) { leftTuple.setBlocker( newBlocker ); newBlocker.addBlocked( leftTuple ); break; } } if ( leftTuple.getBlocker() == null ) { memory.getLeftTupleMemory().add( leftTuple ); this.sink.propagateRetractLeftTuple( leftTuple, context, workingMemory ); } leftTuple = temp; } rightTuple.nullBlocked(); this.constraints.resetTuple( memory.getContext() ); }
/** * Retract the <code>FactHandleImpl</code>. If the handle has any * <code>ReteTuple</code> matches and those tuples now have no * other match, retract tuple * * @param rightTuple * the <code>RightTuple</code> being retracted * @param context * The <code>PropagationContext</code> * @param workingMemory * The working memory session. */
Retract the <code>FactHandleImpl</code>. If the handle has any <code>ReteTuple</code> matches and those tuples now have no other match, retract tuple
retractRightTuple
{ "repo_name": "yurloc/drools", "path": "drools-core/src/main/java/org/drools/reteoo/ExistsNode.java", "license": "apache-2.0", "size": 25982 }
[ "org.drools.common.InternalWorkingMemory", "org.drools.core.util.FastIterator", "org.drools.spi.PropagationContext" ]
import org.drools.common.InternalWorkingMemory; import org.drools.core.util.FastIterator; import org.drools.spi.PropagationContext;
import org.drools.common.*; import org.drools.core.util.*; import org.drools.spi.*;
[ "org.drools.common", "org.drools.core", "org.drools.spi" ]
org.drools.common; org.drools.core; org.drools.spi;
1,908,816
/**
 * Executes the given HTTP request and returns the response body as a
 * String, or {@code null} when the status code is not {@code http_code_ok}
 * or any error occurs (errors are logged, not rethrown).
 *
 * @param request the request to be executed
 * @return the response body, or {@code null} on non-OK status or error
 */
public String executeSupportRequest(HttpUriRequest request) {
    log.debug("executeSupportRequest " + request.getURI().toString());
    CloseableHttpClient httpClient = getHttpClient();
    HttpResponse response = null;
    String result = null;
    try {
        response = httpClient.execute(request);
        log.debug("Status : " + response.getStatusLine().getStatusCode());
        if (response.getStatusLine().getStatusCode() == http_code_ok) {
            InputStream is = response.getEntity().getContent();
            try {
                result = convertStreamToString(is);
                log.debug("Result " + result);
            } finally {
                // Close in finally so the connection is released even when
                // convertStreamToString fails. The original re-fetched and
                // re-closed the content stream here, which was redundant.
                is.close();
            }
        }
    } catch (Exception e) {
        // response may still be null if execute() itself threw — the
        // original dereferenced it unconditionally and could NPE here.
        String status = (response != null && response.getStatusLine() != null)
                ? String.valueOf(response.getStatusLine().getStatusCode())
                : "no response";
        log.warn("Error in the request, message:" + status + " : " + e.getMessage());
    }
    return result;
}
String function(HttpUriRequest request) { log.debug(STR + request.getURI().toString()); CloseableHttpClient httpClient = getHttpClient(); HttpResponse response = null; String result = null; try { response = httpClient.execute(request); log.debug(STR + response.getStatusLine().getStatusCode()); if (response.getStatusLine().getStatusCode() == http_code_ok) { InputStream is = response.getEntity().getContent(); result = convertStreamToString(is); log.debug(STR + result); is.close(); response.getEntity().getContent().close(); } } catch (Exception e) { log.warn(STR + response.getStatusLine().getStatusCode() + STR + e.getMessage()); } return result; }
/** * Method to execute a request and get the response from NOVA. * * @param request * the request to be executed * @return HttpUriRequest the response from server * @throws OpenStackException */
Method to execute a request and get the response from NOVA
executeSupportRequest
{ "repo_name": "Fiware/cloud.PaaS", "path": "core/src/main/java/com/telefonica/euro_iaas/paasmanager/util/OpenOperationUtilImpl.java", "license": "apache-2.0", "size": 18738 }
[ "java.io.InputStream", "org.apache.http.HttpResponse", "org.apache.http.client.methods.HttpUriRequest", "org.apache.http.impl.client.CloseableHttpClient" ]
import java.io.InputStream; import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpUriRequest; import org.apache.http.impl.client.CloseableHttpClient;
import java.io.*; import org.apache.http.*; import org.apache.http.client.methods.*; import org.apache.http.impl.client.*;
[ "java.io", "org.apache.http" ]
java.io; org.apache.http;
615,656
/**
 * Asks for a ByteBuffer with at least {@code size} bytes remaining.
 * Depending on how the interface is used, the buffer may serve receiving
 * or transmitting. The caller must call {@code recvCompleted()} when done
 * with the buffer.
 *
 * @param size minimum number of bytes that must remain in the buffer
 * @return a buffer with at least {@code size} bytes remaining
 */
ByteBuffer getRecvBuffer( int size );
ByteBuffer getRecvBuffer( int size );
/** * Ask for a ByteBuffer with at least the given amount of bytes remaining. * Depending on how the interface is used, the buffer might be used for receiving or for transmitting. * The caller must call {@link #recvCompleted()} when done with the buffer. * @param size * @return */
Ask for a ByteBuffer with at least the given amount of bytes remaining. Depending on how the interface is used, the buffer might be used for receiving or for transmitting. The caller must call <code>#recvCompleted()</code> when done with the buffer
getRecvBuffer
{ "repo_name": "keinfarbton/Chabu", "path": "java/org.chabu/src/org/chabu/prot/v1/ChabuRecvByteTarget.java", "license": "mit", "size": 830 }
[ "java.nio.ByteBuffer" ]
import java.nio.ByteBuffer;
import java.nio.*;
[ "java.nio" ]
java.nio;
1,354,156
/**
 * Compares keys by their underlying {@code data} value interpreted as an
 * unsigned long, so keys sort in ascending unsigned order.
 */
@Override public int compareTo(PrimaryKey other) { return UnsignedLongs.compare(data, other.data); }
int function(PrimaryKey other) { return UnsignedLongs.compare(data, other.data); }
/** * Compares keys such that they are sorted in ascending order. */
Compares keys such that they are sorted in ascending order
compareTo
{ "repo_name": "MattiasZurkovic/concourse", "path": "concourse-server/src/main/java/org/cinchapi/concourse/server/model/PrimaryKey.java", "license": "apache-2.0", "size": 4713 }
[ "com.google.common.primitives.UnsignedLongs" ]
import com.google.common.primitives.UnsignedLongs;
import com.google.common.primitives.*;
[ "com.google.common" ]
com.google.common;
73,233
/**
 * Adds a property descriptor for the Domain feature of ResourceRefType to
 * this item provider's descriptor list.
 * NOTE(review): this is EMF-generated code (see the @generated marker in
 * the original Javadoc) — edits here would be overwritten on regeneration.
 *
 * @param object the object the descriptor is being created for (unused by
 *               the generated template)
 */
protected void addDomainPropertyDescriptor(Object object) { itemPropertyDescriptors.add(createItemPropertyDescriptor( ((ComposeableAdapterFactory) adapterFactory) .getRootAdapterFactory(), getResourceLocator(), getString("_UI_ResourceRefType_domain_feature"), getString( "_UI_PropertyDescriptor_description", "_UI_ResourceRefType_domain_feature", "_UI_ResourceRefType_type"), NamingPackage.eINSTANCE .getResourceRefType_Domain(), true, ItemPropertyDescriptor.GENERIC_VALUE_IMAGE, null, null)); }
void function(Object object) { itemPropertyDescriptors.add(createItemPropertyDescriptor( ((ComposeableAdapterFactory) adapterFactory) .getRootAdapterFactory(), getResourceLocator(), getString(STR), getString( STR, STR, STR), NamingPackage.eINSTANCE .getResourceRefType_Domain(), true, ItemPropertyDescriptor.GENERIC_VALUE_IMAGE, null, null)); }
/** * This adds a property descriptor for the Domain feature. <!-- * begin-user-doc --> <!-- end-user-doc --> * * @generated */
This adds a property descriptor for the Domain feature.
addDomainPropertyDescriptor
{ "repo_name": "apache/geronimo-devtools", "path": "plugins/org.apache.geronimo.deployment.model.edit/src/org/apache/geronimo/xml/ns/naming/provider/ResourceRefTypeItemProvider.java", "license": "apache-2.0", "size": 13484 }
[ "org.apache.geronimo.xml.ns.naming.NamingPackage", "org.eclipse.emf.edit.provider.ComposeableAdapterFactory", "org.eclipse.emf.edit.provider.ItemPropertyDescriptor" ]
import org.apache.geronimo.xml.ns.naming.NamingPackage; import org.eclipse.emf.edit.provider.ComposeableAdapterFactory; import org.eclipse.emf.edit.provider.ItemPropertyDescriptor;
import org.apache.geronimo.xml.ns.naming.*; import org.eclipse.emf.edit.provider.*;
[ "org.apache.geronimo", "org.eclipse.emf" ]
org.apache.geronimo; org.eclipse.emf;
489,993
/**
 * Flattens a {@code long[]} into the given XmlSerializer as a
 * {@code <long-array>} element containing one {@code <item>} per value;
 * a {@code null} array is written as an empty {@code <null>} element.
 * The output can later be read back with {@code readThisLongArrayXml()}.
 *
 * @param val  the long array to flatten, or null
 * @param name value of the "name" attribute on the array tag, or null for none
 * @param out  serializer to write the array into
 */
public static final void writeLongArrayXml(long[] val, String name, XmlSerializer out)
        throws XmlPullParserException, java.io.IOException {
    if (val == null) {
        out.startTag(null, "null");
        out.endTag(null, "null");
        return;
    }
    out.startTag(null, "long-array");
    if (name != null) {
        out.attribute(null, "name", name);
    }
    // Element count first, so the reader can pre-size its result array.
    out.attribute(null, "num", Integer.toString(val.length));
    for (long value : val) {
        out.startTag(null, "item");
        out.attribute(null, "value", Long.toString(value));
        out.endTag(null, "item");
    }
    out.endTag(null, "long-array");
}
static final void function(long[] val, String name, XmlSerializer out) throws XmlPullParserException, java.io.IOException { if (val == null) { out.startTag(null, "null"); out.endTag(null, "null"); return; } out.startTag(null, STR); if (name != null) { out.attribute(null, "name", name); } final int N = val.length; out.attribute(null, "num", Integer.toString(N)); for (int i=0; i<N; i++) { out.startTag(null, "item"); out.attribute(null, "value", Long.toString(val[i])); out.endTag(null, "item"); } out.endTag(null, STR); }
/** * Flatten a long[] into an XmlSerializer. The list can later be read back * with readThisLongArrayXml(). * * @param val The long array to be flattened. * @param name Name attribute to include with this array's tag, or null for * none. * @param out XmlSerializer to write the array into. * * @see #writeMapXml * @see #writeValueXml * @see #readThisIntArrayXml */
Flatten a long[] into an XmlSerializer. The list can later be read back with readThisLongArrayXml()
writeLongArrayXml
{ "repo_name": "mobvoi/ticdesign", "path": "ticDesign/src/main/java/ticwear/design/internal/XmlUtils.java", "license": "apache-2.0", "size": 60785 }
[ "java.io.IOException", "org.xmlpull.v1.XmlPullParserException", "org.xmlpull.v1.XmlSerializer" ]
import java.io.IOException; import org.xmlpull.v1.XmlPullParserException; import org.xmlpull.v1.XmlSerializer;
import java.io.*; import org.xmlpull.v1.*;
[ "java.io", "org.xmlpull.v1" ]
java.io; org.xmlpull.v1;
2,708,465
/**
 * Handles a device info request from the endpoint identified by
 * {@code sourceEndpointKey} by sending the device info back to it.
 *
 * @param deviceInfoRequest the incoming request (payload unused here)
 * @param sourceEndpointKey key of the endpoint that sent the request
 */
@Override public void onEvent(DeviceInfoRequest deviceInfoRequest, final String sourceEndpointKey) { sendDeviceInfo(sourceEndpointKey); }
void function(DeviceInfoRequest deviceInfoRequest, final String sourceEndpointKey) { sendDeviceInfo(sourceEndpointKey); }
/** Handle the device info request from the endpoint identified by sourceEndpointKey. * Send the device info in the response. */
Handle the device info request from the endpoint identified by sourceEndpointKey
onEvent
{ "repo_name": "maxml/sample-apps", "path": "smarthousedemo/source/src/org/kaaproject/kaa/demo/smarthousedemo/controller/SmartHouseController.java", "license": "apache-2.0", "size": 23702 }
[ "org.kaaproject.kaa.demo.smarthouse.device.DeviceInfoRequest" ]
import org.kaaproject.kaa.demo.smarthouse.device.DeviceInfoRequest;
import org.kaaproject.kaa.demo.smarthouse.device.*;
[ "org.kaaproject.kaa" ]
org.kaaproject.kaa;
641,488
/**
 * Reads {@code n} bytes from the stream and decodes them as a US-ASCII
 * string. Used for reading values of type {@code ExifTag#TYPE_ASCII}.
 *
 * @param n number of bytes to read
 * @return the decoded string
 * @throws IOException if reading from the underlying stream fails
 */
protected String readString(int n) throws IOException { return readString(n, US_ASCII); }
String function(int n) throws IOException { return readString(n, US_ASCII); }
/** * Reads a String from the InputStream with US-ASCII charset. The parser * will read n bytes and convert it to ascii string. This is used for * reading values of type {@link ExifTag#TYPE_ASCII}. */
Reads a String from the InputStream with US-ASCII charset. The parser will read n bytes and convert it to ascii string. This is used for reading values of type <code>ExifTag#TYPE_ASCII</code>
readString
{ "repo_name": "aizhang/Android-Exif", "path": "src/com/ai/exif/ExifParser.java", "license": "gpl-2.0", "size": 34305 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
2,520,954
/**
 * Attaches the "flow load" of both directions (one and two) of the
 * underlying topology links to the given traffic link.
 *
 * @param link the traffic link to process
 */
protected void attachFlowLoad(TrafficLink link) { link.addLoad(getLinkFlowLoad(link.one())); link.addLoad(getLinkFlowLoad(link.two())); }
void function(TrafficLink link) { link.addLoad(getLinkFlowLoad(link.one())); link.addLoad(getLinkFlowLoad(link.two())); }
/** * Processes the given traffic link to attach the "flow load" attributed * to the underlying topology links. * * @param link the traffic link to process */
Processes the given traffic link to attach the "flow load" attributed to the underlying topology links
attachFlowLoad
{ "repo_name": "LorenzReinhart/ONOSnew", "path": "web/gui/src/main/java/org/onosproject/ui/impl/TrafficMonitorBase.java", "license": "apache-2.0", "size": 13921 }
[ "org.onosproject.ui.impl.topo.util.TrafficLink" ]
import org.onosproject.ui.impl.topo.util.TrafficLink;
import org.onosproject.ui.impl.topo.util.*;
[ "org.onosproject.ui" ]
org.onosproject.ui;
2,086,367
/**
 * Kills this process.
 *
 * @throws MachineException if an internal error occurs
 */
void kill() throws MachineException;
void kill() throws MachineException;
/** * Kills this process. * * @throws MachineException if internal error occurs */
Kills this process
kill
{ "repo_name": "TypeFox/che", "path": "wsmaster/che-core-api-machine/src/main/java/org/eclipse/che/api/machine/server/spi/InstanceProcess.java", "license": "epl-1.0", "size": 2060 }
[ "org.eclipse.che.api.machine.server.exception.MachineException" ]
import org.eclipse.che.api.machine.server.exception.MachineException;
import org.eclipse.che.api.machine.server.exception.*;
[ "org.eclipse.che" ]
org.eclipse.che;
560,593
/**
 * Sets the response object held by this instance.
 *
 * @param responseObject the responseObject to set
 */
public void setResponseObject(ResponseObject responseObject) { this.responseObject = responseObject; }
void function(ResponseObject responseObject) { this.responseObject = responseObject; }
/** * setter method * * @see EntireObject#responseObject * @param responseObject * the responseObject to set */
setter method
setResponseObject
{ "repo_name": "owen-chen/crawl-center", "path": "cc-crawl/src/main/java/org/archmage/cc/crawl/bean/EntireObject.java", "license": "gpl-2.0", "size": 2157 }
[ "org.archmage.cc.infosource.dto.response.ResponseObject" ]
import org.archmage.cc.infosource.dto.response.ResponseObject;
import org.archmage.cc.infosource.dto.response.*;
[ "org.archmage.cc" ]
org.archmage.cc;
720,057
final JettyLauncher launcher = new JettyLauncher(args); final File exploded = launcher.extractWar(); launcher.deleteExplodedOnShutdown(exploded); launcher.start(exploded); } public JettyLauncher(String[] args) { super(args); }
final JettyLauncher launcher = new JettyLauncher(args); final File exploded = launcher.extractWar(); launcher.deleteExplodedOnShutdown(exploded); launcher.start(exploded); } public JettyLauncher(String[] args) { super(args); }
/** * Start the server. * * @param args optional; 1st is context path, 2nd is host, 3rd is http port, * 4th is SSL port, 5th is SSL keystore path, 6th is keystore password * @throws IOException */
Start the server
main
{ "repo_name": "grails-plugins/grails-standalone", "path": "src/runtime/grails/plugin/standalone/JettyLauncher.java", "license": "apache-2.0", "size": 8234 }
[ "java.io.File" ]
import java.io.File;
import java.io.*;
[ "java.io" ]
java.io;
2,388,872
/**
 * Unwraps the given SchemaPlus instance as a Drill AbstractSchema and
 * returns that schema's default schema. If the given schema is not an
 * AbstractSchema, the resulting ClassCastException is converted into a
 * UserException validation error naming the offending schema path.
 *
 * @param schemaPlus the Calcite schema to unwrap
 * @return the default schema of the unwrapped Drill schema
 */
public static AbstractSchema unwrapAsDrillSchemaInstance(SchemaPlus schemaPlus) { try { return (AbstractSchema) schemaPlus.unwrap(AbstractSchema.class).getDefaultSchema(); } catch (ClassCastException e) { throw UserException.validationError(e) .message("Schema [%s] is not a Drill schema.", getSchemaPath(schemaPlus)) .build(logger); } }
static AbstractSchema function(SchemaPlus schemaPlus) { try { return (AbstractSchema) schemaPlus.unwrap(AbstractSchema.class).getDefaultSchema(); } catch (ClassCastException e) { throw UserException.validationError(e) .message(STR, getSchemaPath(schemaPlus)) .build(logger); } }
/** * Unwrap given <i>SchemaPlus</i> instance as Drill schema instance (<i>AbstractSchema</i>). Once unwrapped, return * default schema from <i>AbstractSchema</i>. If the given schema is not an instance of <i>AbstractSchema</i> a * {@link UserException} is thrown. */
Unwrap given SchemaPlus instance as Drill schema instance (AbstractSchema). Once unwrapped, return default schema from AbstractSchema. If the given schema is not an instance of AbstractSchema a <code>UserException</code> is thrown
unwrapAsDrillSchemaInstance
{ "repo_name": "cwestin/incubator-drill", "path": "exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/SchemaUtilites.java", "license": "apache-2.0", "size": 6958 }
[ "org.apache.calcite.schema.SchemaPlus", "org.apache.drill.common.exceptions.UserException", "org.apache.drill.exec.store.AbstractSchema" ]
import org.apache.calcite.schema.SchemaPlus; import org.apache.drill.common.exceptions.UserException; import org.apache.drill.exec.store.AbstractSchema;
import org.apache.calcite.schema.*; import org.apache.drill.common.exceptions.*; import org.apache.drill.exec.store.*;
[ "org.apache.calcite", "org.apache.drill" ]
org.apache.calcite; org.apache.drill;
632,782
/**
 * Gets a vulnerability assessment scan record of a database, asynchronously.
 * Delegates to getWithResponseAsync and unwraps the body, completing empty
 * when the service returns no value.
 *
 * @param resourceGroupName name of the resource group containing the resource
 * @param managedInstanceName name of the managed instance
 * @param databaseName name of the database
 * @param vulnerabilityAssessmentName name of the vulnerability assessment
 * @param scanId id of the scan to retrieve
 * @return a Mono emitting the scan record, or empty if none was returned
 */
@ServiceMethod(returns = ReturnType.SINGLE) public Mono<VulnerabilityAssessmentScanRecordInner> getAsync( String resourceGroupName, String managedInstanceName, String databaseName, VulnerabilityAssessmentName vulnerabilityAssessmentName, String scanId) { return getWithResponseAsync( resourceGroupName, managedInstanceName, databaseName, vulnerabilityAssessmentName, scanId) .flatMap( (Response<VulnerabilityAssessmentScanRecordInner> res) -> { if (res.getValue() != null) { return Mono.just(res.getValue()); } else { return Mono.empty(); } }); }
@ServiceMethod(returns = ReturnType.SINGLE) Mono<VulnerabilityAssessmentScanRecordInner> function( String resourceGroupName, String managedInstanceName, String databaseName, VulnerabilityAssessmentName vulnerabilityAssessmentName, String scanId) { return getWithResponseAsync( resourceGroupName, managedInstanceName, databaseName, vulnerabilityAssessmentName, scanId) .flatMap( (Response<VulnerabilityAssessmentScanRecordInner> res) -> { if (res.getValue() != null) { return Mono.just(res.getValue()); } else { return Mono.empty(); } }); }
/** * Gets a vulnerability assessment scan record of a database. * * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value * from the Azure Resource Manager API or the portal. * @param managedInstanceName The name of the managed instance. * @param databaseName The name of the database. * @param vulnerabilityAssessmentName The name of the vulnerability assessment. * @param scanId The vulnerability assessment scan Id of the scan to retrieve. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a vulnerability assessment scan record of a database. */
Gets a vulnerability assessment scan record of a database
getAsync
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/resourcemanager/azure-resourcemanager-sql/src/main/java/com/azure/resourcemanager/sql/implementation/ManagedDatabaseVulnerabilityAssessmentScansClientImpl.java", "license": "mit", "size": 63912 }
[ "com.azure.core.annotation.ReturnType", "com.azure.core.annotation.ServiceMethod", "com.azure.core.http.rest.Response", "com.azure.resourcemanager.sql.fluent.models.VulnerabilityAssessmentScanRecordInner", "com.azure.resourcemanager.sql.models.VulnerabilityAssessmentName" ]
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.Response; import com.azure.resourcemanager.sql.fluent.models.VulnerabilityAssessmentScanRecordInner; import com.azure.resourcemanager.sql.models.VulnerabilityAssessmentName;
import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.resourcemanager.sql.fluent.models.*; import com.azure.resourcemanager.sql.models.*;
[ "com.azure.core", "com.azure.resourcemanager" ]
com.azure.core; com.azure.resourcemanager;
2,261,067
/**
 * Local transactional wrapper around the superclass doWork: the
 * {@code @Transactional} annotation must live on a concrete class, so this
 * override exists solely to carry it.
 *
 * @param req the create-cache-security-group request
 * @return the result message produced by the superclass
 * @throws Exception if the underlying work fails
 */
@Transactional public CreateCacheSecurityGroupActionResultMessage doWork( CreateCacheSecurityGroupActionRequestMessage req) throws Exception { logger.debug("Performing work for CreateCacheSecurityGroupAction."); return super.doWork(req, getSession()); }
CreateCacheSecurityGroupActionResultMessage function( CreateCacheSecurityGroupActionRequestMessage req) throws Exception { logger.debug(STR); return super.doWork(req, getSession()); }
/** * We need a local copy of this doWork to provide the transactional * annotation. Transaction management is handled by the annotation, which * can only be on a concrete class. * @param req * @return * @throws Exception */
We need a local copy of this doWork to provide the transactional annotation. Transaction management is handled by the annotation, which can only be on a concrete class
doWork
{ "repo_name": "TranscendComputing/TopStackElasticCache", "path": "src/com/transcend/elasticache/worker/CreateCacheSecurityGroupActionWorker.java", "license": "apache-2.0", "size": 2895 }
[ "com.transcend.elasticache.message.CreateCacheSecurityGroupActionMessage" ]
import com.transcend.elasticache.message.CreateCacheSecurityGroupActionMessage;
import com.transcend.elasticache.message.*;
[ "com.transcend.elasticache" ]
com.transcend.elasticache;
282,270
/**
 * Handles serialization of a rendered image into the given SVG element.
 * The handler should set the xlink:href tag and the width and height
 * attributes on {@code imageElement}.
 *
 * @param image            the image to encode
 * @param imageElement     the SVG element to populate
 * @param generatorContext context of the SVG generation
 */
void handleImage(RenderedImage image, Element imageElement, SVGGeneratorContext generatorContext);
void handleImage(RenderedImage image, Element imageElement, SVGGeneratorContext generatorContext);
/** * The handler should set the xlink:href tag and the width and * height attributes. */
The handler should set the xlink:href tag and the width and height attributes
handleImage
{ "repo_name": "sflyphotobooks/crp-batik", "path": "sources/org/apache/batik/svggen/ImageHandler.java", "license": "apache-2.0", "size": 2325 }
[ "java.awt.image.RenderedImage", "org.w3c.dom.Element" ]
import java.awt.image.RenderedImage; import org.w3c.dom.Element;
import java.awt.image.*; import org.w3c.dom.*;
[ "java.awt", "org.w3c.dom" ]
java.awt; org.w3c.dom;
699,721
/**
 * Adds all newly allocated containers to the allocate response and, for any
 * node that does not yet have an entry in {@code nodeTokens}, generates an
 * NMToken and includes it in the response's token list alongside the
 * tokens already gathered.
 *
 * @param response            response being assembled for the caller
 * @param nmTokens            NM tokens already collected for this allocation
 * @param allocatedContainers containers granted in this round (may be empty)
 */
private void updateAllocateResponse(AllocateResponse response,
    List<NMToken> nmTokens, List<Container> allocatedContainers) {
  if (!allocatedContainers.isEmpty()) {
    response.getAllocatedContainers().addAll(allocatedContainers);
    List<NMToken> newTokens = new ArrayList<>();
    for (Container alloc : allocatedContainers) {
      // Only mint a token for nodes that do not already have one.
      if (!nodeTokens.containsKey(alloc.getNodeId())) {
        newTokens.add(nmSecretManager.generateNMToken(appSubmitter, alloc));
      }
    }
    List<NMToken> retTokens = new ArrayList<>(nmTokens);
    retTokens.addAll(newTokens);
    response.setNMTokens(retTokens);
  }
}
void function(AllocateResponse response, List<NMToken> nmTokens, List<Container> allocatedContainers) { List<NMToken> newTokens = new ArrayList<>(); if (allocatedContainers.size() > 0) { response.getAllocatedContainers().addAll(allocatedContainers); for (Container alloc : allocatedContainers) { if (!nodeTokens.containsKey(alloc.getNodeId())) { newTokens.add(nmSecretManager.generateNMToken(appSubmitter, alloc)); } } List<NMToken> retTokens = new ArrayList<>(nmTokens); retTokens.addAll(newTokens); response.setNMTokens(retTokens); } }
/** * Adds all the newly allocated Containers to the allocate Response. * Additionally, in case the NMToken for one of the nodes does not exist, it * generates one and adds it to the response. */
Adds all the newly allocated Containers to the allocate Response. Additionally, in case the NMToken for one of the nodes does not exist, it generates one and adds it to the response
updateAllocateResponse
{ "repo_name": "plusplusjiajia/hadoop", "path": "hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/scheduler/DistributedScheduler.java", "license": "apache-2.0", "size": 10982 }
[ "java.util.ArrayList", "java.util.List", "org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse", "org.apache.hadoop.yarn.api.records.Container", "org.apache.hadoop.yarn.api.records.NMToken" ]
import java.util.ArrayList; import java.util.List; import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.NMToken;
import java.util.*; import org.apache.hadoop.yarn.api.protocolrecords.*; import org.apache.hadoop.yarn.api.records.*;
[ "java.util", "org.apache.hadoop" ]
java.util; org.apache.hadoop;
2,391,583
public DLockRemoteToken queryLock(final Object name) { // long statStart = getStats().startLockRelease(); try { DLockQueryReplyMessage queryReply = null; while (queryReply == null || queryReply.repliedNotGrantor()) { checkDestroyed(); // TODO: consider using peekLockGrantor instead... LockGrantorId theLockGrantorId = getLockGrantorId(); try { queryReply = DLockQueryProcessor.query(theLockGrantorId.getLockGrantorMember(), this.serviceName, name, false , this.dm); } catch (LockGrantorDestroyedException e) { // loop back around to get next lock grantor } finally { if (queryReply != null && queryReply.repliedNotGrantor()) { notLockGrantorId(theLockGrantorId, 0, TimeUnit.MILLISECONDS); } } } // while querying return DLockRemoteToken.create(name, queryReply.getLesseeThread(), queryReply.getLeaseId(), queryReply.getLeaseExpireTime()); } // try finally { // getStats().endLockRelease(statStart); } } // ------------------------------------------------------------------------- // Creation methods // -------------------------------------------------------------------------
DLockRemoteToken function(final Object name) { try { DLockQueryReplyMessage queryReply = null; while (queryReply == null queryReply.repliedNotGrantor()) { checkDestroyed(); LockGrantorId theLockGrantorId = getLockGrantorId(); try { queryReply = DLockQueryProcessor.query(theLockGrantorId.getLockGrantorMember(), this.serviceName, name, false , this.dm); } catch (LockGrantorDestroyedException e) { } finally { if (queryReply != null && queryReply.repliedNotGrantor()) { notLockGrantorId(theLockGrantorId, 0, TimeUnit.MILLISECONDS); } } } return DLockRemoteToken.create(name, queryReply.getLesseeThread(), queryReply.getLeaseId(), queryReply.getLeaseExpireTime()); } finally { } }
/** * Query the grantor for current leasing information of a lock. Returns the current lease info. * * @param name the named lock to get lease information for * @return snapshot of the remote lock information * @throws LockServiceDestroyedException if local instance of lock service has been destroyed */
Query the grantor for current leasing information of a lock. Returns the current lease info
queryLock
{ "repo_name": "pivotal-amurmann/geode", "path": "geode-core/src/main/java/org/apache/geode/distributed/internal/locks/DLockService.java", "license": "apache-2.0", "size": 116745 }
[ "java.util.concurrent.TimeUnit", "org.apache.geode.distributed.internal.locks.DLockQueryProcessor" ]
import java.util.concurrent.TimeUnit; import org.apache.geode.distributed.internal.locks.DLockQueryProcessor;
import java.util.concurrent.*; import org.apache.geode.distributed.internal.locks.*;
[ "java.util", "org.apache.geode" ]
java.util; org.apache.geode;
1,566,010
public Map<String, String> metadata() { return this.metadata; }
Map<String, String> function() { return this.metadata; }
/** * Get a name-value pair to associate with the share as metadata. * * @return the metadata value */
Get a name-value pair to associate with the share as metadata
metadata
{ "repo_name": "navalev/azure-sdk-for-java", "path": "sdk/storage/mgmt-v2019_06_01/src/main/java/com/microsoft/azure/management/storage/v2019_06_01/implementation/FileShareInner.java", "license": "mit", "size": 2854 }
[ "java.util.Map" ]
import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
939,812
@Test public void testBucketEnd() { final SimpleDateFormat fm = new SimpleDateFormat("HH:mm:ss"); final Calendar calendar = Calendar.getInstance(); calendar.setFirstDayOfWeek(Calendar.SUNDAY); // define the granularity to use final DateGranularity m = DateGranularity.MINUTES; final DateGranularity d = DateGranularity.DAYS; final DateGranularity w = DateGranularity.WEEKS; final DateGranularity mo = DateGranularity.MONTHS; // create the logic IRasterLogic<Date> logic; RasterBucket bucket; Date value; // first use MINUTES logic = DateRasterUtilities.createDateRasterLogic(m, 5); bucket = new RasterBucket(0); value = logic.getBucketEnd(bucket); assertEquals(fm.format(value), "00:05:00"); bucket = new RasterBucket(4); value = logic.getBucketEnd(bucket); assertEquals(fm.format(value), "00:05:00"); bucket = new RasterBucket(5); value = logic.getBucketEnd(bucket); assertEquals(fm.format(value), "00:10:00"); bucket = new RasterBucket(31); value = logic.getBucketEnd(bucket); assertEquals(fm.format(value), "00:35:00"); bucket = new RasterBucket(1440); value = logic.getBucketEnd(bucket); assertEquals(fm.format(value), "00:05:00"); bucket = new RasterBucket(-1); value = logic.getBucketEnd(bucket); assertEquals(fm.format(value), "00:00:00"); // now use DAYS logic = DateRasterUtilities.createDateRasterLogic(d, 2); bucket = new RasterBucket(1); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.DAY_OF_WEEK), 3); bucket = new RasterBucket(2); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.DAY_OF_WEEK), 3); bucket = new RasterBucket(3); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.DAY_OF_WEEK), 5); bucket = new RasterBucket(4); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.DAY_OF_WEEK), 5); bucket = new RasterBucket(7); value = logic.getBucketEnd(bucket); calendar.setTime(value); 
assertEquals(calendar.get(Calendar.DAY_OF_WEEK), 1); bucket = new RasterBucket(8); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.DAY_OF_WEEK), 3); bucket = new RasterBucket(9); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.DAY_OF_WEEK), 3); bucket = new RasterBucket(-1); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.DAY_OF_WEEK), 7); bucket = new RasterBucket(0); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.DAY_OF_WEEK), 1); bucket = new RasterBucket(-1); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.DAY_OF_WEEK), 7); // now use WEEKS logic = DateRasterUtilities.createDateRasterLogic(w, 2); bucket = new RasterBucket(1); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.WEEK_OF_YEAR), 3); bucket = new RasterBucket(2); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.WEEK_OF_YEAR), 3); bucket = new RasterBucket(53); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.WEEK_OF_YEAR), 3); bucket = new RasterBucket(52); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.WEEK_OF_YEAR), 1); bucket = new RasterBucket(51); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.WEEK_OF_YEAR), 1); bucket = new RasterBucket(0); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.WEEK_OF_YEAR), 1); // now use MONTHS logic = DateRasterUtilities.createDateRasterLogic(mo, 4); bucket = new RasterBucket(0); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.MONTH), 4); bucket = new RasterBucket(3); value = 
logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.MONTH), 4); bucket = new RasterBucket(4); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.MONTH), 8); bucket = new RasterBucket(11); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.MONTH), 0); bucket = new RasterBucket(13); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.MONTH), 4); bucket = new RasterBucket(-1); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.MONTH), 0); }
void function() { final SimpleDateFormat fm = new SimpleDateFormat(STR); final Calendar calendar = Calendar.getInstance(); calendar.setFirstDayOfWeek(Calendar.SUNDAY); final DateGranularity m = DateGranularity.MINUTES; final DateGranularity d = DateGranularity.DAYS; final DateGranularity w = DateGranularity.WEEKS; final DateGranularity mo = DateGranularity.MONTHS; IRasterLogic<Date> logic; RasterBucket bucket; Date value; logic = DateRasterUtilities.createDateRasterLogic(m, 5); bucket = new RasterBucket(0); value = logic.getBucketEnd(bucket); assertEquals(fm.format(value), STR); bucket = new RasterBucket(4); value = logic.getBucketEnd(bucket); assertEquals(fm.format(value), STR); bucket = new RasterBucket(5); value = logic.getBucketEnd(bucket); assertEquals(fm.format(value), STR); bucket = new RasterBucket(31); value = logic.getBucketEnd(bucket); assertEquals(fm.format(value), STR); bucket = new RasterBucket(1440); value = logic.getBucketEnd(bucket); assertEquals(fm.format(value), STR); bucket = new RasterBucket(-1); value = logic.getBucketEnd(bucket); assertEquals(fm.format(value), STR); logic = DateRasterUtilities.createDateRasterLogic(d, 2); bucket = new RasterBucket(1); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.DAY_OF_WEEK), 3); bucket = new RasterBucket(2); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.DAY_OF_WEEK), 3); bucket = new RasterBucket(3); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.DAY_OF_WEEK), 5); bucket = new RasterBucket(4); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.DAY_OF_WEEK), 5); bucket = new RasterBucket(7); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.DAY_OF_WEEK), 1); bucket = new RasterBucket(8); value = logic.getBucketEnd(bucket); calendar.setTime(value); 
assertEquals(calendar.get(Calendar.DAY_OF_WEEK), 3); bucket = new RasterBucket(9); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.DAY_OF_WEEK), 3); bucket = new RasterBucket(-1); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.DAY_OF_WEEK), 7); bucket = new RasterBucket(0); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.DAY_OF_WEEK), 1); bucket = new RasterBucket(-1); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.DAY_OF_WEEK), 7); logic = DateRasterUtilities.createDateRasterLogic(w, 2); bucket = new RasterBucket(1); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.WEEK_OF_YEAR), 3); bucket = new RasterBucket(2); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.WEEK_OF_YEAR), 3); bucket = new RasterBucket(53); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.WEEK_OF_YEAR), 3); bucket = new RasterBucket(52); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.WEEK_OF_YEAR), 1); bucket = new RasterBucket(51); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.WEEK_OF_YEAR), 1); bucket = new RasterBucket(0); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.WEEK_OF_YEAR), 1); logic = DateRasterUtilities.createDateRasterLogic(mo, 4); bucket = new RasterBucket(0); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.MONTH), 4); bucket = new RasterBucket(3); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.MONTH), 4); bucket = new RasterBucket(4); value = logic.getBucketEnd(bucket); calendar.setTime(value); 
assertEquals(calendar.get(Calendar.MONTH), 8); bucket = new RasterBucket(11); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.MONTH), 0); bucket = new RasterBucket(13); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.MONTH), 4); bucket = new RasterBucket(-1); value = logic.getBucketEnd(bucket); calendar.setTime(value); assertEquals(calendar.get(Calendar.MONTH), 0); }
/** * Tests the implementation of * <code>RasterBucket.getBucketEnd(RasterBucket)</code> */
Tests the implementation of <code>RasterBucket.getBucketEnd(RasterBucket)</code>
testBucketEnd
{ "repo_name": "pmeisen/gen-misc", "path": "test/net/meisen/general/genmisc/raster/definition/impl/date/TestDateRasterLogic.java", "license": "mit", "size": 20149 }
[ "java.text.SimpleDateFormat", "java.util.Calendar", "java.util.Date", "net.meisen.general.genmisc.raster.definition.IRasterLogic", "net.meisen.general.genmisc.raster.definition.RasterBucket", "net.meisen.general.genmisc.raster.utilities.DateRasterUtilities", "org.junit.Assert" ]
import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.Date; import net.meisen.general.genmisc.raster.definition.IRasterLogic; import net.meisen.general.genmisc.raster.definition.RasterBucket; import net.meisen.general.genmisc.raster.utilities.DateRasterUtilities; import org.junit.Assert;
import java.text.*; import java.util.*; import net.meisen.general.genmisc.raster.definition.*; import net.meisen.general.genmisc.raster.utilities.*; import org.junit.*;
[ "java.text", "java.util", "net.meisen.general", "org.junit" ]
java.text; java.util; net.meisen.general; org.junit;
637,498
public Collection<IRecord> pull(int max);
Collection<IRecord> function(int max);
/** * Get a collection of batch records from the FIFO buffer. This method will * wait until a group of records can be returned, thus implementations should * be performance aware. * * @param max The maximum number of records to pull * @return The records pulled */
Get a collection of batch records from the FIFO buffer. This method will wait until a group of records can be returned, thus implementations should be performance aware
pull
{ "repo_name": "jmuthu/OpenRate", "path": "src/main/java/OpenRate/buffer/ISupplier.java", "license": "gpl-2.0", "size": 4377 }
[ "java.util.Collection" ]
import java.util.Collection;
import java.util.*;
[ "java.util" ]
java.util;
976,513
private JTextField getTxtLength() { if (txtLength == null) { txtLength = new JTextField(); txtLength.setBounds(new Rectangle(137, 143, 66, 20)); } return txtLength; }
JTextField function() { if (txtLength == null) { txtLength = new JTextField(); txtLength.setBounds(new Rectangle(137, 143, 66, 20)); } return txtLength; }
/** * This method initializes txtLength * @return javax.swing.JTextField * @uml.property name="txtLength" */
This method initializes txtLength
getTxtLength
{ "repo_name": "smendez-hi/SUMO-hib", "path": "tools/contributed/trafficmodeler/src/ui/dialogs/DlgAddEditVehicleType.java", "license": "gpl-3.0", "size": 11848 }
[ "java.awt.Rectangle", "javax.swing.JTextField" ]
import java.awt.Rectangle; import javax.swing.JTextField;
import java.awt.*; import javax.swing.*;
[ "java.awt", "javax.swing" ]
java.awt; javax.swing;
2,644,999
public static final HandlerRegistration addAuthCancelledHandler(AuthCancelledHandler handler) { return AuthCancelledEvent.register(eventBus, handler); }
static final HandlerRegistration function(AuthCancelledHandler handler) { return AuthCancelledEvent.register(eventBus, handler); }
/** * Adds a cancelled handler. * * @param handler * the cancelled handler to be added * @return an object to deregister the handler */
Adds a cancelled handler
addAuthCancelledHandler
{ "repo_name": "xose/gwt-persona", "path": "src/main/java/com/github/xose/persona/client/Persona.java", "license": "apache-2.0", "size": 10700 }
[ "com.google.web.bindery.event.shared.HandlerRegistration" ]
import com.google.web.bindery.event.shared.HandlerRegistration;
import com.google.web.bindery.event.shared.*;
[ "com.google.web" ]
com.google.web;
1,238,825
public void setDialect(Dialect dialect) { this.dialect = dialect; } public ScriptExecutorImpl() { globalProperties = new Properties(); }
void function(Dialect dialect) { this.dialect = dialect; } public ScriptExecutorImpl() { globalProperties = new Properties(); }
/** * Sets the previously auto-detected Hibernate dialect. * * @param dialect * the dialect */
Sets the previously auto-detected Hibernate dialect
setDialect
{ "repo_name": "nguyentienlong/community-edition", "path": "projects/repository/source/java/org/alfresco/repo/domain/schema/script/ScriptExecutorImpl.java", "license": "lgpl-3.0", "size": 26157 }
[ "java.util.Properties", "org.hibernate.dialect.Dialect" ]
import java.util.Properties; import org.hibernate.dialect.Dialect;
import java.util.*; import org.hibernate.dialect.*;
[ "java.util", "org.hibernate.dialect" ]
java.util; org.hibernate.dialect;
1,526,162
public static void registerUncaughtExceptionHandler(UncaughtExceptionHandler handler) { UncaughtExceptionHandlerDelegate.delegate = handler; System.setProperty("sun.awt.exception.handler", UncaughtExceptionHandlerDelegate.class.getName()); //$NON-NLS-1$ }
static void function(UncaughtExceptionHandler handler) { UncaughtExceptionHandlerDelegate.delegate = handler; System.setProperty(STR, UncaughtExceptionHandlerDelegate.class.getName()); }
/** * register uncaught exception handler */
register uncaught exception handler
registerUncaughtExceptionHandler
{ "repo_name": "jurgendl/swing-easy", "path": "src/main/java/org/swingeasy/UIUtils.java", "license": "mit", "size": 27249 }
[ "java.lang.Thread" ]
import java.lang.Thread;
import java.lang.*;
[ "java.lang" ]
java.lang;
1,460,087
public Map<String, String> getParameterNameValues() { return parameterNameValues; }
Map<String, String> function() { return parameterNameValues; }
/** * Returns a mapping of the URI templates to the mapped values */
Returns a mapping of the URI templates to the mapped values
getParameterNameValues
{ "repo_name": "janstey/fabric8", "path": "gateway/gateway-model/src/main/java/io/fabric8/gateway/support/MappingResult.java", "license": "apache-2.0", "size": 2520 }
[ "java.util.Map" ]
import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
2,886,499
Map<String, String> options= project != null ? project.getOptions(true) : JavaCore.getOptions(); return ToolFactory.createCodeFormatter(options).createIndentationString(indentationUnits); }
Map<String, String> options= project != null ? project.getOptions(true) : JavaCore.getOptions(); return ToolFactory.createCodeFormatter(options).createIndentationString(indentationUnits); }
/** * Creates a string that represents the given number of indentation units. * The returned string can contain tabs and/or spaces depending on the core formatter preferences. * * @param indentationUnits * the number of indentation units to generate * @param project * the project from which to get the formatter settings, * <code>null</code> if the workspace default should be used * @return the indent string */
Creates a string that represents the given number of indentation units. The returned string can contain tabs and/or spaces depending on the core formatter preferences
createIndentString
{ "repo_name": "brunyuriy/quick-fix-scout", "path": "org.eclipse.jdt.ui_3.7.1.r371_v20110824-0800/src/org/eclipse/jdt/internal/corext/util/CodeFormatterUtil.java", "license": "mit", "size": 21109 }
[ "java.util.Map", "org.eclipse.jdt.core.JavaCore", "org.eclipse.jdt.core.ToolFactory" ]
import java.util.Map; import org.eclipse.jdt.core.JavaCore; import org.eclipse.jdt.core.ToolFactory;
import java.util.*; import org.eclipse.jdt.core.*;
[ "java.util", "org.eclipse.jdt" ]
java.util; org.eclipse.jdt;
2,634,989
EList<ConfigDescription> getConfigDescriptions();
EList<ConfigDescription> getConfigDescriptions();
/** * Returns the value of the '<em><b>Config Descriptions</b></em>' containment reference list. * The list contents are of type {@link org_eclipse_smarthome_schemas_thing_description_v1__0Simplified.ConfigDescription}. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Config Descriptions</em>' containment reference list isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Config Descriptions</em>' containment reference list. * @see org_eclipse_smarthome_schemas_thing_description_v1__0Simplified.org_eclipse_smarthome_schemas_thing_description_v1__0SimplifiedPackage#getConfigDescriptionsType_ConfigDescriptions() * @model containment="true" required="true" * @generated */
Returns the value of the 'Config Descriptions' containment reference list. The list contents are of type <code>org_eclipse_smarthome_schemas_thing_description_v1__0Simplified.ConfigDescription</code>. If the meaning of the 'Config Descriptions' containment reference list isn't clear, there really should be more of a description here...
getConfigDescriptions
{ "repo_name": "patrickneubauer/XMLIntellEdit", "path": "xmlintelledit/classes/src/org_eclipse_smarthome_schemas_thing_description_v1__0Simplified/ConfigDescriptionsType.java", "license": "mit", "size": 1675 }
[ "org.eclipse.emf.common.util.EList" ]
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.common.util.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
61,590
@Test public void pcepUpdateMsgTest6() throws PcepParseException, PcepOutOfBoundMessageException { byte[] updateMsg = new byte[] {0x20, 0x0b, 0x00, (byte) 0x48, 0x21, 0x10, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, //SRP object 0x00, 0x11, 0x00, 0x02, 0x54, 0x31, 0x00, 0x00, //SymbolicPathNameTlv 0x20, 0x10, 0x00, 0x2c, 0x00, 0x00, 0x10, 0x03, //LSP object 0x00, 0x12, 0x00, 0x10, //StatefulIPv4LspIdentidiersTlv (byte) 0xb6, 0x02, 0x4e, 0x1f, 0x00, 0x01, (byte) 0x80, 0x01, (byte) 0xb6, 0x02, 0x4e, 0x1f, (byte) 0xb6, 0x02, 0x4e, 0x20, 0x00, 0x11, 0x00, 0x02, 0x54, 0x31, 0x00, 0x00, //SymbolicPathNameTlv 0x00, 0x14, 0x00, 0x04, 0x00, 0x00, 0x00, 0x08, //StatefulLspErrorCodeTlv 0x07, 0x10, 0x00, 0x04 }; byte[] testupdateMsg = {0}; ChannelBuffer buffer = ChannelBuffers.dynamicBuffer(); buffer.writeBytes(updateMsg); PcepMessageReader<PcepMessage> reader = PcepFactories.getGenericReader(); PcepMessage message = null; message = reader.readFrom(buffer); assertThat(message, instanceOf(PcepUpdateMsg.class)); ChannelBuffer buf = ChannelBuffers.dynamicBuffer(); message.writeTo(buf); testupdateMsg = buf.array(); int readLen = buf.writerIndex() - 0; testupdateMsg = new byte[readLen]; buf.readBytes(testupdateMsg, 0, readLen); assertThat(testupdateMsg, is(updateMsg)); }
void function() throws PcepParseException, PcepOutOfBoundMessageException { byte[] updateMsg = new byte[] {0x20, 0x0b, 0x00, (byte) 0x48, 0x21, 0x10, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x11, 0x00, 0x02, 0x54, 0x31, 0x00, 0x00, 0x20, 0x10, 0x00, 0x2c, 0x00, 0x00, 0x10, 0x03, 0x00, 0x12, 0x00, 0x10, (byte) 0xb6, 0x02, 0x4e, 0x1f, 0x00, 0x01, (byte) 0x80, 0x01, (byte) 0xb6, 0x02, 0x4e, 0x1f, (byte) 0xb6, 0x02, 0x4e, 0x20, 0x00, 0x11, 0x00, 0x02, 0x54, 0x31, 0x00, 0x00, 0x00, 0x14, 0x00, 0x04, 0x00, 0x00, 0x00, 0x08, 0x07, 0x10, 0x00, 0x04 }; byte[] testupdateMsg = {0}; ChannelBuffer buffer = ChannelBuffers.dynamicBuffer(); buffer.writeBytes(updateMsg); PcepMessageReader<PcepMessage> reader = PcepFactories.getGenericReader(); PcepMessage message = null; message = reader.readFrom(buffer); assertThat(message, instanceOf(PcepUpdateMsg.class)); ChannelBuffer buf = ChannelBuffers.dynamicBuffer(); message.writeTo(buf); testupdateMsg = buf.array(); int readLen = buf.writerIndex() - 0; testupdateMsg = new byte[readLen]; buf.readBytes(testupdateMsg, 0, readLen); assertThat(testupdateMsg, is(updateMsg)); }
/** * This test case checks for SRP, LSP (SymbolicPathNameTlv, StatefulIPv4LspIdentidiersTlv, SymbolicPathNameTlv, * StatefulLspErrorCodeTlv), ERO objects in PcUpd message. */
This test case checks for SRP, LSP (SymbolicPathNameTlv, StatefulIPv4LspIdentidiersTlv, SymbolicPathNameTlv, StatefulLspErrorCodeTlv), ERO objects in PcUpd message
pcepUpdateMsgTest6
{ "repo_name": "kuujo/onos", "path": "protocols/pcep/pcepio/src/test/java/org/onosproject/pcepio/protocol/PcepUpdateMsgTest.java", "license": "apache-2.0", "size": 66899 }
[ "org.hamcrest.MatcherAssert", "org.hamcrest.Matchers", "org.hamcrest.core.Is", "org.jboss.netty.buffer.ChannelBuffer", "org.jboss.netty.buffer.ChannelBuffers", "org.onosproject.pcepio.exceptions.PcepOutOfBoundMessageException", "org.onosproject.pcepio.exceptions.PcepParseException" ]
import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; import org.hamcrest.core.Is; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.buffer.ChannelBuffers; import org.onosproject.pcepio.exceptions.PcepOutOfBoundMessageException; import org.onosproject.pcepio.exceptions.PcepParseException;
import org.hamcrest.*; import org.hamcrest.core.*; import org.jboss.netty.buffer.*; import org.onosproject.pcepio.exceptions.*;
[ "org.hamcrest", "org.hamcrest.core", "org.jboss.netty", "org.onosproject.pcepio" ]
org.hamcrest; org.hamcrest.core; org.jboss.netty; org.onosproject.pcepio;
2,804,859
@Override protected View onCreateDialogView() { // set the dialog layout setDialogLayoutResource(R.layout.pref_dialog_editpin); View dialog = super.onCreateDialogView(); /// M: set input type // set the transformation method and the key listener to ensure // correct input and presentation of the pin / puk. final EditText textfield = getEditText(); //textfield.setTransformationMethod(PasswordTransformationMethod.getInstance()); //textfield.setKeyListener(DigitsKeyListener.getInstance()); textfield.setInputType(InputType.TYPE_CLASS_NUMBER | InputType.TYPE_NUMBER_VARIATION_PASSWORD); if (this instanceof CallBarringChangePassword) { InputFilter filters[] = new InputFilter[1]; filters[0] = new InputFilter.LengthFilter(4); textfield.setFilters(filters); } return dialog; }
View function() { setDialogLayoutResource(R.layout.pref_dialog_editpin); View dialog = super.onCreateDialogView(); final EditText textfield = getEditText(); textfield.setInputType(InputType.TYPE_CLASS_NUMBER InputType.TYPE_NUMBER_VARIATION_PASSWORD); if (this instanceof CallBarringChangePassword) { InputFilter filters[] = new InputFilter[1]; filters[0] = new InputFilter.LengthFilter(4); textfield.setFilters(filters); } return dialog; }
/** * Overridden to setup the correct dialog layout, as well as setting up * other properties for the pin / puk entry field. */
Overridden to setup the correct dialog layout, as well as setting up other properties for the pin / puk entry field
onCreateDialogView
{ "repo_name": "rex-xxx/mt6572_x201", "path": "packages/apps/Phone/src/com/android/phone/EditPinPreference.java", "license": "gpl-2.0", "size": 8202 }
[ "android.text.InputFilter", "android.text.InputType", "android.view.View", "android.widget.EditText", "com.mediatek.settings.CallBarringChangePassword" ]
import android.text.InputFilter; import android.text.InputType; import android.view.View; import android.widget.EditText; import com.mediatek.settings.CallBarringChangePassword;
import android.text.*; import android.view.*; import android.widget.*; import com.mediatek.settings.*;
[ "android.text", "android.view", "android.widget", "com.mediatek.settings" ]
android.text; android.view; android.widget; com.mediatek.settings;
150,212
public void setColorFilter(ColorFilter cf) { if (mColorFilter != cf) { mColorFilter = cf; mColorMod = true; applyColorMod(); invalidate(); } }
void function(ColorFilter cf) { if (mColorFilter != cf) { mColorFilter = cf; mColorMod = true; applyColorMod(); invalidate(); } }
/** * Apply an arbitrary colorfilter to the image. * * @param cf the colorfilter to apply (may be null) */
Apply an arbitrary colorfilter to the image
setColorFilter
{ "repo_name": "mateor/PDroidHistory", "path": "frameworks/base/core/java/android/widget/ImageView.java", "license": "gpl-3.0", "size": 32783 }
[ "android.graphics.ColorFilter" ]
import android.graphics.ColorFilter;
import android.graphics.*;
[ "android.graphics" ]
android.graphics;
2,456,802
// [TARGET delete(Key...)] // [VARIABLE "my_key_name1"] // [VARIABLE "my_key_name2"] public void multipleDeleteEntities(String keyName1, String keyName2) { Datastore datastore = transaction.getDatastore(); // [START multipleDeleteEntities] Key key1 = datastore.newKeyFactory().setKind("MyKind").newKey(keyName1); Key key2 = datastore.newKeyFactory().setKind("MyKind").newKey(keyName2); transaction.delete(key1, key2); transaction.commit(); // [END multipleDeleteEntities] }
void function(String keyName1, String keyName2) { Datastore datastore = transaction.getDatastore(); Key key1 = datastore.newKeyFactory().setKind(STR).newKey(keyName1); Key key2 = datastore.newKeyFactory().setKind(STR).newKey(keyName2); transaction.delete(key1, key2); transaction.commit(); }
/** * Example of deleting multiple entities. */
Example of deleting multiple entities
multipleDeleteEntities
{ "repo_name": "shinfan/gcloud-java", "path": "google-cloud-examples/src/main/java/com/google/cloud/examples/datastore/snippets/TransactionSnippets.java", "license": "apache-2.0", "size": 14136 }
[ "com.google.cloud.datastore.Datastore", "com.google.cloud.datastore.Key" ]
import com.google.cloud.datastore.Datastore; import com.google.cloud.datastore.Key;
import com.google.cloud.datastore.*;
[ "com.google.cloud" ]
com.google.cloud;
1,217,707
void synchronizeTime(Address from, DateTime dateTime, boolean utc);
void synchronizeTime(Address from, DateTime dateTime, boolean utc);
/** * Notification that the device should synchronize its time to the given date/time value. The local device has * already been checked at this point to ensure that it supports time synchronization. * * @param from * @param dateTime * @param utc * true if a UTCTimeSynchronizationRequest was sent, false if TimeSynchronizationRequest. */
Notification that the device should synchronize its time to the given date/time value. The local device has already been checked at this point to ensure that it supports time synchronization
synchronizeTime
{ "repo_name": "mlohbihler/BACnet4J", "path": "src/main/java/com/serotonin/bacnet4j/event/DeviceEventListener.java", "license": "gpl-3.0", "size": 7351 }
[ "com.serotonin.bacnet4j.type.constructed.Address", "com.serotonin.bacnet4j.type.constructed.DateTime" ]
import com.serotonin.bacnet4j.type.constructed.Address; import com.serotonin.bacnet4j.type.constructed.DateTime;
import com.serotonin.bacnet4j.type.constructed.*;
[ "com.serotonin.bacnet4j" ]
com.serotonin.bacnet4j;
171,648
@Override public void renderHead(final Component _component, final IHeaderResponse _response) { super.renderHead(_component, _response); final StringBuilder js = new StringBuilder() .append("ready(function() {") .append("registry.byId(\"").append(_component.getMarkupId()) .append("\").set(\"content\", domConstruct.create(\"iframe\", {"); if (this.frameMarkupId != null) { js.append("\"id\": \"").append(this.frameMarkupId).append("\","); } js.append("\"src\": \"").append(_component.urlForListener(new PageParameters())) .append("\",\"style\": \"border: 0; width: 100%; height: 99%\", \"nodeId\": \"jan\"") .append("}));") .append("});"); _response.render(JavaScriptHeaderItem.forScript(DojoWrapper.require(js, DojoClasses.ready, DojoClasses.registry, DojoClasses.domConstruct), _component.getMarkupId() + "-Script")); }
void function(final Component _component, final IHeaderResponse _response) { super.renderHead(_component, _response); final StringBuilder js = new StringBuilder() .append(STR) .append(STRSTR\STRcontent\STRiframe\STR); if (this.frameMarkupId != null) { js.append("\"id\STRSTR\","); } js.append("\"src\STRSTR\",\"style\STRborder: 0; width: 100%; height: 99%\STRnodeId\STRjan\STR}));STR});STR-Script")); }
/** * Render the links for the head. * * @param _component component the header will be rendered for * @param _response resonse to add */
Render the links for the head
renderHead
{ "repo_name": "eFaps/eFaps-WebApp", "path": "src/main/java/org/efaps/ui/wicket/behaviors/dojo/LazyIframeBehavior.java", "license": "apache-2.0", "size": 3205 }
[ "org.apache.wicket.Component", "org.apache.wicket.markup.head.IHeaderResponse" ]
import org.apache.wicket.Component; import org.apache.wicket.markup.head.IHeaderResponse;
import org.apache.wicket.*; import org.apache.wicket.markup.head.*;
[ "org.apache.wicket" ]
org.apache.wicket;
349,648
private boolean handleParentProc(int pid, FileDescriptor[] descriptors, FileDescriptor pipeFd, Arguments parsedArgs) { if (pid > 0) { setChildPgid(pid); } if (descriptors != null) { for (FileDescriptor fd: descriptors) { IoUtils.closeQuietly(fd); } } boolean usingWrapper = false; if (pipeFd != null && pid > 0) { DataInputStream is = new DataInputStream(new FileInputStream(pipeFd)); int innerPid = -1; try { innerPid = is.readInt(); } catch (IOException ex) { Log.w(TAG, "Error reading pid from wrapped process, child may have died", ex); } finally { try { is.close(); } catch (IOException ex) { } } // Ensure that the pid reported by the wrapped process is either the // child process that we forked, or a descendant of it. if (innerPid > 0) { int parentPid = innerPid; while (parentPid > 0 && parentPid != pid) { parentPid = Process.getParentPid(parentPid); } if (parentPid > 0) { Log.i(TAG, "Wrapped process has pid " + innerPid); pid = innerPid; usingWrapper = true; } else { Log.w(TAG, "Wrapped process reported a pid that is not a child of " + "the process that we forked: childPid=" + pid + " innerPid=" + innerPid); } } } try { mSocketOutStream.writeInt(pid); mSocketOutStream.writeBoolean(usingWrapper); } catch (IOException ex) { Log.e(TAG, "Error writing to command socket", ex); return true; } return false; }
boolean function(int pid, FileDescriptor[] descriptors, FileDescriptor pipeFd, Arguments parsedArgs) { if (pid > 0) { setChildPgid(pid); } if (descriptors != null) { for (FileDescriptor fd: descriptors) { IoUtils.closeQuietly(fd); } } boolean usingWrapper = false; if (pipeFd != null && pid > 0) { DataInputStream is = new DataInputStream(new FileInputStream(pipeFd)); int innerPid = -1; try { innerPid = is.readInt(); } catch (IOException ex) { Log.w(TAG, STR, ex); } finally { try { is.close(); } catch (IOException ex) { } } if (innerPid > 0) { int parentPid = innerPid; while (parentPid > 0 && parentPid != pid) { parentPid = Process.getParentPid(parentPid); } if (parentPid > 0) { Log.i(TAG, STR + innerPid); pid = innerPid; usingWrapper = true; } else { Log.w(TAG, STR + STR + pid + STR + innerPid); } } } try { mSocketOutStream.writeInt(pid); mSocketOutStream.writeBoolean(usingWrapper); } catch (IOException ex) { Log.e(TAG, STR, ex); return true; } return false; }
/** * Handles post-fork cleanup of parent proc * * @param pid != 0; pid of child if &gt; 0 or indication of failed fork * if &lt; 0; * @param descriptors null-ok; file descriptors for child's new stdio if * specified. * @param pipeFd null-ok; pipe for communication with child. * @param parsedArgs non-null; zygote args * @return true for "exit command loop" and false for "continue command * loop" */
Handles post-fork cleanup of parent proc
handleParentProc
{ "repo_name": "syslover33/ctank", "path": "java/android-sdk-linux_r24.4.1_src/sources/android-23/com/android/internal/os/ZygoteConnection.java", "license": "gpl-3.0", "size": 32549 }
[ "android.os.Process", "android.util.Log", "java.io.DataInputStream", "java.io.FileDescriptor", "java.io.FileInputStream", "java.io.IOException" ]
import android.os.Process; import android.util.Log; import java.io.DataInputStream; import java.io.FileDescriptor; import java.io.FileInputStream; import java.io.IOException;
import android.os.*; import android.util.*; import java.io.*;
[ "android.os", "android.util", "java.io" ]
android.os; android.util; java.io;
499,519
public MultiCurrencyAmount currentCash(ResolvedFxSwapTrade trade, RatesProvider provider) { return productPricer.currentCash(trade.getProduct(), provider.getValuationDate()); }
MultiCurrencyAmount function(ResolvedFxSwapTrade trade, RatesProvider provider) { return productPricer.currentCash(trade.getProduct(), provider.getValuationDate()); }
/** * Calculates the current cash of the trade. * * @param trade the trade * @param provider the rates provider * @return the current cash of the trade in the settlement currency */
Calculates the current cash of the trade
currentCash
{ "repo_name": "ChinaQuants/Strata", "path": "modules/pricer/src/main/java/com/opengamma/strata/pricer/fx/DiscountingFxSwapTradePricer.java", "license": "apache-2.0", "size": 4251 }
[ "com.opengamma.strata.basics.currency.MultiCurrencyAmount", "com.opengamma.strata.pricer.rate.RatesProvider", "com.opengamma.strata.product.fx.ResolvedFxSwapTrade" ]
import com.opengamma.strata.basics.currency.MultiCurrencyAmount; import com.opengamma.strata.pricer.rate.RatesProvider; import com.opengamma.strata.product.fx.ResolvedFxSwapTrade;
import com.opengamma.strata.basics.currency.*; import com.opengamma.strata.pricer.rate.*; import com.opengamma.strata.product.fx.*;
[ "com.opengamma.strata" ]
com.opengamma.strata;
1,396,285
public int idDropped(int i, Random random, int j) { return mod_Rediscovered.Lectern.blockID; } @SideOnly(Side.CLIENT)
int function(int i, Random random, int j) { return mod_Rediscovered.Lectern.blockID; } @SideOnly(Side.CLIENT)
/** * Returns the ID of the items to drop on destruction. */
Returns the ID of the items to drop on destruction
idDropped
{ "repo_name": "Stormister/Rediscovered-Mod-1.6.4", "path": "source/main/RediscoveredMod/BlockLecternOpen.java", "license": "gpl-3.0", "size": 9510 }
[ "java.util.Random" ]
import java.util.Random;
import java.util.*;
[ "java.util" ]
java.util;
366,956
public static void startUpdateService(Context context, int[] appWidgetIds) { Intent intent = new Intent(Intent.ACTION_SYNC, null, context, WidgetValuesService.class); int numWidgets = 0; for (int appWidgetId : appWidgetIds) { intent.putExtra(WidgetValuesService.PARAM_WID_ID + numWidgets, appWidgetId); numWidgets++; } intent.putExtra(WidgetValuesService.PARAM_WID_NUM, numWidgets); context.startService(intent); }
static void function(Context context, int[] appWidgetIds) { Intent intent = new Intent(Intent.ACTION_SYNC, null, context, WidgetValuesService.class); int numWidgets = 0; for (int appWidgetId : appWidgetIds) { intent.putExtra(WidgetValuesService.PARAM_WID_ID + numWidgets, appWidgetId); numWidgets++; } intent.putExtra(WidgetValuesService.PARAM_WID_NUM, numWidgets); context.startService(intent); }
/** * Start background service to receive new data and update * the widget content * * @param context * Context of this application * @param appWidgetIds * List of all active widgets */
Start background service to receive new data and update the widget content
startUpdateService
{ "repo_name": "beegee-tokyo/WeatherStation", "path": "app/src/main/java/tk/giesecke/weatherstation/WidgetValues.java", "license": "gpl-2.0", "size": 10626 }
[ "android.content.Context", "android.content.Intent" ]
import android.content.Context; import android.content.Intent;
import android.content.*;
[ "android.content" ]
android.content;
1,060,623
private static void addSubtreeData( PackageData packageData, List<ClassData> currentClassDataList, List<PackageData> currentPackageDataList) { currentPackageDataList.add(packageData); for (ClassData childClassData : packageData.childClassDataByFileNameNoExt().values()) { currentClassDataList.add(childClassData); } for (PackageData childPackageData : packageData.childPackageDataByDirName().values()) { addSubtreeData( childPackageData, currentClassDataList, currentPackageDataList); } }
static void function( PackageData packageData, List<ClassData> currentClassDataList, List<PackageData> currentPackageDataList) { currentPackageDataList.add(packageData); for (ClassData childClassData : packageData.childClassDataByFileNameNoExt().values()) { currentClassDataList.add(childClassData); } for (PackageData childPackageData : packageData.childPackageDataByDirName().values()) { addSubtreeData( childPackageData, currentClassDataList, currentPackageDataList); } }
/** * This method is recursive. * * @param currentPackageDataList Always contains at least default package. */
This method is recursive
addSubtreeData
{ "repo_name": "jeffhain/jadecy", "path": "src/test/java/net/jadecy/code/CodeDataSturdinessTest.java", "license": "apache-2.0", "size": 48230 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
1,746,790
private String getStorageDirPath(int dnIndex, int dirIndex) { return "data/data" + (storagesPerDatanode * dnIndex + 1 + dirIndex); } /** * Get current directory corresponding to the datanode as defined in * (@link Storage#STORAGE_DIR_CURRENT}
String function(int dnIndex, int dirIndex) { return STR + (storagesPerDatanode * dnIndex + 1 + dirIndex); } /** * Get current directory corresponding to the datanode as defined in * (@link Storage#STORAGE_DIR_CURRENT}
/** * Calculate the DN instance-specific path for appending to the base dir * to determine the location of the storage of a DN instance in the mini cluster * @param dnIndex datanode index * @param dirIndex directory index. * @return storage directory path */
Calculate the DN instance-specific path for appending to the base dir to determine the location of the storage of a DN instance in the mini cluster
getStorageDirPath
{ "repo_name": "vlajos/hadoop", "path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java", "license": "apache-2.0", "size": 105726 }
[ "org.apache.hadoop.hdfs.server.common.Storage" ]
import org.apache.hadoop.hdfs.server.common.Storage;
import org.apache.hadoop.hdfs.server.common.*;
[ "org.apache.hadoop" ]
org.apache.hadoop;
952,945
public static java.util.List extractGpList(ims.domain.ILightweightDomainFactory domainFactory, ims.core.vo.GpVoCollection voCollection) { return extractGpList(domainFactory, voCollection, null, new HashMap()); }
static java.util.List function(ims.domain.ILightweightDomainFactory domainFactory, ims.core.vo.GpVoCollection voCollection) { return extractGpList(domainFactory, voCollection, null, new HashMap()); }
/** * Create the ims.core.resource.people.domain.objects.Gp list from the value object collection. * @param domainFactory - used to create existing (persistent) domain objects. * @param voCollection - the collection of value objects */
Create the ims.core.resource.people.domain.objects.Gp list from the value object collection
extractGpList
{ "repo_name": "IMS-MAXIMS/openMAXIMS", "path": "Source Library/openmaxims_workspace/ValueObjects/src/ims/core/vo/domain/GpVoAssembler.java", "license": "agpl-3.0", "size": 18332 }
[ "java.util.HashMap" ]
import java.util.HashMap;
import java.util.*;
[ "java.util" ]
java.util;
702,789
private static boolean endsWithPrototype(Node qualifiedName) { return qualifiedName.isGetProp() && qualifiedName.getLastChild().getString().equals("prototype"); }
static boolean function(Node qualifiedName) { return qualifiedName.isGetProp() && qualifiedName.getLastChild().getString().equals(STR); }
/** * Given a qualified name node, returns whether "prototype" is at the end. * For example: * a.b.c => false * a.b.c.prototype => true */
Given a qualified name node, returns whether "prototype" is at the end. For example: a.b.c => false a.b.c.prototype => true
endsWithPrototype
{ "repo_name": "MatrixFrog/closure-compiler", "path": "src/com/google/javascript/jscomp/ClosureCodingConvention.java", "license": "apache-2.0", "size": 19072 }
[ "com.google.javascript.rhino.Node" ]
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.*;
[ "com.google.javascript" ]
com.google.javascript;
1,790,312
public JSONArray put(Map value) { put(new JSONObject(value)); return this; }
JSONArray function(Map value) { put(new JSONObject(value)); return this; }
/** * Put a value in the JSONArray, where the value will be a * JSONObject which is produced from a Map. * @param value A Map value. * @return this. */
Put a value in the JSONArray, where the value will be a JSONObject which is produced from a Map
put
{ "repo_name": "jimklo/LearningRegistry", "path": "data-pumps/adl/RIMList-LR/src/org/json/JSONArray.java", "license": "apache-2.0", "size": 29778 }
[ "java.util.Map" ]
import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
841,950
public static Class<?> resolveType(Type genericType, Map<TypeVariable, Type> typeVariableMap) { Type resolvedType = getRawType(genericType, typeVariableMap); if (resolvedType instanceof GenericArrayType) { Type componentType = ((GenericArrayType) resolvedType).getGenericComponentType(); Class<?> componentClass = resolveType(componentType, typeVariableMap); resolvedType = Array.newInstance(componentClass, 0).getClass(); } return (resolvedType instanceof Class ? (Class) resolvedType : Object.class); }
static Class<?> function(Type genericType, Map<TypeVariable, Type> typeVariableMap) { Type resolvedType = getRawType(genericType, typeVariableMap); if (resolvedType instanceof GenericArrayType) { Type componentType = ((GenericArrayType) resolvedType).getGenericComponentType(); Class<?> componentClass = resolveType(componentType, typeVariableMap); resolvedType = Array.newInstance(componentClass, 0).getClass(); } return (resolvedType instanceof Class ? (Class) resolvedType : Object.class); }
/** * Resolve the specified generic type against the given TypeVariable map. * @param genericType the generic type to resolve * @param typeVariableMap the TypeVariable Map to resolved against * @return the type if it resolves to a Class, or {@code Object.class} otherwise */
Resolve the specified generic type against the given TypeVariable map
resolveType
{ "repo_name": "spring-projects/spring-android", "path": "spring-android-core/src/main/java/org/springframework/core/GenericTypeResolver.java", "license": "apache-2.0", "size": 20017 }
[ "java.lang.reflect.Array", "java.lang.reflect.GenericArrayType", "java.lang.reflect.Type", "java.lang.reflect.TypeVariable", "java.util.Map" ]
import java.lang.reflect.Array; import java.lang.reflect.GenericArrayType; import java.lang.reflect.Type; import java.lang.reflect.TypeVariable; import java.util.Map;
import java.lang.reflect.*; import java.util.*;
[ "java.lang", "java.util" ]
java.lang; java.util;
111,607
void writeToStream(OutputStream aOut) throws IOException;
void writeToStream(OutputStream aOut) throws IOException;
/** * This method allows to write the spectrum file to the specified OutputStream. * * @param aOut OutputStream to write the file to. This Stream * will <b>NOT</b> be closed by this method. * @exception IOException when the write operation fails. */
This method allows to write the spectrum file to the specified OutputStream
writeToStream
{ "repo_name": "compomics/compomics-utilities", "path": "src/main/java/com/compomics/util/interfaces/SpectrumFile.java", "license": "apache-2.0", "size": 4108 }
[ "java.io.IOException", "java.io.OutputStream" ]
import java.io.IOException; import java.io.OutputStream;
import java.io.*;
[ "java.io" ]
java.io;
2,376,533
protected List<ASTEntry> getEntryOccurrencesInSameModule(RefactoringStatus status, String initialName, SimpleNode root) { return getLocalOccurrences(initialName, root, status); }
List<ASTEntry> function(RefactoringStatus status, String initialName, SimpleNode root) { return getLocalOccurrences(initialName, root, status); }
/** * Will return the occurrences if we're in the same module for the method definition */
Will return the occurrences if we're in the same module for the method definition
getEntryOccurrencesInSameModule
{ "repo_name": "fabioz/Pydev", "path": "plugins/com.python.pydev.analysis/src/com/python/pydev/analysis/refactoring/wizards/rename/PyRenameFunctionProcess.java", "license": "epl-1.0", "size": 9176 }
[ "java.util.List", "org.eclipse.ltk.core.refactoring.RefactoringStatus", "org.python.pydev.parser.jython.SimpleNode", "org.python.pydev.parser.visitors.scope.ASTEntry" ]
import java.util.List; import org.eclipse.ltk.core.refactoring.RefactoringStatus; import org.python.pydev.parser.jython.SimpleNode; import org.python.pydev.parser.visitors.scope.ASTEntry;
import java.util.*; import org.eclipse.ltk.core.refactoring.*; import org.python.pydev.parser.jython.*; import org.python.pydev.parser.visitors.scope.*;
[ "java.util", "org.eclipse.ltk", "org.python.pydev" ]
java.util; org.eclipse.ltk; org.python.pydev;
1,556,586
@Test public void testOperatorLevelAmbiguousAttributeComplex() { testAttributeAmbiguousSimpleHelper(Context.OperatorContext.AUTO_RECORD, Context.PortContext.AUTO_RECORD, StreamingApplication.DT_PREFIX + "operator" + LogicalPlanConfiguration.KEY_SEPARATOR + "*" + LogicalPlanConfiguration.KEY_SEPARATOR, OperatorContext.class.getCanonicalName(), Boolean.TRUE, true, false); }
void function() { testAttributeAmbiguousSimpleHelper(Context.OperatorContext.AUTO_RECORD, Context.PortContext.AUTO_RECORD, StreamingApplication.DT_PREFIX + STR + LogicalPlanConfiguration.KEY_SEPARATOR + "*" + LogicalPlanConfiguration.KEY_SEPARATOR, OperatorContext.class.getCanonicalName(), Boolean.TRUE, true, false); }
/** * This should only set the attribute on the operator */
This should only set the attribute on the operator
testOperatorLevelAmbiguousAttributeComplex
{ "repo_name": "brightchen/incubator-apex-core", "path": "engine/src/test/java/com/datatorrent/stram/plan/logical/LogicalPlanConfigurationTest.java", "license": "apache-2.0", "size": 69482 }
[ "com.datatorrent.api.Context", "com.datatorrent.api.StreamingApplication" ]
import com.datatorrent.api.Context; import com.datatorrent.api.StreamingApplication;
import com.datatorrent.api.*;
[ "com.datatorrent.api" ]
com.datatorrent.api;
1,555,669
public static void writeLines(File file, Collection<?> lines, String lineEnding, boolean append) throws IOException { writeLines(file, null, lines, lineEnding, append); } //----------------------------------------------------------------------- /** * Deletes a file. If file is a directory, delete it and all sub-directories. * <p> * The difference between File.delete() and this method are: * <ul> * <li>A directory to be deleted does not have to be empty.</li> * <li>You get exceptions when a file or directory cannot be deleted. * (java.io.File methods returns a boolean)</li> * </ul> * * @param file file or directory to delete, must not be {@code null} * @throws NullPointerException if the directory is {@code null}
static void function(File file, Collection<?> lines, String lineEnding, boolean append) throws IOException { writeLines(file, null, lines, lineEnding, append); } /** * Deletes a file. If file is a directory, delete it and all sub-directories. * <p> * The difference between File.delete() and this method are: * <ul> * <li>A directory to be deleted does not have to be empty.</li> * <li>You get exceptions when a file or directory cannot be deleted. * (java.io.File methods returns a boolean)</li> * </ul> * * @param file file or directory to delete, must not be {@code null} * @throws NullPointerException if the directory is {@code null}
/** * Writes the <code>toString()</code> value of each item in a collection to * the specified <code>File</code> line by line. * The default VM encoding and the specified line ending will be used. * * @param file the file to write to * @param lines the lines to write, {@code null} entries produce blank lines * @param lineEnding the line separator to use, {@code null} is system default * @param append if {@code true}, then the lines will be added to the * end of the file rather than overwriting * @throws IOException in case of an I/O error * @since 2.1 */
Writes the <code>toString()</code> value of each item in a collection to the specified <code>File</code> line by line. The default VM encoding and the specified line ending will be used
writeLines
{ "repo_name": "wspeirs/sop4j-base", "path": "src/main/java/com/sop4j/base/apache/io/FileUtils.java", "license": "apache-2.0", "size": 119450 }
[ "java.io.File", "java.io.IOException", "java.util.Collection" ]
import java.io.File; import java.io.IOException; import java.util.Collection;
import java.io.*; import java.util.*;
[ "java.io", "java.util" ]
java.io; java.util;
2,381,590
@Nullable() public String getTargetProtocol() { return targetProtocol; }
@Nullable() String function() { return targetProtocol; }
/** * Retrieves the protocol used to forward the request to the backend server. * * @return The protocol used to forward the request to the backend server, or * {@code null} if it is not included in the log message. */
Retrieves the protocol used to forward the request to the backend server
getTargetProtocol
{ "repo_name": "UnboundID/ldapsdk", "path": "src/com/unboundid/ldap/sdk/unboundidds/logs/ExtendedForwardFailedAccessLogMessage.java", "license": "gpl-2.0", "size": 6252 }
[ "com.unboundid.util.Nullable" ]
import com.unboundid.util.Nullable;
import com.unboundid.util.*;
[ "com.unboundid.util" ]
com.unboundid.util;
192,611
public static <E> E removeAt(List<E> self, int index) { return self.remove(index); }
static <E> E function(List<E> self, int index) { return self.remove(index); }
/** * Modifies this list by removing the element at the specified position * in this list. Returns the removed element. Essentially an alias for * {@link List#remove(int)} but with no ambiguity for List&lt;Integer&gt;. * <p/> * Example: * <pre class="groovyTestCase"> * def list = [1, 2, 3] * list.removeAt(1) * assert [1, 3] == list * </pre> * * @param self a List * @param index the index of the element to be removed * @return the element previously at the specified position * @since 2.4.0 */
Modifies this list by removing the element at the specified position in this list. Returns the removed element. Essentially an alias for <code>List#remove(int)</code> but with no ambiguity for List&lt;Integer&gt;. Example: def list = [1, 2, 3] list.removeAt(1) assert [1, 3] == list </code>
removeAt
{ "repo_name": "apache/incubator-groovy", "path": "src/main/java/org/codehaus/groovy/runtime/DefaultGroovyMethods.java", "license": "apache-2.0", "size": 703151 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,620,942
public BoolState readBool(int bitIndex, int bytes) { return bitField.get(bitIndex) ? readBool(bytes) : BoolState.UNKNOWN; }
BoolState function(int bitIndex, int bytes) { return bitField.get(bitIndex) ? readBool(bytes) : BoolState.UNKNOWN; }
/** * Reads the indicated number of bytes from the current packet's payload if * the indicated bit in the current BitField is on, then coerces the zeroeth * byte read into a BoolState. Otherwise, the pointer is not moved, and * BoolState.UNKNOWN is returned. */
Reads the indicated number of bytes from the current packet's payload if the indicated bit in the current BitField is on, then coerces the zeroeth byte read into a BoolState. Otherwise, the pointer is not moved, and BoolState.UNKNOWN is returned
readBool
{ "repo_name": "JordanLongstaff/Artemis-Messenger", "path": "src/com/walkertribe/ian/iface/PacketReader.java", "license": "mit", "size": 16466 }
[ "com.walkertribe.ian.util.BoolState" ]
import com.walkertribe.ian.util.BoolState;
import com.walkertribe.ian.util.*;
[ "com.walkertribe.ian" ]
com.walkertribe.ian;
1,312,942
@Test public void testIsAutoOpen() { assertFalse(dialog.isAutoOpen()); dialog.setAutoOpen(true); assertTrue(dialog.isAutoOpen()); }
void function() { assertFalse(dialog.isAutoOpen()); dialog.setAutoOpen(true); assertTrue(dialog.isAutoOpen()); }
/** * Test method for {@link org.odlabs.wiquery.ui.dialog.Dialog#isAutoOpen()}. */
Test method for <code>org.odlabs.wiquery.ui.dialog.Dialog#isAutoOpen()</code>
testIsAutoOpen
{ "repo_name": "openengsb-attic/forks-org.odlabs.wiquery", "path": "src/test/java/org/odlabs/wiquery/ui/dialog/DialogTestCase.java", "license": "mit", "size": 15104 }
[ "org.junit.Assert" ]
import org.junit.Assert;
import org.junit.*;
[ "org.junit" ]
org.junit;
956,795
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { deSerializeMessages(in); deSerializeContext(in); }
void function(ObjectInputStream in) throws IOException, ClassNotFoundException { deSerializeMessages(in); deSerializeContext(in); }
/** * Deserialize this object from the given stream. * * @param in Stream. * @throws IOException This should never happen. * @throws ClassNotFoundException This should never happen. */
Deserialize this object from the given stream
readObject
{ "repo_name": "martingwhite/astor", "path": "examples/math_57/src/main/java/org/apache/commons/math/exception/MathRuntimeException.java", "license": "gpl-2.0", "size": 9437 }
[ "java.io.IOException", "java.io.ObjectInputStream" ]
import java.io.IOException; import java.io.ObjectInputStream;
import java.io.*;
[ "java.io" ]
java.io;
200,443
public ServiceFuture<List<HostingEnvironmentDiagnosticsInner>> listDiagnosticsAsync(String resourceGroupName, String name, final ServiceCallback<List<HostingEnvironmentDiagnosticsInner>> serviceCallback) { return ServiceFuture.fromResponse(listDiagnosticsWithServiceResponseAsync(resourceGroupName, name), serviceCallback); }
ServiceFuture<List<HostingEnvironmentDiagnosticsInner>> function(String resourceGroupName, String name, final ServiceCallback<List<HostingEnvironmentDiagnosticsInner>> serviceCallback) { return ServiceFuture.fromResponse(listDiagnosticsWithServiceResponseAsync(resourceGroupName, name), serviceCallback); }
/** * Get diagnostic information for an App Service Environment. * Get diagnostic information for an App Service Environment. * * @param resourceGroupName Name of the resource group to which the resource belongs. * @param name Name of the App Service Environment. * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */
Get diagnostic information for an App Service Environment. Get diagnostic information for an App Service Environment
listDiagnosticsAsync
{ "repo_name": "anudeepsharma/azure-sdk-for-java", "path": "azure-mgmt-appservice/src/main/java/com/microsoft/azure/management/appservice/implementation/AppServiceEnvironmentsInner.java", "license": "mit", "size": 564891 }
[ "com.microsoft.rest.ServiceCallback", "com.microsoft.rest.ServiceFuture", "java.util.List" ]
import com.microsoft.rest.ServiceCallback; import com.microsoft.rest.ServiceFuture; import java.util.List;
import com.microsoft.rest.*; import java.util.*;
[ "com.microsoft.rest", "java.util" ]
com.microsoft.rest; java.util;
2,611,631
public void cacheResult(java.util.List<CJProduct> cjProducts);
void function(java.util.List<CJProduct> cjProducts);
/** * Caches the c j products in the entity cache if it is enabled. * * @param cjProducts the c j products */
Caches the c j products in the entity cache if it is enabled
cacheResult
{ "repo_name": "FuadEfendi/liferay-osgi", "path": "modules/ca.efendi.datafeeds.api/src/main/java/ca/efendi/datafeeds/service/persistence/CJProductPersistence.java", "license": "apache-2.0", "size": 53359 }
[ "ca.efendi.datafeeds.model.CJProduct" ]
import ca.efendi.datafeeds.model.CJProduct;
import ca.efendi.datafeeds.model.*;
[ "ca.efendi.datafeeds" ]
ca.efendi.datafeeds;
1,675,431
private TupleResultSet executeTupleQuery(String strquery, int type) throws OBDAException { startExecute(strquery, type); TupleResultSet result = executionthread.getTupleResult(); if (result == null) throw new RuntimeException("Error, the result set was null"); return result; }
TupleResultSet function(String strquery, int type) throws OBDAException { startExecute(strquery, type); TupleResultSet result = executionthread.getTupleResult(); if (result == null) throw new RuntimeException(STR); return result; }
/** * The method executes select or ask queries by starting a new quest * execution thread * * @param strquery * the select or ask query string * @param type * 1 - SELECT, 2 - ASK * @return the obtained TupleResultSet result * @throws OBDAException */
The method executes select or ask queries by starting a new quest execution thread
executeTupleQuery
{ "repo_name": "clarkparsia/ontop", "path": "reformulation-core/src/main/java/it/unibz/krdb/obda/owlrefplatform/core/QuestStatement.java", "license": "apache-2.0", "size": 30740 }
[ "it.unibz.krdb.obda.model.OBDAException", "it.unibz.krdb.obda.model.TupleResultSet" ]
import it.unibz.krdb.obda.model.OBDAException; import it.unibz.krdb.obda.model.TupleResultSet;
import it.unibz.krdb.obda.model.*;
[ "it.unibz.krdb" ]
it.unibz.krdb;
2,437,254
private static PageComposition parsePageComposition(ParsableBitArray data, int length) { int timeoutSecs = data.readBits(8); int version = data.readBits(4); int state = data.readBits(2); data.skipBits(2); int remainingLength = length - 2; SparseArray<PageRegion> regions = new SparseArray<>(); while (remainingLength > 0) { int regionId = data.readBits(8); data.skipBits(8); // Skip reserved. int regionHorizontalAddress = data.readBits(16); int regionVerticalAddress = data.readBits(16); remainingLength -= 6; regions.put(regionId, new PageRegion(regionHorizontalAddress, regionVerticalAddress)); } return new PageComposition(timeoutSecs, version, state, regions); }
static PageComposition function(ParsableBitArray data, int length) { int timeoutSecs = data.readBits(8); int version = data.readBits(4); int state = data.readBits(2); data.skipBits(2); int remainingLength = length - 2; SparseArray<PageRegion> regions = new SparseArray<>(); while (remainingLength > 0) { int regionId = data.readBits(8); data.skipBits(8); int regionHorizontalAddress = data.readBits(16); int regionVerticalAddress = data.readBits(16); remainingLength -= 6; regions.put(regionId, new PageRegion(regionHorizontalAddress, regionVerticalAddress)); } return new PageComposition(timeoutSecs, version, state, regions); }
/** * Parses a page composition segment, as defined by ETSI EN 300 743 7.2.2. */
Parses a page composition segment, as defined by ETSI EN 300 743 7.2.2
parsePageComposition
{ "repo_name": "amzn/exoplayer-amazon-port", "path": "library/core/src/main/java/com/google/android/exoplayer2/text/dvb/DvbParser.java", "license": "apache-2.0", "size": 38488 }
[ "android.util.SparseArray", "com.google.android.exoplayer2.util.ParsableBitArray" ]
import android.util.SparseArray; import com.google.android.exoplayer2.util.ParsableBitArray;
import android.util.*; import com.google.android.exoplayer2.util.*;
[ "android.util", "com.google.android" ]
android.util; com.google.android;
1,600,277
public static int countPackageActions(Long sid) { SelectMode m = ModeFactory.getMode("System_queries", "package_actions_count"); Map<String, Object> params = new HashMap<String, Object>(); params.put("server_id", sid); DataResult<Map<String, Object>> dr = makeDataResult(params, params, null, m); return ((Long) dr.get(0).get("count")).intValue(); }
static int function(Long sid) { SelectMode m = ModeFactory.getMode(STR, STR); Map<String, Object> params = new HashMap<String, Object>(); params.put(STR, sid); DataResult<Map<String, Object>> dr = makeDataResult(params, params, null, m); return ((Long) dr.get(0).get("count")).intValue(); }
/** * Returns the number of package actions associated with a system * @param sid The system's id * @return number of package actions */
Returns the number of package actions associated with a system
countPackageActions
{ "repo_name": "mcalmer/spacewalk", "path": "java/code/src/com/redhat/rhn/manager/system/SystemManager.java", "license": "gpl-2.0", "size": 134651 }
[ "com.redhat.rhn.common.db.datasource.DataResult", "com.redhat.rhn.common.db.datasource.ModeFactory", "com.redhat.rhn.common.db.datasource.SelectMode", "java.util.HashMap", "java.util.Map" ]
import com.redhat.rhn.common.db.datasource.DataResult; import com.redhat.rhn.common.db.datasource.ModeFactory; import com.redhat.rhn.common.db.datasource.SelectMode; import java.util.HashMap; import java.util.Map;
import com.redhat.rhn.common.db.datasource.*; import java.util.*;
[ "com.redhat.rhn", "java.util" ]
com.redhat.rhn; java.util;
1,043,611
public static void assertSortedListEquals(final char[] array, final int... elements) { Assert.assertEquals(elements.length, array.length); Arrays.sort(array); Assert.assertArrayEquals(TestUtils.newArray(array, elements), array); }
static void function(final char[] array, final int... elements) { Assert.assertEquals(elements.length, array.length); Arrays.sort(array); Assert.assertArrayEquals(TestUtils.newArray(array, elements), array); }
/** * Check if the array's content is identical to a given sequence of elements. */
Check if the array's content is identical to a given sequence of elements
assertSortedListEquals
{ "repo_name": "vsonnier/hppcrt", "path": "hppcrt/src/test/java/com/carrotsearch/hppcrt/TestUtils.java", "license": "apache-2.0", "size": 16902 }
[ "java.util.Arrays", "org.junit.Assert" ]
import java.util.Arrays; import org.junit.Assert;
import java.util.*; import org.junit.*;
[ "java.util", "org.junit" ]
java.util; org.junit;
105,274
private boolean destroyOnDataStore(Object aCallbackArgument) { RegionAdvisor advisor = getRegionAdvisor(); Set<InternalDistributedMember> attempted = new HashSet<InternalDistributedMember>(); checkReadiness(); while(!isDestroyed()) { Set<InternalDistributedMember> available = advisor.adviseInitializedDataStore(); available.removeAll(attempted); if(available.isEmpty()) { return false; } InternalDistributedMember next = available.iterator().next(); try { DestroyRegionOnDataStoreMessage.send(next, this, aCallbackArgument); return true; } catch(ReplyException e) { //try the next member if(logger.isTraceEnabled()) { logger.trace("Error destroying " + this + " on " + next, e); } } } return true; }
boolean function(Object aCallbackArgument) { RegionAdvisor advisor = getRegionAdvisor(); Set<InternalDistributedMember> attempted = new HashSet<InternalDistributedMember>(); checkReadiness(); while(!isDestroyed()) { Set<InternalDistributedMember> available = advisor.adviseInitializedDataStore(); available.removeAll(attempted); if(available.isEmpty()) { return false; } InternalDistributedMember next = available.iterator().next(); try { DestroyRegionOnDataStoreMessage.send(next, this, aCallbackArgument); return true; } catch(ReplyException e) { if(logger.isTraceEnabled()) { logger.trace(STR + this + STR + next, e); } } } return true; }
/**Globally destroy the partitioned region by sending a message * to a data store to do the destroy. * @return true if the region was destroyed successfully */
Globally destroy the partitioned region by sending a message to a data store to do the destroy
destroyOnDataStore
{ "repo_name": "robertgeiger/incubator-geode", "path": "gemfire-core/src/main/java/com/gemstone/gemfire/internal/cache/PartitionedRegion.java", "license": "apache-2.0", "size": 426773 }
[ "com.gemstone.gemfire.distributed.internal.ReplyException", "com.gemstone.gemfire.distributed.internal.membership.InternalDistributedMember", "com.gemstone.gemfire.internal.cache.partitioned.DestroyRegionOnDataStoreMessage", "com.gemstone.gemfire.internal.cache.partitioned.RegionAdvisor", "java.util.HashSet", "java.util.Set" ]
import com.gemstone.gemfire.distributed.internal.ReplyException; import com.gemstone.gemfire.distributed.internal.membership.InternalDistributedMember; import com.gemstone.gemfire.internal.cache.partitioned.DestroyRegionOnDataStoreMessage; import com.gemstone.gemfire.internal.cache.partitioned.RegionAdvisor; import java.util.HashSet; import java.util.Set;
import com.gemstone.gemfire.distributed.internal.*; import com.gemstone.gemfire.distributed.internal.membership.*; import com.gemstone.gemfire.internal.cache.partitioned.*; import java.util.*;
[ "com.gemstone.gemfire", "java.util" ]
com.gemstone.gemfire; java.util;
318,018
public void addRootEntries(Collection<String> entries) { for (String entry : entries) { jarEntries.put(new File(entry).getName(), entry); } }
void function(Collection<String> entries) { for (String entry : entries) { jarEntries.put(new File(entry).getName(), entry); } }
/** * Adds a collection of entries to the jar, each with a given source path, and with * the resulting file in the root of the jar. * <pre> * some/long/path.foo => (path.foo, some/long/path.foo) * </pre> */
Adds a collection of entries to the jar, each with a given source path, and with the resulting file in the root of the jar. <code> some/long/path.foo => (path.foo, some/long/path.foo) </code>
addRootEntries
{ "repo_name": "sdtwigg/rules_scala", "path": "src/java/io/bazel/rulesscala/jar/JarCreator.java", "license": "apache-2.0", "size": 6889 }
[ "java.io.File", "java.util.Collection" ]
import java.io.File; import java.util.Collection;
import java.io.*; import java.util.*;
[ "java.io", "java.util" ]
java.io; java.util;
1,512,093
List<LocusScore> getCoverageScores(String chr, int start, int end, int zoom);
List<LocusScore> getCoverageScores(String chr, int start, int end, int zoom);
/** * Return a list of coverage values spanning the given interval. This can be null if coverage is not known * or relevant. * * @param chr * @param start * @param end * @param zoom the zoom level * @return */
Return a list of coverage values spanning the given interval. This can be null if coverage is not known or relevant
getCoverageScores
{ "repo_name": "popitsch/varan-gie", "path": "src/org/broad/igv/track/FeatureSource.java", "license": "mit", "size": 3141 }
[ "java.util.List", "org.broad.igv.feature.LocusScore" ]
import java.util.List; import org.broad.igv.feature.LocusScore;
import java.util.*; import org.broad.igv.feature.*;
[ "java.util", "org.broad.igv" ]
java.util; org.broad.igv;
2,809,587
public SnmpVarBind[] decodeVarBindList(BerDecoder bdec) throws BerException { bdec.openSequence() ; Vector<SnmpVarBind> tmp = new Vector<SnmpVarBind>() ; while (bdec.cannotCloseSequence()) { SnmpVarBind bind = new SnmpVarBind() ; bdec.openSequence() ; bind.oid = new SnmpOid(bdec.fetchOid()) ; bind.setSnmpValue(decodeVarBindValue(bdec)) ; bdec.closeSequence() ; tmp.addElement(bind) ; } bdec.closeSequence() ; SnmpVarBind[] varBindList= new SnmpVarBind[tmp.size()] ; tmp.copyInto(varBindList); return varBindList ; }
SnmpVarBind[] function(BerDecoder bdec) throws BerException { bdec.openSequence() ; Vector<SnmpVarBind> tmp = new Vector<SnmpVarBind>() ; while (bdec.cannotCloseSequence()) { SnmpVarBind bind = new SnmpVarBind() ; bdec.openSequence() ; bind.oid = new SnmpOid(bdec.fetchOid()) ; bind.setSnmpValue(decodeVarBindValue(bdec)) ; bdec.closeSequence() ; tmp.addElement(bind) ; } bdec.closeSequence() ; SnmpVarBind[] varBindList= new SnmpVarBind[tmp.size()] ; tmp.copyInto(varBindList); return varBindList ; }
/** * For SNMP Runtime private use only. */
For SNMP Runtime private use only
decodeVarBindList
{ "repo_name": "rokn/Count_Words_2015", "path": "testing/openjdk2/jdk/src/share/classes/com/sun/jmx/snmp/SnmpMsg.java", "license": "mit", "size": 16647 }
[ "java.util.Vector" ]
import java.util.Vector;
import java.util.*;
[ "java.util" ]
java.util;
2,593,353
public static List lookupPublishedByOriginal(User user, Errata original) { return ErrataFactory.lookupPublishedByOriginal(user.getOrg(), original); }
static List function(User user, Errata original) { return ErrataFactory.lookupPublishedByOriginal(user.getOrg(), original); }
/** * Lookup all the clones of a particular errata * @param user User that is performing the cloning operation * @param original Original errata that the clones are clones of * @return list of clones of the errata */
Lookup all the clones of a particular errata
lookupPublishedByOriginal
{ "repo_name": "Pajinek/spacewalk", "path": "java/code/src/com/redhat/rhn/manager/errata/ErrataManager.java", "license": "gpl-2.0", "size": 70020 }
[ "com.redhat.rhn.domain.errata.Errata", "com.redhat.rhn.domain.errata.ErrataFactory", "com.redhat.rhn.domain.user.User", "java.util.List" ]
import com.redhat.rhn.domain.errata.Errata; import com.redhat.rhn.domain.errata.ErrataFactory; import com.redhat.rhn.domain.user.User; import java.util.List;
import com.redhat.rhn.domain.errata.*; import com.redhat.rhn.domain.user.*; import java.util.*;
[ "com.redhat.rhn", "java.util" ]
com.redhat.rhn; java.util;
2,443,678
@Override public void setByteArrayRepresentation(byte[] in) { feature = SerializationUtils.toDoubleArray(in); }
void function(byte[] in) { feature = SerializationUtils.toDoubleArray(in); }
/** * Reads descriptor from a byte array. Much faster than the String based method. * * @param in byte array from corresponding method * @see CEDD#getByteArrayRepresentation */
Reads descriptor from a byte array. Much faster than the String based method
setByteArrayRepresentation
{ "repo_name": "huquanbo/lirenewdemo", "path": "src/main/java/net/semanticmetadata/lire/imageanalysis/features/local/surf/SurfFeature.java", "license": "gpl-2.0", "size": 5287 }
[ "net.semanticmetadata.lire.utils.SerializationUtils" ]
import net.semanticmetadata.lire.utils.SerializationUtils;
import net.semanticmetadata.lire.utils.*;
[ "net.semanticmetadata.lire" ]
net.semanticmetadata.lire;
1,618,069
void recordViewUsesNonDeterministicKeyCoder(PTransform<?, ?> ptransform) { ptransformViewsWithNonDeterministicKeyCoders.add(ptransform); } private class BatchWriteFactory<T> implements PTransformOverrideFactory<PCollection<T>, PDone, Write<T>> { private final DataflowRunner runner; private BatchWriteFactory(DataflowRunner dataflowRunner) { this.runner = dataflowRunner; }
void recordViewUsesNonDeterministicKeyCoder(PTransform<?, ?> ptransform) { ptransformViewsWithNonDeterministicKeyCoders.add(ptransform); } private class BatchWriteFactory<T> implements PTransformOverrideFactory<PCollection<T>, PDone, Write<T>> { private final DataflowRunner runner; private BatchWriteFactory(DataflowRunner dataflowRunner) { this.runner = dataflowRunner; }
/** * Records that the {@link PTransform} requires a deterministic key coder. */
Records that the <code>PTransform</code> requires a deterministic key coder
recordViewUsesNonDeterministicKeyCoder
{ "repo_name": "xsm110/Apache-Beam", "path": "runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowRunner.java", "license": "apache-2.0", "size": 58095 }
[ "org.apache.beam.sdk.io.Write", "org.apache.beam.sdk.runners.PTransformOverrideFactory", "org.apache.beam.sdk.transforms.PTransform", "org.apache.beam.sdk.values.PCollection", "org.apache.beam.sdk.values.PDone" ]
import org.apache.beam.sdk.io.Write; import org.apache.beam.sdk.runners.PTransformOverrideFactory; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PDone;
import org.apache.beam.sdk.io.*; import org.apache.beam.sdk.runners.*; import org.apache.beam.sdk.transforms.*; import org.apache.beam.sdk.values.*;
[ "org.apache.beam" ]
org.apache.beam;
1,984,706
private boolean updateField(boolean predecessorSet, Collection<Node<T>> newSet) { if (predecessorSet) { preds = newSet; } else { succs = newSet; } return true; }
boolean function(boolean predecessorSet, Collection<Node<T>> newSet) { if (predecessorSet) { preds = newSet; } else { succs = newSet; } return true; }
/** * Update either the {@link #preds} or {@link #succs} field to point to the * new set. * @return {@code true}, because the set must have been updated */
Update either the <code>#preds</code> or <code>#succs</code> field to point to the new set
updateField
{ "repo_name": "vt09/bazel", "path": "src/main/java/com/google/devtools/build/lib/graph/Node.java", "license": "apache-2.0", "size": 8057 }
[ "java.util.Collection" ]
import java.util.Collection;
import java.util.*;
[ "java.util" ]
java.util;
2,707,419
public ByteSizeValue storeSize() { long bytes = -1; for (IndexShardStatus shard : this) { if (shard.storeSize() != null) { if (bytes == -1) { bytes = 0; } bytes += shard.storeSize().bytes(); } } if (bytes == -1) { return null; } return new ByteSizeValue(bytes); }
ByteSizeValue function() { long bytes = -1; for (IndexShardStatus shard : this) { if (shard.storeSize() != null) { if (bytes == -1) { bytes = 0; } bytes += shard.storeSize().bytes(); } } if (bytes == -1) { return null; } return new ByteSizeValue(bytes); }
/** * Returns the full store size in bytes, of both primaries and replicas. */
Returns the full store size in bytes, of both primaries and replicas
storeSize
{ "repo_name": "jprante/elasticsearch-client", "path": "elasticsearch-client-admin/src/main/java/org/elasticsearch/action/admin/indices/status/IndexStatus.java", "license": "apache-2.0", "size": 6364 }
[ "org.elasticsearch.common.unit.ByteSizeValue" ]
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.*;
[ "org.elasticsearch.common" ]
org.elasticsearch.common;
1,943,217
public final IContentProvider acquireUnstableProvider(Uri uri) { if (!SCHEME_CONTENT.equals(uri.getScheme())) { return null; } String auth = uri.getAuthority(); if (auth != null) { return acquireUnstableProvider(mContext, uri.getAuthority()); } return null; }
final IContentProvider function(Uri uri) { if (!SCHEME_CONTENT.equals(uri.getScheme())) { return null; } String auth = uri.getAuthority(); if (auth != null) { return acquireUnstableProvider(mContext, uri.getAuthority()); } return null; }
/** * Returns the content provider for the given content URI. * * @param uri The URI to a content provider * @return The ContentProvider for the given URI, or null if no content provider is found. * @hide */
Returns the content provider for the given content URI
acquireUnstableProvider
{ "repo_name": "haikuowuya/android_system_code", "path": "src/android/content/ContentResolver.java", "license": "apache-2.0", "size": 82308 }
[ "android.net.Uri" ]
import android.net.Uri;
import android.net.*;
[ "android.net" ]
android.net;
428,838
private Cache.ValueWrapper findCachedItem(Collection<CacheOperationContext> contexts) { Object result = ExpressionEvaluator.NO_RESULT; for (CacheOperationContext context : contexts) { if (isConditionPassing(context, result)) { Object key = generateKey(context, result); Cache.ValueWrapper cached = findInCaches(context, key); if (cached != null) { return cached; } } } return null; }
Cache.ValueWrapper function(Collection<CacheOperationContext> contexts) { Object result = ExpressionEvaluator.NO_RESULT; for (CacheOperationContext context : contexts) { if (isConditionPassing(context, result)) { Object key = generateKey(context, result); Cache.ValueWrapper cached = findInCaches(context, key); if (cached != null) { return cached; } } } return null; }
/** * Find a cached item only for {@link CacheableOperation} that passes the condition. * @param contexts the cacheable operations * @return a {@link Cache.ValueWrapper} holding the cached item, * or {@code null} if none is found */
Find a cached item only for <code>CacheableOperation</code> that passes the condition
findCachedItem
{ "repo_name": "leogoing/spring_jeesite", "path": "spring-context-4.0/org/springframework/cache/interceptor/CacheAspectSupport.java", "license": "apache-2.0", "size": 15760 }
[ "java.util.Collection", "org.springframework.cache.Cache" ]
import java.util.Collection; import org.springframework.cache.Cache;
import java.util.*; import org.springframework.cache.*;
[ "java.util", "org.springframework.cache" ]
java.util; org.springframework.cache;
2,857,326
public void retryFailedRelease(Integer repositoryId, Integer releaseId) { URLBuilder url = new URLBuilder(host, "/api/" + repositoryId + "/releases/" + releaseId + "/retry.xml"); httpConnection.doPut(url.toURL(), null); }
void function(Integer repositoryId, Integer releaseId) { URLBuilder url = new URLBuilder(host, "/api/" + repositoryId + STR + releaseId + STR); httpConnection.doPut(url.toURL(), null); }
/** * Retry failed deployment * * @param repositoryId * @param releaseId */
Retry failed deployment
retryFailedRelease
{ "repo_name": "raupachz/beanstalk4j", "path": "src/main/java/org/beanstalk4j/BeanstalkApi.java", "license": "apache-2.0", "size": 38861 }
[ "org.beanstalk4j.http.URLBuilder" ]
import org.beanstalk4j.http.URLBuilder;
import org.beanstalk4j.http.*;
[ "org.beanstalk4j.http" ]
org.beanstalk4j.http;
2,582,954
public void setQueries(List<Query> queries) { this.queries = queries; }
void function(List<Query> queries) { this.queries = queries; }
/** * Sets the queries associated with the batch. * * @param queries The queries associated with the batch. */
Sets the queries associated with the batch
setQueries
{ "repo_name": "prestonfff/Argus", "path": "ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/Batch.java", "license": "bsd-3-clause", "size": 8449 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
240,550
public void setItems(List<T> newItems) { clear(); if (newItems != null) { addPage(newItems, false); } }
void function(List<T> newItems) { clear(); if (newItems != null) { addPage(newItems, false); } }
/** * Clears existing items from the adapter and sets given list as the data. * If null is provided, this method clears the existing values. * This avoids null value errors. * @param newItems */
Clears existing items from the adapter and sets given list as the data. If null is provided, this method clears the existing values. This avoids null value errors
setItems
{ "repo_name": "KirillMakarov/edx-app-android", "path": "VideoLocker/src/main/java/org/edx/mobile/view/adapters/BaseListAdapter.java", "license": "apache-2.0", "size": 8556 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
346,720
public static List<String> getJiraMetricNames() { String metrics[] = { "PROJECTS_PER_SYSTEM_INSTANCE", "ISSUES_PER_PROJECTS_PER_SYSTEM_INSTANCE", "FIXED_ISSUES_PER_PROJECT", "UNRESOLVED_ISSUES_PER_PROJECT", "UNRESOLVED_BUG_ISSUES_PER_PROJECT", "UNRESOLVED_TASK_ISSUES_PER_PROJECT" }; return Arrays.asList(metrics); }
static List<String> function() { String metrics[] = { STR, STR, STR, STR, STR, STR }; return Arrays.asList(metrics); }
/** * Get the Jira metric names * TODO: Fetch the list of metrics from uQasarMetric * @return */
Get the Jira metric names
getJiraMetricNames
{ "repo_name": "schlotze/u-qasar.platform", "path": "src/main/java/eu/uqasar/util/UQasarUtil.java", "license": "apache-2.0", "size": 44740 }
[ "java.util.Arrays", "java.util.List" ]
import java.util.Arrays; import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
358,637
@RequestMapping(method = RequestMethod.GET, value="/status/config") protected ModelAndView handleRequestInternal( final HttpServletRequest request, final HttpServletResponse response) throws Exception { return new ModelAndView(VIEW_CONFIG); }
@RequestMapping(method = RequestMethod.GET, value=STR) ModelAndView function( final HttpServletRequest request, final HttpServletResponse response) throws Exception { return new ModelAndView(VIEW_CONFIG); }
/** * Handle request. * * @param request the request * @param response the response * @return the model and view * @throws Exception the exception */
Handle request
handleRequestInternal
{ "repo_name": "joansmith/cas", "path": "cas-server-webapp-reports/src/main/java/org/jasig/cas/web/report/InternalConfigStateController.java", "license": "apache-2.0", "size": 1974 }
[ "javax.servlet.http.HttpServletRequest", "javax.servlet.http.HttpServletResponse", "org.springframework.web.bind.annotation.RequestMapping", "org.springframework.web.bind.annotation.RequestMethod", "org.springframework.web.servlet.ModelAndView" ]
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.servlet.ModelAndView;
import javax.servlet.http.*; import org.springframework.web.bind.annotation.*; import org.springframework.web.servlet.*;
[ "javax.servlet", "org.springframework.web" ]
javax.servlet; org.springframework.web;
75,495
protected String getInStateTripTypeCode() { String inStateTripTypeCode = this.getParameterService().getParameterValueAsString(PerDiemLoadStep.class, PerDiemParameter.IN_STATE_TRIP_TYPE_CODE_PARAM_NAME); return inStateTripTypeCode; }
String function() { String inStateTripTypeCode = this.getParameterService().getParameterValueAsString(PerDiemLoadStep.class, PerDiemParameter.IN_STATE_TRIP_TYPE_CODE_PARAM_NAME); return inStateTripTypeCode; }
/** * get in state trip type code defined as an application parameter * * @return in state trip type code */
get in state trip type code defined as an application parameter
getInStateTripTypeCode
{ "repo_name": "ua-eas/ua-kfs-5.3", "path": "work/src/org/kuali/kfs/module/tem/service/impl/PerDiemServiceImpl.java", "license": "agpl-3.0", "size": 36338 }
[ "org.kuali.kfs.module.tem.TemConstants", "org.kuali.kfs.module.tem.batch.PerDiemLoadStep" ]
import org.kuali.kfs.module.tem.TemConstants; import org.kuali.kfs.module.tem.batch.PerDiemLoadStep;
import org.kuali.kfs.module.tem.*; import org.kuali.kfs.module.tem.batch.*;
[ "org.kuali.kfs" ]
org.kuali.kfs;
37,611
public long getExtendedOperationResponseTimeoutMillis( @NotNull final String requestOID) { final Long timeout = responseTimeoutMillisByExtendedOperationType.get(requestOID); if (timeout == null) { return responseTimeoutMillisByOperationType.get(OperationType.EXTENDED); } else { return timeout; } }
long function( @NotNull final String requestOID) { final Long timeout = responseTimeoutMillisByExtendedOperationType.get(requestOID); if (timeout == null) { return responseTimeoutMillisByOperationType.get(OperationType.EXTENDED); } else { return timeout; } }
/** * Retrieves the maximum length of time in milliseconds that an extended * operation with the specified request OID should be allowed to block while * waiting for a response from the server. * * @param requestOID The request OID for the extended operation for which to * make the determination. It must not be {@code null}. * * @return The maximum length of time in milliseconds that the specified type * of extended operation should be allowed to block while waiting for * a response from the server, or zero if there should not be any * default timeout. */
Retrieves the maximum length of time in milliseconds that an extended operation with the specified request OID should be allowed to block while waiting for a response from the server
getExtendedOperationResponseTimeoutMillis
{ "repo_name": "UnboundID/ldapsdk", "path": "src/com/unboundid/ldap/sdk/LDAPConnectionOptions.java", "license": "gpl-2.0", "size": 112622 }
[ "com.unboundid.util.NotNull" ]
import com.unboundid.util.NotNull;
import com.unboundid.util.*;
[ "com.unboundid.util" ]
com.unboundid.util;
1,018,228
public Observable<ServiceResponse<SubnetInner>> getWithServiceResponseAsync(String resourceGroupName, String virtualNetworkName, String subnetName) { if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (virtualNetworkName == null) { throw new IllegalArgumentException("Parameter virtualNetworkName is required and cannot be null."); } if (subnetName == null) { throw new IllegalArgumentException("Parameter subnetName is required and cannot be null."); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); }
Observable<ServiceResponse<SubnetInner>> function(String resourceGroupName, String virtualNetworkName, String subnetName) { if (resourceGroupName == null) { throw new IllegalArgumentException(STR); } if (virtualNetworkName == null) { throw new IllegalArgumentException(STR); } if (subnetName == null) { throw new IllegalArgumentException(STR); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException(STR); }
/** * Gets the specified subnet by virtual network and resource group. * * @param resourceGroupName The name of the resource group. * @param virtualNetworkName The name of the virtual network. * @param subnetName The name of the subnet. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the SubnetInner object */
Gets the specified subnet by virtual network and resource group
getWithServiceResponseAsync
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/network/mgmt-v2020_03_01/src/main/java/com/microsoft/azure/management/network/v2020_03_01/implementation/SubnetsInner.java", "license": "mit", "size": 84300 }
[ "com.microsoft.rest.ServiceResponse" ]
import com.microsoft.rest.ServiceResponse;
import com.microsoft.rest.*;
[ "com.microsoft.rest" ]
com.microsoft.rest;
2,286,728
public boolean existsPluggedInterfaceWithSameMac(VmNic interfaceToPlug) { List<VmNic> vmNetworkIntrefaces = getVmNicDao().getPluggedForMac(interfaceToPlug.getMacAddress()); for (VmNic vmNetworkInterface : vmNetworkIntrefaces) { if (!interfaceToPlug.getId().equals(vmNetworkInterface.getId())) { return true; } } return false; }
boolean function(VmNic interfaceToPlug) { List<VmNic> vmNetworkIntrefaces = getVmNicDao().getPluggedForMac(interfaceToPlug.getMacAddress()); for (VmNic vmNetworkInterface : vmNetworkIntrefaces) { if (!interfaceToPlug.getId().equals(vmNetworkInterface.getId())) { return true; } } return false; }
/*** * Returns whether or not there is a plugged network interface with the same MAC address as the given interface * * @param interfaceToPlug * the network interface that needs to be plugged * @return <code>true</code> if the MAC is used by another plugged network interface, <code>false</code> otherwise. */
Returns whether or not there is a plugged network interface with the same MAC address as the given interface
existsPluggedInterfaceWithSameMac
{ "repo_name": "jtux270/translate", "path": "ovirt/3.6_source/backend/manager/modules/bll/src/main/java/org/ovirt/engine/core/bll/network/VmInterfaceManager.java", "license": "gpl-3.0", "size": 11106 }
[ "java.util.List", "org.ovirt.engine.core.common.businessentities.network.VmNic" ]
import java.util.List; import org.ovirt.engine.core.common.businessentities.network.VmNic;
import java.util.*; import org.ovirt.engine.core.common.businessentities.network.*;
[ "java.util", "org.ovirt.engine" ]
java.util; org.ovirt.engine;
2,119,622
public void setHundredDollarCount(Integer count) { if (count != null) { this.financialDocumentHundredDollarAmount = new KualiDecimal(count.intValue()).multiply(KFSConstants.CurrencyTypeAmounts.HUNDRED_DOLLAR_AMOUNT); } }
void function(Integer count) { if (count != null) { this.financialDocumentHundredDollarAmount = new KualiDecimal(count.intValue()).multiply(KFSConstants.CurrencyTypeAmounts.HUNDRED_DOLLAR_AMOUNT); } }
/** * This sets the count of hundred dollar bills present in the drawer * * @param count the number of hundred dollar bills present in the drawer */
This sets the count of hundred dollar bills present in the drawer
setHundredDollarCount
{ "repo_name": "quikkian-ua-devops/will-financials", "path": "kfs-core/src/main/java/org/kuali/kfs/fp/businessobject/CurrencyDetail.java", "license": "agpl-3.0", "size": 32469 }
[ "org.kuali.kfs.sys.KFSConstants", "org.kuali.rice.core.api.util.type.KualiDecimal" ]
import org.kuali.kfs.sys.KFSConstants; import org.kuali.rice.core.api.util.type.KualiDecimal;
import org.kuali.kfs.sys.*; import org.kuali.rice.core.api.util.type.*;
[ "org.kuali.kfs", "org.kuali.rice" ]
org.kuali.kfs; org.kuali.rice;
2,406,926
public synchronized XSObject item(int index) { if (fArray == null) { // calculate the total number of elements getLength(); fArray = new XSObject[fLength]; int pos = 0; // get components from all SymbolHashes for (int i = 0; i < fNSNum; i++) { pos += fMaps[i].getValues(fArray, pos); } } if (index < 0 || index >= fLength) { return null; } return fArray[index]; }
synchronized XSObject function(int index) { if (fArray == null) { getLength(); fArray = new XSObject[fLength]; int pos = 0; for (int i = 0; i < fNSNum; i++) { pos += fMaps[i].getValues(fArray, pos); } } if (index < 0 index >= fLength) { return null; } return fArray[index]; }
/** * Returns the <code>index</code>th item in the collection or * <code>null</code> if <code>index</code> is greater than or equal to * the number of objects in the list. The index starts at 0. * @param index index into the collection. * @return The <code>XSObject</code> at the <code>index</code>th * position in the <code>XSObjectList</code>, or <code>null</code> if * the index specified is not valid. */
Returns the <code>index</code>th item in the collection or <code>null</code> if <code>index</code> is greater than or equal to the number of objects in the list. The index starts at 0
item
{ "repo_name": "md-5/jdk10", "path": "src/java.xml/share/classes/com/sun/org/apache/xerces/internal/impl/xs/util/XSNamedMapImpl.java", "license": "gpl-2.0", "size": 10775 }
[ "com.sun.org.apache.xerces.internal.xs.XSObject" ]
import com.sun.org.apache.xerces.internal.xs.XSObject;
import com.sun.org.apache.xerces.internal.xs.*;
[ "com.sun.org" ]
com.sun.org;
366,122
private Result execute(final int nr, Result prev_result, final JobEntryCopy jobEntryCopy, JobEntryCopy previous, String reason) throws KettleException { Result res = null; if (stopped.get()) { res=new Result(nr); res.stopped=true; return res; } if(log.isDetailed()) log.logDetailed("exec("+nr+", "+(prev_result!=null?prev_result.getNrErrors():0)+", "+(jobEntryCopy!=null?jobEntryCopy.toString():"null")+")"); // What entry is next? JobEntryInterface jobEntryInterface = jobEntryCopy.getEntry(); jobEntryInterface.getLogChannel().setLogLevel(logLevel); // Track the fact that we are going to launch the next job entry... JobEntryResult jerBefore = new JobEntryResult(null, null, BaseMessages.getString(PKG, "Job.Comment.JobStarted"), reason, jobEntryCopy.getName(), jobEntryCopy.getNr(), environmentSubstitute(jobEntryCopy.getEntry().getFilename())); jobTracker.addJobTracker(new JobTracker(jobMeta, jerBefore)); Result prevResult = null; if ( prev_result != null ) { prevResult = (Result)prev_result.clone(); } else { prevResult = new Result(); } ClassLoader cl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(jobEntryInterface.getClass().getClassLoader()); // Execute this entry... 
JobEntryInterface cloneJei = (JobEntryInterface)jobEntryInterface.clone(); ((VariableSpace)cloneJei).copyVariablesFrom(this); cloneJei.setRepository(rep); cloneJei.setParentJob(this); final long start = System.currentTimeMillis(); cloneJei.getLogChannel().logDetailed("Starting job entry"); for (JobEntryListener jobEntryListener : jobEntryListeners) { jobEntryListener.beforeExecution(this, jobEntryCopy, cloneJei); } if (interactive) { if (jobEntryCopy.isTransformation()) { getActiveJobEntryTransformations().put(jobEntryCopy, (JobEntryTrans)cloneJei); } if (jobEntryCopy.isJob()) { getActiveJobEntryJobs().put(jobEntryCopy, (JobEntryJob)cloneJei); } } final Result result = cloneJei.execute(prevResult, nr); final long end = System.currentTimeMillis(); if (interactive) { if (jobEntryCopy.isTransformation()) { getActiveJobEntryTransformations().remove(jobEntryCopy); } if (jobEntryCopy.isJob()) { getActiveJobEntryJobs().remove(jobEntryCopy); } } if (cloneJei instanceof JobEntryTrans) { String throughput = result.getReadWriteThroughput((int)((end-start) / 1000)); if (throughput != null) { log.logMinimal(throughput); } } for (JobEntryListener jobEntryListener : jobEntryListeners) { jobEntryListener.afterExecution(this, jobEntryCopy, cloneJei, result); } Thread.currentThread().setContextClassLoader(cl); addErrors((int)result.getNrErrors()); // Also capture the logging text after the execution... // Log4jBufferAppender appender = CentralLogStore.getAppender(); StringBuffer logTextBuffer = appender.getBuffer(cloneJei.getLogChannel().getLogChannelId(), false); result.setLogText( logTextBuffer.toString() ); // Save this result as well... 
// JobEntryResult jerAfter = new JobEntryResult(result, cloneJei.getLogChannel().getLogChannelId(), BaseMessages.getString(PKG, "Job.Comment.JobFinished"), null, jobEntryCopy.getName(), jobEntryCopy.getNr(), environmentSubstitute(jobEntryCopy.getEntry().getFilename())); jobTracker.addJobTracker(new JobTracker(jobMeta, jerAfter)); jobEntryResults.add(jerAfter); // Try all next job entries. // // Keep track of all the threads we fired in case of parallel execution... // Keep track of the results of these executions too. // final List<Thread> threads = new ArrayList<Thread>(); final List<Result> threadResults = new ArrayList<Result>(); final List<KettleException> threadExceptions = new ArrayList<KettleException>(); final List<JobEntryCopy> threadEntries= new ArrayList<JobEntryCopy>(); // Launch only those where the hop indicates true or false // int nrNext = jobMeta.findNrNextJobEntries(jobEntryCopy); for (int i=0;i<nrNext && !isStopped();i++) { // The next entry is... final JobEntryCopy nextEntry = jobMeta.findNextJobEntry(jobEntryCopy, i); // See if we need to execute this... final JobHopMeta hi = jobMeta.findJobHop(jobEntryCopy, nextEntry); // The next comment... final String nextComment; if (hi.isUnconditional()) { nextComment = BaseMessages.getString(PKG, "Job.Comment.FollowedUnconditional"); } else { if (result.getResult()) { nextComment = BaseMessages.getString(PKG, "Job.Comment.FollowedSuccess"); } else { nextComment = BaseMessages.getString(PKG, "Job.Comment.FollowedFailure"); } } // // If the link is unconditional, execute the next job entry (entries). // If the start point was an evaluation and the link color is correct: green or red, execute the next job entry... // if ( hi.isUnconditional() || ( jobEntryCopy.evaluates() && ( ! ( hi.getEvaluation() ^ result.getResult() ) ) ) ) { // Start this next step! 
if(log.isBasic()) log.logBasic(BaseMessages.getString(PKG, "Job.Log.StartingEntry",nextEntry.getName())); // Pass along the previous result, perhaps the next job can use it... // However, set the number of errors back to 0 (if it should be reset) // When an evaluation is executed the errors e.g. should not be reset. if ( nextEntry.resetErrorsBeforeExecution() ) { result.setNrErrors(0); } // Now execute! // // if (we launch in parallel, fire the execution off in a new thread... // if (jobEntryCopy.isLaunchingInParallel()) { threadEntries.add(nextEntry);
Result function(final int nr, Result prev_result, final JobEntryCopy jobEntryCopy, JobEntryCopy previous, String reason) throws KettleException { Result res = null; if (stopped.get()) { res=new Result(nr); res.stopped=true; return res; } if(log.isDetailed()) log.logDetailed("exec("+nr+STR+(prev_result!=null?prev_result.getNrErrors():0)+STR+(jobEntryCopy!=null?jobEntryCopy.toString():"null")+")"); JobEntryInterface jobEntryInterface = jobEntryCopy.getEntry(); jobEntryInterface.getLogChannel().setLogLevel(logLevel); JobEntryResult jerBefore = new JobEntryResult(null, null, BaseMessages.getString(PKG, STR), reason, jobEntryCopy.getName(), jobEntryCopy.getNr(), environmentSubstitute(jobEntryCopy.getEntry().getFilename())); jobTracker.addJobTracker(new JobTracker(jobMeta, jerBefore)); Result prevResult = null; if ( prev_result != null ) { prevResult = (Result)prev_result.clone(); } else { prevResult = new Result(); } ClassLoader cl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(jobEntryInterface.getClass().getClassLoader()); JobEntryInterface cloneJei = (JobEntryInterface)jobEntryInterface.clone(); ((VariableSpace)cloneJei).copyVariablesFrom(this); cloneJei.setRepository(rep); cloneJei.setParentJob(this); final long start = System.currentTimeMillis(); cloneJei.getLogChannel().logDetailed(STR); for (JobEntryListener jobEntryListener : jobEntryListeners) { jobEntryListener.beforeExecution(this, jobEntryCopy, cloneJei); } if (interactive) { if (jobEntryCopy.isTransformation()) { getActiveJobEntryTransformations().put(jobEntryCopy, (JobEntryTrans)cloneJei); } if (jobEntryCopy.isJob()) { getActiveJobEntryJobs().put(jobEntryCopy, (JobEntryJob)cloneJei); } } final Result result = cloneJei.execute(prevResult, nr); final long end = System.currentTimeMillis(); if (interactive) { if (jobEntryCopy.isTransformation()) { getActiveJobEntryTransformations().remove(jobEntryCopy); } if (jobEntryCopy.isJob()) { 
getActiveJobEntryJobs().remove(jobEntryCopy); } } if (cloneJei instanceof JobEntryTrans) { String throughput = result.getReadWriteThroughput((int)((end-start) / 1000)); if (throughput != null) { log.logMinimal(throughput); } } for (JobEntryListener jobEntryListener : jobEntryListeners) { jobEntryListener.afterExecution(this, jobEntryCopy, cloneJei, result); } Thread.currentThread().setContextClassLoader(cl); addErrors((int)result.getNrErrors()); Log4jBufferAppender appender = CentralLogStore.getAppender(); StringBuffer logTextBuffer = appender.getBuffer(cloneJei.getLogChannel().getLogChannelId(), false); result.setLogText( logTextBuffer.toString() ); JobEntryResult jerAfter = new JobEntryResult(result, cloneJei.getLogChannel().getLogChannelId(), BaseMessages.getString(PKG, STR), null, jobEntryCopy.getName(), jobEntryCopy.getNr(), environmentSubstitute(jobEntryCopy.getEntry().getFilename())); jobTracker.addJobTracker(new JobTracker(jobMeta, jerAfter)); jobEntryResults.add(jerAfter); final List<Thread> threads = new ArrayList<Thread>(); final List<Result> threadResults = new ArrayList<Result>(); final List<KettleException> threadExceptions = new ArrayList<KettleException>(); final List<JobEntryCopy> threadEntries= new ArrayList<JobEntryCopy>(); int nrNext = jobMeta.findNrNextJobEntries(jobEntryCopy); for (int i=0;i<nrNext && !isStopped();i++) { final JobEntryCopy nextEntry = jobMeta.findNextJobEntry(jobEntryCopy, i); final JobHopMeta hi = jobMeta.findJobHop(jobEntryCopy, nextEntry); final String nextComment; if (hi.isUnconditional()) { nextComment = BaseMessages.getString(PKG, STR); } else { if (result.getResult()) { nextComment = BaseMessages.getString(PKG, STR); } else { nextComment = BaseMessages.getString(PKG, STR); } } if ( hi.isUnconditional() ( jobEntryCopy.evaluates() && ( ! 
( hi.getEvaluation() ^ result.getResult() ) ) ) ) { if(log.isBasic()) log.logBasic(BaseMessages.getString(PKG, STR,nextEntry.getName())); if ( nextEntry.resetErrorsBeforeExecution() ) { result.setNrErrors(0); } if (jobEntryCopy.isLaunchingInParallel()) { threadEntries.add(nextEntry);
/** * Execute a job entry recursively and move to the next job entry automatically.<br> * Uses a back-tracking algorithm.<br> * * @param nr * @param prev_result * @param jobEntryCopy * @param previous * @param reason * @return * @throws KettleException */
Execute a job entry recursively and move to the next job entry automatically. Uses a back-tracking algorithm
execute
{ "repo_name": "juanmjacobs/kettle", "path": "src/org/pentaho/di/job/Job.java", "license": "lgpl-2.1", "size": 50393 }
[ "java.util.ArrayList", "java.util.List", "org.pentaho.di.core.Result", "org.pentaho.di.core.exception.KettleException", "org.pentaho.di.core.gui.JobTracker", "org.pentaho.di.core.logging.CentralLogStore", "org.pentaho.di.core.logging.Log4jBufferAppender", "org.pentaho.di.core.variables.VariableSpace", "org.pentaho.di.i18n.BaseMessages", "org.pentaho.di.job.entries.job.JobEntryJob", "org.pentaho.di.job.entries.trans.JobEntryTrans", "org.pentaho.di.job.entry.JobEntryCopy", "org.pentaho.di.job.entry.JobEntryInterface" ]
import java.util.ArrayList; import java.util.List; import org.pentaho.di.core.Result; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.gui.JobTracker; import org.pentaho.di.core.logging.CentralLogStore; import org.pentaho.di.core.logging.Log4jBufferAppender; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.job.entries.job.JobEntryJob; import org.pentaho.di.job.entries.trans.JobEntryTrans; import org.pentaho.di.job.entry.JobEntryCopy; import org.pentaho.di.job.entry.JobEntryInterface;
import java.util.*; import org.pentaho.di.core.*; import org.pentaho.di.core.exception.*; import org.pentaho.di.core.gui.*; import org.pentaho.di.core.logging.*; import org.pentaho.di.core.variables.*; import org.pentaho.di.i18n.*; import org.pentaho.di.job.entries.job.*; import org.pentaho.di.job.entries.trans.*; import org.pentaho.di.job.entry.*;
[ "java.util", "org.pentaho.di" ]
java.util; org.pentaho.di;
2,000,020
public void force (boolean metaData) throws IOException { if (!isOpen ()) throw new ClosedChannelException (); // FIXME: What to do with metaData ? nio_msync (fd, address, length); }
void function (boolean metaData) throws IOException { if (!isOpen ()) throw new ClosedChannelException (); nio_msync (fd, address, length); }
/** * msync with the disk */
msync with the disk
force
{ "repo_name": "unofficial-opensource-apple/gccfast", "path": "libjava/gnu/java/nio/FileChannelImpl.java", "license": "gpl-2.0", "size": 9240 }
[ "java.io.IOException", "java.nio.channels.ClosedChannelException" ]
import java.io.IOException; import java.nio.channels.ClosedChannelException;
import java.io.*; import java.nio.channels.*;
[ "java.io", "java.nio" ]
java.io; java.nio;
1,324,554
public int getSoTimeout() throws IOException { if (isClosed()) throw new SocketException("ServerSocket is closed"); Object timeout = impl.getOption(SocketOptions.SO_TIMEOUT); if (! (timeout instanceof Integer)) throw new IOException("Internal Error"); return ((Integer) timeout).intValue(); }
int function() throws IOException { if (isClosed()) throw new SocketException(STR); Object timeout = impl.getOption(SocketOptions.SO_TIMEOUT); if (! (timeout instanceof Integer)) throw new IOException(STR); return ((Integer) timeout).intValue(); }
/** * Retrieves the current value of the SO_TIMEOUT setting. A value of 0 * implies that SO_TIMEOUT is disabled (ie, operations never time out). * This is the number of milliseconds a socket operation can block before * an InterruptedIOException is thrown. * * @return The value of SO_TIMEOUT * * @exception IOException If an error occurs * * @since 1.1 */
Retrieves the current value of the SO_TIMEOUT setting. A value of 0 implies that SO_TIMEOUT is disabled (ie, operations never time out). This is the number of milliseconds a socket operation can block before an InterruptedIOException is thrown
getSoTimeout
{ "repo_name": "SanDisk-Open-Source/SSD_Dashboard", "path": "uefi/gcc/gcc-4.6.3/libjava/classpath/java/net/ServerSocket.java", "license": "gpl-2.0", "size": 17899 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
414
public static Ignite ignite() { return Ignition.localIgnite(); }
static Ignite function() { return Ignition.localIgnite(); }
/** * Gets local Ignite instance. */
Gets local Ignite instance
ignite
{ "repo_name": "pperalta/ignite", "path": "modules/ml/src/main/java/org/apache/ignite/ml/math/impls/CacheUtils.java", "license": "apache-2.0", "size": 12432 }
[ "org.apache.ignite.Ignite", "org.apache.ignite.Ignition" ]
import org.apache.ignite.Ignite; import org.apache.ignite.Ignition;
import org.apache.ignite.*;
[ "org.apache.ignite" ]
org.apache.ignite;
327,926
public static boolean verifyMutable(Object mutableObject, final String sourceCodePathname) throws AssertionFailedError { return verifyMutable(mutableObject, sourceCodePathname, 0); }
static boolean function(Object mutableObject, final String sourceCodePathname) throws AssertionFailedError { return verifyMutable(mutableObject, sourceCodePathname, 0); }
/** * This method attempts to invoke all matching accessor methods on a mutable * object. * * @param mutableObject * Mutable object whose methods are subject to verification. * @param sourceCodePathname * A pathname string or null for no Javadoc tag processing. * @return <code>boolean</code> - Indicates if verification resulted in * the anticipated number of "warnings" and "glitches". * @throws AssertionFailedError */
This method attempts to invoke all matching accessor methods on a mutable object
verifyMutable
{ "repo_name": "autermann/geosoftware", "path": "src/test/java/com/gtcgroup/testutil/TestUtil.java", "license": "gpl-3.0", "size": 26658 }
[ "junit.framework.AssertionFailedError" ]
import junit.framework.AssertionFailedError;
import junit.framework.*;
[ "junit.framework" ]
junit.framework;
2,112,765
@Transactional(readOnly = true) public Page<Node> findAll(Pageable pageable) { log.debug("Request to get all Nodes"); Page<Node> result = nodeRepository.findAll(pageable); return result; }
@Transactional(readOnly = true) Page<Node> function(Pageable pageable) { log.debug(STR); Page<Node> result = nodeRepository.findAll(pageable); return result; }
/** * get all the nodes. * @return the list of entities */
get all the nodes
findAll
{ "repo_name": "CloudWorkers/cloudworker", "path": "server/src/main/java/com/cloudworkers/cloudworker/service/NodeService.java", "license": "apache-2.0", "size": 4024 }
[ "com.cloudworkers.cloudworker.domain.Node", "org.springframework.data.domain.Page", "org.springframework.data.domain.Pageable", "org.springframework.transaction.annotation.Transactional" ]
import com.cloudworkers.cloudworker.domain.Node; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.transaction.annotation.Transactional;
import com.cloudworkers.cloudworker.domain.*; import org.springframework.data.domain.*; import org.springframework.transaction.annotation.*;
[ "com.cloudworkers.cloudworker", "org.springframework.data", "org.springframework.transaction" ]
com.cloudworkers.cloudworker; org.springframework.data; org.springframework.transaction;
77,545
private void setupExecRoot() { try { FileSystemUtils.createDirectoryAndParents(directories.getExecRoot()); } catch (IOException e) { LOG.warning( "failed to create execution root '" + directories.getExecRoot() + "': " + e.getMessage()); } }
void function() { try { FileSystemUtils.createDirectoryAndParents(directories.getExecRoot()); } catch (IOException e) { LOG.warning( STR + directories.getExecRoot() + STR + e.getMessage()); } }
/** * Creates the execRoot dir under outputBase. */
Creates the execRoot dir under outputBase
setupExecRoot
{ "repo_name": "kchodorow/bazel", "path": "src/main/java/com/google/devtools/build/lib/runtime/BlazeWorkspace.java", "license": "apache-2.0", "size": 12243 }
[ "com.google.devtools.build.lib.vfs.FileSystemUtils", "java.io.IOException" ]
import com.google.devtools.build.lib.vfs.FileSystemUtils; import java.io.IOException;
import com.google.devtools.build.lib.vfs.*; import java.io.*;
[ "com.google.devtools", "java.io" ]
com.google.devtools; java.io;
2,377,391
public synchronized Collection<? extends DNSEntry> getDNSEntryList(String name, DNSRecordType type, DNSRecordClass recordClass) { Collection<? extends DNSEntry> entryList = this._getDNSEntryList(name); if (entryList != null) { entryList = new ArrayList<DNSEntry>(entryList); for (Iterator<? extends DNSEntry> i = entryList.iterator(); i.hasNext();) { DNSEntry testDNSEntry = i.next(); if (!testDNSEntry.matchRecordType(type) || (!testDNSEntry.matchRecordClass(recordClass))) { i.remove(); } } } else { entryList = Collections.emptyList(); } return entryList; }
synchronized Collection<? extends DNSEntry> function(String name, DNSRecordType type, DNSRecordClass recordClass) { Collection<? extends DNSEntry> entryList = this._getDNSEntryList(name); if (entryList != null) { entryList = new ArrayList<DNSEntry>(entryList); for (Iterator<? extends DNSEntry> i = entryList.iterator(); i.hasNext();) { DNSEntry testDNSEntry = i.next(); if (!testDNSEntry.matchRecordType(type) (!testDNSEntry.matchRecordClass(recordClass))) { i.remove(); } } } else { entryList = Collections.emptyList(); } return entryList; }
/** * Get all matching DNS entries from the table. * * @param name * @param type * @param recordClass * @return list of entries */
Get all matching DNS entries from the table
getDNSEntryList
{ "repo_name": "farukcank/jmdns", "path": "src/main/java/javax/jmdns/impl/DNSCache.java", "license": "apache-2.0", "size": 15750 }
[ "java.util.ArrayList", "java.util.Collection", "java.util.Collections", "java.util.Iterator", "javax.jmdns.impl.constants.DNSRecordClass", "javax.jmdns.impl.constants.DNSRecordType" ]
import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import javax.jmdns.impl.constants.DNSRecordClass; import javax.jmdns.impl.constants.DNSRecordType;
import java.util.*; import javax.jmdns.impl.constants.*;
[ "java.util", "javax.jmdns" ]
java.util; javax.jmdns;
2,577,803
@Operation(desc = "Gets the sessions creation time", impact = MBeanOperationInfo.INFO) String getSessionCreationTime(@Parameter(desc = "session name", name = "sessionID") String sessionID) throws Exception;
@Operation(desc = STR, impact = MBeanOperationInfo.INFO) String getSessionCreationTime(@Parameter(desc = STR, name = STR) String sessionID) throws Exception;
/** * Gets the session's creation time. */
Gets the session's creation time
getSessionCreationTime
{ "repo_name": "paulgallagher75/activemq-artemis", "path": "artemis-jms-client/src/main/java/org/apache/activemq/artemis/api/jms/management/JMSServerControl.java", "license": "apache-2.0", "size": 24627 }
[ "javax.management.MBeanOperationInfo", "org.apache.activemq.artemis.api.core.management.Operation", "org.apache.activemq.artemis.api.core.management.Parameter" ]
import javax.management.MBeanOperationInfo; import org.apache.activemq.artemis.api.core.management.Operation; import org.apache.activemq.artemis.api.core.management.Parameter;
import javax.management.*; import org.apache.activemq.artemis.api.core.management.*;
[ "javax.management", "org.apache.activemq" ]
javax.management; org.apache.activemq;
2,482,093
public String[] fastSearch(String queryS, String toSearch, int n) { QueryParser parser = new QueryParser(Version.LUCENE_30, toSearch, analyzer); BooleanQuery.setMaxClauseCount(4096); String status = "true"; String[] results = null; if (!"".equals(queryS) && !queryS.trim().startsWith("*")) { Query query = null; if (queryS.endsWith(" ")) { queryS = queryS.substring(0, queryS.length() - 1); } String[] tmp; if (queryS.contains(" ")) { tmp = queryS.replaceAll(" +", " ").trim().split(" "); queryS = new String(); for (int i = 0; i < tmp.length; i++) { queryS += tmp[i]; if (i < tmp.length - 1) { queryS += "* AND "; } } } try { query = parser.parse(queryS + "*"); TopDocs topDoc = null; try { topDoc = indexSearch.search(query, null, n); ScoreDoc[] docs = topDoc.scoreDocs; results = new String[docs.length + 1]; for (int i = 1; i < docs.length + 1; i++) { try { results[i] = indexSearch.doc(docs[i - 1].doc).get(toSearch); } catch (IOException e) { status = "No results! Please try again."; } } results[0] = status; } catch (IOException e) { status = "Please type in more characters to get results."; results = new String[1]; results[0] = status; } catch (Throwable e1) { status = "Please type in more characters to get results."; results = new String[1]; results[0] = status; } } catch (ParseException e) { status = "No results! Please try again."; } return results; } return null; }
String[] function(String queryS, String toSearch, int n) { QueryParser parser = new QueryParser(Version.LUCENE_30, toSearch, analyzer); BooleanQuery.setMaxClauseCount(4096); String status = "true"; String[] results = null; if (!STR*STR STR STR +STR STR STR* AND STR*STRNo results! Please try again.STRPlease type in more characters to get results.STRPlease type in more characters to get results.STRNo results! Please try again."; } return results; } return null; }
/** * Perform the search but only return n results. * @param queryS * the string for what you search in the indexes * @param toSearch * the field in which you search * @param n * first n results * @return array of ScoreDoc[] with n elements */
Perform the search but only return n results
fastSearch
{ "repo_name": "julie-sullivan/phytomine", "path": "intermine/web/main/src/org/intermine/web/autocompletion/LuceneSearchEngine.java", "license": "lgpl-2.1", "size": 6400 }
[ "org.apache.lucene.queryParser.QueryParser", "org.apache.lucene.search.BooleanQuery", "org.apache.lucene.util.Version" ]
import org.apache.lucene.queryParser.QueryParser; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.util.Version;
import org.apache.lucene.*; import org.apache.lucene.search.*; import org.apache.lucene.util.*;
[ "org.apache.lucene" ]
org.apache.lucene;
979,857
protected String contentsOfResource(final String resourceName) throws IOException { // Scanner trick: http://stackoverflow.com/a/5445161 try (InputStream is = this.getClass().getResourceAsStream(resourceName); Scanner scanner = new Scanner(is, "UTF-8")) { scanner.useDelimiter("\\A"); if (scanner.hasNext()) { String returnVal = scanner.next(); returnVal = returnVal.replaceAll("\\r?\\n", System.lineSeparator()); return returnVal; } else { return ""; } } }
String function(final String resourceName) throws IOException { try (InputStream is = this.getClass().getResourceAsStream(resourceName); Scanner scanner = new Scanner(is, "UTF-8")) { scanner.useDelimiter("\\A"); if (scanner.hasNext()) { String returnVal = scanner.next(); returnVal = returnVal.replaceAll(STR, System.lineSeparator()); return returnVal; } else { return ""; } } }
/** * Return contents of a resource file as a string. * * <p> * The resource is found via the class, and copied to a string. LF characters are replaced with CR LF to match PDF * content streams * * @param resourceName name of the resource to copy to a string * @return string containing the contents of the file, with LF translated to CR LF * @throws IOException an I/O operation failed or was interrupted */
Return contents of a resource file as a string. The resource is found via the class, and copied to a string. LF characters are replaced with CR LF to match PDF content streams
contentsOfResource
{ "repo_name": "datalogics-coreyy/pdf-java-toolkit-samples", "path": "src/test/java/com/datalogics/pdf/samples/SampleTest.java", "license": "mit", "size": 19934 }
[ "java.io.IOException", "java.io.InputStream", "java.util.Scanner" ]
import java.io.IOException; import java.io.InputStream; import java.util.Scanner;
import java.io.*; import java.util.*;
[ "java.io", "java.util" ]
java.io; java.util;
2,332,725
public List<Sponsor> getSponsors() { return sponsorRepository.findAll(); }
List<Sponsor> function() { return sponsorRepository.findAll(); }
/** * Gets the sponsors. * * @return the sponsors */
Gets the sponsors
getSponsors
{ "repo_name": "uaijug/chronos", "path": "src/main/java/br/com/uaijug/chronos/event/sponsor/controller/SponsorController.java", "license": "gpl-3.0", "size": 6209 }
[ "br.com.uaijug.chronos.event.sponsor.model.Sponsor", "java.util.List" ]
import br.com.uaijug.chronos.event.sponsor.model.Sponsor; import java.util.List;
import br.com.uaijug.chronos.event.sponsor.model.*; import java.util.*;
[ "br.com.uaijug", "java.util" ]
br.com.uaijug; java.util;
1,213,819
public static NodesStatsRequest nodesStatsRequest(String... nodesIds) { return new NodesStatsRequest(nodesIds); }
static NodesStatsRequest function(String... nodesIds) { return new NodesStatsRequest(nodesIds); }
/** * Creates a nodes stats request against one or more nodes. Pass <tt>null</tt> or an empty array for all nodes. * * @param nodesIds The nodes ids to get the stats for * @return The nodes info request * @see org.elasticsearch.client.ClusterAdminClient#nodesStats(org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest) */
Creates a nodes stats request against one or more nodes. Pass null or an empty array for all nodes
nodesStatsRequest
{ "repo_name": "PhaedrusTheGreek/elasticsearch", "path": "core/src/main/java/org/elasticsearch/client/Requests.java", "license": "apache-2.0", "size": 19483 }
[ "org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest" ]
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest;
import org.elasticsearch.action.admin.cluster.node.stats.*;
[ "org.elasticsearch.action" ]
org.elasticsearch.action;
655,982
public Object clone() throws CloneNotSupportedException { XYPlot clone = (XYPlot) super.clone(); clone.domainAxes = (ObjectList) ObjectUtilities.clone(this.domainAxes); for (int i = 0; i < this.domainAxes.size(); i++) { ValueAxis axis = (ValueAxis) this.domainAxes.get(i); if (axis != null) { ValueAxis clonedAxis = (ValueAxis) axis.clone(); clone.domainAxes.set(i, clonedAxis); clonedAxis.setPlot(clone); clonedAxis.addChangeListener(clone); } } clone.domainAxisLocations = (ObjectList) this.domainAxisLocations.clone(); clone.rangeAxes = (ObjectList) ObjectUtilities.clone(this.rangeAxes); for (int i = 0; i < this.rangeAxes.size(); i++) { ValueAxis axis = (ValueAxis) this.rangeAxes.get(i); if (axis != null) { ValueAxis clonedAxis = (ValueAxis) axis.clone(); clone.rangeAxes.set(i, clonedAxis); clonedAxis.setPlot(clone); clonedAxis.addChangeListener(clone); } } clone.rangeAxisLocations = (ObjectList) ObjectUtilities.clone(this.rangeAxisLocations); // the datasets are not cloned, but listeners need to be added... 
clone.datasets = (ObjectList) ObjectUtilities.clone(this.datasets); for (int i = 0; i < clone.datasets.size(); ++i) { XYDataset d = getDataset(i); if (d != null) { d.addChangeListener(clone); } } clone.datasetToDomainAxisMap = new TreeMap(); clone.datasetToDomainAxisMap.putAll(this.datasetToDomainAxisMap); clone.datasetToRangeAxisMap = new TreeMap(); clone.datasetToRangeAxisMap.putAll(this.datasetToRangeAxisMap); clone.renderers = (ObjectList) ObjectUtilities.clone(this.renderers); for (int i = 0; i < this.renderers.size(); i++) { XYItemRenderer renderer2 = (XYItemRenderer) this.renderers.get(i); if (renderer2 instanceof PublicCloneable) { PublicCloneable pc = (PublicCloneable) renderer2; clone.renderers.set(i, pc.clone()); } } clone.foregroundDomainMarkers = (Map) ObjectUtilities.clone( this.foregroundDomainMarkers ); clone.backgroundDomainMarkers = (Map) ObjectUtilities.clone( this.backgroundDomainMarkers ); clone.foregroundRangeMarkers = (Map) ObjectUtilities.clone( this.foregroundRangeMarkers ); clone.backgroundRangeMarkers = (Map) ObjectUtilities.clone( this.backgroundRangeMarkers ); clone.annotations = (List) ObjectUtilities.deepClone(this.annotations); if (this.fixedDomainAxisSpace != null) { clone.fixedDomainAxisSpace = (AxisSpace) ObjectUtilities.clone( this.fixedDomainAxisSpace ); } if (this.fixedRangeAxisSpace != null) { clone.fixedRangeAxisSpace = (AxisSpace) ObjectUtilities.clone( this.fixedRangeAxisSpace ); } return clone; }
Object function() throws CloneNotSupportedException { XYPlot clone = (XYPlot) super.clone(); clone.domainAxes = (ObjectList) ObjectUtilities.clone(this.domainAxes); for (int i = 0; i < this.domainAxes.size(); i++) { ValueAxis axis = (ValueAxis) this.domainAxes.get(i); if (axis != null) { ValueAxis clonedAxis = (ValueAxis) axis.clone(); clone.domainAxes.set(i, clonedAxis); clonedAxis.setPlot(clone); clonedAxis.addChangeListener(clone); } } clone.domainAxisLocations = (ObjectList) this.domainAxisLocations.clone(); clone.rangeAxes = (ObjectList) ObjectUtilities.clone(this.rangeAxes); for (int i = 0; i < this.rangeAxes.size(); i++) { ValueAxis axis = (ValueAxis) this.rangeAxes.get(i); if (axis != null) { ValueAxis clonedAxis = (ValueAxis) axis.clone(); clone.rangeAxes.set(i, clonedAxis); clonedAxis.setPlot(clone); clonedAxis.addChangeListener(clone); } } clone.rangeAxisLocations = (ObjectList) ObjectUtilities.clone(this.rangeAxisLocations); clone.datasets = (ObjectList) ObjectUtilities.clone(this.datasets); for (int i = 0; i < clone.datasets.size(); ++i) { XYDataset d = getDataset(i); if (d != null) { d.addChangeListener(clone); } } clone.datasetToDomainAxisMap = new TreeMap(); clone.datasetToDomainAxisMap.putAll(this.datasetToDomainAxisMap); clone.datasetToRangeAxisMap = new TreeMap(); clone.datasetToRangeAxisMap.putAll(this.datasetToRangeAxisMap); clone.renderers = (ObjectList) ObjectUtilities.clone(this.renderers); for (int i = 0; i < this.renderers.size(); i++) { XYItemRenderer renderer2 = (XYItemRenderer) this.renderers.get(i); if (renderer2 instanceof PublicCloneable) { PublicCloneable pc = (PublicCloneable) renderer2; clone.renderers.set(i, pc.clone()); } } clone.foregroundDomainMarkers = (Map) ObjectUtilities.clone( this.foregroundDomainMarkers ); clone.backgroundDomainMarkers = (Map) ObjectUtilities.clone( this.backgroundDomainMarkers ); clone.foregroundRangeMarkers = (Map) ObjectUtilities.clone( this.foregroundRangeMarkers ); clone.backgroundRangeMarkers = 
(Map) ObjectUtilities.clone( this.backgroundRangeMarkers ); clone.annotations = (List) ObjectUtilities.deepClone(this.annotations); if (this.fixedDomainAxisSpace != null) { clone.fixedDomainAxisSpace = (AxisSpace) ObjectUtilities.clone( this.fixedDomainAxisSpace ); } if (this.fixedRangeAxisSpace != null) { clone.fixedRangeAxisSpace = (AxisSpace) ObjectUtilities.clone( this.fixedRangeAxisSpace ); } return clone; }
/** * Returns a clone of the plot. * * @return A clone. * * @throws CloneNotSupportedException this can occur if some component of * the plot cannot be cloned. */
Returns a clone of the plot
clone
{ "repo_name": "raedle/univis", "path": "lib/jfreechart-1.0.1/src/org/jfree/chart/plot/XYPlot.java", "license": "lgpl-2.1", "size": 137931 }
[ "java.util.List", "java.util.Map", "java.util.TreeMap", "org.jfree.chart.axis.AxisSpace", "org.jfree.chart.axis.ValueAxis", "org.jfree.chart.renderer.xy.XYItemRenderer", "org.jfree.data.xy.XYDataset", "org.jfree.util.ObjectList", "org.jfree.util.ObjectUtilities", "org.jfree.util.PublicCloneable" ]
import java.util.List; import java.util.Map; import java.util.TreeMap; import org.jfree.chart.axis.AxisSpace; import org.jfree.chart.axis.ValueAxis; import org.jfree.chart.renderer.xy.XYItemRenderer; import org.jfree.data.xy.XYDataset; import org.jfree.util.ObjectList; import org.jfree.util.ObjectUtilities; import org.jfree.util.PublicCloneable;
import java.util.*; import org.jfree.chart.axis.*; import org.jfree.chart.renderer.xy.*; import org.jfree.data.xy.*; import org.jfree.util.*;
[ "java.util", "org.jfree.chart", "org.jfree.data", "org.jfree.util" ]
java.util; org.jfree.chart; org.jfree.data; org.jfree.util;
2,480,658
public static Integer MonthFromDate(LocalDate date) { return date == null ? null : date.getMonthValue(); }
static Integer function(LocalDate date) { return date == null ? null : date.getMonthValue(); }
/** * Returns the month number from the given date. If date is null, null will be returned. * * @param date of {@link LocalDate} type. * @return month value in date. */
Returns the month number from the given date. If date is null, null will be returned
MonthFromDate
{ "repo_name": "data-integrations/wrangler", "path": "wrangler-core/src/main/java/io/cdap/functions/DateAndTime.java", "license": "apache-2.0", "size": 25447 }
[ "java.time.LocalDate" ]
import java.time.LocalDate;
import java.time.*;
[ "java.time" ]
java.time;
1,663,678
public void markValidationStatus(CollectorBatch batch, boolean validStatus) { throwExceptionIfBatchNotAdded(batch); validationStatuses.put(batch.getBatchName(), Boolean.valueOf(validStatus)); }
void function(CollectorBatch batch, boolean validStatus) { throwExceptionIfBatchNotAdded(batch); validationStatuses.put(batch.getBatchName(), Boolean.valueOf(validStatus)); }
/** * Marks whether or not a batch is valid or not * * @param batch collector batch from input xml * @param validStatus valid status fro batch */
Marks whether or not a batch is valid or not
markValidationStatus
{ "repo_name": "quikkian-ua-devops/will-financials", "path": "kfs-core/src/main/java/org/kuali/kfs/gl/report/CollectorReportData.java", "license": "agpl-3.0", "size": 15408 }
[ "org.kuali.kfs.gl.batch.CollectorBatch" ]
import org.kuali.kfs.gl.batch.CollectorBatch;
import org.kuali.kfs.gl.batch.*;
[ "org.kuali.kfs" ]
org.kuali.kfs;
1,822,616
private void save(String content) throws IOException { File tf = getTargetFile(); if (tf.getParentFile() != null && !tf.getParentFile().exists()) { tf.getParentFile().mkdirs(); } if (!tf.exists() || isOverwriteExistingFile()) { PrintWriter pw = new PrintWriter(tf, "UTF-8"); pw.write(content); pw.close(); } else { throw new FileAlreadyExistsException(tf.getAbsolutePath(), "", "If you want to overwrite existing files use param " + CliParameters.FORCE); } }
void function(String content) throws IOException { File tf = getTargetFile(); if (tf.getParentFile() != null && !tf.getParentFile().exists()) { tf.getParentFile().mkdirs(); } if (!tf.exists() isOverwriteExistingFile()) { PrintWriter pw = new PrintWriter(tf, "UTF-8"); pw.write(content); pw.close(); } else { throw new FileAlreadyExistsException(tf.getAbsolutePath(), STRIf you want to overwrite existing files use param " + CliParameters.FORCE); } }
/** * Stores the given <b>content</b> into the configured target file. * * @param content content to save * @throws IOException either writer {@link IOException}, or if the target file already exists and fore overwrite is not enabled. */
Stores the given content into the configured target file
save
{ "repo_name": "arnehaber/xmind2latex", "path": "src/main/java/de/haber/xmind2latex/XMindToLatexExporter.java", "license": "apache-2.0", "size": 16962 }
[ "de.haber.xmind2latex.cli.CliParameters", "java.io.File", "java.io.IOException", "java.io.PrintWriter", "java.nio.file.FileAlreadyExistsException" ]
import de.haber.xmind2latex.cli.CliParameters; import java.io.File; import java.io.IOException; import java.io.PrintWriter; import java.nio.file.FileAlreadyExistsException;
import de.haber.xmind2latex.cli.*; import java.io.*; import java.nio.file.*;
[ "de.haber.xmind2latex", "java.io", "java.nio" ]
de.haber.xmind2latex; java.io; java.nio;
869,040