id | text
---|---|
223551095_2112 | @Override
public InputStream readFrom(String spec, String requestMethod, String body, Map<String, String> headers)
throws IOException {
return getInputStream(spec, requestMethod, headers, body == null ? null : body.getBytes());
} |
223551095_2113 | @Override
public InputStream readAs(String spec, String requestMethod, String body, Map<String, String> headers,
String userName)
throws IOException {
return readFrom(addDoAs(spec, userName), requestMethod, body, headers);
} |
223551095_2114 | @Override
public InputStream readAsCurrent(String spec, String requestMethod, String body, Map<String, String> headers)
throws IOException {
return readAs(spec, requestMethod, body, headers, viewContext.getUsername());
} |
223551095_2115 | @Override
public HttpURLConnection getConnection(String spec,
String requestMethod,
String body,
Map<String, String> headers) throws IOException {
return getHttpURLConnection(spec, requestMethod, headers, body == null ? null : body.getBytes());
} |
223551095_2116 | @Override
public HttpURLConnection getConnectionAs(String spec,
String requestMethod,
String body,
Map<String, String> headers,
String userName) throws IOException {
return getConnection(addDoAs(spec, userName), requestMethod, body, headers);
} |
223551095_2117 | @Override
public HttpURLConnection getConnectionAsCurrent(String spec,
String requestMethod,
String body,
Map<String, String> headers) throws IOException {
return getConnectionAs(spec, requestMethod, body, headers, viewContext.getUsername());
} |
223551095_2122 | public Set<SubResourceDefinition> getSubResourceDefinitions(
String viewName, String version) {
viewName = ViewEntity.getViewName(viewName, version);
return subResourceDefinitionsMap.get(viewName);
} |
223551095_2123 | public void addInstanceDefinition(ViewEntity definition, ViewInstanceEntity instanceDefinition) {
Map<String, ViewInstanceEntity> instanceDefinitions = viewInstanceDefinitions.get(definition);
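// Lazily create the per-view map of instance definitions on first use.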
if (instanceDefinitions == null) {
instanceDefinitions = new HashMap<>();
viewInstanceDefinitions.put(definition, instanceDefinitions);
}
View view = definition.getView();
if (view != null) {
view.onCreate(instanceDefinition);
}
instanceDefinitions.put(instanceDefinition.getName(), instanceDefinition);
} |
223551095_2124 | public void installViewInstance(ViewInstanceEntity instanceEntity)
throws ValidationException, IllegalArgumentException, SystemException {
ViewEntity viewEntity = getDefinition(instanceEntity.getViewName());
if (viewEntity != null) {
String instanceName = instanceEntity.getName();
String viewName = viewEntity.getCommonName();
String version = viewEntity.getVersion();
if (getInstanceDefinition(viewName, version, instanceName) == null) {
if (LOG.isDebugEnabled()) {
LOG.debug("Creating view instance {}/{}/{}", viewName, version, instanceName);
}
instanceEntity.validate(viewEntity, Validator.ValidationContext.PRE_CREATE);
setPersistenceEntities(instanceEntity);
ViewInstanceEntity persistedInstance = mergeViewInstance(instanceEntity, viewEntity.getResourceType());
instanceEntity.setViewInstanceId(persistedInstance.getViewInstanceId());
syncViewInstance(instanceEntity, persistedInstance);
try {
// bind the view instance to a view
bindViewInstance(viewEntity, instanceEntity);
} catch (Exception e) {
String message = "Caught exception installing view instance.";
LOG.error(message, e);
throw new IllegalStateException(message, e);
}
// update the registry
addInstanceDefinition(viewEntity, instanceEntity);
// add the web app context
handlerList.addViewInstance(instanceEntity);
}
} else {
String message = "Attempt to install an instance for an unknown view " +
instanceEntity.getViewName() + ".";
LOG.error(message);
throw new IllegalArgumentException(message);
}
} |
223551095_2126 | public void setViewInstanceProperties(ViewInstanceEntity instanceEntity, Map<String, String> properties,
ViewConfig viewConfig, ClassLoader classLoader) throws SystemException {
try {
Masker masker = getMasker(viewConfig.getMaskerClass(classLoader));
Map<String, ParameterConfig> parameterConfigMap = new HashMap<>();
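// Index parameter configs by name so masked parameters can be identified when copying properties.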
for (ParameterConfig paramConfig : viewConfig.getParameters()) {
parameterConfigMap.put(paramConfig.getName(), paramConfig);
}
for (Map.Entry<String, String> entry : properties.entrySet()) {
String name = entry.getKey();
String value = entry.getValue();
ParameterConfig parameterConfig = parameterConfigMap.get(name);
if (parameterConfig != null && parameterConfig.isMasked()) {
value = masker.mask(value);
}
instanceEntity.putProperty(name, value);
}
} catch (Exception e) {
throw new SystemException("Caught exception while setting instance property.", e);
}
} |
223551095_2127 | @Transactional
public void uninstallViewInstance(ViewInstanceEntity instanceEntity) throws IllegalStateException {
try {
viewInstanceOperationHandler.uninstallViewInstance(instanceEntity);
updateCaches(instanceEntity);
} catch (IllegalStateException illegalStateException) {
LOG.error("Exception occurred while uninstalling view : {}", instanceEntity, illegalStateException);
throw illegalStateException;
}
} |
223551095_2130 | protected static boolean extractViewArchive(String archivePath, ViewModule viewModule, boolean systemOnly)
throws Exception {
Injector injector = Guice.createInjector(viewModule);
ViewExtractor extractor = injector.getInstance(ViewExtractor.class);
ViewArchiveUtility archiveUtility = injector.getInstance(ViewArchiveUtility.class);
Configuration configuration = injector.getInstance(Configuration.class);
File viewDir = configuration.getViewsDir();
String extractedArchivesPath = viewDir.getAbsolutePath() +
File.separator + EXTRACTED_ARCHIVES_DIR;
if (extractor.ensureExtractedArchiveDirectory(extractedArchivesPath)) {
File archiveFile = archiveUtility.getFile(archivePath);
ViewConfig viewConfig = archiveUtility.getViewConfigFromArchive(archiveFile);
String commonName = viewConfig.getName();
String version = viewConfig.getVersion();
String viewName = ViewEntity.getViewName(commonName, version);
String extractedArchiveDirPath = extractedArchivesPath + File.separator + viewName;
File extractedArchiveDirFile = archiveUtility.getFile(extractedArchiveDirPath);
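// An existing extraction directory means this view version was extracted previously; skip it.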
if (!extractedArchiveDirFile.exists()) {
ViewEntity viewDefinition = new ViewEntity(viewConfig, configuration, extractedArchiveDirPath);
if (!systemOnly || viewDefinition.isSystem()) {
ClassLoader classLoader = null;
try {
List<File> additionalPaths = getViewsAdditionalClasspath(configuration);
classLoader = extractor.extractViewArchive(viewDefinition, archiveFile, extractedArchiveDirFile, additionalPaths);
return true;
} finally {
if (classLoader instanceof Closeable) {
Closeables.closeSilently((Closeable) classLoader);
}
}
}
}
}
return false;
} |
223551095_2132 | public ClassLoader extractViewArchive(ViewEntity view, File viewArchive, File archiveDir, List<File> viewsAdditionalClasspath)
throws ExtractionException {
String archivePath = archiveDir.getAbsolutePath();
try {
// Remove directory if jar was updated since last extracting
if (archiveDir.exists() && viewArchive != null && viewArchive.lastModified() > archiveDir.lastModified()) {
FileUtils.deleteDirectory(archiveDir);
}
// Skip if the archive has already been extracted
if (!archiveDir.exists()) {
String msg = "Creating archive folder " + archivePath + ".";
view.setStatusDetail(msg);
LOG.info(msg);
if (archiveDir.mkdir()) {
JarInputStream jarInputStream = archiveUtility.getJarFileStream(viewArchive);
try {
msg = "Extracting files from " + viewArchive.getName() + ".";
view.setStatusDetail(msg);
LOG.info(msg);
// create the META-INF directory
File metaInfDir = archiveUtility.getFile(archivePath + File.separator + "META-INF");
if (!metaInfDir.mkdir()) {
msg = "Could not create archive META-INF directory.";
view.setStatusDetail(msg);
LOG.error(msg);
throw new ExtractionException(msg);
}
JarEntry jarEntry;
while ((jarEntry = jarInputStream.getNextJarEntry()) != null) {
try {
String entryPath = archivePath + File.separator + jarEntry.getName();
LOG.debug("Extracting {}", entryPath);
File entryFile = archiveUtility.getFile(entryPath);
if (jarEntry.isDirectory()) {
LOG.debug("Making directory {}", entryPath);
if (!entryFile.mkdir()) {
msg = "Could not create archive entry directory " + entryPath + ".";
view.setStatusDetail(msg);
LOG.error(msg);
throw new ExtractionException(msg);
}
} else {
FileOutputStream fos = archiveUtility.getFileOutputStream(entryFile);
try {
LOG.debug("Begin copying from {} to {}", jarEntry.getName(), entryPath);
byte[] buffer = new byte[BUFFER_SIZE];
int n;
while((n = jarInputStream.read(buffer)) > -1) {
fos.write(buffer, 0, n);
}
LOG.debug("Finish copying from {} to {}", jarEntry.getName(), entryPath);
} finally {
fos.flush();
fos.close();
}
}
} finally {
jarInputStream.closeEntry();
}
}
} finally {
jarInputStream.close();
}
} else {
msg = "Could not create archive directory " + archivePath + ".";
view.setStatusDetail(msg);
LOG.error(msg);
throw new ExtractionException(msg);
}
}
ViewConfig viewConfig = archiveUtility.getViewConfigFromExtractedArchive(archivePath, false);
return getArchiveClassLoader(viewConfig, archiveDir, viewsAdditionalClasspath);
} catch (Exception e) {
String msg = "Caught exception trying to extract the view archive " + archivePath + ".";
view.setStatusDetail(msg);
LOG.error(msg, e);
throw new ExtractionException(msg, e);
}
} |
223551095_2133 | public boolean ensureExtractedArchiveDirectory(String extractedArchivesPath) {
File extractedArchiveDir = archiveUtility.getFile(extractedArchivesPath);
return extractedArchiveDir.exists() || extractedArchiveDir.mkdir();
} |
223551095_2138 | public static Result mkdir(String directoryPath, boolean sudo) throws IOException, InterruptedException {
// If this directory already exists, do not try to create it
if (pathExists(directoryPath, sudo).isSuccessful()) {
return new Result(0, "The directory already exists, skipping.", ""); // Success!
} else {
ArrayList<String> command = new ArrayList<>();
command.add("/bin/mkdir");
if (!WINDOWS) {
command.add("-p"); // create parent directories
}
command.add(directoryPath);
return runCommand(command, null, null, sudo);
}
} |
223551095_2139 | public static Result copyFile(String srcFile, String destFile, boolean force, boolean sudo) throws IOException, InterruptedException {
ArrayList<String> command = new ArrayList<>();
if (WINDOWS) {
command.add("copy");
if (force) {
command.add("/Y"); // force overwrite
}
} else {
command.add("cp");
command.add("-p"); // preserve mode, ownership, timestamps
if (force) {
command.add("-f"); // force overwrite
}
}
command.add(srcFile);
command.add(destFile);
return runCommand(command, null, null, sudo);
} |
223551095_2147 | public boolean isRunning() {
return !isStopped;
} |
223551095_2148 | public void start() {
pauseLock.lock();
try {
isStopped = false;
unpaused.signalAll();
} finally {
pauseLock.unlock();
}
} |
223551095_2152 | public static <T> List<Set<T>> split(Set<T> original, int subsetSize) {
if(subsetSize <= 0) {
throw new IllegalArgumentException("Incorrect max size");
}
if(original == null || original.isEmpty()) {
return Collections.emptyList();
}
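// Number of subsets needed: ceil(size / subsetSize); the last subset may hold fewer elements.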
int subsetCount = (int) (Math.ceil((double)original.size() / subsetSize));
ArrayList<Set<T>> subsets = new ArrayList<>(subsetCount);
Iterator<T> iterator = original.iterator();
for(int i = 0; i < subsetCount; i++) {
Set<T> subset = new LinkedHashSet<>(subsetSize);
for(int j = 0; j < subsetSize && iterator.hasNext(); j++) {
subset.add(iterator.next());
}
subsets.add(subset);
}
return subsets;
} |
223551095_2153 | public String readPassword(String passwordProperty, String defaultPassword) {
if (StringUtils.isNotBlank(passwordProperty)) {
if (CredentialProvider.isAliasString(passwordProperty)) {
return readPasswordFromStore(passwordProperty);
} else {
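// Not an alias: resolve via readPasswordFromFile; the resolved value may itself be an alias into the credential store.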
final String pw = readPasswordFromFile(passwordProperty, defaultPassword);
return CredentialProvider.isAliasString(pw) ? readPasswordFromStore(pw) : pw;
}
}
return defaultPassword;
} |
223551095_2158 | public static String getRemoteAddress(HttpServletRequest request) {
String ip = null;
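// Walk the configured headers (typically proxy-set headers such as X-Forwarded-For) for a usable client address.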
for (String header : headersToCheck) {
ip = request.getHeader(header);
if (!isRemoteAddressUnknown(ip)) {
break;
}
}
if (isRemoteAddressUnknown(ip)) {
ip = request.getRemoteAddr();
}
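// A comma-separated value indicates a proxy chain; keep only the first (originating) address.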
if (containsMultipleRemoteAddresses(ip)) {
ip = ip.substring(0, ip.indexOf(","));
}
return ip;
} |
223551095_2162 | public static MultiValueMap<String, String> getQueryStringParameters(HttpServletRequest request) {
// Manually parse the query string rather than use HttpServletRequest#getParameter so that
// the message body remains intact and available. Calling HttpServletRequest#getParameter
// could interfere with processing the body of this request later since the body needs to be
// parsed to find any form parameters.
String queryString = request.getQueryString();
return (StringUtils.isEmpty(queryString))
? null
: UriComponentsBuilder.newInstance().query(queryString).build().getQueryParams();
} |
223551095_2163 | public static List<String> getQueryStringParameterValues(HttpServletRequest request, String parameterName) {
MultiValueMap<String, String> valueMap = getQueryStringParameters(request);
return ((valueMap == null) || !valueMap.containsKey(parameterName))
? null
: valueMap.get(parameterName);
} |
223551095_2164 | public static String getQueryStringParameterValue(HttpServletRequest request, String parameterName) {
MultiValueMap<String, String> valueMap = getQueryStringParameters(request);
return ((valueMap == null) || !valueMap.containsKey(parameterName))
? null
: valueMap.getFirst(parameterName);
} |
223551095_2165 | public static String jaxbToString(Object jaxbObj) throws JAXBException,
JsonGenerationException, JsonMappingException, IOException {
return getGson().toJson(jaxbObj);
} |
223551095_2166 | public static Map<String, Set<String>> getClusterHostInfo(Cluster cluster) throws AmbariException {
//Fill hosts and ports lists
Set<String> hostsSet = new LinkedHashSet<>();
List<Integer> portsList = new ArrayList<>();
List<String> rackList = new ArrayList<>();
List<String> ipV4List = new ArrayList<>();
Collection<Host> allHosts = cluster.getHosts();
for (Host host : allHosts) {
hostsSet.add(host.getHostName());
Integer currentPingPort = host.getCurrentPingPort();
portsList.add(currentPingPort == null ? DEFAULT_PING_PORT : currentPingPort);
String rackInfo = host.getRackInfo();
rackList.add(StringUtils.isEmpty(rackInfo) ? DEFAULT_RACK : rackInfo);
String iPv4 = host.getIPv4();
ipV4List.add(StringUtils.isEmpty(iPv4) ? DEFAULT_IPV4_ADDRESS : iPv4);
}
// add hosts from topology manager
Map<String, Collection<String>> pendingHostComponents = topologyManager.getPendingHostComponents();
for (String hostname : pendingHostComponents.keySet()) {
if (!hostsSet.contains(hostname)) {
hostsSet.add(hostname);
portsList.add(DEFAULT_PING_PORT);
rackList.add(DEFAULT_RACK);
ipV4List.add(DEFAULT_IPV4_ADDRESS);
}
}
List<String> hostsList = new ArrayList<>(hostsSet);
Map<String, String> additionalComponentToClusterInfoKeyMap = new HashMap<>();
// Fill hosts for services
Map<String, SortedSet<Integer>> hostRolesInfo = new HashMap<>();
for (Map.Entry<String, Service> serviceEntry : cluster.getServices().entrySet()) {
Service service = serviceEntry.getValue();
for (Map.Entry<String, ServiceComponent> serviceComponentEntry : service.getServiceComponents().entrySet()) {
ServiceComponent serviceComponent = serviceComponentEntry.getValue();
String componentName = serviceComponent.getName();
String roleName = componentToClusterInfoKeyMap.get(componentName);
if(null == roleName) {
roleName = additionalComponentToClusterInfoKeyMap.get(componentName);
}
if (null == roleName && !serviceComponent.isClientComponent()) {
roleName = componentName.toLowerCase() + "_hosts";
additionalComponentToClusterInfoKeyMap.put(componentName, roleName);
}
if (roleName == null) {
continue;
}
for (String hostName : serviceComponent.getServiceComponentHosts().keySet()) {
SortedSet<Integer> hostsForComponentsHost = hostRolesInfo.get(roleName);
if (hostsForComponentsHost == null) {
hostsForComponentsHost = new TreeSet<>();
hostRolesInfo.put(roleName, hostsForComponentsHost);
}
int hostIndex = hostsList.indexOf(hostName);
//Add index of host to current host role
hostsForComponentsHost.add(hostIndex);
}
}
}
// add components from topology manager
for (Map.Entry<String, Collection<String>> entry : pendingHostComponents.entrySet()) {
String hostname = entry.getKey();
Collection<String> hostComponents = entry.getValue();
for (String hostComponent : hostComponents) {
String roleName = getClusterHostInfoKey(hostComponent);
if (null == roleName) {
roleName = additionalComponentToClusterInfoKeyMap.get(hostComponent);
}
if (null == roleName) {
// even though all mappings are being added, componentToClusterInfoKeyMap is
// a higher priority lookup
for (Service service : cluster.getServices().values()) {
for (ServiceComponent sc : service.getServiceComponents().values()) {
if (sc.getName().equals(hostComponent)) {
roleName = hostComponent.toLowerCase() + "_hosts";
additionalComponentToClusterInfoKeyMap.put(hostComponent, roleName);
}
}
}
}
if (roleName != null) {
SortedSet<Integer> hostsForComponentsHost = hostRolesInfo.get(roleName);
if (hostsForComponentsHost == null) {
hostsForComponentsHost = new TreeSet<>();
hostRolesInfo.put(roleName, hostsForComponentsHost);
}
int hostIndex = hostsList.indexOf(hostname);
if (hostIndex != -1) {
if (!hostsForComponentsHost.contains(hostIndex)) {
hostsForComponentsHost.add(hostIndex);
}
} else {
//todo: I don't think that this can happen
//todo: determine if it can and if so, handle properly
//todo: if it 'cant' should probably enforce invariant
throw new RuntimeException("Unable to get host index for host: " + hostname);
}
}
}
}
Map<String, Set<String>> clusterHostInfo = new HashMap<>();
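// replaceRanges presumably compresses each sorted index set into compact range notation to keep the payload small.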
for (Map.Entry<String, SortedSet<Integer>> entry : hostRolesInfo.entrySet()) {
TreeSet<Integer> sortedSet = new TreeSet<>(entry.getValue());
Set<String> replacedRangesSet = replaceRanges(sortedSet);
clusterHostInfo.put(entry.getKey(), replacedRangesSet);
}
clusterHostInfo.put(HOSTS_LIST, hostsSet);
clusterHostInfo.put(PORTS, replaceMappedRanges(portsList));
clusterHostInfo.put(IPV4_ADDRESSES, replaceMappedRanges(ipV4List));
clusterHostInfo.put(RACKS, replaceMappedRanges(rackList));
// Fill server host
/*
* Note: We don't replace server host name, port, ssl usage by an index (like we do
* with component hostnames), because if ambari-agent is not installed
* at ambari-server host, then allHosts map will not contain
* ambari-server hostname.
*/
clusterHostInfo.put(AMBARI_SERVER_HOST, Sets.newHashSet(getHostName()));
boolean serverUseSsl = configuration.getApiSSLAuthentication();
int port = serverUseSsl ? configuration.getClientSSLApiPort() : configuration.getClientApiPort();
clusterHostInfo.put(AMBARI_SERVER_PORT, Sets.newHashSet(Integer.toString(port)));
clusterHostInfo.put(AMBARI_SERVER_USE_SSL, Sets.newHashSet(Boolean.toString(serverUseSsl)));
return clusterHostInfo;
} |
223551095_2167 | public static void useAmbariJdkInCommandParams(Map<String, String> commandParams, Configuration configuration) {
if (StringUtils.isNotEmpty(configuration.getJavaHome()) && !configuration.getJavaHome().equals(configuration.getStackJavaHome())) {
commandParams.put(AMBARI_JAVA_HOME, configuration.getJavaHome());
commandParams.put(AMBARI_JAVA_VERSION, String.valueOf(configuration.getJavaVersion()));
if (StringUtils.isNotEmpty(configuration.getJDKName())) { // if not custom jdk
commandParams.put(AMBARI_JDK_NAME, configuration.getJDKName());
}
if (StringUtils.isNotEmpty(configuration.getJCEName())) { // if not custom jdk
commandParams.put(AMBARI_JCE_NAME, configuration.getJCEName());
}
}
} |
223551095_2170 | public static void useStackJdkIfExists(Map<String, String> hostLevelParams, Configuration configuration) {
// set defaults first
hostLevelParams.put(JAVA_HOME, configuration.getJavaHome());
hostLevelParams.put(JDK_NAME, configuration.getJDKName());
hostLevelParams.put(JCE_NAME, configuration.getJCEName());
hostLevelParams.put(JAVA_VERSION, String.valueOf(configuration.getJavaVersion()));
if (StringUtils.isNotEmpty(configuration.getStackJavaHome())
&& !configuration.getStackJavaHome().equals(configuration.getJavaHome())) {
hostLevelParams.put(JAVA_HOME, configuration.getStackJavaHome());
if (StringUtils.isNotEmpty(configuration.getStackJavaVersion())) {
hostLevelParams.put(JAVA_VERSION, configuration.getStackJavaVersion());
}
if (StringUtils.isNotEmpty(configuration.getStackJDKName())) {
hostLevelParams.put(JDK_NAME, configuration.getStackJDKName());
} else {
hostLevelParams.put(JDK_NAME, null); // custom jdk for stack
}
if (StringUtils.isNotEmpty(configuration.getStackJCEName())) {
hostLevelParams.put(JCE_NAME, configuration.getStackJCEName());
} else {
hostLevelParams.put(JCE_NAME, null); // custom jdk for stack
}
}
} |
223551095_2173 | @Subscribe
@Transactional
public void onServiceEvent(ServiceInstalledEvent event) {
if (LOG.isDebugEnabled()) {
LOG.debug(event.toString());
}
try {
Cluster cluster = clusters.get().getClusterById(event.getClusterId());
Map<String, ServiceComponent> serviceComponents = cluster.getService(event.getServiceName()).getServiceComponents();
// Determine hosts that become OUT_OF_SYNC when adding components for new service
Map<String, List<ServiceComponent>> affectedHosts = new HashMap<>();
for (ServiceComponent component : serviceComponents.values()) {
for (String hostname : component.getServiceComponentHosts().keySet()) {
if (! affectedHosts.containsKey(hostname)) {
affectedHosts.put(hostname, new ArrayList<>());
}
affectedHosts.get(hostname).add(component);
}
}
for (String hostName : affectedHosts.keySet()) {
List<HostVersionEntity> hostVersionEntities =
hostVersionDAO.get().findByClusterAndHost(cluster.getClusterName(), hostName);
for (HostVersionEntity hostVersionEntity : hostVersionEntities) {
RepositoryVersionEntity repositoryVersion = hostVersionEntity.getRepositoryVersion();
// If added components do not advertise version, it makes no sense to mark version OUT_OF_SYNC
// We perform check per-stack version, because component may be not versionAdvertised in current
// stack, but become versionAdvertised in some future (installed, but not yet upgraded to) stack
boolean hasChangedComponentsWithVersions = false;
String serviceName = event.getServiceName();
for (ServiceComponent comp : affectedHosts.get(hostName)) {
String componentName = comp.getName();
// Skip lookup if stack does not contain the component
if (!ami.get().isValidServiceComponent(repositoryVersion.getStackName(),
repositoryVersion.getStackVersion(), serviceName, componentName)) {
LOG.debug("Component not found is host stack, stack={}, version={}, service={}, component={}",
repositoryVersion.getStackName(), repositoryVersion.getStackVersion(), serviceName, componentName);
continue;
}
ComponentInfo component = ami.get().getComponent(repositoryVersion.getStackName(),
repositoryVersion.getStackVersion(), serviceName, componentName);
if (component.isVersionAdvertised()) {
hasChangedComponentsWithVersions = true;
}
}
if (! hasChangedComponentsWithVersions) {
continue;
}
if (hostVersionEntity.getState().equals(RepositoryVersionState.INSTALLED)) {
hostVersionEntity.setState(RepositoryVersionState.OUT_OF_SYNC);
hostVersionDAO.get().merge(hostVersionEntity);
}
}
}
} catch (AmbariException e) {
LOG.error("Can not update hosts about out of sync", e);
}
} |
223551095_2174 | @Subscribe
@Transactional
public void onServiceComponentEvent(ServiceComponentInstalledEvent event) {
if (LOG.isDebugEnabled()) {
LOG.debug(event.toString());
}
m_lock.lock();
try {
Cluster cluster = clusters.get().getClusterById(event.getClusterId());
List<HostVersionEntity> hostVersionEntities =
hostVersionDAO.get().findByClusterAndHost(cluster.getClusterName(), event.getHostName());
Service service = cluster.getService(event.getServiceName());
ServiceComponent serviceComponent = service.getServiceComponent(event.getComponentName());
RepositoryVersionEntity componentRepo = serviceComponent.getDesiredRepositoryVersion();
for (HostVersionEntity hostVersionEntity : hostVersionEntities) {
StackEntity hostStackEntity = hostVersionEntity.getRepositoryVersion().getStack();
StackId hostStackId = new StackId(hostStackEntity);
// If added components do not advertise version, it makes no sense to mark version OUT_OF_SYNC
// We perform check per-stack version, because component may be not versionAdvertised in current
// stack, but become versionAdvertised in some future (installed, but not yet upgraded to) stack
String serviceName = event.getServiceName();
String componentName = event.getComponentName();
// Skip lookup if stack does not contain the component
if (!ami.get().isValidServiceComponent(hostStackId.getStackName(),
hostStackId.getStackVersion(), serviceName, componentName)) {
LOG.debug("Component not found is host stack, stack={}, version={}, service={}, component={}",
hostStackId.getStackName(), hostStackId.getStackVersion(), serviceName, componentName);
continue;
}
ComponentInfo component = ami.get().getComponent(hostStackId.getStackName(),
hostStackId.getStackVersion(), serviceName, componentName);
if (!component.isVersionAdvertised()) {
RepositoryVersionState state = checkAllHostComponents(hostStackId, hostVersionEntity.getHostEntity());
if (null != state) {
hostVersionEntity.setState(state);
hostVersionDAO.get().merge(hostVersionEntity);
}
continue;
}
// !!! we shouldn't be changing other versions to OUT_OF_SYNC if the event
// component repository doesn't match
if (!hostVersionEntity.getRepositoryVersion().equals(componentRepo)) {
continue;
}
switch (hostVersionEntity.getState()) {
case INSTALLED:
case NOT_REQUIRED:
hostVersionEntity.setState(RepositoryVersionState.OUT_OF_SYNC);
hostVersionDAO.get().merge(hostVersionEntity);
break;
default:
break;
}
}
} catch (AmbariException e) {
LOG.error("Can not update hosts about out of sync", e);
} finally {
m_lock.unlock();
}
} |
223551095_2175 | @Subscribe
@Transactional
public void onHostEvent(HostsAddedEvent event) {
if (LOG.isDebugEnabled()) {
LOG.debug(event.toString());
}
// create host version entries for every repository
@Experimental(feature=ExperimentalFeature.PATCH_UPGRADES, comment="Eventually take into account deleted repositories")
List<RepositoryVersionEntity> repos = repositoryVersionDAO.get().findAll();
for (String hostName : event.getHostNames()) {
HostEntity hostEntity = hostDAO.get().findByName(hostName);
for (RepositoryVersionEntity repositoryVersion : repos) {
// we don't have the knowledge yet to know if we need the record
HostVersionEntity missingHostVersion = new HostVersionEntity(hostEntity,
repositoryVersion, RepositoryVersionState.NOT_REQUIRED);
LOG.info("Creating host version for {}, state={}, repo={} (repo_id={})",
missingHostVersion.getHostName(), missingHostVersion.getState(),
missingHostVersion.getRepositoryVersion().getVersion(), missingHostVersion.getRepositoryVersion().getId());
hostVersionDAO.get().create(missingHostVersion);
hostDAO.get().merge(hostEntity);
hostEntity.getHostVersionEntities().add(missingHostVersion);
hostEntity = hostDAO.get().merge(hostEntity);
}
}
} |
223551095_2176 | @Subscribe
public void onAmbariEvent(HostComponentVersionAdvertisedEvent event) throws AmbariException {
LOG.debug("Received event {}", event);
Cluster cluster = event.getCluster();
ServiceComponentHost sch = event.getServiceComponentHost();
String newVersion = event.getVersion();
if (StringUtils.isEmpty(newVersion)) {
return;
}
// if the cluster is upgrading, there's no need to update the repo version -
// it better be right
if (null != event.getRepositoryVersionId() && null == cluster.getUpgradeInProgress()) {
// !!! make sure the repo_version record actually has the same version.
// This is NOT true when installing a cluster using a public repo where the
// exact version is not known in advance.
RepositoryVersionEntity rve = repositoryVersionDAO.findByPK(event.getRepositoryVersionId());
if (null != rve) {
boolean updated = false;
String currentRepoVersion = rve.getVersion();
if (!StringUtils.equals(currentRepoVersion, newVersion)) {
rve.setVersion(newVersion);
rve.setResolved(true);
repositoryVersionDAO.merge(rve);
updated = true;
} else {
// the reported versions are the same - we should ensure that the repo
// is resolved
if (!rve.isResolved()) {
rve.setResolved(true);
repositoryVersionDAO.merge(rve);
updated = true;
}
}
if (updated) {
m_hostLevelParamsHolder.get().updateData(m_hostLevelParamsHolder.get().getCurrentData(sch.getHost().getHostId()));
}
}
}
// Update host component version value if needed
try {
// get the component information for the desired stack; if a component
// moves from UNKNOWN to providing a version, we must do the version
// advertised check against the target stack
StackId desiredStackId = sch.getDesiredStackId();
AmbariMetaInfo ambariMetaInfo = ambariMetaInfoProvider.get();
ComponentInfo componentInfo = ambariMetaInfo.getComponent(desiredStackId.getStackName(),
desiredStackId.getStackVersion(), sch.getServiceName(), sch.getServiceComponentName());
// not advertising a version, do nothing
if (!componentInfo.isVersionAdvertised()) {
// that's odd; a version came back - log it and still do nothing
if (!StringUtils.equalsIgnoreCase(UNKNOWN_VERSION, newVersion)) {
LOG.warn(
"ServiceComponent {} doesn't advertise version, however ServiceHostComponent {} on host {} advertised version as {}. Skipping version update",
sch.getServiceComponentName(), sch.getServiceComponentName(), sch.getHostName(),
newVersion);
}
return;
}
ServiceComponent sc = cluster.getService(sch.getServiceName()).getServiceComponent(
sch.getServiceComponentName());
// process the UNKNOWN version
if (StringUtils.equalsIgnoreCase(UNKNOWN_VERSION, newVersion)) {
processUnknownDesiredVersion(cluster, sc, sch, newVersion);
return;
}
processComponentAdvertisedVersion(cluster, sc, sch, newVersion);
} catch (Exception e) {
LOG.error(
"Unable to propagate version for ServiceHostComponent on component: {}, host: {}. Error: {}",
sch.getServiceComponentName(), sch.getHostName(), e.getMessage());
}
} |
223551095_2179 | @Subscribe
public void onTaskUpdateEvent(TaskUpdateEvent event) {
LOG.debug("Received task update event {}", event);
List<HostRoleCommand> hostRoleCommandListAll = event.getHostRoleCommands();
List<HostRoleCommand> hostRoleCommandWithReceivedStatus = new ArrayList<>();
Set<StageEntityPK> stagesWithReceivedTaskStatus = new HashSet<>();
Set<Long> requestIdsWithReceivedTaskStatus = new HashSet<>();
Set<RequestUpdateEvent> requestsToPublish = new HashSet<>();
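// Track only the tasks, stages, and requests whose statuses were actually reported, so the rollup and STOMP publishing below touch nothing else.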
for (HostRoleCommand hostRoleCommand : hostRoleCommandListAll) {
Long reportedTaskId = hostRoleCommand.getTaskId();
HostRoleCommand activeTask = activeTasksMap.get(reportedTaskId);
if (activeTask == null) {
LOG.error("Received update for task {} which is not being tracked as a running task", reportedTaskId);
} else {
hostRoleCommandWithReceivedStatus.add(hostRoleCommand);
StageEntityPK stageEntityPK = new StageEntityPK();
stageEntityPK.setRequestId(hostRoleCommand.getRequestId());
stageEntityPK.setStageId(hostRoleCommand.getStageId());
stagesWithReceivedTaskStatus.add(stageEntityPK);
requestIdsWithReceivedTaskStatus.add(hostRoleCommand.getRequestId());
if (!activeTask.getStatus().equals(hostRoleCommand.getStatus())) {
// Ignore requests not related to any cluster. "requests" topic is used for cluster requests only.
Long clusterId = activeRequestMap.get(hostRoleCommand.getRequestId()).getClusterId();
if (clusterId != null && clusterId != -1) {
Set<RequestUpdateEvent.HostRoleCommand> hostRoleCommands = new HashSet<>();
hostRoleCommands.add(new RequestUpdateEvent.HostRoleCommand(hostRoleCommand.getTaskId(),
hostRoleCommand.getRequestId(),
hostRoleCommand.getStatus(),
hostRoleCommand.getHostName()));
requestsToPublish.add(new RequestUpdateEvent(hostRoleCommand.getRequestId(),
activeRequestMap.get(hostRoleCommand.getRequestId()).getStatus(), hostRoleCommands));
} else {
LOG.debug("No STOMP request update event was fired for host component status change due no cluster related, " +
"request id: {}, role: {}, role command: {}, host: {}, task id: {}, old state: {}, new state: {}",
hostRoleCommand.getRequestId(),
hostRoleCommand.getRole(),
hostRoleCommand.getRoleCommand(),
hostRoleCommand.getHostName(),
hostRoleCommand.getTaskId(),
activeTask.getStatus(),
hostRoleCommand.getStatus());
}
}
}
}
updateActiveTasksMap(hostRoleCommandWithReceivedStatus);
Boolean didAnyStageStatusUpdated = updateActiveStagesStatus(stagesWithReceivedTaskStatus, hostRoleCommandListAll);
// Presumption: if none of the running stages' statuses changed,
// then none of the running requests' statuses needs to be updated
if (didAnyStageStatusUpdated) {
updateActiveRequestsStatus(requestIdsWithReceivedTaskStatus, stagesWithReceivedTaskStatus);
}
for (RequestUpdateEvent requestToPublish : requestsToPublish) {
STOMPUpdatePublisher.publish(requestToPublish);
}
} |
223551095_2181 | public void scheduleBatch(RequestExecution requestExecution)
throws AmbariException {
if (!isSchedulerAvailable()) {
throw new AmbariException("Scheduler unavailable.");
}
// Check if scheduler is running, if not start immediately before scheduling jobs
try {
if (!executionScheduler.isSchedulerStarted()) {
executionScheduler.startScheduler(null);
}
} catch (SchedulerException e) {
LOG.error("Unable to determine scheduler state.", e);
throw new AmbariException("Scheduler unavailable.");
}
// Create and persist jobs based on batches
JobDetail firstJobDetail = persistBatch(requestExecution);
if (firstJobDetail == null) {
throw new AmbariException("Unable to schedule jobs; no batch jobs were persisted for the request execution.");
}
// Create a cron trigger for the first batch job
// If no schedule is specified create simple trigger to fire right away
Schedule schedule = requestExecution.getSchedule();
if (schedule != null) {
String triggerExpression = schedule.getScheduleExpression();
Date startDate = null;
Date endDate = null;
try {
String startTime = schedule.getStartTime();
String endTime = schedule.getEndTime();
startDate = startTime != null && !startTime.isEmpty() ?
DateUtils.convertToDate(startTime) : new Date();
endDate = endTime != null && !endTime.isEmpty() ?
DateUtils.convertToDate(endTime) : null;
} catch (ParseException e) {
LOG.error("Unable to parse startTime / endTime.", e);
}
Trigger trigger = newTrigger()
.withIdentity(REQUEST_EXECUTION_TRIGGER_PREFIX + "-" +
requestExecution.getId(), ExecutionJob.LINEAR_EXECUTION_TRIGGER_GROUP)
.withSchedule(cronSchedule(triggerExpression)
.withMisfireHandlingInstructionFireAndProceed())
.forJob(firstJobDetail)
.startAt(startDate)
.endAt(endDate)
.build();
try {
executionScheduler.scheduleJob(trigger);
LOG.debug("Scheduled trigger next fire time: {}", trigger.getNextFireTime());
} catch (SchedulerException e) {
LOG.error("Unable to schedule request execution.", e);
throw new AmbariException(e.getMessage());
}
} else {
// Create trigger for immediate job execution
Trigger trigger = newTrigger()
.forJob(firstJobDetail)
.withIdentity(REQUEST_EXECUTION_TRIGGER_PREFIX + "-" +
requestExecution.getId(), ExecutionJob.LINEAR_EXECUTION_TRIGGER_GROUP)
.withSchedule(simpleSchedule().withMisfireHandlingInstructionFireNow())
.startNow()
.build();
try {
executionScheduler.scheduleJob(trigger);
LOG.debug("Scheduled trigger next fire time: {}", trigger.getNextFireTime());
} catch (SchedulerException e) {
LOG.error("Unable to schedule request execution.", e);
throw new AmbariException(e.getMessage());
}
}
} |
223551095_2182 | public void deleteAllJobs(RequestExecution requestExecution) throws AmbariException {
if (!isSchedulerAvailable()) {
throw new AmbariException("Scheduler unavailable.");
}
// Delete all jobs for this request execution
Batch batch = requestExecution.getBatch();
if (batch != null) {
List<BatchRequest> batchRequests = batch.getBatchRequests();
if (batchRequests != null) {
for (BatchRequest batchRequest : batchRequests) {
String jobName = getJobName(requestExecution.getId(),
batchRequest.getOrderId());
LOG.debug("Deleting Job, jobName = {}", jobName);
try {
executionScheduler.deleteJob(JobKey.jobKey(jobName,
ExecutionJob.LINEAR_EXECUTION_JOB_GROUP));
} catch (SchedulerException e) {
LOG.warn("Unable to delete job, " + jobName, e);
throw new AmbariException(e.getMessage());
}
}
}
}
} |
223551095_2183 | public Long executeBatchRequest(long executionId,
long batchId,
String clusterName) throws AmbariException {
String type = null;
String uri = null;
String body = null;
try {
RequestExecution requestExecution = clusters.getCluster(clusterName).getAllRequestExecutions().get(executionId);
BatchRequest batchRequest = requestExecution.getBatchRequest(batchId);
type = batchRequest.getType();
uri = batchRequest.getUri();
body = requestExecution.getRequestBody(batchId);
BatchRequestResponse batchRequestResponse = performApiRequest(uri, body, type, requestExecution.getAuthenticatedUserId());
updateBatchRequest(executionId, batchId, clusterName, batchRequestResponse, false);
if (batchRequestResponse.getRequestId() != null) {
actionDBAccessor.setSourceScheduleForRequest(batchRequestResponse.getRequestId(), executionId);
}
return batchRequestResponse.getRequestId();
} catch (Exception e) {
throw new AmbariException("Exception occurred while performing request", e);
}
} |
223551095_2184 | public void updateBatchRequest(long executionId, long batchId, String clusterName,
BatchRequestResponse batchRequestResponse,
boolean statusOnly) throws AmbariException {
Cluster cluster = clusters.getCluster(clusterName);
RequestExecution requestExecution = cluster.getAllRequestExecutions().get(executionId);
if (requestExecution == null) {
throw new AmbariException("Unable to find request schedule with id = "
+ executionId);
}
requestExecution.updateBatchRequest(batchId, batchRequestResponse, statusOnly);
} |
223551095_2186 | public boolean hasToleranceThresholdExceeded(Long executionId,
String clusterName, Map<String, Integer> taskCounts) throws AmbariException {
Cluster cluster = clusters.getCluster(clusterName);
RequestExecution requestExecution = cluster.getAllRequestExecutions().get(executionId);
if (requestExecution == null) {
throw new AmbariException("Unable to find request schedule with id = "
+ executionId);
}
BatchSettings batchSettings = requestExecution.getBatch().getBatchSettings();
if (batchSettings != null
&& batchSettings.getTaskFailureToleranceLimit() != null) {
return taskCounts.get(BatchRequestJob.BATCH_REQUEST_FAILED_TASKS_KEY) >
batchSettings.getTaskFailureToleranceLimit();
}
return false;
} |
223551095_2187 | public void finalizeBatch(long executionId, String clusterName)
throws AmbariException {
Cluster cluster = clusters.getCluster(clusterName);
RequestExecution requestExecution = cluster.getAllRequestExecutions().get(executionId);
if (requestExecution == null) {
throw new AmbariException("Unable to find request schedule with id = "
+ executionId);
}
Batch batch = requestExecution.getBatch();
BatchRequest firstBatchRequest = null;
if (batch != null) {
List<BatchRequest> batchRequests = batch.getBatchRequests();
if (batchRequests != null && batchRequests.size() > 0) {
Collections.sort(batchRequests);
firstBatchRequest = batchRequests.get(0);
}
}
boolean markCompleted = false;
if (firstBatchRequest != null) {
String jobName = getJobName(executionId, firstBatchRequest.getOrderId());
JobKey jobKey = JobKey.jobKey(jobName, ExecutionJob.LINEAR_EXECUTION_JOB_GROUP);
JobDetail jobDetail;
try {
jobDetail = executionScheduler.getJobDetail(jobKey);
} catch (SchedulerException e) {
LOG.warn("Unable to retrieve job details from scheduler. job: " + jobKey, e);
return;
}
if (jobDetail != null) {
try {
List<? extends Trigger> triggers = executionScheduler.getTriggersForJob(jobKey);
if (triggers != null && triggers.size() > 0) {
if (triggers.size() > 1) {
throw new AmbariException("Too many triggers defined for job. " +
"job: " + jobKey);
}
Trigger trigger = triggers.get(0);
// Note: If next fire time is in the past, it could be a misfire
// If final fire time is null, means it is a forever running job
if (!trigger.mayFireAgain() ||
(trigger.getFinalFireTime() != null &&
!DateUtils.isFutureTime(trigger.getFinalFireTime()))) {
markCompleted = true;
}
} else {
// No triggers for job
markCompleted = true;
}
} catch (SchedulerException e) {
LOG.warn("Unable to retrieve triggers for job: " + jobKey, e);
return;
}
}
}
if (markCompleted) {
requestExecution.updateStatus(RequestExecution.Status.COMPLETED);
}
} |
223551095_2188 | public boolean continueOnMisfire(JobExecutionContext jobExecutionContext) {
if (jobExecutionContext != null) {
Date scheduledTime = jobExecutionContext.getScheduledFireTime();
Long diff = DateUtils.getDateDifferenceInMinutes(scheduledTime);
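// Tolerate the misfire only if it occurred within the configured toleration window (in minutes).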
return (diff < configuration.getExecutionSchedulerMisfireToleration());
}
return true;
} |
223551095_2189 | protected WebResource extendApiResource(WebResource webResource, String relativeUri) {
WebResource result = webResource;
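// Prepend the default API path unless the relative URI already carries an explicit API version.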
if (StringUtils.isNotEmpty(relativeUri) && !CONTAINS_API_VERSION_PATTERN.matcher(relativeUri).matches()) {
result = webResource.path(DEFAULT_API_PATH);
}
return result.path(relativeUri);
} |
223551095_2190 | public String getName() {
return name;
} |
223551095_2191 | public synchronized String getCommonName() {
if (commonName == null) {
// Strip version from the internal name
commonName = name.replaceAll("\\{(.+)\\}", "");
}
return commonName;
} |
223551095_2192 | @Override
public String getLabel() {
return label;
} |
223551095_2193 | @Override
public String getVersion() {
return version;
} |
223551095_2194 | @Override
public String getBuild() {
return build;
} |
223551095_2195 | public String getIcon() {
return icon;
} |
223551095_2196 | public String getIcon64() {
return icon64;
} |
223551095_2197 | public boolean isClusterConfigurable() {
return clusterConfigurable;
} |
223551095_2198 | public String getAmbariProperty(String key) {
return ambariConfiguration.getProperty(key);
} |
223551095_2199 | public ClassLoader getClassLoader() {
return classLoader;
} |
223551095_2200 | public String getArchive() {
return archive;
} |
223551095_2201 | public Configuration getAmbariConfiguration() {
return ambariConfiguration;
} |
223551095_2202 | public boolean isDeployed() {
return status.equals(ViewStatus.DEPLOYED);
} |
223551095_2203 | public void removePrivilege(PrivilegeEntity privilege) {
privileges.remove(privilege);
} |
223551095_2210 | public void setCustomCommandName(String customCommandName) {
this.customCommandName = customCommandName;
} |
223551095_2211 | public void setCommandDetail(String commandDetail) {
String truncatedCommandDetail = commandDetail;
if (commandDetail != null) {
if (commandDetail.length() > MAX_COMMAND_DETAIL_LENGTH) {
truncatedCommandDetail = commandDetail.substring(0, MAX_COMMAND_DETAIL_LENGTH) + "...";
}
}
this.commandDetail = truncatedCommandDetail;
} |
223551095_2212 | public void setOpsDisplayName(String opsDisplayName) {
this.opsDisplayName = opsDisplayName;
} |
223551095_2213 | public PrincipalType getPrincipalType() {
return principalType;
} |
223551095_2214 | public SyncType getSyncType() {
return syncType;
} |
223551095_2215 | public List<String> getPrincipalNames() {
return principalNames;
} |
223551095_2217 | public ViewEntity getViewEntity() {
return view;
} |
223551095_2218 | public InstanceConfig getConfiguration() {
return instanceConfig;
} |
223551095_2219 | public String getName() {
return name;
} |
223551095_2220 | @Override
public String getLabel() {
return label;
} |
223551095_2221 | @Override
public String getDescription() {
return description;
} |
223551095_2222 | @Override
public boolean isVisible() {
return visible == 'y' || visible == 'Y';
} |
223551095_2223 | public String getIcon() {
return icon;
} |
223551095_2224 | public boolean alterNames() {
return alterNames == 1;
} |
223551095_2225 | public String getIcon64() {
return icon64;
} |
223551095_2226 | public String getContextPath() {
return getContextPath(view.getCommonName(), view.getVersion(), getName());
} |
223551095_2227 | public void validate(ViewEntity viewEntity, Validator.ValidationContext context) throws ValidationException {
InstanceValidationResultImpl result = getValidationResult(viewEntity, context);
if (!result.isValid()) {
throw new ValidationException(result.toJson());
}
} |
223551095_2228 | public InstanceValidationResultImpl getValidationResult(ViewEntity viewEntity, Validator.ValidationContext context)
throws IllegalStateException {
Map<String, ValidationResult> propertyResults = new HashMap<>();
if (context.equals(Validator.ValidationContext.PRE_CREATE) ||
context.equals(Validator.ValidationContext.PRE_UPDATE)) {
// make sure that there is an instance property value defined
// for each required view parameter
Set<String> requiredParameterNames = new HashSet<>();
for (ViewParameterEntity parameter : viewEntity.getParameters()) {
if (parameter.isRequired()) {
// Don't enforce 'required' validation for cluster config parameters since
// the value will be obtained through cluster association, not user input
if (parameter.getClusterConfig() == null) {
requiredParameterNames.add(parameter.getName());
}
}
}
Map<String, String> propertyMap = getPropertyMap();
for (Map.Entry<String, String> entry : propertyMap.entrySet()) {
if (entry.getValue() != null) {
requiredParameterNames.remove(entry.getKey());
}
}
// required but missing instance properties...
for (String requiredParameterName : requiredParameterNames) {
propertyResults.put(requiredParameterName,
new ValidationResultImpl(false,
"No property values exist for the required parameter " + requiredParameterName + "."));
}
}
ValidationResult instanceResult = null;
Validator validator = viewEntity.getValidator();
// if the view provides its own validator, run it
if (validator != null) {
instanceResult = validator.validateInstance(this, context);
for ( String property : getPropertyMap().keySet()) {
if (!propertyResults.containsKey(property)) {
propertyResults.put(property,
ValidationResultImpl.create(validator.validateProperty(property, this, context)));
}
}
}
return new InstanceValidationResultImpl(ValidationResultImpl.create(instanceResult), propertyResults);
} |
223551095_2229 | public Collection<HostComponentDesiredStateEntity> getHostComponentDesiredStateEntities() {
return Collections.unmodifiableCollection(hostComponentDesiredStateEntities);
} |
223551095_2230 | public void addHostComponentDesiredStateEntity(HostComponentDesiredStateEntity stateEntity) {
hostComponentDesiredStateEntities.add(stateEntity);
} |
223551095_2231 | public void removeHostComponentDesiredStateEntity(HostComponentDesiredStateEntity stateEntity) {
hostComponentDesiredStateEntities.remove(stateEntity);
} |
223551095_2232 | public Collection<HostComponentStateEntity> getHostComponentStateEntities() {
return Collections.unmodifiableCollection(hostComponentStateEntities);
} |
223551095_2233 | public void addHostComponentStateEntity(HostComponentStateEntity stateEntity) {
hostComponentStateEntities.add(stateEntity);
} |
223551095_2234 | public void removeHostComponentStateEntity(HostComponentStateEntity stateEntity) {
hostComponentStateEntities.remove(stateEntity);
} |
223551095_2235 | @Override
@Transactional
public void create(RepositoryVersionEntity entity){
super.create(entity);
} |
223551095_2236 | @RequiresSession
public RepositoryVersionEntity findByDisplayName(String displayName) {
// TODO, this assumes that the display name is unique, but neither the code nor the DB schema enforces this.
final TypedQuery<RepositoryVersionEntity> query = entityManagerProvider.get().createNamedQuery("repositoryVersionByDisplayName", RepositoryVersionEntity.class);
query.setParameter("displayname", displayName);
return daoUtils.selectSingle(query);
} |
223551095_2237 | @RequiresSession
public RepositoryVersionEntity findByStackAndVersion(StackId stackId,
String version) {
return findByStackNameAndVersion(stackId.getStackName(), version);
} |
223551095_2238 | @RequiresSession
public List<RepositoryVersionEntity> findByStack(StackId stackId) {
final TypedQuery<RepositoryVersionEntity> query = entityManagerProvider.get().createNamedQuery("repositoryVersionByStack", RepositoryVersionEntity.class);
query.setParameter("stackName", stackId.getStackName());
query.setParameter("stackVersion", stackId.getStackVersion());
return daoUtils.selectList(query);
} |
223551095_2240 | @RequiresSession
public List<RepositoryVersionEntity> findByStackAndType(StackId stackId, RepositoryType type) {
final TypedQuery<RepositoryVersionEntity> query = entityManagerProvider.get().createNamedQuery(
"repositoryVersionByStackAndType", RepositoryVersionEntity.class);
query.setParameter("stackName", stackId.getStackName());
query.setParameter("stackVersion", stackId.getStackVersion());
query.setParameter("type", type);
return daoUtils.selectList(query);
} |
223551095_2241 | @Transactional
public void create(HostConfigMapping hostConfigMapping) {
populateCache();
//create in db
entityManagerProvider.get().persist(buildHostConfigMappingEntity(hostConfigMapping));
//create in cache
Long hostId = hostConfigMapping.getHostId();
if (hostId != null) {
Set<HostConfigMapping> set;
if (hostConfigMappingByHost.containsKey(hostId)) {
set = hostConfigMappingByHost.get(hostId);
} else {
set = new HashSet<>();
hostConfigMappingByHost.put(hostId, set);
}
set.add(hostConfigMapping);
}
} |
223551095_2242 | @RequiresSession
public Set<HostConfigMapping> findByType(final long clusterId, Long hostId, final String type) {
populateCache();
if (!hostConfigMappingByHost.containsKey(hostId)) {
return Collections.emptySet();
}
Set<HostConfigMapping> set = new HashSet<>(hostConfigMappingByHost.get(hostId));
CollectionUtils.filter(set, new Predicate() {
@Override
public boolean evaluate(Object arg0) {
return ((HostConfigMapping) arg0).getClusterId().equals(clusterId)
&& ((HostConfigMapping) arg0).getType().equals(type);
}
});
return set;
} |